| language | repo | path | class_span | source | target |
|---|---|---|---|---|---|
python
|
networkx__networkx
|
networkx/algorithms/flow/utils.py
|
{
"start": 281,
"end": 1084
}
|
class ____:
"""Mechanism for iterating over out-edges incident to a node in a circular
manner. StopIteration exception is raised when wraparound occurs.
"""
__slots__ = ("_edges", "_it", "_curr")
def __init__(self, edges):
self._edges = edges
if self._edges:
self._rewind()
def get(self):
return self._curr
def move_to_next(self):
try:
self._curr = next(self._it)
except StopIteration:
self._rewind()
raise
def _rewind(self):
self._it = iter(self._edges.items())
self._curr = next(self._it)
def __eq__(self, other):
        return (getattr(self, "_curr", None), self._edges) == (
            getattr(other, "_curr", None),
            other._edges,
        )
|
CurrentEdge
|
python
|
tensorflow__tensorflow
|
tensorflow/python/keras/legacy_tf_layers/convolutional.py
|
{
"start": 9906,
"end": 19421
}
|
class ____(keras_layers.Conv2D, base.Layer):
"""2D convolution layer (e.g. spatial convolution over images).
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True (and a `bias_initializer` is provided),
a bias vector is created and added to the outputs. Finally, if
`activation` is not `None`, it is applied to the outputs as well.
Args:
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of 2 integers, specifying the
height and width of the 2D convolution window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 2 integers,
specifying the strides of the convolution along the height and width.
Can be a single integer to specify the same value for
all spatial dimensions.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
`"valid"` means no padding. `"same"` results in padding evenly to
the left/right or up/down of the input such that output has the same
height/width dimension as the input.
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
dilation_rate: An integer or tuple/list of 2 integers, specifying
the dilation rate to use for dilated convolution.
Can be a single integer to specify the same value for
all spatial dimensions.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any stride value != 1.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If None, the default
initializer will be used.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
"""
def __init__(self, filters,
kernel_size,
strides=(1, 1),
padding='valid',
data_format='channels_last',
dilation_rate=(1, 1),
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
**kwargs):
super(Conv2D, self).__init__(
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name, **kwargs)
def conv2d(inputs,
filters,
kernel_size,
strides=(1, 1),
padding='valid',
data_format='channels_last',
dilation_rate=(1, 1),
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
reuse=None):
"""Functional interface for the 2D convolution layer.
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True (and a `bias_initializer` is provided),
a bias vector is created and added to the outputs. Finally, if
`activation` is not `None`, it is applied to the outputs as well.
Args:
inputs: Tensor input.
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of 2 integers, specifying the
height and width of the 2D convolution window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 2 integers,
specifying the strides of the convolution along the height and width.
Can be a single integer to specify the same value for
all spatial dimensions.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
`"valid"` means no padding. `"same"` results in padding evenly to
the left/right or up/down of the input such that output has the same
height/width dimension as the input.
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
dilation_rate: An integer or tuple/list of 2 integers, specifying
the dilation rate to use for dilated convolution.
Can be a single integer to specify the same value for
all spatial dimensions.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any stride value != 1.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If None, the default
initializer will be used.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
Output tensor.
Raises:
ValueError: if eager execution is enabled.
"""
warnings.warn('`tf.layers.conv2d` is deprecated and '
'will be removed in a future version. '
'Please Use `tf.keras.layers.Conv2D` instead.')
layer = Conv2D(
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name,
_reuse=reuse,
_scope=name)
return layer.apply(inputs)
|
Conv2D
|
python
|
ijl__orjson
|
test/test_enum.py
|
{
"start": 249,
"end": 296
}
|
class ____(enum.IntFlag):
ONE = 1
|
IntFlagEnum
|
python
|
langchain-ai__langchain
|
libs/core/langchain_core/load/load.py
|
{
"start": 1110,
"end": 9295
}
|
class ____:
"""Reviver for JSON objects."""
def __init__(
self,
secrets_map: dict[str, str] | None = None,
valid_namespaces: list[str] | None = None,
secrets_from_env: bool = True, # noqa: FBT001,FBT002
additional_import_mappings: dict[tuple[str, ...], tuple[str, ...]]
| None = None,
*,
ignore_unserializable_fields: bool = False,
) -> None:
"""Initialize the reviver.
Args:
secrets_map: A map of secrets to load.
If a secret is not found in the map, it will be loaded from the
environment if `secrets_from_env` is `True`.
valid_namespaces: A list of additional namespaces (modules)
to allow to be deserialized.
secrets_from_env: Whether to load secrets from the environment.
            additional_import_mappings: A dictionary of additional namespace mappings.
                You can use this to override default mappings or add new mappings.
ignore_unserializable_fields: Whether to ignore unserializable fields.
"""
self.secrets_from_env = secrets_from_env
self.secrets_map = secrets_map or {}
# By default, only support langchain, but user can pass in additional namespaces
self.valid_namespaces = (
[*DEFAULT_NAMESPACES, *valid_namespaces]
if valid_namespaces
else DEFAULT_NAMESPACES
)
self.additional_import_mappings = additional_import_mappings or {}
self.import_mappings = (
{
**ALL_SERIALIZABLE_MAPPINGS,
**self.additional_import_mappings,
}
if self.additional_import_mappings
else ALL_SERIALIZABLE_MAPPINGS
)
self.ignore_unserializable_fields = ignore_unserializable_fields
def __call__(self, value: dict[str, Any]) -> Any:
"""Revive the value.
Args:
value: The value to revive.
Returns:
The revived value.
Raises:
ValueError: If the namespace is invalid.
ValueError: If trying to deserialize something that cannot
be deserialized in the current version of langchain-core.
NotImplementedError: If the object is not implemented and
`ignore_unserializable_fields` is False.
"""
if (
value.get("lc") == 1
and value.get("type") == "secret"
and value.get("id") is not None
):
[key] = value["id"]
if key in self.secrets_map:
return self.secrets_map[key]
if self.secrets_from_env and key in os.environ and os.environ[key]:
return os.environ[key]
return None
if (
value.get("lc") == 1
and value.get("type") == "not_implemented"
and value.get("id") is not None
):
if self.ignore_unserializable_fields:
return None
msg = (
"Trying to load an object that doesn't implement "
f"serialization: {value}"
)
raise NotImplementedError(msg)
if (
value.get("lc") == 1
and value.get("type") == "constructor"
and value.get("id") is not None
):
[*namespace, name] = value["id"]
mapping_key = tuple(value["id"])
if (
namespace[0] not in self.valid_namespaces
# The root namespace ["langchain"] is not a valid identifier.
or namespace == ["langchain"]
):
msg = f"Invalid namespace: {value}"
raise ValueError(msg)
# Has explicit import path.
if mapping_key in self.import_mappings:
import_path = self.import_mappings[mapping_key]
# Split into module and name
import_dir, name = import_path[:-1], import_path[-1]
# Import module
mod = importlib.import_module(".".join(import_dir))
elif namespace[0] in DISALLOW_LOAD_FROM_PATH:
msg = (
"Trying to deserialize something that cannot "
"be deserialized in current version of langchain-core: "
f"{mapping_key}."
)
raise ValueError(msg)
# Otherwise, treat namespace as path.
else:
mod = importlib.import_module(".".join(namespace))
cls = getattr(mod, name)
# The class must be a subclass of Serializable.
if not issubclass(cls, Serializable):
msg = f"Invalid namespace: {value}"
raise ValueError(msg)
# We don't need to recurse on kwargs
# as json.loads will do that for us.
kwargs = value.get("kwargs", {})
return cls(**kwargs)
return value
@beta()
def loads(
text: str,
*,
secrets_map: dict[str, str] | None = None,
valid_namespaces: list[str] | None = None,
secrets_from_env: bool = True,
additional_import_mappings: dict[tuple[str, ...], tuple[str, ...]] | None = None,
ignore_unserializable_fields: bool = False,
) -> Any:
"""Revive a LangChain class from a JSON string.
Equivalent to `load(json.loads(text))`.
Args:
text: The string to load.
secrets_map: A map of secrets to load.
If a secret is not found in the map, it will be loaded from the environment
if `secrets_from_env` is `True`.
valid_namespaces: A list of additional namespaces (modules)
to allow to be deserialized.
secrets_from_env: Whether to load secrets from the environment.
        additional_import_mappings: A dictionary of additional namespace mappings.
            You can use this to override default mappings or add new mappings.
ignore_unserializable_fields: Whether to ignore unserializable fields.
Returns:
Revived LangChain objects.
"""
return json.loads(
text,
object_hook=Reviver(
secrets_map,
valid_namespaces,
secrets_from_env,
additional_import_mappings,
ignore_unserializable_fields=ignore_unserializable_fields,
),
)
@beta()
def load(
obj: Any,
*,
secrets_map: dict[str, str] | None = None,
valid_namespaces: list[str] | None = None,
secrets_from_env: bool = True,
additional_import_mappings: dict[tuple[str, ...], tuple[str, ...]] | None = None,
ignore_unserializable_fields: bool = False,
) -> Any:
"""Revive a LangChain class from a JSON object.
Use this if you already have a parsed JSON object,
eg. from `json.load` or `orjson.loads`.
Args:
obj: The object to load.
secrets_map: A map of secrets to load.
If a secret is not found in the map, it will be loaded from the environment
if `secrets_from_env` is `True`.
valid_namespaces: A list of additional namespaces (modules)
to allow to be deserialized.
secrets_from_env: Whether to load secrets from the environment.
        additional_import_mappings: A dictionary of additional namespace mappings.
            You can use this to override default mappings or add new mappings.
ignore_unserializable_fields: Whether to ignore unserializable fields.
Returns:
Revived LangChain objects.
"""
reviver = Reviver(
secrets_map,
valid_namespaces,
secrets_from_env,
additional_import_mappings,
ignore_unserializable_fields=ignore_unserializable_fields,
)
def _load(obj: Any) -> Any:
if isinstance(obj, dict):
# Need to revive leaf nodes before reviving this node
loaded_obj = {k: _load(v) for k, v in obj.items()}
return reviver(loaded_obj)
if isinstance(obj, list):
return [_load(o) for o in obj]
return obj
return _load(obj)
|
Reviver
|
python
|
urllib3__urllib3
|
src/urllib3/http2/connection.py
|
{
"start": 11712,
"end": 12674
}
|
class ____(BaseHTTPResponse):
# TODO: This is a woefully incomplete response object, but works for non-streaming.
def __init__(
self,
status: int,
headers: HTTPHeaderDict,
request_url: str,
data: bytes,
decode_content: bool = False, # TODO: support decoding
) -> None:
super().__init__(
status=status,
headers=headers,
# Following CPython, we map HTTP versions to major * 10 + minor integers
version=20,
version_string="HTTP/2",
# No reason phrase in HTTP/2
reason=None,
decode_content=decode_content,
request_url=request_url,
)
self._data = data
self.length_remaining = 0
@property
def data(self) -> bytes:
return self._data
def get_redirect_location(self) -> None:
return None
def close(self) -> None:
pass
|
HTTP2Response
|
python
|
ansible__ansible
|
lib/ansible/playbook/attribute.py
|
{
"start": 5611,
"end": 5668
}
|
class ____(Attribute):
...
|
NonInheritableFieldAttribute
|
python
|
huggingface__transformers
|
tests/models/sam2_video/test_processor_sam2_video.py
|
{
"start": 1039,
"end": 5191
}
|
class ____(ProcessorTesterMixin, unittest.TestCase):
processor_class = Sam2VideoProcessor
@unittest.skip("Sam2VideoProcessor call take in images only")
def test_processor_with_multiple_inputs(self):
pass
def prepare_image_inputs(self):
"""This function prepares a list of PIL images, or a list of numpy arrays if one specifies numpify=True,
or a list of PyTorch tensors if one specifies torchify=True.
"""
image_inputs = torch.randint(0, 256, size=(1, 3, 30, 400), dtype=torch.uint8)
# image_inputs = [Image.fromarray(np.moveaxis(x, 0, -1)) for x in image_inputs]
return image_inputs
def prepare_mask_inputs(self):
"""This function prepares a list of PIL images, or a list of numpy arrays if one specifies numpify=True,
or a list of PyTorch tensors if one specifies torchify=True.
"""
mask_inputs = torch.randint(0, 256, size=(1, 30, 400), dtype=torch.uint8)
# mask_inputs = [Image.fromarray(x) for x in mask_inputs]
return mask_inputs
def test_image_processor_no_masks(self):
image_processor = self.get_component("image_processor")
video_processor = self.get_component("video_processor")
processor = Sam2VideoProcessor(image_processor=image_processor, video_processor=video_processor)
image_input = self.prepare_image_inputs()
input_feat_extract = image_processor(image_input)
input_processor = processor(images=image_input)
for key in input_feat_extract.keys():
if key == "pixel_values":
for input_feat_extract_item, input_processor_item in zip(
input_feat_extract[key], input_processor[key]
):
np.testing.assert_array_equal(input_feat_extract_item, input_processor_item)
else:
self.assertEqual(input_feat_extract[key], input_processor[key])
for image in input_feat_extract.pixel_values:
self.assertEqual(image.shape, (3, 1024, 1024))
for original_size in input_feat_extract.original_sizes:
np.testing.assert_array_equal(original_size, np.array([30, 400]))
def test_image_processor_with_masks(self):
image_processor = self.get_component("image_processor")
video_processor = self.get_component("video_processor")
processor = Sam2VideoProcessor(image_processor=image_processor, video_processor=video_processor)
image_input = self.prepare_image_inputs()
mask_input = self.prepare_mask_inputs()
input_feat_extract = image_processor(images=image_input, segmentation_maps=mask_input, return_tensors="pt")
input_processor = processor(images=image_input, segmentation_maps=mask_input, return_tensors="pt")
for key in input_feat_extract.keys():
self.assertAlmostEqual(input_feat_extract[key].sum(), input_processor[key].sum(), delta=1e-2)
for label in input_feat_extract.labels:
self.assertEqual(label.shape, (256, 256))
@require_torch
def test_post_process_masks(self):
image_processor = self.get_component("image_processor")
video_processor = self.get_component("video_processor")
processor = Sam2VideoProcessor(image_processor=image_processor, video_processor=video_processor)
dummy_masks = [torch.ones((1, 3, 5, 5))]
original_sizes = [[1764, 2646]]
masks = processor.post_process_masks(dummy_masks, original_sizes)
self.assertEqual(masks[0].shape, (1, 3, 1764, 2646))
masks = processor.post_process_masks(dummy_masks, torch.tensor(original_sizes))
self.assertEqual(masks[0].shape, (1, 3, 1764, 2646))
# should also work with np
dummy_masks = [np.ones((1, 3, 5, 5))]
masks = processor.post_process_masks(dummy_masks, np.array(original_sizes))
self.assertEqual(masks[0].shape, (1, 3, 1764, 2646))
dummy_masks = [[1, 0], [0, 1]]
with self.assertRaises(TypeError):
masks = processor.post_process_masks(dummy_masks, np.array(original_sizes))
|
Sam2VideoProcessorTest
|
python
|
apache__airflow
|
dev/breeze/src/airflow_breeze/utils/release_validator.py
|
{
"start": 1395,
"end": 5492
}
|
class ____(ABC):
def __init__(
self,
version: str,
svn_path: Path,
airflow_repo_root: Path,
):
self.version = version
self.svn_path = svn_path
self.airflow_repo_root = airflow_repo_root
self.results: list[ValidationResult] = []
@abstractmethod
def get_distribution_name(self) -> str:
pass
@abstractmethod
def get_svn_directory(self) -> Path:
pass
@abstractmethod
def get_expected_files(self) -> list[str]:
pass
@abstractmethod
def build_packages(self) -> bool:
pass
@abstractmethod
def validate_svn_files(self) -> ValidationResult:
pass
@abstractmethod
def validate_reproducible_build(self) -> ValidationResult:
pass
@abstractmethod
def validate_signatures(self) -> ValidationResult:
pass
@abstractmethod
def validate_checksums(self) -> ValidationResult:
pass
@abstractmethod
def validate_licenses(self) -> ValidationResult:
pass
@property
def check_methods(self) -> dict[CheckType, Callable]:
return {
CheckType.SVN: self.validate_svn_files,
CheckType.REPRODUCIBLE_BUILD: self.validate_reproducible_build,
CheckType.SIGNATURES: self.validate_signatures,
CheckType.CHECKSUMS: self.validate_checksums,
CheckType.LICENSES: self.validate_licenses,
}
def validate(self, checks: list[CheckType] | None = None) -> bool:
if checks is None:
checks = [
CheckType.SVN,
CheckType.REPRODUCIBLE_BUILD,
CheckType.SIGNATURES,
CheckType.CHECKSUMS,
CheckType.LICENSES,
]
console_print(f"\n[bold cyan]Validating {self.get_distribution_name()} {self.version}[/bold cyan]")
console_print(f"SVN Path: {self.svn_path}")
console_print(f"Airflow Root: {self.airflow_repo_root}")
for check_type in checks:
if check_type in self.check_methods:
result = self.check_methods[check_type]()
self.results.append(result)
self._print_summary()
return all(r.passed for r in self.results)
def _print_result(self, result: ValidationResult):
status = "[green]PASSED[/green]" if result.passed else "[red]FAILED[/red]"
console_print(f"Status: {status} - {result.message}")
if result.details:
for detail in result.details:
console_print(f" {detail}")
if result.duration_seconds:
console_print(f"Duration: {result.duration_seconds:.1f}s")
def _print_summary(self):
console_print("\n" + "=" * 70)
passed_count = sum(1 for r in self.results if r.passed)
total_count = len(self.results)
if passed_count == total_count:
console_print(f"[bold green]ALL CHECKS PASSED ({passed_count}/{total_count})[/bold green]")
console_print("\nYou can vote +1 (binding) on this release.")
else:
failed_count = total_count - passed_count
console_print(
f"[bold red]SOME CHECKS FAILED ({failed_count} failed, {passed_count} passed)[/bold red]"
)
console_print("\nFailed checks:")
for result in self.results:
if not result.passed:
console_print(f" - {result.check_type.value}: {result.message}")
console_print("\nPlease review failures above before voting.")
total_duration = sum(r.duration_seconds or 0 for r in self.results)
console_print(f"\nTotal validation time: {total_duration:.1f}s")
console_print("=" * 70)
def _strip_rc_suffix(self, version: str) -> str:
return re.sub(r"rc\d+$", "", version)
def _get_version_suffix(self) -> str:
if "rc" in self.version:
match = re.search(r"(rc\d+)$", self.version)
if match:
return match.group(1)
return ""
|
ReleaseValidator
|
python
|
sqlalchemy__sqlalchemy
|
test/dialect/postgresql/test_types.py
|
{
"start": 187320,
"end": 187397
}
|
class ____(_NumRangeTests, _RangeTypeRoundTrip):
pass
|
NumRangeRoundTripTest
|
python
|
pallets__werkzeug
|
src/werkzeug/datastructures/structures.py
|
{
"start": 18773,
"end": 19922
}
|
class ____(t.Generic[K, V]):
"""Wraps values in the :class:`OrderedMultiDict`. This makes it
possible to keep an order over multiple different keys. It requires
a lot of extra memory and slows down access a lot, but makes it
possible to access elements in O(1) and iterate in O(n).
"""
__slots__ = ("prev", "key", "value", "next")
def __init__(self, omd: _OrderedMultiDict[K, V], key: K, value: V) -> None:
self.prev: _omd_bucket[K, V] | None = omd._last_bucket
self.key: K = key
self.value: V = value
self.next: _omd_bucket[K, V] | None = None
if omd._first_bucket is None:
omd._first_bucket = self
if omd._last_bucket is not None:
omd._last_bucket.next = self
omd._last_bucket = self
def unlink(self, omd: _OrderedMultiDict[K, V]) -> None:
if self.prev:
self.prev.next = self.next
if self.next:
self.next.prev = self.prev
if omd._first_bucket is self:
omd._first_bucket = self.next
if omd._last_bucket is self:
omd._last_bucket = self.prev
|
_omd_bucket
|
python
|
huggingface__transformers
|
src/transformers/models/bros/modeling_bros.py
|
{
"start": 4377,
"end": 6956
}
|
class ____(nn.Module):
"""Construct the embeddings from word, position and token_type embeddings."""
def __init__(self, config):
super().__init__()
self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id)
self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size)
self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.hidden_size)
self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
self.dropout = nn.Dropout(config.hidden_dropout_prob)
# position_ids (1, len position emb) is contiguous in memory and exported when serialized
self.register_buffer("position_ids", torch.arange(config.max_position_embeddings).expand((1, -1)))
self.register_buffer(
"token_type_ids",
torch.zeros(
self.position_ids.size(),
dtype=torch.long,
device=self.position_ids.device,
),
persistent=False,
)
def forward(
self,
input_ids: Optional[torch.Tensor] = None,
token_type_ids: Optional[torch.Tensor] = None,
position_ids: Optional[torch.Tensor] = None,
inputs_embeds: Optional[torch.Tensor] = None,
) -> torch.Tensor:
if input_ids is not None:
input_shape = input_ids.size()
else:
input_shape = inputs_embeds.size()[:-1]
seq_length = input_shape[1]
if position_ids is None:
position_ids = self.position_ids[:, :seq_length]
if token_type_ids is None:
if hasattr(self, "token_type_ids"):
buffered_token_type_ids = self.token_type_ids[:, :seq_length]
buffered_token_type_ids_expanded = buffered_token_type_ids.expand(input_shape[0], seq_length)
token_type_ids = buffered_token_type_ids_expanded
else:
token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=self.position_ids.device)
if inputs_embeds is None:
inputs_embeds = self.word_embeddings(input_ids)
token_type_embeddings = self.token_type_embeddings(token_type_ids)
embeddings = inputs_embeds + token_type_embeddings
position_embeddings = self.position_embeddings(position_ids)
embeddings += position_embeddings
embeddings = self.LayerNorm(embeddings)
embeddings = self.dropout(embeddings)
return embeddings
|
BrosTextEmbeddings
|
python
|
astropy__astropy
|
astropy/io/votable/exceptions.py
|
{
"start": 46155,
"end": 46532
}
|
class ____(VOWarning, ValueError):
"""All ``COOSYS`` elements must have an ``ID`` attribute.
Note that the VOTable 1.1 specification says this attribute is
optional, but its corresponding schema indicates it is required.
In VOTable 1.2, the ``COOSYS`` element is deprecated.
"""
message_template = "ID attribute is required for all COOSYS elements"
|
E15
|
python
|
sympy__sympy
|
sympy/physics/optics/gaussopt.py
|
{
"start": 4657,
"end": 5126
}
|
class ____(RayTransferMatrix):
"""
Ray Transfer Matrix for free space.
Parameters
==========
distance
See Also
========
RayTransferMatrix
Examples
========
>>> from sympy.physics.optics import FreeSpace
>>> from sympy import symbols
>>> d = symbols('d')
>>> FreeSpace(d)
Matrix([
[1, d],
[0, 1]])
"""
def __new__(cls, d):
return RayTransferMatrix.__new__(cls, 1, d, 0, 1)
|
FreeSpace
|
python
|
dagster-io__dagster
|
python_modules/dagster/dagster/_core/definitions/declarative_automation/operators/since_operator.py
|
{
"start": 3386,
"end": 9079
}
|
class ____(BuiltinAutomationCondition[T_EntityKey]):
trigger_condition: AutomationCondition[T_EntityKey]
reset_condition: AutomationCondition[T_EntityKey]
@property
def name(self) -> str:
return "SINCE"
@property
def children(self) -> Sequence[AutomationCondition[T_EntityKey]]:
return [self.trigger_condition, self.reset_condition]
def get_node_unique_id(
self,
*,
parent_unique_id: Optional[str],
index: Optional[int],
target_key: Optional[EntityKey],
) -> str:
# since conditions should have stable cursoring logic regardless of where they
# exist in the broader condition tree, as they're always evaluated over the entire
# subset
return self._get_stable_unique_id(target_key)
def get_backcompat_node_unique_ids(
self,
*,
parent_unique_id: Optional[str] = None,
index: Optional[int] = None,
target_key: Optional[EntityKey] = None,
) -> Sequence[str]:
return [
# get the standard globally-aware unique id for backcompat purposes
super().get_node_unique_id(
parent_unique_id=parent_unique_id, index=index, target_key=target_key
)
]
async def evaluate( # pyright: ignore[reportIncompatibleMethodOverride]
self, context: AutomationContext[T_EntityKey]
) -> AutomationResult[T_EntityKey]:
# must evaluate child condition over the entire subset to avoid missing state transitions
child_candidate_subset = context.asset_graph_view.get_full_subset(key=context.key)
# compute result for trigger and reset conditions
trigger_result, reset_result = await asyncio.gather(
*[
context.for_child_condition(
self.trigger_condition,
child_indices=[0],
candidate_subset=child_candidate_subset,
).evaluate_async(),
context.for_child_condition(
self.reset_condition,
child_indices=[1],
candidate_subset=child_candidate_subset,
).evaluate_async(),
]
)
# take the previous subset that this was true for
true_subset = context.previous_true_subset or context.get_empty_subset()
# add in any newly true trigger asset partitions
true_subset = true_subset.compute_union(trigger_result.true_subset)
# remove any newly true reset asset partitions
true_subset = true_subset.compute_difference(reset_result.true_subset)
# if anything changed since the previous evaluation, update the metadata
condition_data = SinceConditionData.from_metadata(context.previous_metadata).update(
context.evaluation_id,
context.evaluation_time.timestamp(),
trigger_result=trigger_result,
reset_result=reset_result,
)
return AutomationResult(
context=context,
true_subset=true_subset,
child_results=[trigger_result, reset_result],
metadata=condition_data.to_metadata(),
)
def replace(
self, old: Union[AutomationCondition, str], new: T_AutomationCondition
) -> Union[Self, T_AutomationCondition]:
"""Replaces all instances of ``old`` across any sub-conditions with ``new``.
If ``old`` is a string, then conditions with a label or name matching
that string will be replaced.
Args:
old (Union[AutomationCondition, str]): The condition to replace.
new (AutomationCondition): The condition to replace with.
"""
return (
new
if old in [self, self.name, self.get_label()]
else copy(
self,
trigger_condition=self.trigger_condition.replace(old, new),
reset_condition=self.reset_condition.replace(old, new),
)
)
@public
def allow(self, selection: "AssetSelection") -> "SinceCondition":
"""Applies the ``.allow()`` method across all sub-conditions.
This impacts any dep-related sub-conditions.
Args:
selection (AssetSelection): The selection to allow.
"""
from dagster._core.definitions.asset_selection import AssetSelection
check.inst_param(selection, "selection", AssetSelection)
return copy(
self,
trigger_condition=self.trigger_condition.allow(selection)
if has_allow_ignore(self.trigger_condition)
else self.trigger_condition,
reset_condition=self.reset_condition.allow(selection)
if has_allow_ignore(self.reset_condition)
else self.reset_condition,
)
@public
def ignore(self, selection: "AssetSelection") -> "SinceCondition":
"""Applies the ``.ignore()`` method across all sub-conditions.
This impacts any dep-related sub-conditions.
Args:
selection (AssetSelection): The selection to ignore.
"""
from dagster._core.definitions.asset_selection import AssetSelection
check.inst_param(selection, "selection", AssetSelection)
return copy(
self,
trigger_condition=self.trigger_condition.ignore(selection)
if has_allow_ignore(self.trigger_condition)
else self.trigger_condition,
reset_condition=self.reset_condition.ignore(selection)
if has_allow_ignore(self.reset_condition)
else self.reset_condition,
)
|
SinceCondition
|
python
|
ray-project__ray
|
rllib/examples/multi_agent/utils/self_play_league_based_callback.py
|
{
"start": 266,
"end": 13494
}
|
class ____(RLlibCallback):
def __init__(self, win_rate_threshold):
super().__init__()
# All policies in the league.
self.main_policies = {"main", "main_0"}
self.main_exploiters = {"main_exploiter_0", "main_exploiter_1"}
self.league_exploiters = {"league_exploiter_0", "league_exploiter_1"}
# Set of currently trainable policies in the league.
self.trainable_policies = {"main"}
# Set of currently non-trainable (frozen) policies in the league.
self.non_trainable_policies = {
"main_0",
"league_exploiter_0",
"main_exploiter_0",
}
        # The win-rate threshold; reaching it leads to a new module (a frozen
        # copy of `main`) being added to the league.
self.win_rate_threshold = win_rate_threshold
# Store the win rates for league overview printouts.
self.win_rates = {}
# Report the matchup counters (who played against whom?).
self._matching_stats = defaultdict(int)
def on_episode_end(
self,
*,
episode,
env_runner,
metrics_logger,
env,
env_index,
rl_module,
**kwargs,
) -> None:
num_learning_policies = (
episode.module_for(0) in env_runner.config.policies_to_train
) + (episode.module_for(1) in env_runner.config.policies_to_train)
# Make sure the mapping function doesn't match two non-trainables together.
# This would be a waste of EnvRunner resources.
# assert num_learning_policies > 0
# Ignore matches between two learning policies and don't count win-rates for
# these.
assert num_learning_policies > 0, (
f"agent=0 -> mod={episode.module_for(0)}; "
f"agent=1 -> mod={episode.module_for(1)}; "
f"EnvRunner.config.policies_to_train={env_runner.config.policies_to_train}"
)
if num_learning_policies == 1:
# Compute the win rate for this episode (only looking at non-trained
# opponents, such as random or frozen policies) and log it with some window.
rewards_dict = episode.get_rewards()
for aid, rewards in rewards_dict.items():
mid = episode.module_for(aid)
won = rewards[-1] == 1.0
metrics_logger.log_value(
f"win_rate_{mid}",
won,
window=100,
)
def on_train_result(self, *, algorithm, metrics_logger=None, result, **kwargs):
local_worker = algorithm.env_runner
# Avoid `self` being pickled into the remote function below.
_trainable_policies = self.trainable_policies
# Get the win rate for the train batch.
# Note that normally, one should set up a proper evaluation config,
# such that evaluation always happens on the already updated policy,
# instead of on the already used train_batch.
league_changed = False
keys = [
k for k in result[ENV_RUNNER_RESULTS].keys() if k.startswith("win_rate_")
]
for key in keys:
module_id = key[9:]
self.win_rates[module_id] = result[ENV_RUNNER_RESULTS][key]
# Policy is frozen; ignore.
if module_id in self.non_trainable_policies:
continue
print(
f"Iter={algorithm.iteration} {module_id}'s "
f"win-rate={self.win_rates[module_id]} -> ",
end="",
)
# If win rate is good -> Snapshot current policy and decide,
# whether to freeze the copy or not.
if self.win_rates[module_id] > self.win_rate_threshold:
is_main = re.match("^main(_\\d+)?$", module_id)
initializing_exploiters = False
# First time, main manages a decent win-rate against random:
# Add league_exploiter_1 and main_exploiter_1 as trainables to the mix.
if is_main and len(self.trainable_policies) == 1:
initializing_exploiters = True
self.trainable_policies.add("league_exploiter_1")
self.trainable_policies.add("main_exploiter_1")
# If main manages to win (above threshold) against the entire league
# -> increase the league by another frozen copy of main,
# main-exploiters or league-exploiters.
else:
keep_training = (
False
if is_main
else np.random.choice([True, False], p=[0.3, 0.7])
)
if module_id in self.main_policies:
new_mod_id = re.sub(
"(main)(_\\d+)?$",
f"\\1_{len(self.main_policies) - 1}",
module_id,
)
self.main_policies.add(new_mod_id)
elif module_id in self.main_exploiters:
new_mod_id = re.sub(
"_\\d+$", f"_{len(self.main_exploiters)}", module_id
)
self.main_exploiters.add(new_mod_id)
else:
new_mod_id = re.sub(
"_\\d+$", f"_{len(self.league_exploiters)}", module_id
)
self.league_exploiters.add(new_mod_id)
if keep_training:
self.trainable_policies.add(new_mod_id)
else:
self.non_trainable_policies.add(new_mod_id)
print(f"adding new opponents to the mix ({new_mod_id}).")
                # Initialize state variables for agent-to-module mapping. Note, we
# need to keep track of the league-exploiter to always match a
# non-trainable policy with a trainable one - otherwise matches are
# a waste of resources.
self.type_count = 0
self.exploiter = None
def agent_to_module_mapping_fn(agent_id, episode, **kwargs):
# Pick whether this is ...
type_ = np.random.choice([1, 2])
                    # After every two calls (one per agent), reset the state variables.
                    # Note, there will always be two agents playing against each other.
if self.type_count >= 2:
# Reset the counter.
self.type_count = 0
# Set the exploiter to `None`.
self.exploiter = None
# Increment the counter for each agent.
self.type_count += 1
# 1) League exploiter vs any other.
if type_ == 1:
# Note, the exploiter could be either of `type_==1` or `type_==2`.
if not self.exploiter:
self.exploiter = "league_exploiter_" + str(
np.random.choice(
list(range(len(self.league_exploiters)))
)
)
# This league exploiter is frozen: Play against a
# trainable policy.
if self.exploiter not in self.trainable_policies:
opponent = np.random.choice(list(self.trainable_policies))
# League exploiter is trainable: Play against any other
# non-trainable policy.
else:
opponent = np.random.choice(
list(self.non_trainable_policies)
)
# Only record match stats once per match.
if hash(episode.id_) % 2 == agent_id:
self._matching_stats[(self.exploiter, opponent)] += 1
return self.exploiter
else:
return opponent
# 2) Main exploiter vs main.
else:
# Note, the exploiter could be either of `type_==1` or `type_==2`.
if not self.exploiter:
self.exploiter = "main_exploiter_" + str(
np.random.choice(list(range(len(self.main_exploiters))))
)
# Main exploiter is frozen: Play against the main
# policy.
if self.exploiter not in self.trainable_policies:
main = "main"
# Main exploiter is trainable: Play against any
# frozen main.
else:
main = np.random.choice(list(self.main_policies - {"main"}))
# Only record match stats once per match.
if hash(episode.id_) % 2 == agent_id:
self._matching_stats[(self.exploiter, main)] += 1
return self.exploiter
else:
return main
multi_rl_module = local_worker.module
main_module = multi_rl_module["main"]
# Set the weights of the new polic(y/ies).
if initializing_exploiters:
main_state = main_module.get_state()
multi_rl_module["main_0"].set_state(main_state)
multi_rl_module["league_exploiter_1"].set_state(main_state)
multi_rl_module["main_exploiter_1"].set_state(main_state)
# We need to sync the just copied local weights to all the
# remote workers and remote Learner workers as well.
algorithm.env_runner_group.sync_weights(
policies=["main_0", "league_exploiter_1", "main_exploiter_1"]
)
algorithm.learner_group.set_weights(multi_rl_module.get_state())
else:
algorithm.add_module(
module_id=new_mod_id,
module_spec=RLModuleSpec.from_module(main_module),
)
# TODO (sven): Maybe we should move this convenience step back into
# `Algorithm.add_module()`? Would be less explicit, but also
# easier.
algorithm.set_state(
{
"learner_group": {
"learner": {
"rl_module": {
new_mod_id: multi_rl_module[
module_id
].get_state(),
}
}
}
}
)
algorithm.env_runner_group.foreach_env_runner(
lambda env_runner: env_runner.config.multi_agent(
policy_mapping_fn=agent_to_module_mapping_fn,
# This setting doesn't really matter for EnvRunners (no
# training going on there, but we'll update this as well
# here for good measure).
policies_to_train=_trainable_policies,
),
local_env_runner=True,
)
# Set all Learner workers' should_module_be_updated to the new
# value.
algorithm.learner_group.foreach_learner(
func=lambda learner: learner.config.multi_agent(
policies_to_train=_trainable_policies,
),
timeout_seconds=0.0, # fire-and-forget
)
league_changed = True
else:
print("not good enough; will keep learning ...")
# Add current league size to results dict.
result["league_size"] = len(self.non_trainable_policies) + len(
self.trainable_policies
)
if league_changed:
self._print_league()
def _print_league(self):
print("--- League ---")
print("Matchups:")
pprint(self._matching_stats)
print("Trainable policies (win-rates):")
for p in sorted(self.trainable_policies):
wr = self.win_rates[p] if p in self.win_rates else 0.0
print(f"\t{p}: {wr}")
print("Frozen policies:")
for p in sorted(self.non_trainable_policies):
wr = self.win_rates[p] if p in self.win_rates else 0.0
print(f"\t{p}: {wr}")
print()
|
SelfPlayLeagueBasedCallback
|
python
|
keras-team__keras
|
keras/src/ops/nn_test.py
|
{
"start": 3403,
"end": 28015
}
|
class ____(testing.TestCase):
def test_relu(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.relu(x).shape, (None, 2, 3))
def test_relu6(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.relu6(x).shape, (None, 2, 3))
def test_sigmoid(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.sigmoid(x).shape, (None, 2, 3))
def test_sparse_sigmoid(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.sparse_sigmoid(x).shape, (None, 2, 3))
def test_softplus(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.softplus(x).shape, (None, 2, 3))
def test_softsign(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.softsign(x).shape, (None, 2, 3))
def test_silu(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.silu(x).shape, (None, 2, 3))
def test_log_sigmoid(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.log_sigmoid(x).shape, (None, 2, 3))
def test_leaky_relu(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.leaky_relu(x).shape, (None, 2, 3))
def test_hard_sigmoid(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.hard_sigmoid(x).shape, (None, 2, 3))
def test_hard_silu(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.hard_silu(x).shape, (None, 2, 3))
def test_elu(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.elu(x).shape, (None, 2, 3))
def test_selu(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.selu(x).shape, (None, 2, 3))
def test_gelu(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.gelu(x).shape, (None, 2, 3))
def test_celu(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.celu(x).shape, (None, 2, 3))
def test_glu(self):
x = KerasTensor([None, 2, 4])
self.assertEqual(knn.glu(x).shape, (None, 2, 2))
def test_tanh_shrink(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.tanh_shrink(x).shape, (None, 2, 3))
def test_hard_tanh(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.hard_tanh(x).shape, (None, 2, 3))
def test_hard_shrink(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.hard_shrink(x).shape, (None, 2, 3))
def test_threshld(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.threshold(x, 0, 0).shape, (None, 2, 3))
def test_squareplus(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.squareplus(x).shape, (None, 2, 3))
def test_soft_shrink(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.soft_shrink(x).shape, (None, 2, 3))
def test_sparse_plus(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.sparse_plus(x).shape, (None, 2, 3))
def test_softmax(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.softmax(x).shape, (None, 2, 3))
self.assertEqual(knn.softmax(x, axis=1).shape, (None, 2, 3))
self.assertEqual(knn.softmax(x, axis=-1).shape, (None, 2, 3))
def test_softmax_in_graph(self):
class SoftmaxLayer(keras.Layer):
def call(self, x):
return ops.softmax(x, axis=-1)
class Model(keras.Model):
def __init__(self):
x = keras.Input(shape=(None,))
y = SoftmaxLayer()(x)
super().__init__(inputs=x, outputs=y)
# Make sure Keras is able to compile the model graph
model = Model()
x = ops.array([[1.0, 2.0, 3.0, 4.0]])
model.predict(x)
def test_log_softmax(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.log_softmax(x).shape, (None, 2, 3))
self.assertEqual(knn.log_softmax(x, axis=1).shape, (None, 2, 3))
self.assertEqual(knn.log_softmax(x, axis=-1).shape, (None, 2, 3))
def test_sparsemax(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.sparsemax(x).shape, (None, 2, 3))
def test_max_pool(self):
data_format = backend.config.image_data_format()
if data_format == "channels_last":
input_shape = (None, 8, 3)
else:
input_shape = (None, 3, 8)
x = KerasTensor(input_shape)
self.assertEqual(
knn.max_pool(x, 2, 1).shape,
(None, 7, 3) if data_format == "channels_last" else (None, 3, 7),
)
self.assertEqual(
knn.max_pool(x, 2, 2, padding="same").shape,
(None, 4, 3) if data_format == "channels_last" else (None, 3, 4),
)
if data_format == "channels_last":
input_shape = (None, 8, None, 3)
else:
input_shape = (None, 3, 8, None)
x = KerasTensor(input_shape)
        self.assertEqual(
            knn.max_pool(x, 2, 1).shape,
            (
                (None, 7, None, 3)
                if data_format == "channels_last"
                else (None, 3, 7, None)
            ),
        )
self.assertEqual(
knn.max_pool(x, 2, 2, padding="same").shape,
(
(None, 4, None, 3)
if data_format == "channels_last"
else (None, 3, 4, None)
),
)
self.assertEqual(
knn.max_pool(x, (2, 2), (2, 2), padding="same").shape,
(
(None, 4, None, 3)
if data_format == "channels_last"
else (None, 3, 4, None)
),
)
def test_average_pool(self):
data_format = backend.config.image_data_format()
if data_format == "channels_last":
input_shape = (None, 8, 3)
else:
input_shape = (None, 3, 8)
x = KerasTensor(input_shape)
self.assertEqual(
knn.average_pool(x, 2, 1).shape,
(None, 7, 3) if data_format == "channels_last" else (None, 3, 7),
)
self.assertEqual(
knn.average_pool(x, 2, 2, padding="same").shape,
(None, 4, 3) if data_format == "channels_last" else (None, 3, 4),
)
if data_format == "channels_last":
input_shape = (None, 8, None, 3)
else:
input_shape = (None, 3, 8, None)
x = KerasTensor(input_shape)
self.assertEqual(
knn.average_pool(x, 2, 1).shape,
(
(None, 7, None, 3)
if data_format == "channels_last"
else (None, 3, 7, None)
),
)
self.assertEqual(
knn.average_pool(x, 2, 2, padding="same").shape,
(
(None, 4, None, 3)
if data_format == "channels_last"
else (None, 3, 4, None)
),
)
self.assertEqual(
knn.average_pool(x, (2, 2), (2, 2), padding="same").shape,
(
(None, 4, None, 3)
if data_format == "channels_last"
else (None, 3, 4, None)
),
)
def test_multi_hot(self):
x = KerasTensor([None, 3, 1])
self.assertEqual(knn.multi_hot(x, 5).shape, (None, 1, 5))
self.assertEqual(knn.multi_hot(x, 5, 1).shape, (None, 3, 1))
self.assertEqual(knn.multi_hot(x, 5, 2).shape, (None, 5, 1))
self.assertSparse(knn.multi_hot(x, 5, sparse=True))
@parameterized.named_parameters(
named_product(dtype=["float32", "int32", "bool"], sparse=[False, True])
)
def test_multi_hot_dtype(self, dtype, sparse):
if sparse and not backend.SUPPORTS_SPARSE_TENSORS:
pytest.skip("Backend does not support sparse tensors")
x = np.arange(5)
out = knn.multi_hot(x, 5, axis=0, dtype=dtype, sparse=sparse)
self.assertEqual(backend.standardize_dtype(out.dtype), dtype)
self.assertSparse(out, sparse)
def test_conv(self):
data_format = backend.config.image_data_format()
# Test 1D conv.
if data_format == "channels_last":
input_shape = (None, 20, 3)
else:
input_shape = (None, 3, 20)
inputs_1d = KerasTensor(input_shape)
kernel = KerasTensor([4, 3, 2])
for padding in ["valid", "VALID"]:
self.assertEqual(
knn.conv(inputs_1d, kernel, 1, padding=padding).shape,
(
(None, 17, 2)
if data_format == "channels_last"
else (None, 2, 17)
),
)
for padding in ["same", "SAME"]:
self.assertEqual(
knn.conv(inputs_1d, kernel, 1, padding=padding).shape,
(
(None, 20, 2)
if data_format == "channels_last"
else (None, 2, 20)
),
)
self.assertEqual(
knn.conv(inputs_1d, kernel, (2,), dilation_rate=2).shape,
(None, 7, 2) if data_format == "channels_last" else (None, 2, 7),
)
# Test 2D conv.
if data_format == "channels_last":
input_shape = (None, 10, None, 3)
else:
input_shape = (None, 3, 10, None)
inputs_2d = KerasTensor(input_shape)
kernel = KerasTensor([2, 2, 3, 2])
for padding in ["valid", "VALID"]:
self.assertEqual(
knn.conv(inputs_2d, kernel, 1, padding=padding).shape,
(
(None, 9, None, 2)
if data_format == "channels_last"
else (None, 2, 9, None)
),
)
for padding in ["same", "SAME"]:
self.assertEqual(
knn.conv(inputs_2d, kernel, 1, padding=padding).shape,
(
(None, 10, None, 2)
if data_format == "channels_last"
else (None, 2, 10, None)
),
)
self.assertEqual(
knn.conv(inputs_2d, kernel, (2, 1), dilation_rate=(2, 1)).shape,
(
(None, 4, None, 2)
if data_format == "channels_last"
else (None, 2, 4, None)
),
)
# Test 2D conv - H, W specified
if data_format == "channels_last":
input_shape = (None, 10, 10, 3)
else:
input_shape = (None, 3, 10, 10)
inputs_2d = KerasTensor(input_shape)
kernel = KerasTensor([2, 2, 3, 2])
for padding in ["valid", "VALID"]:
self.assertEqual(
knn.conv(inputs_2d, kernel, 1, padding=padding).shape,
(
(None, 9, 9, 2)
if data_format == "channels_last"
else (None, 2, 9, 9)
),
)
for padding in ["same", "SAME"]:
self.assertEqual(
knn.conv(inputs_2d, kernel, 1, padding=padding).shape,
(
(None, 10, 10, 2)
if data_format == "channels_last"
else (None, 2, 10, 10)
),
)
self.assertEqual(
knn.conv(inputs_2d, kernel, (2, 1), dilation_rate=(2, 1)).shape,
(
(None, 4, 9, 2)
if data_format == "channels_last"
else (None, 2, 4, 9)
),
)
# Test 3D conv.
if data_format == "channels_last":
input_shape = (None, 8, None, 8, 3)
else:
input_shape = (None, 3, 8, None, 8)
inputs_3d = KerasTensor(input_shape)
kernel = KerasTensor([3, 3, 3, 3, 2])
for padding in ["valid", "VALID"]:
self.assertEqual(
knn.conv(inputs_3d, kernel, 1, padding=padding).shape,
(
(None, 6, None, 6, 2)
if data_format == "channels_last"
else (None, 2, 6, None, 6)
),
)
for padding in ["same", "SAME"]:
self.assertEqual(
knn.conv(inputs_3d, kernel, (2, 1, 2), padding=padding).shape,
(
(None, 4, None, 4, 2)
if data_format == "channels_last"
else (None, 2, 4, None, 4)
),
)
self.assertEqual(
knn.conv(
inputs_3d, kernel, 1, padding="valid", dilation_rate=(1, 2, 2)
).shape,
(
(None, 6, None, 4, 2)
if data_format == "channels_last"
else (None, 2, 6, None, 4)
),
)
def test_depthwise_conv(self):
data_format = backend.config.image_data_format()
# Test 1D depthwise conv.
if data_format == "channels_last":
input_shape = (None, 20, 3)
else:
input_shape = (None, 3, 20)
inputs_1d = KerasTensor(input_shape)
kernel = KerasTensor([4, 3, 1])
for padding in ["valid", "VALID"]:
self.assertEqual(
knn.depthwise_conv(inputs_1d, kernel, 1, padding=padding).shape,
(
(None, 17, 3)
if data_format == "channels_last"
else (None, 3, 17)
),
)
for padding in ["same", "SAME"]:
self.assertEqual(
knn.depthwise_conv(
inputs_1d, kernel, (1,), padding=padding
).shape,
(
(None, 20, 3)
if data_format == "channels_last"
else (None, 3, 20)
),
)
self.assertEqual(
knn.depthwise_conv(inputs_1d, kernel, 2, dilation_rate=2).shape,
(None, 7, 3) if data_format == "channels_last" else (None, 3, 7),
)
# Test 2D depthwise conv.
if data_format == "channels_last":
input_shape = (None, 10, 10, 3)
else:
input_shape = (None, 3, 10, 10)
inputs_2d = KerasTensor(input_shape)
kernel = KerasTensor([2, 2, 3, 1])
for padding in ["valid", "VALID"]:
self.assertEqual(
knn.depthwise_conv(inputs_2d, kernel, 1, padding=padding).shape,
(
(None, 9, 9, 3)
if data_format == "channels_last"
else (None, 3, 9, 9)
),
)
for padding in ["same", "SAME"]:
self.assertEqual(
knn.depthwise_conv(
inputs_2d, kernel, (1, 2), padding=padding
).shape,
(
(None, 10, 5, 3)
if data_format == "channels_last"
else (None, 3, 10, 5)
),
)
self.assertEqual(
knn.depthwise_conv(inputs_2d, kernel, 2, dilation_rate=2).shape,
(
(None, 4, 4, 3)
if data_format == "channels_last"
else (None, 3, 4, 4)
),
)
self.assertEqual(
knn.depthwise_conv(
inputs_2d, kernel, 2, dilation_rate=(2, 1)
).shape,
(
(None, 4, 5, 3)
if data_format == "channels_last"
else (None, 3, 4, 5)
),
)
def test_separable_conv(self):
data_format = backend.config.image_data_format()
# Test 1D separable conv.
if data_format == "channels_last":
input_shape = (None, 20, 3)
else:
input_shape = (None, 3, 20)
inputs_1d = KerasTensor(input_shape)
kernel = KerasTensor([4, 3, 2])
pointwise_kernel = KerasTensor([1, 6, 5])
self.assertEqual(
knn.separable_conv(
inputs_1d, kernel, pointwise_kernel, 1, padding="valid"
).shape,
(None, 17, 5) if data_format == "channels_last" else (None, 5, 17),
)
self.assertEqual(
knn.separable_conv(
inputs_1d, kernel, pointwise_kernel, 1, padding="same"
).shape,
(None, 20, 5) if data_format == "channels_last" else (None, 5, 20),
)
self.assertEqual(
knn.separable_conv(
inputs_1d, kernel, pointwise_kernel, 2, dilation_rate=2
).shape,
(None, 7, 5) if data_format == "channels_last" else (None, 5, 7),
)
# Test 2D separable conv.
if data_format == "channels_last":
input_shape = (None, 10, 10, 3)
else:
input_shape = (None, 3, 10, 10)
inputs_2d = KerasTensor(input_shape)
kernel = KerasTensor([2, 2, 3, 2])
pointwise_kernel = KerasTensor([1, 1, 6, 5])
self.assertEqual(
knn.separable_conv(
inputs_2d, kernel, pointwise_kernel, 1, padding="valid"
).shape,
(
(None, 9, 9, 5)
if data_format == "channels_last"
else (None, 5, 9, 9)
),
)
self.assertEqual(
knn.separable_conv(
inputs_2d, kernel, pointwise_kernel, (1, 2), padding="same"
).shape,
(
(None, 10, 5, 5)
if data_format == "channels_last"
else (None, 5, 10, 5)
),
)
self.assertEqual(
knn.separable_conv(
inputs_2d, kernel, pointwise_kernel, 2, dilation_rate=(2, 1)
).shape,
(
(None, 4, 5, 5)
if data_format == "channels_last"
else (None, 5, 4, 5)
),
)
def test_conv_transpose(self):
data_format = backend.config.image_data_format()
if data_format == "channels_last":
input_shape = (None, 4, 3)
else:
input_shape = (None, 3, 4)
inputs_1d = KerasTensor(input_shape)
kernel = KerasTensor([2, 5, 3])
self.assertEqual(
knn.conv_transpose(inputs_1d, kernel, 2).shape,
(None, 8, 5) if data_format == "channels_last" else (None, 5, 8),
)
self.assertEqual(
knn.conv_transpose(inputs_1d, kernel, 2, padding="same").shape,
(None, 8, 5) if data_format == "channels_last" else (None, 5, 8),
)
self.assertEqual(
knn.conv_transpose(
inputs_1d, kernel, 5, padding="valid", output_padding=4
).shape,
(None, 21, 5) if data_format == "channels_last" else (None, 5, 21),
)
if data_format == "channels_last":
input_shape = (None, 4, 4, 3)
else:
input_shape = (None, 3, 4, 4)
inputs_2d = KerasTensor(input_shape)
kernel = KerasTensor([2, 2, 5, 3])
self.assertEqual(
knn.conv_transpose(inputs_2d, kernel, 2).shape,
(
(None, 8, 8, 5)
if data_format == "channels_last"
else (None, 5, 8, 8)
),
)
self.assertEqual(
knn.conv_transpose(inputs_2d, kernel, (2, 2), padding="same").shape,
(
(None, 8, 8, 5)
if data_format == "channels_last"
else (None, 5, 8, 8)
),
)
self.assertEqual(
knn.conv_transpose(
inputs_2d, kernel, (5, 5), padding="valid", output_padding=4
).shape,
(
(None, 21, 21, 5)
if data_format == "channels_last"
else (None, 5, 21, 21)
),
)
def test_one_hot(self):
x = KerasTensor([None, 3, 1])
self.assertEqual(knn.one_hot(x, 5).shape, (None, 3, 1, 5))
self.assertEqual(knn.one_hot(x, 5, 1).shape, (None, 5, 3, 1))
self.assertEqual(knn.one_hot(x, 5, 2).shape, (None, 3, 5, 1))
self.assertSparse(knn.one_hot(x, 5, sparse=True))
@parameterized.named_parameters(
named_product(dtype=["float32", "int32", "bool"], sparse=[False, True])
)
def test_one_hot_dtype(self, dtype, sparse):
if sparse and not backend.SUPPORTS_SPARSE_TENSORS:
pytest.skip("Backend does not support sparse tensors")
x = np.arange(5)
out = knn.one_hot(x, 5, axis=0, dtype=dtype, sparse=sparse)
self.assertEqual(backend.standardize_dtype(out.dtype), dtype)
self.assertSparse(out, sparse)
def test_moments(self):
x = KerasTensor([None, 3, 4])
self.assertEqual(knn.moments(x, axes=[0])[0].shape, (3, 4))
self.assertEqual(knn.moments(x, axes=[0, 1])[0].shape, (4,))
self.assertEqual(
knn.moments(x, axes=[0, 1], keepdims=True)[0].shape, (1, 1, 4)
)
self.assertEqual(knn.moments(x, axes=[1])[0].shape, (None, 4))
self.assertEqual(knn.moments(x, axes=[1, 2])[0].shape, (None,))
self.assertEqual(
knn.moments(x, axes=[1, 2], keepdims=True)[0].shape, (None, 1, 1)
)
def test_batch_normalization(self):
x = KerasTensor([None, 3, 4])
mean = KerasTensor([4])
variance = KerasTensor([4])
self.assertEqual(
knn.batch_normalization(x, mean, variance, axis=-1).shape,
(None, 3, 4),
)
x = KerasTensor([None, 3, 4, 5])
self.assertEqual(
knn.batch_normalization(x, mean, variance, axis=2).shape,
(None, 3, 4, 5),
)
mean = KerasTensor([3])
variance = KerasTensor([3])
self.assertEqual(
knn.batch_normalization(x, mean, variance, axis=1).shape,
(None, 3, 4, 5),
)
# Test wrong offset shape
self.assertRaisesRegex(
ValueError,
"`offset` must be a vector of length",
knn.batch_normalization,
KerasTensor([None, 3, 4, 5]),
KerasTensor([5]),
KerasTensor([5]),
axis=-1,
offset=KerasTensor([3]),
scale=KerasTensor([5]),
)
# Test wrong scale shape
self.assertRaisesRegex(
ValueError,
"`scale` must be a vector of length",
knn.batch_normalization,
KerasTensor([None, 3, 4, 5]),
KerasTensor([5]),
KerasTensor([5]),
axis=-1,
offset=KerasTensor([5]),
scale=KerasTensor([3]),
)
def test_ctc_decode(self):
# Test strategy="greedy"
inputs = KerasTensor([None, 2, 3])
sequence_lengths = KerasTensor([None])
decoded, scores = knn.ctc_decode(inputs, sequence_lengths)
self.assertEqual(decoded.shape, (1, None, 2))
self.assertEqual(scores.shape, (None, 1))
# Test strategy="beam_search"
inputs = KerasTensor([None, 2, 3])
sequence_lengths = KerasTensor([None])
decoded, scores = knn.ctc_decode(
inputs, sequence_lengths, strategy="beam_search", top_paths=2
)
self.assertEqual(decoded.shape, (2, None, 2))
self.assertEqual(scores.shape, (None, 2))
def test_normalize(self):
x = KerasTensor([None, 2, 3])
self.assertEqual(knn.normalize(x).shape, (None, 2, 3))
def test_psnr(self):
x1 = KerasTensor([None, 2, 3])
x2 = KerasTensor([None, 5, 6])
out = knn.psnr(x1, x2, max_val=224)
self.assertEqual(out.shape, ())
def test_dot_product_attention(self):
query = KerasTensor([None, None, 8, 16])
key = KerasTensor([None, None, 6, 16])
value = KerasTensor([None, None, 6, 16])
out = knn.dot_product_attention(query, key, value)
self.assertEqual(out.shape, query.shape)
def test_rms_normalization(self):
x = KerasTensor([None, 8, 16])
scale = KerasTensor([None, 8, 16])
out = knn.rms_normalization(x, scale)
self.assertEqual(out.shape, x.shape)
def test_layer_normalization(self):
x = KerasTensor([None, 8, 16])
gamma = KerasTensor([None, 16])
beta = KerasTensor([None, 16])
out = knn.layer_normalization(x, gamma, beta)
self.assertEqual(out.shape, x.shape)
|
NNOpsDynamicShapeTest
|
python
|
walkccc__LeetCode
|
solutions/92. Reverse Linked List II/92.py
|
{
"start": 0,
"end": 533
}
|
class ____:
def reverseBetween(
self,
head: ListNode | None,
left: int,
right: int,
) -> ListNode | None:
if left == 1:
return self.reverseN(head, right)
head.next = self.reverseBetween(head.next, left - 1, right - 1)
return head
def reverseN(self, head: ListNode | None, n: int) -> ListNode | None:
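# Reverse the first n nodes and return the new head of that reversed prefix.
# After the recursive call, `head` is spliced back in right after its original successor.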
if n == 1:
return head
newHead = self.reverseN(head.next, n - 1)
headNext = head.next
head.next = headNext.next
headNext.next = head
return newHead
|
Solution
|
python
|
allegroai__clearml
|
clearml/backend_api/services/v2_13/queues.py
|
{
"start": 75503,
"end": 76686
}
|
class ____(Response):
"""
Response of queues.remove_task endpoint.
:param removed: Number of tasks removed (0 or 1)
:type removed: int
"""
_service = "queues"
_action = "remove_task"
_version = "2.13"
_schema = {
"definitions": {},
"properties": {
"removed": {
"description": "Number of tasks removed (0 or 1)",
"enum": [0, 1],
"type": ["integer", "null"],
}
},
"type": "object",
}
def __init__(self, removed: Optional[int] = None, **kwargs: Any) -> None:
super(RemoveTaskResponse, self).__init__(**kwargs)
self.removed = removed
@schema_property("removed")
def removed(self) -> Optional[int]:
return self._property_removed
@removed.setter
def removed(self, value: Optional[int]) -> None:
if value is None:
self._property_removed = None
return
if isinstance(value, float) and value.is_integer():
value = int(value)
self.assert_isinstance(value, "removed", six.integer_types)
self._property_removed = value
|
RemoveTaskResponse
|
python
|
networkx__networkx
|
networkx/exception.py
|
{
"start": 2136,
"end": 2317
}
|
class ____(NetworkXAlgorithmError):
"""Exception raised by algorithms trying to solve a maximization
or a minimization problem instance that is unbounded."""
|
NetworkXUnbounded
|
python
|
pytorch__pytorch
|
torch/_dynamo/variables/streams.py
|
{
"start": 4586,
"end": 5793
}
|
class ____:
"""Track the currently entered stream if any"""
def __init__(self) -> None:
from ..source import CurrentStreamSource
cur_stack: list[StreamVariable] = []
if torch.accelerator.is_available():
stream_var = LazyVariableTracker.create(
torch.accelerator.current_stream(),
source=CurrentStreamSource(torch.accelerator.current_stream().device),
)
cur_stack = [stream_var] # type: ignore[list-item]
self.cur_stream_stack: collections.deque[StreamVariable] = collections.deque(
cur_stack
)
def enter_stream(self, stream: "StreamVariable") -> None:
self.cur_stream_stack.append(stream)
def exit_stream(self) -> None:
self.cur_stream_stack.pop()
def cur_stream(self, device: Optional[torch.device] = None) -> "StreamVariable":
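# Walk the stack from the most recently entered stream outward, looking for one
# on the requested device; fall back to the innermost (top-of-stack) stream.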
if device is not None:
for stream in reversed(self.cur_stream_stack):
if stream.device == device:
return stream
return self.cur_stream_stack[-1]
def in_stream_context(self) -> bool:
return len(self.cur_stream_stack) > 0
|
SymbolicStreamState
|
python
|
sphinx-doc__sphinx
|
sphinx/ext/doctest.py
|
{
"start": 5934,
"end": 6255
}
|
class ____(TestDirective):
option_spec: ClassVar[OptionSpec] = {
'hide': directives.flag,
'no-trim-doctest-flags': directives.flag,
'pyversion': directives.unchanged_required,
'skipif': directives.unchanged_required,
'trim-doctest-flags': directives.flag,
}
|
TestcodeDirective
|
python
|
automl__auto-sklearn
|
autosklearn/ensembles/multiobjective_dummy_ensemble.py
|
{
"start": 726,
"end": 7582
}
|
class ____(AbstractMultiObjectiveEnsemble):
def __init__(
self,
task_type: int,
metrics: Sequence[Scorer] | Scorer,
backend: Backend,
random_state: int | np.random.RandomState | None = None,
) -> None:
"""A dummy implementation of a multi-objective ensemble.
Builds ensembles, each consisting of a single model on the Pareto front.
Parameters
----------
task_type: int
An identifier indicating which task is being performed.
metrics: Sequence[Scorer] | Scorer
The metrics used to evaluate the models.
backend : Backend
Gives access to the backend of Auto-sklearn. Not used.
random_state: int | RandomState | None = None
Not used.
"""
self.task_type = task_type
if isinstance(metrics, Sequence):
if len(metrics) == 1:
warnings.warn(
"Passed only a single metric to a multi-objective ensemble. "
"Please use a single-objective ensemble in such cases."
)
self.metrics = metrics
else:
self.metrics = [metrics]
self.random_state = random_state
self.backend = backend
@property
def pareto_set(self) -> Sequence[AbstractEnsemble]:
if not hasattr(self, "pareto_set_"):
raise NotFittedError("`pareto_set` not created, please call `fit()` first")
return self.pareto_set_
def fit(
self,
base_models_predictions: list[np.ndarray],
true_targets: np.ndarray,
model_identifiers: list[tuple[int, int, float]],
runs: Sequence[Run],
X_data: SUPPORTED_FEAT_TYPES | None = None,
) -> MultiObjectiveDummyEnsemble:
"""Select dummy ensembles given predictions of base models and targets.
Parameters
----------
base_models_predictions: np.ndarray
shape = (n_base_models, n_data_points, n_targets)
n_targets is the number of classes in case of classification,
n_targets is 0 or 1 in case of regression
Can be a list of 2d numpy arrays as well to prevent copying all
predictions into a single, large numpy array.
true_targets : array of shape [n_targets]
model_identifiers : identifier for each base model.
Can be used for practical text output of the ensemble.
runs: Sequence[Run]
Additional information for each run executed by SMAC that was
considered by the ensemble builder. Not used.
X_data : list-like | sparse matrix | None = None
X data to give to the metric if required
Returns
-------
self
"""
if self.task_type not in TASK_TYPES:
raise ValueError("Unknown task type %s." % self.task_type)
all_costs = np.empty((len(base_models_predictions), len(self.metrics)))
for i, base_model_prediction in enumerate(base_models_predictions):
losses = calculate_losses(
solution=true_targets,
prediction=base_model_prediction,
task_type=self.task_type,
metrics=self.metrics,
X_data=X_data,
)
all_costs[i] = [losses[metric.name] for metric in self.metrics]
all_costs = np.array(all_costs)
sort_by_first_metric = np.argsort(all_costs[:, 0])
efficient_points = pareto_front(all_costs, is_loss=True)
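# Iterate models in order of their loss on the first metric and keep only the
# Pareto-efficient ones, each wrapped as a single-model ensemble.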
pareto_set = []
for argsort_idx in sort_by_first_metric:
if not efficient_points[argsort_idx]:
continue
ensemble = SingleModelEnsemble(
task_type=self.task_type,
metrics=self.metrics,
random_state=self.random_state,
backend=self.backend,
model_index=argsort_idx,
)
ensemble.fit(
base_models_predictions=base_models_predictions,
true_targets=true_targets,
model_identifiers=model_identifiers,
runs=runs,
X_data=X_data,
)
pareto_set.append(ensemble)
self.pareto_set_ = pareto_set
return self
def predict(
self, base_models_predictions: np.ndarray | list[np.ndarray]
) -> np.ndarray:
"""Predict using the ensemble which is best for the 1st metric.
Parameters
----------
base_models_predictions : np.ndarray
shape = (n_base_models, n_data_points, n_targets)
Same as in the fit method.
Returns
-------
np.ndarray
"""
return self.pareto_set[0].predict(base_models_predictions)
def __str__(self) -> str:
return "MultiObjectiveDummyEnsemble: %d models" % len(self.pareto_set)
def get_models_with_weights(
self, models: dict[tuple[int, int, float], BasePipeline]
) -> list[tuple[float, BasePipeline]]:
"""Return a list of (weight, model) pairs for the ensemble that is
best for the 1st metric.
Parameters
----------
models : dict {identifier : model object}
The identifiers are the same as the one presented to the fit()
method. Models can be used for nice printing.
Returns
-------
list[tuple[float, BasePipeline]]
"""
return self.pareto_set[0].get_models_with_weights(models)
def get_identifiers_with_weights(
self,
) -> list[tuple[tuple[int, int, float], float]]:
"""Return a (identifier, weight)-pairs for all models that were passed to the
ensemble builder based on the ensemble that is best for the 1st metric.
Parameters
----------
models : dict {identifier : model object}
The identifiers are the same as the one presented to the fit()
method. Models can be used for nice printing.
Returns
-------
list[tuple[tuple[int, int, float], float]]
"""
return self.pareto_set[0].get_identifiers_with_weights()
def get_selected_model_identifiers(self) -> list[tuple[int, int, float]]:
"""Return identifiers of models in the ensemble that is best for the 1st metric.
This includes models which have a weight of zero!
Returns
-------
list
"""
return self.pareto_set[0].get_selected_model_identifiers()
def get_validation_performance(self) -> float:
"""Validation performance of the ensemble that is best for the 1st metric.
Returns
-------
float
"""
return self.pareto_set[0].get_validation_performance()
|
MultiObjectiveDummyEnsemble
|
python
|
pytorch__pytorch
|
torch/distributed/algorithms/join.py
|
{
"start": 2842,
"end": 3433
}
|
class ____(NamedTuple):
r"""This includes all fields needed from a :class:`Joinable` instance for the join context manager side."""
enable: bool
throw_on_early_termination: bool
is_first_joinable: bool
@staticmethod
def construct_disabled_join_config():
r"""Return a :class:`_JoinConfig` instance indicating that join-related logic should be disabled.
e.g. if the caller is not in a join context manager.
"""
return _JoinConfig(
enable=False, throw_on_early_termination=False, is_first_joinable=False
)
|
_JoinConfig
|
python
|
streamlit__streamlit
|
lib/tests/streamlit/user_info_test.py
|
{
"start": 2041,
"end": 6184
}
|
class ____(DeltaGeneratorTestCase):
"""Test UserInfoProxy."""
def test_user_email_attr(self):
"""Test that `st.user.email` returns user info from ScriptRunContext"""
assert st.user.email == "test@example.com"
def test_user_email_key(self):
assert st.user["email"] == "test@example.com"
def test_user_non_existing_attr(self):
"""Test that an error is raised when called non existed attr."""
with pytest.raises(AttributeError):
st.write(st.user.attribute)
def test_user_non_existing_key(self):
"""Test that an error is raised when called non existed key."""
with pytest.raises(KeyError):
st.write(st.user["key"])
def test_user_cannot_be_modified_existing_key(self):
"""
Test that an error is raised when trying to assign a new value to an existing key.
"""
with pytest.raises(StreamlitAPIException) as e:
st.user["email"] = "NEW_VALUE"
assert str(e.value) == "st.user cannot be modified"
def test_user_cannot_be_modified_new_key(self):
"""
Test that an error is raised when trying to assign a new value to a new key.
"""
with pytest.raises(StreamlitAPIException) as e:
st.user["foo"] = "bar"
assert str(e.value) == "st.user cannot be modified"
def test_user_cannot_be_modified_existing_attr(self):
"""
Test that an error is raised when trying to assign a new value to an existing attribute.
"""
with pytest.raises(StreamlitAPIException) as e:
st.user.email = "bar"
assert str(e.value) == "st.user cannot be modified"
def test_user_cannot_be_modified_new_attr(self):
"""
Test that an error is raised when trying to assign a new value to a new attribute.
"""
with pytest.raises(StreamlitAPIException) as e:
st.user.foo = "bar"
assert str(e.value) == "st.user cannot be modified"
def test_user_len(self):
assert len(st.user) == 1
def test_st_user_reads_from_context_(self):
"""Test that st.user reads information from current ScriptRunContext
And after ScriptRunContext changed, it returns new email
"""
orig_report_ctx = get_script_run_ctx()
forward_msg_queue = ForwardMsgQueue()
try:
add_script_run_ctx(
threading.current_thread(),
ScriptRunContext(
session_id="test session id",
_enqueue=forward_msg_queue.enqueue,
query_string="",
session_state=SafeSessionState(SessionState(), lambda: None),
uploaded_file_mgr=None,
main_script_path="",
user_info={"email": "something@else.com"},
fragment_storage=MemoryFragmentStorage(),
pages_manager=PagesManager(""),
),
)
assert st.user.email == "something@else.com"
except Exception as e:
raise e
finally:
add_script_run_ctx(threading.current_thread(), orig_report_ctx)
@patch("streamlit.user_info.show_deprecation_warning")
@patch("streamlit.user_info.has_shown_experimental_user_warning", False)
def test_deprecate_st_experimental_user(self, mock_show_warning: MagicMock):
"""Test that we show deprecation warning only once."""
st.write(st.experimental_user)
expected_warning = (
"Please replace `st.experimental_user` with `st.user`.\n\n"
"`st.experimental_user` will be removed after 2025-11-06."
)
# We only show the warning a single time for a given object.
mock_show_warning.assert_called_once_with(expected_warning)
mock_show_warning.reset_mock()
st.write(st.experimental_user)
mock_show_warning.assert_not_called()
@patch(
"streamlit.auth_util.secrets_singleton",
MagicMock(
load_if_toml_exists=MagicMock(return_value=True),
get=MagicMock(return_value=SECRETS_MOCK),
),
)
|
UserInfoProxyTest
|
python
|
apache__airflow
|
providers/amazon/tests/unit/amazon/aws/operators/test_lambda_function.py
|
{
"start": 6222,
"end": 10855
}
|
class ____:
@pytest.mark.parametrize("payload", PAYLOADS)
def test_init(self, payload):
lambda_operator = LambdaInvokeFunctionOperator(
task_id="test",
function_name="test",
payload=payload,
log_type="None",
aws_conn_id="aws_conn_test",
region_name="foo-bar-1",
verify="/spam/egg.pem",
botocore_config={"baz": "qux"},
)
assert lambda_operator.task_id == "test"
assert lambda_operator.function_name == "test"
assert lambda_operator.payload == payload
assert lambda_operator.log_type == "None"
assert lambda_operator.aws_conn_id == "aws_conn_test"
assert lambda_operator.region_name == "foo-bar-1"
assert lambda_operator.verify == "/spam/egg.pem"
assert lambda_operator.botocore_config == {"baz": "qux"}
@mock.patch.object(LambdaHook, "invoke_lambda")
@mock.patch.object(LambdaHook, "conn")
@pytest.mark.parametrize(
"keep_empty_log_lines", [pytest.param(True, id="keep"), pytest.param(False, id="truncate")]
)
@pytest.mark.parametrize(
("log_result", "expected_execution_logs"),
[
pytest.param(LOG_RESPONSE, True, id="log-result"),
pytest.param(BAD_LOG_RESPONSE, False, id="corrupted-log-result"),
pytest.param(None, False, id="none-log-result"),
pytest.param(NO_LOG_RESPONSE_SENTINEL, False, id="no-response"),
],
)
@pytest.mark.parametrize("payload", PAYLOADS)
def test_invoke_lambda(
self,
mock_conn,
mock_invoke,
payload,
keep_empty_log_lines,
log_result,
expected_execution_logs,
caplog,
):
operator = LambdaInvokeFunctionOperator(
task_id="task_test",
function_name="a",
invocation_type="b",
log_type="c",
keep_empty_log_lines=keep_empty_log_lines,
client_context="d",
payload=payload,
qualifier="f",
)
returned_payload = Mock()
returned_payload.read().decode.return_value = "data was read"
fake_response = {
"ResponseMetadata": "",
"StatusCode": 200,
"Payload": returned_payload,
}
if log_result is not NO_LOG_RESPONSE_SENTINEL:
fake_response["LogResult"] = log_result
mock_invoke.return_value = fake_response
caplog.set_level("INFO", "airflow.task.operators")
value = operator.execute(None)
assert value == "data was read"
mock_invoke.assert_called_once_with(
function_name="a",
invocation_type="b",
log_type="c",
client_context="d",
payload=payload,
qualifier="f",
)
# Validate log messages in task logs
if expected_execution_logs:
assert "The last 4 KB of the Lambda execution log" in caplog.text
assert "FOO" in caplog.messages
assert "BAR" in caplog.messages
if keep_empty_log_lines:
assert "" in caplog.messages
else:
assert "" not in caplog.messages
else:
assert "The last 4 KB of the Lambda execution log" not in caplog.text
@patch.object(LambdaInvokeFunctionOperator, "hook", new_callable=mock.PropertyMock)
def test_invoke_lambda_bad_http_code(self, hook_mock):
operator = LambdaInvokeFunctionOperator(
task_id="task_test",
function_name="a",
)
hook_mock().invoke_lambda.return_value = {"ResponseMetadata": "", "StatusCode": 404}
with pytest.raises(ValueError, match=LAMBDA_FUNC_NO_EXECUTION):
operator.execute(None)
@patch.object(LambdaInvokeFunctionOperator, "hook", new_callable=mock.PropertyMock)
def test_invoke_lambda_function_error(self, hook_mock):
operator = LambdaInvokeFunctionOperator(
task_id="task_test",
function_name="a",
)
hook_mock().invoke_lambda.return_value = {
"ResponseMetadata": "",
"StatusCode": 404,
"FunctionError": "yes",
"Payload": Mock(),
}
with pytest.raises(ValueError, match=LAMBDA_FUNC_NO_EXECUTION):
operator.execute(None)
def test_template_fields(self):
operator = LambdaInvokeFunctionOperator(
task_id="task_test",
function_name="a",
)
validate_template_fields(operator)
|
TestLambdaInvokeFunctionOperator
|
python
|
huggingface__transformers
|
src/transformers/models/aria/processing_aria.py
|
{
"start": 2070,
"end": 8999
}
|
class ____(ProcessorMixin):
"""
AriaProcessor is a processor for the Aria model which wraps the Aria image preprocessor and the LLama slow tokenizer.
Args:
image_processor (`AriaImageProcessor`, *optional*):
The AriaImageProcessor to use for image preprocessing.
tokenizer (`PreTrainedTokenizerBase`, *optional*):
An instance of [`PreTrainedTokenizerBase`]. This should correspond with the model's text model. The tokenizer is a required input.
chat_template (`str`, *optional*):
A Jinja template which will be used to convert lists of messages in a chat into a tokenizable string.
size_conversion (`Dict`, *optional*):
A dictionary indicating size conversions for images.
"""
def __init__(
self,
image_processor=None,
tokenizer: Union[AutoTokenizer, str] = None,
chat_template: Optional[str] = None,
size_conversion: Optional[dict[Union[float, int], int]] = None,
):
if size_conversion is None:
size_conversion = {490: 128, 980: 256}
self.size_conversion = {int(k): v for k, v in size_conversion.items()}
self.image_token = tokenizer.image_token
self.image_token_id = tokenizer.image_token_id
if tokenizer is not None and tokenizer.pad_token is None:
tokenizer.pad_token = tokenizer.unk_token
super().__init__(image_processor, tokenizer, chat_template=chat_template)
def __call__(
self,
text: Union[TextInput, PreTokenizedInput, list[TextInput], list[PreTokenizedInput]],
images: Optional[ImageInput] = None,
**kwargs: Unpack[AriaProcessorKwargs],
) -> BatchFeature:
"""
Main method to prepare for the model one or several sequences(s) and image(s).
Args:
text (`TextInput`, `PreTokenizedInput`, `list[TextInput]`, `list[PreTokenizedInput]`):
The sequence or batch of sequences to be encoded. Each sequence can be a string or a list of strings
(pretokenized string). If the sequences are provided as list of strings (pretokenized), you must set
`is_split_into_words=True` (to lift the ambiguity with a batch of sequences).
images (`ImageInput`):
The image or batch of images to be prepared. Each image can be a PIL image, NumPy array or PyTorch
tensor. Both channels-first and channels-last formats are supported.
Returns:
[`BatchFeature`]: A [`BatchFeature`] with the following fields:
- **input_ids** -- List of token ids to be fed to a model. Returned when `text` is not `None`.
- **attention_mask** -- List of indices specifying which tokens should be attended to by the model (when
`return_attention_mask=True` or if *"attention_mask"* is in `self.model_input_names` and if `text` is not
`None`).
- **pixel_values** -- Pixel values to be fed to a model. Returned when `images` is not `None`.
- **pixel_mask** -- Pixel mask to be fed to a model. Returned when `images` is not `None`.
"""
output_kwargs = self._merge_kwargs(
AriaProcessorKwargs,
tokenizer_init_kwargs=self.tokenizer.init_kwargs,
**kwargs,
)
if isinstance(text, str):
text = [text]
elif not isinstance(text, list) and not isinstance(text[0], str):
raise TypeError("Invalid input text. Please provide a string, or a list of strings")
if images is not None:
image_inputs = self.image_processor(images, **output_kwargs["images_kwargs"])
# expand the image_token according to the num_crops and tokens per image
tokens_per_image = self.size_conversion[image_inputs.pixel_values.shape[2]]
prompt_strings = []
num_crops = image_inputs.pop("num_crops") * tokens_per_image
for sample in text:
sample = sample.replace(self.tokenizer.image_token, self.tokenizer.image_token * num_crops)
prompt_strings.append(sample)
else:
image_inputs = {}
prompt_strings = text
return_tensors = output_kwargs["text_kwargs"].pop("return_tensors", None)
return_mm_token_type_ids = output_kwargs["text_kwargs"].pop("return_mm_token_type_ids", False)
text_inputs = self.tokenizer(prompt_strings, **output_kwargs["text_kwargs"], return_tensors=None)
self._check_special_mm_tokens(prompt_strings, text_inputs, modalities=["image"])
if return_mm_token_type_ids:
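# Mark image placeholder positions with token type 1 and all other tokens with 0.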
array_ids = np.array(text_inputs["input_ids"])
mm_token_type_ids = np.zeros_like(text_inputs["input_ids"])
mm_token_type_ids[array_ids == self.image_token_id] = 1
text_inputs["mm_token_type_ids"] = mm_token_type_ids.tolist()
return BatchFeature(data={**text_inputs, **image_inputs}, tensor_type=return_tensors)
def _get_num_multimodal_tokens(self, image_sizes=None, **kwargs):
"""
Computes the number of placeholder tokens needed for multimodal inputs with the given sizes.
Args:
image_sizes (`list[list[int]]`, *optional*):
The input sizes formatted as (height, width) per each image.
Returns:
`MultiModalData`: A `MultiModalData` object holding number of tokens per each of the provided
input modalities, along with other useful data.
"""
vision_data = {}
if image_sizes is not None:
images_kwargs = AriaProcessorKwargs._defaults.get("images_kwargs", {})
images_kwargs.update(kwargs)
max_size = images_kwargs.get("max_image_size", None) or self.image_processor.max_image_size
num_image_patches = [
self.image_processor.get_number_of_image_patches(*image_size, images_kwargs)
for image_size in image_sizes
]
num_image_tokens = [self.size_conversion[max_size] * num_patches for num_patches in num_image_patches]
vision_data.update({"num_image_tokens": num_image_tokens, "num_image_patches": num_image_patches})
return MultiModalData(**vision_data)
@property
def model_input_names(self):
tokenizer_input_names = self.tokenizer.model_input_names
image_processor_input_names = self.image_processor.model_input_names
# Remove `num_crops`, it is popped and used only when processing. Make a copy of list when removing
# otherwise `self.image_processor.model_input_names` is also modified
image_processor_input_names = [name for name in image_processor_input_names if name != "num_crops"]
return list(dict.fromkeys(tokenizer_input_names + image_processor_input_names))
__all__ = ["AriaProcessor"]
|
AriaProcessor
|
python
|
huggingface__transformers
|
src/transformers/models/groupvit/modeling_groupvit.py
|
{
"start": 12934,
"end": 14353
}
|
class ____(nn.Module):
"""
Image to Patch Embedding.
"""
def __init__(
self,
image_size: int = 224,
patch_size: Union[int, tuple[int, int]] = 16,
num_channels: int = 3,
embed_dim: int = 768,
):
super().__init__()
image_size = image_size if isinstance(image_size, collections.abc.Iterable) else (image_size, image_size)
patch_size = patch_size if isinstance(patch_size, collections.abc.Iterable) else (patch_size, patch_size)
num_patches = (image_size[1] // patch_size[1]) * (image_size[0] // patch_size[0])
self.image_size = image_size
self.patch_size = patch_size
self.num_patches = num_patches
self.projection = nn.Conv2d(num_channels, embed_dim, kernel_size=patch_size, stride=patch_size)
def forward(self, pixel_values: torch.Tensor, interpolate_pos_encoding: bool = False) -> torch.Tensor:
batch_size, num_channels, height, width = pixel_values.shape
if not interpolate_pos_encoding:
if height != self.image_size[0] or width != self.image_size[1]:
raise ValueError(
f"Input image size ({height}*{width}) doesn't match model"
f" ({self.image_size[0]}*{self.image_size[1]})."
)
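# (batch, channels, H, W) -> conv -> (batch, embed_dim, H/ps, W/ps)
# -> flatten + transpose -> (batch, num_patches, embed_dim)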
x = self.projection(pixel_values).flatten(2).transpose(1, 2)
return x
|
GroupViTPatchEmbeddings
|
python
|
spack__spack
|
lib/spack/spack/spec.py
|
{
"start": 220174,
"end": 220297
}
|
class ____(spack.error.SpecError):
"""Raised when the same dependency occurs in a spec twice."""
|
DuplicateDependencyError
|
python
|
wandb__wandb
|
wandb/vendor/pygments/lexers/javascript.py
|
{
"start": 41335,
"end": 45696
}
|
class ____(RegexLexer):
"""
For `CoffeeScript`_ source code.
.. _CoffeeScript: http://coffeescript.org
.. versionadded:: 1.3
"""
name = 'CoffeeScript'
aliases = ['coffee-script', 'coffeescript', 'coffee']
filenames = ['*.coffee']
mimetypes = ['text/coffeescript']
_operator_re = (
r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
r'\|\||\\(?=\n)|'
r'(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&\|\^/])=?')
flags = re.DOTALL
tokens = {
'commentsandwhitespace': [
(r'\s+', Text),
(r'###[^#].*?###', Comment.Multiline),
(r'#(?!##[^#]).*?\n', Comment.Single),
],
'multilineregex': [
(r'[^/#]+', String.Regex),
(r'///([gim]+\b|\B)', String.Regex, '#pop'),
(r'#\{', String.Interpol, 'interpoling_string'),
(r'[/#]', String.Regex),
],
'slashstartsregex': [
include('commentsandwhitespace'),
(r'///', String.Regex, ('#pop', 'multilineregex')),
(r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
r'([gim]+\b|\B)', String.Regex, '#pop'),
# This isn't really guarding against mishighlighting well-formed
# code, just the ability to infinite-loop between root and
# slashstartsregex.
(r'/', Operator),
default('#pop'),
],
'root': [
include('commentsandwhitespace'),
(r'^(?=\s|/)', Text, 'slashstartsregex'),
(_operator_re, Operator, 'slashstartsregex'),
(r'(?:\([^()]*\))?\s*[=-]>', Name.Function, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
(r'(?<![.$])(for|own|in|of|while|until|'
r'loop|break|return|continue|'
r'switch|when|then|if|unless|else|'
r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
r'extends|this|class|by)\b', Keyword, 'slashstartsregex'),
(r'(?<![.$])(true|false|yes|no|on|off|null|'
r'NaN|Infinity|undefined)\b',
Keyword.Constant),
(r'(Array|Boolean|Date|Error|Function|Math|netscape|'
r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
r'decodeURIComponent|encodeURI|encodeURIComponent|'
r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
Name.Builtin),
(r'[$a-zA-Z_][\w.:$]*\s*[:=]\s', Name.Variable,
'slashstartsregex'),
(r'@[$a-zA-Z_][\w.:$]*\s*[:=]\s', Name.Variable.Instance,
'slashstartsregex'),
(r'@', Name.Other, 'slashstartsregex'),
(r'@?[$a-zA-Z_][\w$]*', Name.Other),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+', Number.Integer),
('"""', String, 'tdqs'),
("'''", String, 'tsqs'),
('"', String, 'dqs'),
("'", String, 'sqs'),
],
'strings': [
(r'[^#\\\'"]+', String),
# note that all coffee script strings are multi-line.
# hashmarks, quotes and backslashes must be parsed one at a time
],
'interpoling_string': [
(r'\}', String.Interpol, "#pop"),
include('root')
],
'dqs': [
(r'"', String, '#pop'),
(r'\\.|\'', String), # double-quoted strings don't need ' escapes
(r'#\{', String.Interpol, "interpoling_string"),
(r'#', String),
include('strings')
],
'sqs': [
(r"'", String, '#pop'),
(r'#|\\.|"', String), # single quoted strings don't need " escapses
include('strings')
],
'tdqs': [
(r'"""', String, '#pop'),
(r'\\.|\'|"', String), # no need to escape quotes in triple-string
(r'#\{', String.Interpol, "interpoling_string"),
(r'#', String),
include('strings'),
],
'tsqs': [
(r"'''", String, '#pop'),
(r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
include('strings')
],
}
|
CoffeeScriptLexer
|
python
|
tensorflow__tensorflow
|
tensorflow/python/distribute/multi_process_lib.py
|
{
"start": 1418,
"end": 5886
}
|
class ____:
"""A process that runs using absl.app.run."""
def __init__(self, *args, **kwargs):
super(_AbslProcess, self).__init__(*args, **kwargs)
# Monkey-patch that is carried over into the spawned process by pickle.
self._run_impl = getattr(self, 'run')
self.run = self._run_with_absl
def _run_with_absl(self):
app.run(lambda _: self._run_impl())
if _is_enabled():
class AbslForkServerProcess(_AbslProcess,
multiprocessing.context.ForkServerProcess):
"""An absl-compatible Forkserver process.
Note: Forkserver is not available in windows.
"""
class AbslForkServerContext(multiprocessing.context.ForkServerContext):
_name = 'absl_forkserver'
Process = AbslForkServerProcess # pylint: disable=invalid-name
multiprocessing = AbslForkServerContext()
Process = multiprocessing.Process
else:
class Process(object):
"""A process that skips test (until windows is supported)."""
def __init__(self, *args, **kwargs):
del args, kwargs
raise unittest.SkipTest(
'TODO(b/150264776): Windows is not supported in MultiProcessRunner.')
_test_main_called = False
def _set_spawn_exe_path():
"""Set the path to the executable for spawned processes.
This utility searches for the binary the parent process is using, and sets
the executable of multiprocessing's context accordingly.
Raises:
RuntimeError: If the binary path cannot be determined.
"""
# TODO(b/150264776): This does not work with Windows. Find a solution.
if sys.argv[0].endswith('.py'):
def guess_path(package_root):
# If all we have is a python module path, we'll need to make a guess for
# the actual executable path.
if 'bazel-out' in sys.argv[0] and package_root in sys.argv[0]:
# Guess the binary path under bazel. For target
# //tensorflow/python/distribute:input_lib_test_multiworker_gpu, the
# argv[0] is in the form of
# /.../tensorflow/python/distribute/input_lib_test.py
# and the binary is
# /.../tensorflow/python/distribute/input_lib_test_multiworker_gpu
package_root_base = sys.argv[0][:sys.argv[0].rfind(package_root)]
binary = os.environ['TEST_TARGET'][2:].replace(':', '/', 1)
possible_path = os.path.join(package_root_base, package_root,
binary)
logging.info('Guessed test binary path: %s', possible_path)
if os.access(possible_path, os.X_OK):
return possible_path
return None
path = guess_path('org_tensorflow')
if not path:
path = guess_path('org_keras')
if path is None:
logging.error(
'Cannot determine binary path. sys.argv[0]=%s os.environ=%s',
sys.argv[0], os.environ)
raise RuntimeError('Cannot determine binary path')
sys.argv[0] = path
# Note that this sets the executable for *all* contexts.
multiprocessing.get_context().set_executable(sys.argv[0])
def _if_spawn_run_and_exit():
"""If spawned process, run requested spawn task and exit. Else a no-op."""
# `multiprocessing` module passes a script "from multiprocessing.x import y"
# to subprocess, followed by a main function call. We use this to tell if
# the process is spawned. Examples of x are "forkserver" or
# "semaphore_tracker".
is_spawned = ('-c' in sys.argv[1:] and
sys.argv[sys.argv.index('-c') +
1].startswith('from multiprocessing.'))
if not is_spawned:
return
cmd = sys.argv[sys.argv.index('-c') + 1]
# As a subprocess, we disregard all other interpreter command line
# arguments.
sys.argv = sys.argv[0:1]
# Run the specified command - this is expected to be one of:
# 1. Spawn the process for semaphore tracker.
# 2. Spawn the initial process for forkserver.
# 3. Spawn any process as requested by the "spawn" method.
exec(cmd) # pylint: disable=exec-used
sys.exit(0) # Semaphore tracker doesn't explicitly sys.exit.
def test_main():
"""Main function to be called within `__main__` of a test file."""
global _test_main_called
_test_main_called = True
os.environ['TF_FORCE_GPU_ALLOW_GROWTH'] = 'true'
if _is_enabled():
_set_spawn_exe_path()
_if_spawn_run_and_exit()
# Only runs test.main() if not spawned process.
test.main()
def initialized():
"""Returns whether the module is initialized."""
return _test_main_called
|
_AbslProcess
|
python
|
allegroai__clearml
|
clearml/backend_api/services/v2_13/workers.py
|
{
"start": 49047,
"end": 50444
}
|
class ____(Request):
"""
Returns information on all registered workers.
:param last_seen: Filter out workers not active for more than last_seen
seconds. A value of 0 or 'none' will disable the filter.
:type last_seen: int
"""
_service = "workers"
_action = "get_all"
_version = "2.13"
_schema = {
"definitions": {},
"properties": {
"last_seen": {
"default": 3600,
"description": "Filter out workers not active for more than last_seen seconds.\n A value or 0 or 'none' will disable the filter.",
"type": ["integer", "null"],
}
},
"type": "object",
}
def __init__(self, last_seen: Optional[int] = 3600, **kwargs: Any) -> None:
super(GetAllRequest, self).__init__(**kwargs)
self.last_seen = last_seen
@schema_property("last_seen")
def last_seen(self) -> Optional[int]:
return self._property_last_seen
@last_seen.setter
def last_seen(self, value: Optional[int]) -> None:
if value is None:
self._property_last_seen = None
return
if isinstance(value, float) and value.is_integer():
value = int(value)
self.assert_isinstance(value, "last_seen", six.integer_types)
self._property_last_seen = value
|
GetAllRequest
|
python
|
PyCQA__pyflakes
|
pyflakes/messages.py
|
{
"start": 6745,
"end": 6933
}
|
class ____(Message):
"""
Assertion test is a non-empty tuple literal, which are always True.
"""
message = 'assertion is always true, perhaps remove parentheses?'
|
AssertTuple
|
python
|
cython__cython
|
Doc/s5/ep2008/worker.py
|
{
"start": 1,
"end": 184
}
|
class ____(object):
u"Almost Sisyphus"
def __init__(self, task):
self.task = task
def work_hard(self):
for i in range(100):
self.task()
|
HardWorker
|
python
|
great-expectations__great_expectations
|
great_expectations/datasource/fluent/data_asset/path/file_asset.py
|
{
"start": 2271,
"end": 2518
}
|
class ____(ValueError):
def __init__(self, path: PathStr):
message = f"Provided path matched multiple targets, and must match exactly one: {path} "
super().__init__(message)
self.path = path
@public_api
|
AmbiguousPathError
|
python
|
dagster-io__dagster
|
python_modules/dagster-pipes/dagster_pipes/__init__.py
|
{
"start": 16082,
"end": 16273
}
|
class ____(ABC):
@contextmanager
@abstractmethod
def capture(self) -> Iterator[None]: ...
T_LogChannel = TypeVar("T_LogChannel", bound=PipesLogWriterChannel)
|
PipesLogWriterChannel
|
python
|
ray-project__ray
|
python/ray/dashboard/modules/job/job_head.py
|
{
"start": 3333,
"end": 6548
}
|
class ____:
"""A local client for submitting and interacting with jobs on a specific node
in the remote cluster.
Submits requests over HTTP to the job agent on the specific node using the REST API.
"""
def __init__(
self,
dashboard_agent_address: str,
):
self._agent_address = dashboard_agent_address
self._session = aiohttp.ClientSession()
async def _raise_error(self, resp: ClientResponse):
status = resp.status
error_text = await resp.text()
raise RuntimeError(f"Request failed with status code {status}: {error_text}.")
async def submit_job_internal(self, req: JobSubmitRequest) -> JobSubmitResponse:
logger.debug(f"Submitting job with submission_id={req.submission_id}.")
async with self._session.post(
f"{self._agent_address}/api/job_agent/jobs/", json=dataclasses.asdict(req)
) as resp:
if resp.status == 200:
result_json = await resp.json()
return JobSubmitResponse(**result_json)
else:
await self._raise_error(resp)
async def stop_job_internal(self, job_id: str) -> JobStopResponse:
logger.debug(f"Stopping job with job_id={job_id}.")
async with self._session.post(
f"{self._agent_address}/api/job_agent/jobs/{job_id}/stop"
) as resp:
if resp.status == 200:
result_json = await resp.json()
return JobStopResponse(**result_json)
else:
await self._raise_error(resp)
async def delete_job_internal(self, job_id: str) -> JobDeleteResponse:
logger.debug(f"Deleting job with job_id={job_id}.")
async with self._session.delete(
f"{self._agent_address}/api/job_agent/jobs/{job_id}"
) as resp:
if resp.status == 200:
result_json = await resp.json()
return JobDeleteResponse(**result_json)
else:
await self._raise_error(resp)
async def get_job_logs_internal(self, job_id: str) -> JobLogsResponse:
async with self._session.get(
f"{self._agent_address}/api/job_agent/jobs/{job_id}/logs"
) as resp:
if resp.status == 200:
result_json = await resp.json()
return JobLogsResponse(**result_json)
else:
await self._raise_error(resp)
async def tail_job_logs(self, job_id: str) -> AsyncIterator[str]:
"""Get an iterator that follows the logs of a job."""
ws = await self._session.ws_connect(
f"{self._agent_address}/api/job_agent/jobs/{job_id}/logs/tail"
)
while True:
msg = await ws.receive()
if msg.type == aiohttp.WSMsgType.TEXT:
yield msg.data
elif msg.type == aiohttp.WSMsgType.CLOSED:
break
elif msg.type == aiohttp.WSMsgType.ERROR:
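# Error frames are ignored; the loop only ends once the socket is closed.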
pass
async def close(self, ignore_error=True):
try:
await self._session.close()
except Exception:
if not ignore_error:
raise
|
JobAgentSubmissionClient
|
python
|
PrefectHQ__prefect
|
src/prefect/server/schemas/filters.py
|
{
"start": 78754,
"end": 80202
}
|
class ____(PrefectFilterBaseModel):
"""Filter by `ArtifactCollection.key`."""
any_: Optional[list[str]] = Field(
default=None, description="A list of artifact keys to include"
)
like_: Optional[str] = Field(
default=None,
description=(
"A string to match artifact keys against. This can include "
"SQL wildcard characters like `%` and `_`."
),
examples=["my-artifact-%"],
)
exists_: Optional[bool] = Field(
default=None,
description=(
"If `true`, only include artifacts with a non-null key. If `false`, "
"only include artifacts with a null key. Should return all rows in "
"the ArtifactCollection table if specified."
),
)
def _get_filter_list(
self, db: "PrefectDBInterface"
) -> Iterable[sa.ColumnExpressionArgument[bool]]:
filters: list[sa.ColumnExpressionArgument[bool]] = []
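# Translate each provided filter field into a SQLAlchemy predicate on ArtifactCollection.key.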
if self.any_ is not None:
filters.append(db.ArtifactCollection.key.in_(self.any_))
if self.like_ is not None:
filters.append(db.ArtifactCollection.key.ilike(f"%{self.like_}%"))
if self.exists_ is not None:
filters.append(
db.ArtifactCollection.key.isnot(None)
if self.exists_
else db.ArtifactCollection.key.is_(None)
)
return filters
|
ArtifactCollectionFilterKey
|
python
|
sanic-org__sanic
|
sanic/exceptions.py
|
{
"start": 8176,
"end": 9285
}
|
class ____(HTTPException):
"""500 Internal Server Error
A general server-side error has occurred. If no other HTTP exception is
appropriate, then this should be used
Args:
message (Optional[Union[str, bytes]], optional): The message to be sent to the client. If `None`
then the HTTP status 'Bad Request' will be sent. Defaults to `None`.
quiet (Optional[bool], optional): When `True`, the error traceback will be suppressed
from the logs. Defaults to `None`.
context (Optional[Dict[str, Any]], optional): Additional mapping of key/value data that will be
sent to the client upon exception. Defaults to `None`.
extra (Optional[Dict[str, Any]], optional): Additional mapping of key/value data that will NOT be
sent to the client when in PRODUCTION mode. Defaults to `None`.
headers (Optional[Dict[str, Any]], optional): Additional headers that should be sent with the HTTP
response. Defaults to `None`.
""" # noqa: E501
status_code = 500
InternalServerError = ServerError
|
ServerError
|
python
|
kamyu104__LeetCode-Solutions
|
Python/divisor-game.py
|
{
"start": 1234,
"end": 1892
}
|
class ____(object):
def divisorGame(self, n):
"""
:type n: int
:rtype: bool
"""
def factors(n):
result = [[] for _ in xrange(n+1)]
for i in xrange(1, n+1):
for j in range(i, n+1, i):
result[j].append(i)
return result
def memoization(n):
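# A position n is winning iff some proper divisor i of n leaves the opponent
# in a losing position n - i.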
if lookup[n] is None:
lookup[n] = any(not memoization(n-i) for i in FACTORS[n] if i != n)
return lookup[n]
FACTORS = factors(n)
lookup = [None]*(n+1)
return memoization(n)
# Time: O(n^(3/2))
# Space: O(n)
# memoization
|
Solution3
|
python
|
getsentry__sentry
|
tests/sentry/utils/test_registry.py
|
{
"start": 193,
"end": 1978
}
|
class ____(TestCase):
def test(self) -> None:
test_registry = Registry[Callable]()
@test_registry.register("something")
def registered_func():
raise NotImplementedError
def unregistered_func():
raise NotImplementedError
assert test_registry.get("something") == registered_func
with pytest.raises(NoRegistrationExistsError):
test_registry.get("something else")
assert test_registry.get_key(registered_func) == "something"
with pytest.raises(NoRegistrationExistsError):
test_registry.get_key(unregistered_func)
with pytest.raises(AlreadyRegisteredError):
test_registry.register("something")(unregistered_func)
with pytest.raises(AlreadyRegisteredError):
test_registry.register("new_key")(registered_func)
test_registry.register("something else")(unregistered_func)
assert test_registry.get("something else") == unregistered_func
def test_allow_duplicate_values(self) -> None:
test_registry = Registry[Callable[[], None]](enable_reverse_lookup=False)
@test_registry.register("something")
@test_registry.register("something 2")
def registered_func():
raise NotImplementedError
assert test_registry.get("something") == registered_func
assert test_registry.get("something 2") == registered_func
with pytest.raises(NoRegistrationExistsError):
test_registry.get("something else")
with pytest.raises(NotImplementedError):
test_registry.get_key(registered_func)
test_registry.register("something else")(registered_func)
assert test_registry.get("something else") == registered_func
|
RegistryTest
|
python
|
apache__airflow
|
providers/amazon/src/airflow/providers/amazon/aws/hooks/sts.py
|
{
"start": 891,
"end": 1827
}
|
class ____(AwsBaseHook):
"""
Interact with AWS Security Token Service (STS).
Provide thin wrapper around :external+boto3:py:class:`boto3.client("sts") <STS.Client>`.
Additional arguments (such as ``aws_conn_id``) may be specified and
are passed down to the underlying AwsBaseHook.
.. seealso::
- :class:`airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook`
"""
def __init__(self, *args, **kwargs):
super().__init__(client_type="sts", *args, **kwargs)
def get_account_number(self) -> str:
"""
Get the account Number.
.. seealso::
- :external+boto3:py:meth:`STS.Client.get_caller_identity`
"""
try:
return self.get_conn().get_caller_identity()["Account"]
except Exception as general_error:
self.log.error("Failed to get the AWS Account Number, error: %s", general_error)
raise
|
StsHook
|
python
|
pytorch__pytorch
|
torch/testing/_internal/common_pruning.py
|
{
"start": 2800,
"end": 3735
}
|
class ____(nn.Module):
r"""Model with only Linear layers, some with bias, some in a Sequential and some following.
Activation functions modules in between each Linear in the Sequential, and each outside layer.
Used to test pruned Linear(Bias)-Activation-Linear fusion."""
def __init__(self) -> None:
super().__init__()
self.seq = nn.Sequential(
nn.Linear(7, 5, bias=True),
nn.ReLU(),
nn.Linear(5, 6, bias=False),
nn.Tanh(),
nn.Linear(6, 4, bias=True),
)
self.linear1 = nn.Linear(4, 3, bias=True)
self.act1 = nn.ReLU()
self.linear2 = nn.Linear(3, 10, bias=False)
self.act2 = nn.Tanh()
def forward(self, x: torch.Tensor) -> torch.Tensor:
x = self.seq(x)
x = self.linear1(x)
x = self.act1(x)
x = self.linear2(x)
x = self.act2(x)
return x
|
LinearActivation
|
python
|
huggingface__transformers
|
src/transformers/models/dots1/modular_dots1.py
|
{
"start": 1261,
"end": 1322
}
|
class ____(Qwen3RotaryEmbedding):
pass
|
Dots1RotaryEmbedding
|
python
|
coleifer__peewee
|
playhouse/migrate.py
|
{
"start": 5156,
"end": 15145
}
|
class ____(object):
explicit_create_foreign_key = False
explicit_delete_foreign_key = False
def __init__(self, database):
self.database = database
def make_context(self):
return self.database.get_sql_context()
@classmethod
def from_database(cls, database):
if CockroachDatabase and isinstance(database, CockroachDatabase):
return CockroachDBMigrator(database)
elif isinstance(database, PostgresqlDatabase):
return PostgresqlMigrator(database)
elif isinstance(database, MySQLDatabase):
return MySQLMigrator(database)
elif isinstance(database, SqliteDatabase):
return SqliteMigrator(database)
raise ValueError('Unsupported database: %s' % database)
@operation
def apply_default(self, table, column_name, field):
default = field.default
if callable_(default):
default = default()
return (self.make_context()
.literal('UPDATE ')
.sql(Entity(table))
.literal(' SET ')
.sql(Expression(
Entity(column_name),
OP.EQ,
field.db_value(default),
flat=True)))
def _alter_table(self, ctx, table):
return ctx.literal('ALTER TABLE ').sql(Entity(table))
def _alter_column(self, ctx, table, column):
return (self
._alter_table(ctx, table)
.literal(' ALTER COLUMN ')
.sql(Entity(column)))
@operation
def alter_add_column(self, table, column_name, field):
# Make field null at first.
ctx = self.make_context()
field_null, field.null = field.null, True
# Set the field's column-name and name, if it is not set or doesn't
# match the new value.
if field.column_name != column_name:
field.name = field.column_name = column_name
(self
._alter_table(ctx, table)
.literal(' ADD COLUMN ')
.sql(field.ddl(ctx)))
field.null = field_null
if isinstance(field, ForeignKeyField):
self.add_inline_fk_sql(ctx, field)
return ctx
@operation
def add_constraint(self, table, name, constraint):
return (self
._alter_table(self.make_context(), table)
.literal(' ADD CONSTRAINT ')
.sql(Entity(name))
.literal(' ')
.sql(constraint))
@operation
def add_unique(self, table, *column_names):
constraint_name = 'uniq_%s' % '_'.join(column_names)
constraint = NodeList((
SQL('UNIQUE'),
EnclosedNodeList([Entity(column) for column in column_names])))
return self.add_constraint(table, constraint_name, constraint)
@operation
def drop_constraint(self, table, name):
return (self
._alter_table(self.make_context(), table)
.literal(' DROP CONSTRAINT ')
.sql(Entity(name)))
def add_inline_fk_sql(self, ctx, field):
ctx = (ctx
.literal(' REFERENCES ')
.sql(Entity(field.rel_model._meta.table_name))
.literal(' ')
.sql(EnclosedNodeList((Entity(field.rel_field.column_name),))))
if field.on_delete is not None:
ctx = ctx.literal(' ON DELETE %s' % field.on_delete)
if field.on_update is not None:
ctx = ctx.literal(' ON UPDATE %s' % field.on_update)
return ctx
@operation
def add_foreign_key_constraint(self, table, column_name, rel, rel_column,
on_delete=None, on_update=None,
constraint_name=None):
constraint = constraint_name or 'fk_%s_%s_refs_%s' % (table,
column_name,
rel)
ctx = (self
.make_context()
.literal('ALTER TABLE ')
.sql(Entity(table))
.literal(' ADD CONSTRAINT ')
.sql(Entity(_truncate_constraint_name(constraint)))
.literal(' FOREIGN KEY ')
.sql(EnclosedNodeList((Entity(column_name),)))
.literal(' REFERENCES ')
.sql(Entity(rel))
.literal(' (')
.sql(Entity(rel_column))
.literal(')'))
if on_delete is not None:
ctx = ctx.literal(' ON DELETE %s' % on_delete)
if on_update is not None:
ctx = ctx.literal(' ON UPDATE %s' % on_update)
return ctx
@operation
def add_column(self, table, column_name, field):
# Adding a column is complicated by the fact that if there are rows
# present and the field is non-null, then we need to first add the
# column as a nullable field, then set the value, then add a not null
# constraint.
if not field.null and field.default is None:
raise ValueError('%s is not null but has no default' % column_name)
is_foreign_key = isinstance(field, ForeignKeyField)
if is_foreign_key and not field.rel_field:
raise ValueError('Foreign keys must specify a `field`.')
operations = [self.alter_add_column(table, column_name, field)]
# In the event the field is *not* nullable, update with the default
# value and set not null.
if not field.null:
operations.extend([
self.apply_default(table, column_name, field),
self.add_not_null(table, column_name)])
if is_foreign_key and self.explicit_create_foreign_key:
operations.append(
self.add_foreign_key_constraint(
table,
column_name,
field.rel_model._meta.table_name,
field.rel_field.column_name,
field.on_delete,
field.on_update))
if field.index or field.unique:
using = getattr(field, 'index_type', None)
operations.append(self.add_index(table, (column_name,),
field.unique, using))
return operations
@operation
def drop_foreign_key_constraint(self, table, column_name):
raise NotImplementedError
@operation
def drop_column(self, table, column_name, cascade=True):
ctx = self.make_context()
(self._alter_table(ctx, table)
.literal(' DROP COLUMN ')
.sql(Entity(column_name)))
if cascade:
ctx.literal(' CASCADE')
fk_columns = [
foreign_key.column
for foreign_key in self.database.get_foreign_keys(table)]
if column_name in fk_columns and self.explicit_delete_foreign_key:
return [self.drop_foreign_key_constraint(table, column_name), ctx]
return ctx
@operation
def rename_column(self, table, old_name, new_name):
return (self
._alter_table(self.make_context(), table)
.literal(' RENAME COLUMN ')
.sql(Entity(old_name))
.literal(' TO ')
.sql(Entity(new_name)))
@operation
def add_not_null(self, table, column):
return (self
._alter_column(self.make_context(), table, column)
.literal(' SET NOT NULL'))
@operation
def drop_not_null(self, table, column):
return (self
._alter_column(self.make_context(), table, column)
.literal(' DROP NOT NULL'))
@operation
def add_column_default(self, table, column, default):
if default is None:
raise ValueError('`default` must be not None/NULL.')
if callable_(default):
default = default()
# Try to handle SQL functions and string literals, otherwise pass as a
# bound value.
if isinstance(default, str) and default.endswith((')', "'")):
default = SQL(default)
return (self
._alter_table(self.make_context(), table)
.literal(' ALTER COLUMN ')
.sql(Entity(column))
.literal(' SET DEFAULT ')
.sql(default))
@operation
def drop_column_default(self, table, column):
return (self
._alter_table(self.make_context(), table)
.literal(' ALTER COLUMN ')
.sql(Entity(column))
.literal(' DROP DEFAULT'))
@operation
def alter_column_type(self, table, column, field, cast=None):
# ALTER TABLE <table> ALTER COLUMN <column>
ctx = self.make_context()
ctx = (self
._alter_column(ctx, table, column)
.literal(' TYPE ')
.sql(field.ddl_datatype(ctx)))
if cast is not None:
if not isinstance(cast, Node):
cast = SQL(cast)
ctx = ctx.literal(' USING ').sql(cast)
return ctx
@operation
def rename_table(self, old_name, new_name):
return (self
._alter_table(self.make_context(), old_name)
.literal(' RENAME TO ')
.sql(Entity(new_name)))
@operation
def add_index(self, table, columns, unique=False, using=None):
ctx = self.make_context()
index_name = make_index_name(table, columns)
table_obj = Table(table)
cols = [getattr(table_obj.c, column) for column in columns]
index = Index(index_name, table_obj, cols, unique=unique, using=using)
return ctx.sql(index)
@operation
def drop_index(self, table, index_name):
return (self
.make_context()
.literal('DROP INDEX ')
.sql(Entity(index_name)))
|
SchemaMigrator
|
python
|
mlflow__mlflow
|
mlflow/types/chat.py
|
{
"start": 1133,
"end": 1443
}
|
class ____(BaseModel):
type: Literal["input_audio"]
input_audio: InputAudio
ContentPartsList = list[
Annotated[TextContentPart | ImageContentPart | AudioContentPart, Field(discriminator="type")]
]
ContentType = Annotated[str | ContentPartsList, Field(union_mode="left_to_right")]
|
AudioContentPart
|
python
|
rapidsai__cudf
|
python/cudf/cudf/core/dtypes.py
|
{
"start": 5169,
"end": 5273
}
|
class ____(ExtensionDtype, Serializable):
# Base type for all cudf-specific dtypes
pass
|
_BaseDtype
|
python
|
apache__airflow
|
providers/amazon/src/airflow/providers/amazon/aws/triggers/bedrock.py
|
{
"start": 8900,
"end": 9827
}
|
class ____(BedrockBaseBatchInferenceTrigger):
"""
Trigger when a batch inference job is complete.
:param job_arn: The Amazon Resource Name (ARN) of the batch inference job.
:param waiter_delay: The amount of time in seconds to wait between attempts. (default: 120)
:param waiter_max_attempts: The maximum number of attempts to be made. (default: 75)
:param aws_conn_id: The Airflow connection used for AWS credentials.
"""
def __init__(
self,
*,
job_arn: str,
waiter_delay: int = 120,
waiter_max_attempts: int = 75,
aws_conn_id: str | None = None,
) -> None:
super().__init__(
waiter_name="batch_inference_complete",
job_arn=job_arn,
waiter_delay=waiter_delay,
waiter_max_attempts=waiter_max_attempts,
aws_conn_id=aws_conn_id,
)
|
BedrockBatchInferenceCompletedTrigger
|
python
|
kamyu104__LeetCode-Solutions
|
Python/maximum-white-tiles-covered-by-a-carpet.py
|
{
"start": 2244,
"end": 2941
}
|
class ____(object):
def maximumWhiteTiles(self, tiles, carpetLen):
"""
:type tiles: List[List[int]]
:type carpetLen: int
:rtype: int
"""
tiles.sort()
prefix = [0]*(len(tiles)+1)
for i, (l, r) in enumerate(tiles):
prefix[i+1] = prefix[i]+(r-l+1)
result = 0
for right, (_, r) in enumerate(tiles):
l = r-carpetLen+1
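# Align the carpet's right edge with this tile's right end, so it covers [l, r];
# binary-search the leftmost tile that can overlap and subtract the part of it left of l.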
left = bisect.bisect_right(tiles, [l])
if left-1 >= 0 and tiles[left-1][1]+1 >= l:
left -= 1
extra = max(l-tiles[left][0], 0)
result = max(result, (prefix[right+1]-prefix[left])-extra)
return result
|
Solution4
|
python
|
huggingface__transformers
|
src/transformers/models/beit/modeling_beit.py
|
{
"start": 17170,
"end": 17759
}
|
class ____(nn.Module):
def __init__(self, config: BeitConfig) -> None:
super().__init__()
self.dense = nn.Linear(config.hidden_size, config.intermediate_size)
if isinstance(config.hidden_act, str):
self.intermediate_act_fn = ACT2FN[config.hidden_act]
else:
self.intermediate_act_fn = config.hidden_act
def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
hidden_states = self.dense(hidden_states)
hidden_states = self.intermediate_act_fn(hidden_states)
return hidden_states
|
BeitIntermediate
|
python
|
huggingface__transformers
|
src/transformers/models/qwen2_5_omni/modeling_qwen2_5_omni.py
|
{
"start": 127654,
"end": 128881
}
|
class ____(nn.Module):
"""An implementation of building block in ECAPA-TDNN, i.e.,
TDNN-Res2Net-TDNN-SqueezeExcitationBlock.
"""
def __init__(
self,
in_channels,
out_channels,
res2net_scale=8,
se_channels=128,
kernel_size=1,
dilation=1,
):
super().__init__()
self.out_channels = out_channels
self.tdnn1 = TimeDelayNetBlock(
in_channels,
out_channels,
kernel_size=1,
dilation=1,
)
self.res2net_block = Res2NetBlock(out_channels, out_channels, res2net_scale, kernel_size, dilation)
self.tdnn2 = TimeDelayNetBlock(
out_channels,
out_channels,
kernel_size=1,
dilation=1,
)
self.se_block = SqueezeExcitationBlock(out_channels, se_channels, out_channels)
def forward(self, hidden_state):
residual = hidden_state
hidden_state = self.tdnn1(hidden_state)
hidden_state = self.res2net_block(hidden_state)
hidden_state = self.tdnn2(hidden_state)
hidden_state = self.se_block(hidden_state)
return hidden_state + residual
|
SqueezeExcitationRes2NetBlock
|
python
|
vyperlang__vyper
|
vyper/exceptions.py
|
{
"start": 12016,
"end": 12118
}
|
class ____(VyperInternalException):
"""General unexpected error during compilation."""
|
CompilerPanic
|
python
|
pennersr__django-allauth
|
allauth/socialaccount/providers/wahoo/provider.py
|
{
"start": 312,
"end": 434
}
|
class ____(ProviderAccount):
def get_profile_url(self):
return "https://api.wahooligan.com/v1/user"
|
WahooAccount
|
python
|
apache__thrift
|
lib/py/src/Thrift.py
|
{
"start": 1357,
"end": 1447
}
|
class ____(object):
CALL = 1
REPLY = 2
EXCEPTION = 3
ONEWAY = 4
|
TMessageType
|
python
|
pytest-dev__pytest
|
src/_pytest/config/argparsing.py
|
{
"start": 9810,
"end": 10698
}
|
class ____:
"""An option defined in an OptionGroup."""
def __init__(self, action: argparse.Action) -> None:
self._action = action
def attrs(self) -> dict[str, Any]:
return self._action.__dict__
def names(self) -> Sequence[str]:
return self._action.option_strings
@property
def dest(self) -> str:
return self._action.dest
@property
def default(self) -> Any:
return self._action.default
@property
def type(self) -> Any | None:
return self._action.type
def __repr__(self) -> str:
args: list[str] = []
args += ["opts: " + repr(self.names())]
args += ["dest: " + repr(self.dest)]
if self._action.type:
args += ["type: " + repr(self.type)]
args += ["default: " + repr(self.default)]
return "Argument({})".format(", ".join(args))
|
Argument
|
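A standalone sketch of what the wrapper above provides: a read-only view over an argparse.Action so pytest can report an option's names, dest, type, and default. Only the Argument class itself is taken from this record.

```python
import argparse

parser = argparse.ArgumentParser()
action = parser.add_argument("-v", "--verbosity", dest="verbosity", type=int, default=0)

arg = Argument(action)            # the class defined above
print(arg.names())                # ['-v', '--verbosity']
print(arg.dest, arg.default)      # verbosity 0
print(repr(arg))                  # Argument(opts: [...], dest: 'verbosity', type: ..., default: 0)
```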
python
|
conda__conda
|
conda/exceptions.py
|
{
"start": 39954,
"end": 40153
}
|
class ____(CondaError):
def __init__(self, caused_by: Any, **kwargs):
message = "No space left on devices."
super().__init__(message, caused_by=caused_by, **kwargs)
|
NoSpaceLeftError
|
python
|
numba__numba
|
numba/cuda/cudadrv/driver.py
|
{
"start": 62795,
"end": 67066
}
|
class ____(object):
"""A memory pointer that owns a buffer, with an optional finalizer. Memory
pointers provide reference counting, and instances are initialized with a
reference count of 1.
The base ``MemoryPointer`` class does not use the
reference count for managing the buffer lifetime. Instead, the buffer
lifetime is tied to the memory pointer instance's lifetime:
- When the instance is deleted, the finalizer will be called.
- When the reference count drops to 0, no action is taken.
Subclasses of ``MemoryPointer`` may modify these semantics, for example to
tie the buffer lifetime to the reference count, so that the buffer is freed
when there are no more references.
:param context: The context in which the pointer was allocated.
:type context: Context
:param pointer: The address of the buffer.
:type pointer: ctypes.c_void_p
:param size: The size of the allocation in bytes.
:type size: int
:param owner: The owner is sometimes set by the internals of this class, or
used for Numba's internal memory management. It should not be
provided by an external user of the ``MemoryPointer`` class
(e.g. from within an EMM Plugin); the default of `None`
should always suffice.
:type owner: NoneType
:param finalizer: A function that is called when the buffer is to be freed.
:type finalizer: function
"""
__cuda_memory__ = True
def __init__(self, context, pointer, size, owner=None, finalizer=None):
self.context = context
self.device_pointer = pointer
self.size = size
self._cuda_memsize_ = size
self.is_managed = finalizer is not None
self.refct = 1
self.handle = self.device_pointer
self._owner = owner
if finalizer is not None:
self._finalizer = weakref.finalize(self, finalizer)
@property
def owner(self):
return self if self._owner is None else self._owner
def own(self):
return OwnedPointer(weakref.proxy(self))
def free(self):
"""
Forces the device memory to the trash.
"""
if self.is_managed:
if not self._finalizer.alive:
raise RuntimeError("Freeing dead memory")
self._finalizer()
assert not self._finalizer.alive
def memset(self, byte, count=None, stream=0):
count = self.size if count is None else count
if stream:
driver.cuMemsetD8Async(self.device_pointer, byte, count,
stream.handle)
else:
driver.cuMemsetD8(self.device_pointer, byte, count)
def view(self, start, stop=None):
if stop is None:
size = self.size - start
else:
size = stop - start
# Handle NULL/empty memory buffer
if not self.device_pointer_value:
if size != 0:
raise RuntimeError("non-empty slice into empty slice")
view = self # new view is just a reference to self
# Handle normal case
else:
base = self.device_pointer_value + start
if size < 0:
raise RuntimeError('size cannot be negative')
if USE_NV_BINDING:
pointer = binding.CUdeviceptr()
ctypes_ptr = drvapi.cu_device_ptr.from_address(pointer.getPtr())
ctypes_ptr.value = base
else:
pointer = drvapi.cu_device_ptr(base)
view = MemoryPointer(self.context, pointer, size, owner=self.owner)
if isinstance(self.owner, (MemoryPointer, OwnedPointer)):
# Owned by a numba-managed memory segment, take an owned reference
return OwnedPointer(weakref.proxy(self.owner), view)
else:
# Owned by external alloc, return view with same external owner
return view
@property
def device_ctypes_pointer(self):
return self.device_pointer
@property
def device_pointer_value(self):
if USE_NV_BINDING:
return int(self.device_pointer) or None
else:
return self.device_pointer.value
|
MemoryPointer
|
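The docstring above ties buffer lifetime to a finalizer rather than to the reference count. A CUDA-free sketch of that weakref.finalize pattern, with a fake buffer standing in for device memory:

```python
import weakref


class FakeBuffer:
    """Stand-in for a device allocation whose release is a finalizer call."""

    def __init__(self, size):
        self.size = size
        # The finalizer runs at most once: either via free() or at GC time.
        self._finalizer = weakref.finalize(self, print, f"released {size} bytes")

    def free(self):
        if not self._finalizer.alive:
            raise RuntimeError("Freeing dead memory")
        self._finalizer()


buf = FakeBuffer(1024)
buf.free()        # -> released 1024 bytes
del buf           # finalizer already ran, so nothing further happens
```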
python
|
jmcnamara__XlsxWriter
|
xlsxwriter/test/app/test_app03.py
|
{
"start": 333,
"end": 2627
}
|
class ____(unittest.TestCase):
"""
Test assembling a complete App file.
"""
def test_assemble_xml_file(self):
"""Test writing an App file."""
self.maxDiff = None
fh = StringIO()
app = App()
app._set_filehandle(fh)
app._add_part_name("Sheet1")
app._add_part_name("Sheet1!Print_Titles")
app._add_heading_pair(("Worksheets", 1))
app._add_heading_pair(("Named Ranges", 1))
app._assemble_xml_file()
exp = _xml_to_list(
"""
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<Properties xmlns="http://schemas.openxmlformats.org/officeDocument/2006/extended-properties" xmlns:vt="http://schemas.openxmlformats.org/officeDocument/2006/docPropsVTypes">
<Application>Microsoft Excel</Application>
<DocSecurity>0</DocSecurity>
<ScaleCrop>false</ScaleCrop>
<HeadingPairs>
<vt:vector size="4" baseType="variant">
<vt:variant>
<vt:lpstr>Worksheets</vt:lpstr>
</vt:variant>
<vt:variant>
<vt:i4>1</vt:i4>
</vt:variant>
<vt:variant>
<vt:lpstr>Named Ranges</vt:lpstr>
</vt:variant>
<vt:variant>
<vt:i4>1</vt:i4>
</vt:variant>
</vt:vector>
</HeadingPairs>
<TitlesOfParts>
<vt:vector size="2" baseType="lpstr">
<vt:lpstr>Sheet1</vt:lpstr>
<vt:lpstr>Sheet1!Print_Titles</vt:lpstr>
</vt:vector>
</TitlesOfParts>
<Company>
</Company>
<LinksUpToDate>false</LinksUpToDate>
<SharedDoc>false</SharedDoc>
<HyperlinksChanged>false</HyperlinksChanged>
<AppVersion>12.0000</AppVersion>
</Properties>
"""
)
got = _xml_to_list(fh.getvalue())
self.assertEqual(exp, got)
|
TestAssembleApp
|
python
|
matplotlib__matplotlib
|
lib/matplotlib/_mathtext.py
|
{
"start": 35484,
"end": 35546
}
|
class ____(FontConstantsBase):
pass
|
DejaVuSerifFontConstants
|
python
|
microsoft__pyright
|
packages/pyright-internal/src/tests/samples/circularBaseClass.py
|
{
"start": 170,
"end": 260
}
|
class ____(Bar):
pass
# This should generate an error because 'ClassB' is not bound.
|
Bar
|
python
|
airbytehq__airbyte
|
airbyte-integrations/connectors/source-github/source_github/github_schema.py
|
{
"start": 179795,
"end": 180845
}
|
class ____(sgqlc.types.Input):
"""Autogenerated input type of CreateCommitOnBranch"""
__schema__ = github_schema
__field_names__ = ("branch", "file_changes", "message", "expected_head_oid", "client_mutation_id")
branch = sgqlc.types.Field(sgqlc.types.non_null(CommittableBranch), graphql_name="branch")
"""The Ref to be updated. Must be a branch."""
file_changes = sgqlc.types.Field("FileChanges", graphql_name="fileChanges")
"""A description of changes to files in this commit."""
message = sgqlc.types.Field(sgqlc.types.non_null(CommitMessage), graphql_name="message")
"""The commit message the be included with the commit."""
expected_head_oid = sgqlc.types.Field(sgqlc.types.non_null(GitObjectID), graphql_name="expectedHeadOid")
"""The git commit oid expected at the head of the branch prior to the
commit
"""
client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId")
"""A unique identifier for the client performing the mutation."""
|
CreateCommitOnBranchInput
|
python
|
dask__dask
|
dask/tests/test_multiprocessing.py
|
{
"start": 2556,
"end": 5098
}
|
class ____:
def __getstate__(self):
return ()
def __setstate__(self, state):
raise ValueError("Can't unpickle me")
def test_unpicklable_args_generate_errors():
a = NotUnpickleable()
dsk = {"x": (bool, a)}
with pytest.raises(ValueError):
get(dsk, "x")
dsk = {"x": (bool, "a"), "a": a}
with pytest.raises(ValueError):
get(dsk, "x")
@pytest.mark.parametrize("pool_typ", [multiprocessing.Pool, ProcessPoolExecutor])
def test_reuse_pool(pool_typ):
with pool_typ(CPU_COUNT) as pool:
with dask.config.set(pool=pool):
assert get({"x": (inc, 1)}, "x") == 2
assert get({"x": (inc, 1)}, "x") == 2
def test_dumps_loads():
with dask.config.set(func_dumps=pickle.dumps, func_loads=pickle.loads):
assert get({"x": 1, "y": (add, "x", 2)}, "y") == 3
def test_fuse_doesnt_clobber_intermediates():
d = {"x": 1, "y": (inc, "x"), "z": (add, 10, "y")}
assert get(d, ["y", "z"]) == (2, 12)
def test_optimize_graph_false():
from dask.callbacks import Callback
d = {"x": 1, "y": (inc, "x"), "z": (add, 10, "y")}
keys = []
with Callback(pretask=lambda key, *args: keys.append(key)):
get(d, "z", optimize_graph=False)
assert len(keys) == 2
def test_works_with_highlevel_graph():
"""Previously `dask.multiprocessing.get` would accidentally forward
`HighLevelGraph` graphs through the dask optimization/scheduling routines,
resulting in odd errors. One way to trigger this was to have a
non-indexable object in a task. This is just a smoketest to ensure that
things work properly even if `HighLevelGraph` objects get passed to
`dask.multiprocessing.get`. See https://github.com/dask/dask/issues/7190.
"""
class NoIndex:
def __init__(self, x):
self.x = x
def __getitem__(self, key):
raise Exception("Oh no!")
x = delayed(lambda x: x)(NoIndex(1))
(res,) = get(x.dask, x.__dask_keys__())
assert isinstance(res, NoIndex)
assert res.x == 1
@pytest.mark.parametrize("random", ["numpy", "random"])
def test_random_seeds(random):
if random == "numpy":
np = pytest.importorskip("numpy")
random = np.random
else:
import random
@delayed(pure=False)
def f():
return tuple(random.randint(0, 10000) for i in range(5))
N = 10
with dask.config.set(scheduler="processes"):
(results,) = compute([f() for _ in range(N)])
assert len(set(results)) == N
|
NotUnpickleable
|
python
|
gevent__gevent
|
src/greentest/3.10/test_socket.py
|
{
"start": 73777,
"end": 77572
}
|
class ____(unittest.TestCase):
def testCrucialConstants(self):
socket.AF_CAN
socket.PF_CAN
socket.CAN_RAW
@unittest.skipUnless(hasattr(socket, "CAN_BCM"),
'socket.CAN_BCM required for this test.')
def testBCMConstants(self):
socket.CAN_BCM
# opcodes
socket.CAN_BCM_TX_SETUP # create (cyclic) transmission task
socket.CAN_BCM_TX_DELETE # remove (cyclic) transmission task
socket.CAN_BCM_TX_READ # read properties of (cyclic) transmission task
socket.CAN_BCM_TX_SEND # send one CAN frame
socket.CAN_BCM_RX_SETUP # create RX content filter subscription
socket.CAN_BCM_RX_DELETE # remove RX content filter subscription
socket.CAN_BCM_RX_READ # read properties of RX content filter subscription
socket.CAN_BCM_TX_STATUS # reply to TX_READ request
socket.CAN_BCM_TX_EXPIRED # notification on performed transmissions (count=0)
socket.CAN_BCM_RX_STATUS # reply to RX_READ request
socket.CAN_BCM_RX_TIMEOUT # cyclic message is absent
socket.CAN_BCM_RX_CHANGED # updated CAN frame (detected content change)
# flags
socket.CAN_BCM_SETTIMER
socket.CAN_BCM_STARTTIMER
socket.CAN_BCM_TX_COUNTEVT
socket.CAN_BCM_TX_ANNOUNCE
socket.CAN_BCM_TX_CP_CAN_ID
socket.CAN_BCM_RX_FILTER_ID
socket.CAN_BCM_RX_CHECK_DLC
socket.CAN_BCM_RX_NO_AUTOTIMER
socket.CAN_BCM_RX_ANNOUNCE_RESUME
socket.CAN_BCM_TX_RESET_MULTI_IDX
socket.CAN_BCM_RX_RTR_FRAME
def testCreateSocket(self):
with socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW) as s:
pass
@unittest.skipUnless(hasattr(socket, "CAN_BCM"),
'socket.CAN_BCM required for this test.')
def testCreateBCMSocket(self):
with socket.socket(socket.PF_CAN, socket.SOCK_DGRAM, socket.CAN_BCM) as s:
pass
def testBindAny(self):
with socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW) as s:
address = ('', )
s.bind(address)
self.assertEqual(s.getsockname(), address)
def testTooLongInterfaceName(self):
# most systems limit IFNAMSIZ to 16, take 1024 to be sure
with socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW) as s:
self.assertRaisesRegex(OSError, 'interface name too long',
s.bind, ('x' * 1024,))
@unittest.skipUnless(hasattr(socket, "CAN_RAW_LOOPBACK"),
'socket.CAN_RAW_LOOPBACK required for this test.')
def testLoopback(self):
with socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW) as s:
for loopback in (0, 1):
s.setsockopt(socket.SOL_CAN_RAW, socket.CAN_RAW_LOOPBACK,
loopback)
self.assertEqual(loopback,
s.getsockopt(socket.SOL_CAN_RAW, socket.CAN_RAW_LOOPBACK))
@unittest.skipUnless(hasattr(socket, "CAN_RAW_FILTER"),
'socket.CAN_RAW_FILTER required for this test.')
def testFilter(self):
can_id, can_mask = 0x200, 0x700
can_filter = struct.pack("=II", can_id, can_mask)
with socket.socket(socket.PF_CAN, socket.SOCK_RAW, socket.CAN_RAW) as s:
s.setsockopt(socket.SOL_CAN_RAW, socket.CAN_RAW_FILTER, can_filter)
self.assertEqual(can_filter,
s.getsockopt(socket.SOL_CAN_RAW, socket.CAN_RAW_FILTER, 8))
s.setsockopt(socket.SOL_CAN_RAW, socket.CAN_RAW_FILTER, bytearray(can_filter))
@unittest.skipUnless(HAVE_SOCKET_CAN, 'SocketCan required for this test.')
|
BasicCANTest
|
python
|
conda__conda
|
conda/models/version.py
|
{
"start": 18819,
"end": 23727
}
|
class ____(BaseSpec, metaclass=SingleStrArgCachingType):
_cache_ = {}
def __init__(self, vspec):
vspec_str, matcher, is_exact = self.get_matcher(vspec)
super().__init__(vspec_str, matcher, is_exact)
def get_matcher(self, vspec):
if isinstance(vspec, str) and regex_split_re.match(vspec):
vspec = treeify(vspec)
if isinstance(vspec, tuple):
vspec_tree = vspec
_matcher = self.any_match if vspec_tree[0] == "|" else self.all_match
tup = tuple(VersionSpec(s) for s in vspec_tree[1:])
vspec_str = untreeify((vspec_tree[0],) + tuple(t.spec for t in tup))
self.tup = tup
matcher = _matcher
is_exact = False
return vspec_str, matcher, is_exact
vspec_str = str(vspec).strip()
if vspec_str[0] == "^" or vspec_str[-1] == "$":
if vspec_str[0] != "^" or vspec_str[-1] != "$":
raise InvalidVersionSpec(
vspec_str, "regex specs must start with '^' and end with '$'"
)
self.regex = re.compile(vspec_str)
matcher = self.regex_match
is_exact = False
elif vspec_str[0] in OPERATOR_START:
m = version_relation_re.match(vspec_str)
if m is None:
raise InvalidVersionSpec(vspec_str, "invalid operator")
operator_str, vo_str = m.groups()
if vo_str[-2:] == ".*":
if operator_str in ("=", ">="):
vo_str = vo_str[:-2]
elif operator_str == "!=":
vo_str = vo_str[:-2]
operator_str = "!=startswith"
elif operator_str == "~=":
raise InvalidVersionSpec(vspec_str, "invalid operator with '.*'")
else:
log.warning(
"Using .* with relational operator is superfluous and deprecated "
"and will be removed in a future version of conda. Your spec was "
f"{vo_str}, but conda is ignoring the .* and treating it as {vo_str[:-2]}"
)
vo_str = vo_str[:-2]
try:
self.operator_func = OPERATOR_MAP[operator_str]
except KeyError:
raise InvalidVersionSpec(vspec_str, f"invalid operator: {operator_str}")
self.matcher_vo = VersionOrder(vo_str)
matcher = self.operator_match
is_exact = operator_str == "=="
elif vspec_str == "*":
matcher = self.always_true_match
is_exact = False
elif "*" in vspec_str.rstrip("*"):
rx = vspec_str.replace(".", r"\.").replace("+", r"\+").replace("*", r".*")
rx = rf"^(?:{rx})$"
self.regex = re.compile(rx)
matcher = self.regex_match
is_exact = False
elif vspec_str[-1] == "*":
if vspec_str[-2:] != ".*":
vspec_str = vspec_str[:-1] + ".*"
# if vspec_str[-1] in OPERATOR_START:
# m = version_relation_re.match(vspec_str)
# if m is None:
# raise InvalidVersionSpecError(vspec_str)
# operator_str, vo_str = m.groups()
#
#
# else:
# pass
vo_str = vspec_str.rstrip("*").rstrip(".")
self.operator_func = VersionOrder.startswith
self.matcher_vo = VersionOrder(vo_str)
matcher = self.operator_match
is_exact = False
elif "@" not in vspec_str:
self.operator_func = OPERATOR_MAP["=="]
self.matcher_vo = VersionOrder(vspec_str)
matcher = self.operator_match
is_exact = True
else:
matcher = self.exact_match
is_exact = True
return vspec_str, matcher, is_exact
def merge(self, other):
if not isinstance(other, self.__class__):
raise TypeError(
f"Can only combine objects of the same type. Received {type(other)}."
)
if self.raw_value == other.raw_value:
return self
return self.__class__(",".join(sorted((self.raw_value, other.raw_value))))
def union(self, other):
if not isinstance(other, self.__class__):
raise TypeError(
f"Can only combine objects of the same type. Received {type(other)}."
)
options = {self.raw_value, other.raw_value}
# important: we only return a string here because the parens get gobbled otherwise
# this info is for visual display only, not for feeding into actual matches
return "|".join(sorted(options))
# TODO: someday switch out these class names for consistency
VersionMatch = VersionSpec
|
VersionSpec
|
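A brief usage sketch, assuming a conda checkout where this class is importable as conda.models.version.VersionSpec and where match() is inherited from BaseSpec (that method is not shown in this record):

```python
from conda.models.version import VersionSpec

print(VersionSpec("1.2.*").match("1.2.3"))       # True  (startswith matcher)
print(VersionSpec(">=1.0,<2.0").match("1.5"))    # True  (',' treeified into all_match)
print(VersionSpec(">=1.0|<0.5").match("0.4"))    # True  ('|' treeified into any_match)
```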
python
|
run-llama__llama_index
|
llama-index-integrations/vector_stores/llama-index-vector-stores-weaviate/llama_index/vector_stores/weaviate/base.py
|
{
"start": 3394,
"end": 19123
}
|
class ____(BasePydanticVectorStore):
"""
Weaviate vector store.
In this vector store, embeddings and docs are stored within a
Weaviate collection.
During query time, the index uses Weaviate to query for the top
k most similar nodes.
Args:
weaviate_client (Optional[Any]): Either a WeaviateClient (synchronous) or WeaviateAsyncClient (asynchronous)
instance from `weaviate-client` package
index_name (Optional[str]): name for Weaviate classes
Examples:
`pip install llama-index-vector-stores-weaviate`
```python
import weaviate
resource_owner_config = weaviate.AuthClientPassword(
username="<username>",
password="<password>",
)
client = weaviate.Client(
"https://llama-test-ezjahb4m.weaviate.network",
auth_client_secret=resource_owner_config,
)
vector_store = WeaviateVectorStore(
weaviate_client=client, index_name="LlamaIndex"
)
```
"""
stores_text: bool = True
index_name: str
url: Optional[str]
text_key: str
auth_config: Dict[str, Any] = Field(default_factory=dict)
client_kwargs: Dict[str, Any] = Field(default_factory=dict)
_client: weaviate.WeaviateClient = PrivateAttr()
_aclient: weaviate.WeaviateAsyncClient = PrivateAttr()
_collection_initialized: bool = PrivateAttr()
_is_self_created_weaviate_client: bool = PrivateAttr() # States if the Weaviate client was created within this class and therefore closing it lies in our responsibility
_custom_batch: Optional[BatchWrapper] = PrivateAttr()
def __init__(
self,
weaviate_client: Optional[Any] = None,
class_prefix: Optional[str] = None,
index_name: Optional[str] = None,
text_key: str = DEFAULT_TEXT_KEY,
auth_config: Optional[Any] = None,
client_kwargs: Optional[Dict[str, Any]] = None,
url: Optional[str] = None,
**kwargs: Any,
) -> None:
"""Initialize params."""
# validate class prefix starts with a capital letter
if class_prefix is not None:
_logger.warning("class_prefix is deprecated, please use index_name")
# legacy, kept for backward compatibility
index_name = f"{class_prefix}_Node"
index_name = index_name or f"LlamaIndex_{uuid4().hex}"
if not index_name[0].isupper():
raise ValueError(
"Index name must start with a capital letter, e.g. 'LlamaIndex'"
)
super().__init__(
url=url,
index_name=index_name,
text_key=text_key,
auth_config=auth_config.__dict__ if auth_config else {},
client_kwargs=client_kwargs or {},
)
if isinstance(weaviate_client, weaviate.WeaviateClient):
self._client = weaviate_client
self._aclient = None
self._is_self_created_weaviate_client = False
elif isinstance(weaviate_client, weaviate.WeaviateAsyncClient):
self._client = None
self._aclient = weaviate_client
self._is_self_created_weaviate_client = False
elif weaviate_client is None:
if isinstance(auth_config, dict):
auth_config = weaviate.auth.AuthApiKey(auth_config)
client_kwargs = client_kwargs or {}
self._client = weaviate.WeaviateClient(
auth_client_secret=auth_config, **client_kwargs
)
self._client.connect()
self._is_self_created_weaviate_client = True
else: # weaviate_client neither one of the expected types nor None
raise ValueError(
f"Unsupported weaviate_client of type {type(weaviate_client)}. Either provide an instance of `WeaviateClient` or `WeaviateAsyncClient` or set `weaviate_client` to None to have a sync client automatically created using the setting provided in `auth_config` and `client_kwargs`."
)
# validate custom batch
self._custom_batch = (
client_kwargs.get("custom_batch") if client_kwargs else None
)
if self._custom_batch and not isinstance(self._custom_batch, BatchWrapper):
raise ValueError(
"client_kwargs['custom_batch'] must be an instance of client.batch.dynamic() or client.batch.fixed_size()"
)
# create default schema if does not exist
if self._client is not None:
if not class_schema_exists(self._client, index_name):
create_default_schema(self._client, index_name)
self._collection_initialized = True
else:
# need to do lazy init for async clients
self._collection_initialized = False
def __del__(self) -> None:
if self._is_self_created_weaviate_client:
self.client.close()
@classmethod
def class_name(cls) -> str:
return "WeaviateVectorStore"
@property
def client(self) -> weaviate.WeaviateClient:
"""Get the synchronous Weaviate client, if available."""
if self._client is None:
raise SyncClientNotProvidedError
return self._client
@property
def async_client(self) -> weaviate.WeaviateAsyncClient:
"""Get the asynchronous Weaviate client, if available."""
if self._aclient is None:
raise AsyncClientNotProvidedError
return self._aclient
def add(
self,
nodes: List[BaseNode],
**add_kwargs: Any,
) -> List[str]:
"""
Add nodes to index.
Args:
nodes: List[BaseNode]: list of nodes with embeddings
"""
ids = [r.node_id for r in nodes]
provided_batch = self._custom_batch
if not provided_batch:
provided_batch = self.client.batch.dynamic()
with provided_batch as batch:
for node in nodes:
data_object = get_data_object(node=node, text_key=self.text_key)
batch.add_object(
collection=self.index_name,
properties=data_object.properties,
uuid=data_object.uuid,
vector=data_object.vector,
)
return ids
async def async_add(
self,
nodes: List[BaseNode],
**add_kwargs: Any,
) -> List[str]:
"""
Add nodes to index.
Args:
nodes: List[BaseNode]: list of nodes with embeddings
Raises:
AsyncClientNotProvidedError: If trying to use async methods without aclient
"""
if len(nodes) > 0 and not self._collection_initialized:
if not await aclass_schema_exists(self.async_client, self.index_name):
await acreate_default_schema(self.async_client, self.index_name)
ids = [r.node_id for r in nodes]
collection = self.async_client.collections.get(self.index_name)
response = await collection.data.insert_many(
[get_data_object(node=node, text_key=self.text_key) for node in nodes]
)
return ids
def delete(self, ref_doc_id: str, **delete_kwargs: Any) -> None:
"""
        Delete nodes with the given ref_doc_id.
Args:
ref_doc_id (str): The doc_id of the document to delete.
"""
collection = self.client.collections.get(self.index_name)
where_filter = wvc.query.Filter.by_property("ref_doc_id").equal(ref_doc_id)
if "filter" in delete_kwargs and delete_kwargs["filter"] is not None:
where_filter = where_filter & _to_weaviate_filter(delete_kwargs["filter"])
collection.data.delete_many(where=where_filter)
async def adelete(self, ref_doc_id: str, **delete_kwargs: Any) -> None:
"""
        Delete nodes with the given ref_doc_id.
Args:
ref_doc_id (str): The doc_id of the document to delete.
Raises:
AsyncClientNotProvidedError: If trying to use async methods without aclient
"""
collection = self.async_client.collections.get(self.index_name)
where_filter = wvc.query.Filter.by_property("ref_doc_id").equal(ref_doc_id)
if "filter" in delete_kwargs and delete_kwargs["filter"] is not None:
where_filter = where_filter & _to_weaviate_filter(delete_kwargs["filter"])
result = await collection.data.delete_many(where=where_filter)
def delete_index(self) -> None:
"""
Delete the index associated with the client.
Raises:
- Exception: If the deletion fails, for some reason.
"""
if not class_schema_exists(self.client, self.index_name):
_logger.warning(
f"Index '{self.index_name}' does not exist. No action taken."
)
return
try:
self.client.collections.delete(self.index_name)
_logger.info(f"Successfully deleted index '{self.index_name}'.")
except Exception as e:
_logger.error(f"Failed to delete index '{self.index_name}': {e}")
raise Exception(f"Failed to delete index '{self.index_name}': {e}")
def delete_nodes(
self,
node_ids: Optional[List[str]] = None,
filters: Optional[MetadataFilters] = None,
**delete_kwargs: Any,
) -> None:
"""
Deletes nodes.
Args:
node_ids (Optional[List[str]], optional): IDs of nodes to delete. Defaults to None.
filters (Optional[MetadataFilters], optional): Metadata filters. Defaults to None.
"""
if not node_ids and not filters:
return
collection = self.client.collections.get(self.index_name)
if node_ids:
filter = wvc.query.Filter.by_id().contains_any(node_ids or [])
if filters:
if node_ids:
filter = filter & _to_weaviate_filter(filters)
else:
filter = _to_weaviate_filter(filters)
collection.data.delete_many(where=filter, **delete_kwargs)
async def adelete_nodes(
self,
node_ids: Optional[List[str]] = None,
filters: Optional[MetadataFilters] = None,
**delete_kwargs: Any,
) -> None:
"""
Deletes nodes.
Args:
node_ids (Optional[List[str]], optional): IDs of nodes to delete. Defaults to None.
filters (Optional[MetadataFilters], optional): Metadata filters. Defaults to None.
Raises:
AsyncClientNotProvidedError: If trying to use async methods without aclient
"""
if not node_ids and not filters:
return
collection = self.async_client.collections.get(self.index_name)
if node_ids:
filter = wvc.query.Filter.by_id().contains_any(node_ids or [])
if filters:
if node_ids:
filter = filter & _to_weaviate_filter(filters)
else:
filter = _to_weaviate_filter(filters)
await collection.data.delete_many(where=filter, **delete_kwargs)
def clear(self) -> None:
"""Clears index."""
self.delete_index()
async def aclear(self) -> None:
"""
Delete the index associated with the client.
Raises:
- Exception: If the deletion fails, for some reason.
- AsyncClientNotProvidedError: If trying to use async methods without aclient
"""
if not await aclass_schema_exists(self.async_client, self.index_name):
_logger.warning(
f"Index '{self.index_name}' does not exist. No action taken."
)
return
try:
await self.async_client.collections.delete(self.index_name)
_logger.info(f"Successfully deleted index '{self.index_name}'.")
except Exception as e:
_logger.error(f"Failed to delete index '{self.index_name}': {e}")
raise Exception(f"Failed to delete index '{self.index_name}': {e}")
def get_query_parameters(self, query: VectorStoreQuery, **kwargs: Any):
filters = None
# list of documents to constrain search
if query.doc_ids:
filters = wvc.query.Filter.by_property("doc_id").contains_any(query.doc_ids)
if query.node_ids:
filters = wvc.query.Filter.by_property("id").contains_any(query.node_ids)
        return_metadata = wvc.query.MetadataQuery(distance=True, score=True)
vector = query.query_embedding
alpha = 1
if query.mode == VectorStoreQueryMode.HYBRID:
_logger.debug(f"Using hybrid search with alpha {query.alpha}")
if vector is not None and query.query_str:
alpha = query.alpha or 0.5
if query.filters is not None:
filters = _to_weaviate_filter(query.filters)
elif "filter" in kwargs and kwargs["filter"] is not None:
filters = kwargs["filter"]
limit = query.similarity_top_k
_logger.debug(f"Using limit of {query.similarity_top_k}")
query_parameters = {
"query": query.query_str,
"vector": vector,
"alpha": alpha,
"limit": limit,
"filters": filters,
"return_metadata": return_metatada,
"include_vector": True,
}
query_parameters.update(kwargs)
return query_parameters
def parse_query_result(
self, query_result: Any, query: VectorStoreQuery
) -> VectorStoreQueryResult:
entries = query_result.objects
similarity_key = "score"
similarities = []
nodes: List[BaseNode] = []
node_ids = []
for i, entry in enumerate(entries):
if i < query.similarity_top_k:
entry_as_dict = entry.__dict__
similarities.append(get_node_similarity(entry_as_dict, similarity_key))
nodes.append(to_node(entry_as_dict, text_key=self.text_key))
node_ids.append(nodes[-1].node_id)
else:
break
return VectorStoreQueryResult(
nodes=nodes, ids=node_ids, similarities=similarities
)
def query(self, query: VectorStoreQuery, **kwargs: Any) -> VectorStoreQueryResult:
"""Query index for top k most similar nodes."""
collection = self.client.collections.get(self.index_name)
query_parameters = self.get_query_parameters(query, **kwargs)
# execute query
try:
query_result = collection.query.hybrid(**query_parameters)
except weaviate.exceptions.WeaviateQueryError as e:
raise ValueError(f"Invalid query, got errors: {e.message}")
# parse results
return self.parse_query_result(query_result, query)
async def aquery(
self, query: VectorStoreQuery, **kwargs: Any
) -> VectorStoreQueryResult:
"""
Query index for top k most similar nodes.
Raises:
AsyncClientNotProvidedError: If trying to use async methods without aclient
"""
collection = self.async_client.collections.get(self.index_name)
query_parameters = self.get_query_parameters(query, **kwargs)
# execute query
try:
query_result = await collection.query.hybrid(**query_parameters)
except weaviate.exceptions.WeaviateQueryError as e:
raise ValueError(f"Invalid query, got errors: {e.message}")
# parse results
return self.parse_query_result(query_result, query)
|
WeaviateVectorStore
|
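A hybrid-query sketch against the store above. The local connection helper assumes a recent weaviate-client v4, the VectorStoreQuery import assumes llama-index-core is installed, and the embedding vector is a placeholder.

```python
import weaviate
from llama_index.core.vector_stores.types import VectorStoreQuery, VectorStoreQueryMode

client = weaviate.connect_to_local()    # assumes a Weaviate instance on localhost
store = WeaviateVectorStore(weaviate_client=client, index_name="LlamaIndex")

query = VectorStoreQuery(
    query_embedding=[0.1] * 1536,       # placeholder embedding vector
    query_str="what is a vector store?",
    mode=VectorStoreQueryMode.HYBRID,   # alpha blends vector and keyword scores
    alpha=0.5,
    similarity_top_k=3,
)
result = store.query(query)
print(result.ids, result.similarities)
client.close()
```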
python
|
PyCQA__pylint
|
tests/functional/n/no/no_member.py
|
{
"start": 1025,
"end": 1054
}
|
class ____:
label: str
|
Base
|
python
|
pyparsing__pyparsing
|
pyparsing/core.py
|
{
"start": 222278,
"end": 233669
}
|
class ____(ParseElementEnhance):
"""
Forward declaration of an expression to be defined later -
used for recursive grammars, such as algebraic infix notation.
When the expression is known, it is assigned to the ``Forward``
instance using the ``'<<'`` operator.
.. Note::
Take care when assigning to ``Forward`` not to overlook
precedence of operators.
Specifically, ``'|'`` has a lower precedence than ``'<<'``, so that::
fwd_expr << a | b | c
will actually be evaluated as::
(fwd_expr << a) | b | c
thereby leaving b and c out as parseable alternatives.
It is recommended that you explicitly group the values
inserted into the :class:`Forward`::
fwd_expr << (a | b | c)
Converting to use the ``'<<='`` operator instead will avoid this problem.
See :meth:`ParseResults.pprint` for an example of a recursive
parser created using :class:`Forward`.
"""
def __init__(
self, other: typing.Optional[Union[ParserElement, str]] = None
) -> None:
self.caller_frame = traceback.extract_stack(limit=2)[0]
super().__init__(other, savelist=False) # type: ignore[arg-type]
self.lshift_line = None
def __lshift__(self, other) -> Forward:
if hasattr(self, "caller_frame"):
del self.caller_frame
if isinstance(other, str_type):
other = self._literalStringClass(other)
if not isinstance(other, ParserElement):
return NotImplemented
self.expr = other
self.streamlined = other.streamlined
self.mayIndexError = self.expr.mayIndexError
self._may_return_empty = self.expr.mayReturnEmpty
self.set_whitespace_chars(
self.expr.whiteChars, copy_defaults=self.expr.copyDefaultWhiteChars
)
self.skipWhitespace = self.expr.skipWhitespace
self.saveAsList = self.expr.saveAsList
self.ignoreExprs.extend(self.expr.ignoreExprs)
self.lshift_line = traceback.extract_stack(limit=2)[-2] # type: ignore[assignment]
return self
def __ilshift__(self, other) -> Forward:
if not isinstance(other, ParserElement):
return NotImplemented
return self << other
def __or__(self, other) -> ParserElement:
caller_line = traceback.extract_stack(limit=2)[-2]
if (
__diag__.warn_on_match_first_with_lshift_operator
and caller_line == self.lshift_line
and Diagnostics.warn_on_match_first_with_lshift_operator
not in self.suppress_warnings_
):
warnings.warn(
"warn_on_match_first_with_lshift_operator:"
" using '<<' operator with '|' is probably an error, use '<<='",
stacklevel=2,
)
ret = super().__or__(other)
return ret
def __del__(self):
# see if we are getting dropped because of '=' reassignment of var instead of '<<=' or '<<'
if (
self.expr is None
and __diag__.warn_on_assignment_to_Forward
and Diagnostics.warn_on_assignment_to_Forward not in self.suppress_warnings_
):
warnings.warn_explicit(
"warn_on_assignment_to_Forward:"
" Forward defined here but no expression attached later using '<<=' or '<<'",
UserWarning,
filename=self.caller_frame.filename,
lineno=self.caller_frame.lineno,
)
def parseImpl(self, instring, loc, do_actions=True) -> ParseImplReturnType:
if (
self.expr is None
and __diag__.warn_on_parse_using_empty_Forward
and Diagnostics.warn_on_parse_using_empty_Forward
not in self.suppress_warnings_
):
# walk stack until parse_string, scan_string, search_string, or transform_string is found
parse_fns = (
"parse_string",
"scan_string",
"search_string",
"transform_string",
)
tb = traceback.extract_stack(limit=200)
for i, frm in enumerate(reversed(tb), start=1):
if frm.name in parse_fns:
stacklevel = i + 1
break
else:
stacklevel = 2
warnings.warn(
"warn_on_parse_using_empty_Forward:"
" Forward expression was never assigned a value, will not parse any input",
stacklevel=stacklevel,
)
if not ParserElement._left_recursion_enabled:
return super().parseImpl(instring, loc, do_actions)
# ## Bounded Recursion algorithm ##
# Recursion only needs to be processed at ``Forward`` elements, since they are
# the only ones that can actually refer to themselves. The general idea is
# to handle recursion stepwise: We start at no recursion, then recurse once,
# recurse twice, ..., until more recursion offers no benefit (we hit the bound).
#
# The "trick" here is that each ``Forward`` gets evaluated in two contexts
# - to *match* a specific recursion level, and
# - to *search* the bounded recursion level
# and the two run concurrently. The *search* must *match* each recursion level
# to find the best possible match. This is handled by a memo table, which
# provides the previous match to the next level match attempt.
#
# See also "Left Recursion in Parsing Expression Grammars", Medeiros et al.
#
# There is a complication since we not only *parse* but also *transform* via
# actions: We do not want to run the actions too often while expanding. Thus,
# we expand using `do_actions=False` and only run `do_actions=True` if the next
# recursion level is acceptable.
with ParserElement.recursion_lock:
memo = ParserElement.recursion_memos
try:
# we are parsing at a specific recursion expansion - use it as-is
prev_loc, prev_result = memo[loc, self, do_actions]
if isinstance(prev_result, Exception):
raise prev_result
return prev_loc, prev_result.copy()
except KeyError:
act_key = (loc, self, True)
peek_key = (loc, self, False)
# we are searching for the best recursion expansion - keep on improving
# both `do_actions` cases must be tracked separately here!
prev_loc, prev_peek = memo[peek_key] = (
loc - 1,
ParseException(
instring, loc, "Forward recursion without base case", self
),
)
if do_actions:
memo[act_key] = memo[peek_key]
while True:
try:
new_loc, new_peek = super().parseImpl(instring, loc, False)
except ParseException:
# we failed before getting any match - do not hide the error
if isinstance(prev_peek, Exception):
raise
new_loc, new_peek = prev_loc, prev_peek
# the match did not get better: we are done
if new_loc <= prev_loc:
if do_actions:
# replace the match for do_actions=False as well,
# in case the action did backtrack
prev_loc, prev_result = memo[peek_key] = memo[act_key]
del memo[peek_key], memo[act_key]
return prev_loc, copy.copy(prev_result)
del memo[peek_key]
return prev_loc, copy.copy(prev_peek)
# the match did get better: see if we can improve further
if do_actions:
try:
memo[act_key] = super().parseImpl(instring, loc, True)
except ParseException as e:
memo[peek_key] = memo[act_key] = (new_loc, e)
raise
prev_loc, prev_peek = memo[peek_key] = new_loc, new_peek
def leave_whitespace(self, recursive: bool = True) -> ParserElement:
"""
Extends ``leave_whitespace`` defined in base class.
"""
self.skipWhitespace = False
return self
def ignore_whitespace(self, recursive: bool = True) -> ParserElement:
"""
Extends ``ignore_whitespace`` defined in base class.
"""
self.skipWhitespace = True
return self
def streamline(self) -> ParserElement:
if not self.streamlined:
self.streamlined = True
if self.expr is not None:
self.expr.streamline()
return self
def validate(self, validateTrace=None) -> None:
warnings.warn(
"ParserElement.validate() is deprecated, and should not be used to check for left recursion",
DeprecationWarning,
stacklevel=2,
)
if validateTrace is None:
validateTrace = []
if self not in validateTrace:
tmp = validateTrace[:] + [self]
if self.expr is not None:
self.expr.validate(tmp)
self._checkRecursion([])
def _generateDefaultName(self) -> str:
# Avoid infinite recursion by setting a temporary _defaultName
save_default_name = self._defaultName
self._defaultName = ": ..."
# Use the string representation of main expression.
try:
if self.expr is not None:
ret_string = str(self.expr)[:1000]
else:
ret_string = "None"
except Exception:
ret_string = "..."
self._defaultName = save_default_name
return f"{type(self).__name__}: {ret_string}"
def copy(self) -> ParserElement:
"""
Returns a copy of this expression.
Generally only used internally by pyparsing.
"""
if self.expr is not None:
return super().copy()
else:
ret = Forward()
ret <<= self
return ret
def _setResultsName(self, name, list_all_matches=False) -> ParserElement:
# fmt: off
if (
__diag__.warn_name_set_on_empty_Forward
and Diagnostics.warn_name_set_on_empty_Forward not in self.suppress_warnings_
and self.expr is None
):
warning = (
"warn_name_set_on_empty_Forward:"
f" setting results name {name!r} on {type(self).__name__} expression"
" that has no contained expression"
)
warnings.warn(warning, stacklevel=3)
# fmt: on
return super()._setResultsName(name, list_all_matches)
# Compatibility synonyms
# fmt: off
leaveWhitespace = replaced_by_pep8("leaveWhitespace", leave_whitespace)
ignoreWhitespace = replaced_by_pep8("ignoreWhitespace", ignore_whitespace)
# fmt: on
|
Forward
|
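A small self-contained grammar showing the '<<=' usage the docstring recommends: nested, '+'-separated integers, with the Forward closed over itself.

```python
import pyparsing as pp

expr = pp.Forward()
atom = pp.Word(pp.nums) | pp.Suppress("(") + expr + pp.Suppress(")")
expr <<= atom + pp.ZeroOrMore(pp.Suppress("+") + atom)   # '<<=' sidesteps the '|' precedence trap

print(expr.parse_string("1+(2+30)+4").as_list())   # ['1', '2', '30', '4']
```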
python
|
django__django
|
tests/backends/oracle/test_operations.py
|
{
"start": 256,
"end": 5715
}
|
class ____(TransactionTestCase):
available_apps = ["backends"]
def test_sequence_name_truncation(self):
seq_name = connection.ops._get_no_autofield_sequence_name(
"schema_authorwithevenlongee869"
)
self.assertEqual(seq_name, "SCHEMA_AUTHORWITHEVENLOB0B8_SQ")
def test_bulk_batch_size(self):
# Oracle restricts the number of parameters in a query.
objects = range(2**16)
self.assertEqual(connection.ops.bulk_batch_size([], objects), len(objects))
# Each field is a parameter for each object.
first_name_field = Person._meta.get_field("first_name")
last_name_field = Person._meta.get_field("last_name")
self.assertEqual(
connection.ops.bulk_batch_size([first_name_field], objects),
connection.features.max_query_params,
)
self.assertEqual(
connection.ops.bulk_batch_size(
[first_name_field, last_name_field],
objects,
),
connection.features.max_query_params // 2,
)
composite_pk = models.CompositePrimaryKey("first_name", "last_name")
composite_pk.fields = [first_name_field, last_name_field]
self.assertEqual(
connection.ops.bulk_batch_size([composite_pk, first_name_field], objects),
connection.features.max_query_params // 3,
)
def test_sql_flush(self):
statements = connection.ops.sql_flush(
no_style(),
[Person._meta.db_table, Tag._meta.db_table],
)
# The tables and constraints are processed in an unordered set.
self.assertEqual(
statements[0],
'ALTER TABLE "BACKENDS_TAG" DISABLE CONSTRAINT '
'"BACKENDS__CONTENT_T_FD9D7A85_F" KEEP INDEX;',
)
self.assertEqual(
sorted(statements[1:-1]),
[
'TRUNCATE TABLE "BACKENDS_PERSON";',
'TRUNCATE TABLE "BACKENDS_TAG";',
],
)
self.assertEqual(
statements[-1],
'ALTER TABLE "BACKENDS_TAG" ENABLE CONSTRAINT '
'"BACKENDS__CONTENT_T_FD9D7A85_F";',
)
def test_sql_flush_allow_cascade(self):
statements = connection.ops.sql_flush(
no_style(),
[Person._meta.db_table, Tag._meta.db_table],
allow_cascade=True,
)
# The tables and constraints are processed in an unordered set.
self.assertEqual(
statements[0],
'ALTER TABLE "BACKENDS_VERYLONGMODELNAME540F" DISABLE CONSTRAINT '
'"BACKENDS__PERSON_ID_1DD5E829_F" KEEP INDEX;',
)
self.assertEqual(
sorted(statements[1:-1]),
[
'TRUNCATE TABLE "BACKENDS_PERSON";',
'TRUNCATE TABLE "BACKENDS_TAG";',
'TRUNCATE TABLE "BACKENDS_VERYLONGMODELNAME540F";',
],
)
self.assertEqual(
statements[-1],
'ALTER TABLE "BACKENDS_VERYLONGMODELNAME540F" ENABLE CONSTRAINT '
'"BACKENDS__PERSON_ID_1DD5E829_F";',
)
def test_sql_flush_sequences(self):
statements = connection.ops.sql_flush(
no_style(),
[Person._meta.db_table, Tag._meta.db_table],
reset_sequences=True,
)
# The tables and constraints are processed in an unordered set.
self.assertEqual(
statements[0],
'ALTER TABLE "BACKENDS_TAG" DISABLE CONSTRAINT '
'"BACKENDS__CONTENT_T_FD9D7A85_F" KEEP INDEX;',
)
self.assertEqual(
sorted(statements[1:3]),
[
'TRUNCATE TABLE "BACKENDS_PERSON";',
'TRUNCATE TABLE "BACKENDS_TAG";',
],
)
self.assertEqual(
statements[3],
'ALTER TABLE "BACKENDS_TAG" ENABLE CONSTRAINT '
'"BACKENDS__CONTENT_T_FD9D7A85_F";',
)
# Sequences.
self.assertEqual(len(statements[4:]), 2)
self.assertIn("BACKENDS_PERSON_SQ", statements[4])
self.assertIn("BACKENDS_TAG_SQ", statements[5])
def test_sql_flush_sequences_allow_cascade(self):
statements = connection.ops.sql_flush(
no_style(),
[Person._meta.db_table, Tag._meta.db_table],
reset_sequences=True,
allow_cascade=True,
)
# The tables and constraints are processed in an unordered set.
self.assertEqual(
statements[0],
'ALTER TABLE "BACKENDS_VERYLONGMODELNAME540F" DISABLE CONSTRAINT '
'"BACKENDS__PERSON_ID_1DD5E829_F" KEEP INDEX;',
)
self.assertEqual(
sorted(statements[1:4]),
[
'TRUNCATE TABLE "BACKENDS_PERSON";',
'TRUNCATE TABLE "BACKENDS_TAG";',
'TRUNCATE TABLE "BACKENDS_VERYLONGMODELNAME540F";',
],
)
self.assertEqual(
statements[4],
'ALTER TABLE "BACKENDS_VERYLONGMODELNAME540F" ENABLE CONSTRAINT '
'"BACKENDS__PERSON_ID_1DD5E829_F";',
)
# Sequences.
self.assertEqual(len(statements[5:]), 3)
self.assertIn("BACKENDS_PERSON_SQ", statements[5])
self.assertIn("BACKENDS_VERYLONGMODELN7BE2_SQ", statements[6])
self.assertIn("BACKENDS_TAG_SQ", statements[7])
|
OperationsTests
|
python
|
airbytehq__airbyte
|
airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/config.py
|
{
"start": 1738,
"end": 2219
}
|
class ____(BaseConfig):
    oauth = Field(True, description="Allow source to have another default method than OAuth.")
bypass_reason: Optional[str] = Field(description="Reason why OAuth is not default method.")
@validator("oauth", always=True)
def validate_oauth(cls, oauth, values):
if oauth is False and not values.get("bypass_reason"):
raise ValueError("Please provide a bypass reason for Auth default method")
return oauth
|
OAuthTestConfig
|
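A sketch of the validator's effect, assuming the class above is a pydantic model importable as OAuthTestConfig:

```python
from pydantic import ValidationError

ok = OAuthTestConfig(oauth=True, bypass_reason=None)   # OAuth is the default method
print(ok.oauth)                                        # True

try:
    OAuthTestConfig(oauth=False)        # OAuth disabled without a bypass reason
except (ValidationError, ValueError) as err:
    print("rejected:", err)
```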
python
|
apache__airflow
|
providers/google/src/airflow/providers/google/cloud/operators/vertex_ai/generative_model.py
|
{
"start": 16476,
"end": 21226
}
|
class ____(GoogleCloudBaseOperator):
"""
Use the Rapid Evaluation API to evaluate a model.
:param project_id: Required. The ID of the Google Cloud project that the service belongs to.
:param location: Required. The ID of the Google Cloud location that the service belongs to.
:param pretrained_model: Required. A pre-trained model optimized for performing natural
language tasks such as classification, summarization, extraction, content
creation, and ideation.
:param eval_dataset: Required. A fixed dataset for evaluating a model against. Adheres to Rapid Evaluation API.
:param metrics: Required. A list of evaluation metrics to be used in the experiment. Adheres to Rapid Evaluation API.
:param experiment_name: Required. The name of the evaluation experiment.
:param experiment_run_name: Required. The specific run name or ID for this experiment.
:param prompt_template: Required. The template used to format the model's prompts during evaluation. Adheres to Rapid Evaluation API.
:param generation_config: Optional. A dictionary containing generation parameters for the model.
:param safety_settings: Optional. A dictionary specifying harm category thresholds for blocking model outputs.
:param system_instruction: Optional. An instruction given to the model to guide its behavior.
:param tools: Optional. A list of tools available to the model during evaluation, such as a data store.
:param gcp_conn_id: The connection ID to use connecting to Google Cloud.
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
"""
template_fields = (
"location",
"project_id",
"impersonation_chain",
"pretrained_model",
"eval_dataset",
"prompt_template",
"experiment_name",
"experiment_run_name",
)
def __init__(
self,
*,
project_id: str,
location: str,
pretrained_model: str,
eval_dataset: dict,
metrics: list,
experiment_name: str,
experiment_run_name: str,
prompt_template: str,
generation_config: dict | None = None,
safety_settings: dict | None = None,
system_instruction: str | None = None,
tools: list | None = None,
gcp_conn_id: str = "google_cloud_default",
impersonation_chain: str | Sequence[str] | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.project_id = project_id
self.location = location
self.pretrained_model = pretrained_model
self.eval_dataset = eval_dataset
self.metrics = metrics
self.experiment_name = experiment_name
self.experiment_run_name = experiment_run_name
self.prompt_template = prompt_template
self.generation_config = generation_config
self.safety_settings = safety_settings
self.system_instruction = system_instruction
self.tools = tools
self.gcp_conn_id = gcp_conn_id
self.impersonation_chain = impersonation_chain
def execute(self, context: Context):
self.hook = GenerativeModelHook(
gcp_conn_id=self.gcp_conn_id,
impersonation_chain=self.impersonation_chain,
)
response = self.hook.run_evaluation(
project_id=self.project_id,
location=self.location,
pretrained_model=self.pretrained_model,
eval_dataset=self.eval_dataset,
metrics=self.metrics,
experiment_name=self.experiment_name,
experiment_run_name=self.experiment_run_name,
prompt_template=self.prompt_template,
generation_config=self.generation_config,
safety_settings=self.safety_settings,
system_instruction=self.system_instruction,
tools=self.tools,
)
return response.summary_metrics
@deprecated(
planned_removal_date="January 3, 2026",
use_instead="airflow.providers.google.cloud.operators.gen_ai.generative_model.GenAICreateCachedContentOperator",
category=AirflowProviderDeprecationWarning,
)
|
RunEvaluationOperator
|
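A hypothetical DAG snippet wiring up the operator above; the project, model, dataset columns, metric name, and template are illustrative placeholders, not values taken from this record.

```python
from datetime import datetime

from airflow import DAG

with DAG("rapid_eval_example", start_date=datetime(2024, 1, 1), schedule=None) as dag:
    run_eval = RunEvaluationOperator(           # the operator defined above
        task_id="run_evaluation",
        project_id="my-gcp-project",            # placeholder project
        location="us-central1",
        pretrained_model="gemini-1.0-pro",      # placeholder model name
        eval_dataset={                          # illustrative column layout
            "instruction": ["Summarize the text."],
            "context": ["Airflow schedules workflows."],
            "reference": ["Airflow is a workflow scheduler."],
        },
        metrics=["exact_match"],                # illustrative metric name
        experiment_name="rapid-eval-demo",
        experiment_run_name="run-1",
        prompt_template="{instruction} {context}",
    )
```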
python
|
facebookresearch__faiss
|
tests/test_index.py
|
{
"start": 25587,
"end": 26092
}
|
class ____(unittest.TestCase):
def test_random(self):
""" just check if several runs of search retrieve the
same results """
index = faiss.IndexRandom(32, 1000000000)
(xt, xb, xq) = get_dataset_2(32, 0, 0, 10)
Dref, Iref = index.search(xq, 10)
self.assertTrue(np.all(Dref[:, 1:] >= Dref[:, :-1]))
Dnew, Inew = index.search(xq, 10)
np.testing.assert_array_equal(Dref, Dnew)
np.testing.assert_array_equal(Iref, Inew)
|
TestRandomIndex
|
python
|
getsentry__sentry
|
src/sentry/integrations/middleware/metrics.py
|
{
"start": 1479,
"end": 1676
}
|
class ____(StrEnum):
"""
Reasons why a middleware operation may halt without success/failure.
"""
ORG_INTEGRATION_DOES_NOT_EXIST = "org_integration_does_not_exist"
|
MiddlewareHaltReason
|
python
|
kamyu104__LeetCode-Solutions
|
Python/minimum-costs-using-the-train-line.py
|
{
"start": 53,
"end": 628
}
|
class ____(object):
def minimumCosts(self, regular, express, expressCost):
"""
:type regular: List[int]
:type express: List[int]
:type expressCost: int
:rtype: List[int]
"""
result = []
dp = [0, expressCost] # dp[0]: min cost of regular route to curr stop, dp[1]: min cost of express route to curr stop
for r, e in itertools.izip(regular, express):
dp = [min(dp[0]+r, dp[1]+e), min(dp[0]+(r+expressCost), dp[1]+e)]
result.append(min(dp[0], dp[1]))
return result
|
Solution
|
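The record above targets Python 2 (itertools.izip). A Python 3 restatement of the same two-lane DP, checked against LeetCode 2361, example 1:

```python
def minimum_costs(regular, express, express_cost):
    result = []
    dp = [0, express_cost]   # [cost of being on the regular lane, cost of being on the express lane]
    for r, e in zip(regular, express):
        dp = [min(dp[0] + r, dp[1] + e),                  # cheapest way to end this stop on regular
              min(dp[0] + r + express_cost, dp[1] + e)]   # cheapest way to end this stop on express
        result.append(min(dp))
    return result


print(minimum_costs([1, 6, 9, 5], [5, 2, 3, 10], 8))   # [1, 7, 14, 19]
```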
python
|
jmcnamara__XlsxWriter
|
xlsxwriter/test/comparison/test_chart_axis51.py
|
{
"start": 315,
"end": 1539
}
|
class ____(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.set_filename("chart_axis51.xlsx")
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
chart = workbook.add_chart({"type": "scatter"})
chart.axis_ids = [47712896, 48124288]
data = [
[1, 2, 3, 4, 5],
[2, 4, 6, 8, 10],
[3, 6, 9, 12, 15],
]
worksheet.write_column("A1", data[0])
worksheet.write_column("B1", data[1])
worksheet.write_column("C1", data[2])
chart.add_series(
{
"categories": "=Sheet1!$A$1:$A$5",
"values": "=Sheet1!$B$1:$B$5",
}
)
chart.add_series(
{
"categories": "=Sheet1!$A$1:$A$5",
"values": "=Sheet1!$C$1:$C$5",
}
)
chart.set_x_axis({"visible": 0})
worksheet.insert_chart("E9", chart)
workbook.close()
self.assertExcelEqual()
|
TestCompareXLSXFiles
|
python
|
realpython__materials
|
arcade-platformer/arcade_platformer/03_read_level_one.py
|
{
"start": 575,
"end": 4372
}
|
class ____(arcade.Window):
def __init__(self) -> None:
super().__init__(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)
# These lists will hold different sets of sprites
self.coins = None
self.background = None
self.walls = None
self.ladders = None
self.goals = None
self.enemies = None
# One sprite for the player, no more is needed
self.player = None
# We need a physics engine as well
self.physics_engine = None
# Someplace to keep score
self.score = 0
# Which level are we on?
self.level = 1
# Load up our sounds here
self.coin_sound = arcade.load_sound(
str(ASSETS_PATH / "sounds" / "coin.wav")
)
self.jump_sound = arcade.load_sound(
str(ASSETS_PATH / "sounds" / "jump.wav")
)
self.victory_sound = arcade.load_sound(
str(ASSETS_PATH / "sounds" / "victory.wav")
)
def setup(self) -> None:
"""Sets up the game for the current level"""
# Get the current map based on the level
map_name = f"platform_level_{self.level:02}.tmx"
map_path = ASSETS_PATH / map_name
# What are the names of the layers?
wall_layer = "ground"
coin_layer = "coins"
goal_layer = "goal"
background_layer = "background"
ladders_layer = "ladders"
# Load the current map
game_map = arcade.tilemap.read_tmx(str(map_path))
# Load the layers
self.background = arcade.tilemap.process_layer(
game_map, layer_name=background_layer, scaling=MAP_SCALING
)
self.goals = arcade.tilemap.process_layer(
game_map, layer_name=goal_layer, scaling=MAP_SCALING
)
self.walls = arcade.tilemap.process_layer(
game_map, layer_name=wall_layer, scaling=MAP_SCALING
)
self.ladders = arcade.tilemap.process_layer(
game_map, layer_name=ladders_layer, scaling=MAP_SCALING
)
self.coins = arcade.tilemap.process_layer(
game_map, layer_name=coin_layer, scaling=MAP_SCALING
)
# Set the background color
background_color = arcade.color.FRESH_AIR
if game_map.background_color:
background_color = game_map.background_color
arcade.set_background_color(background_color)
# Create the player sprite, if they're not already setup
if not self.player:
self.player = self.create_player_sprite()
# Move the player sprite back to the beginning
self.player.center_x = PLAYER_START_X
self.player.center_y = PLAYER_START_Y
self.player.change_x = 0
self.player.change_y = 0
# Load the physics engine for this map
self.physics_engine = arcade.PhysicsEnginePlatformer(
player_sprite=self.player,
platforms=self.walls,
gravity_constant=GRAVITY,
ladders=self.ladders,
)
def on_key_press(self, key: int, modifiers: int):
"""Arguments:
key {int} -- Which key was pressed
modifiers {int} -- Which modifiers were down at the time
"""
def on_key_release(self, key: int, modifiers: int):
"""Arguments:
key {int} -- Which key was released
modifiers {int} -- Which modifiers were down at the time
"""
def on_update(self, delta_time: float):
"""Updates the position of all game objects
Arguments:
delta_time {float} -- How much time since the last call
"""
pass
def on_draw(self):
pass
if __name__ == "__main__":
window = Platformer()
window.setup()
arcade.run()
|
Platformer
|
python
|
encode__django-rest-framework
|
tests/test_generics.py
|
{
"start": 1013,
"end": 1148
}
|
class ____(serializers.ModelSerializer):
class Meta:
model = ForeignKeySource
fields = '__all__'
|
ForeignKeySerializer
|
python
|
microsoft__pyright
|
packages/pyright-internal/src/tests/samples/dataclassFrozen1.py
|
{
"start": 150,
"end": 205
}
|
class ____:
val1: int = 6
@dataclass(frozen=True)
|
DC1
|
python
|
doocs__leetcode
|
solution/2300-2399/2386.Find the K-Sum of an Array/Solution.py
|
{
"start": 0,
"end": 513
}
|
class ____:
def kSum(self, nums: List[int], k: int) -> int:
mx = 0
for i, x in enumerate(nums):
if x > 0:
mx += x
else:
nums[i] = -x
nums.sort()
h = [(0, 0)]
for _ in range(k - 1):
s, i = heappop(h)
if i < len(nums):
heappush(h, (s + nums[i], i + 1))
if i:
heappush(h, (s + nums[i] - nums[i - 1], i + 1))
return mx - h[0][0]
|
Solution
|
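Worked calls for the heap solution above (the repository's file supplies `from typing import List` and the heapq helpers at module level); expected values are LeetCode 2386's two examples.

```python
# K-th largest subset sum = sum of positives minus the k-th smallest removable sum.
print(Solution().kSum([2, 4, -2], 5))                 # 2
print(Solution().kSum([1, -2, 3, 4, -10, 12], 16))    # 10
```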
python
|
encode__django-rest-framework
|
tests/test_relations.py
|
{
"start": 5133,
"end": 6412
}
|
class ____(APISimpleTestCase):
def setUp(self):
self.queryset = MockQueryset([
MockObject(pk=uuid.UUID(int=0), name='foo'),
MockObject(pk=uuid.UUID(int=1), name='bar'),
MockObject(pk=uuid.UUID(int=2), name='baz')
])
self.instance = self.queryset.items[2]
self.field = serializers.PrimaryKeyRelatedField(
queryset=self.queryset,
pk_field=serializers.UUIDField(format='int')
)
def test_pk_related_lookup_exists(self):
instance = self.field.to_internal_value(self.instance.pk.int)
assert instance is self.instance
def test_pk_related_lookup_does_not_exist(self):
with pytest.raises(serializers.ValidationError) as excinfo:
self.field.to_internal_value(4)
msg = excinfo.value.detail[0]
assert msg == 'Invalid pk "00000000-0000-0000-0000-000000000004" - object does not exist.'
def test_pk_representation(self):
representation = self.field.to_representation(self.instance)
assert representation == self.instance.pk.int
urlpatterns = [
re_path(r'^example/(?P<name>.+)/$', lambda: None, name='example'),
]
@override_settings(ROOT_URLCONF='tests.test_relations')
|
TestProxiedPrimaryKeyRelatedField
|
python
|
mlflow__mlflow
|
mlflow/telemetry/events.py
|
{
"start": 9982,
"end": 10148
}
|
class ____(str, Enum):
"""Source of a trace received by the MLflow server."""
MLFLOW_PYTHON_CLIENT = "MLFLOW_PYTHON_CLIENT"
UNKNOWN = "UNKNOWN"
|
TraceSource
|
python
|
tiangolo__fastapi
|
docs_src/schema_extra_example/tutorial004_an_py310.py
|
{
"start": 114,
"end": 917
}
|
class ____(BaseModel):
name: str
description: str | None = None
price: float
tax: float | None = None
@app.put("/items/{item_id}")
async def update_item(
*,
item_id: int,
item: Annotated[
Item,
Body(
examples=[
{
"name": "Foo",
"description": "A very nice Item",
"price": 35.4,
"tax": 3.2,
},
{
"name": "Bar",
"price": "35.4",
},
{
"name": "Baz",
"price": "thirty five point four",
},
],
),
],
):
results = {"item_id": item_id, "item": item}
return results
|
Item
|
python
|
sympy__sympy
|
sympy/plotting/pygletplot/managed_window.py
|
{
"start": 120,
"end": 3072
}
|
class ____(Window):
"""
A pyglet window with an event loop which executes automatically
in a separate thread. Behavior is added by creating a subclass
which overrides setup, update, and/or draw.
"""
fps_limit = 30
default_win_args = {"width": 600,
"height": 500,
"vsync": False,
"resizable": True}
def __init__(self, **win_args):
"""
It is best not to override this function in the child
class, unless you need to take additional arguments.
Do any OpenGL initialization calls in setup().
"""
# check if this is run from the doctester
if win_args.get('runfromdoctester', False):
return
self.win_args = dict(self.default_win_args, **win_args)
self.Thread = Thread(target=self.__event_loop__)
self.Thread.start()
def __event_loop__(self, **win_args):
"""
The event loop thread function. Do not override or call
directly (it is called by __init__).
"""
gl_lock.acquire()
try:
try:
super().__init__(**self.win_args)
self.switch_to()
self.setup()
except Exception as e:
print("Window initialization failed: %s" % (str(e)))
self.has_exit = True
finally:
gl_lock.release()
clock = Clock()
clock.fps_limit = self.fps_limit
while not self.has_exit:
dt = clock.tick()
gl_lock.acquire()
try:
try:
self.switch_to()
self.dispatch_events()
self.clear()
self.update(dt)
self.draw()
self.flip()
except Exception as e:
print("Uncaught exception in event loop: %s" % str(e))
self.has_exit = True
finally:
gl_lock.release()
super().close()
def close(self):
"""
Closes the window.
"""
self.has_exit = True
def setup(self):
"""
Called once before the event loop begins.
Override this method in a child class. This
is the best place to put things like OpenGL
initialization calls.
"""
pass
def update(self, dt):
"""
Called before draw during each iteration of
the event loop. dt is the elapsed time in
seconds since the last update. OpenGL rendering
calls are best put in draw() rather than here.
"""
pass
def draw(self):
"""
Called after update during each iteration of
the event loop. Put OpenGL rendering calls
here.
"""
pass
if __name__ == '__main__':
ManagedWindow()
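As the class docstring says, behavior comes from subclassing and overriding setup, update, and draw. A minimal hedged subclass sketch (requires a working pyglet/OpenGL environment; the subclass name and GL call are illustrative):

from pyglet.gl import glClearColor
from sympy.plotting.pygletplot.managed_window import ManagedWindow  # assumed import path, matching this file

class SpinningScene(ManagedWindow):  # hypothetical subclass
    def setup(self):
        glClearColor(0.0, 0.0, 0.0, 1.0)  # one-time GL state, per the docstring's advice
        self.angle = 0.0

    def update(self, dt):
        self.angle = (self.angle + 90.0 * dt) % 360.0  # dt = seconds since last frame

    def draw(self):
        pass  # rendering calls would go here

# Constructing the window starts its event loop in a background thread:
# SpinningScene(width=400, height=300)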
|
ManagedWindow
|
python
|
getsentry__sentry-python
|
sentry_sdk/integrations/celery/__init__.py
|
{
"start": 8109,
"end": 18699
}
|
class ____:
def __enter__(self):
# type: () -> None
return None
def __exit__(self, exc_type, exc_value, traceback):
# type: (Any, Any, Any) -> None
return None
def _wrap_task_run(f):
# type: (F) -> F
@wraps(f)
def apply_async(*args, **kwargs):
# type: (*Any, **Any) -> Any
# Note: kwargs can contain headers=None, so no setdefault!
# Unsure which backend though.
integration = sentry_sdk.get_client().get_integration(CeleryIntegration)
if integration is None:
return f(*args, **kwargs)
kwarg_headers = kwargs.get("headers") or {}
propagate_traces = kwarg_headers.pop(
"sentry-propagate-traces", integration.propagate_traces
)
if not propagate_traces:
return f(*args, **kwargs)
if isinstance(args[0], Task):
task_name = args[0].name # type: str
elif len(args) > 1 and isinstance(args[1], str):
task_name = args[1]
else:
task_name = "<unknown Celery task>"
task_started_from_beat = sentry_sdk.get_isolation_scope()._name == "celery-beat"
span_mgr = (
sentry_sdk.start_span(
op=OP.QUEUE_SUBMIT_CELERY,
name=task_name,
origin=CeleryIntegration.origin,
)
if not task_started_from_beat
else NoOpMgr()
) # type: Union[Span, NoOpMgr]
with span_mgr as span:
kwargs["headers"] = _update_celery_task_headers(
kwarg_headers, span, integration.monitor_beat_tasks
)
return f(*args, **kwargs)
return apply_async # type: ignore
def _wrap_tracer(task, f):
# type: (Any, F) -> F
# Need to wrap tracer for pushing the scope before prerun is sent, and
# popping it after postrun is sent.
#
# This is the reason we don't use signals for hooking in the first place.
# Also because in Celery 3, signal dispatch returns early if one handler
# crashes.
@wraps(f)
@ensure_integration_enabled(CeleryIntegration, f)
def _inner(*args, **kwargs):
# type: (*Any, **Any) -> Any
with isolation_scope() as scope:
scope._name = "celery"
scope.clear_breadcrumbs()
scope.add_event_processor(_make_event_processor(task, *args, **kwargs))
transaction = None
# Celery task objects are not a thing to be trusted. Even
# something such as attribute access can fail.
with capture_internal_exceptions():
headers = args[3].get("headers") or {}
transaction = continue_trace(
headers,
op=OP.QUEUE_TASK_CELERY,
name="unknown celery task",
source=TransactionSource.TASK,
origin=CeleryIntegration.origin,
)
transaction.name = task.name
transaction.set_status(SPANSTATUS.OK)
if transaction is None:
return f(*args, **kwargs)
with sentry_sdk.start_transaction(
transaction,
custom_sampling_context={
"celery_job": {
"task": task.name,
# for some reason, args[1] is a list if non-empty but a
# tuple if empty
"args": list(args[1]),
"kwargs": args[2],
}
},
):
return f(*args, **kwargs)
return _inner # type: ignore
def _set_messaging_destination_name(task, span):
# type: (Any, Span) -> None
"""Set "messaging.destination.name" tag for span"""
with capture_internal_exceptions():
delivery_info = task.request.delivery_info
if delivery_info:
routing_key = delivery_info.get("routing_key")
if delivery_info.get("exchange") == "" and routing_key is not None:
# Empty exchange indicates the default exchange, meaning the tasks
# are sent to the queue with the same name as the routing key.
span.set_data(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key)
def _wrap_task_call(task, f):
# type: (Any, F) -> F
# Need to wrap task call because the exception is caught before we get to
# see it. Also celery's reported stacktrace is untrustworthy.
# functools.wraps is important here because celery-once looks at this
# method's name. @ensure_integration_enabled internally calls functools.wraps,
# but if we ever remove the @ensure_integration_enabled decorator, we need
# to add @functools.wraps(f) here.
# https://github.com/getsentry/sentry-python/issues/421
@ensure_integration_enabled(CeleryIntegration, f)
def _inner(*args, **kwargs):
# type: (*Any, **Any) -> Any
try:
with sentry_sdk.start_span(
op=OP.QUEUE_PROCESS,
name=task.name,
origin=CeleryIntegration.origin,
) as span:
_set_messaging_destination_name(task, span)
latency = None
with capture_internal_exceptions():
if (
task.request.headers is not None
and "sentry-task-enqueued-time" in task.request.headers
):
latency = _now_seconds_since_epoch() - task.request.headers.pop(
"sentry-task-enqueued-time"
)
if latency is not None:
latency *= 1000 # milliseconds
span.set_data(SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency)
with capture_internal_exceptions():
span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, task.request.id)
with capture_internal_exceptions():
span.set_data(
SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, task.request.retries
)
with capture_internal_exceptions():
span.set_data(
SPANDATA.MESSAGING_SYSTEM,
task.app.connection().transport.driver_type,
)
return f(*args, **kwargs)
except Exception:
exc_info = sys.exc_info()
with capture_internal_exceptions():
_capture_exception(task, exc_info)
reraise(*exc_info)
return _inner # type: ignore
def _patch_build_tracer():
# type: () -> None
import celery.app.trace as trace # type: ignore
original_build_tracer = trace.build_tracer
def sentry_build_tracer(name, task, *args, **kwargs):
# type: (Any, Any, *Any, **Any) -> Any
if not getattr(task, "_sentry_is_patched", False):
# determine whether Celery will use __call__ or run and patch
# accordingly
if task_has_custom(task, "__call__"):
type(task).__call__ = _wrap_task_call(task, type(task).__call__)
else:
task.run = _wrap_task_call(task, task.run)
# `build_tracer` is apparently called for every task
# invocation. Can't wrap every celery task for every invocation
# or we will get infinitely nested wrapper functions.
task._sentry_is_patched = True
return _wrap_tracer(task, original_build_tracer(name, task, *args, **kwargs))
trace.build_tracer = sentry_build_tracer
def _patch_task_apply_async():
# type: () -> None
Task.apply_async = _wrap_task_run(Task.apply_async)
def _patch_celery_send_task():
# type: () -> None
from celery import Celery
Celery.send_task = _wrap_task_run(Celery.send_task)
def _patch_worker_exit():
# type: () -> None
# Need to flush queue before worker shutdown because a crashing worker will
# call os._exit
from billiard.pool import Worker # type: ignore
original_workloop = Worker.workloop
def sentry_workloop(*args, **kwargs):
# type: (*Any, **Any) -> Any
try:
return original_workloop(*args, **kwargs)
finally:
with capture_internal_exceptions():
if (
sentry_sdk.get_client().get_integration(CeleryIntegration)
is not None
):
sentry_sdk.flush()
Worker.workloop = sentry_workloop
def _patch_producer_publish():
# type: () -> None
original_publish = Producer.publish
@ensure_integration_enabled(CeleryIntegration, original_publish)
def sentry_publish(self, *args, **kwargs):
# type: (Producer, *Any, **Any) -> Any
kwargs_headers = kwargs.get("headers", {})
if not isinstance(kwargs_headers, Mapping):
# Ensure kwargs_headers is a Mapping, so we can safely call get().
# We don't expect this to happen, but it's better to be safe. Even
# if it does happen, only our instrumentation breaks. This line
# does not overwrite kwargs["headers"], so the original publish
# method will still work.
kwargs_headers = {}
task_name = kwargs_headers.get("task")
task_id = kwargs_headers.get("id")
retries = kwargs_headers.get("retries")
routing_key = kwargs.get("routing_key")
exchange = kwargs.get("exchange")
with sentry_sdk.start_span(
op=OP.QUEUE_PUBLISH,
name=task_name,
origin=CeleryIntegration.origin,
) as span:
if task_id is not None:
span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, task_id)
if exchange == "" and routing_key is not None:
# Empty exchange indicates the default exchange, meaning messages are
# routed to the queue with the same name as the routing key.
span.set_data(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key)
if retries is not None:
span.set_data(SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, retries)
with capture_internal_exceptions():
span.set_data(
SPANDATA.MESSAGING_SYSTEM, self.connection.transport.driver_type
)
return original_publish(self, *args, **kwargs)
Producer.publish = sentry_publish
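_wrap_task_run above pops a "sentry-propagate-traces" header before delegating to Celery, so trace propagation can be disabled for a single dispatch. A hedged sketch (broker URL and DSN are placeholders):

import sentry_sdk
from celery import Celery
from sentry_sdk.integrations.celery import CeleryIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    integrations=[CeleryIntegration(propagate_traces=True)],
)
app = Celery("tasks", broker="redis://localhost:6379/0")

@app.task
def add(x, y):
    return x + y

# The patched apply_async pops this header, so this one call starts a fresh
# trace instead of continuing the caller's trace:
add.apply_async(args=(2, 3), headers={"sentry-propagate-traces": False})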
|
NoOpMgr
|
python
|
pytorch__pytorch
|
test/distributed/checkpoint/test_nested_dict.py
|
{
"start": 229,
"end": 2065
}
|
class ____(TestCase):
def test_flattening_round_trip(self) -> None:
state_dict = {
"key0": 1,
"key1": [1, 2],
"key2": {"1": 2, "2": 3},
"key3": torch.tensor([1]),
"key4": [[torch.tensor(2), "x"], [1, 2, 3], {"key6": [44]}],
}
flatten_dict, mapping = flatten_state_dict(state_dict)
"""
flatten_dict:
{
'key0': 1,
'key1': [1, 2],
'key2': {'1': 2, '2': 3},
'key3': tensor([1]),
'key4.0.0': tensor(2),
'key4.0.1': 'x',
'key4.1': [1, 2, 3],
'key4.2': {'key6': [44]}
}
"""
restored = unflatten_state_dict(flatten_dict, mapping)
self.assertEqual(state_dict, restored)
def test_mapping(self) -> None:
state_dict = {
"k0": [1],
"k2": [torch.tensor([1]), 99, [{"k3": torch.tensor(1)}]],
"k3": ["x", 99, [{"k3": "y"}]],
}
_, mapping = flatten_state_dict(state_dict)
"""
flatten_dict:
{'k0': [1], 'k2.0': tensor([1]), 'k2.1': 99, 'k2.2.0.k3': tensor(1), 'k3': ['x', 99, [{'k3': 'y'}]]}
mapping:
{'k0': ('k0',), 'k2.0': ('k2', 0), 'k2.1': ('k2', 1), 'k2.2.0.k3': ('k2', 2, 0, 'k3'), 'k3': ('k3',)}
"""
self.assertEqual(("k0",), mapping["k0"])
self.assertEqual(("k2", 0), mapping["k2.0"])
self.assertEqual(("k2", 1), mapping["k2.1"])
self.assertEqual(("k2", 2, 0, "k3"), mapping["k2.2.0.k3"])
self.assertEqual(("k3", 0), mapping["k3.0"])
self.assertEqual(("k3", 1), mapping["k3.1"])
self.assertEqual(("k3", 2, 0, "k3"), mapping["k3.2.0.k3"])
if __name__ == "__main__":
run_tests()
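A hedged sketch of the round trip these tests exercise; the import path is an assumption based on what this test module targets:

import torch
from torch.distributed.checkpoint._nested_dict import (  # assumed module path
    flatten_state_dict,
    unflatten_state_dict,
)

state_dict = {
    "w": torch.tensor([1.0, 2.0]),
    "layers": [{"bias": torch.tensor(0.5)}, {"bias": torch.tensor(1.5)}],
}
flat, mapping = flatten_state_dict(state_dict)
# Containers that hold tensors are flattened to dotted keys such as "layers.0.bias";
# mapping records the original key tuple so the structure can be rebuilt exactly.
assert mapping["layers.0.bias"] == ("layers", 0, "bias")
restored = unflatten_state_dict(flat, mapping)
assert torch.equal(restored["layers"][0]["bias"], state_dict["layers"][0]["bias"])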
|
TestFlattening
|
python
|
getsentry__sentry
|
src/sentry/grouping/strategies/base.py
|
{
"start": 1451,
"end": 1685
}
|
class ____(Protocol[ConcreteInterface]):
def __call__(
self,
interface: ConcreteInterface,
event: Event,
context: GroupingContext,
**kwargs: Any,
) -> ComponentsByVariant: ...
|
StrategyFunc
|
python
|
apache__airflow
|
providers/hashicorp/tests/unit/hashicorp/hooks/test_vault.py
|
{
"start": 1206,
"end": 58178
}
|
class ____:
@staticmethod
def get_mock_connection(
conn_type="vault", schema="secret", host="localhost", port=8180, user="user", password="pass"
):
mock_connection = mock.MagicMock()
type(mock_connection).conn_type = PropertyMock(return_value=conn_type)
type(mock_connection).host = PropertyMock(return_value=host)
type(mock_connection).port = PropertyMock(return_value=port)
type(mock_connection).login = PropertyMock(return_value=user)
type(mock_connection).password = PropertyMock(return_value=password)
type(mock_connection).schema = PropertyMock(return_value=schema)
return mock_connection
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_version_not_int(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {"auth_type": "userpass", "kv_engine_version": "text"}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
}
with pytest.raises(VaultError, match="The version is not an int: text"):
VaultHook(**kwargs)
@pytest.mark.parametrize(
("version", "expected_version"),
[
("2", 2),
(1, 1),
],
)
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_version(self, mock_hvac, mock_get_connection, version, expected_version):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {"auth_type": "userpass", "kv_engine_version": version}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
}
test_hook = VaultHook(**kwargs)
assert expected_version == test_hook.vault_client.kv_engine_version
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_custom_mount_point_dejson(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection(schema="custom")
mock_get_connection.return_value = mock_connection
connection_dict = {
"auth_type": "userpass",
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
}
test_hook = VaultHook(**kwargs)
assert test_hook.vault_client.mount_point == "custom"
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_custom_auth_mount_point_init_params(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {
"auth_type": "userpass",
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {"vault_conn_id": "vault_conn_id", "auth_mount_point": "custom"}
test_hook = VaultHook(**kwargs)
assert test_hook.vault_client.mount_point == "secret"
assert test_hook.vault_client.auth_mount_point == "custom"
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_custom_auth_mount_point_dejson(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {"auth_type": "userpass", "auth_mount_point": "custom"}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
}
test_hook = VaultHook(**kwargs)
assert test_hook.vault_client.mount_point == "secret"
assert test_hook.vault_client.auth_mount_point == "custom"
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_version_one_dejson(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {
"auth_type": "userpass",
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"kv_engine_version": 1,
"vault_conn_id": "vault_conn_id",
}
test_hook = VaultHook(**kwargs)
assert test_hook.vault_client.kv_engine_version == 1
@pytest.mark.parametrize(
("protocol", "expected_url"),
[
("vaults", "https://localhost:8180"),
("http", "http://localhost:8180"),
("https", "https://localhost:8180"),
],
)
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_protocol(self, mock_hvac, mock_get_connection, protocol, expected_url):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection(conn_type=protocol)
mock_get_connection.return_value = mock_connection
connection_dict = {}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
"auth_type": "approle",
"kv_engine_version": 2,
"session": None,
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url=expected_url, session=None)
test_client.auth.approle.login.assert_called_with(role_id="user", secret_id="pass")
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@pytest.mark.parametrize(
("use_tls", "expected_url"),
[
(True, "https://localhost:8180"),
(False, "http://localhost:8180"),
],
)
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_protocol_via_use_tls(self, mock_hvac, mock_get_connection, use_tls, expected_url):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection(conn_type="vault")
mock_get_connection.return_value = mock_connection
connection_dict = {"use_tls": use_tls}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
"auth_type": "approle",
"kv_engine_version": 2,
"session": None,
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url=expected_url, session=None)
test_client.auth.approle.login.assert_called_with(role_id="user", secret_id="pass")
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_approle_init_params(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
"auth_type": "approle",
"kv_engine_version": 2,
"session": None,
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.auth.approle.login.assert_called_with(role_id="user", secret_id="pass")
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_approle_dejson(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {"auth_type": "approle"}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
"session": None,
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.auth.approle.login.assert_called_with(role_id="user", secret_id="pass")
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
@mock.patch.dict(
"os.environ",
AIRFLOW_CONN_VAULT_CONN_ID="https://role:secret@vault.example.com?auth_type=approle",
)
def test_approle_uri(self, mock_hvac):
test_hook = VaultHook(vault_conn_id="vault_conn_id", session=None)
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="https://vault.example.com", session=None)
test_client.auth.approle.login.assert_called_with(role_id="role", secret_id="secret")
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_aws_iam_init_params(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
"auth_type": "aws_iam",
"role_id": "role",
"session": None,
"region": "us-east-2",
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.auth.aws.iam_login.assert_called_with(
access_key="user", secret_key="pass", role="role", region="us-east-2"
)
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_aws_iam_dejson(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {"auth_type": "aws_iam", "role_id": "role", "region": "us-east-2"}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
"session": None,
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.auth.aws.iam_login.assert_called_with(
access_key="user",
secret_key="pass",
role="role",
region="us-east-2",
)
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
@mock.patch.dict(
"os.environ",
AIRFLOW_CONN_VAULT_CONN_ID="https://login:pass@vault.example.com?auth_type=aws_iam&role_id=role"
"®ion=us-east-2",
)
def test_aws_uri(self, mock_hvac):
test_hook = VaultHook(vault_conn_id="vault_conn_id", session=None)
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="https://vault.example.com", session=None)
test_client.auth.aws.iam_login.assert_called_with(
access_key="login", secret_key="pass", role="role", region="us-east-2"
)
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_azure_init_params(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
"auth_type": "azure",
"azure_tenant_id": "tenant_id",
"azure_resource": "resource",
"session": None,
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.auth.azure.configure.assert_called_with(
tenant_id="tenant_id",
resource="resource",
client_id="user",
client_secret="pass",
)
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_azure_dejson(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {
"auth_type": "azure",
"azure_tenant_id": "tenant_id",
"azure_resource": "resource",
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
"session": None,
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.auth.azure.configure.assert_called_with(
tenant_id="tenant_id",
resource="resource",
client_id="user",
client_secret="pass",
)
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.google.cloud.utils.credentials_provider._get_scopes")
@mock.patch("airflow.providers.google.cloud.utils.credentials_provider.get_credentials_and_project_id")
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
@mock.patch("googleapiclient.discovery.build")
def test_gcp_init_params(
self, mock_build, mock_hvac, mock_get_connection, mock_get_credentials, mock_get_scopes
):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
mock_get_scopes.return_value = ["scope1", "scope2"]
mock_get_credentials.return_value = ("credentials", "project_id")
# Mock googleapiclient.discovery.build chain
mock_service = MagicMock()
mock_projects = MagicMock()
mock_service_accounts = MagicMock()
mock_sign_jwt = MagicMock()
mock_sign_jwt.execute.return_value = {"signedJwt": "mocked_jwt"}
mock_service_accounts.signJwt.return_value = mock_sign_jwt
mock_projects.serviceAccounts.return_value = mock_service_accounts
mock_service.projects.return_value = mock_projects
mock_build.return_value = mock_service
connection_dict = {}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
"auth_type": "gcp",
"gcp_key_path": "path.json",
"gcp_scopes": "scope1,scope2",
"role_id": "role",
"session": None,
}
with patch(
"builtins.open", mock_open(read_data='{"client_email": "service_account_email"}')
) as mock_file:
test_hook = VaultHook(**kwargs)
test_client = test_hook.get_conn()
mock_file.assert_called_with("path.json")
mock_get_connection.assert_called_with("vault_conn_id")
mock_get_scopes.assert_called_with("scope1,scope2")
mock_get_credentials.assert_called_with(
key_path="path.json", keyfile_dict=None, scopes=["scope1", "scope2"]
)
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.auth.gcp.login.assert_called_with(role="role", jwt="mocked_jwt")
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.google.cloud.utils.credentials_provider._get_scopes")
@mock.patch("airflow.providers.google.cloud.utils.credentials_provider.get_credentials_and_project_id")
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
@mock.patch("googleapiclient.discovery.build")
def test_gcp_dejson(
self, mock_build, mock_hvac, mock_get_connection, mock_get_credentials, mock_get_scopes
):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
mock_get_scopes.return_value = ["scope1", "scope2"]
mock_get_credentials.return_value = ("credentials", "project_id")
# Mock googleapiclient.discovery.build chain
mock_service = MagicMock()
mock_projects = MagicMock()
mock_service_accounts = MagicMock()
mock_sign_jwt = MagicMock()
mock_sign_jwt.execute.return_value = {"signedJwt": "mocked_jwt"}
mock_service_accounts.signJwt.return_value = mock_sign_jwt
mock_projects.serviceAccounts.return_value = mock_service_accounts
mock_service.projects.return_value = mock_projects
mock_build.return_value = mock_service
connection_dict = {
"auth_type": "gcp",
"gcp_key_path": "path.json",
"gcp_scopes": "scope1,scope2",
"role_id": "role",
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
"session": None,
"role_id": "role",
}
with patch(
"builtins.open", mock_open(read_data='{"client_email": "service_account_email"}')
) as mock_file:
test_hook = VaultHook(**kwargs)
test_client = test_hook.get_conn()
mock_file.assert_called_with("path.json")
mock_get_connection.assert_called_with("vault_conn_id")
mock_get_scopes.assert_called_with("scope1,scope2")
mock_get_credentials.assert_called_with(
key_path="path.json", keyfile_dict=None, scopes=["scope1", "scope2"]
)
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.auth.gcp.login.assert_called_with(role="role", jwt="mocked_jwt")
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.google.cloud.utils.credentials_provider._get_scopes")
@mock.patch("airflow.providers.google.cloud.utils.credentials_provider.get_credentials_and_project_id")
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
@mock.patch("googleapiclient.discovery.build")
def test_gcp_dict_dejson(
self, mock_build, mock_hvac, mock_get_connection, mock_get_credentials, mock_get_scopes
):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
mock_get_scopes.return_value = ["scope1", "scope2"]
mock_get_credentials.return_value = ("credentials", "project_id")
# Mock googleapiclient.discovery.build chain
mock_service = MagicMock()
mock_projects = MagicMock()
mock_service_accounts = MagicMock()
mock_sign_jwt = MagicMock()
mock_sign_jwt.execute.return_value = {"signedJwt": "mocked_jwt"}
mock_service_accounts.signJwt.return_value = mock_sign_jwt
mock_projects.serviceAccounts.return_value = mock_service_accounts
mock_service.projects.return_value = mock_projects
mock_build.return_value = mock_service
connection_dict = {
"auth_type": "gcp",
"gcp_keyfile_dict": '{"client_email": "service_account_email"}',
"gcp_scopes": "scope1,scope2",
"role_id": "role",
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
"session": None,
"role_id": "role",
}
test_hook = VaultHook(**kwargs)
test_client = test_hook.get_conn()
mock_get_connection.assert_called_with("vault_conn_id")
mock_get_scopes.assert_called_with("scope1,scope2")
mock_get_credentials.assert_called_with(
key_path=None, keyfile_dict={"client_email": "service_account_email"}, scopes=["scope1", "scope2"]
)
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.auth.gcp.login.assert_called_with(role="role", jwt="mocked_jwt")
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_github_init_params(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"auth_type": "github",
"vault_conn_id": "vault_conn_id",
"session": None,
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.auth.github.login.assert_called_with(token="pass")
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_github_dejson(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {
"auth_type": "github",
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
"session": None,
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.auth.github.login.assert_called_with(token="pass")
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.Kubernetes")
def test_kubernetes_default_path(self, mock_kubernetes, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"auth_type": "kubernetes",
"kubernetes_role": "kube_role",
"vault_conn_id": "vault_conn_id",
"session": None,
}
with patch("builtins.open", mock_open(read_data="data")) as mock_file:
test_hook = VaultHook(**kwargs)
test_client = test_hook.get_conn()
mock_get_connection.assert_called_with("vault_conn_id")
mock_file.assert_called_with("/var/run/secrets/kubernetes.io/serviceaccount/token")
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
mock_kubernetes.assert_called_with(mock_client.adapter)
mock_kubernetes.return_value.login.assert_called_with(role="kube_role", jwt="data")
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.Kubernetes")
def test_kubernetes_init_params(self, mock_kubernetes, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {
"kubernetes_role": "kube_role",
"kubernetes_jwt_path": "path",
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"auth_type": "kubernetes",
"vault_conn_id": "vault_conn_id",
"session": None,
}
with patch("builtins.open", mock_open(read_data="data")) as mock_file:
test_hook = VaultHook(**kwargs)
test_client = test_hook.get_conn()
mock_get_connection.assert_called_with("vault_conn_id")
mock_file.assert_called_with("path")
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
mock_kubernetes.assert_called_with(mock_client.adapter)
mock_kubernetes.return_value.login.assert_called_with(role="kube_role", jwt="data")
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.Kubernetes")
def test_kubernetes_dejson(self, mock_kubernetes, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"kubernetes_role": "kube_role",
"kubernetes_jwt_path": "path",
"auth_type": "kubernetes",
"vault_conn_id": "vault_conn_id",
"session": None,
}
with patch("builtins.open", mock_open(read_data="data")) as mock_file:
test_hook = VaultHook(**kwargs)
test_client = test_hook.get_conn()
mock_get_connection.assert_called_with("vault_conn_id")
mock_file.assert_called_with("path")
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
mock_kubernetes.assert_called_with(mock_client.adapter)
mock_kubernetes.return_value.login.assert_called_with(role="kube_role", jwt="data")
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_client_kwargs(self, mock_hvac, mock_get_connection):
"""This test checks that values in connection extras keyed with 'client_kwargs' will be
consumed by the underlying Hashicorp Vault client init. The order of precedence should
be kwargs (passed through the hook init) > client_kwargs (found in connection extras).
"""
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {
"client_kwargs": {"namespace": "name", "timeout": 50, "generic_arg": "generic_val1"}
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {"vault_conn_id": "vault_conn_id", "generic_arg": "generic_val0", "session": None}
test_hook = VaultHook(**kwargs)
test_client = test_hook.get_conn()
mock_get_connection.assert_called_with("vault_conn_id")
mock_hvac.Client.assert_called_with(
url="http://localhost:8180",
namespace="name",
timeout=50,
generic_arg="generic_val0",
session=None,
)
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_ldap_init_params(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"auth_type": "ldap",
"vault_conn_id": "vault_conn_id",
"session": None,
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.auth.ldap.login.assert_called_with(username="user", password="pass")
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_ldap_dejson(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {
"auth_type": "ldap",
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
"session": None,
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.auth.ldap.login.assert_called_with(username="user", password="pass")
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_radius_init_params(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"auth_type": "radius",
"radius_host": "radhost",
"vault_conn_id": "vault_conn_id",
"session": None,
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.auth.radius.configure.assert_called_with(host="radhost", secret="pass", port=None)
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_radius_init_params_port(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"auth_type": "radius",
"radius_host": "radhost",
"radius_port": 8123,
"vault_conn_id": "vault_conn_id",
"session": None,
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.auth.radius.configure.assert_called_with(host="radhost", secret="pass", port=8123)
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_radius_dejson(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {
"auth_type": "radius",
"radius_host": "radhost",
"radius_port": "8123",
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
"session": None,
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.auth.radius.configure.assert_called_with(host="radhost", secret="pass", port=8123)
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_radius_dejson_wrong_port(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {
"auth_type": "radius",
"radius_host": "radhost",
"radius_port": "wrong",
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
}
with pytest.raises(VaultError, match="Radius port was wrong: wrong"):
VaultHook(**kwargs)
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_token_init_params(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
"auth_type": "token",
"kv_engine_version": 2,
"session": None,
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.is_authenticated.assert_called_with()
assert test_client.token == "pass"
assert test_hook.vault_client.kv_engine_version == 2
assert test_hook.vault_client.mount_point == "secret"
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_token_dejson(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {
"auth_type": "token",
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
"session": None,
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.is_authenticated.assert_called_with()
assert test_client.token == "pass"
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_userpass_init_params(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
"auth_type": "userpass",
"kv_engine_version": 2,
"session": None,
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.auth.userpass.login.assert_called_with(username="user", password="pass")
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_userpass_dejson(self, mock_hvac, mock_get_connection):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
connection_dict = {
"auth_type": "userpass",
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {
"vault_conn_id": "vault_conn_id",
"session": None,
}
test_hook = VaultHook(**kwargs)
mock_get_connection.assert_called_with("vault_conn_id")
test_client = test_hook.get_conn()
mock_hvac.Client.assert_called_with(url="http://localhost:8180", session=None)
test_client.auth.userpass.login.assert_called_with(username="user", password="pass")
test_client.is_authenticated.assert_called_with()
assert test_hook.vault_client.kv_engine_version == 2
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_get_existing_key_v2(self, mock_hvac, mock_get_connection):
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
connection_dict = {}
mock_client.secrets.kv.v2.read_secret_version.return_value = {
"request_id": "94011e25-f8dc-ec29-221b-1f9c1d9ad2ae",
"lease_id": "",
"renewable": False,
"lease_duration": 0,
"data": {
"data": {"secret_key": "secret_value"},
"metadata": {
"created_time": "2020-03-16T21:01:43.331126Z",
"deletion_time": "",
"destroyed": False,
"version": 1,
},
},
"wrap_info": None,
"warnings": None,
"auth": None,
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {"vault_conn_id": "vault_conn_id", "auth_type": "token", "kv_engine_version": 2}
test_hook = VaultHook(**kwargs)
secret = test_hook.get_secret(secret_path="missing")
assert secret == {"secret_key": "secret_value"}
mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with(
mount_point="secret", path="missing", version=None, raise_on_deleted_version=True
)
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_get_existing_key_v2_version(self, mock_hvac, mock_get_connection):
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
connection_dict = {}
mock_client.secrets.kv.v2.read_secret_version.return_value = {
"request_id": "94011e25-f8dc-ec29-221b-1f9c1d9ad2ae",
"lease_id": "",
"renewable": False,
"lease_duration": 0,
"data": {
"data": {"secret_key": "secret_value"},
"metadata": {
"created_time": "2020-03-16T21:01:43.331126Z",
"deletion_time": "",
"destroyed": False,
"version": 1,
},
},
"wrap_info": None,
"warnings": None,
"auth": None,
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {"vault_conn_id": "vault_conn_id", "auth_type": "token", "kv_engine_version": 2}
test_hook = VaultHook(**kwargs)
secret = test_hook.get_secret(secret_path="missing", secret_version=1)
assert secret == {"secret_key": "secret_value"}
mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with(
mount_point="secret", path="missing", version=1, raise_on_deleted_version=True
)
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_get_existing_key_v1(self, mock_hvac, mock_get_connection):
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
connection_dict = {}
mock_client.secrets.kv.v1.read_secret.return_value = {
"request_id": "182d0673-618c-9889-4cba-4e1f4cfe4b4b",
"lease_id": "",
"renewable": False,
"lease_duration": 2764800,
"data": {"value": "world"},
"wrap_info": None,
"warnings": None,
"auth": None,
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {"vault_conn_id": "vault_conn_id", "auth_type": "token", "kv_engine_version": 1}
test_hook = VaultHook(**kwargs)
secret = test_hook.get_secret(secret_path="missing")
assert secret == {"value": "world"}
mock_client.secrets.kv.v1.read_secret.assert_called_once_with(mount_point="secret", path="missing")
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_get_secret_metadata_v2(self, mock_hvac, mock_get_connection):
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
connection_dict = {}
mock_client.secrets.kv.v2.read_secret_metadata.return_value = {
"request_id": "94011e25-f8dc-ec29-221b-1f9c1d9ad2ae",
"lease_id": "",
"renewable": False,
"lease_duration": 0,
"metadata": [
{
"created_time": "2020-03-16T21:01:43.331126Z",
"deletion_time": "",
"destroyed": False,
"version": 1,
},
{
"created_time": "2020-03-16T21:01:43.331126Z",
"deletion_time": "",
"destroyed": False,
"version": 2,
},
],
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {"vault_conn_id": "vault_conn_id", "auth_type": "token", "kv_engine_version": 2}
test_hook = VaultHook(**kwargs)
metadata = test_hook.get_secret_metadata(secret_path="missing")
assert metadata == {
"request_id": "94011e25-f8dc-ec29-221b-1f9c1d9ad2ae",
"lease_id": "",
"renewable": False,
"lease_duration": 0,
"metadata": [
{
"created_time": "2020-03-16T21:01:43.331126Z",
"deletion_time": "",
"destroyed": False,
"version": 1,
},
{
"created_time": "2020-03-16T21:01:43.331126Z",
"deletion_time": "",
"destroyed": False,
"version": 2,
},
],
}
mock_client.secrets.kv.v2.read_secret_metadata.assert_called_once_with(
mount_point="secret", path="missing"
)
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_get_secret_including_metadata_v2(self, mock_hvac, mock_get_connection):
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
connection_dict = {}
mock_client.secrets.kv.v2.read_secret_version.return_value = {
"request_id": "94011e25-f8dc-ec29-221b-1f9c1d9ad2ae",
"lease_id": "",
"renewable": False,
"lease_duration": 0,
"data": {
"data": {"secret_key": "secret_value"},
"metadata": {
"created_time": "2020-03-16T21:01:43.331126Z",
"deletion_time": "",
"destroyed": False,
"version": 1,
},
},
"wrap_info": None,
"warnings": None,
"auth": None,
}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {"vault_conn_id": "vault_conn_id", "auth_type": "token", "kv_engine_version": 2}
test_hook = VaultHook(**kwargs)
metadata = test_hook.get_secret_including_metadata(secret_path="missing")
assert metadata == {
"request_id": "94011e25-f8dc-ec29-221b-1f9c1d9ad2ae",
"lease_id": "",
"renewable": False,
"lease_duration": 0,
"data": {
"data": {"secret_key": "secret_value"},
"metadata": {
"created_time": "2020-03-16T21:01:43.331126Z",
"deletion_time": "",
"destroyed": False,
"version": 1,
},
},
"wrap_info": None,
"warnings": None,
"auth": None,
}
mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with(
mount_point="secret", path="missing", version=None, raise_on_deleted_version=True
)
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_create_or_update_secret_v2(self, mock_hvac, mock_get_connection):
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
connection_dict = {}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {"vault_conn_id": "vault_conn_id", "auth_type": "token", "kv_engine_version": 2}
test_hook = VaultHook(**kwargs)
test_hook.create_or_update_secret(secret_path="path", secret={"key": "value"})
mock_client.secrets.kv.v2.create_or_update_secret.assert_called_once_with(
mount_point="secret", path="path", secret={"key": "value"}, cas=None
)
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_create_or_update_secret_v2_cas(self, mock_hvac, mock_get_connection):
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
connection_dict = {}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {"vault_conn_id": "vault_conn_id", "auth_type": "token", "kv_engine_version": 2}
test_hook = VaultHook(**kwargs)
test_hook.create_or_update_secret(secret_path="path", secret={"key": "value"}, cas=10)
mock_client.secrets.kv.v2.create_or_update_secret.assert_called_once_with(
mount_point="secret", path="path", secret={"key": "value"}, cas=10
)
@pytest.mark.parametrize(
("method", "expected_method"),
[
(None, None),
("post", "post"),
],
)
@mock.patch("airflow.providers.hashicorp.hooks.vault.VaultHook.get_connection")
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_create_or_update_secret_v1(self, mock_hvac, mock_get_connection, method, expected_method):
mock_connection = self.get_mock_connection()
mock_get_connection.return_value = mock_connection
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
connection_dict = {}
mock_connection.extra_dejson.get.side_effect = connection_dict.get
kwargs = {"vault_conn_id": "vault_conn_id", "auth_type": "token", "kv_engine_version": 1}
test_hook = VaultHook(**kwargs)
test_hook.create_or_update_secret(secret_path="path", secret={"key": "value"}, method=method)
mock_client.secrets.kv.v1.create_or_update_secret.assert_called_once_with(
mount_point="secret", path="path", secret={"key": "value"}, method=expected_method
)
|
TestVaultHook
|
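The Vault hook tests above reduce to a thin wrapper over `hvac`'s KV v1/v2 calls. A minimal usage sketch mirroring only the calls exercised in the tests; it assumes a configured `vault_conn_id` Airflow connection and a reachable Vault server, and the path/payload values are placeholders:

```python
# Sketch only; mirrors the hook calls asserted on in the tests above.
from airflow.providers.hashicorp.hooks.vault import VaultHook

hook = VaultHook(vault_conn_id="vault_conn_id", auth_type="token", kv_engine_version=2)

# KV v2 write; `cas` gives check-and-set semantics (test_create_or_update_secret_v2_cas).
hook.create_or_update_secret(secret_path="path", secret={"key": "value"}, cas=10)

# KV v2 read that also returns the version metadata block asserted on above.
response = hook.get_secret_including_metadata(secret_path="path")
print(response["data"]["data"], response["data"]["metadata"]["version"])
```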
python
|
docker__docker-py
|
tests/unit/utils_config_test.py
|
{
"start": 163,
"end": 2143
}
|
class ____(unittest.TestCase):
@fixture(autouse=True)
def tmpdir(self, tmpdir):
self.mkdir = tmpdir.mkdir
def test_find_config_fallback(self):
tmpdir = self.mkdir('test_find_config_fallback')
with mock.patch.dict(os.environ, {'HOME': str(tmpdir)}):
assert config.find_config_file() is None
def test_find_config_from_explicit_path(self):
tmpdir = self.mkdir('test_find_config_from_explicit_path')
config_path = tmpdir.ensure('my-config-file.json')
assert config.find_config_file(str(config_path)) == str(config_path)
def test_find_config_from_environment(self):
tmpdir = self.mkdir('test_find_config_from_environment')
config_path = tmpdir.ensure('config.json')
with mock.patch.dict(os.environ, {'DOCKER_CONFIG': str(tmpdir)}):
assert config.find_config_file() == str(config_path)
@mark.skipif("sys.platform == 'win32'")
def test_find_config_from_home_posix(self):
tmpdir = self.mkdir('test_find_config_from_home_posix')
config_path = tmpdir.ensure('.docker', 'config.json')
with mock.patch.dict(os.environ, {'HOME': str(tmpdir)}):
assert config.find_config_file() == str(config_path)
@mark.skipif("sys.platform == 'win32'")
def test_find_config_from_home_legacy_name(self):
tmpdir = self.mkdir('test_find_config_from_home_legacy_name')
config_path = tmpdir.ensure('.dockercfg')
with mock.patch.dict(os.environ, {'HOME': str(tmpdir)}):
assert config.find_config_file() == str(config_path)
@mark.skipif("sys.platform != 'win32'")
def test_find_config_from_home_windows(self):
tmpdir = self.mkdir('test_find_config_from_home_windows')
config_path = tmpdir.ensure('.docker', 'config.json')
with mock.patch.dict(os.environ, {'USERPROFILE': str(tmpdir)}):
assert config.find_config_file() == str(config_path)
|
FindConfigFileTest
|
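The config tests above pin down the lookup order: an explicit path, then `$DOCKER_CONFIG/config.json`, then `~/.docker/config.json`, then the legacy `~/.dockercfg`. A short sketch of calling the helper directly, assuming it is importable as `docker.utils.config` (paths and environment values are illustrative):

```python
# Sketch: exercising the lookup order covered by FindConfigFileTest.
import os
from docker.utils import config

explicit = config.find_config_file("/tmp/my-config-file.json")  # returned only if the file exists

os.environ["DOCKER_CONFIG"] = "/tmp/docker-config"               # illustrative directory
from_env = config.find_config_file()                             # <DOCKER_CONFIG>/config.json, if present

print(explicit, from_env)                                        # both are None when nothing is found
```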
python
|
python-openxml__python-docx
|
tests/oxml/test_xmlchemy.py
|
{
"start": 14474,
"end": 15657
}
|
class ____:
def it_adds_a_getter_property_for_the_attr_value(self, getter_fixture):
parent, optAttr_python_value = getter_fixture
assert parent.optAttr == optAttr_python_value
def it_adds_a_setter_property_for_the_attr(self, setter_fixture):
parent, value, expected_xml = setter_fixture
parent.optAttr = value
assert parent.xml == expected_xml
def it_adds_a_docstring_for_the_property(self):
assert CT_Parent.optAttr.__doc__.startswith("ST_IntegerType type-converted value of ")
# fixtures -------------------------------------------------------
@pytest.fixture
def getter_fixture(self):
parent = a_parent().with_nsdecls().with_optAttr("24").element
return parent, 24
@pytest.fixture(params=[36, None])
def setter_fixture(self, request):
value = request.param
parent = a_parent().with_nsdecls().with_optAttr("42").element
if value is None:
expected_xml = a_parent().with_nsdecls().xml()
else:
expected_xml = a_parent().with_nsdecls().with_optAttr(value).xml()
return parent, value, expected_xml
|
DescribeOptionalAttribute
|
python
|
PrefectHQ__prefect
|
tests/server/models/test_workers.py
|
{
"start": 23796,
"end": 31602
}
|
class ____:
@pytest.fixture(autouse=True)
async def setup(self, session, flow):
"""
Creates:
- Three different work pools ("A", "B", "C")
- Three different queues in each pool ("AA", "AB", "AC", "BA", "BB", "BC", "CA", "CB", "CC")
- One pending run, one running run, and 5 scheduled runs in each queue
"""
# create three different work pools
wp_a = await models.workers.create_work_pool(
session=session,
work_pool=schemas.actions.WorkPoolCreate(name="A"),
)
wp_b = await models.workers.create_work_pool(
session=session,
work_pool=schemas.actions.WorkPoolCreate(name="B"),
)
wp_c = await models.workers.create_work_pool(
session=session,
work_pool=schemas.actions.WorkPoolCreate(name="C"),
)
# create three different work queues for each config
wq_aa = await models.workers.create_work_queue(
session=session,
work_pool_id=wp_a.id,
work_queue=schemas.actions.WorkQueueCreate(name="AA"),
)
wq_ab = await models.workers.create_work_queue(
session=session,
work_pool_id=wp_a.id,
work_queue=schemas.actions.WorkQueueCreate(name="AB"),
)
wq_ac = await models.workers.create_work_queue(
session=session,
work_pool_id=wp_a.id,
work_queue=schemas.actions.WorkQueueCreate(name="AC"),
)
wq_ba = await models.workers.create_work_queue(
session=session,
work_pool_id=wp_b.id,
work_queue=schemas.actions.WorkQueueCreate(name="BA"),
)
wq_bb = await models.workers.create_work_queue(
session=session,
work_pool_id=wp_b.id,
work_queue=schemas.actions.WorkQueueCreate(name="BB"),
)
wq_bc = await models.workers.create_work_queue(
session=session,
work_pool_id=wp_b.id,
work_queue=schemas.actions.WorkQueueCreate(name="BC"),
)
wq_ca = await models.workers.create_work_queue(
session=session,
work_pool_id=wp_c.id,
work_queue=schemas.actions.WorkQueueCreate(name="CA"),
)
wq_cb = await models.workers.create_work_queue(
session=session,
work_pool_id=wp_c.id,
work_queue=schemas.actions.WorkQueueCreate(name="CB"),
)
wq_cc = await models.workers.create_work_queue(
session=session,
work_pool_id=wp_c.id,
work_queue=schemas.actions.WorkQueueCreate(name="CC"),
)
# create flow runs
for wq in [wq_aa, wq_ab, wq_ac, wq_ba, wq_bb, wq_bc, wq_ca, wq_cb, wq_cc]:
# create a running run
await models.flow_runs.create_flow_run(
session=session,
flow_run=schemas.core.FlowRun(
flow_id=flow.id,
state=schemas.states.Running(),
work_queue_id=wq.id,
),
)
# create a pending run
await models.flow_runs.create_flow_run(
session=session,
flow_run=schemas.core.FlowRun(
flow_id=flow.id,
state=schemas.states.Pending(),
work_queue_id=wq.id,
),
)
# create scheduled runs
for i in range(3, -2, -1):
current_time = now("UTC") + datetime.timedelta(hours=i)
await models.flow_runs.create_flow_run(
session=session,
flow_run=schemas.core.FlowRun(
flow_id=flow.id,
state=schemas.states.Scheduled(scheduled_time=current_time),
work_queue_id=wq.id,
),
)
await session.commit()
return dict(
work_pools=dict(wp_a=wp_a, wp_b=wp_b, wp_c=wp_c),
work_queues=dict(
wq_aa=wq_aa,
wq_ab=wq_ab,
wq_ac=wq_ac,
wq_ba=wq_ba,
wq_bb=wq_bb,
wq_bc=wq_bc,
wq_ca=wq_ca,
wq_cb=wq_cb,
wq_cc=wq_cc,
),
)
@pytest.fixture
def work_pools(self, setup):
return setup["work_pools"]
@pytest.fixture
def work_queues(self, setup):
return setup["work_queues"]
async def test_get_all_runs(self, session):
runs = await models.workers.get_scheduled_flow_runs(session=session)
assert len(runs) == 45
# runs are not sorted by time because they're sorted by queue priority
assert runs != sorted(runs, key=lambda r: r.flow_run.next_scheduled_start_time)
async def test_get_all_runs_without_queue_priority(self, session):
runs = await models.workers.get_scheduled_flow_runs(
session=session, respect_queue_priorities=False
)
assert len(runs) == 45
# runs are sorted by time
assert runs == sorted(runs, key=lambda r: r.flow_run.next_scheduled_start_time)
async def test_get_all_runs_limit(self, session):
runs = await models.workers.get_scheduled_flow_runs(session=session, limit=12)
assert len(runs) == 12
async def test_get_all_runs_scheduled_before(self, session):
runs = await models.workers.get_scheduled_flow_runs(
session=session, scheduled_before=now("UTC")
)
assert len(runs) == 18
async def test_get_all_runs_scheduled_after(self, session):
runs = await models.workers.get_scheduled_flow_runs(
session=session, scheduled_after=now("UTC")
)
assert len(runs) == 27
async def test_get_all_runs_wq_aa(self, session, work_pools, work_queues):
runs = await models.workers.get_scheduled_flow_runs(
session=session, work_queue_ids=[work_queues["wq_aa"].id]
)
assert len(runs) == 5
async def test_get_all_runs_wq_aa_wq_ba_wq_cb(
self, session, work_pools, work_queues
):
runs = await models.workers.get_scheduled_flow_runs(
session=session,
work_queue_ids=[
work_queues["wq_aa"].id,
work_queues["wq_ba"].id,
work_queues["wq_cb"].id,
],
)
assert len(runs) == 15
async def test_get_all_runs_wp_a(self, session, work_pools, work_queues):
runs = await models.workers.get_scheduled_flow_runs(
session=session, work_pool_ids=[work_pools["wp_a"].id]
)
assert len(runs) == 15
async def test_get_all_runs_wp_a_wp_b(self, session, work_pools, work_queues):
runs = await models.workers.get_scheduled_flow_runs(
session=session,
work_pool_ids=[work_pools["wp_a"].id, work_pools["wp_b"].id],
)
assert len(runs) == 30
async def test_get_all_runs_pools_and_queues_combined(
self, session, work_pools, work_queues
):
runs = await models.workers.get_scheduled_flow_runs(
session=session,
work_pool_ids=[work_pools["wp_a"].id],
work_queue_ids=[work_queues["wq_aa"].id],
)
assert len(runs) == 5
async def test_get_all_runs_pools_and_queues_incompatible(
self, session, work_pools, work_queues
):
runs = await models.workers.get_scheduled_flow_runs(
session=session,
work_pool_ids=[work_pools["wp_b"].id],
work_queue_ids=[work_queues["wq_aa"].id],
)
assert len(runs) == 0
|
TestGetScheduledRuns
|
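The assertions above all follow from the fixture's arithmetic; a small editor-added sketch of those counts (the before/after split assumes the query's `now()` lands just after the fixture's `now()`, so the `+0h` run already counts as past):

```python
# 3 pools x 3 queues, 5 scheduled runs per queue at now + {3, 2, 1, 0, -1} hours.
queues = 3 * 3
scheduled_per_queue = 5                          # range(3, -2, -1)
total = queues * scheduled_per_queue             # 45 -> test_get_all_runs
per_pool = 3 * scheduled_per_queue               # 15 -> test_get_all_runs_wp_a
before_now = queues * 2                          # 18: the +0h and -1h runs -> scheduled_before
after_now = queues * 3                           # 27: the +1h..+3h runs -> scheduled_after
assert (total, per_pool, before_now, after_now) == (45, 15, 18, 27)
```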
python
|
keras-team__keras
|
keras/src/metrics/confusion_metrics.py
|
{
"start": 26341,
"end": 30012
}
|
class ____(SensitivitySpecificityBase):
"""Computes best sensitivity where specificity is >= specified value.
`Sensitivity` measures the proportion of actual positives that are correctly
identified as such `(tp / (tp + fn))`.
`Specificity` measures the proportion of actual negatives that are correctly
identified as such `(tn / (tn + fp))`.
This metric creates four local variables, `true_positives`,
`true_negatives`, `false_positives` and `false_negatives` that are used to
compute the sensitivity at the given specificity. The threshold for the
given specificity value is computed and used to evaluate the corresponding
sensitivity.
If `sample_weight` is `None`, weights default to 1.
Use `sample_weight` of 0 to mask values.
    If `class_id` is specified, we calculate sensitivity by considering only the
    entries in the batch for which `class_id` is above the threshold
    predictions, and computing the fraction of them for which `class_id` is
    indeed a correct label.
For additional information about specificity and sensitivity, see
[the following](https://en.wikipedia.org/wiki/Sensitivity_and_specificity).
Args:
specificity: A scalar value in range `[0, 1]`.
num_thresholds: (Optional) Defaults to 200. The number of thresholds to
use for matching the given specificity.
class_id: (Optional) Integer class ID for which we want binary metrics.
This must be in the half-open interval `[0, num_classes)`, where
`num_classes` is the last dimension of predictions.
name: (Optional) string name of the metric instance.
dtype: (Optional) data type of the metric result.
Example:
>>> m = keras.metrics.SensitivityAtSpecificity(0.5)
>>> m.update_state([0, 0, 0, 1, 1], [0, 0.3, 0.8, 0.3, 0.8])
>>> m.result()
0.5
>>> m.reset_state()
>>> m.update_state([0, 0, 0, 1, 1], [0, 0.3, 0.8, 0.3, 0.8],
... sample_weight=[1, 1, 2, 2, 1])
>>> m.result()
0.333333
Usage with `compile()` API:
```python
model.compile(
optimizer='sgd',
loss='binary_crossentropy',
metrics=[keras.metrics.SensitivityAtSpecificity(specificity=0.5)])
```
"""
def __init__(
self,
specificity,
num_thresholds=200,
class_id=None,
name=None,
dtype=None,
):
if specificity < 0 or specificity > 1:
raise ValueError(
"Argument `specificity` must be in the range [0, 1]. "
f"Received: specificity={specificity}"
)
self.specificity = specificity
self.num_thresholds = num_thresholds
super().__init__(
specificity,
num_thresholds=num_thresholds,
class_id=class_id,
name=name,
dtype=dtype,
)
def result(self):
sensitivities = ops.divide_no_nan(
self.true_positives,
ops.add(self.true_positives, self.false_negatives),
)
specificities = ops.divide_no_nan(
self.true_negatives,
ops.add(self.true_negatives, self.false_positives),
)
return self._find_max_under_constraint(
specificities, sensitivities, ops.greater_equal
)
def get_config(self):
config = {
"num_thresholds": self.num_thresholds,
"specificity": self.specificity,
}
base_config = super().get_config()
return {**base_config, **config}
@keras_export("keras.metrics.SpecificityAtSensitivity")
|
SensitivityAtSpecificity
|
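`result()` above selects the largest sensitivity among thresholds whose specificity meets the constraint. A NumPy restatement of that selection step, with illustrative per-threshold confusion counts and a simple epsilon guard standing in for `divide_no_nan`:

```python
# Minimal sketch of the constrained maximum computed by result(); not the Keras code path.
import numpy as np

tp = np.array([5.0, 4.0, 2.0])   # per-threshold true positives (illustrative)
fn = np.array([0.0, 1.0, 3.0])
tn = np.array([1.0, 3.0, 5.0])
fp = np.array([5.0, 3.0, 1.0])

sensitivity = tp / np.maximum(tp + fn, 1e-12)    # tp / (tp + fn)
specificity = tn / np.maximum(tn + fp, 1e-12)    # tn / (tn + fp)

target = 0.5
feasible = specificity >= target                 # the ops.greater_equal constraint
best = sensitivity[feasible].max() if feasible.any() else 0.0
print(best)                                      # 0.8 for these made-up counts
```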
python
|
doocs__leetcode
|
solution/0300-0399/0373.Find K Pairs with Smallest Sums/Solution.py
|
{
"start": 0,
"end": 470
}
|
class ____:
def kSmallestPairs(
self, nums1: List[int], nums2: List[int], k: int
) -> List[List[int]]:
q = [[u + nums2[0], i, 0] for i, u in enumerate(nums1[:k])]
heapify(q)
ans = []
while q and k > 0:
_, i, j = heappop(q)
ans.append([nums1[i], nums2[j]])
k -= 1
if j + 1 < len(nums2):
heappush(q, [nums1[i] + nums2[j + 1], i, j + 1])
return ans
|
Solution
|
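The heap seeds at most `k` pairs `(nums1[i], nums2[0])` and, after each pop, advances only the second index of the popped pair, so the loop performs at most `k` pops and pushes. A quick usage check, assuming the masked class is instantiated as `Solution` (the target value) and using standard example inputs:

```python
# Usage sketch for the kSmallestPairs solution above.
from heapq import heapify, heappop, heappush   # used inside the method
from typing import List

print(Solution().kSmallestPairs([1, 7, 11], [2, 4, 6], 3))   # [[1, 2], [1, 4], [1, 6]]
print(Solution().kSmallestPairs([1, 1, 2], [1, 2, 3], 2))    # [[1, 1], [1, 1]]
```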
python
|
pandas-dev__pandas
|
pandas/io/excel/_calamine.py
|
{
"start": 733,
"end": 3524
}
|
class ____(BaseExcelReader["CalamineWorkbook"]):
@doc(storage_options=_shared_docs["storage_options"])
def __init__(
self,
filepath_or_buffer: FilePath | ReadBuffer[bytes],
storage_options: StorageOptions | None = None,
engine_kwargs: dict | None = None,
) -> None:
"""
Reader using calamine engine (xlsx/xls/xlsb/ods).
Parameters
----------
filepath_or_buffer : str, path to be parsed or
an open readable stream.
{storage_options}
engine_kwargs : dict, optional
Arbitrary keyword arguments passed to excel engine.
"""
import_optional_dependency("python_calamine")
super().__init__(
filepath_or_buffer,
storage_options=storage_options,
engine_kwargs=engine_kwargs,
)
@property
def _workbook_class(self) -> type[CalamineWorkbook]:
from python_calamine import CalamineWorkbook
return CalamineWorkbook
def load_workbook(
self, filepath_or_buffer: FilePath | ReadBuffer[bytes], engine_kwargs: Any
) -> CalamineWorkbook:
from python_calamine import load_workbook
return load_workbook(
filepath_or_buffer,
**engine_kwargs,
)
@property
def sheet_names(self) -> list[str]:
from python_calamine import SheetTypeEnum
return [
sheet.name
for sheet in self.book.sheets_metadata
if sheet.typ == SheetTypeEnum.WorkSheet
]
def get_sheet_by_name(self, name: str) -> CalamineSheet:
self.raise_if_bad_sheet_by_name(name)
return self.book.get_sheet_by_name(name)
def get_sheet_by_index(self, index: int) -> CalamineSheet:
self.raise_if_bad_sheet_by_index(index)
return self.book.get_sheet_by_index(index)
def get_sheet_data(
self, sheet: CalamineSheet, file_rows_needed: int | None = None
) -> list[list[Scalar | NaTType | time]]:
def _convert_cell(value: _CellValue) -> Scalar | NaTType | time:
if isinstance(value, float):
val = int(value)
if val == value:
return val
else:
return value
elif isinstance(value, date):
return pd.Timestamp(value)
elif isinstance(value, timedelta):
return pd.Timedelta(value)
elif isinstance(value, time):
return value
return value
rows: list[list[_CellValue]] = sheet.to_python(
skip_empty_area=False, nrows=file_rows_needed
)
data = [[_convert_cell(cell) for cell in row] for row in rows]
return data
|
CalamineReader
|
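In user code this reader is reached through `pandas.read_excel`; a hedged usage sketch (the file and sheet names are placeholders, and `engine="calamine"` requires the optional `python-calamine` dependency):

```python
# Sketch: the CalamineReader above backs engine="calamine" in read_excel.
import pandas as pd

df = pd.read_excel("report.xlsx", engine="calamine", sheet_name="Sheet1")
# Per _convert_cell in get_sheet_data above, whole-number floats arrive as ints
# and dates/timedeltas as Timestamp/Timedelta at the cell level.
print(df.dtypes)
```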
python
|
scipy__scipy
|
scipy/linalg/tests/test_fblas.py
|
{
"start": 14525,
"end": 14755
}
|
class ____(BaseGemv):
blas_func = fblas.dgemv
dtype = float64
try:
class TestCgemv(BaseGemv):
blas_func = fblas.cgemv
dtype = complex64
except AttributeError:
class TestCgemv:
pass
|
TestDgemv
|
python
|
openai__openai-python
|
src/openai/types/beta/realtime/session_create_params.py
|
{
"start": 8738,
"end": 10251
}
|
class ____(TypedDict, total=False):
create_response: bool
"""
Whether or not to automatically generate a response when a VAD stop event
occurs.
"""
eagerness: Literal["low", "medium", "high", "auto"]
"""Used only for `semantic_vad` mode.
The eagerness of the model to respond. `low` will wait longer for the user to
continue speaking, `high` will respond more quickly. `auto` is the default and
is equivalent to `medium`.
"""
interrupt_response: bool
"""
Whether or not to automatically interrupt any ongoing response with output to
the default conversation (i.e. `conversation` of `auto`) when a VAD start event
occurs.
"""
prefix_padding_ms: int
"""Used only for `server_vad` mode.
Amount of audio to include before the VAD detected speech (in milliseconds).
Defaults to 300ms.
"""
silence_duration_ms: int
"""Used only for `server_vad` mode.
Duration of silence to detect speech stop (in milliseconds). Defaults to 500ms.
With shorter values the model will respond more quickly, but may jump in on
short pauses from the user.
"""
threshold: float
"""Used only for `server_vad` mode.
    Activation threshold for VAD (0.0 to 1.0); this defaults to 0.5. A higher
threshold will require louder audio to activate the model, and thus might
perform better in noisy environments.
"""
type: Literal["server_vad", "semantic_vad"]
"""Type of turn detection."""
|
TurnDetection
|
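Because the TypedDict above is declared `total=False`, callers supply only the keys they need; a small dict literal conforming to the schema (values are illustrative):

```python
# Sketch of a turn_detection payload matching the TypedDict above.
turn_detection = {
    "type": "server_vad",        # "semantic_vad" would use `eagerness` instead
    "threshold": 0.6,            # raise in noisier environments
    "prefix_padding_ms": 300,
    "silence_duration_ms": 500,
    "create_response": True,
    "interrupt_response": True,
}
```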
python
|
pytorch__pytorch
|
test/torch_np/test_basic.py
|
{
"start": 9060,
"end": 9713
}
|
class ____(TestCase):
"""Smoke test (sequence of arrays) -> (array)."""
@parametrize("func", seq_to_single_funcs)
def test_several(self, func):
arys = (
torch.Tensor([[1, 2, 3], [4, 5, 6]]),
w.asarray([[1, 2, 3], [4, 5, 6]]),
[[1, 2, 3], [4, 5, 6]],
)
result = func(arys)
assert isinstance(result, w.ndarray)
single_to_seq_funcs = (
w.nonzero,
# https://github.com/Quansight-Labs/numpy_pytorch_interop/pull/121#discussion_r1172824545
# w.tril_indices_from,
# w.triu_indices_from,
w.where,
)
@instantiate_parametrized_tests
|
TestSequenceOfArraysToSingle
|
python
|
optuna__optuna
|
tests/artifacts_tests/stubs.py
|
{
"start": 589,
"end": 1609
}
|
class ____:
def __init__(self) -> None:
self._data: dict[str, io.BytesIO] = {}
self._lock = threading.Lock()
def open_reader(self, artifact_id: str) -> BinaryIO:
with self._lock:
data = self._data.get(artifact_id)
if data is None:
raise ArtifactNotFound("not found")
return copy.deepcopy(data)
def write(self, artifact_id: str, content_body: BinaryIO) -> None:
buf = io.BytesIO()
shutil.copyfileobj(content_body, buf)
buf.seek(0)
with self._lock:
self._data[artifact_id] = buf
def remove(self, artifact_id: str) -> None:
with self._lock:
if artifact_id not in self._data:
raise ArtifactNotFound("not found")
del self._data[artifact_id]
if TYPE_CHECKING:
from optuna.artifacts._protocol import ArtifactStore
_fail: ArtifactStore = FailArtifactStore()
_inmemory: ArtifactStore = InMemoryArtifactStore()
|
InMemoryArtifactStore
|
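A short usage sketch of the in-memory stub above (artifact id and payload are illustrative):

```python
# Sketch: exercising the InMemoryArtifactStore stub defined above.
import io

store = InMemoryArtifactStore()
store.write("artifact-1", io.BytesIO(b"hello"))
assert store.open_reader("artifact-1").read() == b"hello"
store.remove("artifact-1")   # a later open_reader/remove for this id raises ArtifactNotFound
```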
python
|
tensorflow__tensorflow
|
tensorflow/python/keras/optimizer_v1.py
|
{
"start": 7999,
"end": 10384
}
|
class ____(Optimizer):
"""RMSProp optimizer.
It is recommended to leave the parameters of this optimizer
at their default values
(except the learning rate, which can be freely tuned).
Args:
lr: float >= 0. Learning rate.
rho: float >= 0.
epsilon: float >= 0. Fuzz factor.
If `None`, defaults to `backend.epsilon()`.
decay: float >= 0. Learning rate decay over each update.
"""
def __init__(self, lr=0.001, rho=0.9, epsilon=None, decay=0., **kwargs):
super(RMSprop, self).__init__(**kwargs)
with backend.name_scope(self.__class__.__name__):
self.lr = backend.variable(lr, name='lr')
self.rho = backend.variable(rho, name='rho')
self.decay = backend.variable(decay, name='decay')
self.iterations = backend.variable(0, dtype='int64', name='iterations')
if epsilon is None:
epsilon = backend.epsilon()
self.epsilon = epsilon
self.initial_decay = decay
def _create_all_weights(self, params):
accumulators = [
backend.zeros(backend.int_shape(p), dtype=backend.dtype(p))
for p in params]
self.weights = accumulators
return accumulators
def get_updates(self, loss, params):
grads = self.get_gradients(loss, params)
accumulators = self._create_all_weights(params)
self.updates = [state_ops.assign_add(self.iterations, 1)]
lr = self.lr
if self.initial_decay > 0:
lr = lr * (
1. /
(1. +
self.decay * math_ops.cast(self.iterations,
backend.dtype(self.decay))))
for p, g, a in zip(params, grads, accumulators):
# update accumulator
new_a = self.rho * a + (1. - self.rho) * math_ops.square(g)
self.updates.append(state_ops.assign(a, new_a))
new_p = p - lr * g / (backend.sqrt(new_a) + self.epsilon)
# Apply constraints.
if getattr(p, 'constraint', None) is not None:
new_p = p.constraint(new_p)
self.updates.append(state_ops.assign(p, new_p))
return self.updates
def get_config(self):
config = {
'lr': float(backend.get_value(self.lr)),
'rho': float(backend.get_value(self.rho)),
'decay': float(backend.get_value(self.decay)),
'epsilon': self.epsilon
}
base_config = super(RMSprop, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
|
RMSprop
|
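`get_updates` above applies the standard RMSProp rule; the same single step written out in NumPy for clarity (all values illustrative, not the Keras execution path):

```python
# One RMSProp step as implemented above, restated in NumPy.
import numpy as np

lr, rho, epsilon, decay = 0.001, 0.9, 1e-7, 0.0
p = np.array([0.5, -0.3])    # parameter
g = np.array([0.1, -0.2])    # gradient
a = np.zeros_like(p)         # accumulator
iterations = 1

if decay > 0:
    lr = lr * (1.0 / (1.0 + decay * iterations))

a = rho * a + (1.0 - rho) * np.square(g)      # update accumulator
p = p - lr * g / (np.sqrt(a) + epsilon)       # parameter update
print(p)
```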
python
|
pytorch__pytorch
|
test/dynamo/test_guard_serialization.py
|
{
"start": 2145,
"end": 2278
}
|
class ____:
def __getstate__(self):
raise RuntimeError("Cannot pickle")
def add(self, x):
return x + 1
|
MyClass
|
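The fixture above exists only to defeat pickling; a one-line check (sketch):

```python
# Sketch: pickle.dumps invokes __getstate__, so the fixture above refuses to pickle.
import pickle

try:
    pickle.dumps(MyClass())
except RuntimeError as exc:
    print(exc)   # Cannot pickle
```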
python
|
PrefectHQ__prefect
|
src/integrations/prefect-github/prefect_github/schemas/graphql_schema.py
|
{
"start": 774383,
"end": 775114
}
|
class ____(sgqlc.types.Type, Node):
"""
See source code for more info.
"""
__schema__ = graphql_schema
__field_names__ = (
"actor",
"created_at",
"head_ref",
"head_ref_name",
"pull_request",
)
actor = sgqlc.types.Field(Actor, graphql_name="actor")
created_at = sgqlc.types.Field(
sgqlc.types.non_null(DateTime), graphql_name="createdAt"
)
head_ref = sgqlc.types.Field("Ref", graphql_name="headRef")
head_ref_name = sgqlc.types.Field(
sgqlc.types.non_null(String), graphql_name="headRefName"
)
pull_request = sgqlc.types.Field(
sgqlc.types.non_null("PullRequest"), graphql_name="pullRequest"
)
|
HeadRefDeletedEvent
|
python
|
django-haystack__django-haystack
|
test_haystack/elasticsearch5_tests/test_backend.py
|
{
"start": 58220,
"end": 60925
}
|
class ____(TestCase):
def setUp(self):
super().setUp()
# Wipe it clean.
self.raw_es = elasticsearch.Elasticsearch(
settings.HAYSTACK_CONNECTIONS["elasticsearch"]["URL"]
)
clear_elasticsearch_index()
# Stow.
self.old_ui = connections["elasticsearch"].get_unified_index()
self.ui = UnifiedIndex()
self.smmi = Elasticsearch5BoostMockSearchIndex()
self.ui.build(indexes=[self.smmi])
connections["elasticsearch"]._index = self.ui
self.sb = connections["elasticsearch"].get_backend()
self.sample_objs = []
for i in range(1, 5):
mock = AFourthMockModel()
mock.id = i
if i % 2:
mock.author = "daniel"
mock.editor = "david"
else:
mock.author = "david"
mock.editor = "daniel"
mock.pub_date = datetime.date(2009, 2, 25) - datetime.timedelta(days=i)
self.sample_objs.append(mock)
def tearDown(self):
connections["elasticsearch"]._index = self.old_ui
super().tearDown()
def raw_search(self, query):
return self.raw_es.search(
q="*:*", index=settings.HAYSTACK_CONNECTIONS["elasticsearch"]["INDEX_NAME"]
)
def test_boost(self):
self.sb.update(self.smmi, self.sample_objs)
self.assertEqual(self.raw_search("*:*")["hits"]["total"], 4)
results = SearchQuerySet(using="elasticsearch").filter(
SQ(author="daniel") | SQ(editor="daniel")
)
self.assertEqual(
set([result.id for result in results]),
{
"core.afourthmockmodel.4",
"core.afourthmockmodel.3",
"core.afourthmockmodel.1",
"core.afourthmockmodel.2",
},
)
def test__to_python(self):
self.assertEqual(self.sb._to_python("abc"), "abc")
self.assertEqual(self.sb._to_python("1"), 1)
self.assertEqual(self.sb._to_python("2653"), 2653)
self.assertEqual(self.sb._to_python("25.5"), 25.5)
self.assertEqual(self.sb._to_python("[1, 2, 3]"), [1, 2, 3])
self.assertEqual(
self.sb._to_python('{"a": 1, "b": 2, "c": 3}'), {"a": 1, "c": 3, "b": 2}
)
self.assertEqual(
self.sb._to_python("2009-05-09T16:14:00"),
datetime.datetime(2009, 5, 9, 16, 14),
)
self.assertEqual(
self.sb._to_python("2009-05-09T00:00:00"),
datetime.datetime(2009, 5, 9, 0, 0),
)
self.assertEqual(self.sb._to_python(None), None)
|
Elasticsearch5BoostBackendTestCase
|
python
|
charliermarsh__ruff
|
crates/ruff_linter/resources/test/fixtures/pylint/invalid_return_type_length.py
|
{
"start": 706,
"end": 763
}
|
class ____:
def __len__(self):
return 42
|
Length
|