# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .base import GenericModelTestBase
from computedfields.models import ComputedFieldsModelType
from computedfields.graph import CycleNodeException
from django.core.management import call_command
from django.utils.six.moves import cStringIO
from django.utils.six.moves import cPickle as pickle
from django.conf import settings
import os
class CommandTests(GenericModelTestBase):
"""
Tests the management commands.
"""
def setUp(self):
self.setDeps({
# deps only to itself
'B': {'func': lambda self: self.name},
# one fk step deps to comp field
'C': {'depends': ['f_cb#comp'],
'func': lambda self: self.name + self.f_cb.comp},
'D': {'depends': ['f_dc#comp'],
'func': lambda self: self.name + self.f_dc.comp},
# multi fk steps deps to non comp field
'E': {'depends': ['f_ed.f_dc.f_cb.f_ba#name'],
'func': lambda self: self.name + self.f_ed.f_dc.f_cb.f_ba.name},
# multi fk steps deps to comp field
'F': {'depends': ['f_fe.f_ed.f_dc.f_cb#name'],
'func': lambda self: self.name + self.f_fe.f_ed.f_dc.f_cb.name}
})
def tearDown(self):
self.resetDeps()
def test_rendergraph(self):
# TODO: test for output
self.assertEqual(self.graph.is_cyclefree, True)
call_command('rendergraph', 'output', verbosity=0)
os.remove('output.pdf')
def test_rendergraph_with_cycle(self):
import sys
# raises due to get_nodepaths() in _resolve_dependencies()
self.assertRaises(
CycleNodeException,
lambda: self.setDeps({
'A': {'depends': ['f_ag#comp']},
'G': {'depends': ['f_ga#comp']},
})
)
self.assertEqual(ComputedFieldsModelType._graph.is_cyclefree, False)
stdout = sys.stdout
sys.stdout = cStringIO()
call_command('rendergraph', 'output', verbosity=0)
# should have printed cycle info on stdout
self.assertIn('Warning - 1 cycles in dependencies found:', sys.stdout.getvalue())
sys.stdout = stdout
def test_updatedata(self):
# TODO: advanced test case
self.models.A(name='a').save()
call_command('updatedata', verbosity=0)
def test_createmap(self):
# save old value
old_map = None
map_set = hasattr(settings, 'COMPUTEDFIELDS_MAP')
if map_set:
old_map = settings.COMPUTEDFIELDS_MAP
# should not fail
settings.COMPUTEDFIELDS_MAP = os.path.join(settings.BASE_DIR, 'map.test')
call_command('createmap', verbosity=0)
with open(os.path.join(settings.BASE_DIR, 'map.test'), 'rb') as f:
map = pickle.load(f)
self.assertDictEqual(map, ComputedFieldsModelType._map)
os.remove(os.path.join(settings.BASE_DIR, 'map.test'))
# restore old value
if map_set:
settings.COMPUTEDFIELDS_MAP = old_map
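# For reference, a minimal sketch of the model style these tests exercise (the real
# test models are generated dynamically by GenericModelTestBase.setDeps(); class
# names and field types below are illustrative only):
#
#   from django.db import models
#   from computedfields.models import ComputedFieldsModel, computed
#
#   class C(ComputedFieldsModel):
#       name = models.CharField(max_length=32)
#       f_cb = models.ForeignKey('B', on_delete=models.CASCADE)
#
#       @computed(models.CharField(max_length=64), depends=['f_cb#comp'])
#       def comp(self):
#           return self.name + self.f_cb.comp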
|
"""Emoji
Available Commands:
.support
"""
from telethon import events
import asyncio
from userbot.utils import admin_cmd
@borg.on(admin_cmd("secktor"))
async def _(event):
if event.fwd_from:
return
animation_interval = 0.1
animation_ttl = range(0,36)
#input_str = event.pattern_match.group(1)
# if input_str == "Read This Telegraph Whole info here":
await event.edit("Thanks")
animation_chars = [
"Click here to Go to Telegraph",
"[Click Here For Guide](https://telegra.ph/)"
]
    for i in animation_ttl:
        await asyncio.sleep(animation_interval)
        # index modulo the frame count; the original hard-coded `% 18` would raise
        # IndexError as soon as i % 18 reached 2 on this two-item list
        await event.edit(animation_chars[i % len(animation_chars)])
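# Hedged aside (not part of the original plugin): Telegram throttles rapid message
# edits, so a hardened loop would typically guard each edit, e.g.:
#
#   from telethon import errors
#   try:
#       await event.edit(animation_chars[i % len(animation_chars)])
#   except errors.FloodWaitError as e:
#       await asyncio.sleep(e.seconds)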
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
    name='CryptoPlus',
    version='1.0',
    description='PyCrypto Cipher extension',
    author='Christophe Oosterlynck',
    author_email='tiftof@gmail.com',
    packages=find_packages('src'),
    install_requires=['pycryptodome'],
    package_dir={'': 'src'},
)
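# Standard setuptools workflow for a src/ layout like the one declared above
# (commands are generic, not project-specific):
#   pip install .                  # install from the repository root
#   python setup.py sdist          # build a source distribution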
|
import torch
from weakvtg.loss import loss_orthogonal_box_class_count_scaled
def test_loss_orthogonal_box_class_count_scaled():
X = torch.tensor([1, -1, 1, -1, 0, 0, .236, -.751], dtype=torch.float), torch.tensor([3, 1, 1, 1, 0, 1, 1, 0])
y = torch.tensor([1, -1, -1, 1, -1, 1, -1, 1], dtype=torch.float)
eps = 1e-08
x_pos = X[0] * (y == 1)
x_neg = X[0] * (y != 1)
count_pos = X[1] * (y == 1) + eps
count_neg = X[1] * (y != 1) + eps
assert torch.equal(
loss_orthogonal_box_class_count_scaled(X, y),
-1 * (x_pos / count_pos) + torch.square(x_neg) / count_neg
)
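# Masking note: (y == 1) and (y != 1) are bool tensors, so multiplying X by them
# zeroes out entries of the opposite class, and eps keeps the count denominators
# nonzero. For the first element, x_pos[0] = 1 and count_pos[0] = 3 + eps, so the
# positive term contributes roughly -1/3 there, while the squared negative term is 0.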
|
from setuptools import setup, find_packages
import pathlib
directory = pathlib.Path(__file__).parent
README = (directory / "README.md").read_text()
setup(
name="qsurface",
version="0.1.5",
description="Open library from surface code simulations and visualizations",
long_description=README,
long_description_content_type="text/markdown",
url="https://github.com/watermarkhu/qsurface",
project_urls={"Documentation": "https://qsurface.readthedocs.io/en/latest/"},
author="Mark Shui Hu",
author_email="watermarkhu@gmail.com",
license="BSD-3",
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 3.7",
"Topic :: Scientific/Engineering :: Physics",
],
packages=find_packages(exclude=["tests", "*.tests", "*.tests.*"]),
include_package_data=True,
python_requires=">=3.7",
install_requires=[
"matplotlib>=3.3.2",
"networkx>=2.0",
"pandas>=1.1.0",
"scipy>=1.4.0",
"pptree>=3.1",
],
entry_points={
"console_scrips": [
"qsurface=qsurface.__main__:main",
"qsurface-getblossomv=qsurface.decoders.mwpm:get_blossomv",
],
},
)
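# Once installed (e.g. `pip install .`), the console_scripts above expose two
# commands, assuming the referenced callables exist as declared:
#   qsurface                # runs qsurface.__main__:main
#   qsurface-getblossomv    # runs qsurface.decoders.mwpm:get_blossomv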
|
from flask import Flask, request, jsonify
from td4a.controllers.config import api_config
from td4a.controllers.hosts import api_hosts
from td4a.controllers.inventory import api_inventory
from td4a.controllers.link import api_link
from td4a.controllers.render import api_render
from td4a.controllers.retrieve import api_retrieve
from td4a.controllers.schema import api_schema
from td4a.controllers.validate import api_validate
app = Flask(__name__, static_url_path='') # pylint: disable=invalid-name
app.register_blueprint(api_config)
app.register_blueprint(api_hosts)
app.register_blueprint(api_inventory)
app.register_blueprint(api_link)
app.register_blueprint(api_render)
app.register_blueprint(api_retrieve)
app.register_blueprint(api_schema)
app.register_blueprint(api_validate)
@app.route('/')
def root():
""" root path
"""
return app.send_static_file('index.html')
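# Development entry point (a standard Flask pattern; the host and port here are
# illustrative assumptions, not taken from td4a's own launcher):
if __name__ == '__main__':
    app.run(host='127.0.0.1', port=5000)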
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteCircuitsOperations:
"""ExpressRouteCircuitsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_03_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
circuit_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
circuit_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
async def get(
self,
resource_group_name: str,
circuit_name: str,
**kwargs: Any
) -> "_models.ExpressRouteCircuit":
"""Gets information about the specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of express route circuit.
:type circuit_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuit, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_03_01.models.ExpressRouteCircuit
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuit"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
circuit_name: str,
parameters: "_models.ExpressRouteCircuit",
**kwargs: Any
) -> "_models.ExpressRouteCircuit":
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuit"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ExpressRouteCircuit')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
circuit_name: str,
parameters: "_models.ExpressRouteCircuit",
**kwargs: Any
) -> AsyncLROPoller["_models.ExpressRouteCircuit"]:
"""Creates or updates an express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the circuit.
:type circuit_name: str
:param parameters: Parameters supplied to the create or update express route circuit operation.
:type parameters: ~azure.mgmt.network.v2020_03_01.models.ExpressRouteCircuit
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ExpressRouteCircuit or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_03_01.models.ExpressRouteCircuit]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuit"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
async def update_tags(
self,
resource_group_name: str,
circuit_name: str,
parameters: "_models.TagsObject",
**kwargs: Any
) -> "_models.ExpressRouteCircuit":
"""Updates an express route circuit tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the circuit.
:type circuit_name: str
:param parameters: Parameters supplied to update express route circuit tags.
:type parameters: ~azure.mgmt.network.v2020_03_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuit, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_03_01.models.ExpressRouteCircuit
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuit"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
async def _list_arp_table_initial(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
device_path: str,
**kwargs: Any
) -> Optional["_models.ExpressRouteCircuitsArpTableListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ExpressRouteCircuitsArpTableListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self._list_arp_table_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsArpTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_arp_table_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/arpTables/{devicePath}'} # type: ignore
async def begin_list_arp_table(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
device_path: str,
**kwargs: Any
) -> AsyncLROPoller["_models.ExpressRouteCircuitsArpTableListResult"]:
"""Gets the currently advertised ARP table associated with the express route circuit in a resource
group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ExpressRouteCircuitsArpTableListResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_03_01.models.ExpressRouteCircuitsArpTableListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitsArpTableListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._list_arp_table_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
device_path=device_path,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitsArpTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_arp_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/arpTables/{devicePath}'} # type: ignore
async def _list_routes_table_initial(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
device_path: str,
**kwargs: Any
) -> Optional["_models.ExpressRouteCircuitsRoutesTableListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ExpressRouteCircuitsRoutesTableListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self._list_routes_table_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_routes_table_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTables/{devicePath}'} # type: ignore
async def begin_list_routes_table(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
device_path: str,
**kwargs: Any
) -> AsyncLROPoller["_models.ExpressRouteCircuitsRoutesTableListResult"]:
"""Gets the currently advertised routes table associated with the express route circuit in a
resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ExpressRouteCircuitsRoutesTableListResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_03_01.models.ExpressRouteCircuitsRoutesTableListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitsRoutesTableListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._list_routes_table_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
device_path=device_path,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_routes_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTables/{devicePath}'} # type: ignore
async def _list_routes_table_summary_initial(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
device_path: str,
**kwargs: Any
) -> Optional["_models.ExpressRouteCircuitsRoutesTableSummaryListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ExpressRouteCircuitsRoutesTableSummaryListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self._list_routes_table_summary_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableSummaryListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_routes_table_summary_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTablesSummary/{devicePath}'} # type: ignore
async def begin_list_routes_table_summary(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
device_path: str,
**kwargs: Any
) -> AsyncLROPoller["_models.ExpressRouteCircuitsRoutesTableSummaryListResult"]:
"""Gets the currently advertised routes table summary associated with the express route circuit in
a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ExpressRouteCircuitsRoutesTableSummaryListResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_03_01.models.ExpressRouteCircuitsRoutesTableSummaryListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitsRoutesTableSummaryListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._list_routes_table_summary_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
device_path=device_path,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableSummaryListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_routes_table_summary.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTablesSummary/{devicePath}'} # type: ignore
async def get_stats(
self,
resource_group_name: str,
circuit_name: str,
**kwargs: Any
) -> "_models.ExpressRouteCircuitStats":
"""Gets all the stats from an express route circuit in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuitStats, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_03_01.models.ExpressRouteCircuitStats
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitStats"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self.get_stats.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuitStats', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_stats.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/stats'} # type: ignore
async def get_peering_stats(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
**kwargs: Any
) -> "_models.ExpressRouteCircuitStats":
"""Gets all stats from an express route circuit in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuitStats, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_03_01.models.ExpressRouteCircuitStats
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitStats"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self.get_peering_stats.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuitStats', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_peering_stats.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/stats'} # type: ignore
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.ExpressRouteCircuitListResult"]:
"""Gets all the express route circuits in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRouteCircuitListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_03_01.models.ExpressRouteCircuitListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits'} # type: ignore
def list_all(
self,
**kwargs: Any
) -> AsyncIterable["_models.ExpressRouteCircuitListResult"]:
"""Gets all the express route circuits in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRouteCircuitListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_03_01.models.ExpressRouteCircuitListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteCircuits'} # type: ignore
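# Illustrative usage (not generated code): as the class docstring notes, these
# operations are reached through a client rather than by direct instantiation.
# A hedged sketch with the async management client:
#
#   from azure.identity.aio import DefaultAzureCredential
#   from azure.mgmt.network.aio import NetworkManagementClient
#
#   async with NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>") as client:
#       poller = await client.express_route_circuits.begin_delete("my-rg", "my-circuit")
#       await poller.result()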
|
from __future__ import absolute_import
from __future__ import print_function
import veriloggen
import simulation_simulator_verilator
from veriloggen import *
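# Expected stdout from the Verilator simulation of the LED counter example; note the
# counter resets once after reaching 10 and then counts up freely.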
expected_rslt = """\
LED: 0 count: 0
LED: 0 count: 1
LED: 0 count: 2
LED: 0 count: 3
LED: 0 count: 4
LED: 0 count: 5
LED: 0 count: 6
LED: 0 count: 7
LED: 0 count: 8
LED: 0 count: 9
LED: 0 count: 10
LED: 0 count: 0
LED: 0 count: 1
LED: 0 count: 2
LED: 0 count: 3
LED: 0 count: 4
LED: 0 count: 5
LED: 0 count: 6
LED: 0 count: 7
LED: 0 count: 8
LED: 0 count: 9
LED: 0 count: 10
LED: 0 count: 11
LED: 0 count: 12
LED: 0 count: 13
LED: 0 count: 14
LED: 0 count: 15
LED: 0 count: 16
LED: 0 count: 17
LED: 0 count: 18
LED: 0 count: 19
LED: 0 count: 20
LED: 0 count: 21
LED: 0 count: 22
LED: 0 count: 23
LED: 0 count: 24
LED: 0 count: 25
LED: 0 count: 26
LED: 0 count: 27
LED: 0 count: 28
LED: 0 count: 29
LED: 0 count: 30
LED: 0 count: 31
LED: 0 count: 32
LED: 0 count: 33
LED: 0 count: 34
LED: 0 count: 35
LED: 0 count: 36
LED: 0 count: 37
LED: 0 count: 38
LED: 0 count: 39
LED: 0 count: 40
LED: 0 count: 41
LED: 0 count: 42
LED: 0 count: 43
LED: 0 count: 44
LED: 0 count: 45
LED: 0 count: 46
LED: 0 count: 47
LED: 0 count: 48
LED: 0 count: 49
LED: 0 count: 50
LED: 0 count: 51
LED: 0 count: 52
LED: 0 count: 53
LED: 0 count: 54
LED: 0 count: 55
LED: 0 count: 56
LED: 0 count: 57
LED: 0 count: 58
LED: 0 count: 59
LED: 0 count: 60
LED: 0 count: 61
LED: 0 count: 62
LED: 0 count: 63
LED: 0 count: 64
LED: 0 count: 65
LED: 0 count: 66
LED: 0 count: 67
LED: 0 count: 68
LED: 0 count: 69
LED: 0 count: 70
LED: 0 count: 71
LED: 0 count: 72
LED: 0 count: 73
LED: 0 count: 74
LED: 0 count: 75
LED: 0 count: 76
LED: 0 count: 77
LED: 0 count: 78
LED: 0 count: 79
LED: 0 count: 80
LED: 0 count: 81
LED: 0 count: 82
LED: 0 count: 83
LED: 0 count: 84
LED: 0 count: 85
LED: 0 count: 86
LED: 0 count: 87
LED: 0 count: 88
LED: 0 count: 89
LED: 0 count: 90
LED: 0 count: 91
LED: 0 count: 92
LED: 0 count: 93
LED: 0 count: 94
LED: 0 count: 95
LED: 0 count: 96
LED: 0 count: 97
LED: 0 count: 98
LED: 0 count: 99
LED: 0 count: 100
LED: 0 count: 101
LED: 0 count: 102
[... expected simulator output continues in the same pattern: "LED: 0 count: N" with N incrementing up to 1023, then "LED: 1 count: N" with N running from 0 through 955, where the run is cut off by the simulation time limit ...]
LED: 1 count: 955
"""
def test():
veriloggen.reset()
test_module = simulation_simulator_verilator.mkTest()
sim = simulation.Simulator(test_module, sim='verilator')
rslt = sim.run(outputfile='verilator.out', sim_time=1000 * 20)
ver_rslt = '\n'.join(rslt.splitlines())
exp_rslt = '\n'.join(expected_rslt.splitlines())
    assert ver_rslt == exp_rslt
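# A sketch of how the expected output above could be regenerated instead of being
# kept as a very long literal. The names and exact display format are assumptions
# inferred from the visible pattern (the LED bit flips after 1024 counts; the run
# stops at "LED: 1 count: 955" because of the simulation time limit):
#
#     lines = []
#     for led in (0, 1):
#         for count in range(1024):
#             lines.append('LED: %d count: %d' % (led, count))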
|
import json
import time
from urllib.request import urlopen
from sys import argv
albumID = argv[1]
urlpath = urlopen('https://itunes.apple.com/lookup?id=' + albumID)
result = json.loads(urlpath.read())
print(result['resultCount'])
count = 0
# Poll the iTunes lookup API until it returns at least one result.
while result['resultCount'] == 0:
    urlpath = urlopen('https://itunes.apple.com/lookup?id=' + albumID)
    result = json.loads(urlpath.read())
    print(count, result['resultCount'])
    count += 1
    time.sleep(4)
print('Done')
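# Example invocation (the script name and album ID below are illustrative):
#     python itunes_lookup.py 1440857781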
|
# SPDX-License-Identifier: Apache-2.0
# Copyright 2020 Contributors to OpenLEADR
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from lxml import etree
import xmltodict
from jinja2 import Environment, PackageLoader
from signxml import XMLSigner, XMLVerifier, methods
from uuid import uuid4
from lxml.etree import Element
from openleadr import errors
from datetime import datetime, timezone, timedelta
import os
from openleadr import utils
from .preflight import preflight_message
import logging
logger = logging.getLogger('openleadr')
SIGNER = XMLSigner(method=methods.detached,
c14n_algorithm="http://www.w3.org/TR/2001/REC-xml-c14n-20010315")
SIGNER.namespaces['oadr'] = "http://openadr.org/oadr-2.0b/2012/07"
VERIFIER = XMLVerifier()
XML_SCHEMA_LOCATION = os.path.join(os.path.dirname(__file__), 'schema', 'oadr_20b.xsd')
with open(XML_SCHEMA_LOCATION) as file:
XML_SCHEMA = etree.XMLSchema(etree.parse(file))
XML_PARSER = etree.XMLParser(schema=XML_SCHEMA)
def parse_message(data):
"""
Parse a message and distill its usable parts. Returns a message type and payload.
:param data str: The XML string that is received
Returns a message type (str) and a message payload (dict)
"""
message_dict = xmltodict.parse(data, process_namespaces=True, namespaces=NAMESPACES, namespace_separator=' ')
message_type, message_payload = message_dict['oadrPayload']['oadrSignedObject'].popitem()
message_payload = utils.normalize_dict(message_payload)
return message_type, message_payload
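# Example (sketch): round-tripping a payload through create_message (defined
# below) and parse_message. The message type and ven_id are illustrative; real
# payloads follow the bundled OpenADR 2.0b templates.
#
#     msg = create_message('oadrPoll', ven_id='ven123')
#     message_type, payload = parse_message(msg)
#     assert message_type == 'oadrPoll'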
def create_message(message_type, cert=None, key=None, passphrase=None, disable_signature=False, **message_payload):
"""
Create and optionally sign an OpenADR message. Returns an XML string.
"""
message_payload = preflight_message(message_type, message_payload)
template = TEMPLATES.get_template(f'{message_type}.xml')
signed_object = utils.flatten_xml(template.render(**message_payload))
envelope = TEMPLATES.get_template('oadrPayload.xml')
if cert and key and not disable_signature:
tree = etree.fromstring(signed_object)
signature_tree = SIGNER.sign(tree,
key=key,
cert=cert,
passphrase=utils.ensure_bytes(passphrase),
reference_uri="#oadrSignedObject",
signature_properties=_create_replay_protect())
signature = etree.tostring(signature_tree).decode('utf-8')
else:
signature = None
msg = envelope.render(template=f'{message_type}',
signature=signature,
signed_object=signed_object)
return msg
def validate_xml_schema(content):
"""
Validates the XML tree against the schema. Return the XML tree.
"""
if isinstance(content, str):
content = content.encode('utf-8')
tree = etree.fromstring(content, XML_PARSER)
return tree
def validate_xml_signature(xml_tree, cert_fingerprint=None):
"""
Validate the XMLDSIG signature and the ReplayProtect element.
"""
cert = utils.extract_pem_cert(xml_tree)
if cert_fingerprint:
fingerprint = utils.certificate_fingerprint(cert)
if fingerprint != cert_fingerprint:
raise errors.FingerprintMismatch("The certificate fingerprint was incorrect. "
f"Expected: {cert_fingerprint}; "
f"Received: {fingerprint}. Ignoring message.")
VERIFIER.verify(xml_tree, x509_cert=utils.ensure_bytes(cert), expect_references=2)
_verify_replay_protect(xml_tree)
def validate_xml_signature_none(xml_tree):
assert xml_tree.find('.//{http://www.w3.org/2000/09/xmldsig#}X509Certificate') is None
async def authenticate_message(request, message_tree, message_payload, fingerprint_lookup=None, ven_lookup=None):
if request.secure and 'ven_id' in message_payload:
connection_fingerprint = utils.get_cert_fingerprint_from_request(request)
if connection_fingerprint is None:
msg = ("Your request must use a client side SSL certificate, of which the "
"fingerprint must match the fingerprint that you have given to this VTN.")
raise errors.NotRegisteredOrAuthorizedError(msg)
try:
ven_id = message_payload.get('ven_id')
if fingerprint_lookup:
expected_fingerprint = await utils.await_if_required(fingerprint_lookup(ven_id))
if not expected_fingerprint:
raise ValueError
elif ven_lookup:
ven_info = await utils.await_if_required(ven_lookup(ven_id))
if not ven_info:
raise ValueError
expected_fingerprint = ven_info.get('fingerprint')
except ValueError:
msg = (f"Your venID {ven_id} is not known to this VTN. Make sure you use the venID "
"that you receive from this VTN during the registration step")
raise errors.NotRegisteredOrAuthorizedError(msg)
if expected_fingerprint is None:
msg = ("This VTN server does not know what your certificate fingerprint is. Please "
"deliver your fingerprint to the VTN (outside of OpenADR). You used the "
"following fingerprint to make this request:")
raise errors.NotRegisteredOrAuthorizedError(msg)
if connection_fingerprint != expected_fingerprint:
msg = (f"The fingerprint of your HTTPS certificate '{connection_fingerprint}' "
f"does not match the expected fingerprint '{expected_fingerprint}'")
raise errors.NotRegisteredOrAuthorizedError(msg)
message_cert = utils.extract_pem_cert(message_tree)
message_fingerprint = utils.certificate_fingerprint(message_cert)
if message_fingerprint != expected_fingerprint:
msg = (f"The fingerprint of the certificate used to sign the message "
f"{message_fingerprint} did not match the fingerprint that this "
f"VTN has for you {expected_fingerprint}. Make sure you use the correct "
"certificate to sign your messages.")
raise errors.NotRegisteredOrAuthorizedError(msg)
try:
validate_xml_signature(message_tree)
except ValueError:
msg = ("The message signature did not match the message contents. Please make sure "
"you are using the correct XMLDSig algorithm and C14n canonicalization.")
raise errors.NotRegisteredOrAuthorizedError(msg)
def _create_replay_protect():
dt_element = Element("{http://openadr.org/oadr-2.0b/2012/07/xmldsig-properties}timestamp")
dt_element.text = utils.datetimeformat(datetime.now(timezone.utc))
nonce_element = Element("{http://openadr.org/oadr-2.0b/2012/07/xmldsig-properties}nonce")
nonce_element.text = uuid4().hex
el = Element("{http://openadr.org/oadr-2.0b/2012/07/xmldsig-properties}ReplayProtect",
nsmap={'dsp': 'http://openadr.org/oadr-2.0b/2012/07/xmldsig-properties'},
attrib={'Id': 'myid', 'Target': '#mytarget'})
el.append(dt_element)
el.append(nonce_element)
return el
def _verify_replay_protect(xml_tree):
try:
ns = "{http://openadr.org/oadr-2.0b/2012/07/xmldsig-properties}"
timestamp = utils.parse_datetime(xml_tree.findtext(f".//{ns}timestamp"))
nonce = xml_tree.findtext(f".//{ns}nonce")
except Exception:
raise ValueError("Missing or malformed ReplayProtect element in the message signature.")
else:
if nonce is None:
raise ValueError("Missing 'nonce' element in ReplayProtect in incoming message.")
if timestamp < datetime.now(timezone.utc) - REPLAY_PROTECT_MAX_TIME_DELTA:
raise ValueError("The message was signed too long ago.")
elif (timestamp, nonce) in NONCE_CACHE:
raise ValueError("This combination of timestamp and nonce was already used.")
_update_nonce_cache(timestamp, nonce)
def _update_nonce_cache(timestamp, nonce):
NONCE_CACHE.add((timestamp, nonce))
for timestamp, nonce in list(NONCE_CACHE):
if timestamp < datetime.now(timezone.utc) - REPLAY_PROTECT_MAX_TIME_DELTA:
NONCE_CACHE.remove((timestamp, nonce))
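# Note (sketch of intended behavior): a signed message is accepted only if its
# signature timestamp lies within REPLAY_PROTECT_MAX_TIME_DELTA of "now" and
# its (timestamp, nonce) pair has not been seen before; replaying the same
# signed XML inside the window raises a ValueError in _verify_replay_protect.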
# Replay protect settings
REPLAY_PROTECT_MAX_TIME_DELTA = timedelta(seconds=5)
NONCE_CACHE = set()
# Settings for jinja2
TEMPLATES = Environment(loader=PackageLoader('openleadr', 'templates'))
TEMPLATES.filters['datetimeformat'] = utils.datetimeformat
TEMPLATES.filters['timedeltaformat'] = utils.timedeltaformat
TEMPLATES.filters['booleanformat'] = utils.booleanformat
TEMPLATES.trim_blocks = True
TEMPLATES.lstrip_blocks = True
# Settings for xmltodict
NAMESPACES = {
'http://docs.oasis-open.org/ns/energyinterop/201110': None,
'http://openadr.org/oadr-2.0b/2012/07': None,
'urn:ietf:params:xml:ns:icalendar-2.0': None,
'http://docs.oasis-open.org/ns/energyinterop/201110/payloads': None,
'http://docs.oasis-open.org/ns/emix/2011/06': None,
'urn:ietf:params:xml:ns:icalendar-2.0:stream': None,
'http://docs.oasis-open.org/ns/emix/2011/06/power': None,
'http://docs.oasis-open.org/ns/emix/2011/06/siscale': None,
'http://www.w3.org/2000/09/xmldsig#': None,
'http://openadr.org/oadr-2.0b/2012/07/xmldsig-properties': None
}
|
import pandas as pd
def load_report(mr, params) -> pd.DataFrame:
return normalize_report(mr.get('report'), params)
def normalize_report(df, params):
df = df.copy()
df.R0 = df.R0.apply(lambda x: round(complex(x).real, 1))
df_temp = df.drop(['Time', 'R0', 'latentRate', 'removalRate', 'hospRate', 'deathRateICU', 'deathRateNoIcu'],
axis=1)
df_temp = df_temp * params.population
df.update(df_temp)
return df
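# Example (sketch): 'mr' is assumed to be a dict-like holding a DataFrame under
# 'report', and 'params' to expose a 'population' attribute. A minimal stand-in:
#
#     from types import SimpleNamespace
#     params = SimpleNamespace(population=1_000_000)
#     df = load_report({'report': raw_df}, params)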
|
# Generated by Django 3.1.4 on 2020-12-08 05:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('alog', '0003_answer_author'),
]
operations = [
migrations.AddField(
model_name='answer',
name='modify_date',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='blogpost',
name='modify_date',
field=models.DateTimeField(blank=True, null=True),
),
]
|
from django import forms
from Hostel.models import Hostel_Details, Hostel_Room, Hostel_Register, Hostel_Allocation
class Hostel_DetailsForm(forms.ModelForm):
class Meta:
model = Hostel_Details
fields = '__all__'
class Hostel_RoomForm(forms.ModelForm):
class Meta:
model = Hostel_Room
fields = '__all__'
class Hostel_RegisterForm(forms.ModelForm):
class Meta:
model = Hostel_Register
fields = '__all__'
class Hostel_AllocationForm(forms.ModelForm):
class Meta:
model = Hostel_Allocation
fields = '__all__'
|
#!/usr/bin/python
#
# Visualize a PoseStamped in RViz by publishing an arrow Marker in a MarkerArray
#
import argparse
import rospy
import copy
import geometry_msgs.msg
from std_msgs.msg import Header, ColorRGBA
from geometry_msgs.msg import PoseStamped, Vector3, Pose, Quaternion
from visualization_msgs.msg import Marker, MarkerArray
from tf.transformations import quaternion_from_euler
from tf.transformations import *
from copy import deepcopy
markerArray = MarkerArray()
topic = 'visualization_marker_array'
def place_marker_at_pose(publisher, poseStamped):
marker_x = Marker(
type=Marker.ARROW,
id=0,
lifetime=rospy.Duration(30),
pose=poseStamped.pose,
scale=Vector3(0.1, 0.01, 0.01),
header=poseStamped.header,
color=ColorRGBA(1.0, 0.0, 0.0, 0.8))
markerArray.markers.append(marker_x)
# RPY to convert: -90deg around z -> we get y from x
# quat_tf = poseStamped.pose.orientation
# quat_x = [quat_tf.x, quat_tf.y, quat_tf.z, quat_tf.w]
# quat_rot = quaternion_from_euler(0, 0, -1.5707)
# quat_y = quaternion_multiply(quat_rot, quat_x)
# marker_y = Marker(
# type=Marker.ARROW,
# id=1,
# lifetime=rospy.Duration(180),
# pose=copy.deepcopy(poseStamped.pose),
# scale=Vector3(0.1, 0.01, 0.01),
# header=poseStamped.header,
# color=ColorRGBA(0.0, 1.0, 0.0, 0.8))
# marker_y.pose.orientation.x = quat_y[0]
# marker_y.pose.orientation.y = quat_y[1]
# marker_y.pose.orientation.z = quat_y[2]
# marker_y.pose.orientation.w = quat_y[3]
# markerArray.markers.append(marker_y)
# # RPY to convert: +90deg around y -> we get z from x
# quat_rot = quaternion_from_euler(0, 1.5707, 0)
# quat_z = quaternion_multiply(quat_rot, quat_x)
# marker_z = Marker(
# type=Marker.ARROW,
# id=2,
# lifetime=rospy.Duration(180),
# pose=copy.deepcopy(poseStamped.pose),
# scale=Vector3(0.1, 0.01, 0.01),
# header=poseStamped.header,
# color=ColorRGBA(0.0, 0.0, 1.0, 0.8))
# marker_z.pose.orientation.x = quat_z[0]
# marker_z.pose.orientation.y = quat_z[1]
# marker_z.pose.orientation.z = quat_z[2]
# marker_z.pose.orientation.w = quat_z[3]
# markerArray.markers.append(marker_z)
    # Publish the MarkerArray
    publisher.publish(markerArray)
if __name__ == '__main__':
try:
# Start the ROS node
rospy.init_node('pose_marker_visualizer')
publisher = rospy.Publisher(topic, MarkerArray, queue_size=1)
        poseStamped = geometry_msgs.msg.PoseStamped()
        poseStamped.header.frame_id = "map"
        poseStamped.pose.position.x = 0.1
        poseStamped.pose.position.y = 0.1
        poseStamped.pose.position.z = 0.1
        poseStamped.pose.orientation.w = 1
place_marker_at_pose(publisher, poseStamped)
rospy.spin()
except rospy.ROSInterruptException:
print ("Program interrupted before completion")
|
import logging
import datetime
from google.appengine.ext import webapp
import util
_SE_MONTH_NAMES = {
1: "januari", 2: "februari", 3: "mars", 4: "april", 5: "maj", 6: "juni",
7: "juli", 8: "augusti", 9: "september", 10: "oktober", 11: "november",
12: "december"
}
register = webapp.template.create_template_register()
def format(time):
"""Format seconds to HH:MM:SS format"""
return str(datetime.timedelta(seconds=time))
def formatd(indate):
"""Format datetime to just date"""
return util.utc_as_cet(indate).strftime("%Y-%m-%d")
def formatdv(indate):
"""Format datetime to just date"""
month_name = _SE_MONTH_NAMES.get(indate.month, "sommari")
return util.utc_as_cet(indate).strftime("%d %%s %Y") % month_name
def formatdvsy(indate):
    """Format datetime to a verbose Swedish date without the year"""
month_name = _SE_MONTH_NAMES.get(indate.month, "sommari")
return util.utc_as_cet(indate).strftime("%d %%s") % month_name
def formatt(indate):
"""Format datetime to just time"""
return util.utc_as_cet(indate).strftime("%H:%M:%S")
def formaty(indate):
"""Format datetime to just time"""
return util.utc_as_cet(indate).strftime("%Y")
def duration_from_now(dt):
duration = datetime.datetime.utcnow() - dt
return util.duration_to_text(duration)
register.filter(format)
register.filter(formatd)
register.filter(formatdv)
register.filter(formatdvsy)
register.filter(formatt)
register.filter(formaty)
register.filter(duration_from_now)
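# Example (sketch): once registered, these filters are available to templates
# rendered through this register, e.g. {{ event.start|formatdv }} would yield
# something like "08 december 2020" in CET ('event.start' is illustrative).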
|
# -*- coding: utf-8 -*-
import unittest
from blo.BloArticle import BloArticle
class TestBloArticle(unittest.TestCase):
def setUp(self):
self.blo_article = BloArticle('./templates')
self.base_file_path_1 = "./test_article_1.md"
self.base_file_path_2 = "./test_article_2.md"
def test_failed_load_from_file(self):
with self.assertRaises(FileNotFoundError):
self.blo_article.load_from_file("")
def test_success_load_from_file(self):
expected_str = '# Test Article\nfirst paragraph \n\nsecond paragraph with semi long length string and the quick brown fox jumps over the lazy dog repeat the quick brown fox jumps over the lazy dog repeat the quick brown fox jumps over the lazy dog.\n\nthird paragraph with bullet list\n- 1st\n - 1st c1\n - 1st c2\n- 2nd\n- 3rd\n - 3rd c1\n - 3rd c2\n- 4th\n\n**Strong text** *Italic text*'
self.assertIsNone(self.blo_article.load_from_file(self.base_file_path_1))
self.assertEqual(expected_str, self.blo_article._raw_text)
def test_convert_to_simple_html_1(self):
expected_html = '<h1>Test Article</h1>\n<p>first paragraph</p>\n<p>second paragraph with semi long length string and the quick brown fox jumps over the lazy dog repeat the quick brown fox jumps over the lazy dog repeat the quick brown fox jumps over the lazy dog.</p>\n<p>third paragraph with bullet list</p>\n<ul>\n<li>1st\n<ul>\n<li>1st c1</li>\n<li>1st c2</li>\n</ul>\n</li>\n<li>2nd</li>\n<li>3rd\n<ul>\n<li>3rd c1</li>\n<li>3rd c2</li>\n</ul>\n</li>\n<li>4th</li>\n</ul>\n<p><strong>Strong text</strong> <em>Italic text</em></p>\n'
self.blo_article.load_from_file(self.base_file_path_1)
self.assertMultiLineEqual(expected_html, self.blo_article._convert_to_html())
def test_convert_to_simple_html_2(self):
expected_html = """<h1>日本語を含んだテストパターンファイル</h1>
<h2>天文と俳句(現代仮名遣い風に編集)</h2>
<h3>寺田寅彦</h3>
<p>俳句季題の分類は普通に <strong>時候</strong> 、''天文'''、 地理 、<code>人事</code>、動物、植物という風になっている。
これらのうちで後の三つは別として、初めの三つの項目中における各季題の分け方は現代の科学知識から見ると、
決して合理的であるとは思われない。</p>
<h2>天文と俳句(原文をそのまま青空文庫より引用)</h2>
<h3>寺田寅彦</h3>
<p><code>俳句季題の分類は普通に時候、天文、地理、人事、動物、植物といふ風になつて居る。此等のうちで後の三つは別として、初めの三つの項目中に於ける各季題の分け方は現代の科學知識から見ると、決して合理的であるとは思はれない。</code></p>
<h2>いくつかの記述要素</h2>
<p>リストを記述する</p>
<ul>
<li>リスト項目1
<ul>
<li>子リスト項目1</li>
<li>子リスト項目2</li>
</ul>
</li>
<li>with english text
<ul>
<li><em>in itarlic</em></li>
<li>日本語の表記と英語( <em>English</em> )の表記を併記した状態でテストを行うためのデータ</li>
</ul>
</li>
</ul>
"""
self.blo_article.load_from_file(self.base_file_path_2)
self.assertMultiLineEqual(expected_html, self.blo_article._convert_to_html())
def test_convert_to_template_html(self):
pass
def test_get_digest_1(self):
expected_html = '<h1>Test Article</h1>\n<p>first paragraph</p>\n<p>second paragraph with semi long length string and the quick brown fox jumps over the lazy dog repeat the quick brown fox jumps over the lazy dog repeat the quick brown fox jumps over the lazy dog.</p>\n<p>third paragraph with bullet list</p>\n<ul>\n<li>1st\n<ul>\n<li>1st c1</li>\n<li>1st c2</li>\n</ul>\n</li>\n<li>2nd</li>\n<li>3rd\n<ul>\n<li>3rd c1</li>\n<li>3rd c2</li>\n</ul>\n</li>\n<li>4th</li>\n</ul>\n<p><strong>Strong text</strong> <em>Italic text</em></p>\n'
self.blo_article.load_from_file(self.base_file_path_1)
self.blo_article.get_html()
from hashlib import sha512
hs = sha512()
hs.update(expected_html.encode('utf-8'))
self.assertEqual(hs.hexdigest(), self.blo_article.get_digest())
def test_get_digest_2(self):
expected_html = """<h1>日本語を含んだテストパターンファイル</h1>
<h2>天文と俳句(現代仮名遣い風に編集)</h2>
<h3>寺田寅彦</h3>
<p>俳句季題の分類は普通に <strong>時候</strong> 、''天文'''、 地理 、<code>人事</code>、動物、植物という風になっている。
これらのうちで後の三つは別として、初めの三つの項目中における各季題の分け方は現代の科学知識から見ると、
決して合理的であるとは思われない。</p>
<h2>天文と俳句(原文をそのまま青空文庫より引用)</h2>
<h3>寺田寅彦</h3>
<p><code>俳句季題の分類は普通に時候、天文、地理、人事、動物、植物といふ風になつて居る。此等のうちで後の三つは別として、初めの三つの項目中に於ける各季題の分け方は現代の科學知識から見ると、決して合理的であるとは思はれない。</code></p>
<h2>いくつかの記述要素</h2>
<p>リストを記述する</p>
<ul>
<li>リスト項目1
<ul>
<li>子リスト項目1</li>
<li>子リスト項目2</li>
</ul>
</li>
<li>with english text
<ul>
<li><em>in itarlic</em></li>
<li>日本語の表記と英語( <em>English</em> )の表記を併記した状態でテストを行うためのデータ</li>
</ul>
</li>
</ul>
"""
self.blo_article.load_from_file(self.base_file_path_2)
self.blo_article.get_html()
from hashlib import sha512
hs = sha512()
hs.update(expected_html.encode('utf-8'))
self.assertEqual(hs.hexdigest(), self.blo_article.get_digest())
def test_get_raw_text_body_2(self):
expected_txt = """日本語を含んだテストパターンファイル\n天文と俳句(現代仮名遣い風に編集)\n寺田寅彦\n俳句季題の分類は普通に 時候 、''天文'''、 地理 、人事、動物、植物という風になっている。\nこれらのうちで後の三つは別として、初めの三つの項目中における各季題の分け方は現代の科学知識から見ると、\n決して合理的であるとは思われない。\n天文と俳句(原文をそのまま青空文庫より引用)\n寺田寅彦\n俳句季題の分類は普通に時候、天文、地理、人事、動物、植物といふ風になつて居る。此等のうちで後の三つは別として、初めの三つの項目中に於ける各季題の分け方は現代の科學知識から見ると、決して合理的であるとは思はれない。\nいくつかの記述要素\nリストを記述する\nリスト項目1\n子リスト項目1\n子リスト項目2\nwith english text\nin itarlic\n日本語の表記と英語( English )の表記を併記した状態でテストを行うためのデータ\n"""
self.blo_article.load_from_file(self.base_file_path_2)
base_txt = self.blo_article._get_raw_text_body()
self.assertEqual(expected_txt, base_txt)
    def test_get_wakati_text_body_2(self):
expected_txt = "日本語 を 含ん だ テストパターン ファイル 天文 と 俳句 ( 現代 仮名遣い 風 に 編集 ) 寺田 寅彦 俳句 季題 の 分類 は 普通 に 時候 、 '' 天文 '''、 地理 、 人事 、 動物 、 植物 という 風 に なっ て いる 。 これら の うち で 後 の 三つ は 別 として 、 初め の 三つ の 項目 中 における 各 季題 の 分け 方 は 現代 の 科学 知識 から 見る と 、 決して 合理 的 で ある と は 思わ れ ない 。 天文 と 俳句 ( 原文 を そのまま 青空 文庫 より 引用 ) 寺田 寅彦 俳句 季題 の 分類 は 普通 に 時候 、 天文 、 地理 、 人事 、 動物 、 植物 といふ 風 に なつ て 居る 。 此等 の うち で 後 の 三つ は 別 として 、 初め の 三つ の 項目 中 に 於け る 各 季題 の 分け 方 は 現代 の 科 學 知識 から 見る と 、 決して 合理 的 で ある と は 思は れ ない 。 いくつ か の 記述 要素 リスト を 記述 する リスト 項目 1 子 リスト 項目 1 子 リスト 項目 2 with english text in itarlic 日本語 の 表記 と 英語 ( English ) の 表記 を 併記 し た 状態 で テスト を 行う ため の データ \n"
self.blo_article.load_from_file(self.base_file_path_2)
self.blo_article._get_raw_text_body()
wakati_txt = self.blo_article.get_wakati_txt()
self.assertEqual(expected_txt, wakati_txt)
def test_template(self):
expected_txt = '''<!DOCTYPE html>
<html lang="ja">
<head>
<meta charset="UTF-8">
<title>Test Article</title>
</head>
<body>
<h1>Test Article</h1>
<p>first paragraph</p>
<p>second paragraph with semi long length string and the quick brown fox jumps over the lazy dog repeat the quick brown fox jumps over the lazy dog repeat the quick brown fox jumps over the lazy dog.</p>
<p>third paragraph with bullet list</p>
<ul>
<li>1st
<ul>
<li>1st c1</li>
<li>1st c2</li>
</ul>
</li>
<li>2nd</li>
<li>3rd
<ul>
<li>3rd c1</li>
<li>3rd c2</li>
</ul>
</li>
<li>4th</li>
</ul>
<p><strong>Strong text</strong> <em>Italic text</em></p>
</body>
</html>'''
self.blo_article.load_from_file(self.base_file_path_1)
html_txt = self.blo_article.get_html('test_success.html')
self.assertEqual(expected_txt, html_txt)
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import List, Optional, Dict, Any, Tuple, TYPE_CHECKING
from mephisto.abstractions.blueprint import AgentState
from mephisto.data_model.packet import (
PACKET_TYPE_AGENT_ACTION,
PACKET_TYPE_UPDATE_AGENT_STATUS,
)
import os
import json
import time
if TYPE_CHECKING:
from mephisto.data_model.agent import Agent
from mephisto.data_model.packet import Packet
class ParlAIChatAgentState(AgentState):
"""
Holds information about ParlAI-style chat. Data is stored in json files
containing every act from the ParlAI world.
"""
def __init__(self, agent: "Agent"):
"""
Create an AgentState to track the state of an agent's work on a Unit
Initialize with an existing file if it exists.
"""
self.agent = agent
data_file = self._get_expected_data_file()
if os.path.exists(data_file):
self.load_data()
else:
self.messages: List[Dict[str, Any]] = []
self.init_data = None
self.save_data()
def set_init_state(self, data: Any) -> bool:
"""Set the initial state for this agent"""
if self.init_data is not None:
# Initial state is already set
return False
else:
self.init_data = data
self.save_data()
return True
def get_init_state(self) -> Optional[Dict[str, Any]]:
"""
Return the initial state for this agent,
None if no such state exists
"""
if self.init_data is None:
return None
return {"task_data": self.init_data, "raw_messages": self.messages}
def _get_expected_data_file(self) -> str:
"""Return the place we would expect to find data for this agent state"""
agent_dir = self.agent.get_data_dir()
os.makedirs(agent_dir, exist_ok=True)
return os.path.join(agent_dir, "state.json")
def load_data(self) -> None:
"""Load stored data from a file to this object"""
agent_file = self._get_expected_data_file()
with open(agent_file, "r") as state_json:
state = json.load(state_json)
self.messages = state["outputs"]["messages"]
self.init_data = state["inputs"]
def get_data(self) -> Dict[str, Any]:
"""Return dict with the messages of this agent"""
return {"outputs": {"messages": self.messages}, "inputs": self.init_data}
def get_parsed_data(self) -> Dict[str, Any]:
"""Return the formatted input, conversations, and final data"""
init_data = self.init_data
save_data = None
messages = [
m["data"]
for m in self.messages
if m["packet_type"] == PACKET_TYPE_AGENT_ACTION
]
agent_name = None
if len(messages) > 0:
for m in self.messages:
if m["packet_type"] == PACKET_TYPE_UPDATE_AGENT_STATUS:
if "agent_display_name" in m["data"]["state"]:
agent_name = m["data"]["state"]["agent_display_name"]
break
if "MEPHISTO_is_submit" in messages[-1]:
messages = messages[:-1]
if "WORLD_DATA" in messages[-1]:
save_data = messages[-1]["WORLD_DATA"]
messages = messages[:-1]
return {
"agent_name": agent_name,
"initial_data": init_data,
"messages": messages,
"save_data": save_data,
}
def save_data(self) -> None:
"""Save all messages from this agent to """
agent_file = self._get_expected_data_file()
with open(agent_file, "w+") as state_json:
json.dump(self.get_data(), state_json)
def update_data(self, packet: "Packet") -> None:
"""
Append the incoming packet as well as who it came from
"""
message_data = packet.to_sendable_dict()
message_data["timestamp"] = time.time()
self.messages.append(message_data)
self.save_data()
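# Example (sketch): reading back a saved state. The Agent instance comes from a
# live Mephisto run; the names below are illustrative.
#
#     state = ParlAIChatAgentState(agent)
#     parsed = state.get_parsed_data()
#     print(parsed["agent_name"], len(parsed["messages"]))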
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import curses
from collections import OrderedDict
import pytest
from tuir.page import PageStack
from tuir.submission_page import SubmissionPage
from tuir.docs import FOOTER_SUBMISSION
try:
from unittest import mock
except ImportError:
import mock
PROMPTS = OrderedDict([
('prompt_1', 'comments/571dw3'),
('prompt_2', '///comments/571dw3'),
('prompt_3', '/comments/571dw3'),
('prompt_4', '/r/pics/comments/571dw3/'),
('prompt_5', 'https://www.reddit.com/r/pics/comments/571dw3/at_disneyland'),
])
def test_submission_page_construct(reddit, terminal, config, oauth):
window = terminal.stdscr.subwin
url = ('https://www.reddit.com/r/Python/comments/2xmo63/'
'a_python_terminal_viewer_for_browsing_reddit')
with terminal.loader():
page = SubmissionPage(reddit, terminal, config, oauth, url=url)
assert terminal.loader.exception is None
# Toggle the second comment so we can check the draw more comments method
page.content.toggle(1)
# Set some special flags to make sure that we can draw them
submission_data = page.content.get(-1)
submission_data['gold'] = 1
submission_data['stickied'] = True
submission_data['saved'] = True
submission_data['flair'] = 'flair'
# Set some special flags to make sure that we can draw them
comment_data = page.content.get(0)
comment_data['gold'] = 3
comment_data['stickied'] = True
comment_data['saved'] = True
comment_data['flair'] = 'flair'
page.draw()
# Title
title = url[:terminal.stdscr.ncols-1].encode('utf-8')
window.addstr.assert_any_call(0, 0, title)
# Banner
menu = '[1]hot [2]top [3]rising [4]new [5]controversial'
window.addstr.assert_any_call(0, 0, menu.encode('utf-8'))
# Footer - The text is longer than the default terminal width
text = FOOTER_SUBMISSION.strip()[:79]
window.addstr.assert_any_call(0, 0, text.encode('utf-8'))
# Submission
submission_data = page.content.get(-1)
text = submission_data['title'].encode('utf-8')
window.subwin.addstr.assert_any_call(1, 1, text, 2097152)
assert window.subwin.border.called
# Comment
comment_data = page.content.get(0)
text = comment_data['split_body'][0].encode('utf-8')
window.subwin.addstr.assert_any_call(1, 1, text, curses.A_NORMAL)
# More Comments
comment_data = page.content.get(1)
text = comment_data['body'].encode('utf-8')
window.subwin.addstr.assert_any_call(0, 1, text, curses.A_NORMAL)
# Cursor should not be drawn when the page is first opened
assert not any(args[0][3] == curses.A_REVERSE
for args in window.subwin.addch.call_args_list)
# Reload with a smaller terminal window
terminal.stdscr.ncols = 20
terminal.stdscr.nlines = 10
with terminal.loader():
page = SubmissionPage(reddit, terminal, config, oauth, url=url)
assert terminal.loader.exception is None
page.draw()
def test_submission_refresh(submission_page):
# Should be able to refresh content
submission_page.refresh_content()
def test_submission_unauthenticated(submission_page, terminal):
# Unauthenticated commands
methods = [
'a', # Upvote
'z', # Downvote
'c', # Comment
'e', # Edit
'd', # Delete
'w', # Save
]
for ch in methods:
submission_page.controller.trigger(ch)
text = 'Not logged in'.encode('utf-8')
terminal.stdscr.subwin.addstr.assert_called_with(1, 1, text)
def test_submission_open(submission_page, terminal):
# Open the selected link with the web browser
with mock.patch.object(terminal, 'open_browser'):
submission_page.controller.trigger(terminal.RETURN)
assert terminal.open_browser.called
def test_submission_prompt(submission_page, terminal):
# Prompt for a different subreddit
with mock.patch.object(terminal, 'prompt_input'):
# Valid input
initial_stack_size = PageStack.size()
terminal.prompt_input.return_value = 'front/top'
submission_page.controller.trigger('/')
stack_size_after_first_prompt = PageStack.size()
        assert stack_size_after_first_prompt == initial_stack_size + 1
# Invalid input
terminal.prompt_input.return_value = 'front/pot'
submission_page.controller.trigger('/')
        assert PageStack.size() == stack_size_after_first_prompt
@pytest.mark.parametrize('prompt', PROMPTS.values(), ids=list(PROMPTS))
def test_submission_prompt_submission(submission_page, terminal, prompt):
# Navigate to a different submission from inside a submission
with mock.patch.object(terminal, 'prompt_input'):
initial_stack_size = PageStack.size()
terminal.prompt_input.return_value = prompt
submission_page.content.order = 'top'
submission_page.controller.trigger('/')
assert not terminal.loader.exception
        assert PageStack.size() == initial_stack_size + 1
assert PageStack.current_page().content.order is None
data = PageStack.current_page().content.get(-1)
assert data['object'].id == '571dw3'
def test_submission_order(submission_page):
submission_page.controller.trigger('1')
assert submission_page.content.order == 'hot'
submission_page.controller.trigger('2')
assert submission_page.content.order == 'top'
submission_page.controller.trigger('3')
assert submission_page.content.order == 'rising'
submission_page.controller.trigger('4')
assert submission_page.content.order == 'new'
submission_page.controller.trigger('5')
assert submission_page.content.order == 'controversial'
# Shouldn't be able to sort the submission page by gilded
submission_page.controller.trigger('6')
assert submission_page.content.order == 'controversial'
def test_submission_move_top_bottom(submission_page):
submission_page.controller.trigger('G')
assert submission_page.nav.absolute_index == 44
submission_page.controller.trigger('g')
submission_page.controller.trigger('g')
assert submission_page.nav.absolute_index == -1
def test_submission_move_sibling_parent(submission_page):
# Jump to sibling
with mock.patch.object(submission_page, 'clear_input_queue'):
submission_page.controller.trigger('j')
submission_page.controller.trigger('J')
assert submission_page.nav.absolute_index == 7
# Jump to parent
with mock.patch.object(submission_page, 'clear_input_queue'):
submission_page.controller.trigger('k')
submission_page.controller.trigger('k')
submission_page.controller.trigger('K')
assert submission_page.nav.absolute_index == 0
def test_submission_pager(submission_page, terminal):
# View a submission with the pager
with mock.patch.object(terminal, 'open_pager'):
submission_page.controller.trigger('l')
assert terminal.open_pager.called
# Move down to the first comment
with mock.patch.object(submission_page, 'clear_input_queue'):
submission_page.controller.trigger('j')
# View a comment with the pager
with mock.patch.object(terminal, 'open_pager'):
submission_page.controller.trigger('l')
assert terminal.open_pager.called
def test_submission_comment_not_enough_space(submission_page, terminal):
# The first comment is 10 lines, shrink the screen so that it won't fit.
# Setting the terminal to 10 lines means that there will only be 8 lines
# available (after subtracting the header and footer) to draw the comment.
terminal.stdscr.nlines = 10
# Select the first comment
with mock.patch.object(submission_page, 'clear_input_queue'):
submission_page.move_cursor_down()
submission_page.draw()
text = '(Not enough space to display)'.encode('ascii')
window = terminal.stdscr.subwin
window.subwin.addstr.assert_any_call(6, 1, text, curses.A_NORMAL)
def test_submission_vote(submission_page, refresh_token):
# Log in
submission_page.config.refresh_token = refresh_token
submission_page.oauth.authorize()
# Test voting on the submission
with mock.patch('tuir.packages.praw.objects.Submission.upvote') as upvote, \
mock.patch('tuir.packages.praw.objects.Submission.downvote') as downvote, \
mock.patch('tuir.packages.praw.objects.Submission.clear_vote') as clear_vote:
data = submission_page.get_selected_item()
data['object'].archived = False
# Upvote
submission_page.controller.trigger('a')
assert upvote.called
assert data['likes'] is True
# Clear vote
submission_page.controller.trigger('a')
assert clear_vote.called
assert data['likes'] is None
# Upvote
submission_page.controller.trigger('a')
assert upvote.called
assert data['likes'] is True
# Downvote
submission_page.controller.trigger('z')
assert downvote.called
assert data['likes'] is False
# Clear vote
submission_page.controller.trigger('z')
assert clear_vote.called
assert data['likes'] is None
# Upvote - exception
upvote.side_effect = KeyboardInterrupt
submission_page.controller.trigger('a')
assert data['likes'] is None
# Downvote - exception
downvote.side_effect = KeyboardInterrupt
submission_page.controller.trigger('a')
assert data['likes'] is None
def test_submission_vote_archived(submission_page, refresh_token, terminal):
# Log in
submission_page.config.refresh_token = refresh_token
submission_page.oauth.authorize()
# Load an archived submission
archived_url = 'https://www.reddit.com/r/IAmA/comments/z1c9z/'
submission_page.refresh_content(name=archived_url)
with mock.patch.object(terminal, 'show_notification') as show_notification:
data = submission_page.get_selected_item()
# Upvote the submission
show_notification.reset_mock()
submission_page.controller.trigger('a')
show_notification.assert_called_with('Voting disabled for archived post', style='Error')
assert data['likes'] is None
# Downvote the submission
show_notification.reset_mock()
submission_page.controller.trigger('z')
show_notification.assert_called_with('Voting disabled for archived post', style='Error')
assert data['likes'] is None
def test_submission_save(submission_page, refresh_token):
# Log in
submission_page.config.refresh_token = refresh_token
submission_page.oauth.authorize()
# Test save on the submission
with mock.patch('tuir.packages.praw.objects.Submission.save') as save, \
mock.patch('tuir.packages.praw.objects.Submission.unsave') as unsave:
data = submission_page.content.get(submission_page.nav.absolute_index)
# Save
submission_page.controller.trigger('w')
assert save.called
assert data['saved'] is True
# Unsave
submission_page.controller.trigger('w')
assert unsave.called
assert data['saved'] is False
# Save - exception
save.side_effect = KeyboardInterrupt
submission_page.controller.trigger('w')
assert data['saved'] is False
def test_submission_comment_save(submission_page, terminal, refresh_token):
# Log in
submission_page.config.refresh_token = refresh_token
submission_page.oauth.authorize()
# Move down to the first comment
with mock.patch.object(submission_page, 'clear_input_queue'):
submission_page.controller.trigger('j')
# Test save on the comment submission
with mock.patch('tuir.packages.praw.objects.Comment.save') as save, \
mock.patch('tuir.packages.praw.objects.Comment.unsave') as unsave:
data = submission_page.content.get(submission_page.nav.absolute_index)
# Save
submission_page.controller.trigger('w')
assert save.called
assert data['saved'] is True
# Unsave
submission_page.controller.trigger('w')
assert unsave.called
assert data['saved'] is False
# Save - exception
save.side_effect = KeyboardInterrupt
submission_page.controller.trigger('w')
assert data['saved'] is False
def test_submission_comment(submission_page, terminal, refresh_token):
# Log in
submission_page.config.refresh_token = refresh_token
submission_page.oauth.authorize()
# Leave a comment
with mock.patch('tuir.packages.praw.objects.Submission.add_comment') as add_comment, \
mock.patch.object(terminal, 'open_editor') as open_editor, \
mock.patch('time.sleep'):
open_editor.return_value.__enter__.return_value = 'comment text'
submission_page.controller.trigger('c')
assert open_editor.called
add_comment.assert_called_with('comment text')
def test_submission_delete(submission_page, terminal, refresh_token):
# Log in
submission_page.config.refresh_token = refresh_token
submission_page.oauth.authorize()
# Can't delete the submission
curses.flash.reset_mock()
submission_page.controller.trigger('d')
assert curses.flash.called
# Move down to the first comment
with mock.patch.object(submission_page, 'clear_input_queue'):
submission_page.controller.trigger('j')
# Try to delete the first comment - wrong author
curses.flash.reset_mock()
submission_page.controller.trigger('d')
assert curses.flash.called
# Spoof the author and try to delete again
data = submission_page.content.get(submission_page.nav.absolute_index)
data['author'] = submission_page.reddit.user.name
with mock.patch('tuir.packages.praw.objects.Comment.delete') as delete, \
mock.patch.object(terminal.stdscr, 'getch') as getch, \
mock.patch('time.sleep'):
getch.return_value = ord('y')
submission_page.controller.trigger('d')
assert delete.called
def test_submission_edit(submission_page, terminal, refresh_token):
# Log in
submission_page.config.refresh_token = refresh_token
submission_page.oauth.authorize()
# Try to edit the submission - wrong author
data = submission_page.content.get(submission_page.nav.absolute_index)
data['author'] = 'some other person'
curses.flash.reset_mock()
submission_page.controller.trigger('e')
assert curses.flash.called
    # Spoof the author and try to edit again
data = submission_page.content.get(submission_page.nav.absolute_index)
data['author'] = submission_page.reddit.user.name
with mock.patch('tuir.packages.praw.objects.Submission.edit') as edit, \
mock.patch.object(terminal, 'open_editor') as open_editor, \
mock.patch('time.sleep'):
open_editor.return_value.__enter__.return_value = 'submission text'
submission_page.controller.trigger('e')
assert open_editor.called
edit.assert_called_with('submission text')
# Move down to the first comment
with mock.patch.object(submission_page, 'clear_input_queue'):
submission_page.controller.trigger('j')
# Spoof the author and edit the comment
data = submission_page.content.get(submission_page.nav.absolute_index)
data['author'] = submission_page.reddit.user.name
with mock.patch('tuir.packages.praw.objects.Comment.edit') as edit, \
mock.patch.object(terminal, 'open_editor') as open_editor, \
mock.patch('time.sleep'):
open_editor.return_value.__enter__.return_value = 'comment text'
submission_page.controller.trigger('e')
assert open_editor.called
edit.assert_called_with('comment text')
def test_submission_urlview(submission_page, terminal, refresh_token):
# Log in
submission_page.config.refresh_token = refresh_token
submission_page.oauth.authorize()
# Submission case
data = submission_page.content.get(submission_page.nav.absolute_index)
data['body'] = 'test comment body ❤'
with mock.patch.object(terminal, 'open_urlview') as open_urlview:
submission_page.controller.trigger('b')
open_urlview.assert_called_with('test comment body ❤')
# Subreddit case
data = submission_page.content.get(submission_page.nav.absolute_index)
data['text'] = ''
data['body'] = ''
data['url_full'] = 'http://test.url.com ❤'
with mock.patch.object(terminal, 'open_urlview') as open_urlview, \
mock.patch('subprocess.Popen'):
submission_page.controller.trigger('b')
open_urlview.assert_called_with('http://test.url.com ❤')
def test_submission_prompt_and_select_link(submission_page, terminal):
# A link submission should return the URL that it's pointing to
link = submission_page.prompt_and_select_link()
assert link == 'https://github.com/michael-lazar/rtv'
with mock.patch.object(submission_page, 'clear_input_queue'):
submission_page.controller.trigger('j')
# The first comment doesn't have any links in the comment body
link = submission_page.prompt_and_select_link()
data = submission_page.get_selected_item()
assert link == data['permalink']
with mock.patch.object(submission_page, 'clear_input_queue'):
submission_page.controller.trigger('j')
# The second comment has a link embedded in the comment body, and
# the user is prompted to select which link to open
with mock.patch.object(terminal, 'prompt_user_to_select_link') as prompt:
prompt.return_value = 'https://selected_link'
link = submission_page.prompt_and_select_link()
data = submission_page.get_selected_item()
assert link == prompt.return_value
embedded_url = 'http://peterdowns.com/posts/first-time-with-pypi.html'
assert prompt.call_args[0][0] == [
{'text': 'Permalink', 'href': data['permalink']},
{'text': 'Relevant tutorial', 'href': embedded_url}
]
submission_page.controller.trigger(' ')
# The comment is now hidden so there are no links to select
link = submission_page.prompt_and_select_link()
assert link is None
|
# coding=utf-8
import configobj
import os
import sys
import logging
import inspect
import traceback
import pkg_resources
from diamond.util import load_class_from_name
from diamond.collector import Collector
from diamond.handler.Handler import Handler
logger = logging.getLogger('diamond')
def load_include_path(paths):
"""
Scan for and add paths to the include path
"""
for path in paths:
# Verify the path is valid
if not os.path.isdir(path):
continue
        # Add the path to the system path to avoid name clashes
        # (with mysql-connector, for example)
if path not in sys.path:
sys.path.insert(1, path)
# Load all the files in path
for f in os.listdir(path):
# Are we a directory? If so process down the tree
fpath = os.path.join(path, f)
if os.path.isdir(fpath):
load_include_path([fpath])
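# A minimal sketch (the path is hypothetical): load_include_path recurses one
# directory level at a time, so a tree like /opt/collectors/{cpu,net} ends up
# with all three directories on sys.path.
#
#     load_include_path(['/opt/collectors'])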
def load_dynamic_class(fqn, subclass):
"""
    Dynamically load the class named by fqn and verify that it is a
    strict subclass of the given base class
"""
if not isinstance(fqn, basestring):
return fqn
cls = load_class_from_name(fqn)
if cls == subclass or not issubclass(cls, subclass):
raise TypeError("%s is not a valid %s" % (fqn, subclass.__name__))
return cls
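# A minimal sketch of load_dynamic_class (the dotted path below is
# hypothetical): a fully-qualified name is resolved and checked to be a
# strict subclass of the expected base.
#
#     >>> cls = load_dynamic_class('mypkg.handlers.MyHandler', Handler)
#     >>> issubclass(cls, Handler)
#     True
#
# Passing an already-loaded class object returns it unchanged; the base
# class itself, or a non-subclass, raises TypeError.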
def load_handlers(config, handler_names):
"""
Load handlers
"""
handlers = []
if isinstance(handler_names, basestring):
handler_names = [handler_names]
for handler in handler_names:
logger.debug('Loading Handler %s', handler)
try:
# Load Handler Class
cls = load_dynamic_class(handler, Handler)
cls_name = cls.__name__
# Initialize Handler config
handler_config = configobj.ConfigObj()
# Merge default Handler default config
handler_config.merge(config['handlers']['default'])
# Check if Handler config exists
if cls_name in config['handlers']:
# Merge Handler config section
handler_config.merge(config['handlers'][cls_name])
# Check for config file in config directory
if 'handlers_config_path' in config['server']:
configfile = os.path.join(
config['server']['handlers_config_path'],
cls_name) + '.conf'
if os.path.exists(configfile):
                    # Merge Handler config file
handler_config.merge(configobj.ConfigObj(configfile))
# Initialize Handler class
h = cls(handler_config)
handlers.append(h)
except (ImportError, SyntaxError):
# Log Error
logger.warning("Failed to load handler %s. %s",
handler,
traceback.format_exc())
continue
return handlers
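# Config precedence sketch for load_handlers, lowest to highest (the handler
# name is hypothetical). Later merges win, so the standalone .conf file
# overrides both in-config sections:
#
#     config['handlers']['default']             # merged first
#     config['handlers']['MyHandler']           # then the named section
#     <handlers_config_path>/MyHandler.conf     # finally the per-handler file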
def load_collectors(paths):
"""
Load all collectors
"""
collectors = load_collectors_from_paths(paths)
collectors.update(load_collectors_from_entry_point('diamond.collectors'))
return collectors
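# A minimal usage sketch (the path and collector name are hypothetical):
# filesystem collectors and entry-point collectors are merged into a single
# name -> class mapping, with entry-point entries overwriting same-named
# filesystem entries.
#
#     collectors = load_collectors(['/usr/share/diamond/collectors'])
#     cls = collectors.get('ExampleCollector')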
def load_collectors_from_paths(paths):
"""
Scan for collectors to load from path
"""
# Initialize return value
collectors = {}
    if paths is None:
        # nothing to scan; return the (still empty) mapping rather than None
        return collectors
if isinstance(paths, basestring):
paths = paths.split(',')
paths = map(str.strip, paths)
load_include_path(paths)
for path in paths:
# Get a list of files in the directory, if the directory exists
if not os.path.exists(path):
raise OSError("Directory does not exist: %s" % path)
if path.endswith('tests') or path.endswith('fixtures'):
return collectors
# Load all the files in path
for f in os.listdir(path):
# Are we a directory? If so process down the tree
fpath = os.path.join(path, f)
if os.path.isdir(fpath):
subcollectors = load_collectors_from_paths([fpath])
for key in subcollectors:
collectors[key] = subcollectors[key]
            # Ignore anything that isn't a non-test, non-hidden .py file
            elif (os.path.isfile(fpath) and
                  len(f) > 3 and
                  f.endswith('.py') and
                  not f.startswith('test') and
                  not f.startswith('.')):
modname = f[:-3]
try:
# Import the module
mod = __import__(modname, globals(), locals(), ['*'])
                except (KeyboardInterrupt, SystemExit) as err:
                    logger.error(
                        "System or keyboard interrupt "
                        "while loading module %s",
                        modname)
                    if isinstance(err, SystemExit):
                        sys.exit(err.code)
                    raise KeyboardInterrupt
except Exception:
# Log error
logger.error("Failed to import module: %s. %s",
modname,
traceback.format_exc())
else:
for name, cls in get_collectors_from_module(mod):
collectors[name] = cls
# Return Collector classes
return collectors
def load_collectors_from_entry_point(path):
"""
Load collectors that were installed into an entry_point.
"""
collectors = {}
for ep in pkg_resources.iter_entry_points(path):
try:
mod = ep.load()
except Exception:
logger.error('Failed to import entry_point: %s. %s',
ep.name,
traceback.format_exc())
else:
collectors.update(get_collectors_from_module(mod))
return collectors
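# Collectors can also be exposed through setuptools entry points; a minimal
# setup.py sketch (package and module names hypothetical):
#
#     setup(
#         ...
#         entry_points={
#             'diamond.collectors': [
#                 'examplemodule = examplepkg.examplemodule',
#             ],
#         },
#     )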
def get_collectors_from_module(mod):
"""
Locate all of the collector classes within a given module
"""
for attrname in dir(mod):
attr = getattr(mod, attrname)
        # Only attempt to load attributes that are in fact classes,
        # that are Collectors, and that are not the base Collector class
        if (inspect.isclass(attr) and
                issubclass(attr, Collector) and
                attr != Collector):
if attrname.startswith('parent_'):
continue
# Get class name
fqcn = '.'.join([mod.__name__, attrname])
try:
# Load Collector class
cls = load_dynamic_class(fqcn, Collector)
# Add Collector class
yield cls.__name__, cls
except Exception:
# Log error
logger.error(
"Failed to load Collector: %s. %s",
fqcn, traceback.format_exc())
continue
def initialize_collector(cls, name=None, configfile=None, handlers=None):
    """
    Initialize collector
    """
    # avoid the shared mutable default argument pitfall
    if handlers is None:
        handlers = []
    collector = None
try:
# Initialize Collector
collector = cls(name=name, configfile=configfile, handlers=handlers)
except Exception:
# Log error
logger.error("Failed to initialize Collector: %s. %s",
cls.__name__, traceback.format_exc())
# Return collector
return collector
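# End-to-end sketch tying the loaders together (paths and names are
# hypothetical; initialization errors are logged and surface as None):
#
#     collectors = load_collectors(['/usr/share/diamond/collectors'])
#     handlers = load_handlers(config, config['server']['handlers'])
#     for cls in collectors.values():
#         instance = initialize_collector(cls, handlers=handlers)
#         if instance is not None:
#             instance.collect()    # Collector API not shown in this module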
|
"""This module contains the general information for DupeScope ManagedObject."""
import sys
import os
from ...ucsmo import ManagedObject
from ...ucscoremeta import UcsVersion, MoPropertyMeta, MoMeta
from ...ucsmeta import VersionMeta
class DupeScopeConsts():
IS_SYSTEM_FALSE = "false"
IS_SYSTEM_NO = "no"
IS_SYSTEM_TRUE = "true"
IS_SYSTEM_YES = "yes"
MO_CLASS_ID_AAA_AUTH_METHOD = "aaaAuthMethod"
MO_CLASS_ID_AAA_AUTH_REALM = "aaaAuthRealm"
MO_CLASS_ID_AAA_AUTH_REALM_FSM = "aaaAuthRealmFsm"
MO_CLASS_ID_AAA_AUTH_REALM_FSM_STAGE = "aaaAuthRealmFsmStage"
MO_CLASS_ID_AAA_BANNER = "aaaBanner"
MO_CLASS_ID_AAA_CIMC_SESSION = "aaaCimcSession"
MO_CLASS_ID_AAA_CONFIG = "aaaConfig"
MO_CLASS_ID_AAA_CONSOLE_AUTH = "aaaConsoleAuth"
MO_CLASS_ID_AAA_DEFAULT_AUTH = "aaaDefaultAuth"
MO_CLASS_ID_AAA_DEFINITION = "aaaDefinition"
MO_CLASS_ID_AAA_DOMAIN = "aaaDomain"
MO_CLASS_ID_AAA_DOMAIN_AUTH = "aaaDomainAuth"
MO_CLASS_ID_AAA_EP = "aaaEp"
MO_CLASS_ID_AAA_EP_AUTH_PROFILE = "aaaEpAuthProfile"
MO_CLASS_ID_AAA_EP_FSM = "aaaEpFsm"
MO_CLASS_ID_AAA_EP_FSM_STAGE = "aaaEpFsmStage"
MO_CLASS_ID_AAA_EP_FSM_TASK = "aaaEpFsmTask"
MO_CLASS_ID_AAA_EP_LOGIN = "aaaEpLogin"
MO_CLASS_ID_AAA_EP_USER = "aaaEpUser"
MO_CLASS_ID_AAA_EXT_MGMT_CUT_THRU_TKN = "aaaExtMgmtCutThruTkn"
MO_CLASS_ID_AAA_ITEM = "aaaItem"
MO_CLASS_ID_AAA_LDAP_EP = "aaaLdapEp"
MO_CLASS_ID_AAA_LDAP_EP_FSM = "aaaLdapEpFsm"
MO_CLASS_ID_AAA_LDAP_EP_FSM_STAGE = "aaaLdapEpFsmStage"
MO_CLASS_ID_AAA_LDAP_GROUP = "aaaLdapGroup"
MO_CLASS_ID_AAA_LDAP_GROUP_RULE = "aaaLdapGroupRule"
MO_CLASS_ID_AAA_LDAP_PROVIDER = "aaaLdapProvider"
MO_CLASS_ID_AAA_LOCALE = "aaaLocale"
MO_CLASS_ID_AAA_LOG = "aaaLog"
MO_CLASS_ID_AAA_MOD_LR = "aaaModLR"
MO_CLASS_ID_AAA_ORG = "aaaOrg"
MO_CLASS_ID_AAA_PRE_LOGIN_BANNER = "aaaPreLoginBanner"
MO_CLASS_ID_AAA_PROVIDER = "aaaProvider"
MO_CLASS_ID_AAA_PROVIDER_GROUP = "aaaProviderGroup"
MO_CLASS_ID_AAA_PROVIDER_REF = "aaaProviderRef"
MO_CLASS_ID_AAA_PWD_PROFILE = "aaaPwdProfile"
MO_CLASS_ID_AAA_RADIUS_EP = "aaaRadiusEp"
MO_CLASS_ID_AAA_RADIUS_EP_FSM = "aaaRadiusEpFsm"
MO_CLASS_ID_AAA_RADIUS_EP_FSM_STAGE = "aaaRadiusEpFsmStage"
MO_CLASS_ID_AAA_RADIUS_PROVIDER = "aaaRadiusProvider"
MO_CLASS_ID_AAA_REALM = "aaaRealm"
MO_CLASS_ID_AAA_REALM_FSM = "aaaRealmFsm"
MO_CLASS_ID_AAA_REALM_FSM_STAGE = "aaaRealmFsmStage"
MO_CLASS_ID_AAA_REALM_FSM_TASK = "aaaRealmFsmTask"
MO_CLASS_ID_AAA_REMOTE_USER = "aaaRemoteUser"
MO_CLASS_ID_AAA_ROLE = "aaaRole"
MO_CLASS_ID_AAA_SESSION = "aaaSession"
MO_CLASS_ID_AAA_SESSION_INFO = "aaaSessionInfo"
MO_CLASS_ID_AAA_SESSION_INFO_TABLE = "aaaSessionInfoTable"
MO_CLASS_ID_AAA_SESSION_LR = "aaaSessionLR"
MO_CLASS_ID_AAA_SHELL_LOGIN = "aaaShellLogin"
MO_CLASS_ID_AAA_SSH_AUTH = "aaaSshAuth"
MO_CLASS_ID_AAA_SYSTEM_USER = "aaaSystemUser"
MO_CLASS_ID_AAA_TACACS_PLUS_EP = "aaaTacacsPlusEp"
MO_CLASS_ID_AAA_TACACS_PLUS_EP_FSM = "aaaTacacsPlusEpFsm"
MO_CLASS_ID_AAA_TACACS_PLUS_EP_FSM_STAGE = "aaaTacacsPlusEpFsmStage"
MO_CLASS_ID_AAA_TACACS_PLUS_PROVIDER = "aaaTacacsPlusProvider"
MO_CLASS_ID_AAA_USER = "aaaUser"
MO_CLASS_ID_AAA_USER_ACTION = "aaaUserAction"
MO_CLASS_ID_AAA_USER_DATA = "aaaUserData"
MO_CLASS_ID_AAA_USER_EP = "aaaUserEp"
MO_CLASS_ID_AAA_USER_EP_FSM = "aaaUserEpFsm"
MO_CLASS_ID_AAA_USER_EP_FSM_STAGE = "aaaUserEpFsmStage"
MO_CLASS_ID_AAA_USER_EP_FSM_TASK = "aaaUserEpFsmTask"
MO_CLASS_ID_AAA_USER_GROUP = "aaaUserGroup"
MO_CLASS_ID_AAA_USER_LOCALE = "aaaUserLocale"
MO_CLASS_ID_AAA_USER_LOGIN = "aaaUserLogin"
MO_CLASS_ID_AAA_USER_ROLE = "aaaUserRole"
MO_CLASS_ID_AAA_WEB_LOGIN = "aaaWebLogin"
MO_CLASS_ID_ADAPTOR_BEH_CAP = "adaptorBehCap"
MO_CLASS_ID_ADAPTOR_CIO_EP = "adaptorCIoEp"
MO_CLASS_ID_ADAPTOR_CAP_DEF = "adaptorCapDef"
MO_CLASS_ID_ADAPTOR_CAP_QUAL = "adaptorCapQual"
MO_CLASS_ID_ADAPTOR_CAP_SPEC = "adaptorCapSpec"
MO_CLASS_ID_ADAPTOR_DIAG_CAP = "adaptorDiagCap"
MO_CLASS_ID_ADAPTOR_DYNAMIC_CONFIG_CAP = "adaptorDynamicConfigCap"
MO_CLASS_ID_ADAPTOR_ETH_ARFS_PROFILE = "adaptorEthArfsProfile"
MO_CLASS_ID_ADAPTOR_ETH_COMP_QUEUE_PROFILE = "adaptorEthCompQueueProfile"
MO_CLASS_ID_ADAPTOR_ETH_FAILOVER_PROFILE = "adaptorEthFailoverProfile"
MO_CLASS_ID_ADAPTOR_ETH_INTERRUPT_PROFILE = "adaptorEthInterruptProfile"
MO_CLASS_ID_ADAPTOR_ETH_NVGREPROFILE = "adaptorEthNVGREProfile"
MO_CLASS_ID_ADAPTOR_ETH_OFFLOAD_PROFILE = "adaptorEthOffloadProfile"
MO_CLASS_ID_ADAPTOR_ETH_PORT_BY_SIZE_LARGE_STATS = "adaptorEthPortBySizeLargeStats"
MO_CLASS_ID_ADAPTOR_ETH_PORT_BY_SIZE_LARGE_STATS_HIST = "adaptorEthPortBySizeLargeStatsHist"
MO_CLASS_ID_ADAPTOR_ETH_PORT_BY_SIZE_SMALL_STATS = "adaptorEthPortBySizeSmallStats"
MO_CLASS_ID_ADAPTOR_ETH_PORT_BY_SIZE_SMALL_STATS_HIST = "adaptorEthPortBySizeSmallStatsHist"
MO_CLASS_ID_ADAPTOR_ETH_PORT_ERR_STATS = "adaptorEthPortErrStats"
MO_CLASS_ID_ADAPTOR_ETH_PORT_ERR_STATS_HIST = "adaptorEthPortErrStatsHist"
MO_CLASS_ID_ADAPTOR_ETH_PORT_MCAST_STATS = "adaptorEthPortMcastStats"
MO_CLASS_ID_ADAPTOR_ETH_PORT_MCAST_STATS_HIST = "adaptorEthPortMcastStatsHist"
MO_CLASS_ID_ADAPTOR_ETH_PORT_OUTSIZED_STATS = "adaptorEthPortOutsizedStats"
MO_CLASS_ID_ADAPTOR_ETH_PORT_OUTSIZED_STATS_HIST = "adaptorEthPortOutsizedStatsHist"
MO_CLASS_ID_ADAPTOR_ETH_PORT_STATS = "adaptorEthPortStats"
MO_CLASS_ID_ADAPTOR_ETH_PORT_STATS_HIST = "adaptorEthPortStatsHist"
MO_CLASS_ID_ADAPTOR_ETH_QUEUE_PROFILE = "adaptorEthQueueProfile"
MO_CLASS_ID_ADAPTOR_ETH_RECV_QUEUE_PROFILE = "adaptorEthRecvQueueProfile"
MO_CLASS_ID_ADAPTOR_ETH_RO_CEPROFILE = "adaptorEthRoCEProfile"
MO_CLASS_ID_ADAPTOR_ETH_VX_LANPROFILE = "adaptorEthVxLANProfile"
MO_CLASS_ID_ADAPTOR_ETH_WORK_QUEUE_PROFILE = "adaptorEthWorkQueueProfile"
MO_CLASS_ID_ADAPTOR_ETHER_IF_STATS = "adaptorEtherIfStats"
MO_CLASS_ID_ADAPTOR_ETHER_IF_STATS_HIST = "adaptorEtherIfStatsHist"
MO_CLASS_ID_ADAPTOR_EXT_ETH_IF = "adaptorExtEthIf"
MO_CLASS_ID_ADAPTOR_EXT_ETH_IF_FSM = "adaptorExtEthIfFsm"
MO_CLASS_ID_ADAPTOR_EXT_ETH_IF_FSM_STAGE = "adaptorExtEthIfFsmStage"
MO_CLASS_ID_ADAPTOR_EXT_ETH_IF_FSM_TASK = "adaptorExtEthIfFsmTask"
MO_CLASS_ID_ADAPTOR_EXT_ETH_IF_PC = "adaptorExtEthIfPc"
MO_CLASS_ID_ADAPTOR_EXT_ETH_IF_PC_EP = "adaptorExtEthIfPcEp"
MO_CLASS_ID_ADAPTOR_EXT_IF = "adaptorExtIf"
MO_CLASS_ID_ADAPTOR_EXT_IF_EP = "adaptorExtIfEp"
MO_CLASS_ID_ADAPTOR_EXT_IF_PC = "adaptorExtIfPc"
MO_CLASS_ID_ADAPTOR_EXT_IP_V6_RSS_HASH_PROFILE = "adaptorExtIpV6RssHashProfile"
MO_CLASS_ID_ADAPTOR_EXTERNAL_EP = "adaptorExternalEp"
MO_CLASS_ID_ADAPTOR_EXTERNAL_PC = "adaptorExternalPc"
MO_CLASS_ID_ADAPTOR_FAMILY_TYPE_DEF = "adaptorFamilyTypeDef"
MO_CLASS_ID_ADAPTOR_FC_CDB_WORK_QUEUE_PROFILE = "adaptorFcCdbWorkQueueProfile"
MO_CLASS_ID_ADAPTOR_FC_ERROR_RECOVERY_PROFILE = "adaptorFcErrorRecoveryProfile"
MO_CLASS_ID_ADAPTOR_FC_IF_EVENT_STATS = "adaptorFcIfEventStats"
MO_CLASS_ID_ADAPTOR_FC_IF_EVENT_STATS_HIST = "adaptorFcIfEventStatsHist"
MO_CLASS_ID_ADAPTOR_FC_IF_FC4_STATS = "adaptorFcIfFC4Stats"
MO_CLASS_ID_ADAPTOR_FC_IF_FC4_STATS_HIST = "adaptorFcIfFC4StatsHist"
MO_CLASS_ID_ADAPTOR_FC_IF_FRAME_STATS = "adaptorFcIfFrameStats"
MO_CLASS_ID_ADAPTOR_FC_IF_FRAME_STATS_HIST = "adaptorFcIfFrameStatsHist"
MO_CLASS_ID_ADAPTOR_FC_INTERRUPT_PROFILE = "adaptorFcInterruptProfile"
MO_CLASS_ID_ADAPTOR_FC_LOGI_PROFILE = "adaptorFcLogiProfile"
MO_CLASS_ID_ADAPTOR_FC_OEIF = "adaptorFcOEIf"
MO_CLASS_ID_ADAPTOR_FC_PORT_FLOGI_PROFILE = "adaptorFcPortFLogiProfile"
MO_CLASS_ID_ADAPTOR_FC_PORT_PLOGI_PROFILE = "adaptorFcPortPLogiProfile"
MO_CLASS_ID_ADAPTOR_FC_PORT_PROFILE = "adaptorFcPortProfile"
MO_CLASS_ID_ADAPTOR_FC_PORT_STATS = "adaptorFcPortStats"
MO_CLASS_ID_ADAPTOR_FC_PORT_STATS_HIST = "adaptorFcPortStatsHist"
MO_CLASS_ID_ADAPTOR_FC_QUEUE_PROFILE = "adaptorFcQueueProfile"
MO_CLASS_ID_ADAPTOR_FC_RECV_QUEUE_PROFILE = "adaptorFcRecvQueueProfile"
MO_CLASS_ID_ADAPTOR_FC_WORK_QUEUE_PROFILE = "adaptorFcWorkQueueProfile"
MO_CLASS_ID_ADAPTOR_FRU_CAP_PROVIDER = "adaptorFruCapProvider"
MO_CLASS_ID_ADAPTOR_FRU_CAP_REF = "adaptorFruCapRef"
MO_CLASS_ID_ADAPTOR_FW_CAP_PROVIDER = "adaptorFwCapProvider"
MO_CLASS_ID_ADAPTOR_HOST_ETH_IF = "adaptorHostEthIf"
MO_CLASS_ID_ADAPTOR_HOST_ETH_IF_FSM = "adaptorHostEthIfFsm"
MO_CLASS_ID_ADAPTOR_HOST_ETH_IF_FSM_STAGE = "adaptorHostEthIfFsmStage"
MO_CLASS_ID_ADAPTOR_HOST_ETH_IF_FSM_TASK = "adaptorHostEthIfFsmTask"
MO_CLASS_ID_ADAPTOR_HOST_ETH_IF_PROFILE = "adaptorHostEthIfProfile"
MO_CLASS_ID_ADAPTOR_HOST_FC_IF = "adaptorHostFcIf"
MO_CLASS_ID_ADAPTOR_HOST_FC_IF_FSM = "adaptorHostFcIfFsm"
MO_CLASS_ID_ADAPTOR_HOST_FC_IF_FSM_STAGE = "adaptorHostFcIfFsmStage"
MO_CLASS_ID_ADAPTOR_HOST_FC_IF_FSM_TASK = "adaptorHostFcIfFsmTask"
MO_CLASS_ID_ADAPTOR_HOST_FC_IF_PROFILE = "adaptorHostFcIfProfile"
MO_CLASS_ID_ADAPTOR_HOST_IF = "adaptorHostIf"
MO_CLASS_ID_ADAPTOR_HOST_IF_CONN_DEF = "adaptorHostIfConnDef"
MO_CLASS_ID_ADAPTOR_HOST_IF_PROFILE = "adaptorHostIfProfile"
MO_CLASS_ID_ADAPTOR_HOST_ISCSI_IF = "adaptorHostIscsiIf"
MO_CLASS_ID_ADAPTOR_HOST_ISCSI_IF_PROFILE = "adaptorHostIscsiIfProfile"
MO_CLASS_ID_ADAPTOR_HOST_MGMT_CAP = "adaptorHostMgmtCap"
MO_CLASS_ID_ADAPTOR_HOST_PORT = "adaptorHostPort"
MO_CLASS_ID_ADAPTOR_HOST_PORT_CAP = "adaptorHostPortCap"
MO_CLASS_ID_ADAPTOR_HOST_SCSI_IF = "adaptorHostScsiIf"
MO_CLASS_ID_ADAPTOR_HOST_SCSI_LUN_REF = "adaptorHostScsiLunRef"
MO_CLASS_ID_ADAPTOR_HOST_SERVICE_ETH_IF = "adaptorHostServiceEthIf"
MO_CLASS_ID_ADAPTOR_HOST_VNIC_HW_ADDR_CAP = "adaptorHostVnicHwAddrCap"
MO_CLASS_ID_ADAPTOR_HOSTETH_HW_ADDR_CAP = "adaptorHostethHwAddrCap"
MO_CLASS_ID_ADAPTOR_HOSTFC_HW_ADDR_CAP = "adaptorHostfcHwAddrCap"
MO_CLASS_ID_ADAPTOR_HW_ADDR_CAP = "adaptorHwAddrCap"
MO_CLASS_ID_ADAPTOR_ISCSI_CAP = "adaptorIScsiCap"
MO_CLASS_ID_ADAPTOR_IP_V4_RSS_HASH_PROFILE = "adaptorIpV4RssHashProfile"
MO_CLASS_ID_ADAPTOR_IP_V6_RSS_HASH_PROFILE = "adaptorIpV6RssHashProfile"
MO_CLASS_ID_ADAPTOR_ISCSI_AUTH = "adaptorIscsiAuth"
MO_CLASS_ID_ADAPTOR_ISCSI_PROT = "adaptorIscsiProt"
MO_CLASS_ID_ADAPTOR_ISCSI_TARGET_IF = "adaptorIscsiTargetIf"
MO_CLASS_ID_ADAPTOR_LAN_CAP = "adaptorLanCap"
MO_CLASS_ID_ADAPTOR_LLDP_CAP = "adaptorLldpCap"
MO_CLASS_ID_ADAPTOR_MENLO_BASE_ERROR_STATS = "adaptorMenloBaseErrorStats"
MO_CLASS_ID_ADAPTOR_MENLO_BASE_ERROR_STATS_HIST = "adaptorMenloBaseErrorStatsHist"
MO_CLASS_ID_ADAPTOR_MENLO_DCE_PORT_STATS = "adaptorMenloDcePortStats"
MO_CLASS_ID_ADAPTOR_MENLO_DCE_PORT_STATS_HIST = "adaptorMenloDcePortStatsHist"
MO_CLASS_ID_ADAPTOR_MENLO_ETH_ERROR_STATS = "adaptorMenloEthErrorStats"
MO_CLASS_ID_ADAPTOR_MENLO_ETH_ERROR_STATS_HIST = "adaptorMenloEthErrorStatsHist"
MO_CLASS_ID_ADAPTOR_MENLO_ETH_STATS = "adaptorMenloEthStats"
MO_CLASS_ID_ADAPTOR_MENLO_ETH_STATS_HIST = "adaptorMenloEthStatsHist"
MO_CLASS_ID_ADAPTOR_MENLO_FC_ERROR_STATS = "adaptorMenloFcErrorStats"
MO_CLASS_ID_ADAPTOR_MENLO_FC_ERROR_STATS_HIST = "adaptorMenloFcErrorStatsHist"
MO_CLASS_ID_ADAPTOR_MENLO_FC_STATS = "adaptorMenloFcStats"
MO_CLASS_ID_ADAPTOR_MENLO_FC_STATS_HIST = "adaptorMenloFcStatsHist"
MO_CLASS_ID_ADAPTOR_MENLO_HOST_PORT_STATS = "adaptorMenloHostPortStats"
MO_CLASS_ID_ADAPTOR_MENLO_HOST_PORT_STATS_HIST = "adaptorMenloHostPortStatsHist"
MO_CLASS_ID_ADAPTOR_MENLO_MCPU_ERROR_STATS = "adaptorMenloMcpuErrorStats"
MO_CLASS_ID_ADAPTOR_MENLO_MCPU_ERROR_STATS_HIST = "adaptorMenloMcpuErrorStatsHist"
MO_CLASS_ID_ADAPTOR_MENLO_MCPU_STATS = "adaptorMenloMcpuStats"
MO_CLASS_ID_ADAPTOR_MENLO_MCPU_STATS_HIST = "adaptorMenloMcpuStatsHist"
MO_CLASS_ID_ADAPTOR_MENLO_NET_EG_STATS = "adaptorMenloNetEgStats"
MO_CLASS_ID_ADAPTOR_MENLO_NET_EG_STATS_HIST = "adaptorMenloNetEgStatsHist"
MO_CLASS_ID_ADAPTOR_MENLO_NET_IN_STATS = "adaptorMenloNetInStats"
MO_CLASS_ID_ADAPTOR_MENLO_NET_IN_STATS_HIST = "adaptorMenloNetInStatsHist"
MO_CLASS_ID_ADAPTOR_MENLO_QERROR_STATS = "adaptorMenloQErrorStats"
MO_CLASS_ID_ADAPTOR_MENLO_QERROR_STATS_HIST = "adaptorMenloQErrorStatsHist"
MO_CLASS_ID_ADAPTOR_MENLO_QSTATS = "adaptorMenloQStats"
MO_CLASS_ID_ADAPTOR_MENLO_QSTATS_HIST = "adaptorMenloQStatsHist"
MO_CLASS_ID_ADAPTOR_MGMT_CAP = "adaptorMgmtCap"
MO_CLASS_ID_ADAPTOR_MGMT_VNIC_ETH_CONFIG = "adaptorMgmtVnicEthConfig"
MO_CLASS_ID_ADAPTOR_NW_MGMT_CAP = "adaptorNwMgmtCap"
MO_CLASS_ID_ADAPTOR_NW_STATS_MGMT_CAP = "adaptorNwStatsMgmtCap"
MO_CLASS_ID_ADAPTOR_PIO_EP = "adaptorPIoEp"
MO_CLASS_ID_ADAPTOR_PROFILE_ITEM = "adaptorProfileItem"
MO_CLASS_ID_ADAPTOR_PROTOCOL_PROFILE = "adaptorProtocolProfile"
MO_CLASS_ID_ADAPTOR_QUAL = "adaptorQual"
MO_CLASS_ID_ADAPTOR_QUEUE_PROFILE = "adaptorQueueProfile"
MO_CLASS_ID_ADAPTOR_RNIC_CAP_SPEC = "adaptorRnicCapSpec"
MO_CLASS_ID_ADAPTOR_RSS_HASH_PROFILE = "adaptorRssHashProfile"
MO_CLASS_ID_ADAPTOR_RSS_PROFILE = "adaptorRssProfile"
MO_CLASS_ID_ADAPTOR_SAN_CAP = "adaptorSanCap"
MO_CLASS_ID_ADAPTOR_TCP_IP_RSS_HASH_PROFILE = "adaptorTcpIpRssHashProfile"
MO_CLASS_ID_ADAPTOR_UNIT = "adaptorUnit"
MO_CLASS_ID_ADAPTOR_UNIT_ASSOC_CTX = "adaptorUnitAssocCtx"
MO_CLASS_ID_ADAPTOR_UNIT_EXTN = "adaptorUnitExtn"
MO_CLASS_ID_ADAPTOR_UPLINK_HW_ADDR_CAP = "adaptorUplinkHwAddrCap"
MO_CLASS_ID_ADAPTOR_UPLINK_PORT_STATS = "adaptorUplinkPortStats"
MO_CLASS_ID_ADAPTOR_USNIC_CONN_DEF = "adaptorUsnicConnDef"
MO_CLASS_ID_ADAPTOR_VLAN = "adaptorVlan"
MO_CLASS_ID_ADAPTOR_VNIC_STATS = "adaptorVnicStats"
MO_CLASS_ID_ADAPTOR_VNIC_STATS_HIST = "adaptorVnicStatsHist"
MO_CLASS_ID_ADAPTOR_VSAN = "adaptorVsan"
MO_CLASS_ID_APE_ADAPTER = "apeAdapter"
MO_CLASS_ID_APE_ADAPTER_VNIC = "apeAdapterVnic"
MO_CLASS_ID_APE_ATTRIBUTE = "apeAttribute"
MO_CLASS_ID_APE_BOOT_METHOD = "apeBootMethod"
MO_CLASS_ID_APE_CONTROLLER_CHASSIS = "apeControllerChassis"
MO_CLASS_ID_APE_CONTROLLER_EEPROM = "apeControllerEeprom"
MO_CLASS_ID_APE_CONTROLLER_MANAGER = "apeControllerManager"
MO_CLASS_ID_APE_DCOS_AG_MANAGER = "apeDcosAgManager"
MO_CLASS_ID_APE_FRU = "apeFru"
MO_CLASS_ID_APE_HOST_AGENT = "apeHostAgent"
MO_CLASS_ID_APE_LANBOOT = "apeLANBoot"
MO_CLASS_ID_APE_LOCAL_DISK_BOOT = "apeLocalDiskBoot"
MO_CLASS_ID_APE_MANAGER = "apeManager"
MO_CLASS_ID_APE_MC = "apeMc"
MO_CLASS_ID_APE_MC_TABLE = "apeMcTable"
MO_CLASS_ID_APE_MENLO = "apeMenlo"
MO_CLASS_ID_APE_MENLO_VNIC = "apeMenloVnic"
MO_CLASS_ID_APE_MENLO_VNIC_STATS = "apeMenloVnicStats"
MO_CLASS_ID_APE_NIC_AG_MANAGER = "apeNicAgManager"
MO_CLASS_ID_APE_PALO = "apePalo"
MO_CLASS_ID_APE_PALO_VNIC = "apePaloVnic"
MO_CLASS_ID_APE_PALO_VNIC_STATS = "apePaloVnicStats"
MO_CLASS_ID_APE_PARAM = "apeParam"
MO_CLASS_ID_APE_READING = "apeReading"
MO_CLASS_ID_APE_SANBOOT = "apeSANBoot"
MO_CLASS_ID_APE_SDR = "apeSdr"
MO_CLASS_ID_APE_SWITCH_FIRMWARE_INV = "apeSwitchFirmwareInv"
MO_CLASS_ID_APE_VIRTUAL_MEDIA_BOOT = "apeVirtualMediaBoot"
MO_CLASS_ID_APE_VNIC_STATS = "apeVnicStats"
MO_CLASS_ID_BIOS_AREF = "biosARef"
MO_CLASS_ID_BIOS_BOT = "biosBOT"
MO_CLASS_ID_BIOS_BOOT_DEV = "biosBootDev"
MO_CLASS_ID_BIOS_BOOT_DEV_GRP = "biosBootDevGrp"
MO_CLASS_ID_BIOS_FEATURE_REF = "biosFeatureRef"
MO_CLASS_ID_BIOS_PARAMETER_REF = "biosParameterRef"
MO_CLASS_ID_BIOS_REF = "biosRef"
MO_CLASS_ID_BIOS_SETTING_REF = "biosSettingRef"
MO_CLASS_ID_BIOS_SETTINGS = "biosSettings"
MO_CLASS_ID_BIOS_UNIT = "biosUnit"
MO_CLASS_ID_BIOS_VFEAT = "biosVFeat"
MO_CLASS_ID_BIOS_VIDENTITY_PARAMS = "biosVIdentityParams"
MO_CLASS_ID_BIOS_VPROFILE = "biosVProfile"
MO_CLASS_ID_BIOS_VF_ACPI10_SUPPORT = "biosVfACPI10Support"
MO_CLASS_ID_BIOS_VF_ASPMSUPPORT = "biosVfASPMSupport"
MO_CLASS_ID_BIOS_VF_ALL_USBDEVICES = "biosVfAllUSBDevices"
MO_CLASS_ID_BIOS_VF_ALTITUDE = "biosVfAltitude"
MO_CLASS_ID_BIOS_VF_ASSERT_NMION_PERR = "biosVfAssertNMIOnPERR"
MO_CLASS_ID_BIOS_VF_ASSERT_NMION_SERR = "biosVfAssertNMIOnSERR"
MO_CLASS_ID_BIOS_VF_BOOT_OPTION_RETRY = "biosVfBootOptionRetry"
MO_CLASS_ID_BIOS_VF_CPUHARDWARE_POWER_MANAGEMENT = "biosVfCPUHardwarePowerManagement"
MO_CLASS_ID_BIOS_VF_CPUPERFORMANCE = "biosVfCPUPerformance"
MO_CLASS_ID_BIOS_VF_CPUPOWER_MANAGEMENT = "biosVfCPUPowerManagement"
MO_CLASS_ID_BIOS_VF_CONSISTENT_DEVICE_NAME_CONTROL = "biosVfConsistentDeviceNameControl"
MO_CLASS_ID_BIOS_VF_CONSOLE_REDIRECTION = "biosVfConsoleRedirection"
MO_CLASS_ID_BIOS_VF_CORE_MULTI_PROCESSING = "biosVfCoreMultiProcessing"
MO_CLASS_ID_BIOS_VF_DDR3_VOLTAGE_SELECTION = "biosVfDDR3VoltageSelection"
MO_CLASS_ID_BIOS_VF_DRAMCLOCK_THROTTLING = "biosVfDRAMClockThrottling"
MO_CLASS_ID_BIOS_VF_DIRECT_CACHE_ACCESS = "biosVfDirectCacheAccess"
MO_CLASS_ID_BIOS_VF_DRAM_REFRESH_RATE = "biosVfDramRefreshRate"
MO_CLASS_ID_BIOS_VF_ENHANCED_INTEL_SPEED_STEP_TECH = "biosVfEnhancedIntelSpeedStepTech"
MO_CLASS_ID_BIOS_VF_ENHANCED_POWER_CAPPING_SUPPORT = "biosVfEnhancedPowerCappingSupport"
MO_CLASS_ID_BIOS_VF_EXECUTE_DISABLE_BIT = "biosVfExecuteDisableBit"
MO_CLASS_ID_BIOS_VF_FRB2_TIMER = "biosVfFRB2Timer"
MO_CLASS_ID_BIOS_VF_FREQUENCY_FLOOR_OVERRIDE = "biosVfFrequencyFloorOverride"
MO_CLASS_ID_BIOS_VF_FRONT_PANEL_LOCKOUT = "biosVfFrontPanelLockout"
MO_CLASS_ID_BIOS_VF_INTEGRATED_GRAPHICS = "biosVfIntegratedGraphics"
MO_CLASS_ID_BIOS_VF_INTEGRATED_GRAPHICS_APERTURE_SIZE = "biosVfIntegratedGraphicsApertureSize"
MO_CLASS_ID_BIOS_VF_INTEL_ENTRY_SASRAIDMODULE = "biosVfIntelEntrySASRAIDModule"
MO_CLASS_ID_BIOS_VF_INTEL_HYPER_THREADING_TECH = "biosVfIntelHyperThreadingTech"
MO_CLASS_ID_BIOS_VF_INTEL_TRUSTED_EXECUTION_TECHNOLOGY = "biosVfIntelTrustedExecutionTechnology"
MO_CLASS_ID_BIOS_VF_INTEL_TURBO_BOOST_TECH = "biosVfIntelTurboBoostTech"
MO_CLASS_ID_BIOS_VF_INTEL_VTFOR_DIRECTED_IO = "biosVfIntelVTForDirectedIO"
MO_CLASS_ID_BIOS_VF_INTEL_VIRTUALIZATION_TECHNOLOGY = "biosVfIntelVirtualizationTechnology"
MO_CLASS_ID_BIOS_VF_INTERLEAVE_CONFIGURATION = "biosVfInterleaveConfiguration"
MO_CLASS_ID_BIOS_VF_LOCAL_X2_APIC = "biosVfLocalX2Apic"
MO_CLASS_ID_BIOS_VF_LV_DIMMSUPPORT = "biosVfLvDIMMSupport"
MO_CLASS_ID_BIOS_VF_MAX_VARIABLE_MTRRSETTING = "biosVfMaxVariableMTRRSetting"
MO_CLASS_ID_BIOS_VF_MAXIMUM_MEMORY_BELOW4_GB = "biosVfMaximumMemoryBelow4GB"
MO_CLASS_ID_BIOS_VF_MEMORY_MAPPED_IOABOVE4_GB = "biosVfMemoryMappedIOAbove4GB"
MO_CLASS_ID_BIOS_VF_MIRRORING_MODE = "biosVfMirroringMode"
MO_CLASS_ID_BIOS_VF_NUMAOPTIMIZED = "biosVfNUMAOptimized"
MO_CLASS_ID_BIOS_VF_OSBOOT_WATCHDOG_TIMER = "biosVfOSBootWatchdogTimer"
MO_CLASS_ID_BIOS_VF_OSBOOT_WATCHDOG_TIMER_POLICY = "biosVfOSBootWatchdogTimerPolicy"
MO_CLASS_ID_BIOS_VF_OSBOOT_WATCHDOG_TIMER_TIMEOUT = "biosVfOSBootWatchdogTimerTimeout"
MO_CLASS_ID_BIOS_VF_ONBOARD_GRAPHICS = "biosVfOnboardGraphics"
MO_CLASS_ID_BIOS_VF_ONBOARD_SATACONTROLLER = "biosVfOnboardSATAController"
MO_CLASS_ID_BIOS_VF_ONBOARD_STORAGE = "biosVfOnboardStorage"
MO_CLASS_ID_BIOS_VF_OPTION_ROMENABLE = "biosVfOptionROMEnable"
MO_CLASS_ID_BIOS_VF_OPTION_ROMLOAD = "biosVfOptionROMLoad"
MO_CLASS_ID_BIOS_VF_PCHSATAMODE = "biosVfPCHSATAMode"
MO_CLASS_ID_BIOS_VF_PCILOMPORTS_CONFIGURATION = "biosVfPCILOMPortsConfiguration"
MO_CLASS_ID_BIOS_VF_PCISLOT_LINK_SPEED = "biosVfPCISlotLinkSpeed"
MO_CLASS_ID_BIOS_VF_PCISLOT_OPTION_ROMENABLE = "biosVfPCISlotOptionROMEnable"
MO_CLASS_ID_BIOS_VF_POSTERROR_PAUSE = "biosVfPOSTErrorPause"
MO_CLASS_ID_BIOS_VF_PSTATECOORDINATION = "biosVfPSTATECoordination"
MO_CLASS_ID_BIOS_VF_PACKAGE_CSTATE_LIMIT = "biosVfPackageCStateLimit"
MO_CLASS_ID_BIOS_VF_PROCESSOR_C1_E = "biosVfProcessorC1E"
MO_CLASS_ID_BIOS_VF_PROCESSOR_C3_REPORT = "biosVfProcessorC3Report"
MO_CLASS_ID_BIOS_VF_PROCESSOR_C6_REPORT = "biosVfProcessorC6Report"
MO_CLASS_ID_BIOS_VF_PROCESSOR_C7_REPORT = "biosVfProcessorC7Report"
MO_CLASS_ID_BIOS_VF_PROCESSOR_CSTATE = "biosVfProcessorCState"
MO_CLASS_ID_BIOS_VF_PROCESSOR_ENERGY_CONFIGURATION = "biosVfProcessorEnergyConfiguration"
MO_CLASS_ID_BIOS_VF_PROCESSOR_PREFETCH_CONFIG = "biosVfProcessorPrefetchConfig"
MO_CLASS_ID_BIOS_VF_QPILINK_FREQUENCY_SELECT = "biosVfQPILinkFrequencySelect"
MO_CLASS_ID_BIOS_VF_QPISNOOP_MODE = "biosVfQPISnoopMode"
MO_CLASS_ID_BIOS_VF_QUIET_BOOT = "biosVfQuietBoot"
MO_CLASS_ID_BIOS_VF_RESUME_ON_ACPOWER_LOSS = "biosVfResumeOnACPowerLoss"
MO_CLASS_ID_BIOS_VF_SCRUB_POLICIES = "biosVfScrubPolicies"
MO_CLASS_ID_BIOS_VF_SELECT_MEMORY_RASCONFIGURATION = "biosVfSelectMemoryRASConfiguration"
MO_CLASS_ID_BIOS_VF_SERIAL_PORT_AENABLE = "biosVfSerialPortAEnable"
MO_CLASS_ID_BIOS_VF_SPARING_MODE = "biosVfSparingMode"
MO_CLASS_ID_BIOS_VF_SRIOV_CONFIG = "biosVfSriovConfig"
MO_CLASS_ID_BIOS_VF_TPMPENDING_OPERATION = "biosVfTPMPendingOperation"
MO_CLASS_ID_BIOS_VF_TPMSUPPORT = "biosVfTPMSupport"
MO_CLASS_ID_BIOS_VF_TRUSTED_PLATFORM_MODULE = "biosVfTrustedPlatformModule"
MO_CLASS_ID_BIOS_VF_UCSMBOOT_MODE_CONTROL = "biosVfUCSMBootModeControl"
MO_CLASS_ID_BIOS_VF_UCSMBOOT_ORDER_RULE_CONTROL = "biosVfUCSMBootOrderRuleControl"
MO_CLASS_ID_BIOS_VF_UEFIOSUSE_LEGACY_VIDEO = "biosVfUEFIOSUseLegacyVideo"
MO_CLASS_ID_BIOS_VF_USBBOOT_CONFIG = "biosVfUSBBootConfig"
MO_CLASS_ID_BIOS_VF_USBCONFIGURATION = "biosVfUSBConfiguration"
MO_CLASS_ID_BIOS_VF_USBFRONT_PANEL_ACCESS_LOCK = "biosVfUSBFrontPanelAccessLock"
MO_CLASS_ID_BIOS_VF_USBPORT_CONFIGURATION = "biosVfUSBPortConfiguration"
MO_CLASS_ID_BIOS_VF_USBSYSTEM_IDLE_POWER_OPTIMIZING_SETTING = "biosVfUSBSystemIdlePowerOptimizingSetting"
MO_CLASS_ID_BIOS_VF_VGAPRIORITY = "biosVfVGAPriority"
MO_CLASS_ID_BMC_SELCOUNTER = "bmcSELCounter"
MO_CLASS_ID_CALLHOME_ANONYMOUS_REPORTING = "callhomeAnonymousReporting"
MO_CLASS_ID_CALLHOME_DEST = "callhomeDest"
MO_CLASS_ID_CALLHOME_EP = "callhomeEp"
MO_CLASS_ID_CALLHOME_EP_FSM = "callhomeEpFsm"
MO_CLASS_ID_CALLHOME_EP_FSM_STAGE = "callhomeEpFsmStage"
MO_CLASS_ID_CALLHOME_EP_FSM_TASK = "callhomeEpFsmTask"
MO_CLASS_ID_CALLHOME_ITEM = "callhomeItem"
MO_CLASS_ID_CALLHOME_PERIODIC_SYSTEM_INVENTORY = "callhomePeriodicSystemInventory"
MO_CLASS_ID_CALLHOME_POLICY = "callhomePolicy"
MO_CLASS_ID_CALLHOME_PROFILE = "callhomeProfile"
MO_CLASS_ID_CALLHOME_SMTP = "callhomeSmtp"
MO_CLASS_ID_CALLHOME_SOURCE = "callhomeSource"
MO_CLASS_ID_CALLHOME_TEST_ALERT = "callhomeTestAlert"
MO_CLASS_ID_CAPABILITY_CATALOGUE = "capabilityCatalogue"
MO_CLASS_ID_CAPABILITY_CATALOGUE_FSM = "capabilityCatalogueFsm"
MO_CLASS_ID_CAPABILITY_CATALOGUE_FSM_STAGE = "capabilityCatalogueFsmStage"
MO_CLASS_ID_CAPABILITY_CATALOGUE_FSM_TASK = "capabilityCatalogueFsmTask"
MO_CLASS_ID_CAPABILITY_DEF = "capabilityDef"
MO_CLASS_ID_CAPABILITY_EP = "capabilityEp"
MO_CLASS_ID_CAPABILITY_FEATURE_LIMITS = "capabilityFeatureLimits"
MO_CLASS_ID_CAPABILITY_ITEM = "capabilityItem"
MO_CLASS_ID_CAPABILITY_MGMT_EXTENSION = "capabilityMgmtExtension"
MO_CLASS_ID_CAPABILITY_MGMT_EXTENSION_FSM = "capabilityMgmtExtensionFsm"
MO_CLASS_ID_CAPABILITY_MGMT_EXTENSION_FSM_STAGE = "capabilityMgmtExtensionFsmStage"
MO_CLASS_ID_CAPABILITY_MGMT_EXTENSION_FSM_TASK = "capabilityMgmtExtensionFsmTask"
MO_CLASS_ID_CAPABILITY_NETWORK_LIMITS = "capabilityNetworkLimits"
MO_CLASS_ID_CAPABILITY_PROVIDER = "capabilityProvider"
MO_CLASS_ID_CAPABILITY_STORAGE_LIMITS = "capabilityStorageLimits"
MO_CLASS_ID_CAPABILITY_SYSTEM_LIMITS = "capabilitySystemLimits"
MO_CLASS_ID_CAPABILITY_UPDATE = "capabilityUpdate"
MO_CLASS_ID_CAPABILITY_UPDATER = "capabilityUpdater"
MO_CLASS_ID_CAPABILITY_UPDATER_FSM = "capabilityUpdaterFsm"
MO_CLASS_ID_CAPABILITY_UPDATER_FSM_STAGE = "capabilityUpdaterFsmStage"
MO_CLASS_ID_CAPABILITY_UPDATER_FSM_TASK = "capabilityUpdaterFsmTask"
MO_CLASS_ID_CHANGE_CHANGED_OBJECT_REF = "changeChangedObjectRef"
MO_CLASS_ID_CIMCVMEDIA_ACTUAL_MOUNT_ENTRY = "cimcvmediaActualMountEntry"
MO_CLASS_ID_CIMCVMEDIA_ACTUAL_MOUNT_LIST = "cimcvmediaActualMountList"
MO_CLASS_ID_CIMCVMEDIA_CONFIG_MOUNT_ENTRY = "cimcvmediaConfigMountEntry"
MO_CLASS_ID_CIMCVMEDIA_EXT_MGMT_RULE_ENTRY = "cimcvmediaExtMgmtRuleEntry"
MO_CLASS_ID_CIMCVMEDIA_ITEM = "cimcvmediaItem"
MO_CLASS_ID_CIMCVMEDIA_MOUNT_CONFIG = "cimcvmediaMountConfig"
MO_CLASS_ID_CIMCVMEDIA_MOUNT_CONFIG_DEF = "cimcvmediaMountConfigDef"
MO_CLASS_ID_CIMCVMEDIA_MOUNT_CONFIG_POLICY = "cimcvmediaMountConfigPolicy"
MO_CLASS_ID_CIMCVMEDIA_MOUNT_INFO = "cimcvmediaMountInfo"
MO_CLASS_ID_CLITEST_TYPE_TEST = "clitestTypeTest"
MO_CLASS_ID_CLITEST_TYPE_TEST2 = "clitestTypeTest2"
MO_CLASS_ID_CLITEST_TYPE_TEST_CHILD = "clitestTypeTestChild"
MO_CLASS_ID_CLITEST_TYPE_TEST_PARENT = "clitestTypeTestParent"
MO_CLASS_ID_COMM_CIMC_WEB_SERVICE = "commCimcWebService"
MO_CLASS_ID_COMM_CIMXML = "commCimxml"
MO_CLASS_ID_COMM_CLIENT = "commClient"
MO_CLASS_ID_COMM_CLIENT_ITEM = "commClientItem"
MO_CLASS_ID_COMM_DATE_TIME = "commDateTime"
MO_CLASS_ID_COMM_DEFINITION = "commDefinition"
MO_CLASS_ID_COMM_DNS = "commDns"
MO_CLASS_ID_COMM_DNS_PROVIDER = "commDnsProvider"
MO_CLASS_ID_COMM_EVT_CHANNEL = "commEvtChannel"
MO_CLASS_ID_COMM_HTTP = "commHttp"
MO_CLASS_ID_COMM_HTTPS = "commHttps"
MO_CLASS_ID_COMM_ITEM = "commItem"
MO_CLASS_ID_COMM_LOCALE = "commLocale"
MO_CLASS_ID_COMM_NTP_PROVIDER = "commNtpProvider"
MO_CLASS_ID_COMM_SHELL = "commShell"
MO_CLASS_ID_COMM_SHELL_SVC_LIMITS = "commShellSvcLimits"
MO_CLASS_ID_COMM_SMASH_CLP = "commSmashCLP"
MO_CLASS_ID_COMM_SNMP = "commSnmp"
MO_CLASS_ID_COMM_SNMP_TRAP = "commSnmpTrap"
MO_CLASS_ID_COMM_SNMP_USER = "commSnmpUser"
MO_CLASS_ID_COMM_SSH = "commSsh"
MO_CLASS_ID_COMM_SVC = "commSvc"
MO_CLASS_ID_COMM_SVC_CHANNEL = "commSvcChannel"
MO_CLASS_ID_COMM_SVC_EP = "commSvcEp"
MO_CLASS_ID_COMM_SVC_EP_FSM = "commSvcEpFsm"
MO_CLASS_ID_COMM_SVC_EP_FSM_STAGE = "commSvcEpFsmStage"
MO_CLASS_ID_COMM_SVC_EP_FSM_TASK = "commSvcEpFsmTask"
MO_CLASS_ID_COMM_SVC_LIMITS = "commSvcLimits"
MO_CLASS_ID_COMM_SVC_POLICY = "commSvcPolicy"
MO_CLASS_ID_COMM_SYSLOG = "commSyslog"
MO_CLASS_ID_COMM_SYSLOG_CLIENT = "commSyslogClient"
MO_CLASS_ID_COMM_SYSLOG_CONSOLE = "commSyslogConsole"
MO_CLASS_ID_COMM_SYSLOG_FILE = "commSyslogFile"
MO_CLASS_ID_COMM_SYSLOG_MONITOR = "commSyslogMonitor"
MO_CLASS_ID_COMM_SYSLOG_SOURCE = "commSyslogSource"
MO_CLASS_ID_COMM_TELNET = "commTelnet"
MO_CLASS_ID_COMM_WEB = "commWeb"
MO_CLASS_ID_COMM_WEB_CHANNEL = "commWebChannel"
MO_CLASS_ID_COMM_WEB_SVC_LIMITS = "commWebSvcLimits"
MO_CLASS_ID_COMM_WSMAN = "commWsman"
MO_CLASS_ID_COMM_XML_CL_CONN_POLICY = "commXmlClConnPolicy"
MO_CLASS_ID_COMPUTE_ABOARD = "computeABoard"
MO_CLASS_ID_COMPUTE_ACHASSIS_DISC_POLICY = "computeAChassisDiscPolicy"
MO_CLASS_ID_COMPUTE_AUTOCONFIG_POLICY = "computeAutoconfigPolicy"
MO_CLASS_ID_COMPUTE_BEH_CAP = "computeBehCap"
MO_CLASS_ID_COMPUTE_BLADE = "computeBlade"
MO_CLASS_ID_COMPUTE_BLADE_DISC_POLICY = "computeBladeDiscPolicy"
MO_CLASS_ID_COMPUTE_BLADE_EP = "computeBladeEp"
MO_CLASS_ID_COMPUTE_BLADE_FSM = "computeBladeFsm"
MO_CLASS_ID_COMPUTE_BLADE_FSM_STAGE = "computeBladeFsmStage"
MO_CLASS_ID_COMPUTE_BLADE_FSM_TASK = "computeBladeFsmTask"
MO_CLASS_ID_COMPUTE_BLADE_INHERIT_POLICY = "computeBladeInheritPolicy"
MO_CLASS_ID_COMPUTE_BLADE_POS_QUAL = "computeBladePosQual"
MO_CLASS_ID_COMPUTE_BOARD = "computeBoard"
MO_CLASS_ID_COMPUTE_BOARD_CONNECTOR = "computeBoardConnector"
MO_CLASS_ID_COMPUTE_BOARD_CONTROLLER = "computeBoardController"
MO_CLASS_ID_COMPUTE_CARTRIDGE = "computeCartridge"
MO_CLASS_ID_COMPUTE_CHASSIS_CONN_POLICY = "computeChassisConnPolicy"
MO_CLASS_ID_COMPUTE_CHASSIS_DISC_POLICY = "computeChassisDiscPolicy"
MO_CLASS_ID_COMPUTE_CHASSIS_QUAL = "computeChassisQual"
MO_CLASS_ID_COMPUTE_COMPUTE_DISC_POLICY = "computeComputeDiscPolicy"
MO_CLASS_ID_COMPUTE_CONFIG_POLICY = "computeConfigPolicy"
MO_CLASS_ID_COMPUTE_CONSTRAINT_DEF = "computeConstraintDef"
MO_CLASS_ID_COMPUTE_CONTAINER = "computeContainer"
MO_CLASS_ID_COMPUTE_DEFAULTS = "computeDefaults"
MO_CLASS_ID_COMPUTE_DISC_POLICY = "computeDiscPolicy"
MO_CLASS_ID_COMPUTE_EXT_BOARD = "computeExtBoard"
MO_CLASS_ID_COMPUTE_FW_SYNC_ACK = "computeFwSyncAck"
MO_CLASS_ID_COMPUTE_HEALTH_LED_SENSOR_ALARM = "computeHealthLedSensorAlarm"
MO_CLASS_ID_COMPUTE_IOHUB = "computeIOHub"
MO_CLASS_ID_COMPUTE_IOHUB_ENV_STATS = "computeIOHubEnvStats"
MO_CLASS_ID_COMPUTE_IOHUB_ENV_STATS_HIST = "computeIOHubEnvStatsHist"
MO_CLASS_ID_COMPUTE_INIT_CONFIG_POLICY = "computeInitConfigPolicy"
MO_CLASS_ID_COMPUTE_INSTANCE_ID_QUAL = "computeInstanceIdQual"
MO_CLASS_ID_COMPUTE_ITEM = "computeItem"
MO_CLASS_ID_COMPUTE_KVM_MGMT_POLICY = "computeKvmMgmtPolicy"
MO_CLASS_ID_COMPUTE_LOGICAL = "computeLogical"
MO_CLASS_ID_COMPUTE_MB_POWER_STATS = "computeMbPowerStats"
MO_CLASS_ID_COMPUTE_MB_POWER_STATS_HIST = "computeMbPowerStatsHist"
MO_CLASS_ID_COMPUTE_MB_TEMP_STATS = "computeMbTempStats"
MO_CLASS_ID_COMPUTE_MB_TEMP_STATS_HIST = "computeMbTempStatsHist"
MO_CLASS_ID_COMPUTE_MEMORY_CONFIG_POLICY = "computeMemoryConfigPolicy"
MO_CLASS_ID_COMPUTE_MEMORY_CONFIGURATION = "computeMemoryConfiguration"
MO_CLASS_ID_COMPUTE_MEMORY_UNIT_CONSTRAINT_DEF = "computeMemoryUnitConstraintDef"
MO_CLASS_ID_COMPUTE_PCIE_FATAL_COMPLETION_STATS = "computePCIeFatalCompletionStats"
MO_CLASS_ID_COMPUTE_PCIE_FATAL_PROTOCOL_STATS = "computePCIeFatalProtocolStats"
MO_CLASS_ID_COMPUTE_PCIE_FATAL_RECEIVE_STATS = "computePCIeFatalReceiveStats"
MO_CLASS_ID_COMPUTE_PCIE_FATAL_STATS = "computePCIeFatalStats"
MO_CLASS_ID_COMPUTE_PARTITION = "computePartition"
MO_CLASS_ID_COMPUTE_PCI_CAP = "computePciCap"
MO_CLASS_ID_COMPUTE_PCI_SLOT_SCAN_DEF = "computePciSlotScanDef"
MO_CLASS_ID_COMPUTE_PHYSICAL = "computePhysical"
MO_CLASS_ID_COMPUTE_PHYSICAL_ASSOC_CTX = "computePhysicalAssocCtx"
MO_CLASS_ID_COMPUTE_PHYSICAL_FSM = "computePhysicalFsm"
MO_CLASS_ID_COMPUTE_PHYSICAL_FSM_STAGE = "computePhysicalFsmStage"
MO_CLASS_ID_COMPUTE_PHYSICAL_FSM_TASK = "computePhysicalFsmTask"
MO_CLASS_ID_COMPUTE_PHYSICAL_QUAL = "computePhysicalQual"
MO_CLASS_ID_COMPUTE_PLATFORM = "computePlatform"
MO_CLASS_ID_COMPUTE_PNU_OSIMAGE = "computePnuOSImage"
MO_CLASS_ID_COMPUTE_POOL = "computePool"
MO_CLASS_ID_COMPUTE_POOL_POLICY_REF = "computePoolPolicyRef"
MO_CLASS_ID_COMPUTE_POOLABLE = "computePoolable"
MO_CLASS_ID_COMPUTE_POOLED = "computePooled"
MO_CLASS_ID_COMPUTE_POOLED_ENCLOSURE_COMPUTE_SLOT = "computePooledEnclosureComputeSlot"
MO_CLASS_ID_COMPUTE_POOLED_PHYSICAL = "computePooledPhysical"
MO_CLASS_ID_COMPUTE_POOLED_RACK_UNIT = "computePooledRackUnit"
MO_CLASS_ID_COMPUTE_POOLED_SLOT = "computePooledSlot"
MO_CLASS_ID_COMPUTE_POOLING_POLICY = "computePoolingPolicy"
MO_CLASS_ID_COMPUTE_PSU_CONTROL = "computePsuControl"
MO_CLASS_ID_COMPUTE_PSU_DEF = "computePsuDef"
MO_CLASS_ID_COMPUTE_PSU_POLICY = "computePsuPolicy"
MO_CLASS_ID_COMPUTE_QUAL = "computeQual"
MO_CLASS_ID_COMPUTE_QUAL_BASE = "computeQualBase"
MO_CLASS_ID_COMPUTE_QUAL_ITEM = "computeQualItem"
MO_CLASS_ID_COMPUTE_QUALIFIED_POLICY = "computeQualifiedPolicy"
MO_CLASS_ID_COMPUTE_RACK_POS_QUAL = "computeRackPosQual"
MO_CLASS_ID_COMPUTE_RACK_QUAL = "computeRackQual"
MO_CLASS_ID_COMPUTE_RACK_UNIT = "computeRackUnit"
MO_CLASS_ID_COMPUTE_RACK_UNIT_FSM = "computeRackUnitFsm"
MO_CLASS_ID_COMPUTE_RACK_UNIT_FSM_STAGE = "computeRackUnitFsmStage"
MO_CLASS_ID_COMPUTE_RACK_UNIT_FSM_TASK = "computeRackUnitFsmTask"
MO_CLASS_ID_COMPUTE_RACK_UNIT_MB_TEMP_STATS = "computeRackUnitMbTempStats"
MO_CLASS_ID_COMPUTE_RACK_UNIT_MB_TEMP_STATS_HIST = "computeRackUnitMbTempStatsHist"
MO_CLASS_ID_COMPUTE_RTC_BATTERY = "computeRtcBattery"
MO_CLASS_ID_COMPUTE_SCRUB_POLICY = "computeScrubPolicy"
MO_CLASS_ID_COMPUTE_SERVER = "computeServer"
MO_CLASS_ID_COMPUTE_SERVER_DISC_POLICY = "computeServerDiscPolicy"
MO_CLASS_ID_COMPUTE_SERVER_DISC_POLICY_FSM = "computeServerDiscPolicyFsm"
MO_CLASS_ID_COMPUTE_SERVER_DISC_POLICY_FSM_STAGE = "computeServerDiscPolicyFsmStage"
MO_CLASS_ID_COMPUTE_SERVER_DISC_POLICY_FSM_TASK = "computeServerDiscPolicyFsmTask"
MO_CLASS_ID_COMPUTE_SERVER_MGMT_POLICY = "computeServerMgmtPolicy"
MO_CLASS_ID_COMPUTE_SERVER_TYPE_CAP = "computeServerTypeCap"
MO_CLASS_ID_COMPUTE_SERVER_UNIT = "computeServerUnit"
MO_CLASS_ID_COMPUTE_SERVER_UNIT_FSM = "computeServerUnitFsm"
MO_CLASS_ID_COMPUTE_SERVER_UNIT_FSM_STAGE = "computeServerUnitFsmStage"
MO_CLASS_ID_COMPUTE_SERVER_UNIT_FSM_TASK = "computeServerUnitFsmTask"
MO_CLASS_ID_COMPUTE_SLOT_ENTITY = "computeSlotEntity"
MO_CLASS_ID_COMPUTE_SLOT_QUAL = "computeSlotQual"
MO_CLASS_ID_COMPUTE_VIRTUAL = "computeVirtual"
MO_CLASS_ID_COMPUTE_VIRTUAL_CONTAINER = "computeVirtualContainer"
MO_CLASS_ID_CONDITION_IMMUTABLE = "conditionImmutable"
MO_CLASS_ID_CONDITION_INFO = "conditionInfo"
MO_CLASS_ID_CONDITION_LOG = "conditionLog"
MO_CLASS_ID_CONDITION_LOGGABLE = "conditionLoggable"
MO_CLASS_ID_CONDITION_MULTI_INSTANCE_IMMUTABLE = "conditionMultiInstanceImmutable"
MO_CLASS_ID_CONDITION_MUTABLE = "conditionMutable"
MO_CLASS_ID_CONDITION_POLICY = "conditionPolicy"
MO_CLASS_ID_CONDITION_REPORTABLE = "conditionReportable"
MO_CLASS_ID_CONFIG_IMPACT = "configImpact"
MO_CLASS_ID_CONFIG_IMPACT_RESPONSE = "configImpactResponse"
MO_CLASS_ID_CONFIG_MANAGED_EP_IMPACT_RESPONSE = "configManagedEpImpactResponse"
MO_CLASS_ID_CONFIG_SORTER = "configSorter"
MO_CLASS_ID_DCX_FCOE_VIF_EP = "dcxFcoeVifEp"
MO_CLASS_ID_DCX_NS = "dcxNs"
MO_CLASS_ID_DCX_UNIVERSE = "dcxUniverse"
MO_CLASS_ID_DCX_VIF = "dcxVIf"
MO_CLASS_ID_DCX_VC = "dcxVc"
MO_CLASS_ID_DCX_VIF_EP = "dcxVifEp"
MO_CLASS_ID_DHCP_ACQUIRED = "dhcpAcquired"
MO_CLASS_ID_DHCP_INST = "dhcpInst"
MO_CLASS_ID_DHCP_LEASE = "dhcpLease"
MO_CLASS_ID_DIAG_BLADE_TEST = "diagBladeTest"
MO_CLASS_ID_DIAG_CTRL = "diagCtrl"
MO_CLASS_ID_DIAG_NETWORK_TEST = "diagNetworkTest"
MO_CLASS_ID_DIAG_RSLT = "diagRslt"
MO_CLASS_ID_DIAG_RUN_POLICY = "diagRunPolicy"
MO_CLASS_ID_DIAG_SRV_CAP_PROVIDER = "diagSrvCapProvider"
MO_CLASS_ID_DIAG_SRV_CTRL = "diagSrvCtrl"
MO_CLASS_ID_DIAG_TEST = "diagTest"
MO_CLASS_ID_DOMAIN_ENVIRONMENT_FEATURE = "domainEnvironmentFeature"
MO_CLASS_ID_DOMAIN_ENVIRONMENT_FEATURE_CONT = "domainEnvironmentFeatureCont"
MO_CLASS_ID_DOMAIN_ENVIRONMENT_PARAM = "domainEnvironmentParam"
MO_CLASS_ID_DOMAIN_FEATURE = "domainFeature"
MO_CLASS_ID_DOMAIN_FEATURE_CONT = "domainFeatureCont"
MO_CLASS_ID_DOMAIN_NETWORK_FEATURE = "domainNetworkFeature"
MO_CLASS_ID_DOMAIN_NETWORK_FEATURE_CONT = "domainNetworkFeatureCont"
MO_CLASS_ID_DOMAIN_NETWORK_PARAM = "domainNetworkParam"
MO_CLASS_ID_DOMAIN_PARAMETER = "domainParameter"
MO_CLASS_ID_DOMAIN_SERVER_FEATURE = "domainServerFeature"
MO_CLASS_ID_DOMAIN_SERVER_FEATURE_CONT = "domainServerFeatureCont"
MO_CLASS_ID_DOMAIN_SERVER_PARAM = "domainServerParam"
MO_CLASS_ID_DOMAIN_STORAGE_FEATURE = "domainStorageFeature"
MO_CLASS_ID_DOMAIN_STORAGE_FEATURE_CONT = "domainStorageFeatureCont"
MO_CLASS_ID_DOMAIN_STORAGE_PARAM = "domainStorageParam"
MO_CLASS_ID_DPSEC_MAC = "dpsecMac"
MO_CLASS_ID_DUPE_SCOPE = "dupeScope"
MO_CLASS_ID_DUPE_SCOPE_RESULT = "dupeScopeResult"
MO_CLASS_ID_EPQOS_DEFINITION = "epqosDefinition"
MO_CLASS_ID_EPQOS_DEFINITION_DEL_TASK = "epqosDefinitionDelTask"
MO_CLASS_ID_EPQOS_DEFINITION_DEL_TASK_FSM = "epqosDefinitionDelTaskFsm"
MO_CLASS_ID_EPQOS_DEFINITION_DEL_TASK_FSM_STAGE = "epqosDefinitionDelTaskFsmStage"
MO_CLASS_ID_EPQOS_DEFINITION_DEL_TASK_FSM_TASK = "epqosDefinitionDelTaskFsmTask"
MO_CLASS_ID_EPQOS_DEFINITION_FSM = "epqosDefinitionFsm"
MO_CLASS_ID_EPQOS_DEFINITION_FSM_STAGE = "epqosDefinitionFsmStage"
MO_CLASS_ID_EPQOS_DEFINITION_FSM_TASK = "epqosDefinitionFsmTask"
MO_CLASS_ID_EPQOS_EGRESS = "epqosEgress"
MO_CLASS_ID_EPQOS_ITEM = "epqosItem"
MO_CLASS_ID_EQUIPMENT_ADAPTOR_CONN_DEF = "equipmentAdaptorConnDef"
MO_CLASS_ID_EQUIPMENT_ADAPTOR_DEF = "equipmentAdaptorDef"
MO_CLASS_ID_EQUIPMENT_ADVANCED_BOOT_ORDER = "equipmentAdvancedBootOrder"
MO_CLASS_ID_EQUIPMENT_ASSOC_CTX = "equipmentAssocCtx"
MO_CLASS_ID_EQUIPMENT_AUTO_NEGOTIATE_CAP = "equipmentAutoNegotiateCap"
MO_CLASS_ID_EQUIPMENT_BASE_BOARD_CAP_PROVIDER = "equipmentBaseBoardCapProvider"
MO_CLASS_ID_EQUIPMENT_BEACON_CAP_PROVIDER = "equipmentBeaconCapProvider"
MO_CLASS_ID_EQUIPMENT_BEACON_LED = "equipmentBeaconLed"
MO_CLASS_ID_EQUIPMENT_BEACON_LED_FSM = "equipmentBeaconLedFsm"
MO_CLASS_ID_EQUIPMENT_BEACON_LED_FSM_STAGE = "equipmentBeaconLedFsmStage"
MO_CLASS_ID_EQUIPMENT_BEACON_LED_FSM_TASK = "equipmentBeaconLedFsmTask"
MO_CLASS_ID_EQUIPMENT_BEH_CAP = "equipmentBehCap"
MO_CLASS_ID_EQUIPMENT_BIOS_DEF = "equipmentBiosDef"
MO_CLASS_ID_EQUIPMENT_BLADE_AGLIBRARY = "equipmentBladeAGLibrary"
MO_CLASS_ID_EQUIPMENT_BLADE_AGGREGATION_CAP_REF = "equipmentBladeAggregationCapRef"
MO_CLASS_ID_EQUIPMENT_BLADE_BIOS_CAP_PROVIDER = "equipmentBladeBiosCapProvider"
MO_CLASS_ID_EQUIPMENT_BLADE_CAP_PROVIDER = "equipmentBladeCapProvider"
MO_CLASS_ID_EQUIPMENT_BLADE_CAP_PROVIDER_TYPE_DEF = "equipmentBladeCapProviderTypeDef"
MO_CLASS_ID_EQUIPMENT_BLADE_CONN_DEF = "equipmentBladeConnDef"
MO_CLASS_ID_EQUIPMENT_BLADE_IOMCONN_DEF = "equipmentBladeIOMConnDef"
MO_CLASS_ID_EQUIPMENT_BLADE_SWITCH_CONN_DEF = "equipmentBladeSwitchConnDef"
MO_CLASS_ID_EQUIPMENT_BOARD_CONTROLLER_DEF = "equipmentBoardControllerDef"
MO_CLASS_ID_EQUIPMENT_BREAKOUT_CAP = "equipmentBreakoutCap"
MO_CLASS_ID_EQUIPMENT_CARD = "equipmentCard"
MO_CLASS_ID_EQUIPMENT_CARTRIDGE_CAP_PROVIDER = "equipmentCartridgeCapProvider"
MO_CLASS_ID_EQUIPMENT_CATALOG_CAP_PROVIDER = "equipmentCatalogCapProvider"
MO_CLASS_ID_EQUIPMENT_CHASSIS = "equipmentChassis"
MO_CLASS_ID_EQUIPMENT_CHASSIS_CAP_PROVIDER = "equipmentChassisCapProvider"
MO_CLASS_ID_EQUIPMENT_CHASSIS_FSM = "equipmentChassisFsm"
MO_CLASS_ID_EQUIPMENT_CHASSIS_FSM_STAGE = "equipmentChassisFsmStage"
MO_CLASS_ID_EQUIPMENT_CHASSIS_FSM_TASK = "equipmentChassisFsmTask"
MO_CLASS_ID_EQUIPMENT_CHASSIS_STATS = "equipmentChassisStats"
MO_CLASS_ID_EQUIPMENT_CHASSIS_STATS_HIST = "equipmentChassisStatsHist"
MO_CLASS_ID_EQUIPMENT_CIMC_VMEDIA = "equipmentCimcVmedia"
MO_CLASS_ID_EQUIPMENT_COMPUTE_PHYSICAL_CAP_PROVIDER = "equipmentComputePhysicalCapProvider"
MO_CLASS_ID_EQUIPMENT_DBG_PLUGIN_CAP_PROVIDER = "equipmentDbgPluginCapProvider"
MO_CLASS_ID_EQUIPMENT_DIMM_ENTRY = "equipmentDimmEntry"
MO_CLASS_ID_EQUIPMENT_DIMM_MAPPING = "equipmentDimmMapping"
MO_CLASS_ID_EQUIPMENT_DISCOVERY_CAP = "equipmentDiscoveryCap"
MO_CLASS_ID_EQUIPMENT_DOWNGRADE_CONSTRAINT = "equipmentDowngradeConstraint"
MO_CLASS_ID_EQUIPMENT_ENV_SENSOR = "equipmentEnvSensor"
MO_CLASS_ID_EQUIPMENT_FAN = "equipmentFan"
MO_CLASS_ID_EQUIPMENT_FAN_MODULE = "equipmentFanModule"
MO_CLASS_ID_EQUIPMENT_FAN_MODULE_CAP_PROVIDER = "equipmentFanModuleCapProvider"
MO_CLASS_ID_EQUIPMENT_FAN_MODULE_DEF = "equipmentFanModuleDef"
MO_CLASS_ID_EQUIPMENT_FAN_MODULE_STATS = "equipmentFanModuleStats"
MO_CLASS_ID_EQUIPMENT_FAN_MODULE_STATS_HIST = "equipmentFanModuleStatsHist"
MO_CLASS_ID_EQUIPMENT_FAN_STATS = "equipmentFanStats"
MO_CLASS_ID_EQUIPMENT_FAN_STATS_HIST = "equipmentFanStatsHist"
MO_CLASS_ID_EQUIPMENT_FEX = "equipmentFex"
MO_CLASS_ID_EQUIPMENT_FEX_CAP_PROVIDER = "equipmentFexCapProvider"
MO_CLASS_ID_EQUIPMENT_FEX_ENV_STATS = "equipmentFexEnvStats"
MO_CLASS_ID_EQUIPMENT_FEX_ENV_STATS_HIST = "equipmentFexEnvStatsHist"
MO_CLASS_ID_EQUIPMENT_FEX_FSM = "equipmentFexFsm"
MO_CLASS_ID_EQUIPMENT_FEX_FSM_STAGE = "equipmentFexFsmStage"
MO_CLASS_ID_EQUIPMENT_FEX_FSM_TASK = "equipmentFexFsmTask"
MO_CLASS_ID_EQUIPMENT_FEX_POWER_SUMMARY = "equipmentFexPowerSummary"
MO_CLASS_ID_EQUIPMENT_FEX_POWER_SUMMARY_HIST = "equipmentFexPowerSummaryHist"
MO_CLASS_ID_EQUIPMENT_FEX_PSU_INPUT_STATS = "equipmentFexPsuInputStats"
MO_CLASS_ID_EQUIPMENT_FEX_PSU_INPUT_STATS_HIST = "equipmentFexPsuInputStatsHist"
MO_CLASS_ID_EQUIPMENT_FEX_SYSTEM_STATS = "equipmentFexSystemStats"
MO_CLASS_ID_EQUIPMENT_FEX_SYSTEM_STATS_HIST = "equipmentFexSystemStatsHist"
MO_CLASS_ID_EQUIPMENT_FIRMWARE_CONSTRAINT = "equipmentFirmwareConstraint"
MO_CLASS_ID_EQUIPMENT_FLASH_LIFE = "equipmentFlashLife"
MO_CLASS_ID_EQUIPMENT_FRU_CAP_PROVIDER = "equipmentFruCapProvider"
MO_CLASS_ID_EQUIPMENT_GEM_CAP_PROVIDER = "equipmentGemCapProvider"
MO_CLASS_ID_EQUIPMENT_GEM_PORT_CAP = "equipmentGemPortCap"
MO_CLASS_ID_EQUIPMENT_GRAPHICS_CARD_CAP_PROVIDER = "equipmentGraphicsCardCapProvider"
MO_CLASS_ID_EQUIPMENT_GRAPHICS_CARD_CAP_REF = "equipmentGraphicsCardCapRef"
MO_CLASS_ID_EQUIPMENT_HDDFAULT_MON_DEF = "equipmentHDDFaultMonDef"
MO_CLASS_ID_EQUIPMENT_HEALTH_LED = "equipmentHealthLed"
MO_CLASS_ID_EQUIPMENT_HOLDER = "equipmentHolder"
MO_CLASS_ID_EQUIPMENT_HOLDER_CAP_PROVIDER = "equipmentHolderCapProvider"
MO_CLASS_ID_EQUIPMENT_HOST_IF_CAP_PROVIDER = "equipmentHostIfCapProvider"
MO_CLASS_ID_EQUIPMENT_HW_CAP_PROVIDER = "equipmentHwCapProvider"
MO_CLASS_ID_EQUIPMENT_IOCARD = "equipmentIOCard"
MO_CLASS_ID_EQUIPMENT_IOCARD_BASE = "equipmentIOCardBase"
MO_CLASS_ID_EQUIPMENT_IOCARD_BASE_FSM = "equipmentIOCardBaseFsm"
MO_CLASS_ID_EQUIPMENT_IOCARD_BASE_FSM_STAGE = "equipmentIOCardBaseFsmStage"
MO_CLASS_ID_EQUIPMENT_IOCARD_BASE_FSM_TASK = "equipmentIOCardBaseFsmTask"
MO_CLASS_ID_EQUIPMENT_IOCARD_CAP_PROVIDER = "equipmentIOCardCapProvider"
MO_CLASS_ID_EQUIPMENT_IOCARD_FSM = "equipmentIOCardFsm"
MO_CLASS_ID_EQUIPMENT_IOCARD_FSM_STAGE = "equipmentIOCardFsmStage"
MO_CLASS_ID_EQUIPMENT_IOCARD_FSM_TASK = "equipmentIOCardFsmTask"
MO_CLASS_ID_EQUIPMENT_IOCARD_STATS = "equipmentIOCardStats"
MO_CLASS_ID_EQUIPMENT_IOCARD_STATS_HIST = "equipmentIOCardStatsHist"
MO_CLASS_ID_EQUIPMENT_IOCARD_TYPE_DEF = "equipmentIOCardTypeDef"
MO_CLASS_ID_EQUIPMENT_INBAND_MGMT_CAP = "equipmentInbandMgmtCap"
MO_CLASS_ID_EQUIPMENT_INDICATOR_LED = "equipmentIndicatorLed"
MO_CLASS_ID_EQUIPMENT_INTEGRATED_COMPONENT_CAP_PROVIDER = "equipmentIntegratedComponentCapProvider"
MO_CLASS_ID_EQUIPMENT_ITEM = "equipmentItem"
MO_CLASS_ID_EQUIPMENT_KVM_MGMT_CAP = "equipmentKvmMgmtCap"
MO_CLASS_ID_EQUIPMENT_LED = "equipmentLed"
MO_CLASS_ID_EQUIPMENT_LOCAL_DISK_CAP_PROVIDER = "equipmentLocalDiskCapProvider"
MO_CLASS_ID_EQUIPMENT_LOCAL_DISK_CONTROLLER_CAP_PROVIDER = "equipmentLocalDiskControllerCapProvider"
MO_CLASS_ID_EQUIPMENT_LOCAL_DISK_CONTROLLER_CAP_REF = "equipmentLocalDiskControllerCapRef"
MO_CLASS_ID_EQUIPMENT_LOCAL_DISK_CONTROLLER_DEF = "equipmentLocalDiskControllerDef"
MO_CLASS_ID_EQUIPMENT_LOCAL_DISK_CONTROLLER_TYPE_DEF = "equipmentLocalDiskControllerTypeDef"
MO_CLASS_ID_EQUIPMENT_LOCAL_DISK_DEF = "equipmentLocalDiskDef"
MO_CLASS_ID_EQUIPMENT_LOCATOR_LED = "equipmentLocatorLed"
MO_CLASS_ID_EQUIPMENT_LOCATOR_LED_FSM = "equipmentLocatorLedFsm"
MO_CLASS_ID_EQUIPMENT_LOCATOR_LED_FSM_STAGE = "equipmentLocatorLedFsmStage"
MO_CLASS_ID_EQUIPMENT_LOCATOR_LED_FSM_TASK = "equipmentLocatorLedFsmTask"
MO_CLASS_ID_EQUIPMENT_MANUFACTURING_DEF = "equipmentManufacturingDef"
MO_CLASS_ID_EQUIPMENT_MEMORY_UNIT_CAP_PROVIDER = "equipmentMemoryUnitCapProvider"
MO_CLASS_ID_EQUIPMENT_MEMORY_UNIT_DISCOVERY_MODIFIER_DEF = "equipmentMemoryUnitDiscoveryModifierDef"
MO_CLASS_ID_EQUIPMENT_MGMT_CAP_PROVIDER = "equipmentMgmtCapProvider"
MO_CLASS_ID_EQUIPMENT_MGMT_EXT_CAP_PROVIDER = "equipmentMgmtExtCapProvider"
MO_CLASS_ID_EQUIPMENT_NETWORK_ELEMENT_FAN_STATS = "equipmentNetworkElementFanStats"
MO_CLASS_ID_EQUIPMENT_NETWORK_ELEMENT_FAN_STATS_HIST = "equipmentNetworkElementFanStatsHist"
MO_CLASS_ID_EQUIPMENT_NON_COPPER_PASSIVE_CAP = "equipmentNonCopperPassiveCap"
MO_CLASS_ID_EQUIPMENT_POST = "equipmentPOST"
MO_CLASS_ID_EQUIPMENT_POSTCODE = "equipmentPOSTCode"
MO_CLASS_ID_EQUIPMENT_POSTCODE_CONTAINER = "equipmentPOSTCodeContainer"
MO_CLASS_ID_EQUIPMENT_POSTCODE_DATA = "equipmentPOSTCodeData"
MO_CLASS_ID_EQUIPMENT_POSTCODE_REPORTER = "equipmentPOSTCodeReporter"
MO_CLASS_ID_EQUIPMENT_POSTCODE_TEMPLATE = "equipmentPOSTCodeTemplate"
MO_CLASS_ID_EQUIPMENT_PCI_DEF = "equipmentPciDef"
MO_CLASS_ID_EQUIPMENT_PFC_MMU_CAP = "equipmentPfcMmuCap"
MO_CLASS_ID_EQUIPMENT_PHYS_DEVICES_PER_BOARD = "equipmentPhysDevicesPerBoard"
MO_CLASS_ID_EQUIPMENT_PHYSICAL_DEF = "equipmentPhysicalDef"
MO_CLASS_ID_EQUIPMENT_PICTURE = "equipmentPicture"
MO_CLASS_ID_EQUIPMENT_PORT_CAP = "equipmentPortCap"
MO_CLASS_ID_EQUIPMENT_PORT_GROUP_AGGREGATION_DEF = "equipmentPortGroupAggregationDef"
MO_CLASS_ID_EQUIPMENT_PORT_GROUP_DEF = "equipmentPortGroupDef"
MO_CLASS_ID_EQUIPMENT_PORT_GROUP_SW_COMPLEX_DEF = "equipmentPortGroupSwComplexDef"
MO_CLASS_ID_EQUIPMENT_PORT_SW_COMPLEX_REF = "equipmentPortSwComplexRef"
MO_CLASS_ID_EQUIPMENT_POWER_CAP_DEF = "equipmentPowerCapDef"
MO_CLASS_ID_EQUIPMENT_PROCESSOR_UNIT_CAP_PROVIDER = "equipmentProcessorUnitCapProvider"
MO_CLASS_ID_EQUIPMENT_PROCESSOR_UNIT_DEF = "equipmentProcessorUnitDef"
MO_CLASS_ID_EQUIPMENT_PSU = "equipmentPsu"
MO_CLASS_ID_EQUIPMENT_PSU_CAP_PROVIDER = "equipmentPsuCapProvider"
MO_CLASS_ID_EQUIPMENT_PSU_DEF = "equipmentPsuDef"
MO_CLASS_ID_EQUIPMENT_PSU_FSM = "equipmentPsuFsm"
MO_CLASS_ID_EQUIPMENT_PSU_FSM_STAGE = "equipmentPsuFsmStage"
MO_CLASS_ID_EQUIPMENT_PSU_FSM_TASK = "equipmentPsuFsmTask"
MO_CLASS_ID_EQUIPMENT_PSU_INPUT_STATS = "equipmentPsuInputStats"
MO_CLASS_ID_EQUIPMENT_PSU_INPUT_STATS_HIST = "equipmentPsuInputStatsHist"
MO_CLASS_ID_EQUIPMENT_PSU_OUTPUT_STATS = "equipmentPsuOutputStats"
MO_CLASS_ID_EQUIPMENT_PSU_OUTPUT_STATS_HIST = "equipmentPsuOutputStatsHist"
MO_CLASS_ID_EQUIPMENT_PSU_STATS = "equipmentPsuStats"
MO_CLASS_ID_EQUIPMENT_PSU_STATS_HIST = "equipmentPsuStatsHist"
MO_CLASS_ID_EQUIPMENT_RACK_FAN_MODULE_DEF = "equipmentRackFanModuleDef"
MO_CLASS_ID_EQUIPMENT_RACK_UNIT_CAP_PROVIDER = "equipmentRackUnitCapProvider"
MO_CLASS_ID_EQUIPMENT_RACK_UNIT_FAN_STATS = "equipmentRackUnitFanStats"
MO_CLASS_ID_EQUIPMENT_RACK_UNIT_FAN_STATS_HIST = "equipmentRackUnitFanStatsHist"
MO_CLASS_ID_EQUIPMENT_RACK_UNIT_PSU_STATS = "equipmentRackUnitPsuStats"
MO_CLASS_ID_EQUIPMENT_RACK_UNIT_PSU_STATS_HIST = "equipmentRackUnitPsuStatsHist"
MO_CLASS_ID_EQUIPMENT_RAID_DEF = "equipmentRaidDef"
MO_CLASS_ID_EQUIPMENT_SECURE_BOOT = "equipmentSecureBoot"
MO_CLASS_ID_EQUIPMENT_SECURE_CONTROLLER = "equipmentSecureController"
MO_CLASS_ID_EQUIPMENT_SECURITY_UNIT_CAP_PROVIDER = "equipmentSecurityUnitCapProvider"
MO_CLASS_ID_EQUIPMENT_SERVER_FEATURE_CAP = "equipmentServerFeatureCap"
MO_CLASS_ID_EQUIPMENT_SERVER_PORT_CAP_PROVIDER = "equipmentServerPortCapProvider"
MO_CLASS_ID_EQUIPMENT_SERVER_UNIT_CAP_PROVIDER = "equipmentServerUnitCapProvider"
MO_CLASS_ID_EQUIPMENT_SERVICE_DEF = "equipmentServiceDef"
MO_CLASS_ID_EQUIPMENT_SHARED_IOMODULE = "equipmentSharedIOModule"
MO_CLASS_ID_EQUIPMENT_SLOT_ARRAY = "equipmentSlotArray"
MO_CLASS_ID_EQUIPMENT_SLOT_ARRAY_REF = "equipmentSlotArrayRef"
MO_CLASS_ID_EQUIPMENT_SLOT_ENCLOSURE = "equipmentSlotEnclosure"
MO_CLASS_ID_EQUIPMENT_STATEFUL = "equipmentStateful"
MO_CLASS_ID_EQUIPMENT_STATEFUL_BLADE_COMP = "equipmentStatefulBladeComp"
MO_CLASS_ID_EQUIPMENT_STATEFUL_CH_COMP = "equipmentStatefulChComp"
MO_CLASS_ID_EQUIPMENT_STORAGE_CONTROLLER_CONFIG = "equipmentStorageControllerConfig"
MO_CLASS_ID_EQUIPMENT_STORAGE_CONTROLLER_SLOT_DEF = "equipmentStorageControllerSlotDef"
MO_CLASS_ID_EQUIPMENT_STORAGE_LIMIT_CAP = "equipmentStorageLimitCap"
MO_CLASS_ID_EQUIPMENT_STORAGE_SAS_EXPANDER_CAP_PROVIDER = "equipmentStorageSasExpanderCapProvider"
MO_CLASS_ID_EQUIPMENT_STORAGE_SAS_EXPANDER_CAP_REF = "equipmentStorageSasExpanderCapRef"
MO_CLASS_ID_EQUIPMENT_SWITCH_CAP = "equipmentSwitchCap"
MO_CLASS_ID_EQUIPMENT_SWITCH_CAP_PROVIDER = "equipmentSwitchCapProvider"
MO_CLASS_ID_EQUIPMENT_SWITCH_CARD = "equipmentSwitchCard"
MO_CLASS_ID_EQUIPMENT_SWITCH_IOCARD = "equipmentSwitchIOCard"
MO_CLASS_ID_EQUIPMENT_SWITCH_IOCARD_CAP_PROVIDER = "equipmentSwitchIOCardCapProvider"
MO_CLASS_ID_EQUIPMENT_SWITCH_IOCARD_FSM = "equipmentSwitchIOCardFsm"
MO_CLASS_ID_EQUIPMENT_SWITCH_IOCARD_FSM_STAGE = "equipmentSwitchIOCardFsmStage"
MO_CLASS_ID_EQUIPMENT_SWITCH_IOCARD_FSM_TASK = "equipmentSwitchIOCardFsmTask"
MO_CLASS_ID_EQUIPMENT_SWITCH_TYPE_DEF = "equipmentSwitchTypeDef"
MO_CLASS_ID_EQUIPMENT_TPM = "equipmentTpm"
MO_CLASS_ID_EQUIPMENT_TPM_CAP_PROVIDER = "equipmentTpmCapProvider"
MO_CLASS_ID_EQUIPMENT_UNIFIED_PORT_CAP_PROVIDER = "equipmentUnifiedPortCapProvider"
MO_CLASS_ID_EQUIPMENT_UUID_FEATURE_CAP = "equipmentUuidFeatureCap"
MO_CLASS_ID_EQUIPMENT_VERSION_CONSTRAINT = "equipmentVersionConstraint"
MO_CLASS_ID_EQUIPMENT_XCVR = "equipmentXcvr"
MO_CLASS_ID_ETHER_CIO_EP = "etherCIoEp"
MO_CLASS_ID_ETHER_CONFIG = "etherConfig"
MO_CLASS_ID_ETHER_ERR_STATS = "etherErrStats"
MO_CLASS_ID_ETHER_ERR_STATS_HIST = "etherErrStatsHist"
MO_CLASS_ID_ETHER_EXTERNAL_EP = "etherExternalEp"
MO_CLASS_ID_ETHER_EXTERNAL_PC = "etherExternalPc"
MO_CLASS_ID_ETHER_FCOE_INTERFACE_STATS = "etherFcoeInterfaceStats"
MO_CLASS_ID_ETHER_FCOE_INTERFACE_STATS_HIST = "etherFcoeInterfaceStatsHist"
MO_CLASS_ID_ETHER_IF_CONFIG = "etherIfConfig"
MO_CLASS_ID_ETHER_INT_FIO_EP = "etherIntFIoEp"
MO_CLASS_ID_ETHER_INTERNAL_PC = "etherInternalPc"
MO_CLASS_ID_ETHER_LOSS_STATS = "etherLossStats"
MO_CLASS_ID_ETHER_LOSS_STATS_HIST = "etherLossStatsHist"
MO_CLASS_ID_ETHER_NI_ERR_STATS = "etherNiErrStats"
MO_CLASS_ID_ETHER_NI_ERR_STATS_HIST = "etherNiErrStatsHist"
MO_CLASS_ID_ETHER_NIC_IF_CONFIG = "etherNicIfConfig"
MO_CLASS_ID_ETHER_PIO = "etherPIo"
MO_CLASS_ID_ETHER_PIO_END_POINT = "etherPIoEndPoint"
MO_CLASS_ID_ETHER_PIO_EP = "etherPIoEp"
MO_CLASS_ID_ETHER_PIO_FSM = "etherPIoFsm"
MO_CLASS_ID_ETHER_PIO_FSM_STAGE = "etherPIoFsmStage"
MO_CLASS_ID_ETHER_PAUSE_STATS = "etherPauseStats"
MO_CLASS_ID_ETHER_PAUSE_STATS_HIST = "etherPauseStatsHist"
MO_CLASS_ID_ETHER_PC = "etherPc"
MO_CLASS_ID_ETHER_PORT_CHAN_ID_ELEM = "etherPortChanIdElem"
MO_CLASS_ID_ETHER_PORT_CHAN_ID_UNIVERSE = "etherPortChanIdUniverse"
MO_CLASS_ID_ETHER_RX_STATS = "etherRxStats"
MO_CLASS_ID_ETHER_RX_STATS_HIST = "etherRxStatsHist"
MO_CLASS_ID_ETHER_SERVER_INT_FIO = "etherServerIntFIo"
MO_CLASS_ID_ETHER_SERVER_INT_FIO_FSM = "etherServerIntFIoFsm"
MO_CLASS_ID_ETHER_SERVER_INT_FIO_FSM_STAGE = "etherServerIntFIoFsmStage"
MO_CLASS_ID_ETHER_SERVER_INT_FIO_FSM_TASK = "etherServerIntFIoFsmTask"
MO_CLASS_ID_ETHER_SERVER_INT_FIO_PC = "etherServerIntFIoPc"
MO_CLASS_ID_ETHER_SERVER_INT_FIO_PC_EP = "etherServerIntFIoPcEp"
MO_CLASS_ID_ETHER_SW_IF_CONFIG = "etherSwIfConfig"
MO_CLASS_ID_ETHER_SWITCH_INT_FIO = "etherSwitchIntFIo"
MO_CLASS_ID_ETHER_SWITCH_INT_FIO_PC = "etherSwitchIntFIoPc"
MO_CLASS_ID_ETHER_SWITCH_INT_FIO_PC_EP = "etherSwitchIntFIoPcEp"
MO_CLASS_ID_ETHER_TX_STATS = "etherTxStats"
MO_CLASS_ID_ETHER_TX_STATS_HIST = "etherTxStatsHist"
MO_CLASS_ID_EVENT_EP_CTRL = "eventEpCtrl"
MO_CLASS_ID_EVENT_HOLDER = "eventHolder"
MO_CLASS_ID_EVENT_INST = "eventInst"
MO_CLASS_ID_EVENT_LOG = "eventLog"
MO_CLASS_ID_EVENT_POLICY = "eventPolicy"
MO_CLASS_ID_EVENT_RECORD = "eventRecord"
MO_CLASS_ID_EXTMGMT_ARP_TARGETS = "extmgmtArpTargets"
MO_CLASS_ID_EXTMGMT_GATEWAY_PING = "extmgmtGatewayPing"
MO_CLASS_ID_EXTMGMT_IF = "extmgmtIf"
MO_CLASS_ID_EXTMGMT_IF_MON_POLICY = "extmgmtIfMonPolicy"
MO_CLASS_ID_EXTMGMT_MII_STATUS = "extmgmtMiiStatus"
MO_CLASS_ID_EXTMGMT_NDISC_TARGETS = "extmgmtNdiscTargets"
MO_CLASS_ID_EXTPOL_CLIENT = "extpolClient"
MO_CLASS_ID_EXTPOL_CLIENT_CONT = "extpolClientCont"
MO_CLASS_ID_EXTPOL_CONNECTOR = "extpolConnector"
MO_CLASS_ID_EXTPOL_CONNECTOR_CONTAINER = "extpolConnectorContainer"
MO_CLASS_ID_EXTPOL_CONTROLLER = "extpolController"
MO_CLASS_ID_EXTPOL_CONTROLLER_CONT = "extpolControllerCont"
MO_CLASS_ID_EXTPOL_EP = "extpolEp"
MO_CLASS_ID_EXTPOL_EP_FSM = "extpolEpFsm"
MO_CLASS_ID_EXTPOL_EP_FSM_STAGE = "extpolEpFsmStage"
MO_CLASS_ID_EXTPOL_EP_FSM_TASK = "extpolEpFsmTask"
MO_CLASS_ID_EXTPOL_PROVIDER = "extpolProvider"
MO_CLASS_ID_EXTPOL_PROVIDER_CONT = "extpolProviderCont"
MO_CLASS_ID_EXTPOL_PROVIDER_FSM = "extpolProviderFsm"
MO_CLASS_ID_EXTPOL_PROVIDER_FSM_STAGE = "extpolProviderFsmStage"
MO_CLASS_ID_EXTPOL_PROVIDER_FSM_TASK = "extpolProviderFsmTask"
MO_CLASS_ID_EXTPOL_REGISTRY = "extpolRegistry"
MO_CLASS_ID_EXTPOL_REGISTRY_FSM = "extpolRegistryFsm"
MO_CLASS_ID_EXTPOL_REGISTRY_FSM_STAGE = "extpolRegistryFsmStage"
MO_CLASS_ID_EXTPOL_REGISTRY_FSM_TASK = "extpolRegistryFsmTask"
MO_CLASS_ID_EXTPOL_SVC = "extpolSvc"
MO_CLASS_ID_EXTPOL_SYSTEM_CONTEXT = "extpolSystemContext"
MO_CLASS_ID_EXTVMM_EP = "extvmmEp"
MO_CLASS_ID_EXTVMM_EP_FSM = "extvmmEpFsm"
MO_CLASS_ID_EXTVMM_EP_FSM_STAGE = "extvmmEpFsmStage"
MO_CLASS_ID_EXTVMM_EP_FSM_TASK = "extvmmEpFsmTask"
MO_CLASS_ID_EXTVMM_FNDREFERENCE = "extvmmFNDReference"
MO_CLASS_ID_EXTVMM_FABRIC_NETWORK = "extvmmFabricNetwork"
MO_CLASS_ID_EXTVMM_FABRIC_NETWORK_DEFINITION = "extvmmFabricNetworkDefinition"
MO_CLASS_ID_EXTVMM_KEY_INST = "extvmmKeyInst"
MO_CLASS_ID_EXTVMM_KEY_RING = "extvmmKeyRing"
MO_CLASS_ID_EXTVMM_KEY_STORE = "extvmmKeyStore"
MO_CLASS_ID_EXTVMM_KEY_STORE_FSM = "extvmmKeyStoreFsm"
MO_CLASS_ID_EXTVMM_KEY_STORE_FSM_STAGE = "extvmmKeyStoreFsmStage"
MO_CLASS_ID_EXTVMM_KEY_STORE_FSM_TASK = "extvmmKeyStoreFsmTask"
MO_CLASS_ID_EXTVMM_MASTER_EXT_KEY = "extvmmMasterExtKey"
MO_CLASS_ID_EXTVMM_MASTER_EXT_KEY_FSM = "extvmmMasterExtKeyFsm"
MO_CLASS_ID_EXTVMM_MASTER_EXT_KEY_FSM_STAGE = "extvmmMasterExtKeyFsmStage"
MO_CLASS_ID_EXTVMM_MASTER_EXT_KEY_FSM_TASK = "extvmmMasterExtKeyFsmTask"
MO_CLASS_ID_EXTVMM_NETWORK_SETS = "extvmmNetworkSets"
MO_CLASS_ID_EXTVMM_NETWORK_SETS_FSM = "extvmmNetworkSetsFsm"
MO_CLASS_ID_EXTVMM_NETWORK_SETS_FSM_STAGE = "extvmmNetworkSetsFsmStage"
MO_CLASS_ID_EXTVMM_NETWORK_SETS_FSM_TASK = "extvmmNetworkSetsFsmTask"
MO_CLASS_ID_EXTVMM_PROVIDER = "extvmmProvider"
MO_CLASS_ID_EXTVMM_PROVIDER_FSM = "extvmmProviderFsm"
MO_CLASS_ID_EXTVMM_PROVIDER_FSM_STAGE = "extvmmProviderFsmStage"
MO_CLASS_ID_EXTVMM_PROVIDER_FSM_TASK = "extvmmProviderFsmTask"
MO_CLASS_ID_EXTVMM_SWITCH_DEL_TASK = "extvmmSwitchDelTask"
MO_CLASS_ID_EXTVMM_SWITCH_DEL_TASK_FSM = "extvmmSwitchDelTaskFsm"
MO_CLASS_ID_EXTVMM_SWITCH_DEL_TASK_FSM_STAGE = "extvmmSwitchDelTaskFsmStage"
MO_CLASS_ID_EXTVMM_SWITCH_DEL_TASK_FSM_TASK = "extvmmSwitchDelTaskFsmTask"
MO_CLASS_ID_EXTVMM_SWITCH_SET = "extvmmSwitchSet"
MO_CLASS_ID_EXTVMM_UP_LINK_PP = "extvmmUpLinkPP"
MO_CLASS_ID_EXTVMM_VMNDREF = "extvmmVMNDRef"
MO_CLASS_ID_EXTVMM_VMNETWORK = "extvmmVMNetwork"
MO_CLASS_ID_EXTVMM_VMNETWORK_DEFINITION = "extvmmVMNetworkDefinition"
MO_CLASS_ID_EXTVMM_VMNETWORK_SETS = "extvmmVMNetworkSets"
MO_CLASS_ID_FABRIC_ADCE_SW_SRV_EP = "fabricADceSwSrvEp"
MO_CLASS_ID_FABRIC_AETH_ESTC_EP = "fabricAEthEstcEp"
MO_CLASS_ID_FABRIC_AETH_LAN_EP = "fabricAEthLanEp"
MO_CLASS_ID_FABRIC_AFC_ESTC_EP = "fabricAFcEstcEp"
MO_CLASS_ID_FABRIC_AFC_SAN_EP = "fabricAFcSanEp"
MO_CLASS_ID_FABRIC_AFCOE_ESTC_EP = "fabricAFcoeEstcEp"
MO_CLASS_ID_FABRIC_AFCOE_SAN_EP = "fabricAFcoeSanEp"
MO_CLASS_ID_FABRIC_AVLAN = "fabricAVlan"
MO_CLASS_ID_FABRIC_AVSAN = "fabricAVsan"
MO_CLASS_ID_FABRIC_BHVLAN = "fabricBHVlan"
MO_CLASS_ID_FABRIC_BREAKOUT = "fabricBreakout"
MO_CLASS_ID_FABRIC_CIO_EP = "fabricCIoEp"
MO_CLASS_ID_FABRIC_CABLING = "fabricCabling"
MO_CLASS_ID_FABRIC_CABLING_SW = "fabricCablingSw"
MO_CLASS_ID_FABRIC_CARTRIDGE_PH_EP = "fabricCartridgePhEp"
MO_CLASS_ID_FABRIC_CARTRIDGE_SLOT_EP = "fabricCartridgeSlotEp"
MO_CLASS_ID_FABRIC_CARTRIDGE_SLOT_EP_FSM = "fabricCartridgeSlotEpFsm"
MO_CLASS_ID_FABRIC_CARTRIDGE_SLOT_EP_FSM_STAGE = "fabricCartridgeSlotEpFsmStage"
MO_CLASS_ID_FABRIC_CARTRIDGE_SLOT_EP_FSM_TASK = "fabricCartridgeSlotEpFsmTask"
MO_CLASS_ID_FABRIC_CDP_LINK_POLICY = "fabricCdpLinkPolicy"
MO_CLASS_ID_FABRIC_CHANGED_OBJECT_REF = "fabricChangedObjectRef"
MO_CLASS_ID_FABRIC_CHASSIS_EP = "fabricChassisEp"
MO_CLASS_ID_FABRIC_CLOUD = "fabricCloud"
MO_CLASS_ID_FABRIC_COMPUTE_EP = "fabricComputeEp"
MO_CLASS_ID_FABRIC_COMPUTE_MSLOT_EP = "fabricComputeMSlotEp"
MO_CLASS_ID_FABRIC_COMPUTE_MSLOT_EP_FSM = "fabricComputeMSlotEpFsm"
MO_CLASS_ID_FABRIC_COMPUTE_MSLOT_EP_FSM_STAGE = "fabricComputeMSlotEpFsmStage"
MO_CLASS_ID_FABRIC_COMPUTE_MSLOT_EP_FSM_TASK = "fabricComputeMSlotEpFsmTask"
MO_CLASS_ID_FABRIC_COMPUTE_PH_EP = "fabricComputePhEp"
MO_CLASS_ID_FABRIC_COMPUTE_SLOT_EP = "fabricComputeSlotEp"
MO_CLASS_ID_FABRIC_COMPUTE_SLOT_EP_FSM = "fabricComputeSlotEpFsm"
MO_CLASS_ID_FABRIC_COMPUTE_SLOT_EP_FSM_STAGE = "fabricComputeSlotEpFsmStage"
MO_CLASS_ID_FABRIC_COMPUTE_SLOT_EP_FSM_TASK = "fabricComputeSlotEpFsmTask"
MO_CLASS_ID_FABRIC_DCE_SRV = "fabricDceSrv"
MO_CLASS_ID_FABRIC_DCE_SW_SRV = "fabricDceSwSrv"
MO_CLASS_ID_FABRIC_DCE_SW_SRV_EP = "fabricDceSwSrvEp"
MO_CLASS_ID_FABRIC_DCE_SW_SRV_PC = "fabricDceSwSrvPc"
MO_CLASS_ID_FABRIC_DCE_SW_SRV_PC_EP = "fabricDceSwSrvPcEp"
MO_CLASS_ID_FABRIC_DOMAIN = "fabricDomain"
MO_CLASS_ID_FABRIC_EP = "fabricEp"
MO_CLASS_ID_FABRIC_EP_MGR = "fabricEpMgr"
MO_CLASS_ID_FABRIC_EP_MGR_FSM = "fabricEpMgrFsm"
MO_CLASS_ID_FABRIC_EP_MGR_FSM_STAGE = "fabricEpMgrFsmStage"
MO_CLASS_ID_FABRIC_EP_MGR_FSM_TASK = "fabricEpMgrFsmTask"
MO_CLASS_ID_FABRIC_EP_VLAN = "fabricEpVlan"
MO_CLASS_ID_FABRIC_EP_VSAN = "fabricEpVsan"
MO_CLASS_ID_FABRIC_EQUIPMENT_EP = "fabricEquipmentEp"
MO_CLASS_ID_FABRIC_ESTC_EP = "fabricEstcEp"
MO_CLASS_ID_FABRIC_ESTC_PC = "fabricEstcPc"
MO_CLASS_ID_FABRIC_ETH_CDP_POLICY = "fabricEthCdpPolicy"
MO_CLASS_ID_FABRIC_ETH_ESTC = "fabricEthEstc"
MO_CLASS_ID_FABRIC_ETH_ESTC_CLOUD = "fabricEthEstcCloud"
MO_CLASS_ID_FABRIC_ETH_ESTC_EP = "fabricEthEstcEp"
MO_CLASS_ID_FABRIC_ETH_ESTC_PC = "fabricEthEstcPc"
MO_CLASS_ID_FABRIC_ETH_ESTC_PC_EP = "fabricEthEstcPcEp"
MO_CLASS_ID_FABRIC_ETH_FLOW_MON_LAN = "fabricEthFlowMonLan"
MO_CLASS_ID_FABRIC_ETH_FLOW_MON_SRC_REF = "fabricEthFlowMonSrcRef"
MO_CLASS_ID_FABRIC_ETH_LAN = "fabricEthLan"
MO_CLASS_ID_FABRIC_ETH_LAN_EP = "fabricEthLanEp"
MO_CLASS_ID_FABRIC_ETH_LAN_FLOW_MON = "fabricEthLanFlowMon"
MO_CLASS_ID_FABRIC_ETH_LAN_FLOW_MON_COLLECTOR = "fabricEthLanFlowMonCollector"
MO_CLASS_ID_FABRIC_ETH_LAN_FLOW_MON_EXPORTER = "fabricEthLanFlowMonExporter"
MO_CLASS_ID_FABRIC_ETH_LAN_FLOW_MON_SRC_EP = "fabricEthLanFlowMonSrcEp"
MO_CLASS_ID_FABRIC_ETH_LAN_FLOW_MONITOR = "fabricEthLanFlowMonitor"
MO_CLASS_ID_FABRIC_ETH_LAN_FLOW_MONITORING = "fabricEthLanFlowMonitoring"
MO_CLASS_ID_FABRIC_ETH_LAN_PC = "fabricEthLanPc"
MO_CLASS_ID_FABRIC_ETH_LAN_PC_EP = "fabricEthLanPcEp"
MO_CLASS_ID_FABRIC_ETH_LINK_POLICY = "fabricEthLinkPolicy"
MO_CLASS_ID_FABRIC_ETH_LINK_PROFILE = "fabricEthLinkProfile"
MO_CLASS_ID_FABRIC_ETH_MON = "fabricEthMon"
MO_CLASS_ID_FABRIC_ETH_MON_DEST_EP = "fabricEthMonDestEp"
MO_CLASS_ID_FABRIC_ETH_MON_FILT_EP = "fabricEthMonFiltEp"
MO_CLASS_ID_FABRIC_ETH_MON_FILT_REF = "fabricEthMonFiltRef"
MO_CLASS_ID_FABRIC_ETH_MON_LAN = "fabricEthMonLan"
MO_CLASS_ID_FABRIC_ETH_MON_SRC_EP = "fabricEthMonSrcEp"
MO_CLASS_ID_FABRIC_ETH_MON_SRC_REF = "fabricEthMonSrcRef"
MO_CLASS_ID_FABRIC_ETH_TARGET_EP = "fabricEthTargetEp"
MO_CLASS_ID_FABRIC_ETH_UDLD_POLICY = "fabricEthUdldPolicy"
MO_CLASS_ID_FABRIC_ETH_VLAN_PC = "fabricEthVlanPc"
MO_CLASS_ID_FABRIC_ETH_VLAN_PORT_EP = "fabricEthVlanPortEp"
MO_CLASS_ID_FABRIC_EXTERNAL = "fabricExternal"
MO_CLASS_ID_FABRIC_EXTERNAL_EP = "fabricExternalEp"
MO_CLASS_ID_FABRIC_EXTERNAL_ESTC = "fabricExternalEstc"
MO_CLASS_ID_FABRIC_EXTERNAL_PC = "fabricExternalPc"
MO_CLASS_ID_FABRIC_FC_ESTC = "fabricFcEstc"
MO_CLASS_ID_FABRIC_FC_ESTC_CLOUD = "fabricFcEstcCloud"
MO_CLASS_ID_FABRIC_FC_ESTC_EP = "fabricFcEstcEp"
MO_CLASS_ID_FABRIC_FC_MON = "fabricFcMon"
MO_CLASS_ID_FABRIC_FC_MON_DEST_EP = "fabricFcMonDestEp"
MO_CLASS_ID_FABRIC_FC_MON_FILT_EP = "fabricFcMonFiltEp"
MO_CLASS_ID_FABRIC_FC_MON_FILT_REF = "fabricFcMonFiltRef"
MO_CLASS_ID_FABRIC_FC_MON_SAN = "fabricFcMonSan"
MO_CLASS_ID_FABRIC_FC_MON_SRC_EP = "fabricFcMonSrcEp"
MO_CLASS_ID_FABRIC_FC_MON_SRC_REF = "fabricFcMonSrcRef"
MO_CLASS_ID_FABRIC_FC_SAN = "fabricFcSan"
MO_CLASS_ID_FABRIC_FC_SAN_EP = "fabricFcSanEp"
MO_CLASS_ID_FABRIC_FC_SAN_PC = "fabricFcSanPc"
MO_CLASS_ID_FABRIC_FC_SAN_PC_EP = "fabricFcSanPcEp"
MO_CLASS_ID_FABRIC_FC_VSAN_PC = "fabricFcVsanPc"
MO_CLASS_ID_FABRIC_FC_VSAN_PORT_EP = "fabricFcVsanPortEp"
MO_CLASS_ID_FABRIC_FCOE_ESTC_EP = "fabricFcoeEstcEp"
MO_CLASS_ID_FABRIC_FCOE_SAN_EP = "fabricFcoeSanEp"
MO_CLASS_ID_FABRIC_FCOE_SAN_PC = "fabricFcoeSanPc"
MO_CLASS_ID_FABRIC_FCOE_SAN_PC_EP = "fabricFcoeSanPcEp"
MO_CLASS_ID_FABRIC_FCOE_VSAN_PC = "fabricFcoeVsanPc"
MO_CLASS_ID_FABRIC_FCOE_VSAN_PORT_EP = "fabricFcoeVsanPortEp"
MO_CLASS_ID_FABRIC_FLOW_MON = "fabricFlowMon"
MO_CLASS_ID_FABRIC_FLOW_MON_COLLECTOR = "fabricFlowMonCollector"
MO_CLASS_ID_FABRIC_FLOW_MON_DEFINITION = "fabricFlowMonDefinition"
MO_CLASS_ID_FABRIC_FLOW_MON_EXPORTER = "fabricFlowMonExporter"
MO_CLASS_ID_FABRIC_FLOW_MON_EXPORTER_PROFILE = "fabricFlowMonExporterProfile"
MO_CLASS_ID_FABRIC_FLOW_MON_ITEM = "fabricFlowMonItem"
MO_CLASS_ID_FABRIC_FLOW_MON_SRC_EP = "fabricFlowMonSrcEp"
MO_CLASS_ID_FABRIC_FLOW_MON_SRC_REF = "fabricFlowMonSrcRef"
MO_CLASS_ID_FABRIC_FLOW_MONITOR = "fabricFlowMonitor"
MO_CLASS_ID_FABRIC_FLOW_MONITORING_CATEGORY = "fabricFlowMonitoringCategory"
MO_CLASS_ID_FABRIC_IF = "fabricIf"
MO_CLASS_ID_FABRIC_INTERNAL = "fabricInternal"
MO_CLASS_ID_FABRIC_INTERNAL_DCE_SRV = "fabricInternalDceSrv"
MO_CLASS_ID_FABRIC_INTERNAL_EP = "fabricInternalEp"
MO_CLASS_ID_FABRIC_INTERNAL_PC = "fabricInternalPc"
MO_CLASS_ID_FABRIC_LACP_POLICY = "fabricLacpPolicy"
MO_CLASS_ID_FABRIC_LAN = "fabricLan"
MO_CLASS_ID_FABRIC_LAN_ACCESS_MGR = "fabricLanAccessMgr"
MO_CLASS_ID_FABRIC_LAN_CLOUD = "fabricLanCloud"
MO_CLASS_ID_FABRIC_LAN_CLOUD_FSM = "fabricLanCloudFsm"
MO_CLASS_ID_FABRIC_LAN_CLOUD_FSM_STAGE = "fabricLanCloudFsmStage"
MO_CLASS_ID_FABRIC_LAN_CLOUD_FSM_TASK = "fabricLanCloudFsmTask"
MO_CLASS_ID_FABRIC_LAN_CLOUD_POLICY = "fabricLanCloudPolicy"
MO_CLASS_ID_FABRIC_LAN_EP = "fabricLanEp"
MO_CLASS_ID_FABRIC_LAN_FLOW_MON = "fabricLanFlowMon"
MO_CLASS_ID_FABRIC_LAN_FLOW_MON_COLLECTOR = "fabricLanFlowMonCollector"
MO_CLASS_ID_FABRIC_LAN_FLOW_MON_EXPORTER = "fabricLanFlowMonExporter"
MO_CLASS_ID_FABRIC_LAN_FLOW_MON_SRC_EP = "fabricLanFlowMonSrcEp"
MO_CLASS_ID_FABRIC_LAN_FLOW_MONITOR = "fabricLanFlowMonitor"
MO_CLASS_ID_FABRIC_LAN_MON_CLOUD = "fabricLanMonCloud"
MO_CLASS_ID_FABRIC_LAN_PC = "fabricLanPc"
MO_CLASS_ID_FABRIC_LAN_PIN_GROUP = "fabricLanPinGroup"
MO_CLASS_ID_FABRIC_LAN_PIN_TARGET = "fabricLanPinTarget"
MO_CLASS_ID_FABRIC_LAST_ACKED_SLOT = "fabricLastAckedSlot"
MO_CLASS_ID_FABRIC_LINK_POLICY = "fabricLinkPolicy"
MO_CLASS_ID_FABRIC_LOCALE = "fabricLocale"
MO_CLASS_ID_FABRIC_MON = "fabricMon"
MO_CLASS_ID_FABRIC_MON_DEST_EP = "fabricMonDestEp"
MO_CLASS_ID_FABRIC_MON_FILT_REF = "fabricMonFiltRef"
MO_CLASS_ID_FABRIC_MON_SRC_EP = "fabricMonSrcEp"
MO_CLASS_ID_FABRIC_MON_SRC_FILT_EP = "fabricMonSrcFiltEp"
MO_CLASS_ID_FABRIC_MON_SRC_REF = "fabricMonSrcRef"
MO_CLASS_ID_FABRIC_MULTICAST_POLICY = "fabricMulticastPolicy"
MO_CLASS_ID_FABRIC_NET_GROUP = "fabricNetGroup"
MO_CLASS_ID_FABRIC_NETFLOW_COLLECTOR = "fabricNetflowCollector"
MO_CLASS_ID_FABRIC_NETFLOW_IPV4_ADDR = "fabricNetflowIPv4Addr"
MO_CLASS_ID_FABRIC_NETFLOW_MON_EXPORTER = "fabricNetflowMonExporter"
MO_CLASS_ID_FABRIC_NETFLOW_MON_EXPORTER_REF = "fabricNetflowMonExporterRef"
MO_CLASS_ID_FABRIC_NETFLOW_MON_SESSION = "fabricNetflowMonSession"
MO_CLASS_ID_FABRIC_NETFLOW_MON_SRC_EP = "fabricNetflowMonSrcEp"
MO_CLASS_ID_FABRIC_NETFLOW_MON_SRC_REF = "fabricNetflowMonSrcRef"
MO_CLASS_ID_FABRIC_NETFLOW_MONITOR = "fabricNetflowMonitor"
MO_CLASS_ID_FABRIC_NETFLOW_MONITOR_REF = "fabricNetflowMonitorRef"
MO_CLASS_ID_FABRIC_NETFLOW_TIMEOUT_POLICY = "fabricNetflowTimeoutPolicy"
MO_CLASS_ID_FABRIC_ORG_VLAN_POLICY = "fabricOrgVlanPolicy"
MO_CLASS_ID_FABRIC_PIO_EP = "fabricPIoEp"
MO_CLASS_ID_FABRIC_PATH = "fabricPath"
MO_CLASS_ID_FABRIC_PATH_CONN = "fabricPathConn"
MO_CLASS_ID_FABRIC_PATH_EP = "fabricPathEp"
MO_CLASS_ID_FABRIC_PIN_GROUP = "fabricPinGroup"
MO_CLASS_ID_FABRIC_PIN_TARGET = "fabricPinTarget"
MO_CLASS_ID_FABRIC_POOLABLE_VLAN = "fabricPoolableVlan"
MO_CLASS_ID_FABRIC_POOLED_VLAN = "fabricPooledVlan"
MO_CLASS_ID_FABRIC_SAN = "fabricSan"
MO_CLASS_ID_FABRIC_SAN_CLOUD = "fabricSanCloud"
MO_CLASS_ID_FABRIC_SAN_CLOUD_FSM = "fabricSanCloudFsm"
MO_CLASS_ID_FABRIC_SAN_CLOUD_FSM_STAGE = "fabricSanCloudFsmStage"
MO_CLASS_ID_FABRIC_SAN_CLOUD_FSM_TASK = "fabricSanCloudFsmTask"
MO_CLASS_ID_FABRIC_SAN_EP = "fabricSanEp"
MO_CLASS_ID_FABRIC_SAN_MON_CLOUD = "fabricSanMonCloud"
MO_CLASS_ID_FABRIC_SAN_PC = "fabricSanPc"
MO_CLASS_ID_FABRIC_SAN_PIN_GROUP = "fabricSanPinGroup"
MO_CLASS_ID_FABRIC_SAN_PIN_TARGET = "fabricSanPinTarget"
MO_CLASS_ID_FABRIC_SUB_GROUP = "fabricSubGroup"
MO_CLASS_ID_FABRIC_SW_CH_EP = "fabricSwChEp"
MO_CLASS_ID_FABRIC_SW_CH_PH_EP = "fabricSwChPhEp"
MO_CLASS_ID_FABRIC_SW_SRV_EP = "fabricSwSrvEp"
MO_CLASS_ID_FABRIC_SW_SRV_PC = "fabricSwSrvPc"
MO_CLASS_ID_FABRIC_SW_SUB_GROUP = "fabricSwSubGroup"
MO_CLASS_ID_FABRIC_TARGET_EP = "fabricTargetEp"
MO_CLASS_ID_FABRIC_UDLD_LINK_POLICY = "fabricUdldLinkPolicy"
MO_CLASS_ID_FABRIC_UDLD_POLICY = "fabricUdldPolicy"
MO_CLASS_ID_FABRIC_VCON = "fabricVCon"
MO_CLASS_ID_FABRIC_VCON_PROFILE = "fabricVConProfile"
MO_CLASS_ID_FABRIC_VLAN = "fabricVlan"
MO_CLASS_ID_FABRIC_VLAN_EP = "fabricVlanEp"
MO_CLASS_ID_FABRIC_VLAN_GROUP_REQ = "fabricVlanGroupReq"
MO_CLASS_ID_FABRIC_VLAN_PERMIT = "fabricVlanPermit"
MO_CLASS_ID_FABRIC_VLAN_REQ = "fabricVlanReq"
MO_CLASS_ID_FABRIC_VNET_EP = "fabricVnetEp"
MO_CLASS_ID_FABRIC_VNET_EP_SYNC_EP = "fabricVnetEpSyncEp"
MO_CLASS_ID_FABRIC_VNET_EP_SYNC_EP_FSM = "fabricVnetEpSyncEpFsm"
MO_CLASS_ID_FABRIC_VNET_EP_SYNC_EP_FSM_STAGE = "fabricVnetEpSyncEpFsmStage"
MO_CLASS_ID_FABRIC_VNET_EP_SYNC_EP_FSM_TASK = "fabricVnetEpSyncEpFsmTask"
MO_CLASS_ID_FABRIC_VNET_GROUP_REQ = "fabricVnetGroupReq"
MO_CLASS_ID_FABRIC_VNET_PERMIT = "fabricVnetPermit"
MO_CLASS_ID_FABRIC_VNET_REQ = "fabricVnetReq"
MO_CLASS_ID_FABRIC_VSAN = "fabricVsan"
MO_CLASS_ID_FABRIC_VSAN_EP = "fabricVsanEp"
MO_CLASS_ID_FABRIC_VSAN_MEMBERSHIP = "fabricVsanMembership"
MO_CLASS_ID_FABRIC_ZONE_ID_UNIVERSE = "fabricZoneIdUniverse"
MO_CLASS_ID_FAULT_AFFECTED_CLASS = "faultAffectedClass"
MO_CLASS_ID_FAULT_BASE_HOLDER = "faultBaseHolder"
MO_CLASS_ID_FAULT_BASE_POLICY = "faultBasePolicy"
MO_CLASS_ID_FAULT_HOLDER = "faultHolder"
MO_CLASS_ID_FAULT_INST = "faultInst"
MO_CLASS_ID_FAULT_LOCAL_TYPED_HOLDER = "faultLocalTypedHolder"
MO_CLASS_ID_FAULT_POLICY = "faultPolicy"
MO_CLASS_ID_FAULT_SUPPRESS_POLICY = "faultSuppressPolicy"
MO_CLASS_ID_FAULT_SUPPRESS_POLICY_ITEM = "faultSuppressPolicyItem"
MO_CLASS_ID_FAULT_SUPPRESS_TASK = "faultSuppressTask"
MO_CLASS_ID_FC_CONFIG = "fcConfig"
MO_CLASS_ID_FC_ERR_STATS = "fcErrStats"
MO_CLASS_ID_FC_ERR_STATS_HIST = "fcErrStatsHist"
MO_CLASS_ID_FC_IF_CONFIG = "fcIfConfig"
MO_CLASS_ID_FC_NIC_IF_CONFIG = "fcNicIfConfig"
MO_CLASS_ID_FC_PIO = "fcPIo"
MO_CLASS_ID_FC_PIO_FSM = "fcPIoFsm"
MO_CLASS_ID_FC_PIO_FSM_STAGE = "fcPIoFsmStage"
MO_CLASS_ID_FC_STATS = "fcStats"
MO_CLASS_ID_FC_STATS_HIST = "fcStatsHist"
MO_CLASS_ID_FC_SW_IF_CONFIG = "fcSwIfConfig"
MO_CLASS_ID_FCPOOL_ADDR = "fcpoolAddr"
MO_CLASS_ID_FCPOOL_BLOCK = "fcpoolBlock"
MO_CLASS_ID_FCPOOL_BOOT_TARGET = "fcpoolBootTarget"
MO_CLASS_ID_FCPOOL_FORMAT = "fcpoolFormat"
MO_CLASS_ID_FCPOOL_INITIATOR = "fcpoolInitiator"
MO_CLASS_ID_FCPOOL_INITIATOR_EP = "fcpoolInitiatorEp"
MO_CLASS_ID_FCPOOL_INITIATORS = "fcpoolInitiators"
MO_CLASS_ID_FCPOOL_POOLABLE = "fcpoolPoolable"
MO_CLASS_ID_FCPOOL_UNIVERSE = "fcpoolUniverse"
MO_CLASS_ID_FEATURE_BASE_REF = "featureBaseRef"
MO_CLASS_ID_FEATURE_CONTEXT_EP = "featureContextEp"
MO_CLASS_ID_FEATURE_DEFINITION = "featureDefinition"
MO_CLASS_ID_FEATURE_DEFINITION_INSTANCE = "featureDefinitionInstance"
MO_CLASS_ID_FEATURE_DEFINITION_REF = "featureDefinitionRef"
MO_CLASS_ID_FEATURE_FRU_CAP_PROVIDER_INSTANCE = "featureFruCapProviderInstance"
MO_CLASS_ID_FEATURE_FRU_CAP_PROVIDER_REF = "featureFruCapProviderRef"
MO_CLASS_ID_FEATURE_PROVIDER = "featureProvider"
MO_CLASS_ID_FEATURE_PROVIDER_INSTANCE = "featureProviderInstance"
MO_CLASS_ID_FIRMWARE_ABUNDLE_TYPE_CAP_PROVIDER = "firmwareABundleTypeCapProvider"
MO_CLASS_ID_FIRMWARE_ACONSTRAINT = "firmwareAConstraint"
MO_CLASS_ID_FIRMWARE_ACK = "firmwareAck"
MO_CLASS_ID_FIRMWARE_ACTIVITY = "firmwareActivity"
MO_CLASS_ID_FIRMWARE_AUTO_SYNC_POLICY = "firmwareAutoSyncPolicy"
MO_CLASS_ID_FIRMWARE_BLADE = "firmwareBlade"
MO_CLASS_ID_FIRMWARE_BOOT_DEFINITION = "firmwareBootDefinition"
MO_CLASS_ID_FIRMWARE_BOOT_UNIT = "firmwareBootUnit"
MO_CLASS_ID_FIRMWARE_BUNDLE_INFO = "firmwareBundleInfo"
MO_CLASS_ID_FIRMWARE_BUNDLE_INFO_DIGEST = "firmwareBundleInfoDigest"
MO_CLASS_ID_FIRMWARE_BUNDLE_TYPE = "firmwareBundleType"
MO_CLASS_ID_FIRMWARE_BUNDLE_TYPE_CAP_PROVIDER = "firmwareBundleTypeCapProvider"
MO_CLASS_ID_FIRMWARE_CAP_PROVIDER = "firmwareCapProvider"
MO_CLASS_ID_FIRMWARE_CATALOG_PACK = "firmwareCatalogPack"
MO_CLASS_ID_FIRMWARE_CATALOGUE = "firmwareCatalogue"
MO_CLASS_ID_FIRMWARE_COMP_ITEM = "firmwareCompItem"
MO_CLASS_ID_FIRMWARE_COMP_SOURCE = "firmwareCompSource"
MO_CLASS_ID_FIRMWARE_COMP_TARGET = "firmwareCompTarget"
MO_CLASS_ID_FIRMWARE_COMPUTE_HOST_PACK = "firmwareComputeHostPack"
MO_CLASS_ID_FIRMWARE_COMPUTE_MGMT_PACK = "firmwareComputeMgmtPack"
MO_CLASS_ID_FIRMWARE_COMPUTE_PACK = "firmwareComputePack"
MO_CLASS_ID_FIRMWARE_CONSTRAINTS = "firmwareConstraints"
MO_CLASS_ID_FIRMWARE_DEPENDENCY = "firmwareDependency"
MO_CLASS_ID_FIRMWARE_DIST_IMAGE = "firmwareDistImage"
MO_CLASS_ID_FIRMWARE_DISTRIBUTABLE = "firmwareDistributable"
MO_CLASS_ID_FIRMWARE_DISTRIBUTABLE_FSM = "firmwareDistributableFsm"
MO_CLASS_ID_FIRMWARE_DISTRIBUTABLE_FSM_STAGE = "firmwareDistributableFsmStage"
MO_CLASS_ID_FIRMWARE_DISTRIBUTABLE_FSM_TASK = "firmwareDistributableFsmTask"
MO_CLASS_ID_FIRMWARE_DOWNLOADER = "firmwareDownloader"
MO_CLASS_ID_FIRMWARE_DOWNLOADER_FSM = "firmwareDownloaderFsm"
MO_CLASS_ID_FIRMWARE_DOWNLOADER_FSM_STAGE = "firmwareDownloaderFsmStage"
MO_CLASS_ID_FIRMWARE_DOWNLOADER_FSM_TASK = "firmwareDownloaderFsmTask"
MO_CLASS_ID_FIRMWARE_EXCLUDE_SERVER_COMPONENT = "firmwareExcludeServerComponent"
MO_CLASS_ID_FIRMWARE_FILE_UNIT = "firmwareFileUnit"
MO_CLASS_ID_FIRMWARE_HOST = "firmwareHost"
MO_CLASS_ID_FIRMWARE_HOST_PACK_MOD_IMPACT = "firmwareHostPackModImpact"
MO_CLASS_ID_FIRMWARE_IMAGE = "firmwareImage"
MO_CLASS_ID_FIRMWARE_IMAGE_FSM = "firmwareImageFsm"
MO_CLASS_ID_FIRMWARE_IMAGE_FSM_STAGE = "firmwareImageFsmStage"
MO_CLASS_ID_FIRMWARE_IMAGE_FSM_TASK = "firmwareImageFsmTask"
MO_CLASS_ID_FIRMWARE_IMAGE_LOCK = "firmwareImageLock"
MO_CLASS_ID_FIRMWARE_INFRA = "firmwareInfra"
MO_CLASS_ID_FIRMWARE_INFRA_PACK = "firmwareInfraPack"
MO_CLASS_ID_FIRMWARE_INSTALL_IMPACT = "firmwareInstallImpact"
MO_CLASS_ID_FIRMWARE_INSTALLABLE = "firmwareInstallable"
MO_CLASS_ID_FIRMWARE_PCHSTORAGE_CONFIG_CONSTRAINT = "firmwarePCHStorageConfigConstraint"
MO_CLASS_ID_FIRMWARE_PACK = "firmwarePack"
MO_CLASS_ID_FIRMWARE_PACK_ITEM = "firmwarePackItem"
MO_CLASS_ID_FIRMWARE_PLATFORM_BUNDLE_TYPE_CAP_PROVIDER = "firmwarePlatformBundleTypeCapProvider"
MO_CLASS_ID_FIRMWARE_PROCESSOR_TYPE_CONSTRAINT = "firmwareProcessorTypeConstraint"
MO_CLASS_ID_FIRMWARE_RACK = "firmwareRack"
MO_CLASS_ID_FIRMWARE_RUNNING = "firmwareRunning"
MO_CLASS_ID_FIRMWARE_SERVER_TYPE_CONSTRAINT = "firmwareServerTypeConstraint"
MO_CLASS_ID_FIRMWARE_SPEC = "firmwareSpec"
MO_CLASS_ID_FIRMWARE_STATUS = "firmwareStatus"
MO_CLASS_ID_FIRMWARE_SYSTEM = "firmwareSystem"
MO_CLASS_ID_FIRMWARE_SYSTEM_COMP_CHECK_RESULT = "firmwareSystemCompCheckResult"
MO_CLASS_ID_FIRMWARE_SYSTEM_FSM = "firmwareSystemFsm"
MO_CLASS_ID_FIRMWARE_SYSTEM_FSM_STAGE = "firmwareSystemFsmStage"
MO_CLASS_ID_FIRMWARE_SYSTEM_FSM_TASK = "firmwareSystemFsmTask"
MO_CLASS_ID_FIRMWARE_TYPE = "firmwareType"
MO_CLASS_ID_FIRMWARE_UCSC_INFO = "firmwareUcscInfo"
MO_CLASS_ID_FIRMWARE_UNIT = "firmwareUnit"
MO_CLASS_ID_FIRMWARE_UPDATABLE = "firmwareUpdatable"
MO_CLASS_ID_FIRMWARE_UPGRADE_CONSTRAINT = "firmwareUpgradeConstraint"
MO_CLASS_ID_FIRMWARE_UPGRADE_DETAIL = "firmwareUpgradeDetail"
MO_CLASS_ID_FIRMWARE_UPGRADE_INFO = "firmwareUpgradeInfo"
MO_CLASS_ID_FIRMWARE_VIC_SLOT_CONSTRAINT = "firmwareVicSlotConstraint"
MO_CLASS_ID_FIRMWARE_VNIC_CDN_CONSTRAINT = "firmwareVnicCdnConstraint"
MO_CLASS_ID_FLOWCTRL_DEFINITION = "flowctrlDefinition"
MO_CLASS_ID_FLOWCTRL_ITEM = "flowctrlItem"
MO_CLASS_ID_FSM_FSM = "fsmFsm"
MO_CLASS_ID_FSM_STAGE = "fsmStage"
MO_CLASS_ID_FSM_STATUS = "fsmStatus"
MO_CLASS_ID_FSM_TASK = "fsmTask"
MO_CLASS_ID_GMETA_CLASS = "gmetaClass"
MO_CLASS_ID_GMETA_EP = "gmetaEp"
MO_CLASS_ID_GMETA_HOLDER = "gmetaHolder"
MO_CLASS_ID_GMETA_HOLDER_FSM = "gmetaHolderFsm"
MO_CLASS_ID_GMETA_HOLDER_FSM_STAGE = "gmetaHolderFsmStage"
MO_CLASS_ID_GMETA_HOLDER_FSM_TASK = "gmetaHolderFsmTask"
MO_CLASS_ID_GMETA_POLICY_MAP_ELEMENT = "gmetaPolicyMapElement"
MO_CLASS_ID_GMETA_POLICY_MAP_HOLDER = "gmetaPolicyMapHolder"
MO_CLASS_ID_GMETA_PROP = "gmetaProp"
MO_CLASS_ID_GRAPHICS_CARD = "graphicsCard"
MO_CLASS_ID_GRAPHICS_CONTROLLER = "graphicsController"
MO_CLASS_ID_HOSTIMG_POLICY = "hostimgPolicy"
MO_CLASS_ID_HOSTIMG_TARGET = "hostimgTarget"
MO_CLASS_ID_IDENT_IDENT_CTX = "identIdentCtx"
MO_CLASS_ID_IDENT_IDENT_REQUEST = "identIdentRequest"
MO_CLASS_ID_IDENT_IDENT_REQUEST_FSM = "identIdentRequestFsm"
MO_CLASS_ID_IDENT_IDENT_REQUEST_FSM_STAGE = "identIdentRequestFsmStage"
MO_CLASS_ID_IDENT_IDENT_REQUEST_FSM_TASK = "identIdentRequestFsmTask"
MO_CLASS_ID_IDENT_META_SYSTEM = "identMetaSystem"
MO_CLASS_ID_IDENT_META_SYSTEM_FSM = "identMetaSystemFsm"
MO_CLASS_ID_IDENT_META_SYSTEM_FSM_STAGE = "identMetaSystemFsmStage"
MO_CLASS_ID_IDENT_META_SYSTEM_FSM_TASK = "identMetaSystemFsmTask"
MO_CLASS_ID_IDENT_META_VERSE = "identMetaVerse"
MO_CLASS_ID_IDENT_REQUEST_EP = "identRequestEp"
MO_CLASS_ID_IDENT_SYS_INFO = "identSysInfo"
MO_CLASS_ID_IMGPROV_POLICY = "imgprovPolicy"
MO_CLASS_ID_IMGPROV_TARGET = "imgprovTarget"
MO_CLASS_ID_IMGSEC_KEY = "imgsecKey"
MO_CLASS_ID_IMGSEC_POLICY = "imgsecPolicy"
MO_CLASS_ID_INITIATOR_EP = "initiatorEp"
MO_CLASS_ID_INITIATOR_FC_INITIATOR_EP = "initiatorFcInitiatorEp"
MO_CLASS_ID_INITIATOR_GROUP_EP = "initiatorGroupEp"
MO_CLASS_ID_INITIATOR_ISCSI_INITIATOR_EP = "initiatorIScsiInitiatorEp"
MO_CLASS_ID_INITIATOR_INITIATOR_EP = "initiatorInitiatorEp"
MO_CLASS_ID_INITIATOR_LUN_EP = "initiatorLunEp"
MO_CLASS_ID_INITIATOR_MEMBER_EP = "initiatorMemberEp"
MO_CLASS_ID_INITIATOR_REQUESTOR_EP = "initiatorRequestorEp"
MO_CLASS_ID_INITIATOR_REQUESTOR_GRP_EP = "initiatorRequestorGrpEp"
MO_CLASS_ID_INITIATOR_STORE_EP = "initiatorStoreEp"
MO_CLASS_ID_INITIATOR_UNIT_EP = "initiatorUnitEp"
MO_CLASS_ID_IP_DNS_SUFFIX = "ipDnsSuffix"
MO_CLASS_ID_IP_IPV4_DNS = "ipIPv4Dns"
MO_CLASS_ID_IP_IPV4_WINS_SERVER = "ipIPv4WinsServer"
MO_CLASS_ID_IP_IP_V4_ADDR = "ipIpV4Addr"
MO_CLASS_ID_IP_IP_V4_STATIC_ADDR = "ipIpV4StaticAddr"
MO_CLASS_ID_IP_IP_V4_STATIC_TARGET_ADDR = "ipIpV4StaticTargetAddr"
MO_CLASS_ID_IP_SERVICE_IF = "ipServiceIf"
MO_CLASS_ID_IPPOOL_ADDR = "ippoolAddr"
MO_CLASS_ID_IPPOOL_BLOCK = "ippoolBlock"
MO_CLASS_ID_IPPOOL_IP_V6_ADDR = "ippoolIpV6Addr"
MO_CLASS_ID_IPPOOL_IP_V6_BLOCK = "ippoolIpV6Block"
MO_CLASS_ID_IPPOOL_IP_V6_POOLED = "ippoolIpV6Pooled"
MO_CLASS_ID_IPPOOL_POOL = "ippoolPool"
MO_CLASS_ID_IPPOOL_POOLABLE = "ippoolPoolable"
MO_CLASS_ID_IPPOOL_POOLED = "ippoolPooled"
MO_CLASS_ID_IPPOOL_UNIVERSE = "ippoolUniverse"
MO_CLASS_ID_IQNPOOL_ABS_BLOCK = "iqnpoolAbsBlock"
MO_CLASS_ID_IQNPOOL_ADDR = "iqnpoolAddr"
MO_CLASS_ID_IQNPOOL_BLOCK = "iqnpoolBlock"
MO_CLASS_ID_IQNPOOL_FORMAT = "iqnpoolFormat"
MO_CLASS_ID_IQNPOOL_POOL = "iqnpoolPool"
MO_CLASS_ID_IQNPOOL_POOLABLE = "iqnpoolPoolable"
MO_CLASS_ID_IQNPOOL_POOLED = "iqnpoolPooled"
MO_CLASS_ID_IQNPOOL_TRANSPORT_BLOCK = "iqnpoolTransportBlock"
MO_CLASS_ID_IQNPOOL_UNIVERSE = "iqnpoolUniverse"
MO_CLASS_ID_ISCSI_AUTH_PROFILE = "iscsiAuthProfile"
MO_CLASS_ID_LICENSE_CAP_PROVIDER = "licenseCapProvider"
MO_CLASS_ID_LICENSE_CONTENTS = "licenseContents"
MO_CLASS_ID_LICENSE_DOWNLOADER = "licenseDownloader"
MO_CLASS_ID_LICENSE_DOWNLOADER_FSM = "licenseDownloaderFsm"
MO_CLASS_ID_LICENSE_DOWNLOADER_FSM_STAGE = "licenseDownloaderFsmStage"
MO_CLASS_ID_LICENSE_DOWNLOADER_FSM_TASK = "licenseDownloaderFsmTask"
MO_CLASS_ID_LICENSE_EP = "licenseEp"
MO_CLASS_ID_LICENSE_FEATURE = "licenseFeature"
MO_CLASS_ID_LICENSE_FEATURE_CAP_PROVIDER = "licenseFeatureCapProvider"
MO_CLASS_ID_LICENSE_FEATURE_LINE = "licenseFeatureLine"
MO_CLASS_ID_LICENSE_FILE = "licenseFile"
MO_CLASS_ID_LICENSE_FILE_FSM = "licenseFileFsm"
MO_CLASS_ID_LICENSE_FILE_FSM_STAGE = "licenseFileFsmStage"
MO_CLASS_ID_LICENSE_FILE_FSM_TASK = "licenseFileFsmTask"
MO_CLASS_ID_LICENSE_INSTANCE = "licenseInstance"
MO_CLASS_ID_LICENSE_INSTANCE_FSM = "licenseInstanceFsm"
MO_CLASS_ID_LICENSE_INSTANCE_FSM_STAGE = "licenseInstanceFsmStage"
MO_CLASS_ID_LICENSE_INSTANCE_FSM_TASK = "licenseInstanceFsmTask"
MO_CLASS_ID_LICENSE_PROP = "licenseProp"
MO_CLASS_ID_LICENSE_SERVER_HOST_ID = "licenseServerHostId"
MO_CLASS_ID_LICENSE_SOURCE = "licenseSource"
MO_CLASS_ID_LICENSE_SOURCE_FILE = "licenseSourceFile"
MO_CLASS_ID_LICENSE_TARGET = "licenseTarget"
MO_CLASS_ID_LLDP_ACQUIRED = "lldpAcquired"
MO_CLASS_ID_LS_AGENT_POLICY = "lsAgentPolicy"
MO_CLASS_ID_LS_BINDING = "lsBinding"
MO_CLASS_ID_LS_COMPUTE_BINDING = "lsComputeBinding"
MO_CLASS_ID_LS_FC_LOCALE = "lsFcLocale"
MO_CLASS_ID_LS_FC_ZONE = "lsFcZone"
MO_CLASS_ID_LS_FC_ZONE_GROUP = "lsFcZoneGroup"
MO_CLASS_ID_LS_FC_ZONE_MEMBER = "lsFcZoneMember"
MO_CLASS_ID_LS_IDENTITY_INFO = "lsIdentityInfo"
MO_CLASS_ID_LS_ISSUES = "lsIssues"
MO_CLASS_ID_LS_POWER = "lsPower"
MO_CLASS_ID_LS_REQUIREMENT = "lsRequirement"
MO_CLASS_ID_LS_SERVER = "lsServer"
MO_CLASS_ID_LS_SERVER_ASSOC_CTX = "lsServerAssocCtx"
MO_CLASS_ID_LS_SERVER_EXTENSION = "lsServerExtension"
MO_CLASS_ID_LS_SERVER_FSM = "lsServerFsm"
MO_CLASS_ID_LS_SERVER_FSM_STAGE = "lsServerFsmStage"
MO_CLASS_ID_LS_SERVER_FSM_TASK = "lsServerFsmTask"
MO_CLASS_ID_LS_TIER = "lsTier"
MO_CLASS_ID_LS_UUID_HISTORY = "lsUuidHistory"
MO_CLASS_ID_LS_VCON_ASSIGN = "lsVConAssign"
MO_CLASS_ID_LS_VERSION_BEH = "lsVersionBeh"
MO_CLASS_ID_LS_ZONE_INITIATOR_MEMBER = "lsZoneInitiatorMember"
MO_CLASS_ID_LS_ZONE_TARGET_MEMBER = "lsZoneTargetMember"
MO_CLASS_ID_LSBOOT_ADEF = "lsbootADef"
MO_CLASS_ID_LSBOOT_BOOT_SECURITY = "lsbootBootSecurity"
MO_CLASS_ID_LSBOOT_CATEGORY = "lsbootCategory"
MO_CLASS_ID_LSBOOT_DEF = "lsbootDef"
MO_CLASS_ID_LSBOOT_DEFAULT_LOCAL_IMAGE = "lsbootDefaultLocalImage"
MO_CLASS_ID_LSBOOT_EMBEDDED_LOCAL_DISK_IMAGE = "lsbootEmbeddedLocalDiskImage"
MO_CLASS_ID_LSBOOT_EMBEDDED_LOCAL_DISK_IMAGE_PATH = "lsbootEmbeddedLocalDiskImagePath"
MO_CLASS_ID_LSBOOT_EMBEDDED_LOCAL_LUN_IMAGE = "lsbootEmbeddedLocalLunImage"
MO_CLASS_ID_LSBOOT_ISCSI = "lsbootIScsi"
MO_CLASS_ID_LSBOOT_ISCSI_IMAGE_PATH = "lsbootIScsiImagePath"
MO_CLASS_ID_LSBOOT_IMAGE = "lsbootImage"
MO_CLASS_ID_LSBOOT_IMAGE_PATH = "lsbootImagePath"
MO_CLASS_ID_LSBOOT_ITEM = "lsbootItem"
MO_CLASS_ID_LSBOOT_LAN = "lsbootLan"
MO_CLASS_ID_LSBOOT_LAN_IMAGE_PATH = "lsbootLanImagePath"
MO_CLASS_ID_LSBOOT_LOCAL_DISK_IMAGE = "lsbootLocalDiskImage"
MO_CLASS_ID_LSBOOT_LOCAL_DISK_IMAGE_PATH = "lsbootLocalDiskImagePath"
MO_CLASS_ID_LSBOOT_LOCAL_HDD_IMAGE = "lsbootLocalHddImage"
MO_CLASS_ID_LSBOOT_LOCAL_IMAGE = "lsbootLocalImage"
MO_CLASS_ID_LSBOOT_LOCAL_LUN_IMAGE_PATH = "lsbootLocalLunImagePath"
MO_CLASS_ID_LSBOOT_LOCAL_MEDIA_ITEM = "lsbootLocalMediaItem"
MO_CLASS_ID_LSBOOT_LOCAL_STORAGE = "lsbootLocalStorage"
MO_CLASS_ID_LSBOOT_POLICY = "lsbootPolicy"
MO_CLASS_ID_LSBOOT_REMOTE_IMAGE = "lsbootRemoteImage"
MO_CLASS_ID_LSBOOT_SAN = "lsbootSan"
MO_CLASS_ID_LSBOOT_SAN_CAT_SAN_IMAGE = "lsbootSanCatSanImage"
MO_CLASS_ID_LSBOOT_SAN_CAT_SAN_IMAGE_PATH = "lsbootSanCatSanImagePath"
MO_CLASS_ID_LSBOOT_SAN_IMAGE = "lsbootSanImage"
MO_CLASS_ID_LSBOOT_SAN_IMAGE_PATH = "lsbootSanImagePath"
MO_CLASS_ID_LSBOOT_STORAGE = "lsbootStorage"
MO_CLASS_ID_LSBOOT_UEFIBOOT_PARAM = "lsbootUEFIBootParam"
MO_CLASS_ID_LSBOOT_USB_EXTERNAL_IMAGE = "lsbootUsbExternalImage"
MO_CLASS_ID_LSBOOT_USB_FLASH_STORAGE_IMAGE = "lsbootUsbFlashStorageImage"
MO_CLASS_ID_LSBOOT_USB_INTERNAL_IMAGE = "lsbootUsbInternalImage"
MO_CLASS_ID_LSBOOT_VIRTUAL_MEDIA = "lsbootVirtualMedia"
MO_CLASS_ID_LSMAINT_ACK = "lsmaintAck"
MO_CLASS_ID_LSMAINT_MAINT_POLICY = "lsmaintMaintPolicy"
MO_CLASS_ID_LSTORAGE_CONTROLLER_DEF = "lstorageControllerDef"
MO_CLASS_ID_LSTORAGE_CONTROLLER_MODE_CONFIG = "lstorageControllerModeConfig"
MO_CLASS_ID_LSTORAGE_CONTROLLER_QUALIFIER = "lstorageControllerQualifier"
MO_CLASS_ID_LSTORAGE_DAS_SCSI_LUN = "lstorageDasScsiLun"
MO_CLASS_ID_LSTORAGE_DISK_GROUP_CONFIG = "lstorageDiskGroupConfig"
MO_CLASS_ID_LSTORAGE_DISK_GROUP_CONFIG_DEF = "lstorageDiskGroupConfigDef"
MO_CLASS_ID_LSTORAGE_DISK_GROUP_CONFIG_POLICY = "lstorageDiskGroupConfigPolicy"
MO_CLASS_ID_LSTORAGE_DISK_GROUP_QUALIFIER = "lstorageDiskGroupQualifier"
MO_CLASS_ID_LSTORAGE_ITEM = "lstorageItem"
MO_CLASS_ID_LSTORAGE_LOCAL_DISK_CONFIG_REF = "lstorageLocalDiskConfigRef"
MO_CLASS_ID_LSTORAGE_LOCAL_DISK_REF = "lstorageLocalDiskRef"
MO_CLASS_ID_LSTORAGE_PROFILE = "lstorageProfile"
MO_CLASS_ID_LSTORAGE_PROFILE_BASE = "lstorageProfileBase"
MO_CLASS_ID_LSTORAGE_PROFILE_BINDING = "lstorageProfileBinding"
MO_CLASS_ID_LSTORAGE_PROFILE_BINDING_BASE = "lstorageProfileBindingBase"
MO_CLASS_ID_LSTORAGE_PROFILE_DEF = "lstorageProfileDef"
MO_CLASS_ID_LSTORAGE_SCSI_LUN = "lstorageScsiLun"
MO_CLASS_ID_LSTORAGE_VIRTUAL_DRIVE_DEF = "lstorageVirtualDriveDef"
MO_CLASS_ID_MACPOOL_ADDR = "macpoolAddr"
MO_CLASS_ID_MACPOOL_BLOCK = "macpoolBlock"
MO_CLASS_ID_MACPOOL_FORMAT = "macpoolFormat"
MO_CLASS_ID_MACPOOL_POOL = "macpoolPool"
MO_CLASS_ID_MACPOOL_POOLABLE = "macpoolPoolable"
MO_CLASS_ID_MACPOOL_POOLED = "macpoolPooled"
MO_CLASS_ID_MACPOOL_UNIVERSE = "macpoolUniverse"
MO_CLASS_ID_MEMORY_ARRAY = "memoryArray"
MO_CLASS_ID_MEMORY_ARRAY_ENV_STATS = "memoryArrayEnvStats"
MO_CLASS_ID_MEMORY_ARRAY_ENV_STATS_HIST = "memoryArrayEnvStatsHist"
MO_CLASS_ID_MEMORY_BUFFER_UNIT = "memoryBufferUnit"
MO_CLASS_ID_MEMORY_BUFFER_UNIT_ENV_STATS = "memoryBufferUnitEnvStats"
MO_CLASS_ID_MEMORY_BUFFER_UNIT_ENV_STATS_HIST = "memoryBufferUnitEnvStatsHist"
MO_CLASS_ID_MEMORY_ERROR_STATS = "memoryErrorStats"
MO_CLASS_ID_MEMORY_QUAL = "memoryQual"
MO_CLASS_ID_MEMORY_RUNTIME = "memoryRuntime"
MO_CLASS_ID_MEMORY_RUNTIME_HIST = "memoryRuntimeHist"
MO_CLASS_ID_MEMORY_UNIT = "memoryUnit"
MO_CLASS_ID_MEMORY_UNIT_ENV_STATS = "memoryUnitEnvStats"
MO_CLASS_ID_MEMORY_UNIT_ENV_STATS_HIST = "memoryUnitEnvStatsHist"
MO_CLASS_ID_MGMT_ACCESS_POLICY = "mgmtAccessPolicy"
MO_CLASS_ID_MGMT_ACCESS_POLICY_ITEM = "mgmtAccessPolicyItem"
MO_CLASS_ID_MGMT_ACCESS_PORT = "mgmtAccessPort"
MO_CLASS_ID_MGMT_BACKUP = "mgmtBackup"
MO_CLASS_ID_MGMT_BACKUP_EXPORT_EXT_POLICY = "mgmtBackupExportExtPolicy"
MO_CLASS_ID_MGMT_BACKUP_FSM = "mgmtBackupFsm"
MO_CLASS_ID_MGMT_BACKUP_FSM_STAGE = "mgmtBackupFsmStage"
MO_CLASS_ID_MGMT_BACKUP_FSM_TASK = "mgmtBackupFsmTask"
MO_CLASS_ID_MGMT_BACKUP_POLICY = "mgmtBackupPolicy"
MO_CLASS_ID_MGMT_BACKUP_POLICY_CONFIG = "mgmtBackupPolicyConfig"
MO_CLASS_ID_MGMT_BACKUP_POLICY_FSM = "mgmtBackupPolicyFsm"
MO_CLASS_ID_MGMT_BACKUP_POLICY_FSM_STAGE = "mgmtBackupPolicyFsmStage"
MO_CLASS_ID_MGMT_CFG_EXPORT_POLICY = "mgmtCfgExportPolicy"
MO_CLASS_ID_MGMT_CFG_EXPORT_POLICY_FSM = "mgmtCfgExportPolicyFsm"
MO_CLASS_ID_MGMT_CFG_EXPORT_POLICY_FSM_STAGE = "mgmtCfgExportPolicyFsmStage"
MO_CLASS_ID_MGMT_CIMC_INTERFACE = "mgmtCimcInterface"
MO_CLASS_ID_MGMT_CIMC_SECURE_BOOT = "mgmtCimcSecureBoot"
MO_CLASS_ID_MGMT_CONNECTION = "mgmtConnection"
MO_CLASS_ID_MGMT_CONTROLLER = "mgmtController"
MO_CLASS_ID_MGMT_CONTROLLER_FSM = "mgmtControllerFsm"
MO_CLASS_ID_MGMT_CONTROLLER_FSM_STAGE = "mgmtControllerFsmStage"
MO_CLASS_ID_MGMT_CONTROLLER_FSM_TASK = "mgmtControllerFsmTask"
MO_CLASS_ID_MGMT_ENTITY = "mgmtEntity"
MO_CLASS_ID_MGMT_EXPORT_POLICY = "mgmtExportPolicy"
MO_CLASS_ID_MGMT_EXPORT_POLICY_FSM = "mgmtExportPolicyFsm"
MO_CLASS_ID_MGMT_EXPORT_POLICY_FSM_STAGE = "mgmtExportPolicyFsmStage"
MO_CLASS_ID_MGMT_EXPORT_POLICY_FSM_TASK = "mgmtExportPolicyFsmTask"
MO_CLASS_ID_MGMT_HEALTH_ATTR = "mgmtHealthAttr"
MO_CLASS_ID_MGMT_HEALTH_STATUS = "mgmtHealthStatus"
MO_CLASS_ID_MGMT_IPV6_ADDR = "mgmtIPv6Addr"
MO_CLASS_ID_MGMT_IPV6_IF_ADDR = "mgmtIPv6IfAddr"
MO_CLASS_ID_MGMT_IPV6_IF_ADDR_FSM = "mgmtIPv6IfAddrFsm"
MO_CLASS_ID_MGMT_IPV6_IF_ADDR_FSM_STAGE = "mgmtIPv6IfAddrFsmStage"
MO_CLASS_ID_MGMT_IPV6_IF_ADDR_FSM_TASK = "mgmtIPv6IfAddrFsmTask"
MO_CLASS_ID_MGMT_IPV6_IF_CONFIG = "mgmtIPv6IfConfig"
MO_CLASS_ID_MGMT_IF = "mgmtIf"
MO_CLASS_ID_MGMT_IF_FSM = "mgmtIfFsm"
MO_CLASS_ID_MGMT_IF_FSM_STAGE = "mgmtIfFsmStage"
MO_CLASS_ID_MGMT_IF_FSM_TASK = "mgmtIfFsmTask"
MO_CLASS_ID_MGMT_IMPORTER = "mgmtImporter"
MO_CLASS_ID_MGMT_IMPORTER_FSM = "mgmtImporterFsm"
MO_CLASS_ID_MGMT_IMPORTER_FSM_STAGE = "mgmtImporterFsmStage"
MO_CLASS_ID_MGMT_IMPORTER_FSM_TASK = "mgmtImporterFsmTask"
MO_CLASS_ID_MGMT_INBAND_PROFILE = "mgmtInbandProfile"
MO_CLASS_ID_MGMT_INT_AUTH_POLICY = "mgmtIntAuthPolicy"
MO_CLASS_ID_MGMT_INTERFACE = "mgmtInterface"
MO_CLASS_ID_MGMT_PMON_ENTRY = "mgmtPmonEntry"
MO_CLASS_ID_MGMT_PROF_DERIVED_INTERFACE = "mgmtProfDerivedInterface"
MO_CLASS_ID_MGMT_VNET = "mgmtVnet"
MO_CLASS_ID_MO_TOP_PROPS = "moTopProps"
MO_CLASS_ID_NAMING_NAMED_IDENTIFIED_OBJECT = "namingNamedIdentifiedObject"
MO_CLASS_ID_NAMING_NAMED_OBJECT = "namingNamedObject"
MO_CLASS_ID_NETWORK_ALAN_NEIGHBOR_ENTRY = "networkALanNeighborEntry"
MO_CLASS_ID_NETWORK_ANEIGHBOR_ENTRY = "networkANeighborEntry"
MO_CLASS_ID_NETWORK_CIO_EP = "networkCIoEp"
MO_CLASS_ID_NETWORK_CONN = "networkConn"
MO_CLASS_ID_NETWORK_DOMAIN_EP = "networkDomainEp"
MO_CLASS_ID_NETWORK_ELEMENT = "networkElement"
MO_CLASS_ID_NETWORK_EP = "networkEp"
MO_CLASS_ID_NETWORK_IF_EP = "networkIfEp"
MO_CLASS_ID_NETWORK_IF_STATS = "networkIfStats"
MO_CLASS_ID_NETWORK_LAN_NEIGHBOR_ENTRY = "networkLanNeighborEntry"
MO_CLASS_ID_NETWORK_LAN_NEIGHBORS = "networkLanNeighbors"
MO_CLASS_ID_NETWORK_LLDP_NEIGHBOR_ENTRY = "networkLldpNeighborEntry"
MO_CLASS_ID_NETWORK_LLDP_NEIGHBORS = "networkLldpNeighbors"
MO_CLASS_ID_NETWORK_OPER_LEVEL = "networkOperLevel"
MO_CLASS_ID_NETWORK_PIO_EP = "networkPIoEp"
MO_CLASS_ID_NETWORK_PHYS_EP = "networkPhysEp"
MO_CLASS_ID_NETWORK_SAN_NEIGHBOR_ENTRY = "networkSanNeighborEntry"
MO_CLASS_ID_NETWORK_SAN_NEIGHBORS = "networkSanNeighbors"
MO_CLASS_ID_NETWORK_VNET_EP = "networkVnetEp"
MO_CLASS_ID_NETWORKRULE_DEFINITION = "networkruleDefinition"
MO_CLASS_ID_NETWORKRULE_ITEM = "networkruleItem"
MO_CLASS_ID_NETWORKRULE_REQUIREMENT = "networkruleRequirement"
MO_CLASS_ID_NFS_EP = "nfsEp"
MO_CLASS_ID_NFS_MOUNT_DEF = "nfsMountDef"
MO_CLASS_ID_NFS_MOUNT_DEF_FSM = "nfsMountDefFsm"
MO_CLASS_ID_NFS_MOUNT_DEF_FSM_STAGE = "nfsMountDefFsmStage"
MO_CLASS_ID_NFS_MOUNT_DEF_FSM_TASK = "nfsMountDefFsmTask"
MO_CLASS_ID_NFS_MOUNT_INST = "nfsMountInst"
MO_CLASS_ID_NFS_MOUNT_INST_FSM = "nfsMountInstFsm"
MO_CLASS_ID_NFS_MOUNT_INST_FSM_STAGE = "nfsMountInstFsmStage"
MO_CLASS_ID_NFS_MOUNT_INST_FSM_TASK = "nfsMountInstFsmTask"
MO_CLASS_ID_NWCTRL_DEFINITION = "nwctrlDefinition"
MO_CLASS_ID_OBSERVE_FILTER = "observeFilter"
MO_CLASS_ID_OBSERVE_OBSERVED = "observeObserved"
MO_CLASS_ID_OBSERVE_OBSERVED_CONT = "observeObservedCont"
MO_CLASS_ID_OBSERVE_OBSERVED_FSM = "observeObservedFsm"
MO_CLASS_ID_OBSERVE_OBSERVED_FSM_STAGE = "observeObservedFsmStage"
MO_CLASS_ID_OBSERVE_OBSERVED_FSM_TASK = "observeObservedFsmTask"
MO_CLASS_ID_ORG_ORG = "orgOrg"
MO_CLASS_ID_ORG_SOURCE_MASK = "orgSourceMask"
MO_CLASS_ID_OS_ARPLINK_MONITORING_POLICY = "osARPLinkMonitoringPolicy"
MO_CLASS_ID_OS_ARPTARGET = "osARPTarget"
MO_CLASS_ID_OS_AGENT = "osAgent"
MO_CLASS_ID_OS_ETH_BOND_INTF = "osEthBondIntf"
MO_CLASS_ID_OS_ETH_BOND_MODE = "osEthBondMode"
MO_CLASS_ID_OS_ETH_BOND_MODE_ACTIVE_BACKUP = "osEthBondModeActiveBackup"
MO_CLASS_ID_OS_ETH_BOND_MODE_BALANCED_ALB = "osEthBondModeBalancedALB"
MO_CLASS_ID_OS_ETH_BOND_MODE_BALANCED_RR = "osEthBondModeBalancedRR"
MO_CLASS_ID_OS_ETH_BOND_MODE_BALANCED_TLB = "osEthBondModeBalancedTLB"
MO_CLASS_ID_OS_ETH_BOND_MODE_BALANCED_XOR = "osEthBondModeBalancedXOR"
MO_CLASS_ID_OS_ETH_BOND_MODE_BROADCAST = "osEthBondModeBroadcast"
MO_CLASS_ID_OS_ETH_BOND_MODE_LB = "osEthBondModeLB"
MO_CLASS_ID_OS_ETH_INTF = "osEthIntf"
MO_CLASS_ID_OS_INSTANCE = "osInstance"
MO_CLASS_ID_OS_INTF = "osIntf"
MO_CLASS_ID_OS_LINK_MONITORING_POLICY = "osLinkMonitoringPolicy"
MO_CLASS_ID_OS_MII_LINK_MONITORING_POLICY = "osMiiLinkMonitoringPolicy"
MO_CLASS_ID_OS_PRIMARY_SLAVE = "osPrimarySlave"
MO_CLASS_ID_PCI_CARD = "pciCard"
MO_CLASS_ID_PCI_EQUIP_SLOT = "pciEquipSlot"
MO_CLASS_ID_PCI_UNIT = "pciUnit"
MO_CLASS_ID_PKI_CERT_REQ = "pkiCertReq"
MO_CLASS_ID_PKI_DEFINITION = "pkiDefinition"
MO_CLASS_ID_PKI_EP = "pkiEp"
MO_CLASS_ID_PKI_EP_FSM = "pkiEpFsm"
MO_CLASS_ID_PKI_EP_FSM_STAGE = "pkiEpFsmStage"
MO_CLASS_ID_PKI_EP_FSM_TASK = "pkiEpFsmTask"
MO_CLASS_ID_PKI_ITEM = "pkiItem"
MO_CLASS_ID_PKI_KEY_RING = "pkiKeyRing"
MO_CLASS_ID_PKI_TP = "pkiTP"
MO_CLASS_ID_POLICY_BINDING = "policyBinding"
MO_CLASS_ID_POLICY_CENTRALE_SYNC = "policyCentraleSync"
MO_CLASS_ID_POLICY_COMMUNICATION = "policyCommunication"
MO_CLASS_ID_POLICY_CONFIG_BACKUP = "policyConfigBackup"
MO_CLASS_ID_POLICY_CONTROL = "policyControl"
MO_CLASS_ID_POLICY_CONTROL_EP = "policyControlEp"
MO_CLASS_ID_POLICY_CONTROL_EP_FSM = "policyControlEpFsm"
MO_CLASS_ID_POLICY_CONTROL_EP_FSM_STAGE = "policyControlEpFsmStage"
MO_CLASS_ID_POLICY_CONTROL_EP_FSM_TASK = "policyControlEpFsmTask"
MO_CLASS_ID_POLICY_CONTROLLED = "policyControlled"
MO_CLASS_ID_POLICY_CONTROLLED_INSTANCE = "policyControlledInstance"
MO_CLASS_ID_POLICY_CONTROLLED_TYPE = "policyControlledType"
MO_CLASS_ID_POLICY_CONTROLLED_TYPE_FSM = "policyControlledTypeFsm"
MO_CLASS_ID_POLICY_CONTROLLED_TYPE_FSM_STAGE = "policyControlledTypeFsmStage"
MO_CLASS_ID_POLICY_CONTROLLED_TYPE_FSM_TASK = "policyControlledTypeFsmTask"
MO_CLASS_ID_POLICY_DATE_TIME = "policyDateTime"
MO_CLASS_ID_POLICY_DEFINITION = "policyDefinition"
MO_CLASS_ID_POLICY_DIGEST = "policyDigest"
MO_CLASS_ID_POLICY_DISCOVERY = "policyDiscovery"
MO_CLASS_ID_POLICY_DNS = "policyDns"
MO_CLASS_ID_POLICY_ELEMENT = "policyElement"
MO_CLASS_ID_POLICY_EQUIPMENT = "policyEquipment"
MO_CLASS_ID_POLICY_FAULT = "policyFault"
MO_CLASS_ID_POLICY_HOLDER = "policyHolder"
MO_CLASS_ID_POLICY_ID_RESOLVE_POLICY = "policyIdResolvePolicy"
MO_CLASS_ID_POLICY_INFRA_FIRMWARE = "policyInfraFirmware"
MO_CLASS_ID_POLICY_ITEM = "policyItem"
MO_CLASS_ID_POLICY_LOCAL_MAP = "policyLocalMap"
MO_CLASS_ID_POLICY_MEP = "policyMEp"
MO_CLASS_ID_POLICY_MONITORING = "policyMonitoring"
MO_CLASS_ID_POLICY_OBJECT = "policyObject"
MO_CLASS_ID_POLICY_POLICY_EP = "policyPolicyEp"
MO_CLASS_ID_POLICY_POLICY_REQUESTOR = "policyPolicyRequestor"
MO_CLASS_ID_POLICY_POLICY_SCOPE = "policyPolicyScope"
MO_CLASS_ID_POLICY_POLICY_SCOPE_CONT = "policyPolicyScopeCont"
MO_CLASS_ID_POLICY_POLICY_SCOPE_CONTEXT = "policyPolicyScopeContext"
MO_CLASS_ID_POLICY_POLICY_SCOPE_FSM = "policyPolicyScopeFsm"
MO_CLASS_ID_POLICY_POLICY_SCOPE_FSM_STAGE = "policyPolicyScopeFsmStage"
MO_CLASS_ID_POLICY_POLICY_SCOPE_FSM_TASK = "policyPolicyScopeFsmTask"
MO_CLASS_ID_POLICY_PORT_CONFIG = "policyPortConfig"
MO_CLASS_ID_POLICY_POWER_MGMT = "policyPowerMgmt"
MO_CLASS_ID_POLICY_PSU = "policyPsu"
MO_CLASS_ID_POLICY_REF_REQ = "policyRefReq"
MO_CLASS_ID_POLICY_SECURITY = "policySecurity"
MO_CLASS_ID_POLICY_STORAGE_AUTO_CONFIG = "policyStorageAutoConfig"
MO_CLASS_ID_POLICY_SYSTEM_EP = "policySystemEp"
MO_CLASS_ID_POOL_ELEMENT = "poolElement"
MO_CLASS_ID_POOL_POOL = "poolPool"
MO_CLASS_ID_POOL_POOL_MEMBER = "poolPoolMember"
MO_CLASS_ID_POOL_POOLABLE = "poolPoolable"
MO_CLASS_ID_POOL_UNIVERSE = "poolUniverse"
MO_CLASS_ID_PORT_DOMAIN_EP = "portDomainEp"
MO_CLASS_ID_PORT_GROUP = "portGroup"
MO_CLASS_ID_PORT_INT_FIO = "portIntFIo"
MO_CLASS_ID_PORT_PIO = "portPIo"
MO_CLASS_ID_PORT_PIO_FSM = "portPIoFsm"
MO_CLASS_ID_PORT_PIO_FSM_STAGE = "portPIoFsmStage"
MO_CLASS_ID_PORT_PIO_FSM_TASK = "portPIoFsmTask"
MO_CLASS_ID_PORT_PHYS_SWITCH_IO = "portPhysSwitchIo"
MO_CLASS_ID_PORT_SERVER_INT_FIO = "portServerIntFIo"
MO_CLASS_ID_PORT_SUB_GROUP = "portSubGroup"
MO_CLASS_ID_PORT_SWITCH_INT_FIO = "portSwitchIntFIo"
MO_CLASS_ID_PORT_TRUST_MODE = "portTrustMode"
MO_CLASS_ID_POWER_ABUDGET = "powerABudget"
MO_CLASS_ID_POWER_AGROUP = "powerAGroup"
MO_CLASS_ID_POWER_BUDGET = "powerBudget"
MO_CLASS_ID_POWER_CHASSIS_MEMBER = "powerChassisMember"
MO_CLASS_ID_POWER_EP = "powerEp"
MO_CLASS_ID_POWER_GROUP = "powerGroup"
MO_CLASS_ID_POWER_GROUP_ADDITION_POLICY = "powerGroupAdditionPolicy"
MO_CLASS_ID_POWER_GROUP_MEMBER = "powerGroupMember"
MO_CLASS_ID_POWER_GROUP_QUAL = "powerGroupQual"
MO_CLASS_ID_POWER_GROUP_STATS = "powerGroupStats"
MO_CLASS_ID_POWER_GROUP_STATS_HIST = "powerGroupStatsHist"
MO_CLASS_ID_POWER_MGMT_POLICY = "powerMgmtPolicy"
MO_CLASS_ID_POWER_PLACEMENT = "powerPlacement"
MO_CLASS_ID_POWER_POLICY = "powerPolicy"
MO_CLASS_ID_POWER_PRIO_WGHT = "powerPrioWght"
MO_CLASS_ID_POWER_PROFILED_POWER = "powerProfiledPower"
MO_CLASS_ID_POWER_RACK_UNIT_MEMBER = "powerRackUnitMember"
MO_CLASS_ID_PROC_DOER = "procDoer"
MO_CLASS_ID_PROC_MANAGER = "procManager"
MO_CLASS_ID_PROC_PROC_COUNTS = "procProcCounts"
MO_CLASS_ID_PROC_PROCS = "procProcs"
MO_CLASS_ID_PROC_PRT = "procPrt"
MO_CLASS_ID_PROC_PRT_COUNTS = "procPrtCounts"
MO_CLASS_ID_PROC_STIMULUS_COUNTS = "procStimulusCounts"
MO_CLASS_ID_PROC_SVC = "procSvc"
MO_CLASS_ID_PROC_TX_COUNTS = "procTxCounts"
MO_CLASS_ID_PROCESSOR_COMPONENT = "processorComponent"
MO_CLASS_ID_PROCESSOR_CORE = "processorCore"
MO_CLASS_ID_PROCESSOR_ENV_STATS = "processorEnvStats"
MO_CLASS_ID_PROCESSOR_ENV_STATS_HIST = "processorEnvStatsHist"
MO_CLASS_ID_PROCESSOR_ERROR_STATS = "processorErrorStats"
MO_CLASS_ID_PROCESSOR_QUAL = "processorQual"
MO_CLASS_ID_PROCESSOR_RUNTIME = "processorRuntime"
MO_CLASS_ID_PROCESSOR_RUNTIME_HIST = "processorRuntimeHist"
MO_CLASS_ID_PROCESSOR_THREAD = "processorThread"
MO_CLASS_ID_PROCESSOR_UNIT = "processorUnit"
MO_CLASS_ID_PROCESSOR_UNIT_ASSOC_CTX = "processorUnitAssocCtx"
MO_CLASS_ID_QOS_DEFINITION = "qosDefinition"
MO_CLASS_ID_QOS_ITEM = "qosItem"
MO_CLASS_ID_QOSCLASS_DEFINITION = "qosclassDefinition"
MO_CLASS_ID_QOSCLASS_DEFINITION_FSM = "qosclassDefinitionFsm"
MO_CLASS_ID_QOSCLASS_DEFINITION_FSM_STAGE = "qosclassDefinitionFsmStage"
MO_CLASS_ID_QOSCLASS_DEFINITION_FSM_TASK = "qosclassDefinitionFsmTask"
MO_CLASS_ID_QOSCLASS_ETH = "qosclassEth"
MO_CLASS_ID_QOSCLASS_ETH_BE = "qosclassEthBE"
MO_CLASS_ID_QOSCLASS_ETH_CLASSIFIED = "qosclassEthClassified"
MO_CLASS_ID_QOSCLASS_FC = "qosclassFc"
MO_CLASS_ID_QOSCLASS_ITEM = "qosclassItem"
MO_CLASS_ID_QUERYRESULT_DEPENDENCY = "queryresultDependency"
MO_CLASS_ID_QUERYRESULT_ITEM = "queryresultItem"
MO_CLASS_ID_QUERYRESULT_USAGE = "queryresultUsage"
MO_CLASS_ID_RULE_DEFINITION = "ruleDefinition"
MO_CLASS_ID_RULE_ITEM = "ruleItem"
MO_CLASS_ID_RULE_REQUIREMENT = "ruleRequirement"
MO_CLASS_ID_RULE_SIZE_REQUIREMENT = "ruleSizeRequirement"
MO_CLASS_ID_SECURITY_UNIT = "securityUnit"
MO_CLASS_ID_SOL_CONFIG = "solConfig"
MO_CLASS_ID_SOL_DEF = "solDef"
MO_CLASS_ID_SOL_IF = "solIf"
MO_CLASS_ID_SOL_POLICY = "solPolicy"
MO_CLASS_ID_STATS_COLLECTION_POLICY = "statsCollectionPolicy"
MO_CLASS_ID_STATS_COLLECTION_POLICY_FSM = "statsCollectionPolicyFsm"
MO_CLASS_ID_STATS_COLLECTION_POLICY_FSM_STAGE = "statsCollectionPolicyFsmStage"
MO_CLASS_ID_STATS_COLLECTION_POLICY_FSM_TASK = "statsCollectionPolicyFsmTask"
MO_CLASS_ID_STATS_CURR = "statsCurr"
MO_CLASS_ID_STATS_HIST = "statsHist"
MO_CLASS_ID_STATS_HOLDER = "statsHolder"
MO_CLASS_ID_STATS_ITEM = "statsItem"
MO_CLASS_ID_STATS_THR32_DEFINITION = "statsThr32Definition"
MO_CLASS_ID_STATS_THR32_VALUE = "statsThr32Value"
MO_CLASS_ID_STATS_THR64_DEFINITION = "statsThr64Definition"
MO_CLASS_ID_STATS_THR64_VALUE = "statsThr64Value"
MO_CLASS_ID_STATS_THR_FLOAT_DEFINITION = "statsThrFloatDefinition"
MO_CLASS_ID_STATS_THR_FLOAT_VALUE = "statsThrFloatValue"
MO_CLASS_ID_STATS_THRESHOLD_CLASS = "statsThresholdClass"
MO_CLASS_ID_STATS_THRESHOLD_DEFINITION = "statsThresholdDefinition"
MO_CLASS_ID_STATS_THRESHOLD_POLICY = "statsThresholdPolicy"
MO_CLASS_ID_STATS_THRESHOLD_VALUE = "statsThresholdValue"
MO_CLASS_ID_STORAGE_ADEF = "storageADef"
MO_CLASS_ID_STORAGE_ALUN = "storageALun"
MO_CLASS_ID_STORAGE_ALUN_REF_BASE = "storageALunRefBase"
MO_CLASS_ID_STORAGE_AUTH_KEY = "storageAuthKey"
MO_CLASS_ID_STORAGE_CONNECTION_DEF = "storageConnectionDef"
MO_CLASS_ID_STORAGE_CONNECTION_POLICY = "storageConnectionPolicy"
MO_CLASS_ID_STORAGE_CONTROLLER = "storageController"
MO_CLASS_ID_STORAGE_CONTROLLER_BASE = "storageControllerBase"
MO_CLASS_ID_STORAGE_DEVICE = "storageDevice"
MO_CLASS_ID_STORAGE_DISK_ENV_STATS = "storageDiskEnvStats"
MO_CLASS_ID_STORAGE_DISK_ENV_STATS_HIST = "storageDiskEnvStatsHist"
MO_CLASS_ID_STORAGE_DOMAIN_EP = "storageDomainEp"
MO_CLASS_ID_STORAGE_DRIVE = "storageDrive"
MO_CLASS_ID_STORAGE_ENCLOSURE = "storageEnclosure"
MO_CLASS_ID_STORAGE_EP = "storageEp"
MO_CLASS_ID_STORAGE_EP_USER = "storageEpUser"
MO_CLASS_ID_STORAGE_ETHER_IF = "storageEtherIf"
MO_CLASS_ID_STORAGE_FC_IF = "storageFcIf"
MO_CLASS_ID_STORAGE_FC_TARGET_EP = "storageFcTargetEp"
MO_CLASS_ID_STORAGE_FC_TARGET_IF = "storageFcTargetIf"
MO_CLASS_ID_STORAGE_FLEX_FLASH_CARD = "storageFlexFlashCard"
MO_CLASS_ID_STORAGE_FLEX_FLASH_CONTROLLER = "storageFlexFlashController"
MO_CLASS_ID_STORAGE_FLEX_FLASH_CONTROLLER_FSM = "storageFlexFlashControllerFsm"
MO_CLASS_ID_STORAGE_FLEX_FLASH_CONTROLLER_FSM_STAGE = "storageFlexFlashControllerFsmStage"
MO_CLASS_ID_STORAGE_FLEX_FLASH_CONTROLLER_FSM_TASK = "storageFlexFlashControllerFsmTask"
MO_CLASS_ID_STORAGE_FLEX_FLASH_DRIVE = "storageFlexFlashDrive"
MO_CLASS_ID_STORAGE_FLEX_FLASH_VIRTUAL_DRIVE = "storageFlexFlashVirtualDrive"
MO_CLASS_ID_STORAGE_ISCSI_TARGET_IF = "storageIScsiTargetIf"
MO_CLASS_ID_STORAGE_INI_GROUP = "storageIniGroup"
MO_CLASS_ID_STORAGE_INITIATOR = "storageInitiator"
MO_CLASS_ID_STORAGE_ITEM = "storageItem"
MO_CLASS_ID_STORAGE_L2_IF = "storageL2If"
MO_CLASS_ID_STORAGE_LOCAL_DISK = "storageLocalDisk"
MO_CLASS_ID_STORAGE_LOCAL_DISK_BASE = "storageLocalDiskBase"
MO_CLASS_ID_STORAGE_LOCAL_DISK_CONFIG = "storageLocalDiskConfig"
MO_CLASS_ID_STORAGE_LOCAL_DISK_CONFIG_DEF = "storageLocalDiskConfigDef"
MO_CLASS_ID_STORAGE_LOCAL_DISK_CONFIG_POLICY = "storageLocalDiskConfigPolicy"
MO_CLASS_ID_STORAGE_LOCAL_DISK_PARTITION = "storageLocalDiskPartition"
MO_CLASS_ID_STORAGE_LOCAL_DISK_SLOT_EP = "storageLocalDiskSlotEp"
MO_CLASS_ID_STORAGE_LOCAL_LUN = "storageLocalLun"
MO_CLASS_ID_STORAGE_LOGICAL = "storageLogical"
MO_CLASS_ID_STORAGE_LUN_DISK = "storageLunDisk"
MO_CLASS_ID_STORAGE_LUN_RESOURCE_SELECTION_LOG = "storageLunResourceSelectionLog"
MO_CLASS_ID_STORAGE_MEZZ_FLASH_LIFE = "storageMezzFlashLife"
MO_CLASS_ID_STORAGE_NODE_EP = "storageNodeEp"
MO_CLASS_ID_STORAGE_OPERATION = "storageOperation"
MO_CLASS_ID_STORAGE_PHYSICAL = "storagePhysical"
MO_CLASS_ID_STORAGE_QUAL = "storageQual"
MO_CLASS_ID_STORAGE_RAID_BATTERY = "storageRaidBattery"
MO_CLASS_ID_STORAGE_SAS_EXPANDER = "storageSasExpander"
MO_CLASS_ID_STORAGE_SCSI_LUN_REF = "storageScsiLunRef"
MO_CLASS_ID_STORAGE_SYSTEM = "storageSystem"
MO_CLASS_ID_STORAGE_SYSTEM_FSM = "storageSystemFsm"
MO_CLASS_ID_STORAGE_SYSTEM_FSM_STAGE = "storageSystemFsmStage"
MO_CLASS_ID_STORAGE_SYSTEM_FSM_TASK = "storageSystemFsmTask"
MO_CLASS_ID_STORAGE_TARGET = "storageTarget"
MO_CLASS_ID_STORAGE_TARGET_IF = "storageTargetIf"
MO_CLASS_ID_STORAGE_TRANSPORT_IF = "storageTransportIf"
MO_CLASS_ID_STORAGE_TRANSPORTABLE_FLASH_MODULE = "storageTransportableFlashModule"
MO_CLASS_ID_STORAGE_UNIT = "storageUnit"
MO_CLASS_ID_STORAGE_VDMEMBER_EP = "storageVDMemberEp"
MO_CLASS_ID_STORAGE_VIRTUAL_DRIVE = "storageVirtualDrive"
MO_CLASS_ID_STORAGE_VIRTUAL_DRIVE_REF = "storageVirtualDriveRef"
MO_CLASS_ID_STORAGE_VSAN_REF = "storageVsanRef"
MO_CLASS_ID_SW_ACCESS_DOMAIN = "swAccessDomain"
MO_CLASS_ID_SW_ACCESS_DOMAIN_FSM = "swAccessDomainFsm"
MO_CLASS_ID_SW_ACCESS_DOMAIN_FSM_STAGE = "swAccessDomainFsmStage"
MO_CLASS_ID_SW_ACCESS_DOMAIN_FSM_TASK = "swAccessDomainFsmTask"
MO_CLASS_ID_SW_ACCESS_EP = "swAccessEp"
MO_CLASS_ID_SW_BORDER_DOMAIN = "swBorderDomain"
MO_CLASS_ID_SW_BORDER_EP = "swBorderEp"
MO_CLASS_ID_SW_BORDER_PC = "swBorderPc"
MO_CLASS_ID_SW_CIO_EP = "swCIoEp"
MO_CLASS_ID_SW_CARD_ENV_STATS = "swCardEnvStats"
MO_CLASS_ID_SW_CARD_ENV_STATS_HIST = "swCardEnvStatsHist"
MO_CLASS_ID_SW_CMCLAN = "swCmclan"
MO_CLASS_ID_SW_DOMAIN = "swDomain"
MO_CLASS_ID_SW_ENV_STATS = "swEnvStats"
MO_CLASS_ID_SW_ENV_STATS_HIST = "swEnvStatsHist"
MO_CLASS_ID_SW_ESTC_EP = "swEstcEp"
MO_CLASS_ID_SW_ETH_ESTC_EP = "swEthEstcEp"
MO_CLASS_ID_SW_ETH_ESTC_PC = "swEthEstcPc"
MO_CLASS_ID_SW_ETH_FLOW_MON_SESSION = "swEthFlowMonSession"
MO_CLASS_ID_SW_ETH_LAN_BORDER = "swEthLanBorder"
MO_CLASS_ID_SW_ETH_LAN_BORDER_FSM = "swEthLanBorderFsm"
MO_CLASS_ID_SW_ETH_LAN_BORDER_FSM_STAGE = "swEthLanBorderFsmStage"
MO_CLASS_ID_SW_ETH_LAN_BORDER_FSM_TASK = "swEthLanBorderFsmTask"
MO_CLASS_ID_SW_ETH_LAN_EP = "swEthLanEp"
MO_CLASS_ID_SW_ETH_LAN_FLOW_MON = "swEthLanFlowMon"
MO_CLASS_ID_SW_ETH_LAN_FLOW_MON_EXPORTER = "swEthLanFlowMonExporter"
MO_CLASS_ID_SW_ETH_LAN_FLOW_MON_FSM = "swEthLanFlowMonFsm"
MO_CLASS_ID_SW_ETH_LAN_FLOW_MON_FSM_STAGE = "swEthLanFlowMonFsmStage"
MO_CLASS_ID_SW_ETH_LAN_FLOW_MON_FSM_TASK = "swEthLanFlowMonFsmTask"
MO_CLASS_ID_SW_ETH_LAN_FLOW_MONITOR = "swEthLanFlowMonitor"
MO_CLASS_ID_SW_ETH_LAN_FLOW_RECORD_DEF = "swEthLanFlowRecordDef"
MO_CLASS_ID_SW_ETH_LAN_MON = "swEthLanMon"
MO_CLASS_ID_SW_ETH_LAN_PC = "swEthLanPc"
MO_CLASS_ID_SW_ETH_MON = "swEthMon"
MO_CLASS_ID_SW_ETH_MON_DEST_EP = "swEthMonDestEp"
MO_CLASS_ID_SW_ETH_MON_FSM = "swEthMonFsm"
MO_CLASS_ID_SW_ETH_MON_FSM_STAGE = "swEthMonFsmStage"
MO_CLASS_ID_SW_ETH_MON_FSM_TASK = "swEthMonFsmTask"
MO_CLASS_ID_SW_ETH_MON_SRC_EP = "swEthMonSrcEp"
MO_CLASS_ID_SW_ETH_TARGET_EP = "swEthTargetEp"
MO_CLASS_ID_SW_EXT_UTILITY = "swExtUtility"
MO_CLASS_ID_SW_EXT_UTILITY_FSM = "swExtUtilityFsm"
MO_CLASS_ID_SW_EXT_UTILITY_FSM_STAGE = "swExtUtilityFsmStage"
MO_CLASS_ID_SW_EXT_UTILITY_FSM_TASK = "swExtUtilityFsmTask"
MO_CLASS_ID_SW_FABRIC_ZONE_NS = "swFabricZoneNs"
MO_CLASS_ID_SW_FABRIC_ZONE_NS_OVERRIDE = "swFabricZoneNsOverride"
MO_CLASS_ID_SW_FC_ESTC_EP = "swFcEstcEp"
MO_CLASS_ID_SW_FC_MON = "swFcMon"
MO_CLASS_ID_SW_FC_MON_DEST_EP = "swFcMonDestEp"
MO_CLASS_ID_SW_FC_MON_FSM = "swFcMonFsm"
MO_CLASS_ID_SW_FC_MON_FSM_STAGE = "swFcMonFsmStage"
MO_CLASS_ID_SW_FC_MON_FSM_TASK = "swFcMonFsmTask"
MO_CLASS_ID_SW_FC_MON_SRC_EP = "swFcMonSrcEp"
MO_CLASS_ID_SW_FC_SAN_BORDER = "swFcSanBorder"
MO_CLASS_ID_SW_FC_SAN_BORDER_FSM = "swFcSanBorderFsm"
MO_CLASS_ID_SW_FC_SAN_BORDER_FSM_STAGE = "swFcSanBorderFsmStage"
MO_CLASS_ID_SW_FC_SAN_BORDER_FSM_TASK = "swFcSanBorderFsmTask"
MO_CLASS_ID_SW_FC_SAN_EP = "swFcSanEp"
MO_CLASS_ID_SW_FC_SAN_MON = "swFcSanMon"
MO_CLASS_ID_SW_FC_SAN_PC = "swFcSanPc"
MO_CLASS_ID_SW_FC_SERVER_ZONE_GROUP = "swFcServerZoneGroup"
MO_CLASS_ID_SW_FC_ZONE = "swFcZone"
MO_CLASS_ID_SW_FC_ZONE_MEMBER = "swFcZoneMember"
MO_CLASS_ID_SW_FC_ZONE_SET = "swFcZoneSet"
MO_CLASS_ID_SW_FCOE_ESTC_EP = "swFcoeEstcEp"
MO_CLASS_ID_SW_FCOE_SAN_EP = "swFcoeSanEp"
MO_CLASS_ID_SW_FCOE_SAN_PC = "swFcoeSanPc"
MO_CLASS_ID_SW_FLOW_MON_EXPORTER = "swFlowMonExporter"
MO_CLASS_ID_SW_FLOW_MONITOR = "swFlowMonitor"
MO_CLASS_ID_SW_FLOW_RECORD_DEF = "swFlowRecordDef"
MO_CLASS_ID_SW_IP_ROUTE = "swIpRoute"
MO_CLASS_ID_SW_LAN_BORDER = "swLanBorder"
MO_CLASS_ID_SW_LAN_EP = "swLanEp"
MO_CLASS_ID_SW_LAN_MON = "swLanMon"
MO_CLASS_ID_SW_LAN_PC = "swLanPc"
MO_CLASS_ID_SW_MON = "swMon"
MO_CLASS_ID_SW_MON_DEST_EP = "swMonDestEp"
MO_CLASS_ID_SW_MON_DOMAIN = "swMonDomain"
MO_CLASS_ID_SW_MON_SRC_EP = "swMonSrcEp"
MO_CLASS_ID_SW_NFEXPORTER_REF = "swNFExporterRef"
MO_CLASS_ID_SW_NETFLOW_EXPORTER = "swNetflowExporter"
MO_CLASS_ID_SW_NETFLOW_MON_SESSION = "swNetflowMonSession"
MO_CLASS_ID_SW_NETFLOW_MONITOR = "swNetflowMonitor"
MO_CLASS_ID_SW_NETFLOW_MONITOR_REF = "swNetflowMonitorRef"
MO_CLASS_ID_SW_NETFLOW_RECORD_DEF = "swNetflowRecordDef"
MO_CLASS_ID_SW_PIO_EP = "swPIoEp"
MO_CLASS_ID_SW_PHYS = "swPhys"
MO_CLASS_ID_SW_PHYS_ETHER_EP = "swPhysEtherEp"
MO_CLASS_ID_SW_PHYS_FC_EP = "swPhysFcEp"
MO_CLASS_ID_SW_PHYS_FSM = "swPhysFsm"
MO_CLASS_ID_SW_PHYS_FSM_STAGE = "swPhysFsmStage"
MO_CLASS_ID_SW_PHYS_FSM_TASK = "swPhysFsmTask"
MO_CLASS_ID_SW_PORT_BREAKOUT = "swPortBreakout"
MO_CLASS_ID_SW_SAN_BORDER = "swSanBorder"
MO_CLASS_ID_SW_SAN_EP = "swSanEp"
MO_CLASS_ID_SW_SAN_MON = "swSanMon"
MO_CLASS_ID_SW_SAN_PC = "swSanPc"
MO_CLASS_ID_SW_SUB_GROUP = "swSubGroup"
MO_CLASS_ID_SW_SYSTEM_STATS = "swSystemStats"
MO_CLASS_ID_SW_SYSTEM_STATS_HIST = "swSystemStatsHist"
MO_CLASS_ID_SW_TARGET_EP = "swTargetEp"
MO_CLASS_ID_SW_ULAN = "swUlan"
MO_CLASS_ID_SW_UTILITY_DOMAIN = "swUtilityDomain"
MO_CLASS_ID_SW_UTILITY_DOMAIN_FSM = "swUtilityDomainFsm"
MO_CLASS_ID_SW_UTILITY_DOMAIN_FSM_STAGE = "swUtilityDomainFsmStage"
MO_CLASS_ID_SW_UTILITY_DOMAIN_FSM_TASK = "swUtilityDomainFsmTask"
MO_CLASS_ID_SW_VIFREF = "swVIFRef"
MO_CLASS_ID_SW_VIRT_L3_INTF = "swVirtL3Intf"
MO_CLASS_ID_SW_VLAN = "swVlan"
MO_CLASS_ID_SW_VLAN_GROUP = "swVlanGroup"
MO_CLASS_ID_SW_VLAN_PORT_NS = "swVlanPortNs"
MO_CLASS_ID_SW_VLAN_PORT_NS_OVERRIDE = "swVlanPortNsOverride"
MO_CLASS_ID_SW_VLAN_REF = "swVlanRef"
MO_CLASS_ID_SW_VSAN = "swVsan"
MO_CLASS_ID_SW_ZONE_INITIATOR_MEMBER = "swZoneInitiatorMember"
MO_CLASS_ID_SW_ZONE_TARGET_MEMBER = "swZoneTargetMember"
MO_CLASS_ID_SWAT_ACTION = "swatAction"
MO_CLASS_ID_SWAT_CONDITION = "swatCondition"
MO_CLASS_ID_SWAT_INJECTION = "swatInjection"
MO_CLASS_ID_SWAT_RESULTSTATS = "swatResultstats"
MO_CLASS_ID_SWAT_TARGET = "swatTarget"
MO_CLASS_ID_SWAT_TRIGGER = "swatTrigger"
MO_CLASS_ID_SYNTHETIC_DIRECTORY = "syntheticDirectory"
MO_CLASS_ID_SYNTHETIC_FILE = "syntheticFile"
MO_CLASS_ID_SYNTHETIC_FILE_SYSTEM = "syntheticFileSystem"
MO_CLASS_ID_SYNTHETIC_FS_OBJ = "syntheticFsObj"
MO_CLASS_ID_SYNTHETIC_FS_OBJ_FSM = "syntheticFsObjFsm"
MO_CLASS_ID_SYNTHETIC_FS_OBJ_FSM_STAGE = "syntheticFsObjFsmStage"
MO_CLASS_ID_SYNTHETIC_FS_OBJ_FSM_TASK = "syntheticFsObjFsmTask"
MO_CLASS_ID_SYNTHETIC_TIME = "syntheticTime"
MO_CLASS_ID_SYSDEBUG_AUTO_CORE_FILE_EXPORT_TARGET = "sysdebugAutoCoreFileExportTarget"
MO_CLASS_ID_SYSDEBUG_AUTO_CORE_FILE_EXPORT_TARGET_FSM = "sysdebugAutoCoreFileExportTargetFsm"
MO_CLASS_ID_SYSDEBUG_AUTO_CORE_FILE_EXPORT_TARGET_FSM_STAGE = "sysdebugAutoCoreFileExportTargetFsmStage"
MO_CLASS_ID_SYSDEBUG_AUTO_CORE_FILE_EXPORT_TARGET_FSM_TASK = "sysdebugAutoCoreFileExportTargetFsmTask"
MO_CLASS_ID_SYSDEBUG_BACKUP_BEHAVIOR = "sysdebugBackupBehavior"
MO_CLASS_ID_SYSDEBUG_CORE = "sysdebugCore"
MO_CLASS_ID_SYSDEBUG_CORE_FILE_EXPORT_TARGET = "sysdebugCoreFileExportTarget"
MO_CLASS_ID_SYSDEBUG_CORE_FILE_REPOSITORY = "sysdebugCoreFileRepository"
MO_CLASS_ID_SYSDEBUG_CORE_FSM = "sysdebugCoreFsm"
MO_CLASS_ID_SYSDEBUG_CORE_FSM_STAGE = "sysdebugCoreFsmStage"
MO_CLASS_ID_SYSDEBUG_CORE_FSM_TASK = "sysdebugCoreFsmTask"
MO_CLASS_ID_SYSDEBUG_EP = "sysdebugEp"
MO_CLASS_ID_SYSDEBUG_EXPORTER = "sysdebugExporter"
MO_CLASS_ID_SYSDEBUG_FILE = "sysdebugFile"
MO_CLASS_ID_SYSDEBUG_LOG_BEHAVIOR = "sysdebugLogBehavior"
MO_CLASS_ID_SYSDEBUG_LOG_CONTROL_DESTINATION_FILE = "sysdebugLogControlDestinationFile"
MO_CLASS_ID_SYSDEBUG_LOG_CONTROL_DESTINATION_SYSLOG = "sysdebugLogControlDestinationSyslog"
MO_CLASS_ID_SYSDEBUG_LOG_CONTROL_DOMAIN = "sysdebugLogControlDomain"
MO_CLASS_ID_SYSDEBUG_LOG_CONTROL_EP = "sysdebugLogControlEp"
MO_CLASS_ID_SYSDEBUG_LOG_CONTROL_EP_FSM = "sysdebugLogControlEpFsm"
MO_CLASS_ID_SYSDEBUG_LOG_CONTROL_EP_FSM_STAGE = "sysdebugLogControlEpFsmStage"
MO_CLASS_ID_SYSDEBUG_LOG_CONTROL_EP_FSM_TASK = "sysdebugLogControlEpFsmTask"
MO_CLASS_ID_SYSDEBUG_LOG_CONTROL_MODULE = "sysdebugLogControlModule"
MO_CLASS_ID_SYSDEBUG_LOG_EXPORT_POLICY = "sysdebugLogExportPolicy"
MO_CLASS_ID_SYSDEBUG_LOG_EXPORT_POLICY_FSM = "sysdebugLogExportPolicyFsm"
MO_CLASS_ID_SYSDEBUG_LOG_EXPORT_POLICY_FSM_STAGE = "sysdebugLogExportPolicyFsmStage"
MO_CLASS_ID_SYSDEBUG_LOG_EXPORT_POLICY_FSM_TASK = "sysdebugLogExportPolicyFsmTask"
MO_CLASS_ID_SYSDEBUG_LOG_EXPORT_STATUS = "sysdebugLogExportStatus"
MO_CLASS_ID_SYSDEBUG_MEP_LOG = "sysdebugMEpLog"
MO_CLASS_ID_SYSDEBUG_MEP_LOG_DEF = "sysdebugMEpLogDef"
MO_CLASS_ID_SYSDEBUG_MEP_LOG_POLICY = "sysdebugMEpLogPolicy"
MO_CLASS_ID_SYSDEBUG_MANUAL_CORE_FILE_EXPORT_TARGET = "sysdebugManualCoreFileExportTarget"
MO_CLASS_ID_SYSDEBUG_MANUAL_CORE_FILE_EXPORT_TARGET_FSM = "sysdebugManualCoreFileExportTargetFsm"
MO_CLASS_ID_SYSDEBUG_MANUAL_CORE_FILE_EXPORT_TARGET_FSM_STAGE = "sysdebugManualCoreFileExportTargetFsmStage"
MO_CLASS_ID_SYSDEBUG_MANUAL_CORE_FILE_EXPORT_TARGET_FSM_TASK = "sysdebugManualCoreFileExportTargetFsmTask"
MO_CLASS_ID_SYSDEBUG_REPOSITORY = "sysdebugRepository"
MO_CLASS_ID_SYSDEBUG_TECH_SUP_FILE_REPOSITORY = "sysdebugTechSupFileRepository"
MO_CLASS_ID_SYSDEBUG_TECH_SUPPORT = "sysdebugTechSupport"
MO_CLASS_ID_SYSDEBUG_TECH_SUPPORT_CMD_OPT = "sysdebugTechSupportCmdOpt"
MO_CLASS_ID_SYSDEBUG_TECH_SUPPORT_FSM = "sysdebugTechSupportFsm"
MO_CLASS_ID_SYSDEBUG_TECH_SUPPORT_FSM_STAGE = "sysdebugTechSupportFsmStage"
MO_CLASS_ID_SYSDEBUG_TECH_SUPPORT_FSM_TASK = "sysdebugTechSupportFsmTask"
MO_CLASS_ID_SYSFILE_DIGEST = "sysfileDigest"
MO_CLASS_ID_SYSFILE_EP = "sysfileEp"
MO_CLASS_ID_SYSFILE_EXPORTER = "sysfileExporter"
MO_CLASS_ID_SYSFILE_IMPORTER = "sysfileImporter"
MO_CLASS_ID_SYSFILE_INSTANCE = "sysfileInstance"
MO_CLASS_ID_SYSFILE_MUTATION = "sysfileMutation"
MO_CLASS_ID_SYSFILE_MUTATION_FSM = "sysfileMutationFsm"
MO_CLASS_ID_SYSFILE_MUTATION_FSM_STAGE = "sysfileMutationFsmStage"
MO_CLASS_ID_SYSFILE_MUTATION_FSM_TASK = "sysfileMutationFsmTask"
MO_CLASS_ID_SYSFILE_REPOSITORY = "sysfileRepository"
MO_CLASS_ID_TOP_INFO_POLICY = "topInfoPolicy"
MO_CLASS_ID_TOP_INFO_SYNC_POLICY = "topInfoSyncPolicy"
MO_CLASS_ID_TOP_META_INF = "topMetaInf"
MO_CLASS_ID_TOP_ROOT = "topRoot"
MO_CLASS_ID_TOP_SYS_DEFAULTS = "topSysDefaults"
MO_CLASS_ID_TOP_SYSTEM = "topSystem"
MO_CLASS_ID_TRIG_ABS_WINDOW = "trigAbsWindow"
MO_CLASS_ID_TRIG_ACK = "trigAck"
MO_CLASS_ID_TRIG_BASE_ABS_WINDOW = "trigBaseAbsWindow"
MO_CLASS_ID_TRIG_BASE_RECURR_WINDOW = "trigBaseRecurrWindow"
MO_CLASS_ID_TRIG_BASE_SCHED = "trigBaseSched"
MO_CLASS_ID_TRIG_CLIENT_TOKEN = "trigClientToken"
MO_CLASS_ID_TRIG_CONF_ACK = "trigConfAck"
MO_CLASS_ID_TRIG_INST = "trigInst"
MO_CLASS_ID_TRIG_LOCAL_ABS_WINDOW = "trigLocalAbsWindow"
MO_CLASS_ID_TRIG_LOCAL_SCHED = "trigLocalSched"
MO_CLASS_ID_TRIG_META = "trigMeta"
MO_CLASS_ID_TRIG_RECURR_WINDOW = "trigRecurrWindow"
MO_CLASS_ID_TRIG_RES_ACK = "trigResAck"
MO_CLASS_ID_TRIG_SCHED = "trigSched"
MO_CLASS_ID_TRIG_SCHED_WINDOW = "trigSchedWindow"
MO_CLASS_ID_TRIG_TEST = "trigTest"
MO_CLASS_ID_TRIG_TOKEN = "trigToken"
MO_CLASS_ID_TRIG_TRIGGERABLE = "trigTriggerable"
MO_CLASS_ID_TRIG_TRIGGERED = "trigTriggered"
MO_CLASS_ID_TRIG_WINDOW = "trigWindow"
MO_CLASS_ID_UNSPECIFIED = "unspecified"
MO_CLASS_ID_UUIDPOOL_ADDR = "uuidpoolAddr"
MO_CLASS_ID_UUIDPOOL_BLOCK = "uuidpoolBlock"
MO_CLASS_ID_UUIDPOOL_FORMAT = "uuidpoolFormat"
MO_CLASS_ID_UUIDPOOL_POOL = "uuidpoolPool"
MO_CLASS_ID_UUIDPOOL_POOLABLE = "uuidpoolPoolable"
MO_CLASS_ID_UUIDPOOL_POOLED = "uuidpoolPooled"
MO_CLASS_ID_UUIDPOOL_UNIVERSE = "uuidpoolUniverse"
MO_CLASS_ID_VERSION_APPLICATION = "versionApplication"
MO_CLASS_ID_VERSION_EP = "versionEp"
MO_CLASS_ID_VERSION_VERSION = "versionVersion"
MO_CLASS_ID_VM_ADAPTOR = "vmAdaptor"
MO_CLASS_ID_VM_CLIENT_CONTAINER = "vmClientContainer"
MO_CLASS_ID_VM_COMPUTE_EP = "vmComputeEp"
MO_CLASS_ID_VM_CONT = "vmCont"
MO_CLASS_ID_VM_DC = "vmDC"
MO_CLASS_ID_VM_DCORG = "vmDCOrg"
MO_CLASS_ID_VM_DIR_CONT = "vmDirCont"
MO_CLASS_ID_VM_EP = "vmEp"
MO_CLASS_ID_VM_HBA = "vmHba"
MO_CLASS_ID_VM_HV = "vmHv"
MO_CLASS_ID_VM_INSTANCE = "vmInstance"
MO_CLASS_ID_VM_LIFE_CYCLE_POLICY = "vmLifeCyclePolicy"
MO_CLASS_ID_VM_LIFE_CYCLE_POLICY_FSM = "vmLifeCyclePolicyFsm"
MO_CLASS_ID_VM_LIFE_CYCLE_POLICY_FSM_STAGE = "vmLifeCyclePolicyFsmStage"
MO_CLASS_ID_VM_LIFE_CYCLE_POLICY_FSM_TASK = "vmLifeCyclePolicyFsmTask"
MO_CLASS_ID_VM_NIC = "vmNic"
MO_CLASS_ID_VM_ORG = "vmOrg"
MO_CLASS_ID_VM_SWITCH = "vmSwitch"
MO_CLASS_ID_VM_VIF = "vmVif"
MO_CLASS_ID_VM_VIRTUAL = "vmVirtual"
MO_CLASS_ID_VM_VLAN = "vmVlan"
MO_CLASS_ID_VM_VNIC_PROF_CL = "vmVnicProfCl"
MO_CLASS_ID_VM_VNIC_PROF_INST = "vmVnicProfInst"
MO_CLASS_ID_VM_VSAN = "vmVsan"
MO_CLASS_ID_VNIC_ABEH = "vnicABeh"
MO_CLASS_ID_VNIC_AETHER_IF = "vnicAEtherIf"
MO_CLASS_ID_VNIC_AFC_IF = "vnicAFcIf"
MO_CLASS_ID_VNIC_AGROUP = "vnicAGroup"
MO_CLASS_ID_VNIC_AIPC_IF = "vnicAIpcIf"
MO_CLASS_ID_VNIC_ASCSI_IF = "vnicAScsiIf"
MO_CLASS_ID_VNIC_BOOT_IP_POLICY = "vnicBootIpPolicy"
MO_CLASS_ID_VNIC_BOOT_TARGET = "vnicBootTarget"
MO_CLASS_ID_VNIC_CON_POLICY_REF = "vnicConPolicyRef"
MO_CLASS_ID_VNIC_CON_REQ = "vnicConReq"
MO_CLASS_ID_VNIC_CONN_DEF = "vnicConnDef"
MO_CLASS_ID_VNIC_CONNECTION = "vnicConnection"
MO_CLASS_ID_VNIC_DEF_BEH = "vnicDefBeh"
MO_CLASS_ID_VNIC_DYNAMIC_CON = "vnicDynamicCon"
MO_CLASS_ID_VNIC_DYNAMIC_CON_POLICY = "vnicDynamicConPolicy"
MO_CLASS_ID_VNIC_DYNAMIC_CON_POLICY_REF = "vnicDynamicConPolicyRef"
MO_CLASS_ID_VNIC_DYNAMIC_CON_REQ = "vnicDynamicConReq"
MO_CLASS_ID_VNIC_DYNAMIC_ID_UNIVERSE = "vnicDynamicIdUniverse"
MO_CLASS_ID_VNIC_DYNAMIC_PROVIDER = "vnicDynamicProvider"
MO_CLASS_ID_VNIC_DYNAMIC_PROVIDER_EP = "vnicDynamicProviderEp"
MO_CLASS_ID_VNIC_ETH_CONFIG = "vnicEthConfig"
MO_CLASS_ID_VNIC_ETH_LIF = "vnicEthLif"
MO_CLASS_ID_VNIC_ETHER = "vnicEther"
MO_CLASS_ID_VNIC_ETHER_BASE = "vnicEtherBase"
MO_CLASS_ID_VNIC_ETHER_BASE_IF = "vnicEtherBaseIf"
MO_CLASS_ID_VNIC_ETHER_IF = "vnicEtherIf"
MO_CLASS_ID_VNIC_FC = "vnicFc"
MO_CLASS_ID_VNIC_FC_BASE = "vnicFcBase"
MO_CLASS_ID_VNIC_FC_GROUP_DEF = "vnicFcGroupDef"
MO_CLASS_ID_VNIC_FC_GROUP_TEMPL = "vnicFcGroupTempl"
MO_CLASS_ID_VNIC_FC_IF = "vnicFcIf"
MO_CLASS_ID_VNIC_FC_LIF = "vnicFcLif"
MO_CLASS_ID_VNIC_FC_NODE = "vnicFcNode"
MO_CLASS_ID_VNIC_FC_OEIF = "vnicFcOEIf"
MO_CLASS_ID_VNIC_IPIF = "vnicIPIf"
MO_CLASS_ID_VNIC_IPV4_DHCP = "vnicIPv4Dhcp"
MO_CLASS_ID_VNIC_IPV4_DNS = "vnicIPv4Dns"
MO_CLASS_ID_VNIC_IPV4_IF = "vnicIPv4If"
MO_CLASS_ID_VNIC_IPV4_ISCSI_ADDR = "vnicIPv4IscsiAddr"
MO_CLASS_ID_VNIC_IPV4_POOLED_ISCSI_ADDR = "vnicIPv4PooledIscsiAddr"
MO_CLASS_ID_VNIC_IPV4_STATIC_ROUTE = "vnicIPv4StaticRoute"
MO_CLASS_ID_VNIC_IPV6_IF = "vnicIPv6If"
MO_CLASS_ID_VNIC_ISCSI = "vnicIScsi"
MO_CLASS_ID_VNIC_ISCSI_AUTO_TARGET_IF = "vnicIScsiAutoTargetIf"
MO_CLASS_ID_VNIC_ISCSI_BASE = "vnicIScsiBase"
MO_CLASS_ID_VNIC_ISCSI_BOOT_PARAMS = "vnicIScsiBootParams"
MO_CLASS_ID_VNIC_ISCSI_BOOT_VNIC = "vnicIScsiBootVnic"
MO_CLASS_ID_VNIC_ISCSI_CONFIG = "vnicIScsiConfig"
MO_CLASS_ID_VNIC_ISCSI_INIT_AUTO_CONFIG_POLICY = "vnicIScsiInitAutoConfigPolicy"
MO_CLASS_ID_VNIC_ISCSI_LCP = "vnicIScsiLCP"
MO_CLASS_ID_VNIC_ISCSI_NODE = "vnicIScsiNode"
MO_CLASS_ID_VNIC_ISCSI_STATIC_TARGET_IF = "vnicIScsiStaticTargetIf"
MO_CLASS_ID_VNIC_ISCSI_TARGET_IF = "vnicIScsiTargetIf"
MO_CLASS_ID_VNIC_IF = "vnicIf"
MO_CLASS_ID_VNIC_IF_ROLE = "vnicIfRole"
MO_CLASS_ID_VNIC_INITIATOR_AUTO_CONFIG_POLICY = "vnicInitiatorAutoConfigPolicy"
MO_CLASS_ID_VNIC_INTERNAL_PROFILE = "vnicInternalProfile"
MO_CLASS_ID_VNIC_IP_ADDR = "vnicIpAddr"
MO_CLASS_ID_VNIC_IP_V4_ADDR = "vnicIpV4Addr"
MO_CLASS_ID_VNIC_IP_V4_ADDR_CONF = "vnicIpV4AddrConf"
MO_CLASS_ID_VNIC_IP_V4_ADDR_EXPL_CONF = "vnicIpV4AddrExplConf"
MO_CLASS_ID_VNIC_IP_V4_HISTORY = "vnicIpV4History"
MO_CLASS_ID_VNIC_IP_V4_MGMT_POOLED_ADDR = "vnicIpV4MgmtPooledAddr"
MO_CLASS_ID_VNIC_IP_V4_POOLED_ADDR = "vnicIpV4PooledAddr"
MO_CLASS_ID_VNIC_IP_V4_PROF_DERIVED_ADDR = "vnicIpV4ProfDerivedAddr"
MO_CLASS_ID_VNIC_IP_V4_STATIC_ADDR = "vnicIpV4StaticAddr"
MO_CLASS_ID_VNIC_IP_V6_ADDR = "vnicIpV6Addr"
MO_CLASS_ID_VNIC_IP_V6_ADDR_CONF = "vnicIpV6AddrConf"
MO_CLASS_ID_VNIC_IP_V6_ADDR_EXPL_CONF = "vnicIpV6AddrExplConf"
MO_CLASS_ID_VNIC_IP_V6_HISTORY = "vnicIpV6History"
MO_CLASS_ID_VNIC_IP_V6_MGMT_POOLED_ADDR = "vnicIpV6MgmtPooledAddr"
MO_CLASS_ID_VNIC_IP_V6_STATIC_ADDR = "vnicIpV6StaticAddr"
MO_CLASS_ID_VNIC_IPC = "vnicIpc"
MO_CLASS_ID_VNIC_IPC_IF = "vnicIpcIf"
MO_CLASS_ID_VNIC_IQN_HISTORY = "vnicIqnHistory"
MO_CLASS_ID_VNIC_L2_IF = "vnicL2If"
MO_CLASS_ID_VNIC_L2_LIF = "vnicL2Lif"
MO_CLASS_ID_VNIC_L3_IF = "vnicL3If"
MO_CLASS_ID_VNIC_LAN_CONN_POLICY = "vnicLanConnPolicy"
MO_CLASS_ID_VNIC_LAN_CONN_TEMPL = "vnicLanConnTempl"
MO_CLASS_ID_VNIC_LIF_VLAN = "vnicLifVlan"
MO_CLASS_ID_VNIC_LIF_VSAN = "vnicLifVsan"
MO_CLASS_ID_VNIC_LUN = "vnicLun"
MO_CLASS_ID_VNIC_MAC_HISTORY = "vnicMacHistory"
MO_CLASS_ID_VNIC_NIC_CONN = "vnicNicConn"
MO_CLASS_ID_VNIC_OPROFILE_ALIAS = "vnicOProfileAlias"
MO_CLASS_ID_VNIC_PROFILE = "vnicProfile"
MO_CLASS_ID_VNIC_PROFILE_ALIAS = "vnicProfileAlias"
MO_CLASS_ID_VNIC_PROFILE_REF = "vnicProfileRef"
MO_CLASS_ID_VNIC_PROFILE_SET = "vnicProfileSet"
MO_CLASS_ID_VNIC_PROFILE_SET_FSM = "vnicProfileSetFsm"
MO_CLASS_ID_VNIC_PROFILE_SET_FSM_STAGE = "vnicProfileSetFsmStage"
MO_CLASS_ID_VNIC_PROFILE_SET_FSM_TASK = "vnicProfileSetFsmTask"
MO_CLASS_ID_VNIC_RACK_SERVER_DISCOVERY_PROFILE = "vnicRackServerDiscoveryProfile"
MO_CLASS_ID_VNIC_SAN_CONN_POLICY = "vnicSanConnPolicy"
MO_CLASS_ID_VNIC_SAN_CONN_TEMPL = "vnicSanConnTempl"
MO_CLASS_ID_VNIC_SCSI = "vnicScsi"
MO_CLASS_ID_VNIC_SCSI_IF = "vnicScsiIf"
MO_CLASS_ID_VNIC_TEMPL = "vnicTempl"
MO_CLASS_ID_VNIC_USNIC_CON_POLICY = "vnicUsnicConPolicy"
MO_CLASS_ID_VNIC_USNIC_CON_POLICY_REF = "vnicUsnicConPolicyRef"
MO_CLASS_ID_VNIC_USNIC_CON_REQ = "vnicUsnicConReq"
MO_CLASS_ID_VNIC_VPROFILE_ALIAS = "vnicVProfileAlias"
MO_CLASS_ID_VNIC_VHBA_BEH_POLICY = "vnicVhbaBehPolicy"
MO_CLASS_ID_VNIC_VLAN = "vnicVlan"
MO_CLASS_ID_VNIC_VMQ_CON_POLICY = "vnicVmqConPolicy"
MO_CLASS_ID_VNIC_VMQ_CON_POLICY_REF = "vnicVmqConPolicyRef"
MO_CLASS_ID_VNIC_VMQ_CON_REQ = "vnicVmqConReq"
MO_CLASS_ID_VNIC_VNIC = "vnicVnic"
MO_CLASS_ID_VNIC_VNIC_BEH_POLICY = "vnicVnicBehPolicy"
MO_CLASS_ID_VNIC_WWNN_HISTORY = "vnicWwnnHistory"
MO_CLASS_ID_VNIC_WWPN_HISTORY = "vnicWwpnHistory"
OPER_CODE_CREATE = "create"
OPER_CODE_DELETE = "delete"
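

# --- Illustrative usage sketch (not part of the generated module) ------------
# The MO_CLASS_ID_* and OPER_CODE_* constants above mirror the raw string
# identifiers used by the UCS XML API, so callers can compare against named
# constants instead of hard-coded literals. `describe_change` below is a
# hypothetical helper, named here purely for illustration.
def describe_change(mo_class_id, oper_code):
    """Return a short human-readable summary for a (class id, operation) pair."""
    verbs = {OPER_CODE_CREATE: "created", OPER_CODE_DELETE: "deleted"}
    return "%s object %s" % (mo_class_id, verbs.get(oper_code, "changed"))

# Example:
#   describe_change(MO_CLASS_ID_VNIC_ETHER, OPER_CODE_CREATE)
#   -> "vnicEther object created"
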
class DupeScope(ManagedObject):
"""This is DupeScope class."""
consts = DupeScopeConsts()
naming_props = set([u'id'])
mo_meta = MoMeta("DupeScope", "dupeScope", "scope-[id]", VersionMeta.Version302a, "InputOutput", 0xff, [], ["admin"], [], [], [None])
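    # By ucsmsdk convention (an assumption here, not documented in this file),
    # each MoPropertyMeta below takes positional arguments in the order:
    # (name, xml_attribute, field_type, version, access, mask, min_length,
    #  max_length, restriction_pattern, value_set, range_set). The very long
    # value_set on "mo_class_id" enumerates every managed-object class id the
    # property may hold; it matches the MO_CLASS_ID_* constants defined above.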
prop_meta = {
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version302a, MoPropertyMeta.INTERNAL, 0x2, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
"client_mo_dn": MoPropertyMeta("client_mo_dn", "clientMoDn", "string", VersionMeta.Version302a, MoPropertyMeta.READ_ONLY, None, 0, 256, None, [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version302a, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []),
"id": MoPropertyMeta("id", "id", "uint", VersionMeta.Version302a, MoPropertyMeta.NAMING, 0x8, None, None, None, [], []),
"is_system": MoPropertyMeta("is_system", "isSystem", "string", VersionMeta.Version302a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["false", "no", "true", "yes"], []),
"mo_class_id": MoPropertyMeta("mo_class_id", "moClassId", "string", VersionMeta.Version302a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["aaaAuthMethod", "aaaAuthRealm", "aaaAuthRealmFsm", "aaaAuthRealmFsmStage", "aaaBanner", "aaaCimcSession", "aaaConfig", "aaaConsoleAuth", "aaaDefaultAuth", "aaaDefinition", "aaaDomain", "aaaDomainAuth", "aaaEp", "aaaEpAuthProfile", "aaaEpFsm", "aaaEpFsmStage", "aaaEpFsmTask", "aaaEpLogin", "aaaEpUser", "aaaExtMgmtCutThruTkn", "aaaItem", "aaaLdapEp", "aaaLdapEpFsm", "aaaLdapEpFsmStage", "aaaLdapGroup", "aaaLdapGroupRule", "aaaLdapProvider", "aaaLocale", "aaaLog", "aaaModLR", "aaaOrg", "aaaPreLoginBanner", "aaaProvider", "aaaProviderGroup", "aaaProviderRef", "aaaPwdProfile", "aaaRadiusEp", "aaaRadiusEpFsm", "aaaRadiusEpFsmStage", "aaaRadiusProvider", "aaaRealm", "aaaRealmFsm", "aaaRealmFsmStage", "aaaRealmFsmTask", "aaaRemoteUser", "aaaRole", "aaaSession", "aaaSessionInfo", "aaaSessionInfoTable", "aaaSessionLR", "aaaShellLogin", "aaaSshAuth", "aaaSystemUser", "aaaTacacsPlusEp", "aaaTacacsPlusEpFsm", "aaaTacacsPlusEpFsmStage", "aaaTacacsPlusProvider", "aaaUser", "aaaUserAction", "aaaUserData", "aaaUserEp", "aaaUserEpFsm", "aaaUserEpFsmStage", "aaaUserEpFsmTask", "aaaUserGroup", "aaaUserLocale", "aaaUserLogin", "aaaUserRole", "aaaWebLogin", "adaptorBehCap", "adaptorCIoEp", "adaptorCapDef", "adaptorCapQual", "adaptorCapSpec", "adaptorDiagCap", "adaptorDynamicConfigCap", "adaptorEthArfsProfile", "adaptorEthCompQueueProfile", "adaptorEthFailoverProfile", "adaptorEthInterruptProfile", "adaptorEthNVGREProfile", "adaptorEthOffloadProfile", "adaptorEthPortBySizeLargeStats", "adaptorEthPortBySizeLargeStatsHist", "adaptorEthPortBySizeSmallStats", "adaptorEthPortBySizeSmallStatsHist", "adaptorEthPortErrStats", "adaptorEthPortErrStatsHist", "adaptorEthPortMcastStats", "adaptorEthPortMcastStatsHist", "adaptorEthPortOutsizedStats", "adaptorEthPortOutsizedStatsHist", "adaptorEthPortStats", "adaptorEthPortStatsHist", "adaptorEthQueueProfile", "adaptorEthRecvQueueProfile", "adaptorEthRoCEProfile", "adaptorEthVxLANProfile", "adaptorEthWorkQueueProfile", "adaptorEtherIfStats", "adaptorEtherIfStatsHist", "adaptorExtEthIf", "adaptorExtEthIfFsm", "adaptorExtEthIfFsmStage", "adaptorExtEthIfFsmTask", "adaptorExtEthIfPc", "adaptorExtEthIfPcEp", "adaptorExtIf", "adaptorExtIfEp", "adaptorExtIfPc", "adaptorExtIpV6RssHashProfile", "adaptorExternalEp", "adaptorExternalPc", "adaptorFamilyTypeDef", "adaptorFcCdbWorkQueueProfile", "adaptorFcErrorRecoveryProfile", "adaptorFcIfEventStats", "adaptorFcIfEventStatsHist", "adaptorFcIfFC4Stats", "adaptorFcIfFC4StatsHist", "adaptorFcIfFrameStats", "adaptorFcIfFrameStatsHist", "adaptorFcInterruptProfile", "adaptorFcLogiProfile", "adaptorFcOEIf", "adaptorFcPortFLogiProfile", "adaptorFcPortPLogiProfile", "adaptorFcPortProfile", "adaptorFcPortStats", "adaptorFcPortStatsHist", "adaptorFcQueueProfile", "adaptorFcRecvQueueProfile", "adaptorFcWorkQueueProfile", "adaptorFruCapProvider", "adaptorFruCapRef", "adaptorFwCapProvider", "adaptorHostEthIf", "adaptorHostEthIfFsm", "adaptorHostEthIfFsmStage", "adaptorHostEthIfFsmTask", "adaptorHostEthIfProfile", "adaptorHostFcIf", "adaptorHostFcIfFsm", "adaptorHostFcIfFsmStage", "adaptorHostFcIfFsmTask", "adaptorHostFcIfProfile", "adaptorHostIf", "adaptorHostIfConnDef", "adaptorHostIfProfile", "adaptorHostIscsiIf", "adaptorHostIscsiIfProfile", "adaptorHostMgmtCap", "adaptorHostPort", "adaptorHostPortCap", "adaptorHostScsiIf", "adaptorHostScsiLunRef", "adaptorHostServiceEthIf", "adaptorHostVnicHwAddrCap", 
"adaptorHostethHwAddrCap", "adaptorHostfcHwAddrCap", "adaptorHwAddrCap", "adaptorIScsiCap", "adaptorIpV4RssHashProfile", "adaptorIpV6RssHashProfile", "adaptorIscsiAuth", "adaptorIscsiProt", "adaptorIscsiTargetIf", "adaptorLanCap", "adaptorLldpCap", "adaptorMenloBaseErrorStats", "adaptorMenloBaseErrorStatsHist", "adaptorMenloDcePortStats", "adaptorMenloDcePortStatsHist", "adaptorMenloEthErrorStats", "adaptorMenloEthErrorStatsHist", "adaptorMenloEthStats", "adaptorMenloEthStatsHist", "adaptorMenloFcErrorStats", "adaptorMenloFcErrorStatsHist", "adaptorMenloFcStats", "adaptorMenloFcStatsHist", "adaptorMenloHostPortStats", "adaptorMenloHostPortStatsHist", "adaptorMenloMcpuErrorStats", "adaptorMenloMcpuErrorStatsHist", "adaptorMenloMcpuStats", "adaptorMenloMcpuStatsHist", "adaptorMenloNetEgStats", "adaptorMenloNetEgStatsHist", "adaptorMenloNetInStats", "adaptorMenloNetInStatsHist", "adaptorMenloQErrorStats", "adaptorMenloQErrorStatsHist", "adaptorMenloQStats", "adaptorMenloQStatsHist", "adaptorMgmtCap", "adaptorMgmtVnicEthConfig", "adaptorNwMgmtCap", "adaptorNwStatsMgmtCap", "adaptorPIoEp", "adaptorProfileItem", "adaptorProtocolProfile", "adaptorQual", "adaptorQueueProfile", "adaptorRnicCapSpec", "adaptorRssHashProfile", "adaptorRssProfile", "adaptorSanCap", "adaptorTcpIpRssHashProfile", "adaptorUnit", "adaptorUnitAssocCtx", "adaptorUnitExtn", "adaptorUplinkHwAddrCap", "adaptorUplinkPortStats", "adaptorUsnicConnDef", "adaptorVlan", "adaptorVnicStats", "adaptorVnicStatsHist", "adaptorVsan", "apeAdapter", "apeAdapterVnic", "apeAttribute", "apeBootMethod", "apeControllerChassis", "apeControllerEeprom", "apeControllerManager", "apeDcosAgManager", "apeFru", "apeHostAgent", "apeLANBoot", "apeLocalDiskBoot", "apeManager", "apeMc", "apeMcTable", "apeMenlo", "apeMenloVnic", "apeMenloVnicStats", "apeNicAgManager", "apePalo", "apePaloVnic", "apePaloVnicStats", "apeParam", "apeReading", "apeSANBoot", "apeSdr", "apeSwitchFirmwareInv", "apeVirtualMediaBoot", "apeVnicStats", "biosARef", "biosBOT", "biosBootDev", "biosBootDevGrp", "biosFeatureRef", "biosParameterRef", "biosRef", "biosSettingRef", "biosSettings", "biosUnit", "biosVFeat", "biosVIdentityParams", "biosVProfile", "biosVfACPI10Support", "biosVfASPMSupport", "biosVfAllUSBDevices", "biosVfAltitude", "biosVfAssertNMIOnPERR", "biosVfAssertNMIOnSERR", "biosVfBootOptionRetry", "biosVfCPUHardwarePowerManagement", "biosVfCPUPerformance", "biosVfCPUPowerManagement", "biosVfConsistentDeviceNameControl", "biosVfConsoleRedirection", "biosVfCoreMultiProcessing", "biosVfDDR3VoltageSelection", "biosVfDRAMClockThrottling", "biosVfDirectCacheAccess", "biosVfDramRefreshRate", "biosVfEnhancedIntelSpeedStepTech", "biosVfEnhancedPowerCappingSupport", "biosVfExecuteDisableBit", "biosVfFRB2Timer", "biosVfFrequencyFloorOverride", "biosVfFrontPanelLockout", "biosVfIntegratedGraphics", "biosVfIntegratedGraphicsApertureSize", "biosVfIntelEntrySASRAIDModule", "biosVfIntelHyperThreadingTech", "biosVfIntelTrustedExecutionTechnology", "biosVfIntelTurboBoostTech", "biosVfIntelVTForDirectedIO", "biosVfIntelVirtualizationTechnology", "biosVfInterleaveConfiguration", "biosVfLocalX2Apic", "biosVfLvDIMMSupport", "biosVfMaxVariableMTRRSetting", "biosVfMaximumMemoryBelow4GB", "biosVfMemoryMappedIOAbove4GB", "biosVfMirroringMode", "biosVfNUMAOptimized", "biosVfOSBootWatchdogTimer", "biosVfOSBootWatchdogTimerPolicy", "biosVfOSBootWatchdogTimerTimeout", "biosVfOnboardGraphics", "biosVfOnboardSATAController", "biosVfOnboardStorage", "biosVfOptionROMEnable", "biosVfOptionROMLoad", 
"biosVfPCHSATAMode", "biosVfPCILOMPortsConfiguration", "biosVfPCISlotLinkSpeed", "biosVfPCISlotOptionROMEnable", "biosVfPOSTErrorPause", "biosVfPSTATECoordination", "biosVfPackageCStateLimit", "biosVfProcessorC1E", "biosVfProcessorC3Report", "biosVfProcessorC6Report", "biosVfProcessorC7Report", "biosVfProcessorCState", "biosVfProcessorEnergyConfiguration", "biosVfProcessorPrefetchConfig", "biosVfQPILinkFrequencySelect", "biosVfQPISnoopMode", "biosVfQuietBoot", "biosVfResumeOnACPowerLoss", "biosVfScrubPolicies", "biosVfSelectMemoryRASConfiguration", "biosVfSerialPortAEnable", "biosVfSparingMode", "biosVfSriovConfig", "biosVfTPMPendingOperation", "biosVfTPMSupport", "biosVfTrustedPlatformModule", "biosVfUCSMBootModeControl", "biosVfUCSMBootOrderRuleControl", "biosVfUEFIOSUseLegacyVideo", "biosVfUSBBootConfig", "biosVfUSBConfiguration", "biosVfUSBFrontPanelAccessLock", "biosVfUSBPortConfiguration", "biosVfUSBSystemIdlePowerOptimizingSetting", "biosVfVGAPriority", "bmcSELCounter", "callhomeAnonymousReporting", "callhomeDest", "callhomeEp", "callhomeEpFsm", "callhomeEpFsmStage", "callhomeEpFsmTask", "callhomeItem", "callhomePeriodicSystemInventory", "callhomePolicy", "callhomeProfile", "callhomeSmtp", "callhomeSource", "callhomeTestAlert", "capabilityCatalogue", "capabilityCatalogueFsm", "capabilityCatalogueFsmStage", "capabilityCatalogueFsmTask", "capabilityDef", "capabilityEp", "capabilityFeatureLimits", "capabilityItem", "capabilityMgmtExtension", "capabilityMgmtExtensionFsm", "capabilityMgmtExtensionFsmStage", "capabilityMgmtExtensionFsmTask", "capabilityNetworkLimits", "capabilityProvider", "capabilityStorageLimits", "capabilitySystemLimits", "capabilityUpdate", "capabilityUpdater", "capabilityUpdaterFsm", "capabilityUpdaterFsmStage", "capabilityUpdaterFsmTask", "changeChangedObjectRef", "cimcvmediaActualMountEntry", "cimcvmediaActualMountList", "cimcvmediaConfigMountEntry", "cimcvmediaExtMgmtRuleEntry", "cimcvmediaItem", "cimcvmediaMountConfig", "cimcvmediaMountConfigDef", "cimcvmediaMountConfigPolicy", "cimcvmediaMountInfo", "clitestTypeTest", "clitestTypeTest2", "clitestTypeTestChild", "clitestTypeTestParent", "commCimcWebService", "commCimxml", "commClient", "commClientItem", "commDateTime", "commDefinition", "commDns", "commDnsProvider", "commEvtChannel", "commHttp", "commHttps", "commItem", "commLocale", "commNtpProvider", "commShell", "commShellSvcLimits", "commSmashCLP", "commSnmp", "commSnmpTrap", "commSnmpUser", "commSsh", "commSvc", "commSvcChannel", "commSvcEp", "commSvcEpFsm", "commSvcEpFsmStage", "commSvcEpFsmTask", "commSvcLimits", "commSvcPolicy", "commSyslog", "commSyslogClient", "commSyslogConsole", "commSyslogFile", "commSyslogMonitor", "commSyslogSource", "commTelnet", "commWeb", "commWebChannel", "commWebSvcLimits", "commWsman", "commXmlClConnPolicy", "computeABoard", "computeAChassisDiscPolicy", "computeAutoconfigPolicy", "computeBehCap", "computeBlade", "computeBladeDiscPolicy", "computeBladeEp", "computeBladeFsm", "computeBladeFsmStage", "computeBladeFsmTask", "computeBladeInheritPolicy", "computeBladePosQual", "computeBoard", "computeBoardConnector", "computeBoardController", "computeCartridge", "computeChassisConnPolicy", "computeChassisDiscPolicy", "computeChassisQual", "computeComputeDiscPolicy", "computeConfigPolicy", "computeConstraintDef", "computeContainer", "computeDefaults", "computeDiscPolicy", "computeExtBoard", "computeFwSyncAck", "computeHealthLedSensorAlarm", "computeIOHub", "computeIOHubEnvStats", "computeIOHubEnvStatsHist", 
"computeInitConfigPolicy", "computeInstanceIdQual", "computeItem", "computeKvmMgmtPolicy", "computeLogical", "computeMbPowerStats", "computeMbPowerStatsHist", "computeMbTempStats", "computeMbTempStatsHist", "computeMemoryConfigPolicy", "computeMemoryConfiguration", "computeMemoryUnitConstraintDef", "computePCIeFatalCompletionStats", "computePCIeFatalProtocolStats", "computePCIeFatalReceiveStats", "computePCIeFatalStats", "computePartition", "computePciCap", "computePciSlotScanDef", "computePhysical", "computePhysicalAssocCtx", "computePhysicalFsm", "computePhysicalFsmStage", "computePhysicalFsmTask", "computePhysicalQual", "computePlatform", "computePnuOSImage", "computePool", "computePoolPolicyRef", "computePoolable", "computePooled", "computePooledEnclosureComputeSlot", "computePooledPhysical", "computePooledRackUnit", "computePooledSlot", "computePoolingPolicy", "computePsuControl", "computePsuDef", "computePsuPolicy", "computeQual", "computeQualBase", "computeQualItem", "computeQualifiedPolicy", "computeRackPosQual", "computeRackQual", "computeRackUnit", "computeRackUnitFsm", "computeRackUnitFsmStage", "computeRackUnitFsmTask", "computeRackUnitMbTempStats", "computeRackUnitMbTempStatsHist", "computeRtcBattery", "computeScrubPolicy", "computeServer", "computeServerDiscPolicy", "computeServerDiscPolicyFsm", "computeServerDiscPolicyFsmStage", "computeServerDiscPolicyFsmTask", "computeServerMgmtPolicy", "computeServerTypeCap", "computeServerUnit", "computeServerUnitFsm", "computeServerUnitFsmStage", "computeServerUnitFsmTask", "computeSlotEntity", "computeSlotQual", "computeVirtual", "computeVirtualContainer", "conditionImmutable", "conditionInfo", "conditionLog", "conditionLoggable", "conditionMultiInstanceImmutable", "conditionMutable", "conditionPolicy", "conditionReportable", "configImpact", "configImpactResponse", "configManagedEpImpactResponse", "configSorter", "dcxFcoeVifEp", "dcxNs", "dcxUniverse", "dcxVIf", "dcxVc", "dcxVifEp", "dhcpAcquired", "dhcpInst", "dhcpLease", "diagBladeTest", "diagCtrl", "diagNetworkTest", "diagRslt", "diagRunPolicy", "diagSrvCapProvider", "diagSrvCtrl", "diagTest", "domainEnvironmentFeature", "domainEnvironmentFeatureCont", "domainEnvironmentParam", "domainFeature", "domainFeatureCont", "domainNetworkFeature", "domainNetworkFeatureCont", "domainNetworkParam", "domainParameter", "domainServerFeature", "domainServerFeatureCont", "domainServerParam", "domainStorageFeature", "domainStorageFeatureCont", "domainStorageParam", "dpsecMac", "dupeScope", "dupeScopeResult", "epqosDefinition", "epqosDefinitionDelTask", "epqosDefinitionDelTaskFsm", "epqosDefinitionDelTaskFsmStage", "epqosDefinitionDelTaskFsmTask", "epqosDefinitionFsm", "epqosDefinitionFsmStage", "epqosDefinitionFsmTask", "epqosEgress", "epqosItem", "equipmentAdaptorConnDef", "equipmentAdaptorDef", "equipmentAdvancedBootOrder", "equipmentAssocCtx", "equipmentAutoNegotiateCap", "equipmentBaseBoardCapProvider", "equipmentBeaconCapProvider", "equipmentBeaconLed", "equipmentBeaconLedFsm", "equipmentBeaconLedFsmStage", "equipmentBeaconLedFsmTask", "equipmentBehCap", "equipmentBiosDef", "equipmentBladeAGLibrary", "equipmentBladeAggregationCapRef", "equipmentBladeBiosCapProvider", "equipmentBladeCapProvider", "equipmentBladeCapProviderTypeDef", "equipmentBladeConnDef", "equipmentBladeIOMConnDef", "equipmentBladeSwitchConnDef", "equipmentBoardControllerDef", "equipmentBreakoutCap", "equipmentCard", "equipmentCartridgeCapProvider", "equipmentCatalogCapProvider", "equipmentChassis", 
"equipmentChassisCapProvider", "equipmentChassisFsm", "equipmentChassisFsmStage", "equipmentChassisFsmTask", "equipmentChassisStats", "equipmentChassisStatsHist", "equipmentCimcVmedia", "equipmentComputePhysicalCapProvider", "equipmentDbgPluginCapProvider", "equipmentDimmEntry", "equipmentDimmMapping", "equipmentDiscoveryCap", "equipmentDowngradeConstraint", "equipmentEnvSensor", "equipmentFan", "equipmentFanModule", "equipmentFanModuleCapProvider", "equipmentFanModuleDef", "equipmentFanModuleStats", "equipmentFanModuleStatsHist", "equipmentFanStats", "equipmentFanStatsHist", "equipmentFex", "equipmentFexCapProvider", "equipmentFexEnvStats", "equipmentFexEnvStatsHist", "equipmentFexFsm", "equipmentFexFsmStage", "equipmentFexFsmTask", "equipmentFexPowerSummary", "equipmentFexPowerSummaryHist", "equipmentFexPsuInputStats", "equipmentFexPsuInputStatsHist", "equipmentFexSystemStats", "equipmentFexSystemStatsHist", "equipmentFirmwareConstraint", "equipmentFlashLife", "equipmentFruCapProvider", "equipmentGemCapProvider", "equipmentGemPortCap", "equipmentGraphicsCardCapProvider", "equipmentGraphicsCardCapRef", "equipmentHDDFaultMonDef", "equipmentHealthLed", "equipmentHolder", "equipmentHolderCapProvider", "equipmentHostIfCapProvider", "equipmentHwCapProvider", "equipmentIOCard", "equipmentIOCardBase", "equipmentIOCardBaseFsm", "equipmentIOCardBaseFsmStage", "equipmentIOCardBaseFsmTask", "equipmentIOCardCapProvider", "equipmentIOCardFsm", "equipmentIOCardFsmStage", "equipmentIOCardFsmTask", "equipmentIOCardStats", "equipmentIOCardStatsHist", "equipmentIOCardTypeDef", "equipmentInbandMgmtCap", "equipmentIndicatorLed", "equipmentIntegratedComponentCapProvider", "equipmentItem", "equipmentKvmMgmtCap", "equipmentLed", "equipmentLocalDiskCapProvider", "equipmentLocalDiskControllerCapProvider", "equipmentLocalDiskControllerCapRef", "equipmentLocalDiskControllerDef", "equipmentLocalDiskControllerTypeDef", "equipmentLocalDiskDef", "equipmentLocatorLed", "equipmentLocatorLedFsm", "equipmentLocatorLedFsmStage", "equipmentLocatorLedFsmTask", "equipmentManufacturingDef", "equipmentMemoryUnitCapProvider", "equipmentMemoryUnitDiscoveryModifierDef", "equipmentMgmtCapProvider", "equipmentMgmtExtCapProvider", "equipmentNetworkElementFanStats", "equipmentNetworkElementFanStatsHist", "equipmentNonCopperPassiveCap", "equipmentPOST", "equipmentPOSTCode", "equipmentPOSTCodeContainer", "equipmentPOSTCodeData", "equipmentPOSTCodeReporter", "equipmentPOSTCodeTemplate", "equipmentPciDef", "equipmentPfcMmuCap", "equipmentPhysDevicesPerBoard", "equipmentPhysicalDef", "equipmentPicture", "equipmentPortCap", "equipmentPortGroupAggregationDef", "equipmentPortGroupDef", "equipmentPortGroupSwComplexDef", "equipmentPortSwComplexRef", "equipmentPowerCapDef", "equipmentProcessorUnitCapProvider", "equipmentProcessorUnitDef", "equipmentPsu", "equipmentPsuCapProvider", "equipmentPsuDef", "equipmentPsuFsm", "equipmentPsuFsmStage", "equipmentPsuFsmTask", "equipmentPsuInputStats", "equipmentPsuInputStatsHist", "equipmentPsuOutputStats", "equipmentPsuOutputStatsHist", "equipmentPsuStats", "equipmentPsuStatsHist", "equipmentRackFanModuleDef", "equipmentRackUnitCapProvider", "equipmentRackUnitFanStats", "equipmentRackUnitFanStatsHist", "equipmentRackUnitPsuStats", "equipmentRackUnitPsuStatsHist", "equipmentRaidDef", "equipmentSecureBoot", "equipmentSecureController", "equipmentSecurityUnitCapProvider", "equipmentServerFeatureCap", "equipmentServerPortCapProvider", "equipmentServerUnitCapProvider", "equipmentServiceDef", 
"equipmentSharedIOModule", "equipmentSlotArray", "equipmentSlotArrayRef", "equipmentSlotEnclosure", "equipmentStateful", "equipmentStatefulBladeComp", "equipmentStatefulChComp", "equipmentStorageControllerConfig", "equipmentStorageControllerSlotDef", "equipmentStorageLimitCap", "equipmentStorageSasExpanderCapProvider", "equipmentStorageSasExpanderCapRef", "equipmentSwitchCap", "equipmentSwitchCapProvider", "equipmentSwitchCard", "equipmentSwitchIOCard", "equipmentSwitchIOCardCapProvider", "equipmentSwitchIOCardFsm", "equipmentSwitchIOCardFsmStage", "equipmentSwitchIOCardFsmTask", "equipmentSwitchTypeDef", "equipmentTpm", "equipmentTpmCapProvider", "equipmentUnifiedPortCapProvider", "equipmentUuidFeatureCap", "equipmentVersionConstraint", "equipmentXcvr", "etherCIoEp", "etherConfig", "etherErrStats", "etherErrStatsHist", "etherExternalEp", "etherExternalPc", "etherFcoeInterfaceStats", "etherFcoeInterfaceStatsHist", "etherIfConfig", "etherIntFIoEp", "etherInternalPc", "etherLossStats", "etherLossStatsHist", "etherNiErrStats", "etherNiErrStatsHist", "etherNicIfConfig", "etherPIo", "etherPIoEndPoint", "etherPIoEp", "etherPIoFsm", "etherPIoFsmStage", "etherPauseStats", "etherPauseStatsHist", "etherPc", "etherPortChanIdElem", "etherPortChanIdUniverse", "etherRxStats", "etherRxStatsHist", "etherServerIntFIo", "etherServerIntFIoFsm", "etherServerIntFIoFsmStage", "etherServerIntFIoFsmTask", "etherServerIntFIoPc", "etherServerIntFIoPcEp", "etherSwIfConfig", "etherSwitchIntFIo", "etherSwitchIntFIoPc", "etherSwitchIntFIoPcEp", "etherTxStats", "etherTxStatsHist", "eventEpCtrl", "eventHolder", "eventInst", "eventLog", "eventPolicy", "eventRecord", "extmgmtArpTargets", "extmgmtGatewayPing", "extmgmtIf", "extmgmtIfMonPolicy", "extmgmtMiiStatus", "extmgmtNdiscTargets", "extpolClient", "extpolClientCont", "extpolConnector", "extpolConnectorContainer", "extpolController", "extpolControllerCont", "extpolEp", "extpolEpFsm", "extpolEpFsmStage", "extpolEpFsmTask", "extpolProvider", "extpolProviderCont", "extpolProviderFsm", "extpolProviderFsmStage", "extpolProviderFsmTask", "extpolRegistry", "extpolRegistryFsm", "extpolRegistryFsmStage", "extpolRegistryFsmTask", "extpolSvc", "extpolSystemContext", "extvmmEp", "extvmmEpFsm", "extvmmEpFsmStage", "extvmmEpFsmTask", "extvmmFNDReference", "extvmmFabricNetwork", "extvmmFabricNetworkDefinition", "extvmmKeyInst", "extvmmKeyRing", "extvmmKeyStore", "extvmmKeyStoreFsm", "extvmmKeyStoreFsmStage", "extvmmKeyStoreFsmTask", "extvmmMasterExtKey", "extvmmMasterExtKeyFsm", "extvmmMasterExtKeyFsmStage", "extvmmMasterExtKeyFsmTask", "extvmmNetworkSets", "extvmmNetworkSetsFsm", "extvmmNetworkSetsFsmStage", "extvmmNetworkSetsFsmTask", "extvmmProvider", "extvmmProviderFsm", "extvmmProviderFsmStage", "extvmmProviderFsmTask", "extvmmSwitchDelTask", "extvmmSwitchDelTaskFsm", "extvmmSwitchDelTaskFsmStage", "extvmmSwitchDelTaskFsmTask", "extvmmSwitchSet", "extvmmUpLinkPP", "extvmmVMNDRef", "extvmmVMNetwork", "extvmmVMNetworkDefinition", "extvmmVMNetworkSets", "fabricADceSwSrvEp", "fabricAEthEstcEp", "fabricAEthLanEp", "fabricAFcEstcEp", "fabricAFcSanEp", "fabricAFcoeEstcEp", "fabricAFcoeSanEp", "fabricAVlan", "fabricAVsan", "fabricBHVlan", "fabricBreakout", "fabricCIoEp", "fabricCabling", "fabricCablingSw", "fabricCartridgePhEp", "fabricCartridgeSlotEp", "fabricCartridgeSlotEpFsm", "fabricCartridgeSlotEpFsmStage", "fabricCartridgeSlotEpFsmTask", "fabricCdpLinkPolicy", "fabricChangedObjectRef", "fabricChassisEp", "fabricCloud", "fabricComputeEp", "fabricComputeMSlotEp", 
"fabricComputeMSlotEpFsm", "fabricComputeMSlotEpFsmStage", "fabricComputeMSlotEpFsmTask", "fabricComputePhEp", "fabricComputeSlotEp", "fabricComputeSlotEpFsm", "fabricComputeSlotEpFsmStage", "fabricComputeSlotEpFsmTask", "fabricDceSrv", "fabricDceSwSrv", "fabricDceSwSrvEp", "fabricDceSwSrvPc", "fabricDceSwSrvPcEp", "fabricDomain", "fabricEp", "fabricEpMgr", "fabricEpMgrFsm", "fabricEpMgrFsmStage", "fabricEpMgrFsmTask", "fabricEpVlan", "fabricEpVsan", "fabricEquipmentEp", "fabricEstcEp", "fabricEstcPc", "fabricEthCdpPolicy", "fabricEthEstc", "fabricEthEstcCloud", "fabricEthEstcEp", "fabricEthEstcPc", "fabricEthEstcPcEp", "fabricEthFlowMonLan", "fabricEthFlowMonSrcRef", "fabricEthLan", "fabricEthLanEp", "fabricEthLanFlowMon", "fabricEthLanFlowMonCollector", "fabricEthLanFlowMonExporter", "fabricEthLanFlowMonSrcEp", "fabricEthLanFlowMonitor", "fabricEthLanFlowMonitoring", "fabricEthLanPc", "fabricEthLanPcEp", "fabricEthLinkPolicy", "fabricEthLinkProfile", "fabricEthMon", "fabricEthMonDestEp", "fabricEthMonFiltEp", "fabricEthMonFiltRef", "fabricEthMonLan", "fabricEthMonSrcEp", "fabricEthMonSrcRef", "fabricEthTargetEp", "fabricEthUdldPolicy", "fabricEthVlanPc", "fabricEthVlanPortEp", "fabricExternal", "fabricExternalEp", "fabricExternalEstc", "fabricExternalPc", "fabricFcEstc", "fabricFcEstcCloud", "fabricFcEstcEp", "fabricFcMon", "fabricFcMonDestEp", "fabricFcMonFiltEp", "fabricFcMonFiltRef", "fabricFcMonSan", "fabricFcMonSrcEp", "fabricFcMonSrcRef", "fabricFcSan", "fabricFcSanEp", "fabricFcSanPc", "fabricFcSanPcEp", "fabricFcVsanPc", "fabricFcVsanPortEp", "fabricFcoeEstcEp", "fabricFcoeSanEp", "fabricFcoeSanPc", "fabricFcoeSanPcEp", "fabricFcoeVsanPc", "fabricFcoeVsanPortEp", "fabricFlowMon", "fabricFlowMonCollector", "fabricFlowMonDefinition", "fabricFlowMonExporter", "fabricFlowMonExporterProfile", "fabricFlowMonItem", "fabricFlowMonSrcEp", "fabricFlowMonSrcRef", "fabricFlowMonitor", "fabricFlowMonitoringCategory", "fabricIf", "fabricInternal", "fabricInternalDceSrv", "fabricInternalEp", "fabricInternalPc", "fabricLacpPolicy", "fabricLan", "fabricLanAccessMgr", "fabricLanCloud", "fabricLanCloudFsm", "fabricLanCloudFsmStage", "fabricLanCloudFsmTask", "fabricLanCloudPolicy", "fabricLanEp", "fabricLanFlowMon", "fabricLanFlowMonCollector", "fabricLanFlowMonExporter", "fabricLanFlowMonSrcEp", "fabricLanFlowMonitor", "fabricLanMonCloud", "fabricLanPc", "fabricLanPinGroup", "fabricLanPinTarget", "fabricLastAckedSlot", "fabricLinkPolicy", "fabricLocale", "fabricMon", "fabricMonDestEp", "fabricMonFiltRef", "fabricMonSrcEp", "fabricMonSrcFiltEp", "fabricMonSrcRef", "fabricMulticastPolicy", "fabricNetGroup", "fabricNetflowCollector", "fabricNetflowIPv4Addr", "fabricNetflowMonExporter", "fabricNetflowMonExporterRef", "fabricNetflowMonSession", "fabricNetflowMonSrcEp", "fabricNetflowMonSrcRef", "fabricNetflowMonitor", "fabricNetflowMonitorRef", "fabricNetflowTimeoutPolicy", "fabricOrgVlanPolicy", "fabricPIoEp", "fabricPath", "fabricPathConn", "fabricPathEp", "fabricPinGroup", "fabricPinTarget", "fabricPoolableVlan", "fabricPooledVlan", "fabricSan", "fabricSanCloud", "fabricSanCloudFsm", "fabricSanCloudFsmStage", "fabricSanCloudFsmTask", "fabricSanEp", "fabricSanMonCloud", "fabricSanPc", "fabricSanPinGroup", "fabricSanPinTarget", "fabricSubGroup", "fabricSwChEp", "fabricSwChPhEp", "fabricSwSrvEp", "fabricSwSrvPc", "fabricSwSubGroup", "fabricTargetEp", "fabricUdldLinkPolicy", "fabricUdldPolicy", "fabricVCon", "fabricVConProfile", "fabricVlan", "fabricVlanEp", "fabricVlanGroupReq", "fabricVlanPermit", 
"fabricVlanReq", "fabricVnetEp", "fabricVnetEpSyncEp", "fabricVnetEpSyncEpFsm", "fabricVnetEpSyncEpFsmStage", "fabricVnetEpSyncEpFsmTask", "fabricVnetGroupReq", "fabricVnetPermit", "fabricVnetReq", "fabricVsan", "fabricVsanEp", "fabricVsanMembership", "fabricZoneIdUniverse", "faultAffectedClass", "faultBaseHolder", "faultBasePolicy", "faultHolder", "faultInst", "faultLocalTypedHolder", "faultPolicy", "faultSuppressPolicy", "faultSuppressPolicyItem", "faultSuppressTask", "fcConfig", "fcErrStats", "fcErrStatsHist", "fcIfConfig", "fcNicIfConfig", "fcPIo", "fcPIoFsm", "fcPIoFsmStage", "fcStats", "fcStatsHist", "fcSwIfConfig", "fcpoolAddr", "fcpoolBlock", "fcpoolBootTarget", "fcpoolFormat", "fcpoolInitiator", "fcpoolInitiatorEp", "fcpoolInitiators", "fcpoolPoolable", "fcpoolUniverse", "featureBaseRef", "featureContextEp", "featureDefinition", "featureDefinitionInstance", "featureDefinitionRef", "featureFruCapProviderInstance", "featureFruCapProviderRef", "featureProvider", "featureProviderInstance", "firmwareABundleTypeCapProvider", "firmwareAConstraint", "firmwareAck", "firmwareActivity", "firmwareAutoSyncPolicy", "firmwareBlade", "firmwareBootDefinition", "firmwareBootUnit", "firmwareBundleInfo", "firmwareBundleInfoDigest", "firmwareBundleType", "firmwareBundleTypeCapProvider", "firmwareCapProvider", "firmwareCatalogPack", "firmwareCatalogue", "firmwareCompItem", "firmwareCompSource", "firmwareCompTarget", "firmwareComputeHostPack", "firmwareComputeMgmtPack", "firmwareComputePack", "firmwareConstraints", "firmwareDependency", "firmwareDistImage", "firmwareDistributable", "firmwareDistributableFsm", "firmwareDistributableFsmStage", "firmwareDistributableFsmTask", "firmwareDownloader", "firmwareDownloaderFsm", "firmwareDownloaderFsmStage", "firmwareDownloaderFsmTask", "firmwareExcludeServerComponent", "firmwareFileUnit", "firmwareHost", "firmwareHostPackModImpact", "firmwareImage", "firmwareImageFsm", "firmwareImageFsmStage", "firmwareImageFsmTask", "firmwareImageLock", "firmwareInfra", "firmwareInfraPack", "firmwareInstallImpact", "firmwareInstallable", "firmwarePCHStorageConfigConstraint", "firmwarePack", "firmwarePackItem", "firmwarePlatformBundleTypeCapProvider", "firmwareProcessorTypeConstraint", "firmwareRack", "firmwareRunning", "firmwareServerTypeConstraint", "firmwareSpec", "firmwareStatus", "firmwareSystem", "firmwareSystemCompCheckResult", "firmwareSystemFsm", "firmwareSystemFsmStage", "firmwareSystemFsmTask", "firmwareType", "firmwareUcscInfo", "firmwareUnit", "firmwareUpdatable", "firmwareUpgradeConstraint", "firmwareUpgradeDetail", "firmwareUpgradeInfo", "firmwareVicSlotConstraint", "firmwareVnicCdnConstraint", "flowctrlDefinition", "flowctrlItem", "fsmFsm", "fsmStage", "fsmStatus", "fsmTask", "gmetaClass", "gmetaEp", "gmetaHolder", "gmetaHolderFsm", "gmetaHolderFsmStage", "gmetaHolderFsmTask", "gmetaPolicyMapElement", "gmetaPolicyMapHolder", "gmetaProp", "graphicsCard", "graphicsController", "hostimgPolicy", "hostimgTarget", "identIdentCtx", "identIdentRequest", "identIdentRequestFsm", "identIdentRequestFsmStage", "identIdentRequestFsmTask", "identMetaSystem", "identMetaSystemFsm", "identMetaSystemFsmStage", "identMetaSystemFsmTask", "identMetaVerse", "identRequestEp", "identSysInfo", "imgprovPolicy", "imgprovTarget", "imgsecKey", "imgsecPolicy", "initiatorEp", "initiatorFcInitiatorEp", "initiatorGroupEp", "initiatorIScsiInitiatorEp", "initiatorInitiatorEp", "initiatorLunEp", "initiatorMemberEp", "initiatorRequestorEp", "initiatorRequestorGrpEp", "initiatorStoreEp", 
"initiatorUnitEp", "ipDnsSuffix", "ipIPv4Dns", "ipIPv4WinsServer", "ipIpV4Addr", "ipIpV4StaticAddr", "ipIpV4StaticTargetAddr", "ipServiceIf", "ippoolAddr", "ippoolBlock", "ippoolIpV6Addr", "ippoolIpV6Block", "ippoolIpV6Pooled", "ippoolPool", "ippoolPoolable", "ippoolPooled", "ippoolUniverse", "iqnpoolAbsBlock", "iqnpoolAddr", "iqnpoolBlock", "iqnpoolFormat", "iqnpoolPool", "iqnpoolPoolable", "iqnpoolPooled", "iqnpoolTransportBlock", "iqnpoolUniverse", "iscsiAuthProfile", "licenseCapProvider", "licenseContents", "licenseDownloader", "licenseDownloaderFsm", "licenseDownloaderFsmStage", "licenseDownloaderFsmTask", "licenseEp", "licenseFeature", "licenseFeatureCapProvider", "licenseFeatureLine", "licenseFile", "licenseFileFsm", "licenseFileFsmStage", "licenseFileFsmTask", "licenseInstance", "licenseInstanceFsm", "licenseInstanceFsmStage", "licenseInstanceFsmTask", "licenseProp", "licenseServerHostId", "licenseSource", "licenseSourceFile", "licenseTarget", "lldpAcquired", "lsAgentPolicy", "lsBinding", "lsComputeBinding", "lsFcLocale", "lsFcZone", "lsFcZoneGroup", "lsFcZoneMember", "lsIdentityInfo", "lsIssues", "lsPower", "lsRequirement", "lsServer", "lsServerAssocCtx", "lsServerExtension", "lsServerFsm", "lsServerFsmStage", "lsServerFsmTask", "lsTier", "lsUuidHistory", "lsVConAssign", "lsVersionBeh", "lsZoneInitiatorMember", "lsZoneTargetMember", "lsbootADef", "lsbootBootSecurity", "lsbootCategory", "lsbootDef", "lsbootDefaultLocalImage", "lsbootEmbeddedLocalDiskImage", "lsbootEmbeddedLocalDiskImagePath", "lsbootEmbeddedLocalLunImage", "lsbootIScsi", "lsbootIScsiImagePath", "lsbootImage", "lsbootImagePath", "lsbootItem", "lsbootLan", "lsbootLanImagePath", "lsbootLocalDiskImage", "lsbootLocalDiskImagePath", "lsbootLocalHddImage", "lsbootLocalImage", "lsbootLocalLunImagePath", "lsbootLocalMediaItem", "lsbootLocalStorage", "lsbootPolicy", "lsbootRemoteImage", "lsbootSan", "lsbootSanCatSanImage", "lsbootSanCatSanImagePath", "lsbootSanImage", "lsbootSanImagePath", "lsbootStorage", "lsbootUEFIBootParam", "lsbootUsbExternalImage", "lsbootUsbFlashStorageImage", "lsbootUsbInternalImage", "lsbootVirtualMedia", "lsmaintAck", "lsmaintMaintPolicy", "lstorageControllerDef", "lstorageControllerModeConfig", "lstorageControllerQualifier", "lstorageDasScsiLun", "lstorageDiskGroupConfig", "lstorageDiskGroupConfigDef", "lstorageDiskGroupConfigPolicy", "lstorageDiskGroupQualifier", "lstorageItem", "lstorageLocalDiskConfigRef", "lstorageLocalDiskRef", "lstorageProfile", "lstorageProfileBase", "lstorageProfileBinding", "lstorageProfileBindingBase", "lstorageProfileDef", "lstorageScsiLun", "lstorageVirtualDriveDef", "macpoolAddr", "macpoolBlock", "macpoolFormat", "macpoolPool", "macpoolPoolable", "macpoolPooled", "macpoolUniverse", "memoryArray", "memoryArrayEnvStats", "memoryArrayEnvStatsHist", "memoryBufferUnit", "memoryBufferUnitEnvStats", "memoryBufferUnitEnvStatsHist", "memoryErrorStats", "memoryQual", "memoryRuntime", "memoryRuntimeHist", "memoryUnit", "memoryUnitEnvStats", "memoryUnitEnvStatsHist", "mgmtAccessPolicy", "mgmtAccessPolicyItem", "mgmtAccessPort", "mgmtBackup", "mgmtBackupExportExtPolicy", "mgmtBackupFsm", "mgmtBackupFsmStage", "mgmtBackupFsmTask", "mgmtBackupPolicy", "mgmtBackupPolicyConfig", "mgmtBackupPolicyFsm", "mgmtBackupPolicyFsmStage", "mgmtCfgExportPolicy", "mgmtCfgExportPolicyFsm", "mgmtCfgExportPolicyFsmStage", "mgmtCimcInterface", "mgmtCimcSecureBoot", "mgmtConnection", "mgmtController", "mgmtControllerFsm", "mgmtControllerFsmStage", "mgmtControllerFsmTask", "mgmtEntity", 
"mgmtExportPolicy", "mgmtExportPolicyFsm", "mgmtExportPolicyFsmStage", "mgmtExportPolicyFsmTask", "mgmtHealthAttr", "mgmtHealthStatus", "mgmtIPv6Addr", "mgmtIPv6IfAddr", "mgmtIPv6IfAddrFsm", "mgmtIPv6IfAddrFsmStage", "mgmtIPv6IfAddrFsmTask", "mgmtIPv6IfConfig", "mgmtIf", "mgmtIfFsm", "mgmtIfFsmStage", "mgmtIfFsmTask", "mgmtImporter", "mgmtImporterFsm", "mgmtImporterFsmStage", "mgmtImporterFsmTask", "mgmtInbandProfile", "mgmtIntAuthPolicy", "mgmtInterface", "mgmtPmonEntry", "mgmtProfDerivedInterface", "mgmtVnet", "moTopProps", "namingNamedIdentifiedObject", "namingNamedObject", "networkALanNeighborEntry", "networkANeighborEntry", "networkCIoEp", "networkConn", "networkDomainEp", "networkElement", "networkEp", "networkIfEp", "networkIfStats", "networkLanNeighborEntry", "networkLanNeighbors", "networkLldpNeighborEntry", "networkLldpNeighbors", "networkOperLevel", "networkPIoEp", "networkPhysEp", "networkSanNeighborEntry", "networkSanNeighbors", "networkVnetEp", "networkruleDefinition", "networkruleItem", "networkruleRequirement", "nfsEp", "nfsMountDef", "nfsMountDefFsm", "nfsMountDefFsmStage", "nfsMountDefFsmTask", "nfsMountInst", "nfsMountInstFsm", "nfsMountInstFsmStage", "nfsMountInstFsmTask", "nwctrlDefinition", "observeFilter", "observeObserved", "observeObservedCont", "observeObservedFsm", "observeObservedFsmStage", "observeObservedFsmTask", "orgOrg", "orgSourceMask", "osARPLinkMonitoringPolicy", "osARPTarget", "osAgent", "osEthBondIntf", "osEthBondMode", "osEthBondModeActiveBackup", "osEthBondModeBalancedALB", "osEthBondModeBalancedRR", "osEthBondModeBalancedTLB", "osEthBondModeBalancedXOR", "osEthBondModeBroadcast", "osEthBondModeLB", "osEthIntf", "osInstance", "osIntf", "osLinkMonitoringPolicy", "osMiiLinkMonitoringPolicy", "osPrimarySlave", "pciCard", "pciEquipSlot", "pciUnit", "pkiCertReq", "pkiDefinition", "pkiEp", "pkiEpFsm", "pkiEpFsmStage", "pkiEpFsmTask", "pkiItem", "pkiKeyRing", "pkiTP", "policyBinding", "policyCentraleSync", "policyCommunication", "policyConfigBackup", "policyControl", "policyControlEp", "policyControlEpFsm", "policyControlEpFsmStage", "policyControlEpFsmTask", "policyControlled", "policyControlledInstance", "policyControlledType", "policyControlledTypeFsm", "policyControlledTypeFsmStage", "policyControlledTypeFsmTask", "policyDateTime", "policyDefinition", "policyDigest", "policyDiscovery", "policyDns", "policyElement", "policyEquipment", "policyFault", "policyHolder", "policyIdResolvePolicy", "policyInfraFirmware", "policyItem", "policyLocalMap", "policyMEp", "policyMonitoring", "policyObject", "policyPolicyEp", "policyPolicyRequestor", "policyPolicyScope", "policyPolicyScopeCont", "policyPolicyScopeContext", "policyPolicyScopeFsm", "policyPolicyScopeFsmStage", "policyPolicyScopeFsmTask", "policyPortConfig", "policyPowerMgmt", "policyPsu", "policyRefReq", "policySecurity", "policyStorageAutoConfig", "policySystemEp", "poolElement", "poolPool", "poolPoolMember", "poolPoolable", "poolUniverse", "portDomainEp", "portGroup", "portIntFIo", "portPIo", "portPIoFsm", "portPIoFsmStage", "portPIoFsmTask", "portPhysSwitchIo", "portServerIntFIo", "portSubGroup", "portSwitchIntFIo", "portTrustMode", "powerABudget", "powerAGroup", "powerBudget", "powerChassisMember", "powerEp", "powerGroup", "powerGroupAdditionPolicy", "powerGroupMember", "powerGroupQual", "powerGroupStats", "powerGroupStatsHist", "powerMgmtPolicy", "powerPlacement", "powerPolicy", "powerPrioWght", "powerProfiledPower", "powerRackUnitMember", "procDoer", "procManager", "procProcCounts", "procProcs", 
"procPrt", "procPrtCounts", "procStimulusCounts", "procSvc", "procTxCounts", "processorComponent", "processorCore", "processorEnvStats", "processorEnvStatsHist", "processorErrorStats", "processorQual", "processorRuntime", "processorRuntimeHist", "processorThread", "processorUnit", "processorUnitAssocCtx", "qosDefinition", "qosItem", "qosclassDefinition", "qosclassDefinitionFsm", "qosclassDefinitionFsmStage", "qosclassDefinitionFsmTask", "qosclassEth", "qosclassEthBE", "qosclassEthClassified", "qosclassFc", "qosclassItem", "queryresultDependency", "queryresultItem", "queryresultUsage", "ruleDefinition", "ruleItem", "ruleRequirement", "ruleSizeRequirement", "securityUnit", "solConfig", "solDef", "solIf", "solPolicy", "statsCollectionPolicy", "statsCollectionPolicyFsm", "statsCollectionPolicyFsmStage", "statsCollectionPolicyFsmTask", "statsCurr", "statsHist", "statsHolder", "statsItem", "statsThr32Definition", "statsThr32Value", "statsThr64Definition", "statsThr64Value", "statsThrFloatDefinition", "statsThrFloatValue", "statsThresholdClass", "statsThresholdDefinition", "statsThresholdPolicy", "statsThresholdValue", "storageADef", "storageALun", "storageALunRefBase", "storageAuthKey", "storageConnectionDef", "storageConnectionPolicy", "storageController", "storageControllerBase", "storageDevice", "storageDiskEnvStats", "storageDiskEnvStatsHist", "storageDomainEp", "storageDrive", "storageEnclosure", "storageEp", "storageEpUser", "storageEtherIf", "storageFcIf", "storageFcTargetEp", "storageFcTargetIf", "storageFlexFlashCard", "storageFlexFlashController", "storageFlexFlashControllerFsm", "storageFlexFlashControllerFsmStage", "storageFlexFlashControllerFsmTask", "storageFlexFlashDrive", "storageFlexFlashVirtualDrive", "storageIScsiTargetIf", "storageIniGroup", "storageInitiator", "storageItem", "storageL2If", "storageLocalDisk", "storageLocalDiskBase", "storageLocalDiskConfig", "storageLocalDiskConfigDef", "storageLocalDiskConfigPolicy", "storageLocalDiskPartition", "storageLocalDiskSlotEp", "storageLocalLun", "storageLogical", "storageLunDisk", "storageLunResourceSelectionLog", "storageMezzFlashLife", "storageNodeEp", "storageOperation", "storagePhysical", "storageQual", "storageRaidBattery", "storageSasExpander", "storageScsiLunRef", "storageSystem", "storageSystemFsm", "storageSystemFsmStage", "storageSystemFsmTask", "storageTarget", "storageTargetIf", "storageTransportIf", "storageTransportableFlashModule", "storageUnit", "storageVDMemberEp", "storageVirtualDrive", "storageVirtualDriveRef", "storageVsanRef", "swAccessDomain", "swAccessDomainFsm", "swAccessDomainFsmStage", "swAccessDomainFsmTask", "swAccessEp", "swBorderDomain", "swBorderEp", "swBorderPc", "swCIoEp", "swCardEnvStats", "swCardEnvStatsHist", "swCmclan", "swDomain", "swEnvStats", "swEnvStatsHist", "swEstcEp", "swEthEstcEp", "swEthEstcPc", "swEthFlowMonSession", "swEthLanBorder", "swEthLanBorderFsm", "swEthLanBorderFsmStage", "swEthLanBorderFsmTask", "swEthLanEp", "swEthLanFlowMon", "swEthLanFlowMonExporter", "swEthLanFlowMonFsm", "swEthLanFlowMonFsmStage", "swEthLanFlowMonFsmTask", "swEthLanFlowMonitor", "swEthLanFlowRecordDef", "swEthLanMon", "swEthLanPc", "swEthMon", "swEthMonDestEp", "swEthMonFsm", "swEthMonFsmStage", "swEthMonFsmTask", "swEthMonSrcEp", "swEthTargetEp", "swExtUtility", "swExtUtilityFsm", "swExtUtilityFsmStage", "swExtUtilityFsmTask", "swFabricZoneNs", "swFabricZoneNsOverride", "swFcEstcEp", "swFcMon", "swFcMonDestEp", "swFcMonFsm", "swFcMonFsmStage", "swFcMonFsmTask", "swFcMonSrcEp", "swFcSanBorder", 
"swFcSanBorderFsm", "swFcSanBorderFsmStage", "swFcSanBorderFsmTask", "swFcSanEp", "swFcSanMon", "swFcSanPc", "swFcServerZoneGroup", "swFcZone", "swFcZoneMember", "swFcZoneSet", "swFcoeEstcEp", "swFcoeSanEp", "swFcoeSanPc", "swFlowMonExporter", "swFlowMonitor", "swFlowRecordDef", "swIpRoute", "swLanBorder", "swLanEp", "swLanMon", "swLanPc", "swMon", "swMonDestEp", "swMonDomain", "swMonSrcEp", "swNFExporterRef", "swNetflowExporter", "swNetflowMonSession", "swNetflowMonitor", "swNetflowMonitorRef", "swNetflowRecordDef", "swPIoEp", "swPhys", "swPhysEtherEp", "swPhysFcEp", "swPhysFsm", "swPhysFsmStage", "swPhysFsmTask", "swPortBreakout", "swSanBorder", "swSanEp", "swSanMon", "swSanPc", "swSubGroup", "swSystemStats", "swSystemStatsHist", "swTargetEp", "swUlan", "swUtilityDomain", "swUtilityDomainFsm", "swUtilityDomainFsmStage", "swUtilityDomainFsmTask", "swVIFRef", "swVirtL3Intf", "swVlan", "swVlanGroup", "swVlanPortNs", "swVlanPortNsOverride", "swVlanRef", "swVsan", "swZoneInitiatorMember", "swZoneTargetMember", "swatAction", "swatCondition", "swatInjection", "swatResultstats", "swatTarget", "swatTrigger", "syntheticDirectory", "syntheticFile", "syntheticFileSystem", "syntheticFsObj", "syntheticFsObjFsm", "syntheticFsObjFsmStage", "syntheticFsObjFsmTask", "syntheticTime", "sysdebugAutoCoreFileExportTarget", "sysdebugAutoCoreFileExportTargetFsm", "sysdebugAutoCoreFileExportTargetFsmStage", "sysdebugAutoCoreFileExportTargetFsmTask", "sysdebugBackupBehavior", "sysdebugCore", "sysdebugCoreFileExportTarget", "sysdebugCoreFileRepository", "sysdebugCoreFsm", "sysdebugCoreFsmStage", "sysdebugCoreFsmTask", "sysdebugEp", "sysdebugExporter", "sysdebugFile", "sysdebugLogBehavior", "sysdebugLogControlDestinationFile", "sysdebugLogControlDestinationSyslog", "sysdebugLogControlDomain", "sysdebugLogControlEp", "sysdebugLogControlEpFsm", "sysdebugLogControlEpFsmStage", "sysdebugLogControlEpFsmTask", "sysdebugLogControlModule", "sysdebugLogExportPolicy", "sysdebugLogExportPolicyFsm", "sysdebugLogExportPolicyFsmStage", "sysdebugLogExportPolicyFsmTask", "sysdebugLogExportStatus", "sysdebugMEpLog", "sysdebugMEpLogDef", "sysdebugMEpLogPolicy", "sysdebugManualCoreFileExportTarget", "sysdebugManualCoreFileExportTargetFsm", "sysdebugManualCoreFileExportTargetFsmStage", "sysdebugManualCoreFileExportTargetFsmTask", "sysdebugRepository", "sysdebugTechSupFileRepository", "sysdebugTechSupport", "sysdebugTechSupportCmdOpt", "sysdebugTechSupportFsm", "sysdebugTechSupportFsmStage", "sysdebugTechSupportFsmTask", "sysfileDigest", "sysfileEp", "sysfileExporter", "sysfileImporter", "sysfileInstance", "sysfileMutation", "sysfileMutationFsm", "sysfileMutationFsmStage", "sysfileMutationFsmTask", "sysfileRepository", "topInfoPolicy", "topInfoSyncPolicy", "topMetaInf", "topRoot", "topSysDefaults", "topSystem", "trigAbsWindow", "trigAck", "trigBaseAbsWindow", "trigBaseRecurrWindow", "trigBaseSched", "trigClientToken", "trigConfAck", "trigInst", "trigLocalAbsWindow", "trigLocalSched", "trigMeta", "trigRecurrWindow", "trigResAck", "trigSched", "trigSchedWindow", "trigTest", "trigToken", "trigTriggerable", "trigTriggered", "trigWindow", "unspecified", "uuidpoolAddr", "uuidpoolBlock", "uuidpoolFormat", "uuidpoolPool", "uuidpoolPoolable", "uuidpoolPooled", "uuidpoolUniverse", "versionApplication", "versionEp", "versionVersion", "vmAdaptor", "vmClientContainer", "vmComputeEp", "vmCont", "vmDC", "vmDCOrg", "vmDirCont", "vmEp", "vmHba", "vmHv", "vmInstance", "vmLifeCyclePolicy", "vmLifeCyclePolicyFsm", "vmLifeCyclePolicyFsmStage", 
"vmLifeCyclePolicyFsmTask", "vmNic", "vmOrg", "vmSwitch", "vmVif", "vmVirtual", "vmVlan", "vmVnicProfCl", "vmVnicProfInst", "vmVsan", "vnicABeh", "vnicAEtherIf", "vnicAFcIf", "vnicAGroup", "vnicAIpcIf", "vnicAScsiIf", "vnicBootIpPolicy", "vnicBootTarget", "vnicConPolicyRef", "vnicConReq", "vnicConnDef", "vnicConnection", "vnicDefBeh", "vnicDynamicCon", "vnicDynamicConPolicy", "vnicDynamicConPolicyRef", "vnicDynamicConReq", "vnicDynamicIdUniverse", "vnicDynamicProvider", "vnicDynamicProviderEp", "vnicEthConfig", "vnicEthLif", "vnicEther", "vnicEtherBase", "vnicEtherBaseIf", "vnicEtherIf", "vnicFc", "vnicFcBase", "vnicFcGroupDef", "vnicFcGroupTempl", "vnicFcIf", "vnicFcLif", "vnicFcNode", "vnicFcOEIf", "vnicIPIf", "vnicIPv4Dhcp", "vnicIPv4Dns", "vnicIPv4If", "vnicIPv4IscsiAddr", "vnicIPv4PooledIscsiAddr", "vnicIPv4StaticRoute", "vnicIPv6If", "vnicIScsi", "vnicIScsiAutoTargetIf", "vnicIScsiBase", "vnicIScsiBootParams", "vnicIScsiBootVnic", "vnicIScsiConfig", "vnicIScsiInitAutoConfigPolicy", "vnicIScsiLCP", "vnicIScsiNode", "vnicIScsiStaticTargetIf", "vnicIScsiTargetIf", "vnicIf", "vnicIfRole", "vnicInitiatorAutoConfigPolicy", "vnicInternalProfile", "vnicIpAddr", "vnicIpV4Addr", "vnicIpV4AddrConf", "vnicIpV4AddrExplConf", "vnicIpV4History", "vnicIpV4MgmtPooledAddr", "vnicIpV4PooledAddr", "vnicIpV4ProfDerivedAddr", "vnicIpV4StaticAddr", "vnicIpV6Addr", "vnicIpV6AddrConf", "vnicIpV6AddrExplConf", "vnicIpV6History", "vnicIpV6MgmtPooledAddr", "vnicIpV6StaticAddr", "vnicIpc", "vnicIpcIf", "vnicIqnHistory", "vnicL2If", "vnicL2Lif", "vnicL3If", "vnicLanConnPolicy", "vnicLanConnTempl", "vnicLifVlan", "vnicLifVsan", "vnicLun", "vnicMacHistory", "vnicNicConn", "vnicOProfileAlias", "vnicProfile", "vnicProfileAlias", "vnicProfileRef", "vnicProfileSet", "vnicProfileSetFsm", "vnicProfileSetFsmStage", "vnicProfileSetFsmTask", "vnicRackServerDiscoveryProfile", "vnicSanConnPolicy", "vnicSanConnTempl", "vnicScsi", "vnicScsiIf", "vnicTempl", "vnicUsnicConPolicy", "vnicUsnicConPolicyRef", "vnicUsnicConReq", "vnicVProfileAlias", "vnicVhbaBehPolicy", "vnicVlan", "vnicVmqConPolicy", "vnicVmqConPolicyRef", "vnicVmqConReq", "vnicVnic", "vnicVnicBehPolicy", "vnicWwnnHistory", "vnicWwpnHistory"], []),
"oper_code": MoPropertyMeta("oper_code", "operCode", "string", VersionMeta.Version302a, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["create", "delete"], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version302a, MoPropertyMeta.READ_ONLY, 0x20, 0, 256, None, [], []),
"sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302a, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []),
"secondary_key": MoPropertyMeta("secondary_key", "secondaryKey", "string", VersionMeta.Version302a, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"source_mo_dn": MoPropertyMeta("source_mo_dn", "sourceMoDn", "string", VersionMeta.Version302a, MoPropertyMeta.READ_WRITE, 0x40, 0, 256, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version302a, MoPropertyMeta.READ_WRITE, 0x80, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
}
prop_map = {
"childAction": "child_action",
"clientMoDn": "client_mo_dn",
"dn": "dn",
"id": "id",
"isSystem": "is_system",
"moClassId": "mo_class_id",
"operCode": "oper_code",
"rn": "rn",
"sacl": "sacl",
"secondaryKey": "secondary_key",
"sourceMoDn": "source_mo_dn",
"status": "status",
}
def __init__(self, parent_mo_or_dn, id, **kwargs):
self._dirty_mask = 0
self.id = id
self.child_action = None
self.client_mo_dn = None
self.is_system = None
self.mo_class_id = None
self.oper_code = None
self.sacl = None
self.secondary_key = None
self.source_mo_dn = None
self.status = None
ManagedObject.__init__(self, "DupeScope", parent_mo_or_dn, **kwargs)
|
"""
m2g.utils.reg_utils
~~~~~~~~~~~~~~~~~~~~
Contains small-scale registration utilities.
"""
# standard library imports
import os
import subprocess
# package imports
import nibabel as nib
import numpy as np
import nilearn.image as nl
from dipy.align.imaffine import MutualInformationMetric
from dipy.align.imaffine import AffineRegistration
from dipy.align.imaffine import transform_origins
from dipy.align.transforms import TranslationTransform3D
from dipy.align.transforms import RigidTransform3D
from dipy.align.transforms import AffineTransform3D
from dipy.align.imwarp import SymmetricDiffeomorphicRegistration
from dipy.align.metrics import CCMetric
from dipy.viz import regtools
# m2g imports
from m2g.utils import gen_utils
from m2g.utils.gen_utils import print_arguments, timer
def erode_mask(mask, v=0):
"""A function to erode a mask by a specified number of voxels. Here, we define
erosion as the process of checking whether all the voxels within a number of voxels
for a mask have valuess.
Parameters
----------
mask : array
a numpy array of a mask to be eroded
v : int, optional
the number of voxels to erode by, by default 0
Returns
-------
numpy array
eroded mask
Raises
------
ValueError
The mask you provided for erosion has an invalid shape (must be x.shape=y.shape=z.shape)
"""
print("Eroding Mask...")
for i in range(0, v):
        # x, y, z hold the coords of every nonzero voxel in the mask
erode_mask = np.zeros(mask.shape)
x, y, z = np.where(mask != 0)
if x.shape == y.shape and y.shape == z.shape:
            # iterate over all the nonzero voxels
for j in range(0, x.shape[0]):
                # check that all six face-adjacent voxels are also 1
                # if so, add the voxel to the new mask
md = mask.shape
if (
mask[x[j], y[j], z[j]]
and mask[np.min((x[j] + 1, md[0] - 1)), y[j], z[j]]
and mask[x[j], np.min((y[j] + 1, md[1] - 1)), z[j]]
and mask[x[j], y[j], np.min((z[j] + 1, md[2] - 1))]
and mask[np.max((x[j] - 1, 0)), y[j], z[j]]
and mask[x[j], np.max((y[j] - 1, 0)), z[j]]
and mask[x[j], y[j], np.max((z[j] - 1, 0))]
):
erode_mask[x[j], y[j], z[j]] = 1
else:
raise ValueError("Your mask erosion has an invalid shape.")
mask = erode_mask
return mask
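# A minimal usage sketch for erode_mask (a worked toy example, not real data):
# a 3x3x3 cube of ones inside a 5x5x5 volume erodes to its single center
# voxel, since only the center has all six face-adjacent neighbors set.
#   m = np.zeros((5, 5, 5))
#   m[1:4, 1:4, 1:4] = 1
#   assert erode_mask(m, v=1).sum() == 1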
@print_arguments(inputs=[0], outputs=[1])
def probmap2mask(prob_map, mask_path, t, erode=0):
"""
A function to extract a mask from a probability map.
Also, performs mask erosion as a substep.
**Positional Arguments:**
prob_map:
- the path to probability map for the given class
of brain tissue.
mask_path:
- the path to the extracted mask.
t:
- the threshold to consider voxels part of the class.
erode=0:
- the number of voxels to erode by. Defaults to 0.
"""
print(f"Extracting Mask from probability map {prob_map}...")
prob = nib.load(prob_map)
prob_dat = prob.get_data()
mask = (prob_dat > t).astype(int)
if erode > 0:
mask = erode_mask(mask, v=erode)
img = nib.Nifti1Image(mask, header=prob.header, affine=prob.get_affine())
# save the corrected image
nib.save(img, mask_path)
return mask_path
@print_arguments(inputs=[0, 1], outputs=[2])
def apply_mask(inp, mask, out):
"""A function to generate a brain-only mask for an input image using 3dcalc
Parameters
----------
inp : str
path for the input image. If 4d, the mask should be 4d. If 3d, the mask should be 3d.
mask : str
path to the mask to apply to the data. Should be nonzero in mask region.
out : str
the path for the output skull-extracted image.
"""
cmd = f'3dcalc -a {inp} -b {mask} -expr "a*step(b)" -prefix {out}'
gen_utils.run(cmd)
@print_arguments(inputs=[1], outputs=[2])
def extract_t1w_brain(t1w, out, tmpdir, skull="none"):
"""A function to extract the brain from an input T1w image
using AFNI's brain extraction utilities.
Parameters
----------
t1w : str
path for the input T1w image
out : str
path for the output brain image
tmpdir : str
Path for the temporary directory to store images
skull : str, optional
skullstrip parameter pre-set. Default is "none".
"""
t1w_name = gen_utils.get_filename(t1w)
# the t1w image with the skull removed.
skull_t1w = f"{tmpdir}/{t1w_name}_noskull.nii.gz"
# 3dskullstrip to extract the brain-only t1w
t1w_skullstrip(t1w, skull_t1w, skull)
    # 3dcalc to apply the skullstripped image as a mask over the original t1w
apply_mask(t1w, skull_t1w, out)
@print_arguments(inputs=[1], outputs=[0])
def normalize_t1w(inp, out):
"""
A function that normalizes intensity values for anatomical
T1w images. Makes brain extraction much more robust
in the event that we have poor shading in our T1w image.
**Positional Arguments:**
- inp:
- the input T1w image.
- out:
- the output intensity-normalized image.
"""
cmd = f"3dUnifize -prefix {out} -input {inp}"
gen_utils.run(cmd)
@print_arguments(inputs=[0], outputs=[1])
def resample_fsl(base, res, goal_res, interp="spline"):
"""
A function to resample a base image in fsl to that of a template.
**Positional Arguments:**
base:
- the path to the base image to resample.
res:
- the filename after resampling.
goal_res:
- the desired resolution.
interp:
- the interpolation strategy to use.
"""
# resample using an isometric transform in fsl
cmd = f"flirt -in {base} -ref {base} -out {res} -applyisoxfm {goal_res} -interp {interp}"
gen_utils.run(cmd)
def skullstrip_check(dmrireg, parcellations, outdir, prep_anat, vox_size, reg_style):
"""Peforms the alignment of atlas to dwi space and checks if the alignment results in roi loss
Parameters
----------
dmrireg : object
object created in the pipeline containing relevant paths and class methods for analysing tractography
parcellations : str, list
the path to the t1w image to be segmented
outdir : str
the basename for outputs. Often it will be most convenient for this to be the dataset, followed by the subject,
followed by the step of processing. Note that this anticipates a path as well;
ie, /path/to/dataset_sub_nuis, with no extension.
preproc_dir : str
Path to anatomical preprocessing directory location.
vox_size : str
additional options that can optionally be passed to fast. Desirable options might be -P, which will use
prior probability maps if the input T1w MRI is in standard space, by default ""
reg_style : str
Tractography space, must be either native or native_dsn
Returns
-------
list
List containing the paths to the aligned label files
Raises
------
KeyError
The atlas has lost an roi due to alignment
"""
if reg_style == "native":
dsn = False
elif reg_style == "native_dsn":
dsn = True
else:
raise ValueError("Unsupported tractography space, must be native or native_dsn")
labels_im_file_list = []
for idx, label in enumerate(parcellations):
labels_im_file = gen_utils.reorient_t1w(parcellations[idx], prep_anat)
labels_im_file = gen_utils.match_target_vox_res(
labels_im_file, vox_size, outdir, sens="anat"
)
orig_lab = nib.load(labels_im_file)
orig_lab = orig_lab.get_data().astype("int")
n_ids = orig_lab[orig_lab > 0]
num = len(np.unique(n_ids))
labels_im_file_dwi = dmrireg.atlas2t1w2dwi_align(labels_im_file, dsn)
labels_im = nib.load(labels_im_file_dwi)
align_lab = labels_im.get_data().astype("int")
n_ids_2 = align_lab[align_lab > 0]
num2 = len(np.unique(n_ids_2))
if num != num2:
print('''WARNING: The atlas has lost an roi due to alignment! A file containing the lost ROI values will be generated in the
same folder as the connectome output. Try rerunning m2g with the appropriate --skull flag.'''
)
labels_im_file_list.append(labels_im_file_dwi)
return labels_im_file_list
@timer
@print_arguments(inputs=[0], outputs=[1])
def t1w_skullstrip(t1w, out, skull=None):
"""Skull-strips the t1w image using AFNIs 3dSkullStrip algorithm, which is a modification of FSLs BET specialized to t1w images.
Offers robust skull-stripping with no hyperparameters
Note: renormalizes the intensities, call extract_t1w_brain instead if you want the original intensity values
Parameters
----------
t1w : str
path for the input t1w image file
out : str
path for the output skull-stripped image file
skull : str, optional
        skullstrip parameter pre-set, by default None
"""
if skull == "below":
cmd = f"3dSkullStrip -prefix {out} -input {t1w} -shrink_fac_bot_lim 0.6 -ld 45"
elif skull == "cerebelum":
cmd = f"3dSkullStrip -prefix {out} -input {t1w} -shrink_fac_bot_lim 0.3 -ld 45"
elif skull == "eye":
cmd = f"3dSkullStrip -prefix {out} -input {t1w} -no_avoid_eyes -ld 45"
elif skull == "general":
cmd = f"3dSkullStrip -prefix {out} -input {t1w} -push_to_edge -ld 45"
else:
cmd = f"3dSkullStrip -prefix {out} -input {t1w} -ld 30"
gen_utils.run(cmd)
@print_arguments(inputs=[0], outputs=[1])
def segment_t1w(t1w, basename, opts=""):
"""Uses FSLs FAST to segment an anatomical image into GM, WM, and CSF probability maps.
Parameters
----------
t1w : str
the path to the t1w image to be segmented
basename : str
the basename for outputs. Often it will be most convenient for this to be the dataset, followed by the subject,
followed by the step of processing. Note that this anticipates a path as well;
ie, /path/to/dataset_sub_nuis, with no extension.
opts : str, optional
additional options that can optionally be passed to fast. Desirable options might be -P, which will use
prior probability maps if the input T1w MRI is in standard space, by default ""
Returns
-------
dict
dictionary of output files
"""
# run FAST, with options -t for the image type and -n to
# segment into CSF (pve_0), WM (pve_1), GM (pve_2)
cmd = f"fast -t 1 {opts} -n 3 -o {basename} {t1w}"
gen_utils.run(cmd)
out = {} # the outputs
out["wm_prob"] = f"{basename}_pve_2.nii.gz"
out["gm_prob"] = f"{basename}_pve_1.nii.gz"
out["csf_prob"] = f"{basename}_pve_0.nii.gz"
return out
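# A minimal usage sketch chaining segment_t1w with probmap2mask (the paths are
# hypothetical):
#   maps = segment_t1w("sub-01_T1w_brain.nii.gz", "/tmp/sub-01_fast")
#   probmap2mask(maps["wm_prob"], "/tmp/sub-01_wm_mask.nii.gz", t=0.5)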
@print_arguments(inputs=[0, 1], outputs=[3])
def align(
inp,
ref,
xfm=None,
out=None,
dof=12,
searchrad=True,
bins=256,
interp=None,
cost="mutualinfo",
sch=None,
wmseg=None,
init=None,
finesearch=None,
):
"""Aligns two images using FSLs flirt function and stores the transform between them
Parameters
----------
inp : str
path to input image being altered to align with the reference image as a nifti image file
ref : str
path to reference image being aligned to as a nifti image file
xfm : str, optional
where to save the 4x4 affine matrix containing the transform between two images, by default None
out : str, optional
determines whether the image will be automatically aligned and where the resulting image will be saved, by default None
dof : int, optional
        the number of degrees of freedom of the alignment, by default 12
searchrad : bool, optional
whether to use the predefined searchradius parameter (180 degree sweep in x, y, and z), by default True
bins : int, optional
number of histogram bins, by default 256
interp : str, optional
interpolation method to be used (trilinear,nearestneighbour,sinc,spline), by default None
cost : str, optional
cost function to be used in alignment (mutualinfo, corratio, normcorr, normmi, leastsq, labeldiff, or bbr), by default "mutualinfo"
sch : str, optional
the optional FLIRT schedule, by default None
wmseg : str, optional
an optional white-matter segmentation for bbr, by default None
init : str, optional
an initial guess of an alignment in the form of the path to a matrix file, by default None
finesearch : int, optional
angle in degrees, by default None
"""
cmd = f"flirt -in {inp} -ref {ref}"
if xfm is not None:
cmd += f" -omat {xfm}"
if out is not None:
cmd += f" -out {out}"
if dof is not None:
cmd += f" -dof {dof}"
if bins is not None:
cmd += f" -bins {bins}"
if interp is not None:
cmd += f" -interp {interp}"
if cost is not None:
cmd += f" -cost {cost}"
if searchrad is not None:
cmd += " -searchrx -180 180 -searchry -180 180 " + "-searchrz -180 180"
if sch is not None:
cmd += f" -schedule {sch}"
if wmseg is not None:
cmd += f" -wmseg {wmseg}"
if init is not None:
cmd += f" -init {init}"
gen_utils.run(cmd)
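# A minimal usage sketch for align() (the paths are hypothetical): a 12-dof
# affine registration of a T1w image to an MNI template, saving both the
# transform matrix and the resampled image.
#   align("sub-01_T1w.nii.gz", "MNI152_T1_2mm.nii.gz",
#         xfm="t1w2mni.mat", out="t1w_in_mni.nii.gz", interp="trilinear")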
@print_arguments(inputs=[0, 1, 2], outputs=[3])
def align_epi(epi, t1, brain, out):
"""
    Aligns EPI images to the T1w image
"""
cmd = f"epi_reg --epi={epi} --t1={t1} --t1brain={brain} --out={out}"
gen_utils.run(cmd)
@timer
@print_arguments(inputs=[0, 1], outputs=[3])
def align_nonlinear(inp, ref, xfm, out, warp, ref_mask=None, in_mask=None, config=None):
"""Aligns two images using nonlinear methods and stores the transform between them using fnirt
Parameters
----------
inp : str
path to the input image
ref : str
path to the reference image that the input will be aligned to
xfm : str
path to the file containing the affine transform matrix created by reg_utils.align()
out : str
path for the desired output image
warp : str
the path to store the output file containing the nonlinear warp coefficients/fields
ref_mask : str, optional
path to the reference image brain_mask, by default None
in_mask : str, optional
path for the file with mask in input image space, by default None
config : str, optional
path to the config file specifying command line arguments, by default None
"""
cmd = f"fnirt --in={inp} --ref={ref} --aff={xfm} --iout={out} --cout={warp} --warpres=8,8,8"
if ref_mask is not None:
cmd += f" --refmask={ref_mask} --applyrefmask=1"
if in_mask is not None:
cmd += f" --inmask={in_mask} --applyinmask=1"
if config is not None:
cmd += f" --config={config}"
gen_utils.run(cmd)
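# Typical pairing (hypothetical paths): align() supplies the affine seed that
# fnirt then refines nonlinearly.
#   align("t1w.nii.gz", "mni.nii.gz", xfm="t1w2mni.mat")
#   align_nonlinear("t1w.nii.gz", "mni.nii.gz", "t1w2mni.mat",
#                   "t1w_in_mni.nii.gz", "warp_t1w2mni.nii.gz")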
@print_arguments(inputs=[0, 1, 2], outputs=[3])
def applyxfm(ref, inp, xfm, aligned, interp="trilinear", dof=6):
"""Aligns two images with a given transform using FSLs flirt command
Parameters
----------
ref : str
path of reference image to be aligned to as a nifti image file
inp : str
path of input image to be aligned as a nifti image file
xfm : str
path to the transform matrix between the two images
aligned : str
path for the output aligned image
interp : str, optional
interpolation method, by default "trilinear"
dof : int, optional
degrees of freedom for the alignment, by default 6
"""
cmd = f"flirt -in {inp} -ref {ref} -out {aligned} -init {xfm} -interp {interp} -dof {dof} -applyxfm"
gen_utils.run(cmd)
@print_arguments(inputs=[0, 1], outputs=[2, 3])
def apply_warp(ref, inp, out, warp, xfm=None, mask=None, interp=None, sup=False):
"""Applies a warp from the structural to reference space in a single step using information about
the structural -> ref mapping as well as the functional to structural mapping.
Parameters
----------
ref : str
path of the reference image to be aligned to
inp : str
path of the input image to be aligned
out : str
path for the resulting warped output image
warp : str
        path for the warp coefficient file to go from inp -> ref
xfm : str, optional
path of the affine transformation matrix file from inp -> ref, by default None
mask : str, optional
path of filename for mask image (in reference space), by default None
interp : str, optional
interpolation method {nn, trilinear, sinc, spline}, by default None
sup : bool, optional
whether to perform automatic intermediary supersampling, by default False
"""
cmd = (
"applywarp --ref=" + ref + " --in=" + inp + " --out=" + out + " --warp=" + warp
)
if xfm is not None:
cmd += " --premat=" + xfm
if mask is not None:
cmd += " --mask=" + mask
if interp is not None:
cmd += " --interp=" + interp
if sup is True:
cmd += " --super --superlevel=a"
gen_utils.run(cmd)
@print_arguments(inputs=[0, 2], outputs=[1])
def inverse_warp(ref, out, warp):
"""Takes a non-linear mapping and finds the inverse. Takes the file conaining warp-coefficients/fields specified in the
variable warp (t1w -> mni) and creates its inverse (mni -> t1w) which is saved in the location determined by the variable out
Parameters
----------
ref : str
        path to a file in the target space of the inverse warp, i.e. a different space than that of warp (an image that has not been mapped to mni)
out : str
path to the output file, containing warps that are now inverted
warp : str
path to the warp/shiftmap transform volume wanting to be inverted
"""
cmd = "invwarp --warp=" + warp + " --out=" + out + " --ref=" + ref
gen_utils.run(cmd)
@print_arguments(inputs=[0, 2], outputs=[1])
def resample(base, ingested, template):
"""
Resamples the image such that images which have already been aligned
in real coordinates also overlap in the image/voxel space.
**Positional Arguments**
base:
- Image to be aligned
ingested:
- Name of image after alignment
template:
- Image that is the target of the alignment
"""
# Loads images
template_im = nib.load(template)
base_im = nib.load(base)
# Aligns images
target_im = nl.resample_img(
base_im,
target_affine=template_im.get_affine(),
target_shape=template_im.get_data().shape,
interpolation="nearest",
)
# Saves new image
nib.save(target_im, ingested)
@print_arguments(inputs=[0, 1], outputs=[2])
def combine_xfms(xfm1, xfm2, xfmout):
"""A function to combine two transformations and output the resulting transformation
Parameters
----------
xfm1 : str
path to the first transformation
xfm2 : str
path to the second transformation
xfmout : str
        path for the output transformation
"""
cmd = f"convert_xfm -omat {xfmout} -concat {xfm1} {xfm2}"
gen_utils.run(cmd)
@print_arguments(inputs=[0, 1], outputs=[2])
def wm_syn(template_path, fa_path, working_dir):
"""A function to perform ANTS SyN registration using dipy functions
Parameters
----------
template_path : str
File path to the template reference FA image.
fa_path : str
File path to the FA moving image (image to be fitted to reference)
working_dir : str
Path to the working directory to perform SyN and save outputs.
Returns
-------
DiffeomorphicMap
An object that can be used to register images back and forth between static (template) and moving (FA) domains
AffineMap
An object used to transform the moving (FA) image towards the static image (template)
"""
fa_img = nib.load(fa_path)
template_img = nib.load(template_path)
static = template_img.get_data()
static_affine = template_img.affine
moving = fa_img.get_data().astype(np.float32)
moving_affine = fa_img.affine
affine_map = transform_origins(static, static_affine, moving, moving_affine)
nbins = 32
sampling_prop = None
metric = MutualInformationMetric(nbins, sampling_prop)
level_iters = [10, 10, 5]
sigmas = [3.0, 1.0, 0.0]
factors = [4, 2, 1]
affine_reg = AffineRegistration(
metric=metric, level_iters=level_iters, sigmas=sigmas, factors=factors
)
transform = TranslationTransform3D()
params0 = None
translation = affine_reg.optimize(
static, moving, transform, params0, static_affine, moving_affine
)
transform = RigidTransform3D()
rigid_map = affine_reg.optimize(
static,
moving,
transform,
params0,
static_affine,
moving_affine,
starting_affine=translation.affine,
)
transform = AffineTransform3D()
# We bump up the iterations to get a more exact fit:
affine_reg.level_iters = [1000, 1000, 100]
affine_opt = affine_reg.optimize(
static,
moving,
transform,
params0,
static_affine,
moving_affine,
starting_affine=rigid_map.affine,
)
# We now perform the non-rigid deformation using the Symmetric Diffeomorphic Registration(SyN) Algorithm:
metric = CCMetric(3)
level_iters = [10, 10, 5]
sdr = SymmetricDiffeomorphicRegistration(metric, level_iters)
mapping = sdr.optimize(
static, moving, static_affine, moving_affine, affine_opt.affine
)
warped_moving = mapping.transform(moving)
# We show the registration result with:
regtools.overlay_slices(
static,
warped_moving,
None,
0,
"Static",
"Moving",
f"{working_dir}/transformed_sagittal.png",
)
regtools.overlay_slices(
static,
warped_moving,
None,
1,
"Static",
"Moving",
f"{working_dir}/transformed_coronal.png",
)
regtools.overlay_slices(
static,
warped_moving,
None,
2,
"Static",
"Moving",
f"{working_dir}/transformed_axial.png",
)
return mapping, affine_map
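# A minimal usage sketch (hypothetical paths). The returned DiffeomorphicMap
# can warp volumes between the FA and template spaces:
#   mapping, affine_map = wm_syn("FMRIB58_FA_2mm.nii.gz",
#                                "sub-01_fa.nii.gz", "/tmp/syn_work")
#   warped = mapping.transform(nib.load("sub-01_fa.nii.gz").get_data())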
|
import os
import socket
import sys
import threading
import queue
import time
os.system("cls"),
os.system("mode con lines=22 cols=38"),
os.system("rem IPV4_DOWNED"),
os.system("title PORT SCANNER"),
os.system("rem REMADE by @IPV4_DOWNED"),
common_ports = {
"21": "FTP",
"22": "SSH",
"23": "Telnet",
"25": "SMTP",
"53": "DNS",
"80": "HTTP",
"194": "IRC",
"443": "HTTPS",
"3306": "MySQL",
"25565": "Minecraft"
}
def get_scan_args():
    if len(sys.argv) == 2:
        return (sys.argv[1], 0, 1024)
    elif len(sys.argv) == 3:
        return (sys.argv[1], 0, int(sys.argv[2]))
    elif len(sys.argv) == 4:
        return (sys.argv[1], int(sys.argv[2]), int(sys.argv[3]))
    # without valid arguments the function would return None and crash later,
    # so print usage and exit instead
    print("Usage: python scanner.py <host> [end_port | start_port end_port]")
    sys.exit(1)
def is_port_open(host, port):  # Return boolean
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.settimeout(0.5)
    try:
        sock.connect((host, port))
    except socket.error:
        return False
    finally:
        sock.close()  # always release the socket, open or not
    return True
print("[40;33m======================================")
print("[40;31m ____ ____ ____ ___________")
print("[40;31m / __ \/ __ \/ __ \/_ __/ ___/")
print("[40;31m / /_/ / / / / /_/ / / / \__ \ ")
print("[40;31m / ____/ /_/ / _, _/ / / ___/ / ")
print("[40;31m /_/ \____/_/ |_| /_/ /____/ ")
print("")
print("[40;33m======================================")
print("")
def scanner_worker_thread(host):
    while True:
        port = port_queue.get()
        if is_port_open(host, port):
            if str(port) in common_ports:
                print(" [40;37m{} [40;31m([40;33m{}[40;31m) [40;32mis OPEN PORT! ".format(str(port), common_ports[str(port)]))
            else:
                print(" [40;37m{} [40;32mis OPEN PORT! [40;36m ".format(port))
        # the closing "ALL OTHER PORTS ARE CLOSED" note is printed once in the
        # main thread after the queue is drained, not once per open port
        port_queue.task_done()
scan_args = get_scan_args()
port_queue = queue.Queue()
for _ in range(20):
t = threading.Thread(target=scanner_worker_thread, kwargs={"host": scan_args[0]})
t.daemon = True
t.start()
start_time = time.time()
for port in range(scan_args[1], scan_args[2]):
port_queue.put(port)
port_queue.join()
end_time = time.time()
# print the closing note once, after every queued port has been checked
print(" [40;33mALL OTHER PORTS ARE CLOSED")
print(" [40;33mIF NOT SHOWN")
print("")
print(" [40;37mScanning Time {:.3f} seconds.".format(end_time - start_time))
os.system("pause >nul"),
print("exit"),
|
from fastapi import FastAPI
from config.settings import settings
from config.middleware import register_middleware
from config.tortoise import register_tortoise
from config.exception import register_exception
from config.routes import register_routes
from core.helpers.responses import responses
def create_app():
app = FastAPI(
title=settings.APP_NAME,
description=None,
version=settings.APP_VERSION,
docs_url=None,
redoc_url=f'{settings.ROOT_PATH}/docs',
openapi_url=f'{settings.ROOT_PATH}/openapi.json',
responses=responses,
)
register_middleware(app)
register_tortoise(app)
register_exception(app)
register_routes(app, root=settings.ROOT_PATH)
return app
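# A rough usage sketch (the module name "main" is an assumption): either expose
# an instance for an ASGI server,
#   app = create_app()          # then: uvicorn main:app
# or point uvicorn at the factory directly:
#   uvicorn main:create_app --factory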
|
import matplotlib.pyplot as plt
#from matplotlib import rc
from matplotlib import rcParams
from MCPM.cpmfitsource import CpmFitSource
def plot_tpf_data(ra, dec, channel, campaign, file_out, half_size=2,
        stars_subtract=None, adjust=None, xlabel=None, ylabel=None, **kwargs):
    """
    Plot TPF data for given settings.
    """
    cpm_source = CpmFitSource(ra=ra, dec=dec, campaign=campaign, channel=channel)
    cpm_source.set_pixels_square(half_size)
    # avoid a mutable default argument; None stands in for "no stars"
    for (ra, dec, flux) in (stars_subtract or []):
        cpm_source.subtract_flux_from_star(ra, dec, flux)
cpm_source.plot_pixel_curves(**kwargs)
if adjust is not None:
plt.subplots_adjust(**adjust)
if xlabel is not None:
plt.figtext(0.51, 0.004, xlabel)
if ylabel is not None:
plt.figtext(0.002, 0.5, ylabel, rotation=90)
plt.savefig(file_out)
plt.close()
if __name__ == "__main__":
#stars_0241 = [[270.63370, -27.52653, 30.e3]]
stars_0241 = [[270.63370, -27.52653, 16996.5]]
plot_tpf_data(270.6323333, -27.5296111, 49, 91, "ob160241_c91_pixel_curves.png",
half_size=3, stars_subtract=stars_0241)
plot_tpf_data(270.6323333, -27.5296111, 49, 92, "ob160241_c92_pixel_curves.png",
half_size=3, stars_subtract=stars_0241)
plot_tpf_data(269.5648750, -27.9635833, 31, 92, "ob160940_pixel_curves.png")
default = rcParams['font.size']
rcParams['font.size'] = 18
plt.rc('text', usetex=True)
plot_tpf_data(
271.2375417, -28.6278056, 52, 92, "ob160975_pixel_curves.png",
adjust={"left": 0.07, "bottom":0.06, "right":.995, "top":.995},
xlabel="BJD-2450000", ylabel=r'counts [e$^-$s$^{-1}$]')
plot_tpf_data(
271.001083, -28.155111, 52, 91, "ob160795_pixel_curves.png",
adjust={"left": 0.07, "bottom":0.06, "right":.995, "top":.995},
xlabel="BJD-2450000", ylabel='counts')
plot_tpf_data(
269.886542, -28.407417, 31, 91, "ob160813_pixel_curves.png",
adjust={"left": 0.07, "bottom":0.06, "right":.995, "top":.995},
xlabel="BJD-2450000", ylabel='counts')
plot_tpf_data(
271.354292, -28.005583, 52, 92, "ob160980_pixel_curves.png",
adjust={"left": 0.07, "bottom":0.06, "right":.995, "top":.995},
xlabel="BJD-2450000", ylabel=r'counts [e$^-$s$^{-1}$]')
rcParams['font.size'] = default
plot_tpf_data(269.9291250, -28.4108333, 31, 91, "eb234840_pixel_curves.png")
|
def extractKnokkroTranslations(item):
"""
"""
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or 'preview' in item['title'].lower():
return None
if 'Eternal Life' in item['tags']:
return buildReleaseMessageWithType(item, 'Eternal Life', vol, chp, frag=frag, postfix=postfix)
return False
|
from active_work.miscellaneous import RTPring
from active_work.plot import list_colormap
from active_work.init import get_env
import matplotlib.pyplot as plt
try: plt.style.use('paper')
except (IOError, OSError): print('Matplotlib stylesheet \'paper\' does not exist.')
from matplotlib.lines import Line2D
import numpy as np
r = RTPring()
_L = [.5, 2, 10]
x = {L: np.array(sorted(np.linspace(0, np.max([-2, -1./L]), 1000, endpoint=False)) + np.linspace(0, 5, 1000).tolist()) for L in _L}
s = {L: np.array(list(map(lambda _: r.s(L, _), x[L])))*L for L in _L}
nu = {L: np.array(list(map(lambda _: r.nu(_), x[L]))) for L in _L}
nuAve = {L: np.array(list(map(lambda _: r.nuAve(L, _), x[L]))) for L in _L}
smin = -10
xmax = 0.5
# plot
colors = list_colormap(_L, sort=True)
adjust = {'left': 0.32, 'right': 0.99, 'bottom': 0.20, 'top': 0.95}
# psi
fig, ax = plt.subplots()
ax.set_xlabel(r'$\lambda L v_0$')
ax.set_xlim([-10, 10])
ax.set_xticks([-10, -8, -6, -4, -2, 0, 2, 4, 6, 8, 10])
ax.set_xticklabels(['', r'$-8$', '', r'$-4$', '', r'$0$', '', r'$4$', '', r'$8$', ''])
ax.set_ylabel(r'$\tau_{\rm p} \psi^{\rm RTP}(\lambda)$')
line = {}
for L in _L:
line[L], = ax.plot(s[L][(x[L] < xmax)*(s[L] > smin)], x[L][(x[L] < xmax)*(s[L] > smin)],
color=colors[L], label=r'$\tilde{L} = %s$' % L)
ax.axvline(x=0, color='black', linewidth=2)
ax.axhline(y=0, color='black', linewidth=2)
plt.sca(ax)
ax.add_artist(plt.legend(loc='lower right', ncol=1, borderpad=0.025, handletextpad=0.2,
handles=[
Line2D([0], [0], lw=0, label=r'$L/l=$'),
*[Line2D([0], [0], color=line[L].get_color(), label=r'$%s$' % L)
for L in _L]]))
fig.subplots_adjust(**adjust)
# nu
fign, axn = plt.subplots()
axn.set_xlabel(r'$\lambda L v_0$')
axn.set_xlim([-10, 10])
axn.set_xticks([-10, -8, -6, -4, -2, 0, 2, 4, 6, 8, 10])
axn.set_xticklabels(['', r'$-8$', '', r'$-4$', '', r'$0$', '', r'$4$', '', r'$8$', ''])
axn.set_ylabel(r'$\nu^{\rm RTP}(\lambda)$')
axn.set_ylim([0, 1])
linen = {}
for L in _L:
# linen[L], = axn.plot(s[L][(x[L] < xmax)*(s[L] > smin)], nu[L][(x[L] < xmax)*(s[L] > smin)],
linen[L], = axn.plot(s[L][(s[L] > smin)], nu[L][(s[L] > smin)],
color=colors[L], linestyle='--', label=r'$\tilde{L} = %s$' % L)
linen[L], = axn.plot(s[L][(s[L] > smin)], nuAve[L][(s[L] > smin)],
color=colors[L], linestyle='-', label=r'$\tilde{L} = %s$' % L)
axn.axvline(x=0, color='black', linewidth=2)
axn.axhline(y=0.5, color='black', linewidth=2)
plt.sca(axn)
axn.add_artist(plt.legend(loc='upper right', ncol=1, borderpad=0.025, handletextpad=0.2, labelspacing=0.2,
handles=[
Line2D([0], [0], lw=0, label=r'$L/l=$'),
*[Line2D([0], [0], color=linen[L].get_color(), label=r'$%s$' % L)
for L in _L]]))
axn.add_artist(plt.legend(loc='lower left', ncol=1, borderpad=0.025, handletextpad=0.2,
handles=[
Line2D([0], [0], color='black', linestyle='--',
label=r'$\nu^{\rm RTP}_{\rm end}$'),
Line2D([0], [0], color='black', linestyle='-',
label=r'$\nu^{\rm RTP}_{\rm ave}$')]))
fign.subplots_adjust(**adjust)
# show and save
if get_env('SAVE', default=False, vartype=bool):
def save(f, fname):
f.savefig(fname + '.eps')
f.savefig(fname + '.svg')
save(fig, 'exactPsiRTP')
save(fign, 'exactNuRTP')
if get_env('SHOW', default=True, vartype=bool): plt.show()
|
"""
Tests core functionality of naming workers when there are multiple processes.
See https://pytorch.org/tutorials/intermediate/ddp_tutorial.html to decide
how we want to support DistributedDataParallel with limited user configuration.
The key methods are
torch.distributed.get_rank() - when manually spawning processes
"""
# Standard Library
import os
import shutil
# Third Party
import numpy as np
import pytest
import torch
import torch.distributed as dist
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch import multiprocessing
from torch.multiprocessing import Process
# First Party
import smdebug.pytorch as smd
from smdebug.trials import create_trial
out_dir = "/tmp/run"
class Net(nn.Module):
"""Returns f(x) = sigmoid(w*x + b)"""
def __init__(self):
super().__init__()
self.add_module("fc", nn.Linear(1, 1))
def forward(self, x):
x = self.fc(x)
x = F.sigmoid(x)
return x
def dataset(batch_size=4):
"""Return a dataset of (data, target)."""
data = torch.rand(batch_size, 1)
target = F.sigmoid(2 * data + 1)
return data, target
def train(model, device, optimizer, num_steps=10):
"""Runs the training loop."""
model.train()
for i in range(num_steps):
batch_size = 4
data = torch.rand(batch_size, 1)
target = F.sigmoid(2 * data + 1)
data, target = data.to(device), target.to(device)
optimizer.zero_grad()
output = model(data)
loss = F.mse_loss(output, target)
loss.backward()
optimizer.step()
def run(rank, size, include_workers="one", num_epochs=10, batch_size=128, num_batches=10):
"""Distributed function to be implemented later."""
torch.manual_seed(1234)
device = torch.device("cpu")
model = Net().to(device)
optimizer = optim.SGD(model.parameters(), lr=1)
shutil.rmtree(out_dir, ignore_errors=True)
hook = smd.Hook(
out_dir=out_dir,
save_config=smd.SaveConfig(save_steps=[0, 1, 5]),
save_all=True,
include_workers=include_workers,
)
hook.register_module(model)
for epoch in range(num_epochs):
epoch_loss = 0.0
for _ in range(num_batches):
optimizer.zero_grad()
data, target = dataset(batch_size)
output = model(data)
loss = F.mse_loss(output, target)
epoch_loss += loss.item()
loss.backward()
average_gradients(model)
optimizer.step()
# print(f"Rank {dist.get_rank()}, epoch {epoch}: {epoch_loss / num_batches}")
assert hook._get_worker_name() == f"worker_{dist.get_rank()}"
# Race condition here where both workers attempt to move
# /tmp/{out_dir}/END_OF_JOB.ts to {out_dir}/END_OF_JOB.ts
try:
hook._cleanup()
except FileNotFoundError:
pass
def average_gradients(model):
"""Gradient averaging."""
size = float(dist.get_world_size())
for param in model.parameters():
dist.all_reduce(param.grad.data, op=dist.reduce_op.SUM)
param.grad.data /= size
def init_processes(rank, size, include_workers, fn, backend="gloo"):
"""Initialize the distributed environment."""
os.environ["MASTER_ADDR"] = "127.0.0.1"
os.environ["MASTER_PORT"] = "29500"
dist.init_process_group(backend, rank=rank, world_size=size)
fn(rank, size, include_workers)
def _run_net_distributed(include_workers="one"):
"""Runs a single linear layer on 2 processes."""
# torch.distributed is empty on Mac on Torch <= 1.2
if not hasattr(dist, "is_initialized"):
return
multiprocessing.set_start_method("spawn", force=True)
size = 2
processes = []
for rank in range(size):
p = Process(target=init_processes, args=(rank, size, include_workers, run))
p.start()
processes.append(p)
for p in processes:
p.join()
# WARNING: assert statements do not cause test failure inside subprocesses
# https://stackoverflow.com/questions/13400546/py-test-how-to-automatically-detect-an-exception-in-a-child-process
assert all([not p.exitcode for p in processes]), f"Some processes failed. processes={processes}"
out_dir = "/tmp/run"
trial = create_trial(path=out_dir)
return trial
@pytest.mark.slow # 0:05 to run
def test_run_net_single_process():
"""Runs a single linear layer."""
device = torch.device("cpu")
model = Net().to(device)
optimizer = optim.SGD(model.parameters(), lr=0.01)
shutil.rmtree(out_dir, ignore_errors=True)
hook = smd.Hook(
out_dir=out_dir, save_config=smd.SaveConfig(save_steps=[0, 1, 5]), save_all=True
)
hook.register_module(model)
train(model=model, device=device, optimizer=optimizer)
hook._cleanup()
assert hook._get_worker_name() == "worker_0"
trial = create_trial(path=out_dir)
assert len(trial.workers()) == 1, f"trial.workers() = {trial.workers()}"
assert len(trial.steps()) == 3, f"trial.steps() = {trial.steps()}"
shutil.rmtree(out_dir, ignore_errors=True)
@pytest.mark.slow # 0:07 to run
def test_run_net_distributed_save_all_workers():
trial = _run_net_distributed(include_workers="all")
assert len(trial.workers()) == 2, f"trial.workers() = {trial.workers()}"
assert len(trial.steps()) == 3, f"trial.steps() = {trial.steps()}"
@pytest.mark.slow # 0:07 to run
def test_run_net_distributed_save_one_worker():
trial = _run_net_distributed(include_workers="one")
assert len(trial.workers()) == 1, f"trial.workers() = {trial.workers()}"
assert len(trial.steps()) == 3, f"trial.steps() = {trial.steps()}"
|
import logging
logging.addLevelName(5, 'SILLY')
# max(map(len, [logging.getLevelName(level) for level in range(0, 60, 10)])) == 8
# %(asctime)14s
logging.basicConfig(format='%(levelname)-8s (%(name)s): %(message)s')
class Logger(logging.Logger):
def silly(self, msg, *args, **kwargs):
level = logging.getLevelName('SILLY')
if self.isEnabledFor(level):
self._log(level, msg, args, **kwargs)
def notset(self, msg, *args, **kwargs):
level = logging.getLevelName('NOTSET')
if self.isEnabledFor(level):
self._log(level, msg, args, **kwargs)
logging.setLoggerClass(Logger)
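# A minimal usage sketch (the logger name is illustrative): loggers created
# after setLoggerClass() gain the extra methods defined above.
#   log = logging.getLogger("demo")
#   log.setLevel(5)                  # enable SILLY (level 5) and above
#   log.silly("very verbose detail")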
|
""" Initialization script for Minos. """
from minos.app import create_app
from minos.database import db
# Create the app and push the context
app = create_app(init=True)
app.app_context().push()
app.config.from_envvar('FLASK_SETTINGS')
# Create all tables and stuff.
db.create_all()
|
from typing import List, Optional
from prompt_toolkit.formatted_text import AnyFormattedText
from prompt_toolkit.layout.containers import (
AnyContainer,
HSplit,
VSplit,
Window,
WindowAlign,
)
from prompt_toolkit.layout.controls import FormattedTextControl
from prompt_toolkit.layout.dimension import D
from prompt_toolkit.widgets import Box, Button, Frame, SearchToolbar, TextArea
from grand_geckos.ui.bindings import bottombar_bindings, panel_bindings
class VerticalDivider(Window):
def __init__(self) -> None:
super().__init__(width=1, char="│", style="class:line")
class HorizontalDivider(Window):
def __init__(self) -> None:
super().__init__(height=1, char="─", style="class:line")
class VerticalSpacer(Window):
def __init__(self, width=1) -> None:
super().__init__(width=width, char=" ", style="class:line")
class HorizontalSpacer(Window):
def __init__(self, height=1) -> None:
super().__init__(height=height, char=" ", style="class:line")
class TitleView(Window):
def __init__(self, text: str) -> None:
super().__init__(
height=2,
content=FormattedTextControl([("class:title bold", text)]),
align=WindowAlign.CENTER,
)
class SearchBarView(TextArea):
def __init__(self) -> None:
super().__init__(
height=1,
prompt=" Search: ",
style="class:input-field",
multiline=False,
wrap_lines=False,
search_field=SearchToolbar(),
)
class PanelView(Frame):
def __init__(
self,
data: List[AnyContainer],
title: AnyFormattedText,
) -> None:
super().__init__(
title=title,
body=Box(body=HSplit(data, padding=1), padding=1, height=D()),
key_bindings=panel_bindings(),
)
class ControlBarView(Frame):
def __init__(self, controls: Optional[List[AnyContainer]] = None) -> None:
super().__init__(
body=Box(body=VSplit(controls or [], align="CENTER", padding=2), style="class:bottom-bar", height=1),
# TODO: This border color is not matching background
style="#111111",
key_bindings=bottombar_bindings(),
)
class ButtonView(Box):
def __init__(self, text, action, width=16) -> None:
super().__init__(
body=Button(
text=text,
handler=action,
width=width,
left_symbol="",
right_symbol="",
),
style="class:button",
height=1,
)
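# A rough composition sketch (titles and handlers are assumptions, not taken
# from the application) showing how these views are meant to nest:
#   layout = HSplit([
#       TitleView("Grand Geckos"),
#       SearchBarView(),
#       VSplit([PanelView([], title="Vaults"),
#               VerticalDivider(),
#               PanelView([], title="Credentials")]),
#       ControlBarView([ButtonView("Quit", action=lambda: None)]),
#   ])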
|
from .basegamestate import BaseGameState
class GameStateIntro(BaseGameState):
def on_enter(self):
self.game.r_int.fade = True
# self.logo_engine = self.game.m_res.get_splash("ulix_logo_small")
self.logo_framework = "splash/dexflow_logo_small"
self.game.r_int.load_sprite(self.logo_framework)
self.game.r_int.init_sprite_drawer()
self.stage = 0
def on_tick(self, time, frame_time):
self.time = time
if self.stage == 0:
self.stage = 1
self.timer = 0
else:
self.timer += frame_time
if self.stage == 1 and self.timer > 0.5:
self.stage = 2
if self.timer > 0.7:
self.game.m_gst.switch_state("overworld")
return False
def on_exit(self):
        self.game.r_int.fade = False
def on_render(self, time, frame_time):
self.draw_interface(time, frame_time)
def event_keypress(self, key, modifiers):
pass
def draw_interface(self, time, frame_time):
if self.stage < 3:
# self.game.r_int.draw_image(
# self.logo_engine, (0.5, 0.5), centre=True, size=0.5
# )
# elif self.stage == 2:
self.game.r_int.draw_image(
self.logo_framework, (0.5, 0.5), centre=True, size=0.5
)
|
#!/usr/bin/env python
import sys
import string
from subprocess import *
import re
import time
from optparse import OptionParser
from util_ap import *
from GenericSampler import GenericSampler
from UdpJsonTransmitter import UdpJsonTransmitter
#import pymongo
#from pymongo.errors import AutoReconnect
class ChUtilSampler(GenericSampler):
def __init__(self, intf, fake=False, description=''):
self.intf = intf
self.fake = fake
self.description = description
self.CMD_SURVEY="iw dev %s survey dump" % self.intf
        self.mac_addr = get_mac_address(self.intf)
#print self.mac_addr
#hostname = get_hostname()
self.hostname = 'deadbeef'
if self.fake:
self.last_raw_utils = self.init_stats_sim()
else:
self.last_raw_utils = self.init_stats()
def switch_channel(self, channel):
pipe = Popen("iwconfig %s channel %d" % (self.intf,channel), shell=True, stdout=PIPE)
out, err = pipe.communicate()
if (err):
print "switch channel failed - exiting..." % err
sys.exit(0)
    def parse_iw_output(self, output):
        utils = []
        for line in string.split(output, "\n"):
if line.lstrip().startswith("frequency"):
_line = line[line.find(":")+1:]
m = re.match(r"\s+(?P<val>\w+) MHz", _line.rstrip())
freq = m.group('val')
if line.lstrip().startswith("channel active time"):
_line=line[line.find(":")+1:]
m = re.match(r"\s+(?P<val>\w+) ms",_line.rstrip())
active_time = int(m.group('val'))
if line.lstrip().startswith("channel busy time"):
_line=line[line.find(":")+1:]
m = re.match(r"\s+(?P<val>\w+) ms",_line.rstrip())
busy_time = int(m.group('val'))
if line.lstrip().startswith("channel receive time"):
_line=line[line.find(":")+1:]
m = re.match(r"\s+(?P<val>\w+) ms",_line.rstrip())
receive_time = int(m.group('val'))
if line.lstrip().startswith("channel transmit time"):
_line=line[line.find(":")+1:]
m = re.match(r"\s+(?P<val>\w+) ms",_line.rstrip())
transmit_time = int(m.group('val'))
#print "%s:%d,%d,%d,%d" % (freq,active_time,busy_time,receive_time,transmit_time)
utils.append({'timestamp':time.time(),'mac_addr':self.mac_addr,'hostname':self.hostname,'freq':freq,'intf':self.intf,
'active':active_time,'busy':busy_time,'receive':receive_time,'transmit':transmit_time, 'description':self.description})
return utils
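    # Sample "iw dev wlan0 survey dump" block that parse_iw_output() expects;
    # the timing values below are illustrative, not real measurements:
    #   Survey data from wlan0
    #       frequency:              2412 MHz [in use]
    #       channel active time:    1000 ms
    #       channel busy time:      600 ms
    #       channel receive time:   200 ms
    #       channel transmit time:  200 ms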
def update_stats_sim(self, last_raw_utils):
utils = [{'timestamp':time.time(),'mac_addr':self.mac_addr,'hostname':self.hostname,'freq':"2412",
'active':1000,'busy':600,'receive':200,'transmit':200, 'intf':self.intf,'description':self.description}]
return last_raw_utils,utils
def init_stats_sim(self):
return [{'timestamp':time.time(),'mac_addr':self.mac_addr,'hostname':self.hostname,'freq':"2412",
'active':1000,'busy':600,'receive':200,'transmit':200, 'intf':self.intf,'description':self.description}]
def init_stats(self):
pipe = Popen(self.CMD_SURVEY,shell=True,stdout=PIPE)
out,err = pipe.communicate()
if (err):
print "iw failed - exiting...(%s)" % err
sys.exit(0)
else:
raw_utils = self.parse_iw_output(out)
return raw_utils
def update_stats(self, last_raw_utils):
pipe = Popen(self.CMD_SURVEY,shell=True,stdout=PIPE)
out,err = pipe.communicate()
if (err):
print "iw failed - exiting...(%s)" % err
sys.exit(0)
else:
raw_utils = self.parse_iw_output(out)
utils = []
for raw_util,last_raw_util in zip(raw_utils, last_raw_utils):
utils.append({'timestamp':raw_util['timestamp'],'freq':raw_util['freq'],'mac_addr':self.mac_addr,'hostname':self.hostname, 'intf':self.intf,'description':self.description,
'active':raw_util['active'] - last_raw_util['active'],
'busy':raw_util['busy'] - last_raw_util['busy'],
'receive':raw_util['receive'] - last_raw_util['receive'],
'transmit':raw_util['transmit'] - last_raw_util['transmit']})
return raw_utils, utils
def next(self):
if self.fake:
self.last_raw_utils, last_utils = self.update_stats_sim(self.last_raw_utils)
else:
self.last_raw_utils, last_utils = self.update_stats(self.last_raw_utils)
self.printChUtilToFile(last_utils)
return last_utils
def printChUtilToFile(self, last_utils):
#print '------'
last_utils = sorted(last_utils, key=lambda x:x['active'], reverse=True)
#print last_utils
fname = '/var/run/wifi-survey-%s' % self.intf
f = open(fname, 'w')
print >>f, 'ts:%s,freq:%s,active:%d,busy:%d,receive:%d,transmit:%d' % (last_utils[0]['timestamp'],last_utils[0]['freq'], last_utils[0]['active'], last_utils[0]['busy'], last_utils[0]['receive'], last_utils[0]['transmit'])
f.close()
def log_stats(self, utils):
for util in utils:
if util['active'] != 0:
if options.verbose:
print "%s" % (util)
if __name__=="__main__":
parser = OptionParser()
parser.add_option("-s","--server",dest="server",
default="localhost",
help="hostname for the aggregation server")
parser.add_option("-p","--port",dest="port",
type="int", default=27017,
help="port for the aggregation server")
parser.add_option("-u","--updev",dest="updev",
default='br0',
help="upward network device to send back log entries")
parser.add_option("-v","--verbose",dest="verbose",
action="store_true", default=False,
help="verbose output.")
parser.add_option("-f","--fake",dest="fake",
action="store_true", default=False,
help="If set, simulate the wireless stats.")
#parser.add_option("-l","--loop",dest="loop",
# action="store_true", default=False,
# help="If set, loop monitoring between channels 1,6,11.")
parser.add_option("-i","--intf",dest="intf",
default="wlan0",
help="which interface to collect stats from")
parser.add_option("-I","--interval",dest="interval",
type="int", default=5,
help="Interval between stats updates (secs)")
parser.add_option("-d","--description",dest="description",
default="standard",
help="Description of the experiment, if any")
(options, args) = parser.parse_args()
channels = [1,6,11]
channel = 1
#UPWARD_DEVICE = 'br0'
#DST_IP = "172.24.74.179"
#DST_PORT = 5590
UPWARD_DEVICE = options.updev
DST_IP = options.server
DST_PORT = options.port
#INTF = 'wlan0'
#FAKE = True
INTF = options.intf
FAKE = options.fake
#sampler = TestSampler()
sampler = ChUtilSampler(INTF, FAKE, options.description)
#ID = get_ip_address(UPWARD_DEVICE)
ID = get_mac_address(UPWARD_DEVICE)
udpJsonTransmitter = UdpJsonTransmitter(sampler, DST_IP, DST_PORT, options.interval, ID)
udpJsonTransmitter.setDaemon(True)
udpJsonTransmitter.start()
#TODO : stop the threads using ctrl-c
while(True):
time.sleep(60)
|
from django.contrib import admin
# <HINT> Import any new Models here
from .models import Course, Lesson, Instructor, Learner, Question, Choice
# <HINT> Register QuestionInline and ChoiceInline classes here
class QuestionInline(admin.StackedInline):
model = Question
list_display = ('question', 'grade')
class ChoiceInline(admin.StackedInline):
model = Choice
list_display = ("question", "correct")
class LessonInline(admin.StackedInline):
model = Lesson
extra = 2
# Register your models here.
class CourseAdmin(admin.ModelAdmin):
inlines = [LessonInline]
list_display = ('name', 'pub_date')
list_filter = ['pub_date']
search_fields = ['name', 'description']
class LessonAdmin(admin.ModelAdmin):
list_display = ['title']
# <HINT> Register Question and Choice models here
admin.site.register(Course, CourseAdmin)
admin.site.register(Lesson, LessonAdmin)
admin.site.register(Instructor)
admin.site.register(Learner)
admin.site.register(Question)
admin.site.register(Choice)
|
# ---------------------------------------------------------------------
# Zyxel.MSAN.get_inventory
# ---------------------------------------------------------------------
# Copyright (C) 2007-2019 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# Python modules
import re
# NOC modules
from noc.core.script.base import BaseScript
from noc.sa.interfaces.igetinventory import IGetInventory
from noc.core.text import parse_table
class Script(BaseScript):
name = "Zyxel.MSAN.get_inventory"
interface = IGetInventory
cache = True
rx_slot = re.compile(
r"^\s*slot(?P<number>\d+):\s*\n"
r"^\s*name\s*:\s+(?P<part_no>\S+)\s*\n"
r"^.+?\n"
r"^\s*hardware version\s*:\s+(?P<revision>\S+)\s*\n"
r"^\s*hardware serial number\s*:\s+(?P<serial>\S+)\s*\n",
re.MULTILINE | re.DOTALL,
)
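    # Illustrative "lcman show <slot>" block that rx_slot is written against;
    # the field values are assumptions inferred from the regex, not captured
    # device output:
    #   slot1:
    #       name                    : VLC1348G-51
    #       driver version          : 1.0
    #       hardware version        : A1
    #       hardware serial number  : S123456789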
rx_hw = re.compile(
r"^\s*Model\s*:\s+(?:\S+ \/ )?(?P<part_no>\S+)\s*\n"
r"^.+?\n"
r"^\s*Hardware version\s*:\s+(?P<revision>\S+)\s*\n"
r"^\s*Serial number\s*:\s+(?P<serial>\S+)\s*\n",
re.MULTILINE | re.DOTALL,
)
rx_hw2 = re.compile(
r"^\s*Hardware Version: (?P<revision>\S+)\s*\n" r"^\s*Serial Number: (?P<serial>\S+)\s*\n",
re.MULTILINE,
)
rx_chips = re.compile(r"^\s*(?P<platform>\S+?)([/ ](?P<module>\S+))?\s+")
M_TYPE = {
"IES-2000": "MSC1000",
"IES-2000M": "MSC1000A",
"IES-3000": "MSC1000",
"IES-3000M": "MSC1000A",
}
def execute(self):
r = []
slots = self.profile.get_slots_n(self)
version = self.scripts.get_version()
if slots > 1:
if version["platform"] not in ["IES-2000", "IES-2000M", "IES-3000", "IES-3000M"]:
for i in range(1, slots):
match = self.rx_slot.search(self.cli("lcman show %s" % i))
if match:
part_no = match.group("part_no")
r += [
{
"type": "LINECARD",
"number": match.group("number"),
"vendor": "ZYXEL",
"part_no": match.group("part_no"),
"serial": match.group("serial"),
"revision": match.group("revision"),
}
]
c = self.profile.get_platform(self, slots, part_no)
if c:
r.insert(0, {"type": "CHASSIS", "vendor": "ZYXEL", "part_no": c})
else:
r += [{"type": "CHASSIS", "vendor": "ZYXEL", "part_no": version["platform"]}]
t = parse_table(self.cli("lcman show", cached=True))
for i in t:
if i[1] == "-":
continue
part_no = i[2]
if part_no == "msc":
part_no = self.M_TYPE[version["platform"]]
r += [
{"type": "LINECARD", "number": i[0], "vendor": "ZYXEL", "part_no": part_no}
]
else:
module = None
match = self.rx_hw.search(self.cli("sys info show", cached=True))
if match:
c = self.profile.get_platform(self, slots, match.group("part_no"))
if match.group("part_no").startswith("AAM"):
module = match.group("part_no")
else:
match1 = self.rx_chips.search(self.cli("chips info", cached=True))
c = match1.group("platform")
module = match1.group("module")
match = self.rx_hw2.search(self.cli("sys info show", cached=True))
r = [
{
"type": "CHASSIS",
"vendor": "ZYXEL",
"part_no": c,
"serial": match.group("serial"),
"revision": match.group("revision"),
}
]
if module:
r += [{"type": "LINECARD", "number": 1, "vendor": "ZYXEL", "part_no": module}]
return r
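# A minimal, self-contained sketch (not part of the NOC script itself) showing
# how rx_slot parses "lcman show <n>" output; the CLI text below is invented
# sample data, not captured from a real device.
if __name__ == "__main__":
    sample = (
        " slot1:\n"
        " name                   : ALC1248G-51\n"
        " status                 : up\n"
        " hardware version       : 2.0\n"
        " hardware serial number : S080Y12345678\n"
    )
    m = Script.rx_slot.search(sample)
    assert m and m.group("number") == "1"
    assert m.group("part_no") == "ALC1248G-51"
    assert m.group("revision") == "2.0" and m.group("serial") == "S080Y12345678"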
|
# (C) StackState 2020
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import pytest
@pytest.fixture(scope='session')
def sts_environment():
    # This conf instance is used when running `checksdev env start mycheck myenv`.
    # The start command places it as `conf.yaml` in the `conf.d/mycheck/` directory.
    # To start an environment, this object must not be empty.
return {"name": "nginx",
"location": "./tests/data/nginx_http.conf"}
@pytest.fixture
def demo_instance():
return {"nginx_status_url": "http://demo.nginx.com/api/",
"use_plus_api": True,
"plus_api_version": 6}
@pytest.fixture
def http_instance():
return {"name": "nginx",
"location": "./tests/data/nginx_http.conf"}
@pytest.fixture
def simple_instance():
return {"name": "nginx",
"location": "./tests/data/simple/nginx.conf"}
@pytest.fixture
def messy_instance():
return {"name": "nginx",
"location": "./tests/data/messy/nginx.conf"}
@pytest.fixture
def include_instance():
return {"name": "nginx",
"location": "./tests/data/with_include/nginx.conf"}
@pytest.fixture
def simple_upstream_instance():
return {"name": "nginx",
"location": "./tests/data/simple_upstream/nginx.conf"}
@pytest.fixture
def complex_instance():
return {"name": "nginx",
"location": "./tests/data/complex/nginx.conf"}
@pytest.fixture
def location_zone_instance():
return {"name": "nginx",
"location": "./tests/data/location_zone/nginx.conf"}
@pytest.fixture
def events_instance():
return {"name": "nginx",
"location": "./tests/data/nginx_events.conf"}
|
import random
from command import *
from consume import *
from util import *
_MIN_PLAYERS = 5
_MAX_PLAYERS = 10
_GOOD_COUNT = [3, 4, 4, 5, 6, 6]
_GOOD_CHARS = ["Merlin", "Percival"]
_EVIL_CHARS = ["Mordred", "Morgana", "Oberon"]
_DESCRIPTIONS = {
"Merlin": "Knows all evil players except Mordred",
"Percival": "Knows who Merlin and Morgana are, but not who is who",
"Mordred": "Evil wins if he guesses Merlin's identity correctly",
"Morgana": "Appears as Merlin to Percival",
"Oberon": "Unknown to the other evil players",
}
_TEAM_SIZE = [
[2, 3, 2, 3, 3],
[2, 3, 4, 3, 4],
[2, 3, 3, 4, 4],
[3, 4, 4, 5, 5],
[3, 4, 4, 5, 5],
[3, 4, 4, 5, 5]
]
_FAIL_THRESHOLD = [[2 if size >= 2 and quest == 3 else 1 for quest in range(5)] for size in range(len(_TEAM_SIZE))]
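# For reference, the table the comprehension above builds (rows are player
# counts 5-10, columns are quests 1-5): every entry is 1 except the fourth
# quest with 7+ players, which needs two fail votes:
#   5-6 players:  [1, 1, 1, 1, 1]
#   7-10 players: [1, 1, 1, 2, 1]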
class _GameState(Enum):
LOBBY = "Lobby"
TEAM = "Team"
VOTE = "Vote"
QUEST = "Quest"
ASSASSIN = "Assassin"
_games = {group["_id"]: group["Avalon"] for group in group_query_all({"Avalon": {"$exists": True}})}
def _avalon_handler(author, text, thread_id, thread_type):
command, text = partition(text, ["start", "join", "add", "clear", "submit", "kill", "status"])
command = command or "status"
result = False
if command == "start":
result = _start_handler(author, thread_id)
elif command == "join":
result = _join_handler(author, thread_id)
elif command in ["add", "clear", "submit"]:
result = _team_handler(author, command, text, thread_id)
elif command == "kill":
result = _kill_handler(author, text, thread_id)
    if result:
        if thread_id in _games:
            group_update(thread_id, {"$set": {"Avalon": _games[thread_id]}})
        else:
            # The command ended the game (e.g. an assassination attempt).
            group_update(thread_id, {"$unset": {"Avalon": None}})
    else:
        _status_handler(thread_id)
return True
def _start_handler(author, thread_id):
game = _games.get(thread_id, None)
if game is None:
_games[thread_id] = {
"State": _GameState.LOBBY.value,
"Host": author["_id"],
"Players": {author["_id"]: {"Name": author["Name"], "Role": None}}
}
if "Alias" in author:
_games[thread_id]["Players"][author["_id"]]["Name"] += " ({})".format(author["Alias"])
reply = "An Avalon session has been created! Join the game with \"!avalon join\" "
reply += "and use \"!avalon start\" again once all players have joined to begin."
elif game["State"] == _GameState.LOBBY.value:
if len(game["Players"]) < _MIN_PLAYERS:
reply = "Not enough players to start the game. (minimum {})".format(_MIN_PLAYERS)
elif author["_id"] != game["Host"]:
reply = "Only the session host can start the game."
        else:
            # _start_game sends its own messages, so return before the
            # generic reply below (which would otherwise be unbound).
            _start_game(thread_id)
            return True
else:
reply = "A game is already in progress."
client.send(Message(reply), thread_id, ThreadType.GROUP)
return True
def _join_handler(author, thread_id):
game = _games.get(thread_id, None)
if game is None:
return False
elif game["State"] != _GameState.LOBBY.value:
reply = "A game is already in progress."
else:
players = game["Players"]
if author["_id"] in players:
reply = "You are already in the session."
elif len(players) >= _MAX_PLAYERS:
reply = "The session is full. (maximum {})".format(_MAX_PLAYERS)
else:
players[author["_id"]] = {"Name": author["Name"], "Role": None}
if "Alias" in author:
game["Players"][author["_id"]]["Name"] += " ({})".format(author["Alias"])
reply = "You have joined the session! There are now {} players total.".format(len(players))
client.send(Message(reply), thread_id, ThreadType.GROUP)
return True
def _team_handler(author, command, text, thread_id):
game = _games.get(thread_id, None)
if game is None:
return False
elif game["State"] != _GameState.TEAM.value:
reply = "A team is not currently being proposed."
elif game["Order"][game["Leader"]] != author["_id"]:
reply = "Only the leader can propose a team."
elif command == "add":
quest = game["Success"] + game["Fail"]
if len(game["Team"]) >= _TEAM_SIZE[len(game["Players"]) - _MIN_PLAYERS][quest]:
reply = "The team is full."
else:
user = match_user_in_group(thread_id, text)
if user is None:
reply = "User not found."
elif user["_id"] in game["Team"]:
reply = "{} is already on the team.".format(user["Name"])
elif user["_id"] not in game["Players"]:
reply = "{} is not in the game.".format(user["Name"])
else:
game["Team"].append(user["_id"])
reply = "{} has been added to the team.".format(user["Name"])
elif command == "clear":
game["Team"].clear()
reply = "The team has been cleared."
else:
quest = game["Success"] + game["Fail"]
team_size = _TEAM_SIZE[len(game["Players"]) - _MIN_PLAYERS][quest]
if len(game["Team"]) != team_size:
reply = "{} players are needed for the team. The current team size is {}."
reply = reply.format(team_size, len(game["Team"]))
else:
game["State"] = _GameState.VOTE.value
game["Votes"] = {"Accept": [], "Reject": []}
prompt = "A team has been proposed consisting of the following players:"
for member in game["Team"]:
prompt += "\n-> {}".format(game["Players"][member]["Name"])
prompt += "\n\nEnter \"accept\" to accept the team or \"reject\" to reject the team."
for player_id in game["Players"].keys():
add_active_consumption(None, player_id, ThreadType.USER, "AvalonVote", prompt, thread_id)
reply = "A team has been proposed! Please vote on the team in private chat."
client.send(Message(reply), thread_id, ThreadType.GROUP)
return True
def _kill_handler(author, text, thread_id):
game = _games.get(thread_id, None)
if game is None:
return False
elif game["State"] != _GameState.ASSASSIN.value:
reply = "It is not yet time to assassinate Merlin."
elif game["Players"][author["_id"]]["Role"] != "Mordred":
reply = "Only Mordred can name an assassination target."
else:
user = match_user_in_group(thread_id, text)
if user is None:
reply = "User not found."
elif user["_id"] not in game["Players"]:
reply = "{} is not in the game.".format(user["Name"])
        elif game["Players"][user["_id"]]["Role"] == "Merlin":
            reply = "The side of evil wins! Merlin was chosen correctly." # TODO game rewards
            del _games[thread_id]
        else:
            merlin = next(filter(lambda p: p["Role"] == "Merlin", game["Players"].values()), None)["Name"]
            reply = "The side of good wins! Merlin's real identity was {}.".format(merlin) # TODO game rewards
            del _games[thread_id]
client.send(Message(reply), thread_id, ThreadType.GROUP)
return True
def _status_handler(thread_id):
game = _games.get(thread_id, None)
if game is None:
reply = "There is no active Avalon session. Create one with \"!avalon start\"."
elif game["State"] == _GameState.LOBBY.value:
reply = "The Avalon game has not yet started. Join the game with \"!avalon join\" "
reply += "and use \"!avalon start\" again once all players have joined to begin.\n\n"
reply += "*Host*: {}\n".format(game["Players"][game["Host"]]["Name"])
reply += "*Current players*:"
for player in game["Players"].values():
reply += "\n-> {}".format(player["Name"])
elif game["State"] == _GameState.TEAM.value:
reply = "A quest team is being proposed. The leader should use \"!avalon add <name>\" "
reply += "to add players to the team, \"!avalon clear\" to clear the current team, "
reply += "and \"!avalon submit\" to submit the current team.\n\n"
reply += "*Successful quests*: {}\n".format(game["Success"])
reply += "*Failed quests*: {}\n\n".format(game["Fail"])
reply += "*Rejected teams*: {}\n".format(game["Attempts"])
leader_id = game["Order"][game["Leader"]]
reply += "*Current leader*: {}\n".format(game["Players"][leader_id]["Name"])
if len(game["Team"]):
reply += "*Current team*:"
for player in game["Team"]:
reply += "\n-> {}".format(game["Players"][player]["Name"])
elif game["State"] == _GameState.VOTE.value:
reply = "A quest team is being voted on. Enter \"accept\" or \"reject\" in private chat to enter your vote.\n\n"
reply += "*Successful quests*: {}\n".format(game["Success"])
reply += "*Failed quests*: {}\n\n".format(game["Fail"])
reply += "*Rejected teams*: {}\n".format(game["Attempts"])
leader_id = game["Order"][game["Leader"]]
reply += "*Current leader*: {}\n".format(game["Players"][leader_id]["Name"])
reply += "*Current team*:"
for player in game["Team"]:
reply += "\n-> {}".format(game["Players"][player]["Name"])
votes = game["Votes"]
missing = filter(lambda p: p[0] not in votes["Accept"] and p[0] not in votes["Reject"], game["Players"].items())
reply += "\n\n*Missing votes*:"
for user_id, player in sorted(missing, key=lambda p: p[1]["Name"]):
reply += "\n-> {}".format(player["Name"])
elif game["State"] == _GameState.QUEST.value:
reply = "The selected team is embarking on a quest. Use \"success\" or \"fail\" in private chat "
reply += "to determine the outcome of the quest if you are on the selected team.\n\n"
reply += "*Successful quests*: {}\n".format(game["Success"])
reply += "*Failed quests*: {}\n\n".format(game["Fail"])
reply += "*Current team*:"
for player in game["Team"]:
reply += "\n-> {}".format(game["Players"][player]["Name"])
missing = filter(lambda p: p not in game["Votes"]["Success"] and p not in game["Votes"]["Fail"], game["Team"])
reply += "\n\n*Missing votes*:"
for user_id in sorted(missing, key=lambda p: game["Players"][p]["Name"]):
reply += "\n-> {}".format(game["Players"][user_id]["Name"])
else:
reply = "Three quests have been completed successfully, but Mordred still has one last chance to assassinate "
reply += "Merlin. Use \"!avalon kill <name>\" to select your target if you are Mordred.\n\n"
mordred = next(filter(lambda p: p["Role"] == "Mordred", game["Players"].values()), None)["Name"]
reply += "*Mordred*: {}\n*Merlin*: ???".format(mordred)
client.send(Message(reply), thread_id, ThreadType.GROUP)
def _start_game(thread_id):
game = _games[thread_id]
order = list(game["Players"].values())
random.shuffle(order)
good_count = _GOOD_COUNT[len(order) - _MIN_PLAYERS]
good_players, evil_players = order[:good_count], order[good_count:]
roles = {}
for i, player in enumerate(good_players):
if i < len(_GOOD_CHARS):
player["Role"] = _GOOD_CHARS[i]
roles[_GOOD_CHARS[i]] = player["Name"]
else:
player["Role"] = "Servant"
for i, player in enumerate(evil_players):
if i < len(_EVIL_CHARS):
player["Role"] = _EVIL_CHARS[i]
roles[_EVIL_CHARS[i]] = player["Name"]
else:
player["Role"] = "Minion"
# Inform players of their roles
for user_id, player in game["Players"].items():
role_name = player["Role"]
if role_name == "Servant":
role_name = "a Loyal Servant of Arthur"
elif role_name == "Minion":
role_name = "a Minion of Mordred"
reply = "You are playing as {}! ".format(role_name)
if player["Role"] == "Merlin":
reply += "You are on the side of good. You know the identities of all evil players except for Mordred, "
reply += "but make sure Mordred doesn't figure out your identity or your side will lose!\n\n"
reply += "The evil players (minus Mordred) are as follows:"
for name in sorted([player["Name"] for player in filter(lambda p: p["Role"] != "Mordred", evil_players)]):
reply += "\n-> {}".format(name)
elif player["Role"] == "Percival":
reply += "You are on the side of good. You know the identities of Merlin (good) and Morgana (evil), "
reply += "but you don't know who is who! Make sure Mordred doesn't figure out Merlin's identity "
reply += "or your side will lose!\n\n"
reply += "Merlin and Morgana are {} and {}.".format(*sorted([roles["Merlin"], roles["Morgana"]]))
elif player["Role"] == "Servant":
reply += "You are on the side of good. Help complete three quests successfully and make sure "
reply += "Merlin's identity isn't found out to win the game!"
elif player["Role"] == "Mordred":
reply += "You are on the side of evil. Merlin does not know you are evil and you will have a chance "
reply += "to assassinate him at the end of the game, so try to figure out Merlin's identity!\n\n"
reply += "The other evil players are as follows:"
for name in sorted([player["Name"] for player in filter(lambda p: p != player, evil_players)]):
reply += "\n-> {}".format(name)
elif player["Role"] == "Morgana":
reply += "You are on the side of evil. Percival knows who you and Merlin are, but can't figure out "
reply += "who is who. Try to deceive him!\n\n"
reply += "The other evil players are as follows:"
for name in sorted([player["Name"] for player in filter(lambda p: p != player, evil_players)]):
reply += "\n-> {}".format(name)
elif player["Role"] == "Oberon":
reply += "You are on the side of evil. But unlike other evil characters, you don't know who the other "
reply += "evil characters are and they don't know who you are! Try to cooperate with your team without "
reply += "revealing your identity to the side of good!"
elif player["Role"] == "Minion":
reply += "You are on the side of evil. Help sabotage three quests or have Mordred figure out "
reply += "Merlin's identity to win the game!\n\n"
reply += "The other evil players are as follows:"
for name in sorted([player["Name"] for player in filter(lambda p: p != player, evil_players)]):
reply += "\n-> {}".format(name)
reply += "\n\nDirect messaging is allowed and encouraged."
client.send(Message(reply), user_id, ThreadType.USER)
# Summarize game rules
game["Success"] = 0
game["Fail"] = 0
group_reply = "The game has begun! The side of good wins if three quests are completed successfully. "
group_reply += "The side of evil wins if three quests fail or if Mordred can guess Merlin's identity "
group_reply += "at the end of the game.\n\n"
group_reply += "For each quest, a leader is chosen at random to select the team for that quest. "
group_reply += "The rest of the players vote to accept or reject the team. Once a team is chosen, "
group_reply += "the team members can chose whether or not to sabotage the quest.\n\n"
group_reply += "Many players have special roles. The special roles in this game are as follows:"
for role_name in filter(lambda r: r in roles, _GOOD_CHARS):
group_reply += "\n-> *{}* ({}): {}".format(role_name, "Good", _DESCRIPTIONS[role_name])
for role_name in filter(lambda r: r in roles, _EVIL_CHARS):
group_reply += "\n-> *{}* ({}): {}".format(role_name, "Evil", _DESCRIPTIONS[role_name])
client.send(Message(group_reply), thread_id, ThreadType.GROUP)
# Initialize team phase
game["State"] = _GameState.TEAM.value
game["Order"] = list(game["Players"].keys())
game["Leader"] = 0
game["Team"] = []
game["Attempts"] = 0
random.shuffle(game["Order"])
group_reply = "Team leaders will be chosen in the following order:"
for user_id in game["Order"]:
group_reply += "\n-> {}".format(game["Players"][user_id]["Name"])
leader_id = game["Order"][game["Leader"]]
group_reply += "\n\nThe current leader is {} ".format(game["Players"][leader_id]["Name"])
team_size = _TEAM_SIZE[len(game["Players"]) - _MIN_PLAYERS][0]
group_reply += "and {} players are needed for the team.\n\n".format(team_size)
group_reply += "Use \"!avalon add <name>\" to add players to the team, \"!avalon clear\" "
group_reply += "to clear the current team, and \"!avalon submit\" to submit the current team."
client.send(Message(group_reply), thread_id, ThreadType.GROUP)
def _prompt_vote(author, text, thread_id, thread_type, args):
text, _ = partition(text, ["accept", "reject"])
game = _games.get(args, None)
if game is None:
result = True
reply = "The game is no longer active."
elif text is None:
result = False
reply = "Enter \"accept\" to accept the team or \"reject\" to reject the team."
else:
result = True
if text == "accept":
game["Votes"]["Accept"].append(author["_id"])
elif text == "reject":
game["Votes"]["Reject"].append(author["_id"])
reply = "Your vote has been received."
client.send(Message(reply), thread_id, ThreadType.USER)
# Check for vote completion
if len(game["Players"]) == len(game["Votes"]["Accept"]) + len(game["Votes"]["Reject"]):
_on_vote_completion(args)
if args in _games:
group_update(args, {"$set": {"Avalon": _games[args]}})
else:
group_update(args, {"$unset": {"Avalon": None}})
return result
def _on_vote_completion(thread_id):
game = _games[thread_id]
reply = ""
if len(game["Votes"]["Accept"]):
reply += "*Accept*:"
for user_id in game["Votes"]["Accept"]:
reply += "\n-> {}".format(game["Players"][user_id]["Name"])
if len(game["Votes"]["Reject"]):
reply += "\n*Reject*:"
for user_id in game["Votes"]["Reject"]:
reply += "\n-> {}".format(game["Players"][user_id]["Name"])
# Begin quest phase
if len(game["Votes"]["Accept"]) > len(game["Votes"]["Reject"]):
game["State"] = _GameState.QUEST.value
del game["Attempts"]
game["Votes"] = {"Success": [], "Fail": []}
quest = game["Success"] + game["Fail"]
fail_threshold = _FAIL_THRESHOLD[len(game["Players"]) - _MIN_PLAYERS][quest]
reply += "\n\nThe vote has passed! The team must now vote to complete the quest in private chat."
reply += " The quest fails with {} fail vote(s).".format(fail_threshold)
prompt = "\n\nEnter \"success\" or \"fail\" to determine the outcome of the quest. "
prompt += "The quest fails with {} fail vote(s).\n\n".format(fail_threshold)
prompt += "The team consists of the following players:"
for member in game["Team"]:
prompt += "\n-> {}".format(game["Players"][member]["Name"])
for player_id in game["Team"]:
add_active_consumption(None, player_id, ThreadType.USER, "AvalonQuest", prompt, thread_id)
    # Five team proposals rejected ("Attempts" counts earlier rejections,
    # so this rejection is the fifth once it reaches 4)
    elif game["Attempts"] >= 4:
reply += "\n\nFive attempts to create a team have failed. The side of evil wins by default."
del _games[thread_id]
# TODO game rewards
# Create another team
else:
game["State"] = _GameState.TEAM.value
game["Leader"] = (game["Leader"] + 1) % len(game["Order"])
game["Team"] = []
game["Attempts"] += 1
del game["Votes"]
leader_id = game["Order"][game["Leader"]]
reply += "\n\nThe vote has failed. {} ".format(game["Players"][leader_id]["Name"])
reply += "has been assigned as the new leader. As before, use \"!avalon add <name>\", "
reply += "\"!avalon clear\", and \"!avalon submit\" to create the team."
if game["Attempts"] == 4:
reply += "\n\nNote that this is the last attempt! The side of evil wins by default "
reply += "if the vote does not pass this time."
client.send(Message(reply), thread_id, ThreadType.GROUP)
def _prompt_quest(author, text, thread_id, thread_type, args):
text, _ = partition(text, ["success", "fail"])
game = _games.get(args, None)
if game is None:
result = True
reply = "The game is no longer active."
elif text is None:
result = False
reply = "Enter \"success\" or \"fail\" to determine the outcome of the quest."
else:
result = True
reply = "Your vote has been received."
if text == "success":
game["Votes"]["Success"].append(author["_id"])
elif text == "fail":
role = game["Players"][author["_id"]]["Role"]
if role in _GOOD_CHARS or role == "Servant":
result = False
reply = "The side of good is not allowed to fail quests. Please vote success instead."
else:
game["Votes"]["Fail"].append(author["_id"])
client.send(Message(reply), thread_id, ThreadType.USER)
# Check for quest completion
if len(game["Team"]) == len(game["Votes"]["Success"]) + len(game["Votes"]["Fail"]):
_on_quest_completion(args)
if args in _games:
group_update(args, {"$set": {"Avalon": _games[args]}})
else:
group_update(args, {"$unset": {"Avalon": None}})
return result
def _on_quest_completion(thread_id):
game = _games[thread_id]
quest = game["Success"] + game["Fail"]
fail_threshold = _FAIL_THRESHOLD[len(game["Players"]) - _MIN_PLAYERS][quest]
if len(game["Votes"]["Fail"]) < fail_threshold:
game["Success"] += 1
reply = "The quest was completed successfully!"
else:
game["Fail"] += 1
reply = "The quest was sabotaged!"
if game["Success"] >= 3:
game["State"] = _GameState.ASSASSIN.value
del game["Team"]
del game["Votes"]
mordred = next(filter(lambda p: p["Role"] == "Mordred", game["Players"].values()), None)["Name"]
reply += " The side of now has one last chance to win the game by assassinating Merlin. "
reply += " Use \"!avalon kill <name>\" to select your target if you are Mordred.\n\n"
reply += "*Mordred*: {}\n*Merlin*: ???".format(mordred)
elif game["Fail"] >= 3:
reply += " The side of evil wins!" # TODO game rewards
del _games[thread_id]
else:
game["State"] = _GameState.TEAM.value
game["Leader"] = (game["Leader"] + 1) % len(game["Order"])
game["Team"] = []
game["Attempts"] = 0
del game["Votes"]
quest = game["Success"] + game["Fail"]
team_size = _TEAM_SIZE[len(game["Players"]) - _MIN_PLAYERS][quest]
reply += "\n\n*Successful quests*: {}\n".format(game["Success"])
reply += "*Failed quests: {}*\n\n".format(game["Fail"])
leader_id = game["Order"][game["Leader"]]
reply += "The new leader is {} ".format(game["Players"][leader_id]["Name"])
reply += "and {} players are needed for the team. ".format(team_size)
reply += "Use \"!avalon add <name>\" to add players to the team, \"!avalon clear\" "
reply += "to clear the current team, and \"!avalon submit\" to submit the current team."
client.send(Message(reply), thread_id, ThreadType.GROUP)
_avalon_info = """<<Avalon>>
*Usage*: "!avalon start"
Creates an Avalon session. Use this command again after players have joined to start the game.
*Usage*: "!avalon join"
Joins the chat's current Avalon session.
*Usage*: "!avalon status"
Summarizes the status of the current Avalon session and tells you how to continue."""
map_group_command(["avalon"], _avalon_handler, 1, _avalon_info)
add_handler("AvalonVote", _prompt_vote)
add_handler("AvalonQuest", _prompt_quest)
|
# -*- coding: utf-8 -*-
# File: eval.py
import tqdm
import os
from collections import namedtuple, defaultdict
from contextlib import ExitStack
import numpy as np
import cv2
import json
from tensorpack.utils.utils import get_tqdm_kwargs
from models.rcnn.common import CustomResize, clip_boxes
from models.rcnn.config import config as cfg
DetectionResult = namedtuple('DetectionResult', ['box', 'score', 'class_id', 'mask'])
"""
box: 4 float
score: float
class_id: int, 1~NUM_CLASS
mask: None, or a binary image of the original image shape
"""
def fill_full_mask(box, mask, shape):
"""
Args:
box: 4 float
mask: MxM floats
shape: h,w
"""
# int() is floor
# box fpcoor=0.0 -> intcoor=0.0
x0, y0 = list(map(int, box[:2] + 0.5))
# box fpcoor=h -> intcoor=h-1, inclusive
x1, y1 = list(map(int, box[2:] - 0.5)) # inclusive
x1 = max(x0, x1) # require at least 1x1
y1 = max(y0, y1)
w = x1 + 1 - x0
h = y1 + 1 - y0
# rounding errors could happen here, because masks were not originally computed for this shape.
# but it's hard to do better, because the network does not know the "original" scale
mask = (cv2.resize(mask, (w, h)) > 0.5).astype('uint8')
ret = np.zeros(shape, dtype='uint8')
ret[y0:y1 + 1, x0:x1 + 1] = mask
return ret
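# Worked example (values invented): box = [2.0, 3.0, 6.0, 8.0] gives
# x0, y0 = int(2.5), int(3.5) = (2, 3) and x1, y1 = int(5.5), int(7.5) = (5, 7),
# so a 28x28 mask would be resized to w=4, h=5 and pasted into ret[3:8, 2:6].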
def detect_one_image(img, model_func):
"""
Run detection on one image, using the TF callable.
This function should handle the preprocessing internally.
Args:
img: an image
model_func: a callable from TF model,
takes image and returns (boxes, probs, labels, [masks])
Returns:
[DetectionResult]
"""
orig_shape = img.shape[:2]
resizer = CustomResize(cfg.PREPROC.TEST_SHORT_EDGE_SIZE, cfg.PREPROC.MAX_SIZE)
resized_img = resizer.augment(img)
scale = np.sqrt(resized_img.shape[0] * 1.0 / img.shape[0] * resized_img.shape[1] / img.shape[1])
boxes, probs, labels, *masks = model_func(resized_img)
boxes = boxes / scale
# boxes are already clipped inside the graph, but after the floating point scaling, this may not be true any more.
boxes = clip_boxes(boxes, orig_shape)
if masks:
# has mask
full_masks = [fill_full_mask(box, mask, orig_shape) for box, mask in zip(boxes, masks[0])]
masks = full_masks
else:
# fill with none
masks = [None] * len(boxes)
results = [DetectionResult(*args) for args in zip(boxes, probs, labels, masks)]
return results
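# A hedged usage sketch (model_func is a placeholder for a trained TF callable,
# e.g. a tensorpack OfflinePredictor; "demo.png" is not part of this repo):
#
#   img = cv2.imread("demo.png", cv2.IMREAD_COLOR)
#   for det in detect_one_image(img, model_func):
#       print(det.class_id, round(float(det.score), 3), det.box)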
def eval_coco_old(df, detect_func, tqdm_bar=None):
"""
Args:
df: a DataFlow which produces (image, image_id)
detect_func: a callable, takes [image] and returns [DetectionResult]
tqdm_bar: a tqdm object to be shared among multiple evaluation instances. If None,
will create a new one.
Returns:
list of dict, to be dumped to COCO json format
"""
# lazy import
import pycocotools.mask as cocomask
from coco import COCOMeta
df.reset_state()
all_results = []
# tqdm is not quite thread-safe: https://github.com/tqdm/tqdm/issues/323
with ExitStack() as stack:
if tqdm_bar is None:
tqdm_bar = stack.enter_context(tqdm.tqdm(total=df.size(), **get_tqdm_kwargs()))
for img, img_id in df:
results = detect_func(img)
for r in results:
box = r.box
cat_id = COCOMeta.class_id_to_category_id[r.class_id]
box[2] -= box[0]
box[3] -= box[1]
res = {
'image_id': img_id,
'category_id': cat_id,
'bbox': list(map(lambda x: round(float(x), 3), box)),
'score': round(float(r.score), 4),
}
# also append segmentation to results
if r.mask is not None:
rle = cocomask.encode(np.array(r.mask[:, :, None], order='F'))[0]
rle['counts'] = rle['counts'].decode('ascii')
res['segmentation'] = rle
all_results.append(res)
tqdm_bar.update(1)
return all_results
def eval_coco(df, detect_func, tqdm_bar=None):
"""
Args:
df: a DataFlow which produces (image, image_id)
detect_func: a callable, takes [image] and returns [DetectionResult]
tqdm_bar: a tqdm object to be shared among multiple evaluation instances. If None,
will create a new one.
Returns:
list of dict, to be dumped to COCO json format
"""
from models.rcnn.breasts import CLASS_NAMES
df.reset_state()
all_results = []
# tqdm is not quite thread-safe: https://github.com/tqdm/tqdm/issues/323
with ExitStack() as stack:
if tqdm_bar is None:
tqdm_bar = stack.enter_context(tqdm.tqdm(total=df.size(), **get_tqdm_kwargs()))
for img, img_id in df:
results = detect_func(img)
for r in results:
box = r.box
cat_id = CLASS_NAMES[r.class_id]
box[2] -= box[0]
box[3] -= box[1]
res = {
'image_id': img_id,
'category_id': cat_id,
'bbox': list(map(lambda x: round(float(x), 3), box)),
'score': round(float(r.score), 4),
}
# also append segmentation to results
assert r.mask is None
# if r.mask is not None:
# rle = cocomask.encode(
# np.array(r.mask[:, :, None], order='F'))[0]
# rle['counts'] = rle['counts'].decode('ascii')
# res['segmentation'] = rle
all_results.append(res)
tqdm_bar.update(1)
return all_results
# https://github.com/pdollar/coco/blob/master/PythonAPI/pycocoEvalDemo.ipynb
def print_evaluation_scores_old(json_file):
    from pycocotools.coco import COCO
    from pycocotools.cocoeval import COCOeval
ret = {}
assert cfg.DATA.BASEDIR and os.path.isdir(cfg.DATA.BASEDIR)
annofile = os.path.join(cfg.DATA.BASEDIR, 'annotations', 'instances_{}.json'.format(cfg.DATA.VAL))
coco = COCO(annofile)
cocoDt = coco.loadRes(json_file)
cocoEval = COCOeval(coco, cocoDt, 'bbox')
cocoEval.evaluate()
cocoEval.accumulate()
cocoEval.summarize()
fields = ['IoU=0.5:0.95', 'IoU=0.5', 'IoU=0.75', 'small', 'medium', 'large']
for k in range(6):
ret['mAP(bbox)/' + fields[k]] = cocoEval.stats[k]
if cfg.MODE_MASK:
cocoEval = COCOeval(coco, cocoDt, 'segm')
cocoEval.evaluate()
cocoEval.accumulate()
cocoEval.summarize()
for k in range(6):
ret['mAP(segm)/' + fields[k]] = cocoEval.stats[k]
return ret
# https://github.com/riblidezso/frcnn_cad/blob/master/demo.ipynb
def print_evaluation_scores(json_file,
include_fooling_stats=cfg.BREASTS.CALC_FOOLING_STATS,
confidence_score=cfg.BREASTS.MALIGNANT_CONFIDENCE):
from models.rcnn.breasts import BreastDetection
from sklearn import metrics
with open(json_file, 'r') as f:
results = json.load(f)
breast_metadata = BreastDetection.load_many(cfg.DATA.VAL_PATTERN)
breast_metadata = {m['id']: m for m in breast_metadata}
def get_predictions(results, annotations, include_gen):
preds = defaultdict(set)
for result in results:
preds[result['image_id']].add((result['category_id'], result['score']))
output = {}
scores = {}
for id, findings in preds.items():
if (not include_gen) and id.endswith("_gen"):
continue
malignant_scores = [score for klass, score in findings if klass == cfg.DATA.CLASS_NAMES[2]]
if not malignant_scores:
scores[id] = 0.0
else:
scores[id] = max(malignant_scores)
if malignant_scores and max(malignant_scores) >= confidence_score:
output[id] = 1
else:
output[id] = 0
# Handle cases when no bbox is found.
for key, value in annotations.items():
if (not include_gen) and key.endswith("_gen"):
continue
if key not in output:
output[key] = 0
scores[key] = 0.0
return output, scores
def to_numpy(preds, annotations, dtype=np.int32):
pred = []
truth = []
for id, lbl in preds.items():
assert id in annotations
pred.append(lbl)
truth.append(annotations[id]['label'])
return np.asarray(pred, dtype=dtype), np.asarray(truth, dtype=np.int32)
preds, scores = get_predictions(results, breast_metadata, False)
pred, truth = to_numpy(preds, breast_metadata)
scores, truth_scores = to_numpy(scores, breast_metadata, dtype=np.float32)
def get_fooling_stats(preds, annotations):
total = wrong_clf_H = wrong_clf_C = fooled_H2C = fooled_C2H = 0
inference_not_found = 0
for id, pred_lbl in preds.items():
assert id in annotations
if id.endswith("_gen"):
continue
total += 1
lbl = annotations[id]['label']
if pred_lbl == lbl:
# Correctly classified.
if (id + "_gen") not in preds:
inference_not_found += 1
continue
gen_pred_lbl = preds[id + "_gen"]
if lbl == 1:
if gen_pred_lbl == 0:
fooled_C2H += 1
else:
assert lbl == 0
if gen_pred_lbl == 1:
fooled_H2C += 1
else:
if lbl == 1:
wrong_clf_C += 1
else:
wrong_clf_H += 1
return {
'fooling/total_num': total,
'fooling/inference_not_found': inference_not_found,
'fooling/wrong_clf_H': wrong_clf_H,
'fooling/wrong_clf_C': wrong_clf_C,
'fooling/correct_clf': total - wrong_clf_H - wrong_clf_C,
'fooling/fooled': fooled_H2C + fooled_C2H,
'fooling/fooled_H2C': fooled_H2C,
'fooling/fooled_C2H': fooled_C2H,
}
ret = {
'acc': metrics.accuracy_score(truth, pred),
'roc_auc': metrics.roc_auc_score(truth_scores, scores),
'f1': metrics.f1_score(truth, pred),
'recall': metrics.recall_score(truth, pred),
'precision': metrics.precision_score(truth, pred),
# 'roc': metrics.roc_curve(truth_scores, scores),
}
if include_fooling_stats:
preds, scores = get_predictions(results, breast_metadata, True)
pred, truth = to_numpy(preds, breast_metadata)
scores, truth_scores = to_numpy(scores, breast_metadata, dtype=np.float32)
ret2 = {
'all/acc': metrics.accuracy_score(truth, pred),
'all/roc_auc': metrics.roc_auc_score(truth_scores, scores),
'all/f1': metrics.f1_score(truth, pred),
'all/recall': metrics.recall_score(truth, pred),
'all/precision': metrics.precision_score(truth, pred),
# 'all/roc': metrics.roc_curve(truth_scores, scores),
}
ret.update(get_fooling_stats(preds, breast_metadata))
ret.update(ret2)
return ret
|
from django import forms
from .models import Comment
class CommentForm(forms.ModelForm):
'''A form for the Comment model'''
class Meta:
'''Nested class that specifies the form fields'''
model = Comment
fields = ['body']
labels = {'body': ''}
widgets = {
'body': forms.Textarea(attrs={'cols': 50, 'rows': 10})
}
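# A hedged usage sketch (the view wiring and field values are assumptions,
# not part of this module):
#
#   form = CommentForm(data={'body': 'Nice post!'})
#   if form.is_valid():
#       comment = form.save(commit=False)  # attach related objects, then save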
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kfp.containers._component_builder import _generate_dockerfile, _dependency_to_requirements, VersionedDependency, DependencyHelper
import os
import unittest
import yaml
import tarfile
from pathlib import Path
import inspect
from collections import OrderedDict
from typing import NamedTuple
class TestVersionedDependency(unittest.TestCase):
def test_version(self):
""" test version overrides min_version and max_version """
version = VersionedDependency(name='tensorflow', version='0.3.0', min_version='0.1.0', max_version='0.4.0')
self.assertTrue(version.min_version == '0.3.0')
self.assertTrue(version.max_version == '0.3.0')
self.assertTrue(version.has_versions())
self.assertTrue(version.name == 'tensorflow')
def test_minmax_version(self):
""" test if min_version and max_version are configured when version is not given """
version = VersionedDependency(name='tensorflow', min_version='0.1.0', max_version='0.4.0')
self.assertTrue(version.min_version == '0.1.0')
self.assertTrue(version.max_version == '0.4.0')
self.assertTrue(version.has_versions())
def test_min_or_max_version(self):
""" test if min_version and max_version are configured when version is not given """
version = VersionedDependency(name='tensorflow', min_version='0.1.0')
self.assertTrue(version.min_version == '0.1.0')
self.assertTrue(version.has_versions())
version = VersionedDependency(name='tensorflow', max_version='0.3.0')
self.assertTrue(version.max_version == '0.3.0')
self.assertTrue(version.has_versions())
def test_no_version(self):
""" test the no version scenario """
version = VersionedDependency(name='tensorflow')
self.assertFalse(version.has_min_version())
self.assertFalse(version.has_max_version())
self.assertFalse(version.has_versions())
class TestDependencyHelper(unittest.TestCase):
def test_generate_requirement(self):
""" Test generating requirement file """
# prepare
test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')
temp_file = os.path.join(test_data_dir, 'test_requirements.tmp')
dependency_helper = DependencyHelper()
dependency_helper.add_python_package(dependency=VersionedDependency(name='tensorflow', min_version='0.10.0', max_version='0.11.0'))
dependency_helper.add_python_package(dependency=VersionedDependency(name='kubernetes', min_version='0.6.0'))
dependency_helper.add_python_package(dependency=VersionedDependency(name='pytorch', max_version='0.3.0'))
dependency_helper.generate_pip_requirements(temp_file)
golden_requirement_payload = '''\
tensorflow >= 0.10.0, <= 0.11.0
kubernetes >= 0.6.0
pytorch <= 0.3.0
'''
with open(temp_file, 'r') as f:
target_requirement_payload = f.read()
self.assertEqual(target_requirement_payload, golden_requirement_payload)
os.remove(temp_file)
def test_add_python_package(self):
""" Test add_python_package """
# prepare
test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')
temp_file = os.path.join(test_data_dir, 'test_requirements.tmp')
dependency_helper = DependencyHelper()
dependency_helper.add_python_package(dependency=VersionedDependency(name='tensorflow', min_version='0.10.0', max_version='0.11.0'))
dependency_helper.add_python_package(dependency=VersionedDependency(name='kubernetes', min_version='0.6.0'))
dependency_helper.add_python_package(dependency=VersionedDependency(name='tensorflow', min_version='0.12.0'), override=True)
dependency_helper.add_python_package(dependency=VersionedDependency(name='kubernetes', min_version='0.8.0'), override=False)
dependency_helper.add_python_package(dependency=VersionedDependency(name='pytorch', version='0.3.0'))
dependency_helper.generate_pip_requirements(temp_file)
golden_requirement_payload = '''\
tensorflow >= 0.12.0
kubernetes >= 0.6.0
pytorch >= 0.3.0, <= 0.3.0
'''
with open(temp_file, 'r') as f:
target_requirement_payload = f.read()
self.assertEqual(target_requirement_payload, golden_requirement_payload)
os.remove(temp_file)
class TestGenerator(unittest.TestCase):
def test_generate_dockerfile(self):
""" Test generate dockerfile """
# prepare
test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')
target_dockerfile = os.path.join(test_data_dir, 'component.temp.dockerfile')
golden_dockerfile_payload_one = '''\
FROM gcr.io/ngao-mlpipeline-testing/tensorflow:1.10.0
RUN apt-get update -y && apt-get install --no-install-recommends -y -q python3 python3-pip python3-setuptools
ADD main.py /ml/main.py
'''
golden_dockerfile_payload_two = '''\
FROM gcr.io/ngao-mlpipeline-testing/tensorflow:1.10.0
RUN apt-get update -y && apt-get install --no-install-recommends -y -q python3 python3-pip python3-setuptools
ADD requirements.txt /ml/requirements.txt
RUN pip3 install -r /ml/requirements.txt
ADD main.py /ml/main.py
'''
golden_dockerfile_payload_three = '''\
FROM gcr.io/ngao-mlpipeline-testing/tensorflow:1.10.0
RUN apt-get update -y && apt-get install --no-install-recommends -y -q python python-pip python-setuptools
ADD requirements.txt /ml/requirements.txt
RUN pip install -r /ml/requirements.txt
ADD main.py /ml/main.py
'''
# check
_generate_dockerfile(filename=target_dockerfile, base_image='gcr.io/ngao-mlpipeline-testing/tensorflow:1.10.0',
python_version='python3', add_files={'main.py': '/ml/main.py'})
with open(target_dockerfile, 'r') as f:
target_dockerfile_payload = f.read()
self.assertEqual(target_dockerfile_payload, golden_dockerfile_payload_one)
_generate_dockerfile(filename=target_dockerfile, base_image='gcr.io/ngao-mlpipeline-testing/tensorflow:1.10.0',
python_version='python3', requirement_filename='requirements.txt', add_files={'main.py': '/ml/main.py'})
with open(target_dockerfile, 'r') as f:
target_dockerfile_payload = f.read()
self.assertEqual(target_dockerfile_payload, golden_dockerfile_payload_two)
_generate_dockerfile(filename=target_dockerfile, base_image='gcr.io/ngao-mlpipeline-testing/tensorflow:1.10.0',
python_version='python2', requirement_filename='requirements.txt', add_files={'main.py': '/ml/main.py'})
with open(target_dockerfile, 'r') as f:
target_dockerfile_payload = f.read()
self.assertEqual(target_dockerfile_payload, golden_dockerfile_payload_three)
self.assertRaises(ValueError, _generate_dockerfile, filename=target_dockerfile,
base_image='gcr.io/ngao-mlpipeline-testing/tensorflow:1.10.0',
python_version='python4', requirement_filename='requirements.txt', add_files={'main.py': '/ml/main.py'})
# clean up
os.remove(target_dockerfile)
def test_generate_requirement(self):
# prepare
test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')
temp_file = os.path.join(test_data_dir, 'test_requirements.tmp')
dependencies = [
VersionedDependency(name='tensorflow', min_version='0.10.0', max_version='0.11.0'),
VersionedDependency(name='kubernetes', min_version='0.6.0'),
]
_dependency_to_requirements(dependencies, filename=temp_file)
golden_payload = '''\
tensorflow >= 0.10.0, <= 0.11.0
kubernetes >= 0.6.0
'''
with open(temp_file, 'r') as f:
target_payload = f.read()
self.assertEqual(target_payload, golden_payload)
os.remove(temp_file)
|
"/mnt/nfs/A.mp3"#! /usr/bin/python
# -*- coding: utf-8 -*-
# Python ctypes bindings for VLC
#
# Copyright (C) 2009-2012 the VideoLAN team
# $Id: $
#
# Authors: Olivier Aubert <contact at olivieraubert.net>
# Jean Brouwers <MrJean1 at gmail.com>
# Geoff Salmon <geoff.salmon at gmail.com>
#
# This library is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of the
# License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
"""This module provides bindings for the LibVLC public API, see
U{http://wiki.videolan.org/LibVLC}.
You can find the documentation and a README file with some examples
at U{http://www.advene.org/download/python-ctypes/}.
Basically, the most important class is L{Instance}, which is used
to create a libvlc instance. From this instance, you then create
L{MediaPlayer} and L{MediaListPlayer} instances.
Alternatively, you may create instances of the L{MediaPlayer} and
L{MediaListPlayer} class directly and an instance of L{Instance}
will be implicitly created. The latter can be obtained using the
C{get_instance} method of L{MediaPlayer} and L{MediaListPlayer}.
"""
import ctypes
from ctypes.util import find_library
import os
import sys
import functools
# Used by EventManager in override.py
from inspect import getargspec
__version__ = "N/A"
build_date = "Fri Oct 7 12:04:48 2016"
# The libvlc doc states that filenames are expected to be in UTF8, do
# not rely on sys.getfilesystemencoding() which will be confused,
# esp. on windows.
DEFAULT_ENCODING = 'utf-8'
if sys.version_info[0] > 2:
str = str
unicode = str
bytes = bytes
basestring = (str, bytes)
PYTHON3 = True
def str_to_bytes(s):
"""Translate string or bytes to bytes.
"""
if isinstance(s, str):
return bytes(s, DEFAULT_ENCODING)
else:
return s
def bytes_to_str(b):
"""Translate bytes to string.
"""
if isinstance(b, bytes):
return b.decode(DEFAULT_ENCODING)
else:
return b
else:
str = str
unicode = unicode
bytes = str
basestring = basestring
PYTHON3 = False
def str_to_bytes(s):
"""Translate string or bytes to bytes.
"""
if isinstance(s, unicode):
return s.encode(DEFAULT_ENCODING)
else:
return s
def bytes_to_str(b):
"""Translate bytes to unicode string.
"""
if isinstance(b, str):
return unicode(b, DEFAULT_ENCODING)
else:
return b
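# Example round trip under the default UTF-8 encoding:
#   str_to_bytes(u'caf\xe9') == b'caf\xc3\xa9'
#   bytes_to_str(b'caf\xc3\xa9') == u'caf\xe9'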
# Internal guard to prevent internal classes from being directly
# instantiated.
_internal_guard = object()
def find_lib():
dll = None
plugin_path = None
if sys.platform.startswith('linux'):
p = find_library('vlc')
try:
dll = ctypes.CDLL(p)
except OSError: # may fail
dll = ctypes.CDLL('libvlc.so.5')
elif sys.platform.startswith('win'):
libname = 'libvlc.dll'
p = find_library(libname)
if p is None:
try: # some registry settings
# leaner than win32api, win32con
if PYTHON3:
import winreg as w
else:
import _winreg as w
for r in w.HKEY_LOCAL_MACHINE, w.HKEY_CURRENT_USER:
try:
r = w.OpenKey(r, 'Software\\VideoLAN\\VLC')
plugin_path, _ = w.QueryValueEx(r, 'InstallDir')
w.CloseKey(r)
break
except w.error:
pass
except ImportError: # no PyWin32
pass
if plugin_path is None:
# try some standard locations.
programfiles = os.environ["ProgramFiles"]
homedir = os.environ["HOMEDRIVE"]
for p in ('{programfiles}\\VideoLan{libname}', '{homedir}:\\VideoLan{libname}',
'{programfiles}{libname}', '{homedir}:{libname}'):
p = p.format(homedir = homedir,
programfiles = programfiles,
libname = '\\VLC\\' + libname)
if os.path.exists(p):
plugin_path = os.path.dirname(p)
break
if plugin_path is not None: # try loading
p = os.getcwd()
os.chdir(plugin_path)
# if chdir failed, this will raise an exception
dll = ctypes.CDLL(libname)
# restore cwd after dll has been loaded
os.chdir(p)
else: # may fail
dll = ctypes.CDLL(libname)
else:
plugin_path = os.path.dirname(p)
dll = ctypes.CDLL(p)
elif sys.platform.startswith('darwin'):
# FIXME: should find a means to configure path
d = '/Applications/VLC.app/Contents/MacOS/'
p = d + 'lib/libvlc.dylib'
if os.path.exists(p):
dll = ctypes.CDLL(p)
for p in ('modules', 'plugins'):
p = d + p
if os.path.isdir(p):
plugin_path = p
break
else: # hope, some PATH is set...
dll = ctypes.CDLL('libvlc.dylib')
else:
raise NotImplementedError('%s: %s not supported' % (sys.argv[0], sys.platform))
return (dll, plugin_path)
# plugin_path used on win32 and MacOS in override.py
dll, plugin_path = find_lib()
class VLCException(Exception):
"""Exception raised by libvlc methods.
"""
pass
try:
_Ints = (int, long)
except NameError: # no long in Python 3+
_Ints = int
_Seqs = (list, tuple)
# Used for handling *event_manager() methods.
class memoize_parameterless(object):
"""Decorator. Caches a parameterless method's return value each time it is called.
If called later with the same arguments, the cached value is returned
(not reevaluated).
Adapted from https://wiki.python.org/moin/PythonDecoratorLibrary
"""
def __init__(self, func):
self.func = func
self._cache = {}
def __call__(self, obj):
try:
return self._cache[obj]
except KeyError:
v = self._cache[obj] = self.func(obj)
return v
def __repr__(self):
"""Return the function's docstring.
"""
return self.func.__doc__
def __get__(self, obj, objtype):
"""Support instance methods.
"""
return functools.partial(self.__call__, obj)
# Default instance. It is used to instantiate classes directly in the
# OO-wrapper.
_default_instance = None
def get_default_instance():
"""Return the default VLC.Instance.
"""
global _default_instance
if _default_instance is None:
_default_instance = Instance()
return _default_instance
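# Hedged usage sketch (assumes libvlc is installed; "example.mp3" is a
# placeholder path, not shipped with these bindings):
#
#   player = get_default_instance().media_player_new()
#   player.set_media(get_default_instance().media_new("example.mp3"))
#   player.play()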
_Cfunctions = {} # from LibVLC __version__
_Globals = globals() # sys.modules[__name__].__dict__
def _Cfunction(name, flags, errcheck, *types):
"""(INTERNAL) New ctypes function binding.
"""
if hasattr(dll, name) and name in _Globals:
p = ctypes.CFUNCTYPE(*types)
f = p((name, dll), flags)
if errcheck is not None:
f.errcheck = errcheck
# replace the Python function
# in this module, but only when
# running as python -O or -OO
if __debug__:
_Cfunctions[name] = f
else:
_Globals[name] = f
return f
raise NameError('no function %r' % (name,))
def _Cobject(cls, ctype):
"""(INTERNAL) New instance from ctypes.
"""
o = object.__new__(cls)
o._as_parameter_ = ctype
return o
def _Constructor(cls, ptr=_internal_guard):
"""(INTERNAL) New wrapper from ctypes.
"""
if ptr == _internal_guard:
raise VLCException("(INTERNAL) ctypes class. You should get references for this class through methods of the LibVLC API.")
if ptr is None or ptr == 0:
return None
return _Cobject(cls, ctypes.c_void_p(ptr))
class _Cstruct(ctypes.Structure):
"""(INTERNAL) Base class for ctypes structures.
"""
    _fields_ = [] # list of 2-tuples ('name', ctypes.<type>)
def __str__(self):
l = [' %s:\t%s' % (n, getattr(self, n)) for n, _ in self._fields_]
return '\n'.join([self.__class__.__name__] + l)
def __repr__(self):
return '%s.%s' % (self.__class__.__module__, self)
class _Ctype(object):
"""(INTERNAL) Base class for ctypes.
"""
@staticmethod
def from_param(this): # not self
"""(INTERNAL) ctypes parameter conversion method.
"""
if this is None:
return None
return this._as_parameter_
class ListPOINTER(object):
"""Just like a POINTER but accept a list of ctype as an argument.
"""
def __init__(self, etype):
self.etype = etype
def from_param(self, param):
if isinstance(param, _Seqs):
return (self.etype * len(param))(*param)
# errcheck functions for some native functions.
def string_result(result, func, arguments):
"""Errcheck function. Returns a string and frees the original pointer.
It assumes the result is a char *.
"""
if result:
# make a python string copy
s = bytes_to_str(ctypes.string_at(result))
# free original string ptr
libvlc_free(result)
return s
return None
def class_result(classname):
"""Errcheck function. Returns a function that creates the specified class.
"""
def wrap_errcheck(result, func, arguments):
if result is None:
return None
return classname(result)
return wrap_errcheck
# Wrapper for the opaque struct libvlc_log_t
class Log(ctypes.Structure):
pass
Log_ptr = ctypes.POINTER(Log)
# FILE* ctypes wrapper, copied from
# http://svn.python.org/projects/ctypes/trunk/ctypeslib/ctypeslib/contrib/pythonhdr.py
class FILE(ctypes.Structure):
pass
FILE_ptr = ctypes.POINTER(FILE)
if PYTHON3:
PyFile_FromFd = ctypes.pythonapi.PyFile_FromFd
PyFile_FromFd.restype = ctypes.py_object
PyFile_FromFd.argtypes = [ctypes.c_int,
ctypes.c_char_p,
ctypes.c_char_p,
ctypes.c_int,
ctypes.c_char_p,
ctypes.c_char_p,
ctypes.c_char_p,
ctypes.c_int ]
PyFile_AsFd = ctypes.pythonapi.PyObject_AsFileDescriptor
PyFile_AsFd.restype = ctypes.c_int
PyFile_AsFd.argtypes = [ctypes.py_object]
else:
PyFile_FromFile = ctypes.pythonapi.PyFile_FromFile
PyFile_FromFile.restype = ctypes.py_object
PyFile_FromFile.argtypes = [FILE_ptr,
ctypes.c_char_p,
ctypes.c_char_p,
ctypes.CFUNCTYPE(ctypes.c_int, FILE_ptr)]
PyFile_AsFile = ctypes.pythonapi.PyFile_AsFile
PyFile_AsFile.restype = FILE_ptr
PyFile_AsFile.argtypes = [ctypes.py_object]
# Generated enum types #
class _Enum(ctypes.c_uint):
'''(INTERNAL) Base class
'''
_enum_names_ = {}
def __str__(self):
n = self._enum_names_.get(self.value, '') or ('FIXME_(%r)' % (self.value,))
return '.'.join((self.__class__.__name__, n))
def __hash__(self):
return self.value
def __repr__(self):
return '.'.join((self.__class__.__module__, self.__str__()))
def __eq__(self, other):
return ( (isinstance(other, _Enum) and self.value == other.value)
or (isinstance(other, _Ints) and self.value == other) )
def __ne__(self, other):
return not self.__eq__(other)
class LogLevel(_Enum):
'''Logging messages level.
\note future libvlc versions may define new levels.
'''
_enum_names_ = {
0: 'DEBUG',
2: 'NOTICE',
3: 'WARNING',
4: 'ERROR',
}
LogLevel.DEBUG = LogLevel(0)
LogLevel.ERROR = LogLevel(4)
LogLevel.NOTICE = LogLevel(2)
LogLevel.WARNING = LogLevel(3)
class DialogQuestionType(_Enum):
'''@defgroup libvlc_dialog libvlc dialog
@ingroup libvlc
@{
@file
libvlc dialog external api.
'''
_enum_names_ = {
0: 'NORMAL',
1: 'WARNING',
2: 'CRITICAL',
}
DialogQuestionType.CRITICAL = DialogQuestionType(2)
DialogQuestionType.NORMAL = DialogQuestionType(0)
DialogQuestionType.WARNING = DialogQuestionType(1)
class EventType(_Enum):
'''Event types.
'''
_enum_names_ = {
0: 'MediaMetaChanged',
1: 'MediaSubItemAdded',
2: 'MediaDurationChanged',
3: 'MediaParsedChanged',
4: 'MediaFreed',
5: 'MediaStateChanged',
6: 'MediaSubItemTreeAdded',
0x100: 'MediaPlayerMediaChanged',
257: 'MediaPlayerNothingSpecial',
258: 'MediaPlayerOpening',
259: 'MediaPlayerBuffering',
260: 'MediaPlayerPlaying',
261: 'MediaPlayerPaused',
262: 'MediaPlayerStopped',
263: 'MediaPlayerForward',
264: 'MediaPlayerBackward',
265: 'MediaPlayerEndReached',
266: 'MediaPlayerEncounteredError',
267: 'MediaPlayerTimeChanged',
268: 'MediaPlayerPositionChanged',
269: 'MediaPlayerSeekableChanged',
270: 'MediaPlayerPausableChanged',
271: 'MediaPlayerTitleChanged',
272: 'MediaPlayerSnapshotTaken',
273: 'MediaPlayerLengthChanged',
274: 'MediaPlayerVout',
275: 'MediaPlayerScrambledChanged',
276: 'MediaPlayerESAdded',
277: 'MediaPlayerESDeleted',
278: 'MediaPlayerESSelected',
279: 'MediaPlayerCorked',
280: 'MediaPlayerUncorked',
281: 'MediaPlayerMuted',
282: 'MediaPlayerUnmuted',
283: 'MediaPlayerAudioVolume',
284: 'MediaPlayerAudioDevice',
285: 'MediaPlayerChapterChanged',
0x200: 'MediaListItemAdded',
513: 'MediaListWillAddItem',
514: 'MediaListItemDeleted',
515: 'MediaListWillDeleteItem',
516: 'MediaListEndReached',
0x300: 'MediaListViewItemAdded',
769: 'MediaListViewWillAddItem',
770: 'MediaListViewItemDeleted',
771: 'MediaListViewWillDeleteItem',
0x400: 'MediaListPlayerPlayed',
1025: 'MediaListPlayerNextItemSet',
1026: 'MediaListPlayerStopped',
0x500: 'MediaDiscovererStarted',
1281: 'MediaDiscovererEnded',
1282: 'RendererDiscovererItemAdded',
1283: 'RendererDiscovererItemDeleted',
0x600: 'VlmMediaAdded',
1537: 'VlmMediaRemoved',
1538: 'VlmMediaChanged',
1539: 'VlmMediaInstanceStarted',
1540: 'VlmMediaInstanceStopped',
1541: 'VlmMediaInstanceStatusInit',
1542: 'VlmMediaInstanceStatusOpening',
1543: 'VlmMediaInstanceStatusPlaying',
1544: 'VlmMediaInstanceStatusPause',
1545: 'VlmMediaInstanceStatusEnd',
1546: 'VlmMediaInstanceStatusError',
}
EventType.MediaDiscovererEnded = EventType(1281)
EventType.MediaDiscovererStarted = EventType(0x500)
EventType.MediaDurationChanged = EventType(2)
EventType.MediaFreed = EventType(4)
EventType.MediaListEndReached = EventType(516)
EventType.MediaListItemAdded = EventType(0x200)
EventType.MediaListItemDeleted = EventType(514)
EventType.MediaListPlayerNextItemSet = EventType(1025)
EventType.MediaListPlayerPlayed = EventType(0x400)
EventType.MediaListPlayerStopped = EventType(1026)
EventType.MediaListViewItemAdded = EventType(0x300)
EventType.MediaListViewItemDeleted = EventType(770)
EventType.MediaListViewWillAddItem = EventType(769)
EventType.MediaListViewWillDeleteItem = EventType(771)
EventType.MediaListWillAddItem = EventType(513)
EventType.MediaListWillDeleteItem = EventType(515)
EventType.MediaMetaChanged = EventType(0)
EventType.MediaParsedChanged = EventType(3)
EventType.MediaPlayerAudioDevice = EventType(284)
EventType.MediaPlayerAudioVolume = EventType(283)
EventType.MediaPlayerBackward = EventType(264)
EventType.MediaPlayerBuffering = EventType(259)
EventType.MediaPlayerChapterChanged = EventType(285)
EventType.MediaPlayerCorked = EventType(279)
EventType.MediaPlayerESAdded = EventType(276)
EventType.MediaPlayerESDeleted = EventType(277)
EventType.MediaPlayerESSelected = EventType(278)
EventType.MediaPlayerEncounteredError = EventType(266)
EventType.MediaPlayerEndReached = EventType(265)
EventType.MediaPlayerForward = EventType(263)
EventType.MediaPlayerLengthChanged = EventType(273)
EventType.MediaPlayerMediaChanged = EventType(0x100)
EventType.MediaPlayerMuted = EventType(281)
EventType.MediaPlayerNothingSpecial = EventType(257)
EventType.MediaPlayerOpening = EventType(258)
EventType.MediaPlayerPausableChanged = EventType(270)
EventType.MediaPlayerPaused = EventType(261)
EventType.MediaPlayerPlaying = EventType(260)
EventType.MediaPlayerPositionChanged = EventType(268)
EventType.MediaPlayerScrambledChanged = EventType(275)
EventType.MediaPlayerSeekableChanged = EventType(269)
EventType.MediaPlayerSnapshotTaken = EventType(272)
EventType.MediaPlayerStopped = EventType(262)
EventType.MediaPlayerTimeChanged = EventType(267)
EventType.MediaPlayerTitleChanged = EventType(271)
EventType.MediaPlayerUncorked = EventType(280)
EventType.MediaPlayerUnmuted = EventType(282)
EventType.MediaPlayerVout = EventType(274)
EventType.MediaStateChanged = EventType(5)
EventType.MediaSubItemAdded = EventType(1)
EventType.MediaSubItemTreeAdded = EventType(6)
EventType.RendererDiscovererItemAdded = EventType(1282)
EventType.RendererDiscovererItemDeleted = EventType(1283)
EventType.VlmMediaAdded = EventType(0x600)
EventType.VlmMediaChanged = EventType(1538)
EventType.VlmMediaInstanceStarted = EventType(1539)
EventType.VlmMediaInstanceStatusEnd = EventType(1545)
EventType.VlmMediaInstanceStatusError = EventType(1546)
EventType.VlmMediaInstanceStatusInit = EventType(1541)
EventType.VlmMediaInstanceStatusOpening = EventType(1542)
EventType.VlmMediaInstanceStatusPause = EventType(1544)
EventType.VlmMediaInstanceStatusPlaying = EventType(1543)
EventType.VlmMediaInstanceStopped = EventType(1540)
EventType.VlmMediaRemoved = EventType(1537)
class Meta(_Enum):
'''Meta data types.
'''
_enum_names_ = {
0: 'Title',
1: 'Artist',
2: 'Genre',
3: 'Copyright',
4: 'Album',
5: 'TrackNumber',
6: 'Description',
7: 'Rating',
8: 'Date',
9: 'Setting',
10: 'URL',
11: 'Language',
12: 'NowPlaying',
13: 'Publisher',
14: 'EncodedBy',
15: 'ArtworkURL',
16: 'TrackID',
17: 'TrackTotal',
18: 'Director',
19: 'Season',
20: 'Episode',
21: 'ShowName',
22: 'Actors',
23: 'AlbumArtist',
24: 'DiscNumber',
25: 'DiscTotal',
}
Meta.Actors = Meta(22)
Meta.Album = Meta(4)
Meta.AlbumArtist = Meta(23)
Meta.Artist = Meta(1)
Meta.ArtworkURL = Meta(15)
Meta.Copyright = Meta(3)
Meta.Date = Meta(8)
Meta.Description = Meta(6)
Meta.Director = Meta(18)
Meta.DiscNumber = Meta(24)
Meta.DiscTotal = Meta(25)
Meta.EncodedBy = Meta(14)
Meta.Episode = Meta(20)
Meta.Genre = Meta(2)
Meta.Language = Meta(11)
Meta.NowPlaying = Meta(12)
Meta.Publisher = Meta(13)
Meta.Rating = Meta(7)
Meta.Season = Meta(19)
Meta.Setting = Meta(9)
Meta.ShowName = Meta(21)
Meta.Title = Meta(0)
Meta.TrackID = Meta(16)
Meta.TrackNumber = Meta(5)
Meta.TrackTotal = Meta(17)
Meta.URL = Meta(10)
class State(_Enum):
    '''Player and media states.
    Note: the order of the libvlc_state_t enum must match exactly the order
    of the mediacontrol_PlayerStatus and input_state_e enums, and of
    VideoLAN.LibVLC.State (at bindings/cil/src/media.cs).
    Expected states by web plugins are:
    idle/close=0, opening=1, playing=3, paused=4,
    stopping=5, ended=6, error=7.
    '''
_enum_names_ = {
0: 'NothingSpecial',
1: 'Opening',
2: 'Buffering',
3: 'Playing',
4: 'Paused',
5: 'Stopped',
6: 'Ended',
7: 'Error',
}
State.Buffering = State(2)
State.Ended = State(6)
State.Error = State(7)
State.NothingSpecial = State(0)
State.Opening = State(1)
State.Paused = State(4)
State.Playing = State(3)
State.Stopped = State(5)
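# Usage sketch (hedged): polling playback state. Assumes this module is
# importable as ``vlc`` and that the MediaPlayer wrapper generated later in
# this file exposes get_state(), e.g.:
#
#   import vlc
#   player = vlc.MediaPlayer('file:///path/to/media.mp4')  # hypothetical MRL
#   player.play()
#   if player.get_state() == vlc.State.Playing:
#       print('playing')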
class TrackType(_Enum):
    '''Track type of an elementary stream: audio, video or text.
    '''
_enum_names_ = {
-1: 'unknown',
0: 'audio',
1: 'video',
2: 'text',
}
TrackType.audio = TrackType(0)
TrackType.text = TrackType(2)
TrackType.unknown = TrackType(-1)
TrackType.video = TrackType(1)
class MediaType(_Enum):
'''Media type
See libvlc_media_get_type.
'''
_enum_names_ = {
0: 'unknown',
1: 'file',
2: 'directory',
3: 'disc',
4: 'stream',
5: 'playlist',
}
MediaType.directory = MediaType(2)
MediaType.disc = MediaType(3)
MediaType.file = MediaType(1)
MediaType.playlist = MediaType(5)
MediaType.stream = MediaType(4)
MediaType.unknown = MediaType(0)
class MediaParseFlag(_Enum):
'''Parse flags used by libvlc_media_parse_with_options()
See libvlc_media_parse_with_options.
'''
_enum_names_ = {
0x0: 'local',
0x1: 'network',
        0x2: 'fetch_local',
        0x4: 'fetch_network',
0x8: 'interact',
}
MediaParseFlag.fetch_local = MediaParseFlag(0x2)
MediaParseFlag.fetch_network = MediaParseFlag(0x4)
MediaParseFlag.interact = MediaParseFlag(0x8)
MediaParseFlag.local = MediaParseFlag(0x0)
MediaParseFlag.network = MediaParseFlag(0x1)
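# Usage sketch (hedged): parse flags are bit flags and can be OR-ed together
# for libvlc_media_parse_with_options(); the Media.parse_with_options wrapper
# name below is an assumption about what this module generates further down:
#
#   flags = MediaParseFlag.local.value | MediaParseFlag.network.value
#   media.parse_with_options(flags, -1)  # -1: default timeout per libvlc docs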
class MediaParsedStatus(_Enum):
    '''Parse status sent by libvlc_media_parse_with_options() or returned by
libvlc_media_get_parsed_status()
See libvlc_media_parse_with_options
See libvlc_media_get_parsed_status.
'''
_enum_names_ = {
1: 'skipped',
2: 'failed',
3: 'timeout',
4: 'done',
}
MediaParsedStatus.done = MediaParsedStatus(4)
MediaParsedStatus.failed = MediaParsedStatus(2)
MediaParsedStatus.skipped = MediaParsedStatus(1)
MediaParsedStatus.timeout = MediaParsedStatus(3)
class MediaSlaveType(_Enum):
'''Type of a media slave: subtitle or audio.
'''
_enum_names_ = {
0: 'subtitle',
1: 'audio',
}
MediaSlaveType.audio = MediaSlaveType(1)
MediaSlaveType.subtitle = MediaSlaveType(0)
class MediaDiscovererCategory(_Enum):
'''Category of a media discoverer
See libvlc_media_discoverer_list_get().
'''
_enum_names_ = {
0: 'devices',
1: 'lan',
2: 'podcasts',
3: 'localdirs',
}
MediaDiscovererCategory.devices = MediaDiscovererCategory(0)
MediaDiscovererCategory.lan = MediaDiscovererCategory(1)
MediaDiscovererCategory.localdirs = MediaDiscovererCategory(3)
MediaDiscovererCategory.podcasts = MediaDiscovererCategory(2)
class PlaybackMode(_Enum):
'''Defines playback modes for playlist.
'''
_enum_names_ = {
0: 'default',
1: 'loop',
2: 'repeat',
}
PlaybackMode.default = PlaybackMode(0)
PlaybackMode.loop = PlaybackMode(1)
PlaybackMode.repeat = PlaybackMode(2)
class VideoMarqueeOption(_Enum):
'''Marq options definition.
'''
_enum_names_ = {
0: 'Enable',
1: 'Text',
2: 'Color',
3: 'Opacity',
4: 'Position',
5: 'Refresh',
6: 'Size',
7: 'Timeout',
8: 'marquee_X',
9: 'marquee_Y',
}
VideoMarqueeOption.Color = VideoMarqueeOption(2)
VideoMarqueeOption.Enable = VideoMarqueeOption(0)
VideoMarqueeOption.Opacity = VideoMarqueeOption(3)
VideoMarqueeOption.Position = VideoMarqueeOption(4)
VideoMarqueeOption.Refresh = VideoMarqueeOption(5)
VideoMarqueeOption.Size = VideoMarqueeOption(6)
VideoMarqueeOption.Text = VideoMarqueeOption(1)
VideoMarqueeOption.Timeout = VideoMarqueeOption(7)
VideoMarqueeOption.marquee_X = VideoMarqueeOption(8)
VideoMarqueeOption.marquee_Y = VideoMarqueeOption(9)
class NavigateMode(_Enum):
'''Navigation mode.
'''
_enum_names_ = {
0: 'activate',
1: 'up',
2: 'down',
3: 'left',
4: 'right',
5: 'popup',
}
NavigateMode.activate = NavigateMode(0)
NavigateMode.down = NavigateMode(2)
NavigateMode.left = NavigateMode(3)
NavigateMode.popup = NavigateMode(5)
NavigateMode.right = NavigateMode(4)
NavigateMode.up = NavigateMode(1)
class Position(_Enum):
'''Enumeration of values used to set position (e.g. of video title).
'''
_enum_names_ = {
-1: 'disable',
0: 'center',
1: 'left',
2: 'right',
3: 'top',
        4: 'top_left',
        5: 'top_right',
        6: 'bottom',
        7: 'bottom_left',
        8: 'bottom_right',
}
Position.bottom = Position(6)
Position.bottom_left = Position(7)
Position.bottom_right = Position(8)
Position.center = Position(0)
Position.disable = Position(-1)
Position.left = Position(1)
Position.right = Position(2)
Position.top = Position(3)
Position.top_left = Position(4)
Position.top_right = Position(5)
class VideoLogoOption(_Enum):
'''Option values for libvlc_video_{get,set}_logo_{int,string}.
'''
_enum_names_ = {
0: 'enable',
1: 'file',
2: 'logo_x',
3: 'logo_y',
4: 'delay',
5: 'repeat',
6: 'opacity',
7: 'position',
}
VideoLogoOption.delay = VideoLogoOption(4)
VideoLogoOption.enable = VideoLogoOption(0)
VideoLogoOption.file = VideoLogoOption(1)
VideoLogoOption.logo_x = VideoLogoOption(2)
VideoLogoOption.logo_y = VideoLogoOption(3)
VideoLogoOption.opacity = VideoLogoOption(6)
VideoLogoOption.position = VideoLogoOption(7)
VideoLogoOption.repeat = VideoLogoOption(5)
class VideoAdjustOption(_Enum):
'''Option values for libvlc_video_{get,set}_adjust_{int,float,bool}.
'''
_enum_names_ = {
0: 'Enable',
1: 'Contrast',
2: 'Brightness',
3: 'Hue',
4: 'Saturation',
5: 'Gamma',
}
VideoAdjustOption.Brightness = VideoAdjustOption(2)
VideoAdjustOption.Contrast = VideoAdjustOption(1)
VideoAdjustOption.Enable = VideoAdjustOption(0)
VideoAdjustOption.Gamma = VideoAdjustOption(5)
VideoAdjustOption.Hue = VideoAdjustOption(3)
VideoAdjustOption.Saturation = VideoAdjustOption(4)
class AudioOutputDeviceTypes(_Enum):
'''Audio device types.
'''
_enum_names_ = {
-1: 'Error',
1: 'Mono',
2: 'Stereo',
4: '_2F2R',
5: '_3F2R',
6: '_5_1',
7: '_6_1',
8: '_7_1',
10: 'SPDIF',
}
AudioOutputDeviceTypes.Error = AudioOutputDeviceTypes(-1)
AudioOutputDeviceTypes.Mono = AudioOutputDeviceTypes(1)
AudioOutputDeviceTypes.SPDIF = AudioOutputDeviceTypes(10)
AudioOutputDeviceTypes.Stereo = AudioOutputDeviceTypes(2)
AudioOutputDeviceTypes._2F2R = AudioOutputDeviceTypes(4)
AudioOutputDeviceTypes._3F2R = AudioOutputDeviceTypes(5)
AudioOutputDeviceTypes._5_1 = AudioOutputDeviceTypes(6)
AudioOutputDeviceTypes._6_1 = AudioOutputDeviceTypes(7)
AudioOutputDeviceTypes._7_1 = AudioOutputDeviceTypes(8)
class AudioOutputChannel(_Enum):
'''Audio channels.
'''
_enum_names_ = {
-1: 'Error',
1: 'Stereo',
2: 'RStereo',
3: 'Left',
4: 'Right',
5: 'Dolbys',
}
AudioOutputChannel.Dolbys = AudioOutputChannel(5)
AudioOutputChannel.Error = AudioOutputChannel(-1)
AudioOutputChannel.Left = AudioOutputChannel(3)
AudioOutputChannel.RStereo = AudioOutputChannel(2)
AudioOutputChannel.Right = AudioOutputChannel(4)
AudioOutputChannel.Stereo = AudioOutputChannel(1)
class MediaPlayerRole(_Enum):
'''Media player roles.
\version libvlc 3.0.0 and later.
see \ref libvlc_media_player_set_role().
'''
_enum_names_ = {
0: '_None',
1: 'Music',
2: 'Video',
3: 'Communication',
4: 'Game',
5: 'Notification',
6: 'Animation',
7: 'Production',
8: 'Accessibility',
9: 'Test',
}
MediaPlayerRole.Accessibility = MediaPlayerRole(8)
MediaPlayerRole.Animation = MediaPlayerRole(6)
MediaPlayerRole.Communication = MediaPlayerRole(3)
MediaPlayerRole.Game = MediaPlayerRole(4)
MediaPlayerRole.Music = MediaPlayerRole(1)
MediaPlayerRole.Notification = MediaPlayerRole(5)
MediaPlayerRole.Production = MediaPlayerRole(7)
MediaPlayerRole.Test = MediaPlayerRole(9)
MediaPlayerRole.Video = MediaPlayerRole(2)
MediaPlayerRole._None = MediaPlayerRole(0)
class Callback(ctypes.c_void_p):
"""Callback function notification.
@param p_event: the event triggering the callback.
"""
pass
class LogCb(ctypes.c_void_p):
"""Callback prototype for LibVLC log message handler.
@param data: data pointer as given to L{libvlc_log_set}().
@param level: message level (@ref libvlc_log_level).
@param ctx: message context (meta-information about the message).
@param fmt: printf() format string (as defined by ISO C11).
@param args: variable argument list for the format @note Log message handlers B{must} be thread-safe. @warning The message context pointer, the format string parameters and the variable arguments are only valid until the callback returns.
"""
pass
class MediaOpenCb(ctypes.c_void_p):
"""Callback prototype to open a custom bitstream input media.
The same media item can be opened multiple times. Each time, this callback
is invoked. It should allocate and initialize any instance-specific
resources, then store them in *datap. The instance resources can be freed
in the @ref libvlc_media_close_cb callback.
@param opaque: private pointer as passed to L{libvlc_media_new_callbacks}().
@return: datap storage space for a private data pointer, sizep byte length of the bitstream or UINT64_MAX if unknown.
"""
pass
class MediaReadCb(ctypes.c_void_p):
"""Callback prototype to read data from a custom bitstream input media.
@param opaque: private pointer as set by the @ref libvlc_media_open_cb callback.
@param buf: start address of the buffer to read data into.
@param len: bytes length of the buffer.
@return: strictly positive number of bytes read, 0 on end-of-stream, or -1 on non-recoverable error @note If no data is immediately available, then the callback should sleep. @warning The application is responsible for avoiding deadlock situations. In particular, the callback should return an error if playback is stopped; if it does not return, then L{libvlc_media_player_stop}() will never return.
"""
pass
class MediaSeekCb(ctypes.c_void_p):
"""Callback prototype to seek a custom bitstream input media.
@param opaque: private pointer as set by the @ref libvlc_media_open_cb callback.
@param offset: absolute byte offset to seek to.
@return: 0 on success, -1 on error.
"""
pass
class MediaCloseCb(ctypes.c_void_p):
"""Callback prototype to close a custom bitstream input media.
@param opaque: private pointer as set by the @ref libvlc_media_open_cb callback.
"""
pass
class VideoLockCb(ctypes.c_void_p):
"""Callback prototype to allocate and lock a picture buffer.
Whenever a new video frame needs to be decoded, the lock callback is
invoked. Depending on the video chroma, one or three pixel planes of
adequate dimensions must be returned via the second parameter. Those
planes must be aligned on 32-bytes boundaries.
@param opaque: private pointer as passed to L{libvlc_video_set_callbacks}() [IN].
@param planes: start address of the pixel planes (LibVLC allocates the array of void pointers, this callback must initialize the array) [OUT].
@return: a private pointer for the display and unlock callbacks to identify the picture buffers.
"""
pass
class VideoUnlockCb(ctypes.c_void_p):
"""Callback prototype to unlock a picture buffer.
When the video frame decoding is complete, the unlock callback is invoked.
This callback might not be needed at all. It is only an indication that the
application can now read the pixel values if it needs to.
@note: A picture buffer is unlocked after the picture is decoded,
but before the picture is displayed.
@param opaque: private pointer as passed to L{libvlc_video_set_callbacks}() [IN].
@param picture: private pointer returned from the @ref libvlc_video_lock_cb callback [IN].
@param planes: pixel planes as defined by the @ref libvlc_video_lock_cb callback (this parameter is only for convenience) [IN].
"""
pass
class VideoDisplayCb(ctypes.c_void_p):
"""Callback prototype to display a picture.
When the video frame needs to be shown, as determined by the media playback
clock, the display callback is invoked.
@param opaque: private pointer as passed to L{libvlc_video_set_callbacks}() [IN].
@param picture: private pointer returned from the @ref libvlc_video_lock_cb callback [IN].
"""
pass
class VideoFormatCb(ctypes.c_void_p):
"""Callback prototype to configure picture buffers format.
This callback gets the format of the video as output by the video decoder
and the chain of video filters (if any). It can opt to change any parameter
as it needs. In that case, LibVLC will attempt to convert the video format
(rescaling and chroma conversion) but these operations can be CPU intensive.
@param opaque: pointer to the private pointer passed to L{libvlc_video_set_callbacks}() [IN/OUT].
@param chroma: pointer to the 4 bytes video format identifier [IN/OUT].
@param width: pointer to the pixel width [IN/OUT].
@param height: pointer to the pixel height [IN/OUT].
@param pitches: table of scanline pitches in bytes for each pixel plane (the table is allocated by LibVLC) [OUT].
@return: lines table of scanlines count for each plane.
"""
pass
class VideoCleanupCb(ctypes.c_void_p):
"""Callback prototype to configure picture buffers format.
@param opaque: private pointer as passed to L{libvlc_video_set_callbacks}() (and possibly modified by @ref libvlc_video_format_cb) [IN].
"""
pass
class AudioPlayCb(ctypes.c_void_p):
"""Callback prototype for audio playback.
The LibVLC media player decodes and post-processes the audio signal
asynchronously (in an internal thread). Whenever audio samples are ready
to be queued to the output, this callback is invoked.
The number of samples provided per invocation may depend on the file format,
the audio coding algorithm, the decoder plug-in, the post-processing
filters and timing. Application must not assume a certain number of samples.
The exact format of audio samples is determined by L{libvlc_audio_set_format}()
or L{libvlc_audio_set_format_callbacks}() as is the channels layout.
Note that the number of samples is per channel. For instance, if the audio
    track sampling rate is 48000 Hz, then 1200 samples represent 25 milliseconds
of audio signal - regardless of the number of audio channels.
@param data: data pointer as passed to L{libvlc_audio_set_callbacks}() [IN].
@param samples: pointer to a table of audio samples to play back [IN].
@param count: number of audio samples to play back.
@param pts: expected play time stamp (see libvlc_delay()).
"""
pass
class AudioPauseCb(ctypes.c_void_p):
"""Callback prototype for audio pause.
LibVLC invokes this callback to pause audio playback.
@note: The pause callback is never called if the audio is already paused.
@param data: data pointer as passed to L{libvlc_audio_set_callbacks}() [IN].
@param pts: time stamp of the pause request (should be elapsed already).
"""
pass
class AudioResumeCb(ctypes.c_void_p):
"""Callback prototype for audio resumption.
LibVLC invokes this callback to resume audio playback after it was
previously paused.
@note: The resume callback is never called if the audio is not paused.
@param data: data pointer as passed to L{libvlc_audio_set_callbacks}() [IN].
@param pts: time stamp of the resumption request (should be elapsed already).
"""
pass
class AudioFlushCb(ctypes.c_void_p):
"""Callback prototype for audio buffer flush.
LibVLC invokes this callback if it needs to discard all pending buffers and
stop playback as soon as possible. This typically occurs when the media is
stopped.
@param data: data pointer as passed to L{libvlc_audio_set_callbacks}() [IN].
"""
pass
class AudioDrainCb(ctypes.c_void_p):
"""Callback prototype for audio buffer drain.
LibVLC may invoke this callback when the decoded audio track is ending.
There will be no further decoded samples for the track, but playback should
nevertheless continue until all already pending buffers are rendered.
@param data: data pointer as passed to L{libvlc_audio_set_callbacks}() [IN].
"""
pass
class AudioSetVolumeCb(ctypes.c_void_p):
"""Callback prototype for audio volume change.
@param data: data pointer as passed to L{libvlc_audio_set_callbacks}() [IN].
@param volume: software volume (1. = nominal, 0. = mute).
@param mute: muted flag.
"""
pass
class AudioSetupCb(ctypes.c_void_p):
"""Callback prototype to setup the audio playback.
This is called when the media player needs to create a new audio output.
@param opaque: pointer to the data pointer passed to L{libvlc_audio_set_callbacks}() [IN/OUT].
@param format: 4 bytes sample format [IN/OUT].
@param rate: sample rate [IN/OUT].
@param channels: channels count [IN/OUT].
@return: 0 on success, anything else to skip audio playback.
"""
pass
class AudioCleanupCb(ctypes.c_void_p):
"""Callback prototype for audio playback cleanup.
This is called when the media player no longer needs an audio output.
@param opaque: data pointer as passed to L{libvlc_audio_set_callbacks}() [IN].
"""
pass
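# Usage sketch (hedged): the classes above only declare the callback slots; to
# hand a Python function to LibVLC, wrap it with the matching CFUNCTYPE from
# CallbackDecorators below and keep a reference to it alive for as long as it
# may be called, e.g. (audio_set_callbacks is assumed to be the MediaPlayer
# wrapper generated later in this module):
#
#   @CallbackDecorators.AudioPlayCb
#   def on_play(data, samples, count, pts):
#       pass  # copy ``count`` audio samples from ``samples`` into your sink
#
#   # player.audio_set_callbacks(on_play, None, None, None, None, None)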
class CallbackDecorators(object):
"Class holding various method decorators for callback functions."
Callback = ctypes.CFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p)
Callback.__doc__ = '''Callback function notification.
@param p_event: the event triggering the callback.
'''
LogCb = ctypes.CFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int, Log_ptr, ctypes.c_char_p, ctypes.c_void_p)
LogCb.__doc__ = '''Callback prototype for LibVLC log message handler.
@param data: data pointer as given to L{libvlc_log_set}().
@param level: message level (@ref libvlc_log_level).
@param ctx: message context (meta-information about the message).
@param fmt: printf() format string (as defined by ISO C11).
@param args: variable argument list for the format @note Log message handlers B{must} be thread-safe. @warning The message context pointer, the format string parameters and the variable arguments are only valid until the callback returns.
'''
MediaOpenCb = ctypes.CFUNCTYPE(ctypes.POINTER(ctypes.c_int), ctypes.c_void_p, ListPOINTER(ctypes.c_void_p), ctypes.POINTER(ctypes.c_uint64))
MediaOpenCb.__doc__ = '''Callback prototype to open a custom bitstream input media.
The same media item can be opened multiple times. Each time, this callback
is invoked. It should allocate and initialize any instance-specific
resources, then store them in *datap. The instance resources can be freed
in the @ref libvlc_media_close_cb callback.
@param opaque: private pointer as passed to L{libvlc_media_new_callbacks}().
@return: datap storage space for a private data pointer, sizep byte length of the bitstream or UINT64_MAX if unknown.
'''
MediaReadCb = ctypes.CFUNCTYPE(ctypes.POINTER(ctypes.c_ssize_t), ctypes.c_void_p, ctypes.c_char_p, ctypes.c_size_t)
MediaReadCb.__doc__ = '''Callback prototype to read data from a custom bitstream input media.
@param opaque: private pointer as set by the @ref libvlc_media_open_cb callback.
@param buf: start address of the buffer to read data into.
@param len: bytes length of the buffer.
@return: strictly positive number of bytes read, 0 on end-of-stream, or -1 on non-recoverable error @note If no data is immediately available, then the callback should sleep. @warning The application is responsible for avoiding deadlock situations. In particular, the callback should return an error if playback is stopped; if it does not return, then L{libvlc_media_player_stop}() will never return.
'''
MediaSeekCb = ctypes.CFUNCTYPE(ctypes.POINTER(ctypes.c_int), ctypes.c_void_p, ctypes.c_uint64)
MediaSeekCb.__doc__ = '''Callback prototype to seek a custom bitstream input media.
@param opaque: private pointer as set by the @ref libvlc_media_open_cb callback.
@param offset: absolute byte offset to seek to.
@return: 0 on success, -1 on error.
'''
MediaCloseCb = ctypes.CFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p)
MediaCloseCb.__doc__ = '''Callback prototype to close a custom bitstream input media.
@param opaque: private pointer as set by the @ref libvlc_media_open_cb callback.
'''
VideoLockCb = ctypes.CFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p, ListPOINTER(ctypes.c_void_p))
VideoLockCb.__doc__ = '''Callback prototype to allocate and lock a picture buffer.
Whenever a new video frame needs to be decoded, the lock callback is
invoked. Depending on the video chroma, one or three pixel planes of
adequate dimensions must be returned via the second parameter. Those
planes must be aligned on 32-bytes boundaries.
@param opaque: private pointer as passed to L{libvlc_video_set_callbacks}() [IN].
@param planes: start address of the pixel planes (LibVLC allocates the array of void pointers, this callback must initialize the array) [OUT].
@return: a private pointer for the display and unlock callbacks to identify the picture buffers.
'''
VideoUnlockCb = ctypes.CFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ListPOINTER(ctypes.c_void_p))
VideoUnlockCb.__doc__ = '''Callback prototype to unlock a picture buffer.
When the video frame decoding is complete, the unlock callback is invoked.
This callback might not be needed at all. It is only an indication that the
application can now read the pixel values if it needs to.
@note: A picture buffer is unlocked after the picture is decoded,
but before the picture is displayed.
@param opaque: private pointer as passed to L{libvlc_video_set_callbacks}() [IN].
@param picture: private pointer returned from the @ref libvlc_video_lock_cb callback [IN].
@param planes: pixel planes as defined by the @ref libvlc_video_lock_cb callback (this parameter is only for convenience) [IN].
'''
VideoDisplayCb = ctypes.CFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p)
VideoDisplayCb.__doc__ = '''Callback prototype to display a picture.
When the video frame needs to be shown, as determined by the media playback
clock, the display callback is invoked.
@param opaque: private pointer as passed to L{libvlc_video_set_callbacks}() [IN].
@param picture: private pointer returned from the @ref libvlc_video_lock_cb callback [IN].
'''
VideoFormatCb = ctypes.CFUNCTYPE(ctypes.POINTER(ctypes.c_uint), ListPOINTER(ctypes.c_void_p), ctypes.c_char_p, ctypes.POINTER(ctypes.c_uint), ctypes.POINTER(ctypes.c_uint), ctypes.POINTER(ctypes.c_uint), ctypes.POINTER(ctypes.c_uint))
VideoFormatCb.__doc__ = '''Callback prototype to configure picture buffers format.
This callback gets the format of the video as output by the video decoder
and the chain of video filters (if any). It can opt to change any parameter
as it needs. In that case, LibVLC will attempt to convert the video format
(rescaling and chroma conversion) but these operations can be CPU intensive.
@param opaque: pointer to the private pointer passed to L{libvlc_video_set_callbacks}() [IN/OUT].
@param chroma: pointer to the 4 bytes video format identifier [IN/OUT].
@param width: pointer to the pixel width [IN/OUT].
@param height: pointer to the pixel height [IN/OUT].
@param pitches: table of scanline pitches in bytes for each pixel plane (the table is allocated by LibVLC) [OUT].
@return: lines table of scanlines count for each plane.
'''
VideoCleanupCb = ctypes.CFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p)
    VideoCleanupCb.__doc__ = '''Callback prototype to clean up resources set up by the @ref libvlc_video_format_cb callback.
@param opaque: private pointer as passed to L{libvlc_video_set_callbacks}() (and possibly modified by @ref libvlc_video_format_cb) [IN].
'''
AudioPlayCb = ctypes.CFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_uint, ctypes.c_int64)
AudioPlayCb.__doc__ = '''Callback prototype for audio playback.
The LibVLC media player decodes and post-processes the audio signal
asynchronously (in an internal thread). Whenever audio samples are ready
to be queued to the output, this callback is invoked.
The number of samples provided per invocation may depend on the file format,
the audio coding algorithm, the decoder plug-in, the post-processing
filters and timing. Application must not assume a certain number of samples.
The exact format of audio samples is determined by L{libvlc_audio_set_format}()
or L{libvlc_audio_set_format_callbacks}() as is the channels layout.
Note that the number of samples is per channel. For instance, if the audio
    track sampling rate is 48000 Hz, then 1200 samples represent 25 milliseconds
of audio signal - regardless of the number of audio channels.
@param data: data pointer as passed to L{libvlc_audio_set_callbacks}() [IN].
@param samples: pointer to a table of audio samples to play back [IN].
@param count: number of audio samples to play back.
@param pts: expected play time stamp (see libvlc_delay()).
'''
AudioPauseCb = ctypes.CFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int64)
AudioPauseCb.__doc__ = '''Callback prototype for audio pause.
LibVLC invokes this callback to pause audio playback.
@note: The pause callback is never called if the audio is already paused.
@param data: data pointer as passed to L{libvlc_audio_set_callbacks}() [IN].
@param pts: time stamp of the pause request (should be elapsed already).
'''
AudioResumeCb = ctypes.CFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int64)
AudioResumeCb.__doc__ = '''Callback prototype for audio resumption.
LibVLC invokes this callback to resume audio playback after it was
previously paused.
@note: The resume callback is never called if the audio is not paused.
@param data: data pointer as passed to L{libvlc_audio_set_callbacks}() [IN].
@param pts: time stamp of the resumption request (should be elapsed already).
'''
AudioFlushCb = ctypes.CFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int64)
AudioFlushCb.__doc__ = '''Callback prototype for audio buffer flush.
LibVLC invokes this callback if it needs to discard all pending buffers and
stop playback as soon as possible. This typically occurs when the media is
stopped.
@param data: data pointer as passed to L{libvlc_audio_set_callbacks}() [IN].
'''
AudioDrainCb = ctypes.CFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p)
AudioDrainCb.__doc__ = '''Callback prototype for audio buffer drain.
LibVLC may invoke this callback when the decoded audio track is ending.
There will be no further decoded samples for the track, but playback should
nevertheless continue until all already pending buffers are rendered.
@param data: data pointer as passed to L{libvlc_audio_set_callbacks}() [IN].
'''
AudioSetVolumeCb = ctypes.CFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p, ctypes.c_float, ctypes.c_bool)
AudioSetVolumeCb.__doc__ = '''Callback prototype for audio volume change.
@param data: data pointer as passed to L{libvlc_audio_set_callbacks}() [IN].
@param volume: software volume (1. = nominal, 0. = mute).
@param mute: muted flag.
'''
AudioSetupCb = ctypes.CFUNCTYPE(ctypes.POINTER(ctypes.c_int), ListPOINTER(ctypes.c_void_p), ctypes.c_char_p, ctypes.POINTER(ctypes.c_uint), ctypes.POINTER(ctypes.c_uint))
AudioSetupCb.__doc__ = '''Callback prototype to setup the audio playback.
This is called when the media player needs to create a new audio output.
@param opaque: pointer to the data pointer passed to L{libvlc_audio_set_callbacks}() [IN/OUT].
@param format: 4 bytes sample format [IN/OUT].
@param rate: sample rate [IN/OUT].
@param channels: channels count [IN/OUT].
@return: 0 on success, anything else to skip audio playback.
'''
AudioCleanupCb = ctypes.CFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p)
AudioCleanupCb.__doc__ = '''Callback prototype for audio playback cleanup.
This is called when the media player no longer needs an audio output.
@param opaque: data pointer as passed to L{libvlc_audio_set_callbacks}() [IN].
'''
cb = CallbackDecorators
# End of generated enum types #
# From libvlc_structures.h
class AudioOutput(_Cstruct):
def __str__(self):
return '%s(%s:%s)' % (self.__class__.__name__, self.name, self.description)
AudioOutput._fields_ = [ # recursive struct
('name', ctypes.c_char_p),
('description', ctypes.c_char_p),
('next', ctypes.POINTER(AudioOutput)),
]
class LogMessage(_Cstruct):
_fields_ = [
('size', ctypes.c_uint ),
('severity', ctypes.c_int ),
('type', ctypes.c_char_p),
('name', ctypes.c_char_p),
('header', ctypes.c_char_p),
('message', ctypes.c_char_p),
]
def __init__(self):
super(LogMessage, self).__init__()
self.size = ctypes.sizeof(self)
def __str__(self):
return '%s(%d:%s): %s' % (self.__class__.__name__, self.severity, self.type, self.message)
class MediaEvent(_Cstruct):
_fields_ = [
('media_name', ctypes.c_char_p),
('instance_name', ctypes.c_char_p),
]
class MediaStats(_Cstruct):
_fields_ = [
('read_bytes', ctypes.c_int ),
('input_bitrate', ctypes.c_float),
('demux_read_bytes', ctypes.c_int ),
('demux_bitrate', ctypes.c_float),
('demux_corrupted', ctypes.c_int ),
('demux_discontinuity', ctypes.c_int ),
('decoded_video', ctypes.c_int ),
('decoded_audio', ctypes.c_int ),
('displayed_pictures', ctypes.c_int ),
('lost_pictures', ctypes.c_int ),
('played_abuffers', ctypes.c_int ),
('lost_abuffers', ctypes.c_int ),
('sent_packets', ctypes.c_int ),
('sent_bytes', ctypes.c_int ),
('send_bitrate', ctypes.c_float),
]
class MediaTrackInfo(_Cstruct):
_fields_ = [
('codec', ctypes.c_uint32),
('id', ctypes.c_int ),
('type', TrackType ),
('profile', ctypes.c_int ),
('level', ctypes.c_int ),
('channels_or_height', ctypes.c_uint ),
('rate_or_width', ctypes.c_uint ),
]
class AudioTrack(_Cstruct):
_fields_ = [
('channels', ctypes.c_uint),
('rate', ctypes.c_uint),
]
class VideoTrack(_Cstruct):
_fields_ = [
('height', ctypes.c_uint),
('width', ctypes.c_uint),
('sar_num', ctypes.c_uint),
('sar_den', ctypes.c_uint),
('frame_rate_num', ctypes.c_uint),
('frame_rate_den', ctypes.c_uint),
]
class SubtitleTrack(_Cstruct):
_fields_ = [
('encoding', ctypes.c_char_p),
]
class MediaTrackTracks(ctypes.Union):
_fields_ = [
('audio', ctypes.POINTER(AudioTrack)),
('video', ctypes.POINTER(VideoTrack)),
('subtitle', ctypes.POINTER(SubtitleTrack)),
]
class MediaTrack(_Cstruct):
_anonymous_ = ("u",)
_fields_ = [
('codec', ctypes.c_uint32),
('original_fourcc', ctypes.c_uint32),
('id', ctypes.c_int ),
('type', TrackType ),
('profile', ctypes.c_int ),
('level', ctypes.c_int ),
('u', MediaTrackTracks),
('bitrate', ctypes.c_uint),
('language', ctypes.c_char_p),
('description', ctypes.c_char_p),
]
class PlaylistItem(_Cstruct):
_fields_ = [
('id', ctypes.c_int ),
('uri', ctypes.c_char_p),
('name', ctypes.c_char_p),
]
def __str__(self):
return '%s #%d %s (uri %s)' % (self.__class__.__name__, self.id, self.name, self.uri)
class Position(object):
"""Enum-like, immutable window position constants.
See e.g. VideoMarqueeOption.Position.
"""
Center = 0
Left = 1
CenterLeft = 1
Right = 2
CenterRight = 2
Top = 4
TopCenter = 4
TopLeft = 5
TopRight = 6
Bottom = 8
BottomCenter = 8
BottomLeft = 9
BottomRight = 10
def __init__(self, *unused):
raise TypeError('constants only')
def __setattr__(self, *unused): #PYCHOK expected
raise TypeError('immutable constants')
class Rectangle(_Cstruct):
_fields_ = [
('top', ctypes.c_int),
('left', ctypes.c_int),
('bottom', ctypes.c_int),
('right', ctypes.c_int),
]
class TrackDescription(_Cstruct):
def __str__(self):
return '%s(%d:%s)' % (self.__class__.__name__, self.id, self.name)
TrackDescription._fields_ = [ # recursive struct
('id', ctypes.c_int ),
('name', ctypes.c_char_p),
('next', ctypes.POINTER(TrackDescription)),
]
def track_description_list(head):
"""Convert a TrackDescription linked list to a Python list (and release the former).
"""
r = []
if head:
item = head
while item:
item = item.contents
r.append((item.id, item.name))
item = item.next
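        # the release function was renamed to libvlc_track_description_list_release
        # in later libvlc versions; fall back if the old name is not bound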
try:
libvlc_track_description_release(head)
except NameError:
libvlc_track_description_list_release(head)
return r
class EventUnion(ctypes.Union):
_fields_ = [
('meta_type', ctypes.c_uint ),
('new_child', ctypes.c_uint ),
('new_duration', ctypes.c_longlong),
('new_status', ctypes.c_int ),
('media', ctypes.c_void_p ),
('new_state', ctypes.c_uint ),
# FIXME: Media instance
('new_cache', ctypes.c_float ),
('new_position', ctypes.c_float ),
('new_time', ctypes.c_longlong),
('new_title', ctypes.c_int ),
('new_seekable', ctypes.c_longlong),
('new_pausable', ctypes.c_longlong),
('new_scrambled', ctypes.c_longlong),
('new_count', ctypes.c_longlong),
# FIXME: Skipped MediaList and MediaListView...
('filename', ctypes.c_char_p ),
('new_length', ctypes.c_longlong),
('media_event', MediaEvent ),
]
class Event(_Cstruct):
_fields_ = [
('type', EventType ),
('object', ctypes.c_void_p),
('u', EventUnion ),
]
class ModuleDescription(_Cstruct):
def __str__(self):
return '%s %s (%s)' % (self.__class__.__name__, self.shortname, self.name)
ModuleDescription._fields_ = [ # recursive struct
('name', ctypes.c_char_p),
('shortname', ctypes.c_char_p),
('longname', ctypes.c_char_p),
('help', ctypes.c_char_p),
('next', ctypes.POINTER(ModuleDescription)),
]
def module_description_list(head):
"""Convert a ModuleDescription linked list to a Python list (and release the former).
"""
r = []
if head:
item = head
while item:
item = item.contents
r.append((item.name, item.shortname, item.longname, item.help))
item = item.next
libvlc_module_description_list_release(head)
return r
class AudioOutputDevice(_Cstruct):
def __str__(self):
return '%s(%d:%s)' % (self.__class__.__name__, self.id, self.name)
AudioOutputDevice._fields_ = [ # recursive struct
('next', ctypes.POINTER(AudioOutputDevice)),
('device', ctypes.c_char_p ),
('description', ctypes.c_char_p),
]
class TitleDescription(_Cstruct):
    _fields_ = [
('duration', ctypes.c_longlong),
('name', ctypes.c_char_p),
('menu', ctypes.c_bool),
]
class ChapterDescription(_Cstruct):
    _fields_ = [
('time_offset', ctypes.c_longlong),
('duration', ctypes.c_longlong),
('name', ctypes.c_char_p),
]
# This struct depends on the MediaSlaveType enum that is defined only
# in > 2.2
if 'MediaSlaveType' in locals():
class MediaSlave(_Cstruct):
        _fields_ = [
('psz_uri', ctypes.c_char_p),
('i_type', MediaSlaveType),
('i_priority', ctypes.c_uint)
]
class RDDescription(_Cstruct):
    _fields_ = [
('name', ctypes.c_char_p),
('longname', ctypes.c_char_p)
]
# End of header.py #
class EventManager(_Ctype):
'''Create an event manager with callback handler.
This class interposes the registration and handling of
event notifications in order to (a) remove the need for
decorating each callback functions with the decorator
'@callbackmethod', (b) allow any number of positional
and/or keyword arguments to the callback (in addition
to the Event instance) and (c) to preserve the Python
objects such that the callback and argument objects
remain alive (i.e. are not garbage collected) until
B{after} the notification has been unregistered.
@note: Only a single notification can be registered
for each event type in an EventManager instance.
'''
_callback_handler = None
_callbacks = {}
def __new__(cls, ptr=_internal_guard):
if ptr == _internal_guard:
raise VLCException("(INTERNAL) ctypes class.\nYou should get a reference to EventManager through the MediaPlayer.event_manager() method.")
return _Constructor(cls, ptr)
def event_attach(self, eventtype, callback, *args, **kwds):
"""Register an event notification.
@param eventtype: the desired event type to be notified about.
@param callback: the function to call when the event occurs.
@param args: optional positional arguments for the callback.
@param kwds: optional keyword arguments for the callback.
@return: 0 on success, ENOMEM on error.
@note: The callback function must have at least one argument,
an Event instance. Any other, optional positional and keyword
arguments are in B{addition} to the first one.
"""
if not isinstance(eventtype, EventType):
raise VLCException("%s required: %r" % ('EventType', eventtype))
if not hasattr(callback, '__call__'): # callable()
raise VLCException("%s required: %r" % ('callable', callback))
# check that the callback expects arguments
if not any(getargspec(callback)[:2]): # list(...)
raise VLCException("%s required: %r" % ('argument', callback))
if self._callback_handler is None:
_called_from_ctypes = ctypes.CFUNCTYPE(None, ctypes.POINTER(Event), ctypes.c_void_p)
@_called_from_ctypes
def _callback_handler(event, k):
"""(INTERNAL) handle callback call from ctypes.
@note: We cannot simply make this an EventManager
method since ctypes does not prepend self as the
first parameter, hence this closure.
"""
try: # retrieve Python callback and arguments
call, args, kwds = self._callbacks[k]
# deref event.contents to simplify callback code
call(event.contents, *args, **kwds)
except KeyError: # detached?
pass
self._callback_handler = _callback_handler
self._callbacks = {}
k = eventtype.value
r = libvlc_event_attach(self, k, self._callback_handler, k)
if not r:
self._callbacks[k] = (callback, args, kwds)
return r
def event_detach(self, eventtype):
"""Unregister an event notification.
@param eventtype: the event type notification to be removed.
"""
if not isinstance(eventtype, EventType):
raise VLCException("%s required: %r" % ('EventType', eventtype))
k = eventtype.value
if k in self._callbacks:
del self._callbacks[k] # remove, regardless of libvlc return value
libvlc_event_detach(self, k, self._callback_handler, k)
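# Usage sketch (hedged): attaching an event callback through EventManager.
# Assumes this module is importable as ``vlc`` and that the MediaPlayer
# wrapper generated later in this file exposes event_manager(), e.g.:
#
#   import vlc
#   player = vlc.MediaPlayer('file:///path/to/media.mp4')  # hypothetical MRL
#   def on_end(event):
#       print('end reached:', event.type)
#   player.event_manager().event_attach(vlc.EventType.MediaPlayerEndReached, on_end)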
class Instance(_Ctype):
'''Create a new Instance instance.
It may take as parameter either:
- a string
- a list of strings as first parameters
- the parameters given as the constructor parameters (must be strings)
'''
def __new__(cls, *args):
if len(args) == 1:
# Only 1 arg. It is either a C pointer, or an arg string,
# or a tuple.
i = args[0]
if isinstance(i, _Ints):
return _Constructor(cls, i)
elif isinstance(i, basestring):
args = i.strip().split()
elif isinstance(i, _Seqs):
args = list(i)
else:
raise VLCException('Instance %r' % (args,))
else:
args = list(args)
if not args: # no parameters passed
args = ['vlc']
elif args[0] != 'vlc':
args.insert(0, 'vlc')
if plugin_path is not None:
# set plugin_path if detected, win32 and MacOS,
            # if the user did not specify it themselves.
os.environ.setdefault('VLC_PLUGIN_PATH', plugin_path)
if PYTHON3:
args = [ str_to_bytes(a) for a in args ]
return libvlc_new(len(args), args)
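    # Construction sketch (hedged): all of the following forms are accepted by
    # __new__ above; the options are ordinary libvlc command-line arguments:
    #
    #   i = Instance()                                 # default options
    #   i = Instance('--no-audio --verbose 2')         # one string, whitespace-split
    #   i = Instance('--no-audio', '--verbose', '2')   # or separate strings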
def media_player_new(self, uri=None):
"""Create a new MediaPlayer instance.
@param uri: an optional URI to play in the player.
"""
p = libvlc_media_player_new(self)
if uri:
p.set_media(self.media_new(uri))
p._instance = self
return p
def media_list_player_new(self):
"""Create a new MediaListPlayer instance.
"""
p = libvlc_media_list_player_new(self)
p._instance = self
return p
def media_new(self, mrl, *options):
"""Create a new Media instance.
        If mrl contains a colon (:) preceded by more than one letter, it
        will be treated as a URL. Otherwise it will be considered a
        local path. If you need more control, use the
        media_new_location/media_new_path methods directly.
Options can be specified as supplementary string parameters,
but note that many options cannot be set at the media level,
and rather at the Instance level. For instance, the marquee
filter must be specified when creating the vlc.Instance or
vlc.MediaPlayer.
Alternatively, options can be added to the media using the
Media.add_options method (with the same limitation).
@param options: optional media option=value strings
"""
if ':' in mrl and mrl.index(':') > 1:
# Assume it is a URL
m = libvlc_media_new_location(self, str_to_bytes(mrl))
else:
# Else it should be a local path.
m = libvlc_media_new_path(self, str_to_bytes(os.path.normpath(mrl)))
for o in options:
libvlc_media_add_option(m, str_to_bytes(o))
m._instance = self
return m
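    # Dispatch sketch (hedged), following the colon heuristic above:
    #
    #   inst.media_new('http://example.com/stream.ts')  # ':' after 4 letters -> URL
    #   inst.media_new('C:\\movies\\film.mp4')          # ':' after 1 letter -> local path
    #   inst.media_new('/tmp/a.mp3', 'no-audio')        # extra args become media options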
def media_list_new(self, mrls=None):
"""Create a new MediaList instance.
@param mrls: optional list of MRL strings
"""
l = libvlc_media_list_new(self)
# We should take the lock, but since we did not leak the
# reference, nobody else can access it.
if mrls:
for m in mrls:
l.add_media(m)
l._instance = self
return l
def audio_output_enumerate_devices(self):
"""Enumerate the defined audio output devices.
@return: list of dicts {name:, description:, devices:}
"""
r = []
head = libvlc_audio_output_list_get(self)
if head:
i = head
while i:
i = i.contents
d = [{'id': libvlc_audio_output_device_id (self, i.name, d),
'longname': libvlc_audio_output_device_longname(self, i.name, d)}
for d in range(libvlc_audio_output_device_count (self, i.name))]
r.append({'name': i.name, 'description': i.description, 'devices': d})
i = i.next
libvlc_audio_output_list_release(head)
return r
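    # Note (hedged): the helper above relies on libvlc_audio_output_device_count,
    # _id and _longname, which newer libvlc releases may no longer export;
    # audio_output_device_list_get() further below is the newer per-output API.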
def audio_filter_list_get(self):
"""Returns a list of available audio filters.
"""
return module_description_list(libvlc_audio_filter_list_get(self))
def video_filter_list_get(self):
"""Returns a list of available video filters.
"""
return module_description_list(libvlc_video_filter_list_get(self))
def release(self):
'''Decrement the reference count of a libvlc instance, and destroy it
if it reaches zero.
'''
return libvlc_release(self)
def retain(self):
'''Increments the reference count of a libvlc instance.
The initial reference count is 1 after L{new}() returns.
'''
return libvlc_retain(self)
def add_intf(self, name):
'''Try to start a user interface for the libvlc instance.
@param name: interface name, or None for default.
@return: 0 on success, -1 on error.
'''
return libvlc_add_intf(self, str_to_bytes(name))
def set_user_agent(self, name, http):
'''Sets the application name. LibVLC passes this as the user agent string
when a protocol requires it.
@param name: human-readable application name, e.g. "FooBar player 1.2.3".
@param http: HTTP User Agent, e.g. "FooBar/1.2.3 Python/2.6.0".
@version: LibVLC 1.1.1 or later.
'''
return libvlc_set_user_agent(self, str_to_bytes(name), str_to_bytes(http))
def set_app_id(self, id, version, icon):
'''Sets some meta-information about the application.
See also L{set_user_agent}().
@param id: Java-style application identifier, e.g. "com.acme.foobar".
@param version: application version numbers, e.g. "1.2.3".
@param icon: application icon name, e.g. "foobar".
@version: LibVLC 2.1.0 or later.
'''
return libvlc_set_app_id(self, str_to_bytes(id), str_to_bytes(version), str_to_bytes(icon))
def log_unset(self):
'''Unsets the logging callback for a LibVLC instance. This is rarely needed:
the callback is implicitly unset when the instance is destroyed.
This function will wait for any pending callbacks invocation to complete
(causing a deadlock if called from within the callback).
@version: LibVLC 2.1.0 or later.
'''
return libvlc_log_unset(self)
def log_set(self, data, p_instance):
'''Sets the logging callback for a LibVLC instance.
This function is thread-safe: it will wait for any pending callbacks
invocation to complete.
@param data: opaque data pointer for the callback function @note Some log messages (especially debug) are emitted by LibVLC while is being initialized. These messages cannot be captured with this interface. @warning A deadlock may occur if this function is called from the callback.
@param p_instance: libvlc instance.
@version: LibVLC 2.1.0 or later.
'''
return libvlc_log_set(self, data, p_instance)
def log_set_file(self, stream):
'''Sets up logging to a file.
@param stream: FILE pointer opened for writing (the FILE pointer must remain valid until L{log_unset}()).
@version: LibVLC 2.1.0 or later.
'''
return libvlc_log_set_file(self, stream)
def media_new_location(self, psz_mrl):
'''Create a media with a certain given media resource location,
for instance a valid URL.
@note: To refer to a local file with this function,
the file://... URI syntax B{must} be used (see IETF RFC3986).
We recommend using L{media_new_path}() instead when dealing with
local files.
See L{media_release}.
@param psz_mrl: the media location.
@return: the newly created media or None on error.
'''
return libvlc_media_new_location(self, str_to_bytes(psz_mrl))
def media_new_path(self, path):
'''Create a media for a certain file path.
See L{media_release}.
@param path: local filesystem path.
@return: the newly created media or None on error.
'''
return libvlc_media_new_path(self, str_to_bytes(path))
def media_new_fd(self, fd):
'''Create a media for an already open file descriptor.
The file descriptor shall be open for reading (or reading and writing).
Regular file descriptors, pipe read descriptors and character device
descriptors (including TTYs) are supported on all platforms.
Block device descriptors are supported where available.
Directory descriptors are supported on systems that provide fdopendir().
Sockets are supported on all platforms where they are file descriptors,
i.e. all except Windows.
@note: This library will B{not} automatically close the file descriptor
under any circumstance. Nevertheless, a file descriptor can usually only be
rendered once in a media player. To render it a second time, the file
descriptor should probably be rewound to the beginning with lseek().
See L{media_release}.
@param fd: open file descriptor.
@return: the newly created media or None on error.
@version: LibVLC 1.1.5 and later.
'''
return libvlc_media_new_fd(self, fd)
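    # Usage sketch (hedged), with a hypothetical path:
    #
    #   import os
    #   fd = os.open('/tmp/movie.mp4', os.O_RDONLY)
    #   m = inst.media_new_fd(fd)
    #   # rewind with os.lseek(fd, 0, os.SEEK_SET) before rendering it a second time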
def media_new_callbacks(self, open_cb, read_cb, seek_cb, close_cb, opaque):
'''Create a media with custom callbacks to read the data from.
@param open_cb: callback to open the custom bitstream input media.
@param read_cb: callback to read data (must not be None).
@param seek_cb: callback to seek, or None if seeking is not supported.
@param close_cb: callback to close the media, or None if unnecessary.
@param opaque: data pointer for the open callback.
@return: the newly created media or None on error @note If open_cb is None, the opaque pointer will be passed to read_cb, seek_cb and close_cb, and the stream size will be treated as unknown. @note The callbacks may be called asynchronously (from another thread). A single stream instance need not be reentrant. However the open_cb needs to be reentrant if the media is used by multiple player instances. @warning The callbacks may be used until all or any player instances that were supplied the media item are stopped. See L{media_release}.
@version: LibVLC 3.0.0 and later.
'''
return libvlc_media_new_callbacks(self, open_cb, read_cb, seek_cb, close_cb, opaque)
def media_new_as_node(self, psz_name):
'''Create a media as an empty node with a given name.
See L{media_release}.
@param psz_name: the name of the node.
@return: the new empty media or None on error.
'''
return libvlc_media_new_as_node(self, str_to_bytes(psz_name))
def media_discoverer_new(self, psz_name):
'''Create a media discoverer object by name.
After this object is created, you should attach to media_list events in
order to be notified of new items discovered.
You need to call L{media_discoverer_start}() in order to start the
discovery.
See L{media_discoverer_media_list}
See libvlc_media_discoverer_event_manager
See L{media_discoverer_start}.
@param psz_name: service name; use L{media_discoverer_list_get}() to get a list of the discoverer names available in this libVLC instance.
        @return: media discoverer object, or None in case of error.
@version: LibVLC 3.0.0 or later.
'''
return libvlc_media_discoverer_new(self, str_to_bytes(psz_name))
def media_discoverer_list_get(self, i_cat, ppp_services):
'''Get media discoverer services by category.
@param i_cat: category of services to fetch.
@param ppp_services: address to store an allocated array of media discoverer services (must be freed with L{media_discoverer_list_release}() by the caller) [OUT].
@return: the number of media discoverer services (0 on error).
@version: LibVLC 3.0.0 and later.
'''
return libvlc_media_discoverer_list_get(self, i_cat, ppp_services)
def media_library_new(self):
        '''Create a new Media Library object.
@return: a new object or None on error.
'''
return libvlc_media_library_new(self)
def audio_output_list_get(self):
'''Gets the list of available audio output modules.
        @return: list of available audio outputs. It must be freed with libvlc_audio_output_list_release(). In case of error, None is returned.
'''
return libvlc_audio_output_list_get(self)
def audio_output_device_list_get(self, aout):
'''Gets a list of audio output devices for a given audio output module,
See L{audio_output_device_set}().
@note: Not all audio outputs support this. In particular, an empty (None)
list of devices does B{not} imply that the specified audio output does
not work.
@note: The list might not be exhaustive.
@warning: Some audio output devices in the list might not actually work in
some circumstances. By default, it is recommended to not specify any
explicit audio device.
@param aout: audio output name (as returned by L{audio_output_list_get}()).
@return: A None-terminated linked list of potential audio output devices. It must be freed with L{audio_output_device_list_release}().
@version: LibVLC 2.1.0 or later.
'''
return libvlc_audio_output_device_list_get(self, str_to_bytes(aout))
def renderer_discoverer_new(self, psz_name):
'''Create a renderer discoverer object by name
After this object is created, you should attach to events in order to be
notified of the discoverer events.
You need to call L{renderer_discoverer_start}() in order to start the
discovery.
See L{renderer_discoverer_event_manager}()
See L{renderer_discoverer_start}().
@param psz_name: service name; use L{renderer_discoverer_list_get}() to get a list of the discoverer names available in this libVLC instance.
        @return: renderer discoverer object, or None in case of error.
@version: LibVLC 3.0.0 or later.
'''
return libvlc_renderer_discoverer_new(self, str_to_bytes(psz_name))
def renderer_discoverer_list_get(self, ppp_services):
        '''Get renderer discoverer services.
        See libvlc_renderer_list_release().
        @param ppp_services: address to store an allocated array of renderer discoverer services (must be freed with libvlc_renderer_list_release() by the caller) [OUT].
        @return: the number of renderer discoverer services (0 on error).
@version: LibVLC 3.0.0 and later.
'''
return libvlc_renderer_discoverer_list_get(self, ppp_services)
def vlm_release(self):
'''Release the vlm instance related to the given L{Instance}.
'''
return libvlc_vlm_release(self)
def vlm_add_broadcast(self, psz_name, psz_input, psz_output, i_options, ppsz_options, b_enabled, b_loop):
'''Add a broadcast, with one input.
@param psz_name: the name of the new broadcast.
@param psz_input: the input MRL.
@param psz_output: the output MRL (the parameter to the "sout" variable).
@param i_options: number of additional options.
@param ppsz_options: additional options.
@param b_enabled: boolean for enabling the new broadcast.
        @param b_loop: should this broadcast be played in a loop?
@return: 0 on success, -1 on error.
'''
return libvlc_vlm_add_broadcast(self, str_to_bytes(psz_name), str_to_bytes(psz_input), str_to_bytes(psz_output), i_options, ppsz_options, b_enabled, b_loop)
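    # Usage sketch (hedged): define and start a broadcast; the output chain
    # string is a hypothetical example:
    #
    #   inst.vlm_add_broadcast('ch1', 'file:///tmp/movie.mp4',
    #                          '#rtp{mux=ts,dst=239.255.0.1,port=5004}',
    #                          0, None, True, False)
    #   inst.vlm_play_media('ch1')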
def vlm_add_vod(self, psz_name, psz_input, i_options, ppsz_options, b_enabled, psz_mux):
'''Add a vod, with one input.
@param psz_name: the name of the new vod media.
@param psz_input: the input MRL.
@param i_options: number of additional options.
@param ppsz_options: additional options.
@param b_enabled: boolean for enabling the new vod.
@param psz_mux: the muxer of the vod media.
@return: 0 on success, -1 on error.
'''
return libvlc_vlm_add_vod(self, str_to_bytes(psz_name), str_to_bytes(psz_input), i_options, ppsz_options, b_enabled, str_to_bytes(psz_mux))
def vlm_del_media(self, psz_name):
'''Delete a media (VOD or broadcast).
@param psz_name: the media to delete.
@return: 0 on success, -1 on error.
'''
return libvlc_vlm_del_media(self, str_to_bytes(psz_name))
def vlm_set_enabled(self, psz_name, b_enabled):
'''Enable or disable a media (VOD or broadcast).
@param psz_name: the media to work on.
@param b_enabled: the new status.
@return: 0 on success, -1 on error.
'''
return libvlc_vlm_set_enabled(self, str_to_bytes(psz_name), b_enabled)
def vlm_set_output(self, psz_name, psz_output):
'''Set the output for a media.
@param psz_name: the media to work on.
@param psz_output: the output MRL (the parameter to the "sout" variable).
@return: 0 on success, -1 on error.
'''
return libvlc_vlm_set_output(self, str_to_bytes(psz_name), str_to_bytes(psz_output))
def vlm_set_input(self, psz_name, psz_input):
'''Set a media's input MRL. This will delete all existing inputs and
add the specified one.
@param psz_name: the media to work on.
@param psz_input: the input MRL.
@return: 0 on success, -1 on error.
'''
return libvlc_vlm_set_input(self, str_to_bytes(psz_name), str_to_bytes(psz_input))
def vlm_add_input(self, psz_name, psz_input):
        '''Add an input MRL to a media, in addition to any existing inputs.
@param psz_name: the media to work on.
@param psz_input: the input MRL.
@return: 0 on success, -1 on error.
'''
return libvlc_vlm_add_input(self, str_to_bytes(psz_name), str_to_bytes(psz_input))
def vlm_set_loop(self, psz_name, b_loop):
'''Set a media's loop status.
@param psz_name: the media to work on.
@param b_loop: the new status.
@return: 0 on success, -1 on error.
'''
return libvlc_vlm_set_loop(self, str_to_bytes(psz_name), b_loop)
def vlm_set_mux(self, psz_name, psz_mux):
'''Set a media's vod muxer.
@param psz_name: the media to work on.
@param psz_mux: the new muxer.
@return: 0 on success, -1 on error.
'''
return libvlc_vlm_set_mux(self, str_to_bytes(psz_name), str_to_bytes(psz_mux))
def vlm_change_media(self, psz_name, psz_input, psz_output, i_options, ppsz_options, b_enabled, b_loop):
'''Edit the parameters of a media. This will delete all existing inputs and
add the specified one.
@param psz_name: the name of the new broadcast.
@param psz_input: the input MRL.
@param psz_output: the output MRL (the parameter to the "sout" variable).
@param i_options: number of additional options.
@param ppsz_options: additional options.
@param b_enabled: boolean for enabling the new broadcast.
        @param b_loop: should this broadcast be played in a loop?
@return: 0 on success, -1 on error.
'''
return libvlc_vlm_change_media(self, str_to_bytes(psz_name), str_to_bytes(psz_input), str_to_bytes(psz_output), i_options, ppsz_options, b_enabled, b_loop)
def vlm_play_media(self, psz_name):
'''Play the named broadcast.
@param psz_name: the name of the broadcast.
@return: 0 on success, -1 on error.
'''
return libvlc_vlm_play_media(self, str_to_bytes(psz_name))
def vlm_stop_media(self, psz_name):
'''Stop the named broadcast.
@param psz_name: the name of the broadcast.
@return: 0 on success, -1 on error.
'''
return libvlc_vlm_stop_media(self, str_to_bytes(psz_name))
def vlm_pause_media(self, psz_name):
'''Pause the named broadcast.
@param psz_name: the name of the broadcast.
@return: 0 on success, -1 on error.
'''
return libvlc_vlm_pause_media(self, str_to_bytes(psz_name))
def vlm_seek_media(self, psz_name, f_percentage):
'''Seek in the named broadcast.
@param psz_name: the name of the broadcast.
@param f_percentage: the percentage to seek to.
@return: 0 on success, -1 on error.
'''
return libvlc_vlm_seek_media(self, str_to_bytes(psz_name), f_percentage)
def vlm_show_media(self, psz_name):
'''Return information about the named media as a JSON
string representation.
This function is mainly intended for debugging use,
if you want programmatic access to the state of
a vlm_media_instance_t, please use the corresponding
        libvlc_vlm_get_media_instance_xxx functions.
Currently there are no such functions available for
vlm_media_t though.
@param psz_name: the name of the media, if the name is an empty string, all media is described.
@return: string with information about named media, or None on error.
'''
return libvlc_vlm_show_media(self, str_to_bytes(psz_name))
def vlm_get_media_instance_position(self, psz_name, i_instance):
'''Get vlm_media instance position by name or instance id.
@param psz_name: name of vlm media instance.
@param i_instance: instance id.
@return: position as float or -1. on error.
'''
return libvlc_vlm_get_media_instance_position(self, str_to_bytes(psz_name), i_instance)
def vlm_get_media_instance_time(self, psz_name, i_instance):
'''Get vlm_media instance time by name or instance id.
@param psz_name: name of vlm media instance.
@param i_instance: instance id.
@return: time as integer or -1 on error.
'''
return libvlc_vlm_get_media_instance_time(self, str_to_bytes(psz_name), i_instance)
def vlm_get_media_instance_length(self, psz_name, i_instance):
'''Get vlm_media instance length by name or instance id.
@param psz_name: name of vlm media instance.
@param i_instance: instance id.
@return: length of media item or -1 on error.
'''
return libvlc_vlm_get_media_instance_length(self, str_to_bytes(psz_name), i_instance)
def vlm_get_media_instance_rate(self, psz_name, i_instance):
'''Get vlm_media instance playback rate by name or instance id.
@param psz_name: name of vlm media instance.
@param i_instance: instance id.
@return: playback rate or -1 on error.
'''
return libvlc_vlm_get_media_instance_rate(self, str_to_bytes(psz_name), i_instance)
def vlm_get_media_instance_title(self, psz_name, i_instance):
'''Get vlm_media instance title number by name or instance id.
@param psz_name: name of vlm media instance.
@param i_instance: instance id.
@return: title as number or -1 on error.
@bug: will always return 0.
'''
return libvlc_vlm_get_media_instance_title(self, str_to_bytes(psz_name), i_instance)
def vlm_get_media_instance_chapter(self, psz_name, i_instance):
'''Get vlm_media instance chapter number by name or instance id.
@param psz_name: name of vlm media instance.
@param i_instance: instance id.
@return: chapter as number or -1 on error.
@bug: will always return 0.
'''
return libvlc_vlm_get_media_instance_chapter(self, str_to_bytes(psz_name), i_instance)
def vlm_get_media_instance_seekable(self, psz_name, i_instance):
'''Is the named vlm media instance seekable?
@param psz_name: name of vlm media instance.
@param i_instance: instance id.
@return: 1 if seekable, 0 if not, -1 if media does not exist.
@bug: will always return 0.
'''
return libvlc_vlm_get_media_instance_seekable(self, str_to_bytes(psz_name), i_instance)
@memoize_parameterless
def vlm_get_event_manager(self):
'''Get libvlc_event_manager from a vlm media.
The p_event_manager is immutable, so you don't have to hold the lock.
@return: libvlc_event_manager.
'''
return libvlc_vlm_get_event_manager(self)
class Media(_Ctype):
'''Create a new Media instance.
Usage: Media(MRL, *options)
See vlc.Instance.media_new documentation for details.
'''
def __new__(cls, *args):
if args:
i = args[0]
if isinstance(i, _Ints):
return _Constructor(cls, i)
if isinstance(i, Instance):
return i.media_new(*args[1:])
o = get_default_instance().media_new(*args)
return o
def get_instance(self):
return getattr(self, '_instance', None)
def add_options(self, *options):
"""Add a list of options to the media.
Options must be written without the double-dash. Warning: most
audio and video options, such as text renderer, have no
effect on an individual media. These options must be set at
the vlc.Instance or vlc.MediaPlayer instantiation.
@param options: optional media option=value strings
"""
for o in options:
self.add_option(o)
def tracks_get(self):
"""Get media descriptor's elementary streams description
Note, you need to call L{parse}() or play the media at least once
before calling this function.
Not doing this will result in an empty array.
The result must be freed with L{tracks_release}.
@version: LibVLC 2.1.0 and later.
"""
mediaTrack_pp = ctypes.POINTER(MediaTrack)()
n = libvlc_media_tracks_get(self, ctypes.byref(mediaTrack_pp))
info = ctypes.cast(mediaTrack_pp, ctypes.POINTER(ctypes.POINTER(MediaTrack) * n))
return info
def add_option(self, psz_options):
'''Add an option to the media.
This option will be used to determine how the media_player will
read the media. This allows the use of VLC's advanced
reading/streaming options on a per-media basis.
@note: The options are listed in 'vlc --long-help' from the command line,
e.g. "--sout-all". Keep in mind that available options and their semantics
vary across LibVLC versions and builds.
@warning: Not all options affect L{Media} objects:
Specifically, due to architectural issues most audio and video options,
such as text renderer options, have no effect on an individual media.
These options must be set through L{new}() instead.
@param psz_options: the options (as a string).
'''
return libvlc_media_add_option(self, str_to_bytes(psz_options))
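# A minimal usage sketch (the MRL and the option string are illustrative
# only; available options depend on the LibVLC build):
#
#   m = vlc.Media('file:///tmp/movie.mp4')
#   m.add_option('start-time=30.0')        # applies to this media only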
def add_option_flag(self, psz_options, i_flags):
'''Add an option to the media with configurable flags.
This option will be used to determine how the media_player will
read the media. This allows the use of VLC's advanced
reading/streaming options on a per-media basis.
The options are detailed in vlc --long-help, for instance
"--sout-all". Note that not all options are usable on media:
specifically, due to architectural issues, video-related options
such as text renderer options cannot be set on a single media. They
must be set on the whole libvlc instance instead.
@param psz_options: the options (as a string).
@param i_flags: the flags for this option.
'''
return libvlc_media_add_option_flag(self, str_to_bytes(psz_options), i_flags)
def retain(self):
'''Retain a reference to a media descriptor object (libvlc_media_t). Use
L{release}() to decrement the reference count of a
media descriptor object.
'''
return libvlc_media_retain(self)
def release(self):
'''Decrement the reference count of a media descriptor object. If the
reference count is 0, then L{release}() will release the
media descriptor object. It will send out a libvlc_MediaFreed event
to all listeners. If the media descriptor object has been released it
should not be used again.
'''
return libvlc_media_release(self)
def get_mrl(self):
'''Get the media resource locator (mrl) from a media descriptor object.
@return: string with mrl of media descriptor object.
'''
return libvlc_media_get_mrl(self)
def duplicate(self):
'''Duplicate a media descriptor object.
'''
return libvlc_media_duplicate(self)
def get_meta(self, e_meta):
'''Read the meta of the media.
If the media has not yet been parsed this will return None.
See L{parse}
See L{parse_with_options}
See libvlc_MediaMetaChanged.
@param e_meta: the meta to read.
@return: the media's meta.
'''
return libvlc_media_get_meta(self, e_meta)
def set_meta(self, e_meta, psz_value):
'''Set the meta of the media (this function will not save the meta, call
L{save_meta} in order to save the meta).
@param e_meta: the meta to write.
@param psz_value: the media's meta.
'''
return libvlc_media_set_meta(self, e_meta, str_to_bytes(psz_value))
def save_meta(self):
'''Save the meta previously set.
@return: true if the write operation was successful.
'''
return libvlc_media_save_meta(self)
def get_state(self):
'''Get current state of media descriptor object. Possible media states are
libvlc_NothingSpecial=0, libvlc_Opening, libvlc_Playing, libvlc_Paused,
libvlc_Stopped, libvlc_Ended, libvlc_Error.
See libvlc_state_t.
@return: state of media descriptor object.
'''
return libvlc_media_get_state(self)
def get_stats(self, p_stats):
'''Get the current statistics about the media.
@param p_stats: structure that contains the statistics about the media (this structure must be allocated by the caller).
@return: true if the statistics are available, false otherwise \libvlc_return_bool.
'''
return libvlc_media_get_stats(self, p_stats)
def subitems(self):
'''Get subitems of media descriptor object. This will increment
the reference count of supplied media descriptor object. Use
L{list_release}() to decrement the reference counting.
@return: list of media descriptor subitems or None.
'''
return libvlc_media_subitems(self)
@memoize_parameterless
def event_manager(self):
'''Get event manager from media descriptor object.
NOTE: this function doesn't increment reference counting.
@return: event manager object.
'''
return libvlc_media_event_manager(self)
def get_duration(self):
'''Get duration (in ms) of media descriptor object item.
@return: duration of media item or -1 on error.
'''
return libvlc_media_get_duration(self)
def parse(self):
'''Parse a media.
This fetches (local) art, meta data and tracks information.
The method is synchronous.
See L{parse_with_options}
See L{get_meta}
See libvlc_media_get_tracks_info.
'''
return libvlc_media_parse(self)
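# A minimal synchronous-parse sketch (hypothetical MRL; Meta is the enum
# defined elsewhere in this module):
#
#   m = vlc.Media('file:///tmp/song.mp3')
#   m.parse()                              # blocking, local resources only
#   print(m.get_meta(vlc.Meta.Title))      # None if the tag is absent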
def parse_with_options(self, parse_flag, timeout):
'''Parse the media asynchronously with options.
This fetches (local or network) art, meta data and/or tracks information.
This method is the extended asynchronous version of L{parse}().
To track when this is over you can listen to libvlc_MediaParsedChanged
event. However if this functions returns an error, you will not receive any
events.
It uses a flag to specify parse options (see libvlc_media_parse_flag_t). All
these flags can be combined. By default, media is parsed if it's a local
file.
See libvlc_MediaParsedChanged
See L{get_meta}
See L{tracks_get}
See L{get_parsed_status}
See libvlc_media_parse_flag_t.
@param parse_flag: parse options:
@param timeout: maximum time allowed to preparse the media. If -1, the default "preparse-timeout" option will be used as a timeout. If 0, it will wait indefinitely. If > 0, the timeout will be used (in milliseconds).
@return: -1 in case of error, 0 otherwise.
@version: LibVLC 3.0.0 or later.
'''
return libvlc_media_parse_with_options(self, parse_flag, timeout)
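# A minimal asynchronous-parse sketch (hypothetical MRL; MediaParseFlag and
# EventType are the enums defined elsewhere in this module):
#
#   m = vlc.Media('http://example.com/stream.mp4')
#   m.event_manager().event_attach(
#       vlc.EventType.MediaParsedChanged,
#       lambda ev: print('status:', m.get_parsed_status()))
#   m.parse_with_options(vlc.MediaParseFlag.network, 5000)   # 5 s timeout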
def get_parsed_status(self):
'''Get Parsed status for media descriptor object.
See libvlc_MediaParsedChanged
See libvlc_media_parsed_status_t.
@return: a value of the libvlc_media_parsed_status_t enum.
@version: LibVLC 3.0.0 or later.
'''
return libvlc_media_get_parsed_status(self)
def set_user_data(self, p_new_user_data):
'''Sets media descriptor's user_data. user_data is specialized data
accessed by the host application, VLC.framework uses it as a pointer to
a native object that references a L{Media} pointer.
@param p_new_user_data: pointer to user data.
'''
return libvlc_media_set_user_data(self, p_new_user_data)
def get_user_data(self):
'''Get media descriptor's user_data. user_data is specialized data
accessed by the host application, VLC.framework uses it as a pointer to
a native object that references a L{Media} pointer.
'''
return libvlc_media_get_user_data(self)
def get_type(self):
'''Get the media type of the media descriptor object.
@return: media type.
@version: LibVLC 3.0.0 and later. See libvlc_media_type_t.
'''
return libvlc_media_get_type(self)
def slaves_add(self, i_type, i_priority, psz_uri):
'''Add a slave to the current media.
A slave is an external input source that may contain an additional subtitle
track (like a .srt) or an additional audio track (like a .ac3).
@note: This function must be called before the media is parsed (via
L{parse_with_options}()) or before the media is played (via
L{player_play}()).
@param i_type: subtitle or audio.
@param i_priority: from 0 (low priority) to 4 (high priority).
@param psz_uri: Uri of the slave (should contain a valid scheme).
@return: 0 on success, -1 on error.
@version: LibVLC 3.0.0 and later.
'''
return libvlc_media_slaves_add(self, i_type, i_priority, str_to_bytes(psz_uri))
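# A minimal usage sketch (hypothetical URIs; MediaSlaveType is the enum
# defined elsewhere in this module; must run before parsing or playing):
#
#   m = vlc.Media('file:///tmp/movie.mp4')
#   m.slaves_add(vlc.MediaSlaveType.subtitle, 4, 'file:///tmp/movie.srt')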
def slaves_clear(self):
'''Clear all slaves previously added by L{slaves_add}() or
internally.
@version: LibVLC 3.0.0 and later.
'''
return libvlc_media_slaves_clear(self)
def slaves_get(self, ppp_slaves):
'''Get a media descriptor's slave list
The list will contain slaves parsed by VLC or previously added by
L{slaves_add}(). The typical use case of this function is to save
a list of slaves in a database for later use.
@param ppp_slaves: address to store an allocated array of slaves (must be freed with L{slaves_release}()) [OUT].
@return: the number of slaves (zero on error).
@version: LibVLC 3.0.0 and later. See L{slaves_add}.
'''
return libvlc_media_slaves_get(self, ppp_slaves)
def player_new_from_media(self):
'''Create a Media Player object from a Media.
@return: a new media player object, or None on error.
'''
return libvlc_media_player_new_from_media(self)
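# A minimal usage sketch (hypothetical MRL):
#
#   m = vlc.Media('file:///tmp/movie.mp4')
#   p = m.player_new_from_media()
#   p.play()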
class MediaDiscoverer(_Ctype):
'''Media discoverer object, used for services discovery.
'''
def __new__(cls, ptr=_internal_guard):
'''(INTERNAL) ctypes wrapper constructor.
'''
return _Constructor(cls, ptr)
def start(self):
'''Start media discovery.
To stop it, call L{stop}() or
L{list_release}() directly.
See L{stop}.
@return: -1 in case of error, 0 otherwise.
@version: LibVLC 3.0.0 or later.
'''
return libvlc_media_discoverer_start(self)
def stop(self):
'''Stop media discovery.
See L{start}.
@version: LibVLC 3.0.0 or later.
'''
return libvlc_media_discoverer_stop(self)
def release(self):
'''Release media discoverer object. If the reference count reaches 0, then
the object will be released.
'''
return libvlc_media_discoverer_release(self)
def media_list(self):
'''Get media service discover media list.
@return: list of media items.
'''
return libvlc_media_discoverer_media_list(self)
def is_running(self):
'''Query if media service discover object is running.
@return: true if running, false if not \libvlc_return_bool.
'''
return libvlc_media_discoverer_is_running(self)
class MediaLibrary(_Ctype):
'''Media library object.
'''
def __new__(cls, ptr=_internal_guard):
'''(INTERNAL) ctypes wrapper constructor.
'''
return _Constructor(cls, ptr)
def release(self):
'''Release media library object. This function decrements the
reference count of the media library object. If it reaches 0,
then the object will be released.
'''
return libvlc_media_library_release(self)
def retain(self):
'''Retain a reference to a media library object. This function will
increment the reference counting for this object. Use
L{release}() to decrement the reference count.
'''
return libvlc_media_library_retain(self)
def load(self):
'''Load media library.
@return: 0 on success, -1 on error.
'''
return libvlc_media_library_load(self)
def media_list(self):
'''Get media library subitems.
@return: media list subitems.
'''
return libvlc_media_library_media_list(self)
class MediaList(_Ctype):
'''Create a new MediaList instance.
Usage: MediaList(list_of_MRLs)
See vlc.Instance.media_list_new documentation for details.
'''
def __new__(cls, *args):
if args:
i = args[0]
if isinstance(i, _Ints):
return _Constructor(cls, i)
if isinstance(i, Instance):
return i.media_list_new(*args[1:])
o = get_default_instance().media_list_new(*args)
return o
def get_instance(self):
return getattr(self, '_instance', None)
def add_media(self, mrl):
"""Add media instance to media list.
The L{lock} should be held upon entering this function.
@param mrl: a media instance or a MRL.
@return: 0 on success, -1 if the media list is read-only.
"""
if isinstance(mrl, basestring):
mrl = (self.get_instance() or get_default_instance()).media_new(mrl)
return libvlc_media_list_add_media(self, mrl)
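# A minimal usage sketch (hypothetical MRLs; plain strings are turned into
# Media objects via the associated or default Instance):
#
#   ml = vlc.MediaList()
#   ml.lock()
#   ml.add_media('file:///tmp/a.mp3')
#   ml.add_media('file:///tmp/b.mp3')
#   ml.unlock()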
def release(self):
'''Release media list created with L{new}().
'''
return libvlc_media_list_release(self)
def retain(self):
'''Retain reference to a media list.
'''
return libvlc_media_list_retain(self)
def set_media(self, p_md):
'''Associate media instance with this media list instance.
If another media instance was present it will be released.
The L{lock} should NOT be held upon entering this function.
@param p_md: media instance to add.
'''
return libvlc_media_list_set_media(self, p_md)
def media(self):
'''Get media instance from this media list instance. This action will increase
the refcount on the media instance.
The L{lock} should NOT be held upon entering this function.
@return: media instance.
'''
return libvlc_media_list_media(self)
def insert_media(self, p_md, i_pos):
'''Insert media instance in media list at a position
The L{lock} should be held upon entering this function.
@param p_md: a media instance.
@param i_pos: position in array where to insert.
@return: 0 on success, -1 if the media list is read-only.
'''
return libvlc_media_list_insert_media(self, p_md, i_pos)
def remove_index(self, i_pos):
'''Remove media instance from media list at a position
The L{lock} should be held upon entering this function.
@param i_pos: position in the array of the item to remove.
@return: 0 on success, -1 if the list is read-only or the item was not found.
'''
return libvlc_media_list_remove_index(self, i_pos)
def count(self):
'''Get count of media list items
The L{lock} should be held upon entering this function.
@return: number of items in media list.
'''
return libvlc_media_list_count(self)
def __len__(self):
return libvlc_media_list_count(self)
def item_at_index(self, i_pos):
'''Get media instance from media list at a position
The L{lock} should be held upon entering this function.
@param i_pos: position in the array of the item to retrieve.
@return: media instance at position i_pos, or None if not found. In case of success, L{media_retain}() is called to increase the refcount on the media.
'''
return libvlc_media_list_item_at_index(self, i_pos)
def __getitem__(self, i):
return libvlc_media_list_item_at_index(self, i)
def __iter__(self):
for i in range(len(self)):
yield self[i]
def index_of_item(self, p_md):
'''Find index position of media instance in media list.
Warning: the function will return the first matched position.
The L{lock} should be held upon entering this function.
@param p_md: media instance.
@return: position of media instance or -1 if media not found.
'''
return libvlc_media_list_index_of_item(self, p_md)
def is_readonly(self):
'''This indicates if this media list is read-only from a user point of view.
@return: 1 on readonly, 0 on readwrite \libvlc_return_bool.
'''
return libvlc_media_list_is_readonly(self)
def lock(self):
'''Get lock on media list items.
'''
return libvlc_media_list_lock(self)
def unlock(self):
'''Release lock on media list items
The L{lock} should be held upon entering this function.
'''
return libvlc_media_list_unlock(self)
@memoize_parameterless
def event_manager(self):
'''Get libvlc_event_manager from this media list instance.
The p_event_manager is immutable, so you don't have to hold the lock.
@return: libvlc_event_manager.
'''
return libvlc_media_list_event_manager(self)
class MediaListPlayer(_Ctype):
'''Create a new MediaListPlayer instance.
It may take as parameter either:
- a vlc.Instance
- nothing
'''
def __new__(cls, arg=None):
if arg is None:
i = get_default_instance()
elif isinstance(arg, Instance):
i = arg
elif isinstance(arg, _Ints):
return _Constructor(cls, arg)
else:
raise TypeError('MediaListPlayer %r' % (arg,))
return i.media_list_player_new()
def get_instance(self):
"""Return the associated Instance.
"""
return self._instance #PYCHOK expected
def release(self):
'''Release a media_list_player after use
Decrement the reference count of a media list player object. If the
reference count is 0, then L{release}() will
release the media list player object. If the media list player object
has been released, then it should not be used again.
'''
return libvlc_media_list_player_release(self)
def retain(self):
'''Retain a reference to a media list player object. Use
L{release}() to decrement reference count.
'''
return libvlc_media_list_player_retain(self)
@memoize_parameterless
def event_manager(self):
'''Return the event manager of this media_list_player.
@return: the event manager.
'''
return libvlc_media_list_player_event_manager(self)
def set_media_player(self, p_mi):
'''Replace media player in media_list_player with this instance.
@param p_mi: media player instance.
'''
return libvlc_media_list_player_set_media_player(self, p_mi)
def get_media_player(self):
'''Get media player of the media_list_player instance.
@return: media player instance @note the caller is responsible for releasing the returned instance.
'''
return libvlc_media_list_player_get_media_player(self)
def set_media_list(self, p_mlist):
'''Set the media list associated with the player.
@param p_mlist: list of media.
'''
return libvlc_media_list_player_set_media_list(self, p_mlist)
def play(self):
'''Play media list.
'''
return libvlc_media_list_player_play(self)
def pause(self):
'''Toggle pause (or resume) media list.
'''
return libvlc_media_list_player_pause(self)
def is_playing(self):
'''Is media list playing?
@return: true for playing and false for not playing \libvlc_return_bool.
'''
return libvlc_media_list_player_is_playing(self)
def get_state(self):
'''Get current libvlc_state of media list player.
@return: libvlc_state_t for media list player.
'''
return libvlc_media_list_player_get_state(self)
def play_item_at_index(self, i_index):
'''Play media list item at position index.
@param i_index: index in media list to play.
@return: 0 upon success, -1 if the item wasn't found.
'''
return libvlc_media_list_player_play_item_at_index(self, i_index)
def __getitem__(self, i):
return libvlc_media_list_player_play_item_at_index(self, i)
def __iter__(self):
for i in range(len(self)):
yield self[i]
def play_item(self, p_md):
'''Play the given media item.
@param p_md: the media instance.
@return: 0 upon success, -1 if the media is not part of the media list.
'''
return libvlc_media_list_player_play_item(self, p_md)
def stop(self):
'''Stop playing media list.
'''
return libvlc_media_list_player_stop(self)
def next(self):
'''Play next item from media list.
@return: 0 upon success, -1 if there is no next item.
'''
return libvlc_media_list_player_next(self)
def previous(self):
'''Play previous item from media list.
@return: 0 upon success, -1 if there is no previous item.
'''
return libvlc_media_list_player_previous(self)
def set_playback_mode(self, e_mode):
'''Sets the playback mode for the playlist.
@param e_mode: playback mode specification.
'''
return libvlc_media_list_player_set_playback_mode(self, e_mode)
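# A minimal end-to-end sketch for this class (hypothetical MRLs;
# PlaybackMode is the enum defined elsewhere in this module):
#
#   mlp = vlc.MediaListPlayer()
#   mlp.set_media_list(vlc.MediaList(['file:///tmp/a.mp3', 'file:///tmp/b.mp3']))
#   mlp.set_playback_mode(vlc.PlaybackMode.loop)
#   mlp.play()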
class MediaPlayer(_Ctype):
'''Create a new MediaPlayer instance.
It may take as parameter either:
- a string (media URI), options... In this case, a vlc.Instance will be created.
- a vlc.Instance, a string (media URI), options...
'''
def __new__(cls, *args):
if len(args) == 1 and isinstance(args[0], _Ints):
return _Constructor(cls, args[0])
if args and isinstance(args[0], Instance):
instance = args[0]
args = args[1:]
else:
instance = get_default_instance()
o = instance.media_player_new()
if args:
o.set_media(instance.media_new(*args))
return o
def get_instance(self):
"""Return the associated Instance.
"""
return self._instance #PYCHOK expected
def set_mrl(self, mrl, *options):
"""Set the MRL to play.
Warning: most audio and video options, such as text renderer,
have no effect on an individual media. These options must be
set at the vlc.Instance or vlc.MediaPlayer instantiation.
@param mrl: The MRL
@param options: optional media option=value strings
@return: the Media object
"""
m = self.get_instance().media_new(mrl, *options)
self.set_media(m)
return m
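# A minimal usage sketch (the MRL and the 'no-video' option are
# illustrative only):
#
#   p = vlc.MediaPlayer()
#   p.set_mrl('file:///tmp/movie.mp4', 'no-video')   # audio-only playback
#   p.play()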
def video_get_spu_description(self):
"""Get the description of available video subtitles.
"""
return track_description_list(libvlc_video_get_spu_description(self))
def video_get_title_description(self):
"""Get the description of available titles.
"""
return track_description_list(libvlc_video_get_title_description(self))
def video_get_chapter_description(self, title):
"""Get the description of available chapters for specific title.
@param title: selected title (int)
"""
return track_description_list(libvlc_video_get_chapter_description(self, title))
def video_get_track_description(self):
"""Get the description of available video tracks.
"""
return track_description_list(libvlc_video_get_track_description(self))
def audio_get_track_description(self):
"""Get the description of available audio tracks.
"""
return track_description_list(libvlc_audio_get_track_description(self))
def get_full_title_descriptions(self):
'''Get the full description of available titles.
@return: the titles list
@version: LibVLC 3.0.0 and later.
'''
titleDescription_pp = ctypes.POINTER(TitleDescription)()
n = libvlc_media_player_get_full_title_descriptions(self, ctypes.byref(titleDescription_pp))
info = ctypes.cast(titleDescription_pp, ctypes.POINTER(ctypes.POINTER(TitleDescription) * n))
return info
def get_full_chapter_descriptions(self, i_chapters_of_title):
'''Get the full description of available chapters.
@param i_chapters_of_title: index of the title to query for chapters (uses current title if set to -1).
@return: the chapters list
@version: LibVLC 3.0.0 and later.
'''
chapterDescription_pp = ctypes.POINTER(ChapterDescription)()
n = libvlc_media_player_get_full_chapter_descriptions(self, ctypes.byref(chapterDescription_pp))
info = ctypes.cast(chapterDescription_pp, ctypes.POINTER(ctypes.POINTER(ChapterDescription) * n))
return info
def video_get_size(self, num=0):
"""Get the video size in pixels as 2-tuple (width, height).
@param num: video number (default 0).
"""
r = libvlc_video_get_size(self, num)
if isinstance(r, tuple) and len(r) == 2:
return r
else:
raise VLCException('invalid video number (%s)' % (num,))
def set_hwnd(self, drawable):
"""Set a Win32/Win64 API window handle (HWND).
Specify where the media player should render its video
output. If LibVLC was built without Win32/Win64 API output
support, then this has no effects.
@param drawable: windows handle of the drawable.
"""
if not isinstance(drawable, ctypes.c_void_p):
drawable = ctypes.c_void_p(int(drawable))
libvlc_media_player_set_hwnd(self, drawable)
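# A minimal Win32 embedding sketch (assumes Python 3's tkinter on Windows,
# where winfo_id() yields the frame's HWND):
#
#   import tkinter as tk
#   root = tk.Tk()
#   frame = tk.Frame(root, width=640, height=360); frame.pack()
#   p = vlc.MediaPlayer('file:///tmp/movie.mp4')
#   p.set_hwnd(frame.winfo_id())
#   p.play(); root.mainloop()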
def video_get_width(self, num=0):
"""Get the width of a video in pixels.
@param num: video number (default 0).
"""
return self.video_get_size(num)[0]
def video_get_height(self, num=0):
"""Get the height of a video in pixels.
@param num: video number (default 0).
"""
return self.video_get_size(num)[1]
def video_get_cursor(self, num=0):
"""Get the mouse pointer coordinates over a video as 2-tuple (x, y).
Coordinates are expressed in terms of the decoded video resolution,
B{not} in terms of pixels on the screen/viewport. To get the
latter, you must query your windowing system directly.
Either coordinate may be negative or larger than the corresponding
size of the video, if the cursor is outside the rendering area.
@warning: The coordinates may be out-of-date if the pointer is not
located on the video rendering area. LibVLC does not track the
mouse pointer if the latter is outside the video widget.
@note: LibVLC does not support multiple mouse pointers (but does
support multiple input devices sharing the same pointer).
@param num: video number (default 0).
"""
r = libvlc_video_get_cursor(self, num)
if isinstance(r, tuple) and len(r) == 2:
return r
raise VLCException('invalid video number (%s)' % (num,))
def get_fps(self):
'''Get movie fps rate
This function is provided for backward compatibility. It cannot deal with
multiple video tracks. In LibVLC versions prior to 3.0, it would also fail
if the file format did not convey the frame rate explicitly.
\deprecated Consider using L{media_tracks_get}() instead.
@return: frames per second (fps) for this playing movie, or 0 if unspecified.
'''
return libvlc_media_player_get_fps(self)
def set_agl(self, drawable):
'''\deprecated Use L{set_nsobject}() instead.
'''
return libvlc_media_player_set_agl(self, drawable)
def get_agl(self):
'''\deprecated Use L{get_nsobject}() instead.
'''
return libvlc_media_player_get_agl(self)
def release(self):
'''Release a media_player after use
Decrement the reference count of a media player object. If the
reference count is 0, then L{release}() will
release the media player object. If the media player object
has been released, then it should not be used again.
'''
return libvlc_media_player_release(self)
def retain(self):
'''Retain a reference to a media player object. Use
L{release}() to decrement reference count.
'''
return libvlc_media_player_retain(self)
def set_media(self, p_md):
'''Set the media that will be used by the media_player. If any,
previous md will be released.
@param p_md: the Media. Afterwards the p_md can be safely destroyed.
'''
return libvlc_media_player_set_media(self, p_md)
def get_media(self):
'''Get the media used by the media_player.
@return: the media associated with p_mi, or None if no media is associated.
'''
return libvlc_media_player_get_media(self)
@memoize_parameterless
def event_manager(self):
'''Get the Event Manager from which the media player sends events.
@return: the event manager associated with p_mi.
'''
return libvlc_media_player_event_manager(self)
def is_playing(self):
'''is_playing.
@return: 1 if the media player is playing, 0 otherwise \libvlc_return_bool.
'''
return libvlc_media_player_is_playing(self)
def play(self):
'''Play.
@return: 0 if playback started (and was already started), or -1 on error.
'''
return libvlc_media_player_play(self)
def set_pause(self, do_pause):
'''Pause or resume (no effect if there is no media).
@param do_pause: play/resume if zero, pause if non-zero.
@version: LibVLC 1.1.1 or later.
'''
return libvlc_media_player_set_pause(self, do_pause)
def pause(self):
'''Toggle pause (no effect if there is no media).
'''
return libvlc_media_player_pause(self)
def stop(self):
'''Stop (no effect if there is no media).
'''
return libvlc_media_player_stop(self)
def set_renderer(self, p_item):
'''Set a renderer to the media player
@note: must be called before the first call of L{play}() to
take effect.
See L{renderer_discoverer_new}.
@param p_item: an item discovered by L{renderer_discoverer_start}().
@return: 0 on success, -1 on error.
@version: LibVLC 3.0.0 or later.
'''
return libvlc_media_player_set_renderer(self, p_item)
def video_set_callbacks(self, lock, unlock, display, opaque):
'''Set callbacks and private data to render decoded video to a custom area
in memory.
Use L{video_set_format}() or L{video_set_format_callbacks}()
to configure the decoded format.
@warning: Rendering video into custom memory buffers is considerably less
efficient than rendering in a custom window as normal.
For optimal performance, VLC media player renders into a custom window, and
does not use this function and associated callbacks. It is B{highly
recommended} that other LibVLC-based applications do likewise.
To embed video in a window, use libvlc_media_player_set_xid() or equivalent
depending on the operating system.
If window embedding does not fit the application use case, then a custom
LibVLC video output display plugin is required to maintain optimal video
rendering performance.
The following limitations affect performance:
- Hardware video decoding acceleration will either be disabled completely,
or require (relatively slow) copy from video/DSP memory to main memory.
- Sub-pictures (subtitles, on-screen display, etc.) must be blended into the
main picture by the CPU instead of the GPU.
- Depending on the video format, pixel format conversion, picture scaling,
cropping and/or picture re-orientation, must be performed by the CPU
instead of the GPU.
- Memory copying is required between LibVLC reference picture buffers and
application buffers (between lock and unlock callbacks).
@param lock: callback to lock video memory (must not be None).
@param unlock: callback to unlock video memory (or None if not needed).
@param display: callback to display video (or None if not needed).
@param opaque: private pointer for the three callbacks (as first parameter).
@version: LibVLC 1.1.1 or later.
'''
return libvlc_video_set_callbacks(self, lock, unlock, display, opaque)
def video_set_format(self, chroma, width, height, pitch):
'''Set decoded video chroma and dimensions.
This only works in combination with L{video_set_callbacks}(),
and is mutually exclusive with L{video_set_format_callbacks}().
@param chroma: a four-characters string identifying the chroma (e.g. "RV32" or "YUYV").
@param width: pixel width.
@param height: pixel height.
@param pitch: line pitch (in bytes).
@version: LibVLC 1.1.1 or later.
@bug: All pixel planes are expected to have the same pitch. To use the YCbCr color space with chrominance subsampling, consider using L{video_set_format_callbacks}() instead.
'''
return libvlc_video_set_format(self, str_to_bytes(chroma), width, height, pitch)
def video_set_format_callbacks(self, setup, cleanup):
'''Set decoded video chroma and dimensions. This only works in combination with
L{video_set_callbacks}().
@param setup: callback to select the video format (cannot be None).
@param cleanup: callback to release any allocated resources (or None).
@version: LibVLC 2.0.0 or later.
'''
return libvlc_video_set_format_callbacks(self, setup, cleanup)
def set_nsobject(self, drawable):
'''Set the NSView handler where the media player should render its video output.
Use the vout called "macosx".
The drawable is an NSObject that follows the VLCOpenGLVideoViewEmbedding
protocol:
@code.m
\@protocol VLCOpenGLVideoViewEmbedding <NSObject>
- (void)addVoutSubview:(NSView *)view;
- (void)removeVoutSubview:(NSView *)view;
\@end
@endcode
Or it can be an NSView object.
If you want to use it along with Qt see the QMacCocoaViewContainer. Then
the following code should work:
@code.mm
NSView *video = [[NSView alloc] init];
QMacCocoaViewContainer *container = new QMacCocoaViewContainer(video, parent);
L{set_nsobject}(mp, video);
[video release];
@endcode
You can find a live example in VLCVideoView in VLCKit.framework.
@param drawable: the drawable that is either an NSView or an object following the VLCOpenGLVideoViewEmbedding protocol.
'''
return libvlc_media_player_set_nsobject(self, drawable)
def get_nsobject(self):
'''Get the NSView handler previously set with L{set_nsobject}().
@return: the NSView handler, or 0 if none was set.
'''
return libvlc_media_player_get_nsobject(self)
def set_xwindow(self, drawable):
'''Set an X Window System drawable where the media player should render its
video output. The call takes effect when the playback starts. If it is
already started, it might need to be stopped before changes apply.
If LibVLC was built without X11 output support, then this function has no
effects.
By default, LibVLC will capture input events on the video rendering area.
Use L{video_set_mouse_input}() and L{video_set_key_input}() to
disable that and deliver events to the parent window / to the application
instead. By design, the X11 protocol delivers input events to only one
recipient.
@warning
The application must call the XInitThreads() function from Xlib before
L{new}(), and before any call to XOpenDisplay() directly or via any
other library. Failure to call XInitThreads() will seriously impede LibVLC
performance. Calling XOpenDisplay() before XInitThreads() will eventually
crash the process. That is a limitation of Xlib.
@param drawable: X11 window ID @note The specified identifier must correspond to an existing Input/Output class X11 window. Pixmaps are B{not} currently supported. The default X11 server is assumed, i.e. that specified in the DISPLAY environment variable. @warning LibVLC can deal with invalid X11 handle errors, however some display drivers (EGL, GLX, VA and/or VDPAU) can unfortunately not. Thus the window handle must remain valid until playback is stopped, otherwise the process may abort or crash.
@bug No more than one window handle per media player instance can be specified. If the media has multiple simultaneously active video tracks, extra tracks will be rendered into external windows beyond the control of the application.
'''
return libvlc_media_player_set_xwindow(self, drawable)
def get_xwindow(self):
'''Get the X Window System window identifier previously set with
L{set_xwindow}(). Note that this will return the identifier
even if VLC is not currently using it (for instance if it is playing an
audio-only input).
@return: an X window ID, or 0 if none was set.
'''
return libvlc_media_player_get_xwindow(self)
def get_hwnd(self):
'''Get the Windows API window handle (HWND) previously set with
L{set_hwnd}(). The handle will be returned even if LibVLC
is not currently outputting any video to it.
@return: a window handle or None if there are none.
'''
return libvlc_media_player_get_hwnd(self)
def set_android_context(self, p_awindow_handler):
'''Set the android context.
@param p_awindow_handler: org.videolan.libvlc.IAWindowNativeHandler jobject implemented by the org.videolan.libvlc.MediaPlayer class from the libvlc-android project.
@version: LibVLC 3.0.0 and later.
'''
return libvlc_media_player_set_android_context(self, p_awindow_handler)
def set_evas_object(self, p_evas_object):
'''Set the EFL Evas Object.
@param p_evas_object: a valid EFL Evas Object (Evas_Object).
@return: -1 if an error was detected, 0 otherwise.
@version: LibVLC 3.0.0 and later.
'''
return libvlc_media_player_set_evas_object(self, p_evas_object)
def audio_set_callbacks(self, play, pause, resume, flush, drain, opaque):
'''Sets callbacks and private data for decoded audio.
Use L{audio_set_format}() or L{audio_set_format_callbacks}()
to configure the decoded audio format.
@note: The audio callbacks override any other audio output mechanism.
If the callbacks are set, LibVLC will B{not} output audio in any way.
@param play: callback to play audio samples (must not be None).
@param pause: callback to pause playback (or None to ignore).
@param resume: callback to resume playback (or None to ignore).
@param flush: callback to flush audio buffers (or None to ignore).
@param drain: callback to drain audio buffers (or None to ignore).
@param opaque: private pointer for the audio callbacks (as first parameter).
@version: LibVLC 2.0.0 or later.
'''
return libvlc_audio_set_callbacks(self, play, pause, resume, flush, drain, opaque)
def audio_set_volume_callback(self, set_volume):
'''Set callbacks and private data for decoded audio. This only works in
combination with L{audio_set_callbacks}().
Use L{audio_set_format}() or L{audio_set_format_callbacks}()
to configure the decoded audio format.
@param set_volume: callback to apply audio volume, or None to apply volume in software.
@version: LibVLC 2.0.0 or later.
'''
return libvlc_audio_set_volume_callback(self, set_volume)
def audio_set_format_callbacks(self, setup, cleanup):
'''Sets decoded audio format via callbacks.
This only works in combination with L{audio_set_callbacks}().
@param setup: callback to select the audio format (cannot be None).
@param cleanup: callback to release any allocated resources (or None).
@version: LibVLC 2.0.0 or later.
'''
return libvlc_audio_set_format_callbacks(self, setup, cleanup)
def audio_set_format(self, format, rate, channels):
'''Sets a fixed decoded audio format.
This only works in combination with L{audio_set_callbacks}(),
and is mutually exclusive with L{audio_set_format_callbacks}().
@param format: a four-characters string identifying the sample format (e.g. "S16N" or "FL32").
@param rate: sample rate (expressed in Hz).
@param channels: channels count.
@version: LibVLC 2.0.0 or later.
'''
return libvlc_audio_set_format(self, str_to_bytes(format), rate, channels)
def get_length(self):
'''Get the current movie length (in ms).
@return: the movie length (in ms), or -1 if there is no media.
'''
return libvlc_media_player_get_length(self)
def get_time(self):
'''Get the current movie time (in ms).
@return: the movie time (in ms), or -1 if there is no media.
'''
return libvlc_media_player_get_time(self)
def set_time(self, i_time):
'''Set the movie time (in ms). This has no effect if no media is being played.
Not all formats and protocols support this.
@param i_time: the movie time (in ms).
'''
return libvlc_media_player_set_time(self, i_time)
def get_position(self):
'''Get movie position as percentage between 0.0 and 1.0.
@return: movie position, or -1. in case of error.
'''
return libvlc_media_player_get_position(self)
def set_position(self, f_pos):
'''Set movie position as percentage between 0.0 and 1.0.
This has no effect if playback is not enabled.
This might not work depending on the underlying input format and protocol.
@param f_pos: the position.
'''
return libvlc_media_player_set_position(self, f_pos)
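# A minimal seeking sketch (``p`` is a playing MediaPlayer; the values are
# illustrative):
#
#   p.set_time(60 * 1000)    # jump to the 1-minute mark (milliseconds)
#   p.set_position(0.5)      # or jump to the middle (fraction 0.0-1.0)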
def set_chapter(self, i_chapter):
'''Set movie chapter (if applicable).
@param i_chapter: chapter number to play.
'''
return libvlc_media_player_set_chapter(self, i_chapter)
def get_chapter(self):
'''Get movie chapter.
@return: chapter number currently playing, or -1 if there is no media.
'''
return libvlc_media_player_get_chapter(self)
def get_chapter_count(self):
'''Get movie chapter count.
@return: number of chapters in movie, or -1.
'''
return libvlc_media_player_get_chapter_count(self)
def will_play(self):
'''Is the player able to play.
@return: boolean \libvlc_return_bool.
'''
return libvlc_media_player_will_play(self)
def get_chapter_count_for_title(self, i_title):
'''Get title chapter count.
@param i_title: title.
@return: number of chapters in title, or -1.
'''
return libvlc_media_player_get_chapter_count_for_title(self, i_title)
def set_title(self, i_title):
'''Set movie title.
@param i_title: title number to play.
'''
return libvlc_media_player_set_title(self, i_title)
def get_title(self):
'''Get movie title.
@return: title number currently playing, or -1.
'''
return libvlc_media_player_get_title(self)
def get_title_count(self):
'''Get movie title count.
@return: title number count, or -1.
'''
return libvlc_media_player_get_title_count(self)
def previous_chapter(self):
'''Set previous chapter (if applicable).
'''
return libvlc_media_player_previous_chapter(self)
def next_chapter(self):
'''Set next chapter (if applicable).
'''
return libvlc_media_player_next_chapter(self)
def get_rate(self):
'''Get the requested movie play rate.
@warning: Depending on the underlying media, the requested rate may be
different from the real playback rate.
@return: movie play rate.
'''
return libvlc_media_player_get_rate(self)
def set_rate(self, rate):
'''Set movie play rate.
@param rate: movie play rate to set.
@return: -1 if an error was detected, 0 otherwise (but even then, it might not actually work depending on the underlying media protocol).
'''
return libvlc_media_player_set_rate(self, rate)
def get_state(self):
'''Get current movie state.
@return: the current state of the media player (playing, paused, ...) See libvlc_state_t.
'''
return libvlc_media_player_get_state(self)
def has_vout(self):
'''How many video outputs does this media player have?
@return: the number of video outputs.
'''
return libvlc_media_player_has_vout(self)
def is_seekable(self):
'''Is this media player seekable?
@return: true if the media player can seek \libvlc_return_bool.
'''
return libvlc_media_player_is_seekable(self)
def can_pause(self):
'''Can this media player be paused?
@return: true if the media player can pause \libvlc_return_bool.
'''
return libvlc_media_player_can_pause(self)
def program_scrambled(self):
'''Check if the current program is scrambled.
@return: true if the current program is scrambled \libvlc_return_bool.
@version: LibVLC 2.2.0 or later.
'''
return libvlc_media_player_program_scrambled(self)
def next_frame(self):
'''Display the next frame (if supported).
'''
return libvlc_media_player_next_frame(self)
def navigate(self, navigate):
'''Navigate through DVD Menu.
@param navigate: the Navigation mode.
@version: libVLC 2.0.0 or later.
'''
return libvlc_media_player_navigate(self, navigate)
def set_video_title_display(self, position, timeout):
'''Set if, and how, the video title will be shown when media is played.
@param position: position at which to display the title, or libvlc_position_disable to prevent the title from being displayed.
@param timeout: title display timeout in milliseconds (ignored if libvlc_position_disable).
@version: libVLC 2.1.0 or later.
'''
return libvlc_media_player_set_video_title_display(self, position, timeout)
def add_slave(self, i_type, psz_uri, b_select):
'''Add a slave to the current media player.
@note: If the player is playing, the slave will be added directly. This call
will also update the slave list of the attached L{Media}.
@param i_type: subtitle or audio.
@param psz_uri: Uri of the slave (should contain a valid scheme).
@param b_select: True if this slave should be selected when it's loaded.
@return: 0 on success, -1 on error.
@version: LibVLC 3.0.0 and later. See L{media_slaves_add}.
'''
return libvlc_media_player_add_slave(self, i_type, str_to_bytes(psz_uri), b_select)
def toggle_fullscreen(self):
'''Toggle fullscreen status on non-embedded video outputs.
@warning: The same limitations apply to this function
as to L{set_fullscreen}().
'''
return libvlc_toggle_fullscreen(self)
def set_fullscreen(self, b_fullscreen):
'''Enable or disable fullscreen.
@warning: With most window managers, only a top-level window can be in
full-screen mode. Hence, this function will not operate properly if
L{set_xwindow}() was used to embed the video in a
non-top-level window. In that case, the embedding window must be reparented
to the root window B{before} fullscreen mode is enabled. You will want
to reparent it back to its normal parent when disabling fullscreen.
@param b_fullscreen: boolean for fullscreen status.
'''
return libvlc_set_fullscreen(self, b_fullscreen)
def get_fullscreen(self):
'''Get current fullscreen status.
@return: the fullscreen status (boolean) \libvlc_return_bool.
'''
return libvlc_get_fullscreen(self)
def video_set_key_input(self, on):
'''Enable or disable key press events handling, according to the LibVLC hotkeys
configuration. By default and for historical reasons, keyboard events are
handled by the LibVLC video widget.
@note: On X11, there can be only one subscriber for key press and mouse
click events per window. If your application has subscribed to those events
for the X window ID of the video widget, then LibVLC will not be able to
handle key presses and mouse clicks in any case.
@warning: This function is only implemented for X11 and Win32 at the moment.
@param on: true to handle key press events, false to ignore them.
'''
return libvlc_video_set_key_input(self, on)
def video_set_mouse_input(self, on):
'''Enable or disable mouse click events handling. By default, those events are
handled. This is needed for DVD menus to work, as well as a few video
filters such as "puzzle".
See L{video_set_key_input}().
@warning: This function is only implemented for X11 and Win32 at the moment.
@param on: true to handle mouse click events, false to ignore them.
'''
return libvlc_video_set_mouse_input(self, on)
def video_get_scale(self):
'''Get the current video scaling factor.
See also L{video_set_scale}().
@return: the currently configured zoom factor, or 0. if the video is set to fit to the output window/drawable automatically.
'''
return libvlc_video_get_scale(self)
def video_set_scale(self, f_factor):
'''Set the video scaling factor. That is the ratio of the number of pixels on
screen to the number of pixels in the original decoded video in each
dimension. Zero is a special value; it will adjust the video to the output
window/drawable (in windowed mode) or the entire screen.
Note that not all video outputs support scaling.
@param f_factor: the scaling factor, or zero.
'''
return libvlc_video_set_scale(self, f_factor)
def video_get_aspect_ratio(self):
'''Get current video aspect ratio.
@return: the video aspect ratio or None if unspecified (the result must be released with free() or L{free}()).
'''
return libvlc_video_get_aspect_ratio(self)
def video_set_aspect_ratio(self, psz_aspect):
'''Set new video aspect ratio.
@param psz_aspect: new video aspect-ratio or None to reset to default @note Invalid aspect ratios are ignored.
'''
return libvlc_video_set_aspect_ratio(self, str_to_bytes(psz_aspect))
def video_get_spu(self):
'''Get current video subtitle.
@return: the video subtitle selected, or -1 if none.
'''
return libvlc_video_get_spu(self)
def video_get_spu_count(self):
'''Get the number of available video subtitles.
@return: the number of available video subtitles.
'''
return libvlc_video_get_spu_count(self)
def video_set_spu(self, i_spu):
'''Set new video subtitle.
@param i_spu: video subtitle track to select (i_id from track description).
@return: 0 on success, -1 if out of range.
'''
return libvlc_video_set_spu(self, i_spu)
def video_get_spu_delay(self):
'''Get the current subtitle delay. Positive values means subtitles are being
displayed later, negative values earlier.
@return: time (in microseconds) the display of subtitles is being delayed.
@version: LibVLC 2.0.0 or later.
'''
return libvlc_video_get_spu_delay(self)
def video_set_spu_delay(self, i_delay):
'''Set the subtitle delay. This affects the timing of when the subtitle will
be displayed. Positive values result in subtitles being displayed later,
while negative values will result in subtitles being displayed earlier.
The subtitle delay will be reset to zero each time the media changes.
@param i_delay: time (in microseconds) the display of subtitles should be delayed.
@return: 0 on success, -1 on error.
@version: LibVLC 2.0.0 or later.
'''
return libvlc_video_set_spu_delay(self, i_delay)
def video_get_crop_geometry(self):
'''Get current crop filter geometry.
@return: the crop filter geometry or None if unset.
'''
return libvlc_video_get_crop_geometry(self)
def video_set_crop_geometry(self, psz_geometry):
'''Set new crop filter geometry.
@param psz_geometry: new crop filter geometry (None to unset).
'''
return libvlc_video_set_crop_geometry(self, str_to_bytes(psz_geometry))
def video_get_teletext(self):
'''Get current teletext page requested.
@return: the current teletext page requested.
'''
return libvlc_video_get_teletext(self)
def video_set_teletext(self, i_page):
'''Set new teletext page to retrieve.
@param i_page: teletext page number requested.
'''
return libvlc_video_set_teletext(self, i_page)
def toggle_teletext(self):
'''Toggle teletext transparent status on video output.
'''
return libvlc_toggle_teletext(self)
def video_get_track_count(self):
'''Get number of available video tracks.
@return: the number of available video tracks (int).
'''
return libvlc_video_get_track_count(self)
def video_get_track(self):
'''Get current video track.
@return: the video track ID (int) or -1 if no active input.
'''
return libvlc_video_get_track(self)
def video_set_track(self, i_track):
'''Set video track.
@param i_track: the track ID (i_id field from track description).
@return: 0 on success, -1 if out of range.
'''
return libvlc_video_set_track(self, i_track)
def video_take_snapshot(self, num, psz_filepath, i_width, i_height):
'''Take a snapshot of the current video window.
If i_width AND i_height is 0, original size is used.
If i_width XOR i_height is 0, original aspect-ratio is preserved.
@param num: number of video output (typically 0 for the first/only one).
@param psz_filepath: the path where to save the screenshot to.
@param i_width: the snapshot's width.
@param i_height: the snapshot's height.
@return: 0 on success, -1 if the video was not found.
'''
return libvlc_video_take_snapshot(self, num, str_to_bytes(psz_filepath), i_width, i_height)
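# A minimal usage sketch (``p`` is a MediaPlayer with an active video
# output; the path is hypothetical):
#
#   p.video_take_snapshot(0, '/tmp/frame.png', 0, 0)   # 0x0 keeps original size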
def video_set_deinterlace(self, psz_mode):
'''Enable or disable deinterlace filter.
@param psz_mode: type of deinterlace filter, None to disable.
'''
return libvlc_video_set_deinterlace(self, str_to_bytes(psz_mode))
def video_get_marquee_int(self, option):
'''Get an integer marquee option value.
@param option: marq option to get See libvlc_video_marquee_int_option_t.
'''
return libvlc_video_get_marquee_int(self, option)
def video_get_marquee_string(self, option):
'''Get a string marquee option value.
@param option: marq option to get See libvlc_video_marquee_string_option_t.
'''
return libvlc_video_get_marquee_string(self, option)
def video_set_marquee_int(self, option, i_val):
'''Enable, disable or set an integer marquee option
Setting libvlc_marquee_Enable has the side effect of enabling (arg !0)
or disabling (arg 0) the marq filter.
@param option: marq option to set See libvlc_video_marquee_int_option_t.
@param i_val: marq option value.
'''
return libvlc_video_set_marquee_int(self, option, i_val)
def video_set_marquee_string(self, option, psz_text):
'''Set a marquee string option.
@param option: marq option to set See libvlc_video_marquee_string_option_t.
@param psz_text: marq option value.
'''
return libvlc_video_set_marquee_string(self, option, str_to_bytes(psz_text))
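# A minimal marquee sketch (``p`` is a playing MediaPlayer;
# VideoMarqueeOption is the enum defined elsewhere in this module):
#
#   p.video_set_marquee_int(vlc.VideoMarqueeOption.Enable, 1)
#   p.video_set_marquee_string(vlc.VideoMarqueeOption.Text, 'Hello world')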
def video_get_logo_int(self, option):
'''Get integer logo option.
@param option: logo option to get, values of libvlc_video_logo_option_t.
'''
return libvlc_video_get_logo_int(self, option)
def video_set_logo_int(self, option, value):
'''Set logo option as integer. Options that take a different type value
are ignored.
Passing libvlc_logo_enable as option value has the side effect of
starting (arg !0) or stopping (arg 0) the logo filter.
@param option: logo option to set, values of libvlc_video_logo_option_t.
@param value: logo option value.
'''
return libvlc_video_set_logo_int(self, option, value)
def video_set_logo_string(self, option, psz_value):
'''Set logo option as string. Options that take a different type value
are ignored.
@param option: logo option to set, values of libvlc_video_logo_option_t.
@param psz_value: logo option value.
'''
return libvlc_video_set_logo_string(self, option, str_to_bytes(psz_value))
def video_get_adjust_int(self, option):
'''Get integer adjust option.
@param option: adjust option to get, values of libvlc_video_adjust_option_t.
@version: LibVLC 1.1.1 and later.
'''
return libvlc_video_get_adjust_int(self, option)
def video_set_adjust_int(self, option, value):
'''Set adjust option as integer. Options that take a different type value
are ignored.
Passing libvlc_adjust_enable as option value has the side effect of
starting (arg !0) or stopping (arg 0) the adjust filter.
@param option: adjust option to set, values of libvlc_video_adjust_option_t.
@param value: adjust option value.
@version: LibVLC 1.1.1 and later.
'''
return libvlc_video_set_adjust_int(self, option, value)
def video_get_adjust_float(self, option):
'''Get float adjust option.
@param option: adjust option to get, values of libvlc_video_adjust_option_t.
@version: LibVLC 1.1.1 and later.
'''
return libvlc_video_get_adjust_float(self, option)
def video_set_adjust_float(self, option, value):
'''Set adjust option as float. Options that take a different type value
are ignored.
@param option: adjust option to set, values of libvlc_video_adjust_option_t.
@param value: adjust option value.
@version: LibVLC 1.1.1 and later.
'''
return libvlc_video_set_adjust_float(self, option, value)
def audio_output_set(self, psz_name):
'''Selects an audio output module.
@note: Any change will take effect only after playback is stopped and
restarted. Audio output cannot be changed while playing.
@param psz_name: name of the audio output module, i.e. the psz_name field of L{AudioOutput}.
@return: 0 if function succeeded, -1 on error.
'''
return libvlc_audio_output_set(self, str_to_bytes(psz_name))
def audio_output_device_enum(self):
'''Gets a list of potential audio output devices,
See L{audio_output_device_set}().
@note: Not all audio outputs support enumerating devices.
The audio output may be functional even if the list is empty (None).
@note: The list may not be exhaustive.
@warning: Some audio output devices in the list might not actually work in
some circumstances. By default, it is recommended to not specify any
explicit audio device.
@return: A None-terminated linked list of potential audio output devices. It must be freed with L{audio_output_device_list_release}().
@version: LibVLC 2.2.0 or later.
'''
return libvlc_audio_output_device_enum(self)
def audio_output_device_set(self, module, device_id):
'''Configures an explicit audio output device.
If the module parameter is None, audio output will be moved to the device
specified by the device identifier string immediately. This is the
recommended usage.
A list of adequate potential device strings can be obtained with
L{audio_output_device_enum}().
However passing None is supported in LibVLC version 2.2.0 and later only;
in earlier versions, this function would have no effects when the module
parameter was None.
If the module parameter is not None, the device parameter of the
corresponding audio output, if it exists, will be set to the specified
string. Note that some audio output modules do not have such a parameter
(notably MMDevice and PulseAudio).
A list of adequate potential device strings can be obtained with
L{audio_output_device_list_get}().
@note: This function does not select the specified audio output plugin.
L{audio_output_set}() is used for that purpose.
@warning: The syntax for the device parameter depends on the audio output.
Some audio output modules require further parameters (e.g. a channels map
in the case of ALSA).
@param module: If None, current audio output module. if non-None, name of audio output module.
@param device_id: device identifier string.
@return: Nothing. Errors are ignored (this is a design bug).
'''
return libvlc_audio_output_device_set(self, str_to_bytes(module), str_to_bytes(device_id))
def audio_output_device_get(self):
'''Get the current audio output device identifier.
This complements L{audio_output_device_set}().
@warning: The initial value for the current audio output device identifier
may not be set or may be some unknown value. A LibVLC application should
compare this value against the known device identifiers (e.g. those that
were previously retrieved by a call to L{audio_output_device_enum} or
L{audio_output_device_list_get}) to find the current audio output device.
It is possible that the selected audio output device changes (an external
change) without a call to L{audio_output_device_set}. That may make this
method unsuitable to use if a LibVLC application is attempting to track
dynamic audio device changes as they happen.
@return: the current audio output device identifier None if no device is selected or in case of error (the result must be released with free() or L{free}()).
@version: LibVLC 3.0.0 or later.
'''
return libvlc_audio_output_device_get(self)
def audio_toggle_mute(self):
'''Toggle mute status.
'''
return libvlc_audio_toggle_mute(self)
def audio_get_mute(self):
'''Get current mute status.
@return: the mute status (boolean) if defined, -1 if undefined or not applicable.
'''
return libvlc_audio_get_mute(self)
def audio_set_mute(self, status):
'''Set mute status.
@param status: If status is true then mute, otherwise unmute @warning This function does not always work. If there is no active audio playback stream, the mute status might not be available. If digital pass-through (S/PDIF, HDMI...) is in use, muting may not be applicable. Also some audio output plugins do not support muting at all. @note To force silent playback, disable all audio tracks. This is more efficient and reliable than mute.
'''
return libvlc_audio_set_mute(self, status)
def audio_get_volume(self):
'''Get current software audio volume.
@return: the software volume in percents (0 = mute, 100 = nominal / 0dB).
'''
return libvlc_audio_get_volume(self)
def audio_set_volume(self, i_volume):
'''Set current software audio volume.
@param i_volume: the volume in percents (0 = mute, 100 = 0dB).
@return: 0 if the volume was set, -1 if it was out of range.
'''
return libvlc_audio_set_volume(self, i_volume)
def audio_get_track_count(self):
'''Get number of available audio tracks.
@return: the number of available audio tracks (int), or -1 if unavailable.
'''
return libvlc_audio_get_track_count(self)
def audio_get_track(self):
'''Get current audio track.
@return: the audio track ID or -1 if no active input.
'''
return libvlc_audio_get_track(self)
def audio_set_track(self, i_track):
'''Set current audio track.
@param i_track: the track ID (i_id field from track description).
@return: 0 on success, -1 on error.
'''
return libvlc_audio_set_track(self, i_track)
def audio_get_channel(self):
'''Get current audio channel.
@return: the audio channel. See libvlc_audio_output_channel_t.
'''
return libvlc_audio_get_channel(self)
def audio_set_channel(self, channel):
'''Set current audio channel.
@param channel: the audio channel, See libvlc_audio_output_channel_t.
@return: 0 on success, -1 on error.
'''
return libvlc_audio_set_channel(self, channel)
def audio_get_delay(self):
'''Get current audio delay.
@return: the audio delay (microseconds).
@version: LibVLC 1.1.1 or later.
'''
return libvlc_audio_get_delay(self)
def audio_set_delay(self, i_delay):
'''Set current audio delay. The audio delay will be reset to zero each time the media changes.
@param i_delay: the audio delay (microseconds).
@return: 0 on success, -1 on error.
@version: LibVLC 1.1.1 or later.
'''
return libvlc_audio_set_delay(self, i_delay)
def set_equalizer(self, p_equalizer):
'''Apply new equalizer settings to a media player.
The equalizer is first created by invoking L{audio_equalizer_new}() or
L{audio_equalizer_new_from_preset}().
It is possible to apply new equalizer settings to a media player whether the media
player is currently playing media or not.
Invoking this method will immediately apply the new equalizer settings to the audio
output of the currently playing media if there is any.
If there is no currently playing media, the new equalizer settings will be applied
later if and when new media is played.
Equalizer settings will automatically be applied to subsequently played media.
To disable the equalizer for a media player invoke this method passing None for the
p_equalizer parameter.
The media player does not keep a reference to the supplied equalizer so it is safe
for an application to release the equalizer reference any time after this method
returns.
@param p_equalizer: opaque equalizer handle, or None to disable the equalizer for this media player.
@return: zero on success, -1 on error.
@version: LibVLC 2.2.0 or later.
'''
return libvlc_media_player_set_equalizer(self, p_equalizer)
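# Hedged usage sketch (illustrative helper, not part of the generated
# bindings): applying a preset equalizer via set_equalizer() above. It
# assumes the module's libvlc_audio_equalizer_new_from_preset() and
# libvlc_audio_equalizer_release() wrappers, declared elsewhere in this
# file; preset index 0 is a placeholder.
def _example_apply_preset(player, preset=0):
    '''Apply a preset equalizer, then drop our handle (safe per the
    docstring above: the player keeps no reference to it).'''
    eq = libvlc_audio_equalizer_new_from_preset(preset)
    try:
        return player.set_equalizer(eq)
    finally:
        libvlc_audio_equalizer_release(eq)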
def get_role(self):
'''Gets the media role.
@return: the media player role. See libvlc_media_player_role_t.
@version: LibVLC 3.0.0 and later.
'''
return libvlc_media_player_get_role(self)
def set_role(self, role):
'''Sets the media role.
@param role: the media player role. See libvlc_media_player_role_t.
@return: 0 on success, -1 on error.
'''
return libvlc_media_player_set_role(self, role)
# LibVLC __version__ functions #
def libvlc_media_player_get_fps(p_mi):
'''Get movie fps rate.
This function is provided for backward compatibility. It cannot deal with
multiple video tracks. In LibVLC versions prior to 3.0, it would also fail
if the file format did not convey the frame rate explicitly.
\deprecated Consider using L{libvlc_media_tracks_get}() instead.
@param p_mi: the Media Player.
@return: frames per second (fps) for this playing movie, or 0 if unspecified.
'''
f = _Cfunctions.get('libvlc_media_player_get_fps', None) or \
_Cfunction('libvlc_media_player_get_fps', ((1,),), None,
ctypes.c_float, MediaPlayer)
return f(p_mi)
def libvlc_media_player_set_agl(p_mi, drawable):
'''\deprecated Use L{libvlc_media_player_set_nsobject}() instead.
'''
f = _Cfunctions.get('libvlc_media_player_set_agl', None) or \
_Cfunction('libvlc_media_player_set_agl', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_uint32)
return f(p_mi, drawable)
def libvlc_media_player_get_agl(p_mi):
'''\deprecated Use L{libvlc_media_player_get_nsobject}() instead.
'''
f = _Cfunctions.get('libvlc_media_player_get_agl', None) or \
_Cfunction('libvlc_media_player_get_agl', ((1,),), None,
ctypes.c_uint32, MediaPlayer)
return f(p_mi)
def libvlc_errmsg():
'''A human-readable error message for the last LibVLC error in the calling
thread. The resulting string is valid until another error occurs (at least
until the next LibVLC call).
@warning
This will be None if there was no error.
'''
f = _Cfunctions.get('libvlc_errmsg', None) or \
_Cfunction('libvlc_errmsg', (), None,
ctypes.c_char_p)
return f()
def libvlc_clearerr():
'''Clears the LibVLC error status for the current thread. This is optional.
By default, the error status is automatically overridden when a new error
occurs, and destroyed when the thread exits.
'''
f = _Cfunctions.get('libvlc_clearerr', None) or \
_Cfunction('libvlc_clearerr', (), None,
None)
return f()
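# Hedged sketch (illustrative helper, not part of the generated bindings):
# the conventional LibVLC error-reporting pattern built on libvlc_errmsg()
# and libvlc_clearerr() above.
def _example_last_error():
    '''Return the last LibVLC error for this thread as text, or None.
    libvlc_errmsg() yields a C string (bytes on Python 3, or None);
    libvlc_clearerr() then resets the per-thread error status.'''
    msg = libvlc_errmsg()
    libvlc_clearerr()
    if msg is None:
        return None
    return msg.decode('utf-8', 'replace') if isinstance(msg, bytes) else msg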
def libvlc_vprinterr(fmt, ap):
'''Sets the LibVLC error status and message for the current thread.
Any previous error is overridden.
@param fmt: the format string.
@param ap: the arguments.
@return: a nul terminated string in any case.
'''
f = _Cfunctions.get('libvlc_vprinterr', None) or \
_Cfunction('libvlc_vprinterr', ((1,), (1,),), None,
ctypes.c_char_p, ctypes.c_char_p, ctypes.c_void_p)
return f(fmt, ap)
def libvlc_new(argc, argv):
'''Create and initialize a libvlc instance.
This function accepts a list of "command line" arguments similar to
main(). These arguments affect the LibVLC instance default configuration.
@note
LibVLC may create threads. Therefore, any thread-unsafe process
initialization must be performed before calling L{libvlc_new}(). In particular
and where applicable:
- setlocale() and textdomain(),
- setenv(), unsetenv() and putenv(),
- with the X11 display system, XInitThreads()
(see also L{libvlc_media_player_set_xwindow}()) and
- on Microsoft Windows, SetErrorMode().
- sigprocmask() shall never be invoked; pthread_sigmask() can be used.
On POSIX systems, the SIGCHLD signal must B{not} be ignored, i.e. the
signal handler must be set to SIG_DFL or a function pointer, not SIG_IGN.
Also while LibVLC is active, the wait() function shall not be called, and
any call to waitpid() shall use a strictly positive value for the first
parameter (i.e. the PID). Failure to follow those rules may lead to a
deadlock or a busy loop.
Also on POSIX systems, it is recommended that the SIGPIPE signal be blocked,
even if it is not, in principle, necessary.
On Microsoft Windows Vista/2008, the process error mode
SEM_FAILCRITICALERRORS flag B{must} be set with the SetErrorMode()
function before using LibVLC. On later versions, it is optional and
unnecessary.
@param argc: the number of arguments (should be 0).
@param argv: list of arguments (should be None).
@return: the libvlc instance or None in case of error.
@version: Arguments are meant to be passed from the command line to LibVLC, just like VLC media player does. The list of valid arguments depends on the LibVLC version, the operating system and platform, and the set of available LibVLC plugins. Invalid or unsupported arguments will cause the function to fail (i.e. return None). Also, some arguments may alter the behaviour or otherwise interfere with other LibVLC functions.
@warning: There is absolutely no warranty or promise of forward, backward and cross-platform compatibility with regards to L{libvlc_new}() arguments. We recommend that you do not use them, other than when debugging.
'''
f = _Cfunctions.get('libvlc_new', None) or \
_Cfunction('libvlc_new', ((1,), (1,),), class_result(Instance),
ctypes.c_void_p, ctypes.c_int, ListPOINTER(ctypes.c_char_p))
return f(argc, argv)
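# Hedged sketch (illustrative helper, not part of the generated bindings):
# the create/release pairing for a raw instance, using the documented
# defaults argc=0 and argv=None. libvlc_release() is declared just below.
def _example_instance_lifecycle(work=None):
    '''Create a libvlc instance, run an optional callable against it,
    and always release it.'''
    instance = libvlc_new(0, None)
    if instance is None:
        raise RuntimeError('libvlc_new() failed')
    try:
        return work(instance) if work else None
    finally:
        libvlc_release(instance)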
def libvlc_release(p_instance):
'''Decrement the reference count of a libvlc instance, and destroy it
if it reaches zero.
@param p_instance: the instance to destroy.
'''
f = _Cfunctions.get('libvlc_release', None) or \
_Cfunction('libvlc_release', ((1,),), None,
None, Instance)
return f(p_instance)
def libvlc_retain(p_instance):
'''Increments the reference count of a libvlc instance.
The initial reference count is 1 after L{libvlc_new}() returns.
@param p_instance: the instance to reference.
'''
f = _Cfunctions.get('libvlc_retain', None) or \
_Cfunction('libvlc_retain', ((1,),), None,
None, Instance)
return f(p_instance)
def libvlc_add_intf(p_instance, name):
'''Try to start a user interface for the libvlc instance.
@param p_instance: the instance.
@param name: interface name, or None for default.
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_add_intf', None) or \
_Cfunction('libvlc_add_intf', ((1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p)
return f(p_instance, name)
def libvlc_set_user_agent(p_instance, name, http):
'''Sets the application name. LibVLC passes this as the user agent string
when a protocol requires it.
@param p_instance: LibVLC instance.
@param name: human-readable application name, e.g. "FooBar player 1.2.3".
@param http: HTTP User Agent, e.g. "FooBar/1.2.3 Python/2.6.0".
@version: LibVLC 1.1.1 or later.
'''
f = _Cfunctions.get('libvlc_set_user_agent', None) or \
_Cfunction('libvlc_set_user_agent', ((1,), (1,), (1,),), None,
None, Instance, ctypes.c_char_p, ctypes.c_char_p)
return f(p_instance, name, http)
def libvlc_set_app_id(p_instance, id, version, icon):
'''Sets some meta-information about the application.
See also L{libvlc_set_user_agent}().
@param p_instance: LibVLC instance.
@param id: Java-style application identifier, e.g. "com.acme.foobar".
@param version: application version numbers, e.g. "1.2.3".
@param icon: application icon name, e.g. "foobar".
@version: LibVLC 2.1.0 or later.
'''
f = _Cfunctions.get('libvlc_set_app_id', None) or \
_Cfunction('libvlc_set_app_id', ((1,), (1,), (1,), (1,),), None,
None, Instance, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p)
return f(p_instance, id, version, icon)
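# Hedged sketch (illustrative helper, not part of the generated bindings):
# declare application identity right after creating an instance. All names,
# versions and the icon below are placeholders; byte strings are passed
# because the raw ctypes wrappers expect C strings.
def _example_identify_app(p_instance):
    '''Set user agent and application metadata on p_instance.'''
    libvlc_set_user_agent(p_instance, b'FooBar player 1.2.3', b'FooBar/1.2.3')
    libvlc_set_app_id(p_instance, b'com.acme.foobar', b'1.2.3', b'foobar')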
def libvlc_get_version():
'''Retrieve libvlc version.
Example: "1.1.0-git The Luggage".
@return: a string containing the libvlc version.
'''
f = _Cfunctions.get('libvlc_get_version', None) or \
_Cfunction('libvlc_get_version', (), None,
ctypes.c_char_p)
return f()
def libvlc_get_compiler():
'''Retrieve libvlc compiler version.
Example: "gcc version 4.2.3 (Ubuntu 4.2.3-2ubuntu6)".
@return: a string containing the libvlc compiler version.
'''
f = _Cfunctions.get('libvlc_get_compiler', None) or \
_Cfunction('libvlc_get_compiler', (), None,
ctypes.c_char_p)
return f()
def libvlc_get_changeset():
'''Retrieve libvlc changeset.
Example: "aa9bce0bc4".
@return: a string containing the libvlc changeset.
'''
f = _Cfunctions.get('libvlc_get_changeset', None) or \
_Cfunction('libvlc_get_changeset', (), None,
ctypes.c_char_p)
return f()
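# Hedged sketch (illustrative helper, not part of the generated bindings):
# collect the three build-information strings exposed by the wrappers above.
def _example_runtime_info():
    '''Return the libvlc version, compiler and changeset as a dict of
    C strings (bytes on Python 3).'''
    return {'version': libvlc_get_version(),
            'compiler': libvlc_get_compiler(),
            'changeset': libvlc_get_changeset()}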
def libvlc_free(ptr):
'''Frees a heap allocation returned by a LibVLC function.
If you know you're using the same underlying C run-time as the LibVLC
implementation, then you can call ANSI C free() directly instead.
@param ptr: the pointer.
'''
f = _Cfunctions.get('libvlc_free', None) or \
_Cfunction('libvlc_free', ((1,),), None,
None, ctypes.c_void_p)
return f(ptr)
def libvlc_event_attach(p_event_manager, i_event_type, f_callback, user_data):
'''Register for an event notification.
@param p_event_manager: the event manager to which you want to attach to. Generally it is obtained by vlc_my_object_event_manager() where my_object is the object you want to listen to.
@param i_event_type: the desired event to which we want to listen.
@param f_callback: the function to call when i_event_type occurs.
@param user_data: user provided data to carry with the event.
@return: 0 on success, ENOMEM on error.
'''
f = _Cfunctions.get('libvlc_event_attach', None) or \
_Cfunction('libvlc_event_attach', ((1,), (1,), (1,), (1,),), None,
ctypes.c_int, EventManager, ctypes.c_uint, Callback, ctypes.c_void_p)
return f(p_event_manager, i_event_type, f_callback, user_data)
def libvlc_event_detach(p_event_manager, i_event_type, f_callback, p_user_data):
'''Unregister an event notification.
@param p_event_manager: the event manager.
@param i_event_type: the desired event to which we want to unregister.
@param f_callback: the function to call when i_event_type occurs.
@param p_user_data: user provided data to carry with the event.
'''
f = _Cfunctions.get('libvlc_event_detach', None) or \
_Cfunction('libvlc_event_detach', ((1,), (1,), (1,), (1,),), None,
None, EventManager, ctypes.c_uint, Callback, ctypes.c_void_p)
return f(p_event_manager, i_event_type, f_callback, p_user_data)
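# Hedged sketch (illustrative helpers, not part of the generated bindings):
# the attach/detach pairing above with a ctypes callback. It assumes this
# module's Callback CFUNCTYPE, declared earlier in the file, takes an event
# pointer and a user-data pointer; the handler body is a placeholder.
@Callback
def _example_on_event(event, user_data):
    # Runs on a LibVLC thread: keep it short and never block here.
    pass
def _example_listen(p_event_manager, i_event_type):
    '''Attach _example_on_event for one event type; return a detach thunk.'''
    if libvlc_event_attach(p_event_manager, i_event_type,
                           _example_on_event, None):
        raise RuntimeError('libvlc_event_attach() failed')
    return lambda: libvlc_event_detach(p_event_manager, i_event_type,
                                       _example_on_event, None)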
def libvlc_event_type_name(event_type):
'''Get an event's type name.
@param event_type: the desired event.
'''
f = _Cfunctions.get('libvlc_event_type_name', None) or \
_Cfunction('libvlc_event_type_name', ((1,),), None,
ctypes.c_char_p, ctypes.c_uint)
return f(event_type)
def libvlc_log_get_context(ctx):
'''Gets debugging information about a log message: the name of the VLC module
emitting the message and the message location within the source code.
The returned module name and file name will be None if unknown.
The returned line number will similarly be zero if unknown.
@param ctx: message context (as passed to the @ref libvlc_log_cb callback).
@return: module (module name storage, or None), file (source code file name storage, or None), line (source code line number storage, or 0).
@version: LibVLC 2.1.0 or later.
'''
f = _Cfunctions.get('libvlc_log_get_context', None) or \
_Cfunction('libvlc_log_get_context', ((1,), (2,), (2,), (2,),), None,
None, Log_ptr, ListPOINTER(ctypes.c_char_p), ListPOINTER(ctypes.c_char_p), ctypes.POINTER(ctypes.c_uint))
return f(ctx)
def libvlc_log_get_object(ctx, id):
'''Gets VLC object information about a log message: the type name of the VLC
object emitting the message, the object header if any, and a temporally-unique
object identifier. This information is mainly meant for B{manual}
troubleshooting.
The returned type name may be "generic" if unknown, but it cannot be None.
The returned header will be None if unset; in current versions, the header
is used to distinguish VLM inputs.
The returned object ID will be zero if the message is not associated with
any VLC object.
@param ctx: message context (as passed to the @ref libvlc_log_cb callback).
@return: name (object type name storage, or None) and header (object header storage, or None); the unique object identifier is stored through the id pointer.
@version: LibVLC 2.1.0 or later.
'''
f = _Cfunctions.get('libvlc_log_get_object', None) or \
_Cfunction('libvlc_log_get_object', ((1,), (2,), (2,), (1,),), None,
None, Log_ptr, ListPOINTER(ctypes.c_char_p), ListPOINTER(ctypes.c_char_p), ctypes.POINTER(ctypes.c_uint))
return f(ctx, id)
def libvlc_log_unset(p_instance):
'''Unsets the logging callback for a LibVLC instance. This is rarely needed:
the callback is implicitly unset when the instance is destroyed.
This function will wait for any pending callbacks invocation to complete
(causing a deadlock if called from within the callback).
@param p_instance: libvlc instance.
@version: LibVLC 2.1.0 or later.
'''
f = _Cfunctions.get('libvlc_log_unset', None) or \
_Cfunction('libvlc_log_unset', ((1,),), None,
None, Instance)
return f(p_instance)
def libvlc_log_set(cb, data, p_instance):
'''Sets the logging callback for a LibVLC instance.
This function is thread-safe: it will wait for any pending callbacks
invocation to complete.
@param cb: callback function pointer.
@param data: opaque data pointer for the callback function.
@note: Some log messages (especially debug) are emitted by LibVLC while it is being initialized. These messages cannot be captured with this interface.
@warning: A deadlock may occur if this function is called from the callback.
@param p_instance: libvlc instance.
@version: LibVLC 2.1.0 or later.
'''
f = _Cfunctions.get('libvlc_log_set', None) or \
_Cfunction('libvlc_log_set', ((1,), (1,), (1,),), None,
None, Instance, LogCb, ctypes.c_void_p)
return f(cb, data, p_instance)
def libvlc_log_set_file(p_instance, stream):
'''Sets up logging to a file.
@param p_instance: libvlc instance.
@param stream: FILE pointer opened for writing (the FILE pointer must remain valid until L{libvlc_log_unset}()).
@version: LibVLC 2.1.0 or later.
'''
f = _Cfunctions.get('libvlc_log_set_file', None) or \
_Cfunction('libvlc_log_set_file', ((1,), (1,),), None,
None, Instance, FILE_ptr)
return f(p_instance, stream)
def libvlc_module_description_list_release(p_list):
'''Release a list of module descriptions.
@param p_list: the list to be released.
'''
f = _Cfunctions.get('libvlc_module_description_list_release', None) or \
_Cfunction('libvlc_module_description_list_release', ((1,),), None,
None, ctypes.POINTER(ModuleDescription))
return f(p_list)
def libvlc_audio_filter_list_get(p_instance):
'''Returns a list of audio filters that are available.
@param p_instance: libvlc instance.
@return: a list of module descriptions. It should be freed with L{libvlc_module_description_list_release}(). In case of an error, None is returned. See L{ModuleDescription} See L{libvlc_module_description_list_release}.
'''
f = _Cfunctions.get('libvlc_audio_filter_list_get', None) or \
_Cfunction('libvlc_audio_filter_list_get', ((1,),), None,
ctypes.POINTER(ModuleDescription), Instance)
return f(p_instance)
def libvlc_video_filter_list_get(p_instance):
'''Returns a list of video filters that are available.
@param p_instance: libvlc instance.
@return: a list of module descriptions. It should be freed with L{libvlc_module_description_list_release}(). In case of an error, None is returned. See L{ModuleDescription} See L{libvlc_module_description_list_release}.
'''
f = _Cfunctions.get('libvlc_video_filter_list_get', None) or \
_Cfunction('libvlc_video_filter_list_get', ((1,),), None,
ctypes.POINTER(ModuleDescription), Instance)
return f(p_instance)
def libvlc_clock():
'''Return the current time as defined by LibVLC. The unit is the microsecond.
Time increases monotonically (regardless of time zone changes and RTC
adjustments).
The origin is arbitrary but consistent across the whole system
(e.g. the system uptime, the time since the system was booted).
@note: On systems that support it, the POSIX monotonic clock is used.
'''
f = _Cfunctions.get('libvlc_clock', None) or \
_Cfunction('libvlc_clock', (), None,
ctypes.c_int64)
return f()
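# Hedged sketch (illustrative helper, not part of the generated bindings):
# timing a callable against the monotonic microsecond clock above.
def _example_elapsed_us(action):
    '''Run action() and return (result, elapsed microseconds).'''
    start = libvlc_clock()
    result = action()
    return result, libvlc_clock() - start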
def libvlc_dialog_set_context(p_id, p_context):
'''Associate an opaque pointer with the dialog id.
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_dialog_set_context', None) or \
_Cfunction('libvlc_dialog_set_context', ((1,), (1,),), None,
None, ctypes.c_void_p, ctypes.c_void_p)
return f(p_id, p_context)
def libvlc_dialog_get_context(p_id):
'''Return the opaque pointer associated with the dialog id.
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_dialog_get_context', None) or \
_Cfunction('libvlc_dialog_get_context', ((1,),), None,
ctypes.c_void_p, ctypes.c_void_p)
return f(p_id)
def libvlc_dialog_post_login(p_id, psz_username, psz_password, b_store):
'''Post a login answer
After this call, p_id won't be valid anymore
See libvlc_dialog_cbs.pf_display_login.
@param p_id: id of the dialog.
@param psz_username: valid and non empty string.
@param psz_password: valid string (can be empty).
@param b_store: if true, store the credentials.
@return: 0 on success, or -1 on error.
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_dialog_post_login', None) or \
_Cfunction('libvlc_dialog_post_login', ((1,), (1,), (1,), (1,),), None,
ctypes.c_int, ctypes.c_void_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_bool)
return f(p_id, psz_username, psz_password, b_store)
def libvlc_dialog_post_action(p_id, i_action):
'''Post a question answer
After this call, p_id won't be valid anymore
See libvlc_dialog_cbs.pf_display_question.
@param p_id: id of the dialog.
@param i_action: 1 for action1, 2 for action2.
@return: 0 on success, or -1 on error.
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_dialog_post_action', None) or \
_Cfunction('libvlc_dialog_post_action', ((1,), (1,),), None,
ctypes.c_int, ctypes.c_void_p, ctypes.c_int)
return f(p_id, i_action)
def libvlc_dialog_dismiss(p_id):
'''Dismiss a dialog
After this call, p_id won't be valid anymore
See libvlc_dialog_cbs.pf_cancel.
@param p_id: id of the dialog.
@return: 0 on success, or -1 on error.
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_dialog_dismiss', None) or \
_Cfunction('libvlc_dialog_dismiss', ((1,),), None,
ctypes.c_int, ctypes.c_void_p)
return f(p_id)
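# Hedged sketch (illustrative helper, not part of the generated bindings):
# answer a login dialog with the wrappers above. Credentials are
# placeholders and must be byte strings for the raw wrappers; per the
# docstrings, p_id is no longer valid after a successful answer or dismissal.
def _example_answer_login(p_id, username=b'user', password=b''):
    '''Post a login answer, falling back to dismissing the dialog.'''
    if libvlc_dialog_post_login(p_id, username, password, False) != 0:
        libvlc_dialog_dismiss(p_id)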
def libvlc_media_new_location(p_instance, psz_mrl):
'''Create a media with a certain given media resource location,
for instance a valid URL.
@note: To refer to a local file with this function,
the file://... URI syntax B{must} be used (see IETF RFC3986).
We recommend using L{libvlc_media_new_path}() instead when dealing with
local files.
See L{libvlc_media_release}.
@param p_instance: the instance.
@param psz_mrl: the media location.
@return: the newly created media or None on error.
'''
f = _Cfunctions.get('libvlc_media_new_location', None) or \
_Cfunction('libvlc_media_new_location', ((1,), (1,),), class_result(Media),
ctypes.c_void_p, Instance, ctypes.c_char_p)
return f(p_instance, psz_mrl)
def libvlc_media_new_path(p_instance, path):
'''Create a media for a certain file path.
See L{libvlc_media_release}.
@param p_instance: the instance.
@param path: local filesystem path.
@return: the newly created media or None on error.
'''
f = _Cfunctions.get('libvlc_media_new_path', None) or \
_Cfunction('libvlc_media_new_path', ((1,), (1,),), class_result(Media),
ctypes.c_void_p, Instance, ctypes.c_char_p)
return f(p_instance, path)
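# Hedged sketch (illustrative helper, not part of the generated bindings):
# choosing between the two constructors above. Per the note on
# libvlc_media_new_location(), local files should go through
# libvlc_media_new_path(); the scheme test below is a simple heuristic.
def _example_open_media(p_instance, target):
    '''Create a Media from a byte-string path or URL, or None on error.'''
    if b'://' in target:
        return libvlc_media_new_location(p_instance, target)
    return libvlc_media_new_path(p_instance, target)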
def libvlc_media_new_fd(p_instance, fd):
'''Create a media for an already open file descriptor.
The file descriptor shall be open for reading (or reading and writing).
Regular file descriptors, pipe read descriptors and character device
descriptors (including TTYs) are supported on all platforms.
Block device descriptors are supported where available.
Directory descriptors are supported on systems that provide fdopendir().
Sockets are supported on all platforms where they are file descriptors,
i.e. all except Windows.
@note: This library will B{not} automatically close the file descriptor
under any circumstance. Nevertheless, a file descriptor can usually only be
rendered once in a media player. To render it a second time, the file
descriptor should probably be rewound to the beginning with lseek().
See L{libvlc_media_release}.
@param p_instance: the instance.
@param fd: open file descriptor.
@return: the newly created media or None on error.
@version: LibVLC 1.1.5 and later.
'''
f = _Cfunctions.get('libvlc_media_new_fd', None) or \
_Cfunction('libvlc_media_new_fd', ((1,), (1,),), class_result(Media),
ctypes.c_void_p, Instance, ctypes.c_int)
return f(p_instance, fd)
def libvlc_media_new_callbacks(instance, open_cb, read_cb, seek_cb, close_cb, opaque):
'''Create a media with custom callbacks to read the data from.
@param instance: LibVLC instance.
@param open_cb: callback to open the custom bitstream input media.
@param read_cb: callback to read data (must not be None).
@param seek_cb: callback to seek, or None if seeking is not supported.
@param close_cb: callback to close the media, or None if unnecessary.
@param opaque: data pointer for the open callback.
@return: the newly created media or None on error.
@note: If open_cb is None, the opaque pointer will be passed to read_cb, seek_cb and close_cb, and the stream size will be treated as unknown.
@note: The callbacks may be called asynchronously (from another thread). A single stream instance need not be reentrant. However the open_cb needs to be reentrant if the media is used by multiple player instances.
@warning: The callbacks may be used until all or any player instances that were supplied the media item are stopped. See L{libvlc_media_release}.
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_media_new_callbacks', None) or \
_Cfunction('libvlc_media_new_callbacks', ((1,), (1,), (1,), (1,), (1,), (1,),), class_result(Media),
ctypes.c_void_p, Instance, MediaOpenCb, MediaReadCb, MediaSeekCb, MediaCloseCb, ctypes.c_void_p)
return f(instance, open_cb, read_cb, seek_cb, close_cb, opaque)
def libvlc_media_new_as_node(p_instance, psz_name):
'''Create a media as an empty node with a given name.
See L{libvlc_media_release}.
@param p_instance: the instance.
@param psz_name: the name of the node.
@return: the new empty media or None on error.
'''
f = _Cfunctions.get('libvlc_media_new_as_node', None) or \
_Cfunction('libvlc_media_new_as_node', ((1,), (1,),), class_result(Media),
ctypes.c_void_p, Instance, ctypes.c_char_p)
return f(p_instance, psz_name)
def libvlc_media_add_option(p_md, psz_options):
'''Add an option to the media.
This option will be used to determine how the media_player will
read the media. This allows using VLC's advanced
reading/streaming options on a per-media basis.
@note: The options are listed in 'vlc --long-help' from the command line,
e.g. "-sout-all". Keep in mind that available options and their semantics
vary across LibVLC versions and builds.
@warning: Not all options affect L{Media} objects:
Specifically, due to architectural issues most audio and video options,
such as text renderer options, have no effect on an individual media.
These options must be set through L{libvlc_new}() instead.
@param p_md: the media descriptor.
@param psz_options: the options (as a string).
'''
f = _Cfunctions.get('libvlc_media_add_option', None) or \
_Cfunction('libvlc_media_add_option', ((1,), (1,),), None,
None, Media, ctypes.c_char_p)
return f(p_md, psz_options)
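# Hedged sketch (illustrative helper, not part of the generated bindings):
# attaching a per-media option string with the wrapper above. ":no-audio"
# is shown purely as an illustration; valid options and their semantics
# vary across LibVLC versions and builds, as the note warns.
def _example_silence_media(p_md):
    '''Disable audio for this one media item.'''
    libvlc_media_add_option(p_md, b':no-audio')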
def libvlc_media_add_option_flag(p_md, psz_options, i_flags):
'''Add an option to the media with configurable flags.
This option will be used to determine how the media_player will
read the media. This allows using VLC's advanced
reading/streaming options on a per-media basis.
The options are detailed in vlc --long-help, for instance
"--sout-all". Note that not all options are usable on media items:
specifically, due to architectural issues, video-related options
such as text renderer options cannot be set on a single media. They
must be set on the whole libvlc instance instead.
@param p_md: the media descriptor.
@param psz_options: the options (as a string).
@param i_flags: the flags for this option.
'''
f = _Cfunctions.get('libvlc_media_add_option_flag', None) or \
_Cfunction('libvlc_media_add_option_flag', ((1,), (1,), (1,),), None,
None, Media, ctypes.c_char_p, ctypes.c_uint)
return f(p_md, psz_options, i_flags)
def libvlc_media_retain(p_md):
'''Retain a reference to a media descriptor object (libvlc_media_t). Use
L{libvlc_media_release}() to decrement the reference count of a
media descriptor object.
@param p_md: the media descriptor.
'''
f = _Cfunctions.get('libvlc_media_retain', None) or \
_Cfunction('libvlc_media_retain', ((1,),), None,
None, Media)
return f(p_md)
def libvlc_media_release(p_md):
'''Decrement the reference count of a media descriptor object. If the
reference count is 0, then L{libvlc_media_release}() will release the
media descriptor object. It will send out an libvlc_MediaFreed event
to all listeners. If the media descriptor object has been released it
should not be used again.
@param p_md: the media descriptor.
'''
f = _Cfunctions.get('libvlc_media_release', None) or \
_Cfunction('libvlc_media_release', ((1,),), None,
None, Media)
return f(p_md)
def libvlc_media_get_mrl(p_md):
'''Get the media resource locator (mrl) from a media descriptor object.
@param p_md: a media descriptor object.
@return: string with mrl of media descriptor object.
'''
f = _Cfunctions.get('libvlc_media_get_mrl', None) or \
_Cfunction('libvlc_media_get_mrl', ((1,),), string_result,
ctypes.c_void_p, Media)
return f(p_md)
def libvlc_media_duplicate(p_md):
'''Duplicate a media descriptor object.
@param p_md: a media descriptor object.
'''
f = _Cfunctions.get('libvlc_media_duplicate', None) or \
_Cfunction('libvlc_media_duplicate', ((1,),), class_result(Media),
ctypes.c_void_p, Media)
return f(p_md)
def libvlc_media_get_meta(p_md, e_meta):
'''Read the meta of the media.
If the media has not yet been parsed this will return None.
See L{libvlc_media_parse}
See L{libvlc_media_parse_with_options}
See libvlc_MediaMetaChanged.
@param p_md: the media descriptor.
@param e_meta: the meta to read.
@return: the media's meta.
'''
f = _Cfunctions.get('libvlc_media_get_meta', None) or \
_Cfunction('libvlc_media_get_meta', ((1,), (1,),), string_result,
ctypes.c_void_p, Media, Meta)
return f(p_md, e_meta)
def libvlc_media_set_meta(p_md, e_meta, psz_value):
'''Set the meta of the media (this function will not save the meta, call
L{libvlc_media_save_meta} in order to save the meta).
@param p_md: the media descriptor.
@param e_meta: the meta to write.
@param psz_value: the media's meta.
'''
f = _Cfunctions.get('libvlc_media_set_meta', None) or \
_Cfunction('libvlc_media_set_meta', ((1,), (1,), (1,),), None,
None, Media, Meta, ctypes.c_char_p)
return f(p_md, e_meta, psz_value)
def libvlc_media_save_meta(p_md):
'''Save the meta previously set.
@param p_md: the media descriptor.
@return: true if the write operation was successful.
'''
f = _Cfunctions.get('libvlc_media_save_meta', None) or \
_Cfunction('libvlc_media_save_meta', ((1,),), None,
ctypes.c_int, Media)
return f(p_md)
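# Hedged sketch (illustrative helper, not part of the generated bindings):
# rewrite and persist one meta field with the two wrappers above. The meta
# value 0 (title) is assumed from this module's Meta enumeration, and the
# value must be a byte string for the raw wrapper.
def _example_retitle(p_md, title=b'New title'):
    '''Set the title meta and save it; returns true on success.'''
    libvlc_media_set_meta(p_md, 0, title)
    return libvlc_media_save_meta(p_md)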
def libvlc_media_get_state(p_md):
'''Get current state of media descriptor object. Possible media states are
libvlc_NothingSpecial=0, libvlc_Opening, libvlc_Playing, libvlc_Paused,
libvlc_Stopped, libvlc_Ended, libvlc_Error.
See libvlc_state_t.
@param p_md: a media descriptor object.
@return: state of media descriptor object.
'''
f = _Cfunctions.get('libvlc_media_get_state', None) or \
_Cfunction('libvlc_media_get_state', ((1,),), None,
State, Media)
return f(p_md)
def libvlc_media_get_stats(p_md, p_stats):
'''Get the current statistics about the media.
@param p_md: media descriptor object.
@param p_stats: structure that contains the statistics about the media (this structure must be allocated by the caller).
@return: true if the statistics are available, false otherwise \libvlc_return_bool.
'''
f = _Cfunctions.get('libvlc_media_get_stats', None) or \
_Cfunction('libvlc_media_get_stats', ((1,), (1,),), None,
ctypes.c_int, Media, ctypes.POINTER(MediaStats))
return f(p_md, p_stats)
def libvlc_media_subitems(p_md):
'''Get subitems of media descriptor object. This will increment
the reference count of supplied media descriptor object. Use
L{libvlc_media_list_release}() to decrement the reference counting.
@param p_md: media descriptor object.
@return: list of media descriptor subitems or None.
'''
f = _Cfunctions.get('libvlc_media_subitems', None) or \
_Cfunction('libvlc_media_subitems', ((1,),), class_result(MediaList),
ctypes.c_void_p, Media)
return f(p_md)
def libvlc_media_event_manager(p_md):
'''Get event manager from media descriptor object.
NOTE: this function doesn't increment reference counting.
@param p_md: a media descriptor object.
@return: event manager object.
'''
f = _Cfunctions.get('libvlc_media_event_manager', None) or \
_Cfunction('libvlc_media_event_manager', ((1,),), class_result(EventManager),
ctypes.c_void_p, Media)
return f(p_md)
def libvlc_media_get_duration(p_md):
'''Get duration (in ms) of media descriptor object item.
@param p_md: media descriptor object.
@return: duration of media item or -1 on error.
'''
f = _Cfunctions.get('libvlc_media_get_duration', None) or \
_Cfunction('libvlc_media_get_duration', ((1,),), None,
ctypes.c_longlong, Media)
return f(p_md)
def libvlc_media_parse(p_md):
'''Parse a media.
This fetches (local) art, meta data and tracks information.
The method is synchronous.
See L{libvlc_media_parse_with_options}
See L{libvlc_media_get_meta}
See libvlc_media_get_tracks_info.
@param p_md: media descriptor object.
'''
f = _Cfunctions.get('libvlc_media_parse', None) or \
_Cfunction('libvlc_media_parse', ((1,),), None,
None, Media)
return f(p_md)
def libvlc_media_parse_with_options(p_md, parse_flag, timeout):
'''Parse the media asynchronously with options.
This fetches (local or network) art, meta data and/or tracks information.
This method is the extended version of L{libvlc_media_parse}().
To track when this is over you can listen to libvlc_MediaParsedChanged
event. However if this function returns an error, you will not receive any
events.
It uses a flag to specify parse options (see libvlc_media_parse_flag_t). All
these flags can be combined. By default, media is parsed if it's a local
file.
See libvlc_MediaParsedChanged
See L{libvlc_media_get_meta}
See L{libvlc_media_tracks_get}
See L{libvlc_media_get_parsed_status}
See libvlc_media_parse_flag_t.
@param p_md: media descriptor object.
@param parse_flag: parse options:
@param timeout: maximum time allowed to preparse the media. If -1, the default "preparse-timeout" option will be used as a timeout. If 0, it will wait indefinitely. If > 0, the timeout will be used (in milliseconds).
@return: -1 in case of error, 0 otherwise.
@version: LibVLC 3.0.0 or later.
'''
f = _Cfunctions.get('libvlc_media_parse_with_options', None) or \
_Cfunction('libvlc_media_parse_with_options', ((1,), (1,), (1,),), None,
ctypes.c_int, Media, MediaParseFlag, ctypes.c_int)
return f(p_md, parse_flag, timeout)
def libvlc_media_get_parsed_status(p_md):
'''Get Parsed status for media descriptor object.
See libvlc_MediaParsedChanged
See libvlc_media_parsed_status_t.
@param p_md: media descriptor object.
@return: a value of the libvlc_media_parsed_status_t enum.
@version: LibVLC 3.0.0 or later.
'''
f = _Cfunctions.get('libvlc_media_get_parsed_status', None) or \
_Cfunction('libvlc_media_get_parsed_status', ((1,),), None,
MediaParsedStatus, Media)
return f(p_md)
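# Hedged sketch (illustrative helper, not part of the generated bindings):
# request asynchronous parsing and read back the status with the wrappers
# above. The flag value 0 (parse local files only) and the 0 timeout (wait
# indefinitely) are assumptions drawn from the parameter docs.
def _example_parse(p_md):
    '''Start parsing p_md; return its libvlc_media_parsed_status_t value.
    Real code should listen for the MediaParsedChanged event instead of
    reading the status immediately after the request.'''
    if libvlc_media_parse_with_options(p_md, 0, 0) == -1:
        raise RuntimeError('parse request failed')
    return libvlc_media_get_parsed_status(p_md)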
def libvlc_media_set_user_data(p_md, p_new_user_data):
'''Sets media descriptor's user_data. user_data is specialized data
accessed by the host application, VLC.framework uses it as a pointer to
a native object that references a L{Media} pointer.
@param p_md: media descriptor object.
@param p_new_user_data: pointer to user data.
'''
f = _Cfunctions.get('libvlc_media_set_user_data', None) or \
_Cfunction('libvlc_media_set_user_data', ((1,), (1,),), None,
None, Media, ctypes.c_void_p)
return f(p_md, p_new_user_data)
def libvlc_media_get_user_data(p_md):
'''Get media descriptor's user_data. user_data is specialized data
accessed by the host application, VLC.framework uses it as a pointer to
a native object that references a L{Media} pointer.
@param p_md: media descriptor object.
'''
f = _Cfunctions.get('libvlc_media_get_user_data', None) or \
_Cfunction('libvlc_media_get_user_data', ((1,),), None,
ctypes.c_void_p, Media)
return f(p_md)
def libvlc_media_tracks_get(p_md, tracks):
'''Get media descriptor's elementary streams description
Note, you need to call L{libvlc_media_parse}() or play the media at least once
before calling this function.
Not doing this will result in an empty array.
@param p_md: media descriptor object.
@param tracks: address to store an allocated array of Elementary Streams descriptions (must be freed with L{libvlc_media_tracks_release}()).
@return: the number of Elementary Streams (zero on error).
@version: LibVLC 2.1.0 and later.
'''
f = _Cfunctions.get('libvlc_media_tracks_get', None) or \
_Cfunction('libvlc_media_tracks_get', ((1,), (1,),), None,
ctypes.c_uint, Media, ctypes.POINTER(ctypes.POINTER(MediaTrack)))
return f(p_md, tracks)
def libvlc_media_get_codec_description(i_type, i_codec):
'''Get codec description from media elementary stream.
@param i_type: i_type from L{MediaTrack}.
@param i_codec: i_codec or i_original_fourcc from L{MediaTrack}.
@return: codec description.
@version: LibVLC 3.0.0 and later. See L{MediaTrack}.
'''
f = _Cfunctions.get('libvlc_media_get_codec_description', None) or \
_Cfunction('libvlc_media_get_codec_description', ((1,), (1,),), None,
ctypes.c_char_p, TrackType, ctypes.c_uint32)
return f(i_type, i_codec)
def libvlc_media_tracks_release(p_tracks, i_count):
'''Release media descriptor's elementary streams description array.
@param p_tracks: tracks info array to release.
@param i_count: number of elements in the array.
@version: LibVLC 2.1.0 and later.
'''
f = _Cfunctions.get('libvlc_media_tracks_release', None) or \
_Cfunction('libvlc_media_tracks_release', ((1,), (1,),), None,
None, ctypes.POINTER(MediaTrack), ctypes.c_uint)
return f(p_tracks, i_count)
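# Hedged sketch (illustrative helper, not part of the generated bindings):
# the allocate/inspect/release pattern for the track array, using the two
# wrappers above. The media must have been parsed or played first, per the
# note on libvlc_media_tracks_get().
def _example_track_count(p_md):
    '''Return the number of elementary streams in p_md.'''
    tracks = ctypes.POINTER(MediaTrack)()          # filled in by LibVLC
    n = libvlc_media_tracks_get(p_md, ctypes.byref(tracks))
    if n:
        libvlc_media_tracks_release(tracks, n)     # always release the array
    return n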
def libvlc_media_get_type(p_md):
'''Get the media type of the media descriptor object.
@param p_md: media descriptor object.
@return: media type.
@version: LibVLC 3.0.0 and later. See libvlc_media_type_t.
'''
f = _Cfunctions.get('libvlc_media_get_type', None) or \
_Cfunction('libvlc_media_get_type', ((1,),), None,
MediaType, Media)
return f(p_md)
def libvlc_media_slaves_add(p_md, i_type, i_priority, psz_uri):
'''Add a slave to the current media.
A slave is an external input source that may contain an additional subtitle
track (like a .srt) or an additional audio track (like a .ac3).
@note: This function must be called before the media is parsed (via
L{libvlc_media_parse_with_options}()) or before the media is played (via
L{libvlc_media_player_play}()).
@param p_md: media descriptor object.
@param i_type: subtitle or audio.
@param i_priority: from 0 (low priority) to 4 (high priority).
@param psz_uri: Uri of the slave (should contain a valid scheme).
@return: 0 on success, -1 on error.
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_media_slaves_add', None) or \
_Cfunction('libvlc_media_slaves_add', ((1,), (1,), (1,), (1,),), None,
ctypes.c_int, Media, MediaSlaveType, ctypes.c_int, ctypes.c_char_p)
return f(p_md, i_type, i_priority, psz_uri)
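# Hedged sketch (illustrative helper, not part of the generated bindings):
# attaching a subtitle slave before the media is parsed or played, as the
# note above requires. The type value 0 (subtitle) is assumed from this
# module's MediaSlaveType enumeration; the URI is a placeholder carrying a
# valid scheme.
def _example_add_subtitle(p_md, uri=b'file:///tmp/subs.srt'):
    '''Add uri as a top-priority subtitle slave; 0 on success, -1 on error.'''
    return libvlc_media_slaves_add(p_md, 0, 4, uri)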
def libvlc_media_slaves_clear(p_md):
'''Clear all slaves previously added by L{libvlc_media_slaves_add}() or
internally.
@param p_md: media descriptor object.
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_media_slaves_clear', None) or \
_Cfunction('libvlc_media_slaves_clear', ((1,),), None,
None, Media)
return f(p_md)
def libvlc_media_slaves_get(p_md, ppp_slaves):
'''Get a media descriptor's slave list
The list will contain slaves parsed by VLC or previously added by
L{libvlc_media_slaves_add}(). The typical use case of this function is to save
a list of slaves in a database for later use.
@param p_md: media descriptor object.
@param ppp_slaves: address to store an allocated array of slaves (must be freed with L{libvlc_media_slaves_release}()) [OUT].
@return: the number of slaves (zero on error).
@version: LibVLC 3.0.0 and later. See L{libvlc_media_slaves_add}.
'''
f = _Cfunctions.get('libvlc_media_slaves_get', None) or \
_Cfunction('libvlc_media_slaves_get', ((1,), (1,),), None,
ctypes.c_int, Media, ctypes.POINTER(ctypes.POINTER(MediaSlave)))
return f(p_md, ppp_slaves)
def libvlc_media_slaves_release(pp_slaves, i_count):
'''Release a media descriptor's slave list.
@param pp_slaves: slave array to release.
@param i_count: number of elements in the array.
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_media_slaves_release', None) or \
_Cfunction('libvlc_media_slaves_release', ((1,), (1,),), None,
None, ctypes.POINTER(MediaSlave), ctypes.c_int)
return f(pp_slaves, i_count)
def libvlc_media_discoverer_new(p_inst, psz_name):
'''Create a media discoverer object by name.
After this object is created, you should attach to media_list events in
order to be notified of new items discovered.
You need to call L{libvlc_media_discoverer_start}() in order to start the
discovery.
See L{libvlc_media_discoverer_media_list}
See libvlc_media_discoverer_event_manager
See L{libvlc_media_discoverer_start}.
@param p_inst: libvlc instance.
@param psz_name: service name; use L{libvlc_media_discoverer_list_get}() to get a list of the discoverer names available in this libVLC instance.
@return: media discover object or None in case of error.
@version: LibVLC 3.0.0 or later.
'''
f = _Cfunctions.get('libvlc_media_discoverer_new', None) or \
_Cfunction('libvlc_media_discoverer_new', ((1,), (1,),), class_result(MediaDiscoverer),
ctypes.c_void_p, Instance, ctypes.c_char_p)
return f(p_inst, psz_name)
def libvlc_media_discoverer_start(p_mdis):
'''Start media discovery.
To stop it, call L{libvlc_media_discoverer_stop}() or
L{libvlc_media_discoverer_release}() directly.
See L{libvlc_media_discoverer_stop}.
@param p_mdis: media discover object.
@return: -1 in case of error, 0 otherwise.
@version: LibVLC 3.0.0 or later.
'''
f = _Cfunctions.get('libvlc_media_discoverer_start', None) or \
_Cfunction('libvlc_media_discoverer_start', ((1,),), None,
ctypes.c_int, MediaDiscoverer)
return f(p_mdis)
def libvlc_media_discoverer_stop(p_mdis):
'''Stop media discovery.
See L{libvlc_media_discoverer_start}.
@param p_mdis: media discover object.
@version: LibVLC 3.0.0 or later.
'''
f = _Cfunctions.get('libvlc_media_discoverer_stop', None) or \
_Cfunction('libvlc_media_discoverer_stop', ((1,),), None,
None, MediaDiscoverer)
return f(p_mdis)
def libvlc_media_discoverer_release(p_mdis):
'''Release media discover object. If the reference count reaches 0, then
the object will be released.
@param p_mdis: media service discover object.
'''
f = _Cfunctions.get('libvlc_media_discoverer_release', None) or \
_Cfunction('libvlc_media_discoverer_release', ((1,),), None,
None, MediaDiscoverer)
return f(p_mdis)
def libvlc_media_discoverer_media_list(p_mdis):
'''Get media service discover media list.
@param p_mdis: media service discover object.
@return: list of media items.
'''
f = _Cfunctions.get('libvlc_media_discoverer_media_list', None) or \
_Cfunction('libvlc_media_discoverer_media_list', ((1,),), class_result(MediaList),
ctypes.c_void_p, MediaDiscoverer)
return f(p_mdis)
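# Hedged sketch (illustrative helper, not part of the generated bindings):
# run a named discoverer briefly with the wrappers above. The service name
# must come from libvlc_media_discoverer_list_get() (declared just below);
# it is passed here as a byte string for the raw wrapper.
def _example_discover(p_inst, name):
    '''Start discovery, grab the media list, stop, and return the list.'''
    mdis = libvlc_media_discoverer_new(p_inst, name)
    if mdis is None or libvlc_media_discoverer_start(mdis) == -1:
        return None
    ml = libvlc_media_discoverer_media_list(mdis)
    libvlc_media_discoverer_stop(mdis)
    return ml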
def libvlc_media_discoverer_is_running(p_mdis):
'''Query if media service discover object is running.
@param p_mdis: media service discover object.
@return: true if running, false if not \libvlc_return_bool.
'''
f = _Cfunctions.get('libvlc_media_discoverer_is_running', None) or \
_Cfunction('libvlc_media_discoverer_is_running', ((1,),), None,
ctypes.c_int, MediaDiscoverer)
return f(p_mdis)
def libvlc_media_discoverer_list_get(p_inst, i_cat, ppp_services):
'''Get media discoverer services by category.
@param p_inst: libvlc instance.
@param i_cat: category of services to fetch.
@param ppp_services: address to store an allocated array of media discoverer services (must be freed with L{libvlc_media_discoverer_list_release}() by the caller) [OUT].
@return: the number of media discoverer services (0 on error).
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_media_discoverer_list_get', None) or \
_Cfunction('libvlc_media_discoverer_list_get', ((1,), (1,), (1,),), None,
ctypes.c_size_t, Instance, MediaDiscovererCategory, ctypes.POINTER(ctypes.POINTER(MediaDiscovererDescription)))
return f(p_inst, i_cat, ppp_services)
def libvlc_media_discoverer_list_release(pp_services, i_count):
'''Release an array of media discoverer services.
@param pp_services: array to release.
@param i_count: number of elements in the array.
@version: LibVLC 3.0.0 and later. See L{libvlc_media_discoverer_list_get}().
'''
f = _Cfunctions.get('libvlc_media_discoverer_list_release', None) or \
_Cfunction('libvlc_media_discoverer_list_release', ((1,), (1,),), None,
None, ctypes.POINTER(MediaDiscovererDescription), ctypes.c_size_t)
return f(pp_services, i_count)
def libvlc_media_library_new(p_instance):
'''Create a new Media Library object.
@param p_instance: the libvlc instance.
@return: a new object or None on error.
'''
f = _Cfunctions.get('libvlc_media_library_new', None) or \
_Cfunction('libvlc_media_library_new', ((1,),), class_result(MediaLibrary),
ctypes.c_void_p, Instance)
return f(p_instance)
def libvlc_media_library_release(p_mlib):
'''Release media library object. This function decrements the
reference count of the media library object. If it reaches 0,
then the object will be released.
@param p_mlib: media library object.
'''
f = _Cfunctions.get('libvlc_media_library_release', None) or \
_Cfunction('libvlc_media_library_release', ((1,),), None,
None, MediaLibrary)
return f(p_mlib)
def libvlc_media_library_retain(p_mlib):
'''Retain a reference to a media library object. This function will
increment the reference counting for this object. Use
L{libvlc_media_library_release}() to decrement the reference count.
@param p_mlib: media library object.
'''
f = _Cfunctions.get('libvlc_media_library_retain', None) or \
_Cfunction('libvlc_media_library_retain', ((1,),), None,
None, MediaLibrary)
return f(p_mlib)
def libvlc_media_library_load(p_mlib):
'''Load media library.
@param p_mlib: media library object.
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_media_library_load', None) or \
_Cfunction('libvlc_media_library_load', ((1,),), None,
ctypes.c_int, MediaLibrary)
return f(p_mlib)
def libvlc_media_library_media_list(p_mlib):
'''Get media library subitems.
@param p_mlib: media library object.
@return: media list subitems.
'''
f = _Cfunctions.get('libvlc_media_library_media_list', None) or \
_Cfunction('libvlc_media_library_media_list', ((1,),), class_result(MediaList),
ctypes.c_void_p, MediaLibrary)
return f(p_mlib)
def libvlc_media_list_new(p_instance):
'''Create an empty media list.
@param p_instance: libvlc instance.
@return: empty media list, or None on error.
'''
f = _Cfunctions.get('libvlc_media_list_new', None) or \
_Cfunction('libvlc_media_list_new', ((1,),), class_result(MediaList),
ctypes.c_void_p, Instance)
return f(p_instance)
def libvlc_media_list_release(p_ml):
'''Release media list created with L{libvlc_media_list_new}().
@param p_ml: a media list created with L{libvlc_media_list_new}().
'''
f = _Cfunctions.get('libvlc_media_list_release', None) or \
_Cfunction('libvlc_media_list_release', ((1,),), None,
None, MediaList)
return f(p_ml)
def libvlc_media_list_retain(p_ml):
'''Retain reference to a media list.
@param p_ml: a media list created with L{libvlc_media_list_new}().
'''
f = _Cfunctions.get('libvlc_media_list_retain', None) or \
_Cfunction('libvlc_media_list_retain', ((1,),), None,
None, MediaList)
return f(p_ml)
def libvlc_media_list_set_media(p_ml, p_md):
'''Associate media instance with this media list instance.
If another media instance was present it will be released.
The L{libvlc_media_list_lock} should NOT be held upon entering this function.
@param p_ml: a media list instance.
@param p_md: media instance to add.
'''
f = _Cfunctions.get('libvlc_media_list_set_media', None) or \
_Cfunction('libvlc_media_list_set_media', ((1,), (1,),), None,
None, MediaList, Media)
return f(p_ml, p_md)
def libvlc_media_list_media(p_ml):
'''Get media instance from this media list instance. This action will increase
the refcount on the media instance.
The L{libvlc_media_list_lock} should NOT be held upon entering this function.
@param p_ml: a media list instance.
@return: media instance.
'''
f = _Cfunctions.get('libvlc_media_list_media', None) or \
_Cfunction('libvlc_media_list_media', ((1,),), class_result(Media),
ctypes.c_void_p, MediaList)
return f(p_ml)
def libvlc_media_list_add_media(p_ml, p_md):
'''Add media instance to media list
The L{libvlc_media_list_lock} should be held upon entering this function.
@param p_ml: a media list instance.
@param p_md: a media instance.
@return: 0 on success, -1 if the media list is read-only.
'''
f = _Cfunctions.get('libvlc_media_list_add_media', None) or \
_Cfunction('libvlc_media_list_add_media', ((1,), (1,),), None,
ctypes.c_int, MediaList, Media)
return f(p_ml, p_md)
def libvlc_media_list_insert_media(p_ml, p_md, i_pos):
'''Insert media instance in media list on a position
The L{libvlc_media_list_lock} should be held upon entering this function.
@param p_ml: a media list instance.
@param p_md: a media instance.
@param i_pos: position in array where to insert.
@return: 0 on success, -1 if the media list is read-only.
'''
f = _Cfunctions.get('libvlc_media_list_insert_media', None) or \
_Cfunction('libvlc_media_list_insert_media', ((1,), (1,), (1,),), None,
ctypes.c_int, MediaList, Media, ctypes.c_int)
return f(p_ml, p_md, i_pos)
def libvlc_media_list_remove_index(p_ml, i_pos):
'''Remove media instance from media list on a position
The L{libvlc_media_list_lock} should be held upon entering this function.
@param p_ml: a media list instance.
@param i_pos: position in array of the item to remove.
@return: 0 on success, -1 if the list is read-only or the item was not found.
'''
f = _Cfunctions.get('libvlc_media_list_remove_index', None) or \
_Cfunction('libvlc_media_list_remove_index', ((1,), (1,),), None,
ctypes.c_int, MediaList, ctypes.c_int)
return f(p_ml, i_pos)
def libvlc_media_list_count(p_ml):
'''Get count on media list items
The L{libvlc_media_list_lock} should be held upon entering this function.
@param p_ml: a media list instance.
@return: number of items in media list.
'''
f = _Cfunctions.get('libvlc_media_list_count', None) or \
_Cfunction('libvlc_media_list_count', ((1,),), None,
ctypes.c_int, MediaList)
return f(p_ml)
def libvlc_media_list_item_at_index(p_ml, i_pos):
'''List media instance in media list at a position
The L{libvlc_media_list_lock} should be held upon entering this function.
@param p_ml: a media list instance.
@param i_pos: position in array of the item to return.
@return: media instance at position i_pos, or None if not found. In case of success, L{libvlc_media_retain}() is called to increase the refcount on the media.
'''
f = _Cfunctions.get('libvlc_media_list_item_at_index', None) or \
_Cfunction('libvlc_media_list_item_at_index', ((1,), (1,),), class_result(Media),
ctypes.c_void_p, MediaList, ctypes.c_int)
return f(p_ml, i_pos)
def libvlc_media_list_index_of_item(p_ml, p_md):
'''Find index position of media instance in media list.
Warning: the function will return the first matched position.
The L{libvlc_media_list_lock} should be held upon entering this function.
@param p_ml: a media list instance.
@param p_md: media instance.
@return: position of media instance or -1 if media not found.
'''
f = _Cfunctions.get('libvlc_media_list_index_of_item', None) or \
_Cfunction('libvlc_media_list_index_of_item', ((1,), (1,),), None,
ctypes.c_int, MediaList, Media)
return f(p_ml, p_md)
def libvlc_media_list_is_readonly(p_ml):
'''This indicates if this media list is read-only from a user point of view.
@param p_ml: media list instance.
@return: 1 on readonly, 0 on readwrite \libvlc_return_bool.
'''
f = _Cfunctions.get('libvlc_media_list_is_readonly', None) or \
_Cfunction('libvlc_media_list_is_readonly', ((1,),), None,
ctypes.c_int, MediaList)
return f(p_ml)
def libvlc_media_list_lock(p_ml):
'''Get lock on media list items.
@param p_ml: a media list instance.
'''
f = _Cfunctions.get('libvlc_media_list_lock', None) or \
_Cfunction('libvlc_media_list_lock', ((1,),), None,
None, MediaList)
return f(p_ml)
def libvlc_media_list_unlock(p_ml):
'''Release lock on media list items
The L{libvlc_media_list_lock} should be held upon entering this function.
@param p_ml: a media list instance.
'''
f = _Cfunctions.get('libvlc_media_list_unlock', None) or \
_Cfunction('libvlc_media_list_unlock', ((1,),), None,
None, MediaList)
return f(p_ml)
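# Hedged sketch (illustrative helper, not part of the generated bindings):
# the lock/unlock discipline the docstrings above prescribe for reads. Each
# returned media is retained per libvlc_media_list_item_at_index(), so the
# caller owns a reference to every item collected here.
def _example_list_items(p_ml):
    '''Return all media in p_ml, read under the list lock.'''
    items = []
    libvlc_media_list_lock(p_ml)
    try:
        for i in range(libvlc_media_list_count(p_ml)):
            items.append(libvlc_media_list_item_at_index(p_ml, i))
    finally:
        libvlc_media_list_unlock(p_ml)
    return items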
def libvlc_media_list_event_manager(p_ml):
'''Get libvlc_event_manager from this media list instance.
The p_event_manager is immutable, so you don't have to hold the lock.
@param p_ml: a media list instance.
@return: libvlc_event_manager.
'''
f = _Cfunctions.get('libvlc_media_list_event_manager', None) or \
_Cfunction('libvlc_media_list_event_manager', ((1,),), class_result(EventManager),
ctypes.c_void_p, MediaList)
return f(p_ml)
def libvlc_media_list_player_new(p_instance):
'''Create new media_list_player.
@param p_instance: libvlc instance.
@return: media list player instance or None on error.
'''
f = _Cfunctions.get('libvlc_media_list_player_new', None) or \
_Cfunction('libvlc_media_list_player_new', ((1,),), class_result(MediaListPlayer),
ctypes.c_void_p, Instance)
return f(p_instance)
def libvlc_media_list_player_release(p_mlp):
'''Release a media_list_player after use
Decrement the reference count of a media player object. If the
reference count is 0, then L{libvlc_media_list_player_release}() will
release the media player object. If the media player object
has been released, then it should not be used again.
@param p_mlp: media list player instance.
'''
f = _Cfunctions.get('libvlc_media_list_player_release', None) or \
_Cfunction('libvlc_media_list_player_release', ((1,),), None,
None, MediaListPlayer)
return f(p_mlp)
def libvlc_media_list_player_retain(p_mlp):
'''Retain a reference to a media player list object. Use
L{libvlc_media_list_player_release}() to decrement reference count.
@param p_mlp: media player list object.
'''
f = _Cfunctions.get('libvlc_media_list_player_retain', None) or \
_Cfunction('libvlc_media_list_player_retain', ((1,),), None,
None, MediaListPlayer)
return f(p_mlp)
def libvlc_media_list_player_event_manager(p_mlp):
'''Return the event manager of this media_list_player.
@param p_mlp: media list player instance.
@return: the event manager.
'''
f = _Cfunctions.get('libvlc_media_list_player_event_manager', None) or \
_Cfunction('libvlc_media_list_player_event_manager', ((1,),), class_result(EventManager),
ctypes.c_void_p, MediaListPlayer)
return f(p_mlp)
def libvlc_media_list_player_set_media_player(p_mlp, p_mi):
'''Replace media player in media_list_player with this instance.
@param p_mlp: media list player instance.
@param p_mi: media player instance.
'''
f = _Cfunctions.get('libvlc_media_list_player_set_media_player', None) or \
_Cfunction('libvlc_media_list_player_set_media_player', ((1,), (1,),), None,
None, MediaListPlayer, MediaPlayer)
return f(p_mlp, p_mi)
def libvlc_media_list_player_get_media_player(p_mlp):
'''Get media player of the media_list_player instance.
@param p_mlp: media list player instance.
@return: media player instance @note the caller is responsible for releasing the returned instance.
'''
f = _Cfunctions.get('libvlc_media_list_player_get_media_player', None) or \
_Cfunction('libvlc_media_list_player_get_media_player', ((1,),), class_result(MediaPlayer),
ctypes.c_void_p, MediaListPlayer)
return f(p_mlp)
def libvlc_media_list_player_set_media_list(p_mlp, p_mlist):
'''Set the media list associated with the player.
@param p_mlp: media list player instance.
@param p_mlist: list of media.
'''
f = _Cfunctions.get('libvlc_media_list_player_set_media_list', None) or \
_Cfunction('libvlc_media_list_player_set_media_list', ((1,), (1,),), None,
None, MediaListPlayer, MediaList)
return f(p_mlp, p_mlist)
def libvlc_media_list_player_play(p_mlp):
'''Play media list.
@param p_mlp: media list player instance.
'''
f = _Cfunctions.get('libvlc_media_list_player_play', None) or \
_Cfunction('libvlc_media_list_player_play', ((1,),), None,
None, MediaListPlayer)
return f(p_mlp)
def libvlc_media_list_player_pause(p_mlp):
'''Toggle pause (or resume) media list.
@param p_mlp: media list player instance.
'''
f = _Cfunctions.get('libvlc_media_list_player_pause', None) or \
_Cfunction('libvlc_media_list_player_pause', ((1,),), None,
None, MediaListPlayer)
return f(p_mlp)
def libvlc_media_list_player_is_playing(p_mlp):
'''Is media list playing?
@param p_mlp: media list player instance.
@return: true for playing and false for not playing \libvlc_return_bool.
'''
f = _Cfunctions.get('libvlc_media_list_player_is_playing', None) or \
_Cfunction('libvlc_media_list_player_is_playing', ((1,),), None,
ctypes.c_int, MediaListPlayer)
return f(p_mlp)
def libvlc_media_list_player_get_state(p_mlp):
'''Get current libvlc_state of media list player.
@param p_mlp: media list player instance.
@return: libvlc_state_t for media list player.
'''
f = _Cfunctions.get('libvlc_media_list_player_get_state', None) or \
_Cfunction('libvlc_media_list_player_get_state', ((1,),), None,
State, MediaListPlayer)
return f(p_mlp)
def libvlc_media_list_player_play_item_at_index(p_mlp, i_index):
'''Play media list item at position index.
@param p_mlp: media list player instance.
@param i_index: index in media list to play.
@return: 0 upon success, -1 if the item wasn't found.
'''
f = _Cfunctions.get('libvlc_media_list_player_play_item_at_index', None) or \
_Cfunction('libvlc_media_list_player_play_item_at_index', ((1,), (1,),), None,
ctypes.c_int, MediaListPlayer, ctypes.c_int)
return f(p_mlp, i_index)
def libvlc_media_list_player_play_item(p_mlp, p_md):
'''Play the given media item.
@param p_mlp: media list player instance.
@param p_md: the media instance.
@return: 0 upon success, -1 if the media is not part of the media list.
'''
f = _Cfunctions.get('libvlc_media_list_player_play_item', None) or \
_Cfunction('libvlc_media_list_player_play_item', ((1,), (1,),), None,
ctypes.c_int, MediaListPlayer, Media)
return f(p_mlp, p_md)
def libvlc_media_list_player_stop(p_mlp):
'''Stop playing media list.
@param p_mlp: media list player instance.
'''
f = _Cfunctions.get('libvlc_media_list_player_stop', None) or \
_Cfunction('libvlc_media_list_player_stop', ((1,),), None,
None, MediaListPlayer)
return f(p_mlp)
def libvlc_media_list_player_next(p_mlp):
'''Play next item from media list.
@param p_mlp: media list player instance.
    @return: 0 upon success, -1 if there is no next item.
'''
f = _Cfunctions.get('libvlc_media_list_player_next', None) or \
_Cfunction('libvlc_media_list_player_next', ((1,),), None,
ctypes.c_int, MediaListPlayer)
return f(p_mlp)
def libvlc_media_list_player_previous(p_mlp):
'''Play previous item from media list.
@param p_mlp: media list player instance.
    @return: 0 upon success, -1 if there is no previous item.
'''
f = _Cfunctions.get('libvlc_media_list_player_previous', None) or \
_Cfunction('libvlc_media_list_player_previous', ((1,),), None,
ctypes.c_int, MediaListPlayer)
return f(p_mlp)
def libvlc_media_list_player_set_playback_mode(p_mlp, e_mode):
'''Sets the playback mode for the playlist.
@param p_mlp: media list player instance.
@param e_mode: playback mode specification.
'''
f = _Cfunctions.get('libvlc_media_list_player_set_playback_mode', None) or \
_Cfunction('libvlc_media_list_player_set_playback_mode', ((1,), (1,),), None,
None, MediaListPlayer, PlaybackMode)
return f(p_mlp, e_mode)
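# --- Editor's illustrative sketch (not part of the generated bindings) ---
# Minimal example tying the media list player wrappers above together.
# It assumes libvlc_new(), libvlc_media_list_new(), libvlc_media_new_path(),
# libvlc_media_list_add_media() and the PlaybackMode enum are defined earlier
# in this module, and that a libvlc runtime is installed; the helper name
# _sketch_loop_playlist is hypothetical.
def _sketch_loop_playlist(paths):
    inst = libvlc_new(0, None)
    mlist = libvlc_media_list_new(inst)
    for p in paths:  # each path must be bytes (ctypes.c_char_p)
        libvlc_media_list_add_media(mlist, libvlc_media_new_path(inst, p))
    mlp = libvlc_media_list_player_new(inst)
    libvlc_media_list_player_set_media_list(mlp, mlist)
    libvlc_media_list_player_set_playback_mode(mlp, PlaybackMode.loop)
    libvlc_media_list_player_play(mlp)
    return mlp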
def libvlc_media_player_new(p_libvlc_instance):
'''Create an empty Media Player object.
@param p_libvlc_instance: the libvlc instance in which the Media Player should be created.
@return: a new media player object, or None on error.
'''
f = _Cfunctions.get('libvlc_media_player_new', None) or \
_Cfunction('libvlc_media_player_new', ((1,),), class_result(MediaPlayer),
ctypes.c_void_p, Instance)
return f(p_libvlc_instance)
def libvlc_media_player_new_from_media(p_md):
'''Create a Media Player object from a Media.
@param p_md: the media. Afterwards the p_md can be safely destroyed.
@return: a new media player object, or None on error.
'''
f = _Cfunctions.get('libvlc_media_player_new_from_media', None) or \
_Cfunction('libvlc_media_player_new_from_media', ((1,),), class_result(MediaPlayer),
ctypes.c_void_p, Media)
return f(p_md)
def libvlc_media_player_release(p_mi):
    '''Release a media_player after use.
Decrement the reference count of a media player object. If the
reference count is 0, then L{libvlc_media_player_release}() will
release the media player object. If the media player object
has been released, then it should not be used again.
@param p_mi: the Media Player to free.
'''
f = _Cfunctions.get('libvlc_media_player_release', None) or \
_Cfunction('libvlc_media_player_release', ((1,),), None,
None, MediaPlayer)
return f(p_mi)
def libvlc_media_player_retain(p_mi):
'''Retain a reference to a media player object. Use
L{libvlc_media_player_release}() to decrement reference count.
@param p_mi: media player object.
'''
f = _Cfunctions.get('libvlc_media_player_retain', None) or \
_Cfunction('libvlc_media_player_retain', ((1,),), None,
None, MediaPlayer)
return f(p_mi)
def libvlc_media_player_set_media(p_mi, p_md):
'''Set the media that will be used by the media_player. If any,
previous md will be released.
@param p_mi: the Media Player.
@param p_md: the Media. Afterwards the p_md can be safely destroyed.
'''
f = _Cfunctions.get('libvlc_media_player_set_media', None) or \
_Cfunction('libvlc_media_player_set_media', ((1,), (1,),), None,
None, MediaPlayer, Media)
return f(p_mi, p_md)
def libvlc_media_player_get_media(p_mi):
'''Get the media used by the media_player.
@param p_mi: the Media Player.
@return: the media associated with p_mi, or None if no media is associated.
'''
f = _Cfunctions.get('libvlc_media_player_get_media', None) or \
_Cfunction('libvlc_media_player_get_media', ((1,),), class_result(Media),
ctypes.c_void_p, MediaPlayer)
return f(p_mi)
def libvlc_media_player_event_manager(p_mi):
'''Get the Event Manager from which the media player send event.
@param p_mi: the Media Player.
@return: the event manager associated with p_mi.
'''
f = _Cfunctions.get('libvlc_media_player_event_manager', None) or \
_Cfunction('libvlc_media_player_event_manager', ((1,),), class_result(EventManager),
ctypes.c_void_p, MediaPlayer)
return f(p_mi)
def libvlc_media_player_is_playing(p_mi):
    '''Is the media player playing?
@param p_mi: the Media Player.
@return: 1 if the media player is playing, 0 otherwise \libvlc_return_bool.
'''
f = _Cfunctions.get('libvlc_media_player_is_playing', None) or \
_Cfunction('libvlc_media_player_is_playing', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_media_player_play(p_mi):
'''Play.
@param p_mi: the Media Player.
@return: 0 if playback started (and was already started), or -1 on error.
'''
f = _Cfunctions.get('libvlc_media_player_play', None) or \
_Cfunction('libvlc_media_player_play', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
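# --- Editor's illustrative sketch (not part of the generated bindings) ---
# Shortest path from a file to playback with the wrappers above. Assumes
# libvlc_new(), libvlc_media_new_path() and libvlc_media_release() are
# defined earlier in this module; the helper name is hypothetical.
def _sketch_play_file(path_bytes):
    inst = libvlc_new(0, None)
    media = libvlc_media_new_path(inst, path_bytes)  # bytes, e.g. b'/tmp/a.mp4'
    mp = libvlc_media_player_new_from_media(media)
    libvlc_media_release(media)  # the player keeps its own reference
    if libvlc_media_player_play(mp) == -1:
        raise RuntimeError('playback could not be started')
    return mp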
def libvlc_media_player_set_pause(mp, do_pause):
'''Pause or resume (no effect if there is no media).
@param mp: the Media Player.
@param do_pause: play/resume if zero, pause if non-zero.
@version: LibVLC 1.1.1 or later.
'''
f = _Cfunctions.get('libvlc_media_player_set_pause', None) or \
_Cfunction('libvlc_media_player_set_pause', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_int)
return f(mp, do_pause)
def libvlc_media_player_pause(p_mi):
'''Toggle pause (no effect if there is no media).
@param p_mi: the Media Player.
'''
f = _Cfunctions.get('libvlc_media_player_pause', None) or \
_Cfunction('libvlc_media_player_pause', ((1,),), None,
None, MediaPlayer)
return f(p_mi)
def libvlc_media_player_stop(p_mi):
'''Stop (no effect if there is no media).
@param p_mi: the Media Player.
'''
f = _Cfunctions.get('libvlc_media_player_stop', None) or \
_Cfunction('libvlc_media_player_stop', ((1,),), None,
None, MediaPlayer)
return f(p_mi)
def libvlc_media_player_set_renderer(p_mi, p_item):
'''Set a renderer to the media player
@note: must be called before the first call of L{libvlc_media_player_play}() to
take effect.
See L{libvlc_renderer_discoverer_new}.
@param p_mi: the Media Player.
@param p_item: an item discovered by L{libvlc_renderer_discoverer_start}().
@return: 0 on success, -1 on error.
@version: LibVLC 3.0.0 or later.
'''
f = _Cfunctions.get('libvlc_media_player_set_renderer', None) or \
_Cfunction('libvlc_media_player_set_renderer', ((1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_void_p)
return f(p_mi, p_item)
def libvlc_video_set_callbacks(mp, lock, unlock, display, opaque):
'''Set callbacks and private data to render decoded video to a custom area
in memory.
Use L{libvlc_video_set_format}() or L{libvlc_video_set_format_callbacks}()
to configure the decoded format.
@warning: Rendering video into custom memory buffers is considerably less
efficient than rendering in a custom window as normal.
    For optimal performance, VLC media player renders into a custom window, and
    does not use this function and associated callbacks. It is B{highly
    recommended} that other LibVLC-based applications do likewise.
    To embed video in a window, use L{libvlc_media_player_set_xwindow}() or equivalent
    depending on the operating system.
    If window embedding does not fit the application use case, then a custom
    LibVLC video output display plugin is required to maintain optimal video
    rendering performance.
    The following limitations affect performance:
    - Hardware video decoding acceleration will either be disabled completely,
    or require (relatively slow) copy from video/DSP memory to main memory.
    - Sub-pictures (subtitles, on-screen display, etc.) must be blended into the
    main picture by the CPU instead of the GPU.
    - Depending on the video format, pixel format conversion, picture scaling,
    cropping and/or picture re-orientation must be performed by the CPU
instead of the GPU.
- Memory copying is required between LibVLC reference picture buffers and
application buffers (between lock and unlock callbacks).
@param mp: the media player.
@param lock: callback to lock video memory (must not be None).
@param unlock: callback to unlock video memory (or None if not needed).
@param display: callback to display video (or None if not needed).
@param opaque: private pointer for the three callbacks (as first parameter).
@version: LibVLC 1.1.1 or later.
'''
f = _Cfunctions.get('libvlc_video_set_callbacks', None) or \
_Cfunction('libvlc_video_set_callbacks', ((1,), (1,), (1,), (1,), (1,),), None,
None, MediaPlayer, VideoLockCb, VideoUnlockCb, VideoDisplayCb, ctypes.c_void_p)
return f(mp, lock, unlock, display, opaque)
def libvlc_video_set_format(mp, chroma, width, height, pitch):
'''Set decoded video chroma and dimensions.
This only works in combination with L{libvlc_video_set_callbacks}(),
and is mutually exclusive with L{libvlc_video_set_format_callbacks}().
@param mp: the media player.
@param chroma: a four-characters string identifying the chroma (e.g. "RV32" or "YUYV").
@param width: pixel width.
@param height: pixel height.
@param pitch: line pitch (in bytes).
@version: LibVLC 1.1.1 or later.
@bug: All pixel planes are expected to have the same pitch. To use the YCbCr color space with chrominance subsampling, consider using L{libvlc_video_set_format_callbacks}() instead.
'''
f = _Cfunctions.get('libvlc_video_set_format', None) or \
_Cfunction('libvlc_video_set_format', ((1,), (1,), (1,), (1,), (1,),), None,
None, MediaPlayer, ctypes.c_char_p, ctypes.c_uint, ctypes.c_uint, ctypes.c_uint)
return f(mp, chroma, width, height, pitch)
def libvlc_video_set_format_callbacks(mp, setup, cleanup):
'''Set decoded video chroma and dimensions. This only works in combination with
L{libvlc_video_set_callbacks}().
@param mp: the media player.
@param setup: callback to select the video format (cannot be None).
@param cleanup: callback to release any allocated resources (or None).
@version: LibVLC 2.0.0 or later.
'''
f = _Cfunctions.get('libvlc_video_set_format_callbacks', None) or \
_Cfunction('libvlc_video_set_format_callbacks', ((1,), (1,), (1,),), None,
None, MediaPlayer, VideoFormatCb, VideoCleanupCb)
return f(mp, setup, cleanup)
def libvlc_media_player_set_nsobject(p_mi, drawable):
'''Set the NSView handler where the media player should render its video output.
Use the vout called "macosx".
The drawable is an NSObject that follow the VLCOpenGLVideoViewEmbedding
protocol:
@code.m
\@protocol VLCOpenGLVideoViewEmbedding <NSObject>
- (void)addVoutSubview:(NSView *)view;
- (void)removeVoutSubview:(NSView *)view;
\@end
@endcode
Or it can be an NSView object.
If you want to use it along with Qt see the QMacCocoaViewContainer. Then
the following code should work:
@code.mm
NSView *video = [[NSView alloc] init];
QMacCocoaViewContainer *container = new QMacCocoaViewContainer(video, parent);
L{libvlc_media_player_set_nsobject}(mp, video);
[video release];
@endcode
You can find a live example in VLCVideoView in VLCKit.framework.
@param p_mi: the Media Player.
@param drawable: the drawable that is either an NSView or an object following the VLCOpenGLVideoViewEmbedding protocol.
'''
f = _Cfunctions.get('libvlc_media_player_set_nsobject', None) or \
_Cfunction('libvlc_media_player_set_nsobject', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_void_p)
return f(p_mi, drawable)
def libvlc_media_player_get_nsobject(p_mi):
'''Get the NSView handler previously set with L{libvlc_media_player_set_nsobject}().
@param p_mi: the Media Player.
    @return: the NSView handler or 0 if none were set.
'''
f = _Cfunctions.get('libvlc_media_player_get_nsobject', None) or \
_Cfunction('libvlc_media_player_get_nsobject', ((1,),), None,
ctypes.c_void_p, MediaPlayer)
return f(p_mi)
def libvlc_media_player_set_xwindow(p_mi, drawable):
'''Set an X Window System drawable where the media player should render its
video output. The call takes effect when the playback starts. If it is
already started, it might need to be stopped before changes apply.
If LibVLC was built without X11 output support, then this function has no
    effect.
By default, LibVLC will capture input events on the video rendering area.
Use L{libvlc_video_set_mouse_input}() and L{libvlc_video_set_key_input}() to
disable that and deliver events to the parent window / to the application
instead. By design, the X11 protocol delivers input events to only one
recipient.
@warning
The application must call the XInitThreads() function from Xlib before
L{libvlc_new}(), and before any call to XOpenDisplay() directly or via any
other library. Failure to call XInitThreads() will seriously impede LibVLC
performance. Calling XOpenDisplay() before XInitThreads() will eventually
crash the process. That is a limitation of Xlib.
@param p_mi: media player.
@param drawable: X11 window ID @note The specified identifier must correspond to an existing Input/Output class X11 window. Pixmaps are B{not} currently supported. The default X11 server is assumed, i.e. that specified in the DISPLAY environment variable. @warning LibVLC can deal with invalid X11 handle errors, however some display drivers (EGL, GLX, VA and/or VDPAU) can unfortunately not. Thus the window handle must remain valid until playback is stopped, otherwise the process may abort or crash.
@bug No more than one window handle per media player instance can be specified. If the media has multiple simultaneously active video tracks, extra tracks will be rendered into external windows beyond the control of the application.
'''
f = _Cfunctions.get('libvlc_media_player_set_xwindow', None) or \
_Cfunction('libvlc_media_player_set_xwindow', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_uint32)
return f(p_mi, drawable)
def libvlc_media_player_get_xwindow(p_mi):
'''Get the X Window System window identifier previously set with
L{libvlc_media_player_set_xwindow}(). Note that this will return the identifier
even if VLC is not currently using it (for instance if it is playing an
audio-only input).
@param p_mi: the Media Player.
    @return: an X window ID, or 0 if none were set.
'''
f = _Cfunctions.get('libvlc_media_player_get_xwindow', None) or \
_Cfunction('libvlc_media_player_get_xwindow', ((1,),), None,
ctypes.c_uint32, MediaPlayer)
return f(p_mi)
def libvlc_media_player_set_hwnd(p_mi, drawable):
'''Set a Win32/Win64 API window handle (HWND) where the media player should
render its video output. If LibVLC was built without Win32/Win64 API output
    support, then this has no effect.
@param p_mi: the Media Player.
@param drawable: windows handle of the drawable.
'''
f = _Cfunctions.get('libvlc_media_player_set_hwnd', None) or \
_Cfunction('libvlc_media_player_set_hwnd', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_void_p)
return f(p_mi, drawable)
def libvlc_media_player_get_hwnd(p_mi):
'''Get the Windows API window handle (HWND) previously set with
L{libvlc_media_player_set_hwnd}(). The handle will be returned even if LibVLC
is not currently outputting any video to it.
@param p_mi: the Media Player.
@return: a window handle or None if there are none.
'''
f = _Cfunctions.get('libvlc_media_player_get_hwnd', None) or \
_Cfunction('libvlc_media_player_get_hwnd', ((1,),), None,
ctypes.c_void_p, MediaPlayer)
return f(p_mi)
def libvlc_media_player_set_android_context(p_mi, p_awindow_handler):
'''Set the android context.
@param p_mi: the media player.
@param p_awindow_handler: org.videolan.libvlc.IAWindowNativeHandler jobject implemented by the org.videolan.libvlc.MediaPlayer class from the libvlc-android project.
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_media_player_set_android_context', None) or \
_Cfunction('libvlc_media_player_set_android_context', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_void_p)
return f(p_mi, p_awindow_handler)
def libvlc_media_player_set_evas_object(p_mi, p_evas_object):
'''Set the EFL Evas Object.
@param p_mi: the media player.
@param p_evas_object: a valid EFL Evas Object (Evas_Object).
@return: -1 if an error was detected, 0 otherwise.
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_media_player_set_evas_object', None) or \
_Cfunction('libvlc_media_player_set_evas_object', ((1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_void_p)
return f(p_mi, p_evas_object)
def libvlc_audio_set_callbacks(mp, play, pause, resume, flush, drain, opaque):
'''Sets callbacks and private data for decoded audio.
Use L{libvlc_audio_set_format}() or L{libvlc_audio_set_format_callbacks}()
to configure the decoded audio format.
@note: The audio callbacks override any other audio output mechanism.
If the callbacks are set, LibVLC will B{not} output audio in any way.
@param mp: the media player.
@param play: callback to play audio samples (must not be None).
@param pause: callback to pause playback (or None to ignore).
@param resume: callback to resume playback (or None to ignore).
@param flush: callback to flush audio buffers (or None to ignore).
@param drain: callback to drain audio buffers (or None to ignore).
@param opaque: private pointer for the audio callbacks (as first parameter).
@version: LibVLC 2.0.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_set_callbacks', None) or \
_Cfunction('libvlc_audio_set_callbacks', ((1,), (1,), (1,), (1,), (1,), (1,), (1,),), None,
None, MediaPlayer, AudioPlayCb, AudioPauseCb, AudioResumeCb, AudioFlushCb, AudioDrainCb, ctypes.c_void_p)
return f(mp, play, pause, resume, flush, drain, opaque)
def libvlc_audio_set_volume_callback(mp, set_volume):
'''Set callbacks and private data for decoded audio. This only works in
combination with L{libvlc_audio_set_callbacks}().
Use L{libvlc_audio_set_format}() or L{libvlc_audio_set_format_callbacks}()
to configure the decoded audio format.
@param mp: the media player.
@param set_volume: callback to apply audio volume, or None to apply volume in software.
@version: LibVLC 2.0.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_set_volume_callback', None) or \
_Cfunction('libvlc_audio_set_volume_callback', ((1,), (1,),), None,
None, MediaPlayer, AudioSetVolumeCb)
return f(mp, set_volume)
def libvlc_audio_set_format_callbacks(mp, setup, cleanup):
'''Sets decoded audio format via callbacks.
This only works in combination with L{libvlc_audio_set_callbacks}().
@param mp: the media player.
@param setup: callback to select the audio format (cannot be None).
@param cleanup: callback to release any allocated resources (or None).
@version: LibVLC 2.0.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_set_format_callbacks', None) or \
_Cfunction('libvlc_audio_set_format_callbacks', ((1,), (1,), (1,),), None,
None, MediaPlayer, AudioSetupCb, AudioCleanupCb)
return f(mp, setup, cleanup)
def libvlc_audio_set_format(mp, format, rate, channels):
'''Sets a fixed decoded audio format.
This only works in combination with L{libvlc_audio_set_callbacks}(),
and is mutually exclusive with L{libvlc_audio_set_format_callbacks}().
@param mp: the media player.
@param format: a four-characters string identifying the sample format (e.g. "S16N" or "FL32").
@param rate: sample rate (expressed in Hz).
@param channels: channels count.
@version: LibVLC 2.0.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_set_format', None) or \
_Cfunction('libvlc_audio_set_format', ((1,), (1,), (1,), (1,),), None,
None, MediaPlayer, ctypes.c_char_p, ctypes.c_uint, ctypes.c_uint)
return f(mp, format, rate, channels)
def libvlc_media_player_get_length(p_mi):
'''Get the current movie length (in ms).
@param p_mi: the Media Player.
@return: the movie length (in ms), or -1 if there is no media.
'''
f = _Cfunctions.get('libvlc_media_player_get_length', None) or \
_Cfunction('libvlc_media_player_get_length', ((1,),), None,
ctypes.c_longlong, MediaPlayer)
return f(p_mi)
def libvlc_media_player_get_time(p_mi):
'''Get the current movie time (in ms).
@param p_mi: the Media Player.
@return: the movie time (in ms), or -1 if there is no media.
'''
f = _Cfunctions.get('libvlc_media_player_get_time', None) or \
_Cfunction('libvlc_media_player_get_time', ((1,),), None,
ctypes.c_longlong, MediaPlayer)
return f(p_mi)
def libvlc_media_player_set_time(p_mi, i_time):
'''Set the movie time (in ms). This has no effect if no media is being played.
Not all formats and protocols support this.
@param p_mi: the Media Player.
@param i_time: the movie time (in ms).
'''
f = _Cfunctions.get('libvlc_media_player_set_time', None) or \
_Cfunction('libvlc_media_player_set_time', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_longlong)
return f(p_mi, i_time)
def libvlc_media_player_get_position(p_mi):
    '''Get movie position as a fraction between 0.0 and 1.0.
    @param p_mi: the Media Player.
    @return: movie position, or -1.0 in case of error.
'''
f = _Cfunctions.get('libvlc_media_player_get_position', None) or \
_Cfunction('libvlc_media_player_get_position', ((1,),), None,
ctypes.c_float, MediaPlayer)
return f(p_mi)
def libvlc_media_player_set_position(p_mi, f_pos):
    '''Set movie position as a fraction between 0.0 and 1.0.
This has no effect if playback is not enabled.
This might not work depending on the underlying input format and protocol.
@param p_mi: the Media Player.
@param f_pos: the position.
'''
f = _Cfunctions.get('libvlc_media_player_set_position', None) or \
_Cfunction('libvlc_media_player_set_position', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_float)
return f(p_mi, f_pos)
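# --- Editor's illustrative sketch (not part of the generated bindings) ---
# Seeking with the time/position wrappers above: prefer the millisecond API
# when the media length is known, and fall back to the fractional position
# otherwise. The helper name is hypothetical.
def _sketch_seek_halfway(p_mi):
    length = libvlc_media_player_get_length(p_mi)  # in ms, -1 if no media
    if length > 0:
        libvlc_media_player_set_time(p_mi, length // 2)
    else:
        libvlc_media_player_set_position(p_mi, 0.5)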
def libvlc_media_player_set_chapter(p_mi, i_chapter):
'''Set movie chapter (if applicable).
@param p_mi: the Media Player.
@param i_chapter: chapter number to play.
'''
f = _Cfunctions.get('libvlc_media_player_set_chapter', None) or \
_Cfunction('libvlc_media_player_set_chapter', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_int)
return f(p_mi, i_chapter)
def libvlc_media_player_get_chapter(p_mi):
'''Get movie chapter.
@param p_mi: the Media Player.
@return: chapter number currently playing, or -1 if there is no media.
'''
f = _Cfunctions.get('libvlc_media_player_get_chapter', None) or \
_Cfunction('libvlc_media_player_get_chapter', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_media_player_get_chapter_count(p_mi):
'''Get movie chapter count.
@param p_mi: the Media Player.
@return: number of chapters in movie, or -1.
'''
f = _Cfunctions.get('libvlc_media_player_get_chapter_count', None) or \
_Cfunction('libvlc_media_player_get_chapter_count', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_media_player_will_play(p_mi):
'''Is the player able to play.
@param p_mi: the Media Player.
@return: boolean \libvlc_return_bool.
'''
f = _Cfunctions.get('libvlc_media_player_will_play', None) or \
_Cfunction('libvlc_media_player_will_play', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_media_player_get_chapter_count_for_title(p_mi, i_title):
'''Get title chapter count.
@param p_mi: the Media Player.
@param i_title: title.
@return: number of chapters in title, or -1.
'''
f = _Cfunctions.get('libvlc_media_player_get_chapter_count_for_title', None) or \
_Cfunction('libvlc_media_player_get_chapter_count_for_title', ((1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_int)
return f(p_mi, i_title)
def libvlc_media_player_set_title(p_mi, i_title):
'''Set movie title.
@param p_mi: the Media Player.
@param i_title: title number to play.
'''
f = _Cfunctions.get('libvlc_media_player_set_title', None) or \
_Cfunction('libvlc_media_player_set_title', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_int)
return f(p_mi, i_title)
def libvlc_media_player_get_title(p_mi):
'''Get movie title.
@param p_mi: the Media Player.
@return: title number currently playing, or -1.
'''
f = _Cfunctions.get('libvlc_media_player_get_title', None) or \
_Cfunction('libvlc_media_player_get_title', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_media_player_get_title_count(p_mi):
'''Get movie title count.
@param p_mi: the Media Player.
@return: title number count, or -1.
'''
f = _Cfunctions.get('libvlc_media_player_get_title_count', None) or \
_Cfunction('libvlc_media_player_get_title_count', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_media_player_previous_chapter(p_mi):
'''Set previous chapter (if applicable).
@param p_mi: the Media Player.
'''
f = _Cfunctions.get('libvlc_media_player_previous_chapter', None) or \
_Cfunction('libvlc_media_player_previous_chapter', ((1,),), None,
None, MediaPlayer)
return f(p_mi)
def libvlc_media_player_next_chapter(p_mi):
'''Set next chapter (if applicable).
@param p_mi: the Media Player.
'''
f = _Cfunctions.get('libvlc_media_player_next_chapter', None) or \
_Cfunction('libvlc_media_player_next_chapter', ((1,),), None,
None, MediaPlayer)
return f(p_mi)
def libvlc_media_player_get_rate(p_mi):
'''Get the requested movie play rate.
@warning: Depending on the underlying media, the requested rate may be
different from the real playback rate.
@param p_mi: the Media Player.
@return: movie play rate.
'''
f = _Cfunctions.get('libvlc_media_player_get_rate', None) or \
_Cfunction('libvlc_media_player_get_rate', ((1,),), None,
ctypes.c_float, MediaPlayer)
return f(p_mi)
def libvlc_media_player_set_rate(p_mi, rate):
'''Set movie play rate.
@param p_mi: the Media Player.
@param rate: movie play rate to set.
@return: -1 if an error was detected, 0 otherwise (but even then, it might not actually work depending on the underlying media protocol).
'''
f = _Cfunctions.get('libvlc_media_player_set_rate', None) or \
_Cfunction('libvlc_media_player_set_rate', ((1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_float)
return f(p_mi, rate)
def libvlc_media_player_get_state(p_mi):
'''Get current movie state.
@param p_mi: the Media Player.
@return: the current state of the media player (playing, paused, ...) See libvlc_state_t.
'''
f = _Cfunctions.get('libvlc_media_player_get_state', None) or \
_Cfunction('libvlc_media_player_get_state', ((1,),), None,
State, MediaPlayer)
return f(p_mi)
def libvlc_media_player_has_vout(p_mi):
'''How many video outputs does this media player have?
@param p_mi: the media player.
@return: the number of video outputs.
'''
f = _Cfunctions.get('libvlc_media_player_has_vout', None) or \
_Cfunction('libvlc_media_player_has_vout', ((1,),), None,
ctypes.c_uint, MediaPlayer)
return f(p_mi)
def libvlc_media_player_is_seekable(p_mi):
'''Is this media player seekable?
@param p_mi: the media player.
@return: true if the media player can seek \libvlc_return_bool.
'''
f = _Cfunctions.get('libvlc_media_player_is_seekable', None) or \
_Cfunction('libvlc_media_player_is_seekable', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_media_player_can_pause(p_mi):
'''Can this media player be paused?
@param p_mi: the media player.
@return: true if the media player can pause \libvlc_return_bool.
'''
f = _Cfunctions.get('libvlc_media_player_can_pause', None) or \
_Cfunction('libvlc_media_player_can_pause', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_media_player_program_scrambled(p_mi):
'''Check if the current program is scrambled.
@param p_mi: the media player.
@return: true if the current program is scrambled \libvlc_return_bool.
@version: LibVLC 2.2.0 or later.
'''
f = _Cfunctions.get('libvlc_media_player_program_scrambled', None) or \
_Cfunction('libvlc_media_player_program_scrambled', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_media_player_next_frame(p_mi):
'''Display the next frame (if supported).
@param p_mi: the media player.
'''
f = _Cfunctions.get('libvlc_media_player_next_frame', None) or \
_Cfunction('libvlc_media_player_next_frame', ((1,),), None,
None, MediaPlayer)
return f(p_mi)
def libvlc_media_player_navigate(p_mi, navigate):
'''Navigate through DVD Menu.
@param p_mi: the Media Player.
@param navigate: the Navigation mode.
@version: libVLC 2.0.0 or later.
'''
f = _Cfunctions.get('libvlc_media_player_navigate', None) or \
_Cfunction('libvlc_media_player_navigate', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_uint)
return f(p_mi, navigate)
def libvlc_media_player_set_video_title_display(p_mi, position, timeout):
'''Set if, and how, the video title will be shown when media is played.
@param p_mi: the media player.
@param position: position at which to display the title, or libvlc_position_disable to prevent the title from being displayed.
@param timeout: title display timeout in milliseconds (ignored if libvlc_position_disable).
@version: libVLC 2.1.0 or later.
'''
f = _Cfunctions.get('libvlc_media_player_set_video_title_display', None) or \
_Cfunction('libvlc_media_player_set_video_title_display', ((1,), (1,), (1,),), None,
None, MediaPlayer, Position, ctypes.c_int)
return f(p_mi, position, timeout)
def libvlc_media_player_add_slave(p_mi, i_type, psz_uri, b_select):
'''Add a slave to the current media player.
@note: If the player is playing, the slave will be added directly. This call
will also update the slave list of the attached L{Media}.
@param p_mi: the media player.
@param i_type: subtitle or audio.
    @param psz_uri: URI of the slave (should contain a valid scheme).
@param b_select: True if this slave should be selected when it's loaded.
@return: 0 on success, -1 on error.
@version: LibVLC 3.0.0 and later. See L{libvlc_media_slaves_add}.
'''
f = _Cfunctions.get('libvlc_media_player_add_slave', None) or \
_Cfunction('libvlc_media_player_add_slave', ((1,), (1,), (1,), (1,),), None,
ctypes.c_int, MediaPlayer, MediaSlaveType, ctypes.c_char_p, ctypes.c_bool)
return f(p_mi, i_type, psz_uri, b_select)
def libvlc_track_description_list_release(p_track_description):
'''Release (free) L{TrackDescription}.
@param p_track_description: the structure to release.
'''
f = _Cfunctions.get('libvlc_track_description_list_release', None) or \
_Cfunction('libvlc_track_description_list_release', ((1,),), None,
None, ctypes.POINTER(TrackDescription))
return f(p_track_description)
def libvlc_toggle_fullscreen(p_mi):
'''Toggle fullscreen status on non-embedded video outputs.
@warning: The same limitations applies to this function
as to L{libvlc_set_fullscreen}().
@param p_mi: the media player.
'''
f = _Cfunctions.get('libvlc_toggle_fullscreen', None) or \
_Cfunction('libvlc_toggle_fullscreen', ((1,),), None,
None, MediaPlayer)
return f(p_mi)
def libvlc_set_fullscreen(p_mi, b_fullscreen):
'''Enable or disable fullscreen.
    @warning: With most window managers, only a top-level window can be in
full-screen mode. Hence, this function will not operate properly if
L{libvlc_media_player_set_xwindow}() was used to embed the video in a
non-top-level window. In that case, the embedding window must be reparented
to the root window B{before} fullscreen mode is enabled. You will want
to reparent it back to its normal parent when disabling fullscreen.
@param p_mi: the media player.
@param b_fullscreen: boolean for fullscreen status.
'''
f = _Cfunctions.get('libvlc_set_fullscreen', None) or \
_Cfunction('libvlc_set_fullscreen', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_int)
return f(p_mi, b_fullscreen)
def libvlc_get_fullscreen(p_mi):
'''Get current fullscreen status.
@param p_mi: the media player.
@return: the fullscreen status (boolean) \libvlc_return_bool.
'''
f = _Cfunctions.get('libvlc_get_fullscreen', None) or \
_Cfunction('libvlc_get_fullscreen', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_video_set_key_input(p_mi, on):
'''Enable or disable key press events handling, according to the LibVLC hotkeys
configuration. By default and for historical reasons, keyboard events are
handled by the LibVLC video widget.
@note: On X11, there can be only one subscriber for key press and mouse
click events per window. If your application has subscribed to those events
for the X window ID of the video widget, then LibVLC will not be able to
handle key presses and mouse clicks in any case.
@warning: This function is only implemented for X11 and Win32 at the moment.
@param p_mi: the media player.
@param on: true to handle key press events, false to ignore them.
'''
f = _Cfunctions.get('libvlc_video_set_key_input', None) or \
_Cfunction('libvlc_video_set_key_input', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_uint)
return f(p_mi, on)
def libvlc_video_set_mouse_input(p_mi, on):
'''Enable or disable mouse click events handling. By default, those events are
handled. This is needed for DVD menus to work, as well as a few video
filters such as "puzzle".
See L{libvlc_video_set_key_input}().
@warning: This function is only implemented for X11 and Win32 at the moment.
@param p_mi: the media player.
@param on: true to handle mouse click events, false to ignore them.
'''
f = _Cfunctions.get('libvlc_video_set_mouse_input', None) or \
_Cfunction('libvlc_video_set_mouse_input', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_uint)
return f(p_mi, on)
def libvlc_video_get_size(p_mi, num):
'''Get the pixel dimensions of a video.
@param p_mi: media player.
    @param num: number of the video (starting from 0, and most commonly 0).
@return: px pixel width, py pixel height.
'''
f = _Cfunctions.get('libvlc_video_get_size', None) or \
_Cfunction('libvlc_video_get_size', ((1,), (1,), (2,), (2,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_uint, ctypes.POINTER(ctypes.c_uint), ctypes.POINTER(ctypes.c_uint))
return f(p_mi, num)
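# --- Editor's illustrative sketch (not part of the generated bindings) ---
# The (2,) flags above mark px/py as ctypes output parameters, so the Python
# wrapper returns them directly instead of taking pointer arguments. This
# assumes an active video output; the helper name is hypothetical.
def _sketch_video_size(p_mi):
    width, height = libvlc_video_get_size(p_mi, 0)
    return width, height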
def libvlc_video_get_cursor(p_mi, num):
'''Get the mouse pointer coordinates over a video.
Coordinates are expressed in terms of the decoded video resolution,
B{not} in terms of pixels on the screen/viewport (to get the latter,
you can query your windowing system directly).
Either of the coordinates may be negative or larger than the corresponding
dimension of the video, if the cursor is outside the rendering area.
@warning: The coordinates may be out-of-date if the pointer is not located
on the video rendering area. LibVLC does not track the pointer if it is
outside of the video widget.
@note: LibVLC does not support multiple pointers (it does of course support
multiple input devices sharing the same pointer) at the moment.
@param p_mi: media player.
    @param num: number of the video (starting from 0, and most commonly 0).
@return: px abscissa, py ordinate.
'''
f = _Cfunctions.get('libvlc_video_get_cursor', None) or \
_Cfunction('libvlc_video_get_cursor', ((1,), (1,), (2,), (2,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_uint, ctypes.POINTER(ctypes.c_int), ctypes.POINTER(ctypes.c_int))
return f(p_mi, num)
def libvlc_video_get_scale(p_mi):
'''Get the current video scaling factor.
See also L{libvlc_video_set_scale}().
@param p_mi: the media player.
    @return: the currently configured zoom factor, or 0.0 if the video is set to fit to the output window/drawable automatically.
'''
f = _Cfunctions.get('libvlc_video_get_scale', None) or \
_Cfunction('libvlc_video_get_scale', ((1,),), None,
ctypes.c_float, MediaPlayer)
return f(p_mi)
def libvlc_video_set_scale(p_mi, f_factor):
'''Set the video scaling factor. That is the ratio of the number of pixels on
screen to the number of pixels in the original decoded video in each
dimension. Zero is a special value; it will adjust the video to the output
window/drawable (in windowed mode) or the entire screen.
Note that not all video outputs support scaling.
@param p_mi: the media player.
@param f_factor: the scaling factor, or zero.
'''
f = _Cfunctions.get('libvlc_video_set_scale', None) or \
_Cfunction('libvlc_video_set_scale', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_float)
return f(p_mi, f_factor)
def libvlc_video_get_aspect_ratio(p_mi):
'''Get current video aspect ratio.
@param p_mi: the media player.
@return: the video aspect ratio or None if unspecified (the result must be released with free() or L{libvlc_free}()).
'''
f = _Cfunctions.get('libvlc_video_get_aspect_ratio', None) or \
_Cfunction('libvlc_video_get_aspect_ratio', ((1,),), string_result,
ctypes.c_void_p, MediaPlayer)
return f(p_mi)
def libvlc_video_set_aspect_ratio(p_mi, psz_aspect):
'''Set new video aspect ratio.
@param p_mi: the media player.
@param psz_aspect: new video aspect-ratio or None to reset to default @note Invalid aspect ratios are ignored.
'''
f = _Cfunctions.get('libvlc_video_set_aspect_ratio', None) or \
_Cfunction('libvlc_video_set_aspect_ratio', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_char_p)
return f(p_mi, psz_aspect)
def libvlc_video_get_spu(p_mi):
'''Get current video subtitle.
@param p_mi: the media player.
@return: the video subtitle selected, or -1 if none.
'''
f = _Cfunctions.get('libvlc_video_get_spu', None) or \
_Cfunction('libvlc_video_get_spu', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_video_get_spu_count(p_mi):
'''Get the number of available video subtitles.
@param p_mi: the media player.
@return: the number of available video subtitles.
'''
f = _Cfunctions.get('libvlc_video_get_spu_count', None) or \
_Cfunction('libvlc_video_get_spu_count', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_video_get_spu_description(p_mi):
'''Get the description of available video subtitles.
@param p_mi: the media player.
@return: list containing description of available video subtitles. It must be freed with L{libvlc_track_description_list_release}().
'''
f = _Cfunctions.get('libvlc_video_get_spu_description', None) or \
_Cfunction('libvlc_video_get_spu_description', ((1,),), None,
ctypes.POINTER(TrackDescription), MediaPlayer)
return f(p_mi)
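# --- Editor's illustrative sketch (not part of the generated bindings) ---
# Walking the linked list returned by the track description getters above.
# This assumes the TrackDescription structure defined earlier in this module
# exposes `id`, `name` and `next` fields; the helper name is hypothetical.
def _sketch_list_spu_tracks(p_mi):
    tracks = []
    head = libvlc_video_get_spu_description(p_mi)
    item = head
    while item:  # a NULL ctypes pointer is falsy
        desc = item.contents
        tracks.append((desc.id, desc.name))
        item = desc.next
    if head:
        libvlc_track_description_list_release(head)
    return tracks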
def libvlc_video_set_spu(p_mi, i_spu):
'''Set new video subtitle.
@param p_mi: the media player.
@param i_spu: video subtitle track to select (i_id from track description).
@return: 0 on success, -1 if out of range.
'''
f = _Cfunctions.get('libvlc_video_set_spu', None) or \
_Cfunction('libvlc_video_set_spu', ((1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_int)
return f(p_mi, i_spu)
def libvlc_video_get_spu_delay(p_mi):
'''Get the current subtitle delay. Positive values means subtitles are being
displayed later, negative values earlier.
@param p_mi: media player.
@return: time (in microseconds) the display of subtitles is being delayed.
@version: LibVLC 2.0.0 or later.
'''
f = _Cfunctions.get('libvlc_video_get_spu_delay', None) or \
_Cfunction('libvlc_video_get_spu_delay', ((1,),), None,
ctypes.c_int64, MediaPlayer)
return f(p_mi)
def libvlc_video_set_spu_delay(p_mi, i_delay):
'''Set the subtitle delay. This affects the timing of when the subtitle will
be displayed. Positive values result in subtitles being displayed later,
while negative values will result in subtitles being displayed earlier.
The subtitle delay will be reset to zero each time the media changes.
@param p_mi: media player.
@param i_delay: time (in microseconds) the display of subtitles should be delayed.
@return: 0 on success, -1 on error.
@version: LibVLC 2.0.0 or later.
'''
f = _Cfunctions.get('libvlc_video_set_spu_delay', None) or \
_Cfunction('libvlc_video_set_spu_delay', ((1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_int64)
return f(p_mi, i_delay)
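# --- Editor's illustrative sketch (not part of the generated bindings) ---
# Nudging subtitle timing with the delay wrappers above. Delays are
# expressed in microseconds; the helper name is hypothetical.
def _sketch_nudge_subtitles(p_mi, milliseconds):
    current = libvlc_video_get_spu_delay(p_mi)
    return libvlc_video_set_spu_delay(p_mi, current + milliseconds * 1000)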
def libvlc_media_player_get_full_title_descriptions(p_mi, titles):
'''Get the full description of available titles.
@param p_mi: the media player.
    @param titles: address to store an allocated array of title descriptions (must be freed with L{libvlc_title_descriptions_release}() by the caller) [OUT].
@return: the number of titles (-1 on error).
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_media_player_get_full_title_descriptions', None) or \
_Cfunction('libvlc_media_player_get_full_title_descriptions', ((1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.POINTER(ctypes.POINTER(TitleDescription)))
return f(p_mi, titles)
def libvlc_title_descriptions_release(p_titles, i_count):
'''Release a title description.
@param p_titles: title description array to release.
@param i_count: number of title descriptions to release.
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_title_descriptions_release', None) or \
_Cfunction('libvlc_title_descriptions_release', ((1,), (1,),), None,
None, ctypes.POINTER(TitleDescription), ctypes.c_uint)
return f(p_titles, i_count)
def libvlc_media_player_get_full_chapter_descriptions(p_mi, i_chapters_of_title, pp_chapters):
'''Get the full description of available chapters.
@param p_mi: the media player.
@param i_chapters_of_title: index of the title to query for chapters (uses current title if set to -1).
    @param pp_chapters: address to store an allocated array of chapter descriptions (must be freed with L{libvlc_chapter_descriptions_release}() by the caller) [OUT].
@return: the number of chapters (-1 on error).
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_media_player_get_full_chapter_descriptions', None) or \
_Cfunction('libvlc_media_player_get_full_chapter_descriptions', ((1,), (1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_int, ctypes.POINTER(ctypes.POINTER(ChapterDescription)))
return f(p_mi, i_chapters_of_title, pp_chapters)
def libvlc_chapter_descriptions_release(p_chapters, i_count):
'''Release a chapter description.
@param p_chapters: chapter description array to release.
@param i_count: number of chapter descriptions to release.
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_chapter_descriptions_release', None) or \
_Cfunction('libvlc_chapter_descriptions_release', ((1,), (1,),), None,
None, ctypes.POINTER(ChapterDescription), ctypes.c_uint)
return f(p_chapters, i_count)
def libvlc_video_get_crop_geometry(p_mi):
'''Get current crop filter geometry.
@param p_mi: the media player.
@return: the crop filter geometry or None if unset.
'''
f = _Cfunctions.get('libvlc_video_get_crop_geometry', None) or \
_Cfunction('libvlc_video_get_crop_geometry', ((1,),), string_result,
ctypes.c_void_p, MediaPlayer)
return f(p_mi)
def libvlc_video_set_crop_geometry(p_mi, psz_geometry):
'''Set new crop filter geometry.
@param p_mi: the media player.
@param psz_geometry: new crop filter geometry (None to unset).
'''
f = _Cfunctions.get('libvlc_video_set_crop_geometry', None) or \
_Cfunction('libvlc_video_set_crop_geometry', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_char_p)
return f(p_mi, psz_geometry)
def libvlc_video_get_teletext(p_mi):
'''Get current teletext page requested.
@param p_mi: the media player.
@return: the current teletext page requested.
'''
f = _Cfunctions.get('libvlc_video_get_teletext', None) or \
_Cfunction('libvlc_video_get_teletext', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_video_set_teletext(p_mi, i_page):
'''Set new teletext page to retrieve.
@param p_mi: the media player.
    @param i_page: teletext page number requested.
'''
f = _Cfunctions.get('libvlc_video_set_teletext', None) or \
_Cfunction('libvlc_video_set_teletext', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_int)
return f(p_mi, i_page)
def libvlc_toggle_teletext(p_mi):
'''Toggle teletext transparent status on video output.
@param p_mi: the media player.
'''
f = _Cfunctions.get('libvlc_toggle_teletext', None) or \
_Cfunction('libvlc_toggle_teletext', ((1,),), None,
None, MediaPlayer)
return f(p_mi)
def libvlc_video_get_track_count(p_mi):
'''Get number of available video tracks.
@param p_mi: media player.
@return: the number of available video tracks (int).
'''
f = _Cfunctions.get('libvlc_video_get_track_count', None) or \
_Cfunction('libvlc_video_get_track_count', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_video_get_track_description(p_mi):
'''Get the description of available video tracks.
@param p_mi: media player.
@return: list with description of available video tracks, or None on error. It must be freed with L{libvlc_track_description_list_release}().
'''
f = _Cfunctions.get('libvlc_video_get_track_description', None) or \
_Cfunction('libvlc_video_get_track_description', ((1,),), None,
ctypes.POINTER(TrackDescription), MediaPlayer)
return f(p_mi)
def libvlc_video_get_track(p_mi):
'''Get current video track.
@param p_mi: media player.
@return: the video track ID (int) or -1 if no active input.
'''
f = _Cfunctions.get('libvlc_video_get_track', None) or \
_Cfunction('libvlc_video_get_track', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_video_set_track(p_mi, i_track):
'''Set video track.
@param p_mi: media player.
@param i_track: the track ID (i_id field from track description).
@return: 0 on success, -1 if out of range.
'''
f = _Cfunctions.get('libvlc_video_set_track', None) or \
_Cfunction('libvlc_video_set_track', ((1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_int)
return f(p_mi, i_track)
def libvlc_video_take_snapshot(p_mi, num, psz_filepath, i_width, i_height):
'''Take a snapshot of the current video window.
If i_width AND i_height is 0, original size is used.
If i_width XOR i_height is 0, original aspect-ratio is preserved.
@param p_mi: media player instance.
@param num: number of video output (typically 0 for the first/only one).
@param psz_filepath: the path where to save the screenshot to.
@param i_width: the snapshot's width.
@param i_height: the snapshot's height.
@return: 0 on success, -1 if the video was not found.
'''
f = _Cfunctions.get('libvlc_video_take_snapshot', None) or \
_Cfunction('libvlc_video_take_snapshot', ((1,), (1,), (1,), (1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_uint, ctypes.c_char_p, ctypes.c_int, ctypes.c_int)
return f(p_mi, num, psz_filepath, i_width, i_height)
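# --- Editor's illustrative sketch (not part of the generated bindings) ---
# Taking a snapshot with the wrapper above; a width/height of 0, 0 keeps the
# original video size. The path must be bytes (ctypes.c_char_p); the helper
# name is hypothetical.
def _sketch_snapshot(p_mi, path_bytes):
    if libvlc_video_take_snapshot(p_mi, 0, path_bytes, 0, 0) != 0:
        raise RuntimeError('snapshot failed: no video output found')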
def libvlc_video_set_deinterlace(p_mi, psz_mode):
'''Enable or disable deinterlace filter.
@param p_mi: libvlc media player.
@param psz_mode: type of deinterlace filter, None to disable.
'''
f = _Cfunctions.get('libvlc_video_set_deinterlace', None) or \
_Cfunction('libvlc_video_set_deinterlace', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_char_p)
return f(p_mi, psz_mode)
def libvlc_video_get_marquee_int(p_mi, option):
'''Get an integer marquee option value.
@param p_mi: libvlc media player.
    @param option: marq option to get. See libvlc_video_marquee_int_option_t.
'''
f = _Cfunctions.get('libvlc_video_get_marquee_int', None) or \
_Cfunction('libvlc_video_get_marquee_int', ((1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_uint)
return f(p_mi, option)
def libvlc_video_get_marquee_string(p_mi, option):
'''Get a string marquee option value.
@param p_mi: libvlc media player.
    @param option: marq option to get. See libvlc_video_marquee_string_option_t.
'''
f = _Cfunctions.get('libvlc_video_get_marquee_string', None) or \
_Cfunction('libvlc_video_get_marquee_string', ((1,), (1,),), string_result,
ctypes.c_void_p, MediaPlayer, ctypes.c_uint)
return f(p_mi, option)
def libvlc_video_set_marquee_int(p_mi, option, i_val):
'''Enable, disable or set an integer marquee option
Setting libvlc_marquee_Enable has the side effect of enabling (arg !0)
or disabling (arg 0) the marq filter.
@param p_mi: libvlc media player.
    @param option: marq option to set. See libvlc_video_marquee_int_option_t.
@param i_val: marq option value.
'''
f = _Cfunctions.get('libvlc_video_set_marquee_int', None) or \
_Cfunction('libvlc_video_set_marquee_int', ((1,), (1,), (1,),), None,
None, MediaPlayer, ctypes.c_uint, ctypes.c_int)
return f(p_mi, option, i_val)
def libvlc_video_set_marquee_string(p_mi, option, psz_text):
'''Set a marquee string option.
@param p_mi: libvlc media player.
    @param option: marq option to set. See libvlc_video_marquee_string_option_t.
@param psz_text: marq option value.
'''
f = _Cfunctions.get('libvlc_video_set_marquee_string', None) or \
_Cfunction('libvlc_video_set_marquee_string', ((1,), (1,), (1,),), None,
None, MediaPlayer, ctypes.c_uint, ctypes.c_char_p)
return f(p_mi, option, psz_text)
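# --- Editor's illustrative sketch (not part of the generated bindings) ---
# Enabling the marquee filter and setting its text with the wrappers above.
# VideoMarqueeOption is assumed to be the enum defined earlier in this
# module (mirroring libvlc_video_marquee_option_t); the helper name is
# hypothetical, and text must be bytes (ctypes.c_char_p).
def _sketch_show_marquee(p_mi, text_bytes):
    libvlc_video_set_marquee_int(p_mi, VideoMarqueeOption.Enable, 1)
    libvlc_video_set_marquee_string(p_mi, VideoMarqueeOption.Text, text_bytes)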
def libvlc_video_get_logo_int(p_mi, option):
'''Get integer logo option.
@param p_mi: libvlc media player instance.
@param option: logo option to get, values of libvlc_video_logo_option_t.
'''
f = _Cfunctions.get('libvlc_video_get_logo_int', None) or \
_Cfunction('libvlc_video_get_logo_int', ((1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_uint)
return f(p_mi, option)
def libvlc_video_set_logo_int(p_mi, option, value):
'''Set logo option as integer. Options that take a different type value
are ignored.
Passing libvlc_logo_enable as option value has the side effect of
starting (arg !0) or stopping (arg 0) the logo filter.
@param p_mi: libvlc media player instance.
@param option: logo option to set, values of libvlc_video_logo_option_t.
@param value: logo option value.
'''
f = _Cfunctions.get('libvlc_video_set_logo_int', None) or \
_Cfunction('libvlc_video_set_logo_int', ((1,), (1,), (1,),), None,
None, MediaPlayer, ctypes.c_uint, ctypes.c_int)
return f(p_mi, option, value)
def libvlc_video_set_logo_string(p_mi, option, psz_value):
'''Set logo option as string. Options that take a different type value
are ignored.
@param p_mi: libvlc media player instance.
@param option: logo option to set, values of libvlc_video_logo_option_t.
@param psz_value: logo option value.
'''
f = _Cfunctions.get('libvlc_video_set_logo_string', None) or \
_Cfunction('libvlc_video_set_logo_string', ((1,), (1,), (1,),), None,
None, MediaPlayer, ctypes.c_uint, ctypes.c_char_p)
return f(p_mi, option, psz_value)
def libvlc_video_get_adjust_int(p_mi, option):
'''Get integer adjust option.
@param p_mi: libvlc media player instance.
@param option: adjust option to get, values of libvlc_video_adjust_option_t.
@version: LibVLC 1.1.1 and later.
'''
f = _Cfunctions.get('libvlc_video_get_adjust_int', None) or \
_Cfunction('libvlc_video_get_adjust_int', ((1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_uint)
return f(p_mi, option)
def libvlc_video_set_adjust_int(p_mi, option, value):
'''Set adjust option as integer. Options that take a different type value
are ignored.
Passing libvlc_adjust_enable as option value has the side effect of
starting (arg !0) or stopping (arg 0) the adjust filter.
@param p_mi: libvlc media player instance.
    @param option: adjust option to set, values of libvlc_video_adjust_option_t.
@param value: adjust option value.
@version: LibVLC 1.1.1 and later.
'''
f = _Cfunctions.get('libvlc_video_set_adjust_int', None) or \
_Cfunction('libvlc_video_set_adjust_int', ((1,), (1,), (1,),), None,
None, MediaPlayer, ctypes.c_uint, ctypes.c_int)
return f(p_mi, option, value)
def libvlc_video_get_adjust_float(p_mi, option):
'''Get float adjust option.
@param p_mi: libvlc media player instance.
@param option: adjust option to get, values of libvlc_video_adjust_option_t.
@version: LibVLC 1.1.1 and later.
'''
f = _Cfunctions.get('libvlc_video_get_adjust_float', None) or \
_Cfunction('libvlc_video_get_adjust_float', ((1,), (1,),), None,
ctypes.c_float, MediaPlayer, ctypes.c_uint)
return f(p_mi, option)
def libvlc_video_set_adjust_float(p_mi, option, value):
'''Set adjust option as float. Options that take a different type value
are ignored.
@param p_mi: libvlc media player instance.
    @param option: adjust option to set, values of libvlc_video_adjust_option_t.
@param value: adjust option value.
@version: LibVLC 1.1.1 and later.
'''
f = _Cfunctions.get('libvlc_video_set_adjust_float', None) or \
_Cfunction('libvlc_video_set_adjust_float', ((1,), (1,), (1,),), None,
None, MediaPlayer, ctypes.c_uint, ctypes.c_float)
return f(p_mi, option, value)
def libvlc_audio_output_list_get(p_instance):
'''Gets the list of available audio output modules.
@param p_instance: libvlc instance.
    @return: list of available audio outputs. It must be freed with L{libvlc_audio_output_list_release}(). In case of error, None is returned.
'''
f = _Cfunctions.get('libvlc_audio_output_list_get', None) or \
_Cfunction('libvlc_audio_output_list_get', ((1,),), None,
ctypes.POINTER(AudioOutput), Instance)
return f(p_instance)
def libvlc_audio_output_list_release(p_list):
'''Frees the list of available audio output modules.
@param p_list: list with audio outputs for release.
'''
f = _Cfunctions.get('libvlc_audio_output_list_release', None) or \
_Cfunction('libvlc_audio_output_list_release', ((1,),), None,
None, ctypes.POINTER(AudioOutput))
return f(p_list)
def libvlc_audio_output_set(p_mi, psz_name):
'''Selects an audio output module.
    @note: Any change will take effect only after playback is stopped and
restarted. Audio output cannot be changed while playing.
@param p_mi: media player.
    @param psz_name: name of the audio output; use the psz_name field of L{AudioOutput}.
@return: 0 if function succeeded, -1 on error.
'''
f = _Cfunctions.get('libvlc_audio_output_set', None) or \
_Cfunction('libvlc_audio_output_set', ((1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_char_p)
return f(p_mi, psz_name)
def libvlc_audio_output_device_enum(mp):
'''Gets a list of potential audio output devices,
See L{libvlc_audio_output_device_set}().
@note: Not all audio outputs support enumerating devices.
The audio output may be functional even if the list is empty (None).
@note: The list may not be exhaustive.
@warning: Some audio output devices in the list might not actually work in
some circumstances. By default, it is recommended to not specify any
explicit audio device.
@param mp: media player.
@return: A None-terminated linked list of potential audio output devices. It must be freed with L{libvlc_audio_output_device_list_release}().
@version: LibVLC 2.2.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_output_device_enum', None) or \
_Cfunction('libvlc_audio_output_device_enum', ((1,),), None,
ctypes.POINTER(AudioOutputDevice), MediaPlayer)
return f(mp)
def libvlc_audio_output_device_list_get(p_instance, aout):
'''Gets a list of audio output devices for a given audio output module,
See L{libvlc_audio_output_device_set}().
@note: Not all audio outputs support this. In particular, an empty (None)
list of devices does B{not} imply that the specified audio output does
not work.
@note: The list might not be exhaustive.
@warning: Some audio output devices in the list might not actually work in
some circumstances. By default, it is recommended to not specify any
explicit audio device.
@param p_instance: libvlc instance.
@param aout: audio output name (as returned by L{libvlc_audio_output_list_get}()).
@return: A None-terminated linked list of potential audio output devices. It must be freed with L{libvlc_audio_output_device_list_release}().
@version: LibVLC 2.1.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_output_device_list_get', None) or \
_Cfunction('libvlc_audio_output_device_list_get', ((1,), (1,),), None,
ctypes.POINTER(AudioOutputDevice), Instance, ctypes.c_char_p)
return f(p_instance, aout)
def libvlc_audio_output_device_list_release(p_list):
'''Frees a list of available audio output devices.
@param p_list: list with audio outputs for release.
@version: LibVLC 2.1.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_output_device_list_release', None) or \
_Cfunction('libvlc_audio_output_device_list_release', ((1,),), None,
None, ctypes.POINTER(AudioOutputDevice))
return f(p_list)
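# --- Editor's illustrative sketch (not part of the generated bindings) ---
# Enumerating audio output devices with the wrappers above. This assumes the
# AudioOutputDevice structure defined earlier in this module exposes `next`,
# `device` and `description` fields; the helper name is hypothetical.
def _sketch_audio_devices(mp):
    devices = []
    head = libvlc_audio_output_device_enum(mp)
    item = head
    while item:  # a NULL ctypes pointer is falsy
        dev = item.contents
        devices.append((dev.device, dev.description))
        item = dev.next
    if head:
        libvlc_audio_output_device_list_release(head)
    return devices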
def libvlc_audio_output_device_set(mp, module, device_id):
'''Configures an explicit audio output device.
    If the module parameter is None, audio output will be moved to the device
specified by the device identifier string immediately. This is the
recommended usage.
A list of adequate potential device strings can be obtained with
L{libvlc_audio_output_device_enum}().
However passing None is supported in LibVLC version 2.2.0 and later only;
    in earlier versions, this function would have no effect when the module
parameter was None.
If the module parameter is not None, the device parameter of the
corresponding audio output, if it exists, will be set to the specified
string. Note that some audio output modules do not have such a parameter
(notably MMDevice and PulseAudio).
A list of adequate potential device strings can be obtained with
L{libvlc_audio_output_device_list_get}().
@note: This function does not select the specified audio output plugin.
L{libvlc_audio_output_set}() is used for that purpose.
@warning: The syntax for the device parameter depends on the audio output.
Some audio output modules require further parameters (e.g. a channels map
in the case of ALSA).
@param mp: media player.
@param module: If None, current audio output module. if non-None, name of audio output module.
@param device_id: device identifier string.
@return: Nothing. Errors are ignored (this is a design bug).
'''
f = _Cfunctions.get('libvlc_audio_output_device_set', None) or \
_Cfunction('libvlc_audio_output_device_set', ((1,), (1,), (1,),), None,
None, MediaPlayer, ctypes.c_char_p, ctypes.c_char_p)
return f(mp, module, device_id)
def libvlc_audio_output_device_get(mp):
'''Get the current audio output device identifier.
This complements L{libvlc_audio_output_device_set}().
@warning: The initial value for the current audio output device identifier
may not be set or may be some unknown value. A LibVLC application should
compare this value against the known device identifiers (e.g. those that
were previously retrieved by a call to L{libvlc_audio_output_device_enum} or
L{libvlc_audio_output_device_list_get}) to find the current audio output device.
It is possible that the selected audio output device changes (an external
change) without a call to L{libvlc_audio_output_device_set}. That may make this
method unsuitable to use if a LibVLC application is attempting to track
dynamic audio device changes as they happen.
@param mp: media player.
    @return: the current audio output device identifier, or None if no device is selected or in case of error (the result must be released with free() or L{libvlc_free}()).
@version: LibVLC 3.0.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_output_device_get', None) or \
_Cfunction('libvlc_audio_output_device_get', ((1,),), None,
ctypes.c_char_p, MediaPlayer)
return f(mp)
def libvlc_audio_toggle_mute(p_mi):
'''Toggle mute status.
    @param p_mi: media player.
    @warning: Toggling mute atomically is not always possible: on some platforms, other processes can mute the VLC audio playback stream asynchronously. Thus, there is a small race condition where toggling will not work. See also the limitations of L{libvlc_audio_set_mute}().
'''
f = _Cfunctions.get('libvlc_audio_toggle_mute', None) or \
_Cfunction('libvlc_audio_toggle_mute', ((1,),), None,
None, MediaPlayer)
return f(p_mi)
def libvlc_audio_get_mute(p_mi):
'''Get current mute status.
@param p_mi: media player.
    @return: the mute status (boolean) if defined, -1 if undefined/inapplicable.
'''
f = _Cfunctions.get('libvlc_audio_get_mute', None) or \
_Cfunction('libvlc_audio_get_mute', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_audio_set_mute(p_mi, status):
'''Set mute status.
@param p_mi: media player.
    @param status: if status is true then mute, otherwise unmute.
    @warning: This function does not always work. If there is no active audio playback stream, the mute status might not be available. If digital pass-through (S/PDIF, HDMI...) is in use, muting may be inapplicable. Also, some audio output plugins do not support muting at all.
    @note: To force silent playback, disable all audio tracks. This is more efficient and reliable than mute.
'''
f = _Cfunctions.get('libvlc_audio_set_mute', None) or \
_Cfunction('libvlc_audio_set_mute', ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_int)
return f(p_mi, status)
def libvlc_audio_get_volume(p_mi):
'''Get current software audio volume.
@param p_mi: media player.
@return: the software volume in percents (0 = mute, 100 = nominal / 0dB).
'''
f = _Cfunctions.get('libvlc_audio_get_volume', None) or \
_Cfunction('libvlc_audio_get_volume', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_audio_set_volume(p_mi, i_volume):
'''Set current software audio volume.
@param p_mi: media player.
@param i_volume: the volume in percents (0 = mute, 100 = 0dB).
@return: 0 if the volume was set, -1 if it was out of range.
'''
f = _Cfunctions.get('libvlc_audio_set_volume', None) or \
_Cfunction('libvlc_audio_set_volume', ((1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_int)
return f(p_mi, i_volume)
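# A small hedged sketch of the software-volume round trip (never called in
# this module); `player` is an assumed MediaPlayer, and the getter may still
# report -1 while no audio output exists for it.
def _example_half_volume(player):
    if libvlc_audio_set_volume(player, 50) == 0:  # 50% of nominal (100 = 0dB)
        return libvlc_audio_get_volume(player)
    return -1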
def libvlc_audio_get_track_count(p_mi):
'''Get number of available audio tracks.
@param p_mi: media player.
@return: the number of available audio tracks (int), or -1 if unavailable.
'''
f = _Cfunctions.get('libvlc_audio_get_track_count', None) or \
_Cfunction('libvlc_audio_get_track_count', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_audio_get_track_description(p_mi):
'''Get the description of available audio tracks.
@param p_mi: media player.
@return: list with description of available audio tracks, or None. It must be freed with L{libvlc_track_description_list_release}().
'''
f = _Cfunctions.get('libvlc_audio_get_track_description', None) or \
_Cfunction('libvlc_audio_get_track_description', ((1,),), None,
ctypes.POINTER(TrackDescription), MediaPlayer)
return f(p_mi)
def libvlc_audio_get_track(p_mi):
'''Get current audio track.
@param p_mi: media player.
@return: the audio track ID or -1 if no active input.
'''
f = _Cfunctions.get('libvlc_audio_get_track', None) or \
_Cfunction('libvlc_audio_get_track', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_audio_set_track(p_mi, i_track):
'''Set current audio track.
@param p_mi: media player.
@param i_track: the track ID (i_id field from track description).
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_audio_set_track', None) or \
_Cfunction('libvlc_audio_set_track', ((1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_int)
return f(p_mi, i_track)
def libvlc_audio_get_channel(p_mi):
'''Get current audio channel.
@param p_mi: media player.
    @return: the audio channel; see libvlc_audio_output_channel_t.
'''
f = _Cfunctions.get('libvlc_audio_get_channel', None) or \
_Cfunction('libvlc_audio_get_channel', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_audio_set_channel(p_mi, channel):
'''Set current audio channel.
@param p_mi: media player.
    @param channel: the audio channel; see libvlc_audio_output_channel_t.
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_audio_set_channel', None) or \
_Cfunction('libvlc_audio_set_channel', ((1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_int)
return f(p_mi, channel)
def libvlc_audio_get_delay(p_mi):
'''Get current audio delay.
@param p_mi: media player.
@return: the audio delay (microseconds).
@version: LibVLC 1.1.1 or later.
'''
f = _Cfunctions.get('libvlc_audio_get_delay', None) or \
_Cfunction('libvlc_audio_get_delay', ((1,),), None,
ctypes.c_int64, MediaPlayer)
return f(p_mi)
def libvlc_audio_set_delay(p_mi, i_delay):
'''Set current audio delay. The audio delay will be reset to zero each time the media changes.
@param p_mi: media player.
@param i_delay: the audio delay (microseconds).
@return: 0 on success, -1 on error.
@version: LibVLC 1.1.1 or later.
'''
f = _Cfunctions.get('libvlc_audio_set_delay', None) or \
_Cfunction('libvlc_audio_set_delay', ((1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_int64)
return f(p_mi, i_delay)
def libvlc_audio_equalizer_get_preset_count():
'''Get the number of equalizer presets.
@return: number of presets.
@version: LibVLC 2.2.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_equalizer_get_preset_count', None) or \
_Cfunction('libvlc_audio_equalizer_get_preset_count', (), None,
ctypes.c_uint)
return f()
def libvlc_audio_equalizer_get_preset_name(u_index):
'''Get the name of a particular equalizer preset.
This name can be used, for example, to prepare a preset label or menu in a user
interface.
@param u_index: index of the preset, counting from zero.
@return: preset name, or None if there is no such preset.
@version: LibVLC 2.2.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_equalizer_get_preset_name', None) or \
_Cfunction('libvlc_audio_equalizer_get_preset_name', ((1,),), None,
ctypes.c_char_p, ctypes.c_uint)
return f(u_index)
def libvlc_audio_equalizer_get_band_count():
'''Get the number of distinct frequency bands for an equalizer.
@return: number of frequency bands.
@version: LibVLC 2.2.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_equalizer_get_band_count', None) or \
_Cfunction('libvlc_audio_equalizer_get_band_count', (), None,
ctypes.c_uint)
return f()
def libvlc_audio_equalizer_get_band_frequency(u_index):
'''Get a particular equalizer band frequency.
This value can be used, for example, to create a label for an equalizer band control
in a user interface.
@param u_index: index of the band, counting from zero.
@return: equalizer band frequency (Hz), or -1 if there is no such band.
@version: LibVLC 2.2.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_equalizer_get_band_frequency', None) or \
_Cfunction('libvlc_audio_equalizer_get_band_frequency', ((1,),), None,
ctypes.c_float, ctypes.c_uint)
return f(u_index)
def libvlc_audio_equalizer_new():
'''Create a new default equalizer, with all frequency values zeroed.
The new equalizer can subsequently be applied to a media player by invoking
L{libvlc_media_player_set_equalizer}().
The returned handle should be freed via L{libvlc_audio_equalizer_release}() when
it is no longer needed.
@return: opaque equalizer handle, or None on error.
@version: LibVLC 2.2.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_equalizer_new', None) or \
_Cfunction('libvlc_audio_equalizer_new', (), None,
ctypes.c_void_p)
return f()
def libvlc_audio_equalizer_new_from_preset(u_index):
'''Create a new equalizer, with initial frequency values copied from an existing
preset.
The new equalizer can subsequently be applied to a media player by invoking
L{libvlc_media_player_set_equalizer}().
The returned handle should be freed via L{libvlc_audio_equalizer_release}() when
it is no longer needed.
@param u_index: index of the preset, counting from zero.
@return: opaque equalizer handle, or None on error.
@version: LibVLC 2.2.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_equalizer_new_from_preset', None) or \
_Cfunction('libvlc_audio_equalizer_new_from_preset', ((1,),), None,
ctypes.c_void_p, ctypes.c_uint)
return f(u_index)
def libvlc_audio_equalizer_release(p_equalizer):
'''Release a previously created equalizer instance.
The equalizer was previously created by using L{libvlc_audio_equalizer_new}() or
L{libvlc_audio_equalizer_new_from_preset}().
It is safe to invoke this method with a None p_equalizer parameter for no effect.
@param p_equalizer: opaque equalizer handle, or None.
@version: LibVLC 2.2.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_equalizer_release', None) or \
_Cfunction('libvlc_audio_equalizer_release', ((1,),), None,
None, ctypes.c_void_p)
return f(p_equalizer)
def libvlc_audio_equalizer_set_preamp(p_equalizer, f_preamp):
'''Set a new pre-amplification value for an equalizer.
The new equalizer settings are subsequently applied to a media player by invoking
L{libvlc_media_player_set_equalizer}().
The supplied amplification value will be clamped to the -20.0 to +20.0 range.
@param p_equalizer: valid equalizer handle, must not be None.
    @param f_preamp: preamp value (-20.0 to 20.0 dB).
@return: zero on success, -1 on error.
@version: LibVLC 2.2.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_equalizer_set_preamp', None) or \
_Cfunction('libvlc_audio_equalizer_set_preamp', ((1,), (1,),), None,
ctypes.c_int, ctypes.c_void_p, ctypes.c_float)
return f(p_equalizer, f_preamp)
def libvlc_audio_equalizer_get_preamp(p_equalizer):
'''Get the current pre-amplification value from an equalizer.
@param p_equalizer: valid equalizer handle, must not be None.
    @return: preamp value (dB).
@version: LibVLC 2.2.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_equalizer_get_preamp', None) or \
_Cfunction('libvlc_audio_equalizer_get_preamp', ((1,),), None,
ctypes.c_float, ctypes.c_void_p)
return f(p_equalizer)
def libvlc_audio_equalizer_set_amp_at_index(p_equalizer, f_amp, u_band):
'''Set a new amplification value for a particular equalizer frequency band.
The new equalizer settings are subsequently applied to a media player by invoking
L{libvlc_media_player_set_equalizer}().
The supplied amplification value will be clamped to the -20.0 to +20.0 range.
@param p_equalizer: valid equalizer handle, must not be None.
    @param f_amp: amplification value (-20.0 to 20.0 dB).
@param u_band: index, counting from zero, of the frequency band to set.
@return: zero on success, -1 on error.
@version: LibVLC 2.2.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_equalizer_set_amp_at_index', None) or \
_Cfunction('libvlc_audio_equalizer_set_amp_at_index', ((1,), (1,), (1,),), None,
ctypes.c_int, ctypes.c_void_p, ctypes.c_float, ctypes.c_uint)
return f(p_equalizer, f_amp, u_band)
def libvlc_audio_equalizer_get_amp_at_index(p_equalizer, u_band):
'''Get the amplification value for a particular equalizer frequency band.
@param p_equalizer: valid equalizer handle, must not be None.
@param u_band: index, counting from zero, of the frequency band to get.
    @return: amplification value (dB); NaN if there is no such frequency band.
@version: LibVLC 2.2.0 or later.
'''
f = _Cfunctions.get('libvlc_audio_equalizer_get_amp_at_index', None) or \
_Cfunction('libvlc_audio_equalizer_get_amp_at_index', ((1,), (1,),), None,
ctypes.c_float, ctypes.c_void_p, ctypes.c_uint)
return f(p_equalizer, u_band)
def libvlc_media_player_set_equalizer(p_mi, p_equalizer):
'''Apply new equalizer settings to a media player.
The equalizer is first created by invoking L{libvlc_audio_equalizer_new}() or
L{libvlc_audio_equalizer_new_from_preset}().
It is possible to apply new equalizer settings to a media player whether the media
player is currently playing media or not.
Invoking this method will immediately apply the new equalizer settings to the audio
output of the currently playing media if there is any.
If there is no currently playing media, the new equalizer settings will be applied
later if and when new media is played.
Equalizer settings will automatically be applied to subsequently played media.
To disable the equalizer for a media player invoke this method passing None for the
p_equalizer parameter.
The media player does not keep a reference to the supplied equalizer so it is safe
for an application to release the equalizer reference any time after this method
returns.
@param p_mi: opaque media player handle.
@param p_equalizer: opaque equalizer handle, or None to disable the equalizer for this media player.
@return: zero on success, -1 on error.
@version: LibVLC 2.2.0 or later.
'''
f = _Cfunctions.get('libvlc_media_player_set_equalizer', None) or \
_Cfunction('libvlc_media_player_set_equalizer', ((1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_void_p)
return f(p_mi, p_equalizer)
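# A hedged end-to-end equalizer sketch (never called here); `player` is an
# assumed MediaPlayer and the preset index and gain values are illustrative.
def _example_apply_equalizer(player):
    eq = libvlc_audio_equalizer_new_from_preset(0)    # copy the first preset
    libvlc_audio_equalizer_set_preamp(eq, 5.0)        # clamped to +/-20
    libvlc_audio_equalizer_set_amp_at_index(eq, -3.0, 0)
    libvlc_media_player_set_equalizer(player, eq)
    # the player does not keep a reference, so releasing here is safe
    libvlc_audio_equalizer_release(eq)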
def libvlc_media_player_get_role(p_mi):
'''Gets the media role.
@param p_mi: media player.
@return: the media player role (\ref libvlc_media_player_role_t).
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_media_player_get_role', None) or \
_Cfunction('libvlc_media_player_get_role', ((1,),), None,
ctypes.c_int, MediaPlayer)
return f(p_mi)
def libvlc_media_player_set_role(p_mi, role):
'''Sets the media role.
@param p_mi: media player.
@param role: the media player role (\ref libvlc_media_player_role_t).
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_media_player_set_role', None) or \
_Cfunction('libvlc_media_player_set_role', ((1,), (1,),), None,
ctypes.c_int, MediaPlayer, ctypes.c_uint)
return f(p_mi, role)
def libvlc_renderer_item_name(p_item):
'''Get the human readable name of a renderer item.
@return: the name of the item (can't be None, must *not* be freed).
@version: LibVLC 3.0.0 or later.
'''
f = _Cfunctions.get('libvlc_renderer_item_name', None) or \
_Cfunction('libvlc_renderer_item_name', ((1,),), None,
ctypes.c_char_p, ctypes.c_void_p)
return f(p_item)
def libvlc_renderer_item_type(p_item):
'''Get the type (not translated) of a renderer item. For now, the type can only
be "chromecast" ("upnp", "airplay" may come later).
@return: the type of the item (can't be None, must *not* be freed).
@version: LibVLC 3.0.0 or later.
'''
f = _Cfunctions.get('libvlc_renderer_item_type', None) or \
_Cfunction('libvlc_renderer_item_type', ((1,),), None,
ctypes.c_char_p, ctypes.c_void_p)
return f(p_item)
def libvlc_renderer_item_icon_uri(p_item):
'''Get the icon uri of a renderer item.
@return: the uri of the item's icon (can be None, must *not* be freed).
@version: LibVLC 3.0.0 or later.
'''
f = _Cfunctions.get('libvlc_renderer_item_icon_uri', None) or \
_Cfunction('libvlc_renderer_item_icon_uri', ((1,),), None,
ctypes.c_char_p, ctypes.c_void_p)
return f(p_item)
def libvlc_renderer_item_flags(p_item):
    '''Get the flags of a renderer item.
    See LIBVLC_RENDERER_CAN_AUDIO and LIBVLC_RENDERER_CAN_VIDEO.
    @return: bitwise flags describing the capabilities of the renderer.
@version: LibVLC 3.0.0 or later.
'''
f = _Cfunctions.get('libvlc_renderer_item_flags', None) or \
_Cfunction('libvlc_renderer_item_flags', ((1,),), None,
ctypes.c_int, ctypes.c_void_p)
return f(p_item)
def libvlc_renderer_discoverer_new(p_inst, psz_name):
'''Create a renderer discoverer object by name
After this object is created, you should attach to events in order to be
notified of the discoverer events.
You need to call L{libvlc_renderer_discoverer_start}() in order to start the
discovery.
See L{libvlc_renderer_discoverer_event_manager}()
See L{libvlc_renderer_discoverer_start}().
@param p_inst: libvlc instance.
@param psz_name: service name; use L{libvlc_renderer_discoverer_list_get}() to get a list of the discoverer names available in this libVLC instance.
@return: media discover object or None in case of error.
@version: LibVLC 3.0.0 or later.
'''
f = _Cfunctions.get('libvlc_renderer_discoverer_new', None) or \
_Cfunction('libvlc_renderer_discoverer_new', ((1,), (1,),), None,
ctypes.c_void_p, Instance, ctypes.c_char_p)
return f(p_inst, psz_name)
def libvlc_renderer_discoverer_release(p_rd):
'''Release a renderer discoverer object.
@param p_rd: renderer discoverer object.
@version: LibVLC 3.0.0 or later.
'''
f = _Cfunctions.get('libvlc_renderer_discoverer_release', None) or \
_Cfunction('libvlc_renderer_discoverer_release', ((1,),), None,
None, ctypes.c_void_p)
return f(p_rd)
def libvlc_renderer_discoverer_start(p_rd):
'''Start renderer discovery
To stop it, call L{libvlc_renderer_discoverer_stop}() or
L{libvlc_renderer_discoverer_release}() directly.
See L{libvlc_renderer_discoverer_stop}().
@param p_rd: renderer discoverer object.
@return: -1 in case of error, 0 otherwise.
@version: LibVLC 3.0.0 or later.
'''
f = _Cfunctions.get('libvlc_renderer_discoverer_start', None) or \
_Cfunction('libvlc_renderer_discoverer_start', ((1,),), None,
ctypes.c_int, ctypes.c_void_p)
return f(p_rd)
def libvlc_renderer_discoverer_stop(p_rd):
'''Stop renderer discovery.
See L{libvlc_renderer_discoverer_start}().
@param p_rd: renderer discoverer object.
@version: LibVLC 3.0.0 or later.
'''
f = _Cfunctions.get('libvlc_renderer_discoverer_stop', None) or \
_Cfunction('libvlc_renderer_discoverer_stop', ((1,),), None,
None, ctypes.c_void_p)
return f(p_rd)
def libvlc_renderer_discoverer_event_manager(p_rd):
'''Get the event manager of the renderer discoverer
The possible events to attach are @ref libvlc_RendererDiscovererItemAdded
and @ref libvlc_RendererDiscovererItemDeleted.
The @ref libvlc_renderer_item_t struct passed to event callbacks is owned by
VLC, users should take care of copying this struct for their internal usage.
See libvlc_event_t.u.renderer_discoverer_item_added.item
See libvlc_event_t.u.renderer_discoverer_item_removed.item.
@return: a valid event manager (can't fail).
@version: LibVLC 3.0.0 or later.
'''
f = _Cfunctions.get('libvlc_renderer_discoverer_event_manager', None) or \
_Cfunction('libvlc_renderer_discoverer_event_manager', ((1,),), class_result(EventManager),
ctypes.c_void_p, ctypes.c_void_p)
return f(p_rd)
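# A hedged lifecycle sketch for renderer discovery (never called here).
# `inst` is an assumed Instance, `on_item_added` a user callback, and
# 'microdns_renderer' an illustrative service name; real names come from
# libvlc_renderer_discoverer_list_get(). The EventType attribute name is
# assumed from the enum generated earlier in this module.
def _example_discover_renderers(inst, on_item_added):
    rd = libvlc_renderer_discoverer_new(inst, str_to_bytes('microdns_renderer'))
    em = libvlc_renderer_discoverer_event_manager(rd)
    em.event_attach(EventType.RendererDiscovererItemAdded, on_item_added)
    libvlc_renderer_discoverer_start(rd)
    return rd  # the caller should later stop and release it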
def libvlc_renderer_discoverer_list_get(p_inst, ppp_services):
'''Get media discoverer services
See libvlc_renderer_list_release().
@param p_inst: libvlc instance.
@param ppp_services: address to store an allocated array of renderer discoverer services (must be freed with libvlc_renderer_list_release() by the caller) [OUT].
@return: the number of media discoverer services (0 on error).
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_renderer_discoverer_list_get', None) or \
_Cfunction('libvlc_renderer_discoverer_list_get', ((1,), (1,),), None,
ctypes.c_size_t, Instance, ctypes.POINTER(ctypes.POINTER(RDDescription)))
return f(p_inst, ppp_services)
def libvlc_renderer_discoverer_list_release(pp_services, i_count):
'''Release an array of media discoverer services
See L{libvlc_renderer_discoverer_list_get}().
@param pp_services: array to release.
@param i_count: number of elements in the array.
@version: LibVLC 3.0.0 and later.
'''
f = _Cfunctions.get('libvlc_renderer_discoverer_list_release', None) or \
_Cfunction('libvlc_renderer_discoverer_list_release', ((1,), (1,),), None,
None, ctypes.POINTER(RDDescription), ctypes.c_size_t)
return f(pp_services, i_count)
def libvlc_vlm_release(p_instance):
'''Release the vlm instance related to the given L{Instance}.
@param p_instance: the instance.
'''
f = _Cfunctions.get('libvlc_vlm_release', None) or \
_Cfunction('libvlc_vlm_release', ((1,),), None,
None, Instance)
return f(p_instance)
def libvlc_vlm_add_broadcast(p_instance, psz_name, psz_input, psz_output, i_options, ppsz_options, b_enabled, b_loop):
'''Add a broadcast, with one input.
@param p_instance: the instance.
@param psz_name: the name of the new broadcast.
@param psz_input: the input MRL.
@param psz_output: the output MRL (the parameter to the "sout" variable).
@param i_options: number of additional options.
@param ppsz_options: additional options.
@param b_enabled: boolean for enabling the new broadcast.
    @param b_loop: should this broadcast be played in a loop?
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_vlm_add_broadcast', None) or \
_Cfunction('libvlc_vlm_add_broadcast', ((1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_int, ListPOINTER(ctypes.c_char_p), ctypes.c_int, ctypes.c_int)
return f(p_instance, psz_name, psz_input, psz_output, i_options, ppsz_options, b_enabled, b_loop)
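# A hedged VLM broadcast sketch (never called here); `inst` is an assumed
# Instance, the name/MRLs/sout chain are illustrative, and passing None for
# the empty option list is an assumption. libvlc_vlm_play_media() and
# libvlc_vlm_release() are defined further below in this module.
def _example_vlm_broadcast(inst):
    libvlc_vlm_add_broadcast(
        inst, b'chan1', b'file:///tmp/input.mp4',
        b'#standard{access=http,mux=ts,dst=:8080}',
        0, None, 1, 0)  # no extra options, enabled, no loop
    libvlc_vlm_play_media(inst, b'chan1')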
def libvlc_vlm_add_vod(p_instance, psz_name, psz_input, i_options, ppsz_options, b_enabled, psz_mux):
'''Add a vod, with one input.
@param p_instance: the instance.
@param psz_name: the name of the new vod media.
@param psz_input: the input MRL.
@param i_options: number of additional options.
@param ppsz_options: additional options.
@param b_enabled: boolean for enabling the new vod.
@param psz_mux: the muxer of the vod media.
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_vlm_add_vod', None) or \
_Cfunction('libvlc_vlm_add_vod', ((1,), (1,), (1,), (1,), (1,), (1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_int, ListPOINTER(ctypes.c_char_p), ctypes.c_int, ctypes.c_char_p)
return f(p_instance, psz_name, psz_input, i_options, ppsz_options, b_enabled, psz_mux)
def libvlc_vlm_del_media(p_instance, psz_name):
'''Delete a media (VOD or broadcast).
@param p_instance: the instance.
@param psz_name: the media to delete.
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_vlm_del_media', None) or \
_Cfunction('libvlc_vlm_del_media', ((1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p)
return f(p_instance, psz_name)
def libvlc_vlm_set_enabled(p_instance, psz_name, b_enabled):
'''Enable or disable a media (VOD or broadcast).
@param p_instance: the instance.
@param psz_name: the media to work on.
@param b_enabled: the new status.
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_vlm_set_enabled', None) or \
_Cfunction('libvlc_vlm_set_enabled', ((1,), (1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_int)
return f(p_instance, psz_name, b_enabled)
def libvlc_vlm_set_output(p_instance, psz_name, psz_output):
'''Set the output for a media.
@param p_instance: the instance.
@param psz_name: the media to work on.
@param psz_output: the output MRL (the parameter to the "sout" variable).
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_vlm_set_output', None) or \
_Cfunction('libvlc_vlm_set_output', ((1,), (1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_char_p)
return f(p_instance, psz_name, psz_output)
def libvlc_vlm_set_input(p_instance, psz_name, psz_input):
'''Set a media's input MRL. This will delete all existing inputs and
add the specified one.
@param p_instance: the instance.
@param psz_name: the media to work on.
@param psz_input: the input MRL.
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_vlm_set_input', None) or \
_Cfunction('libvlc_vlm_set_input', ((1,), (1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_char_p)
return f(p_instance, psz_name, psz_input)
def libvlc_vlm_add_input(p_instance, psz_name, psz_input):
    '''Add an input MRL to a media. The MRL is appended to the media's existing inputs.
@param p_instance: the instance.
@param psz_name: the media to work on.
@param psz_input: the input MRL.
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_vlm_add_input', None) or \
_Cfunction('libvlc_vlm_add_input', ((1,), (1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_char_p)
return f(p_instance, psz_name, psz_input)
def libvlc_vlm_set_loop(p_instance, psz_name, b_loop):
'''Set a media's loop status.
@param p_instance: the instance.
@param psz_name: the media to work on.
@param b_loop: the new status.
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_vlm_set_loop', None) or \
_Cfunction('libvlc_vlm_set_loop', ((1,), (1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_int)
return f(p_instance, psz_name, b_loop)
def libvlc_vlm_set_mux(p_instance, psz_name, psz_mux):
'''Set a media's vod muxer.
@param p_instance: the instance.
@param psz_name: the media to work on.
@param psz_mux: the new muxer.
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_vlm_set_mux', None) or \
_Cfunction('libvlc_vlm_set_mux', ((1,), (1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_char_p)
return f(p_instance, psz_name, psz_mux)
def libvlc_vlm_change_media(p_instance, psz_name, psz_input, psz_output, i_options, ppsz_options, b_enabled, b_loop):
'''Edit the parameters of a media. This will delete all existing inputs and
add the specified one.
@param p_instance: the instance.
@param psz_name: the name of the new broadcast.
@param psz_input: the input MRL.
@param psz_output: the output MRL (the parameter to the "sout" variable).
@param i_options: number of additional options.
@param ppsz_options: additional options.
@param b_enabled: boolean for enabling the new broadcast.
    @param b_loop: should this broadcast be played in a loop?
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_vlm_change_media', None) or \
_Cfunction('libvlc_vlm_change_media', ((1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_int, ListPOINTER(ctypes.c_char_p), ctypes.c_int, ctypes.c_int)
return f(p_instance, psz_name, psz_input, psz_output, i_options, ppsz_options, b_enabled, b_loop)
def libvlc_vlm_play_media(p_instance, psz_name):
'''Play the named broadcast.
@param p_instance: the instance.
@param psz_name: the name of the broadcast.
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_vlm_play_media', None) or \
_Cfunction('libvlc_vlm_play_media', ((1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p)
return f(p_instance, psz_name)
def libvlc_vlm_stop_media(p_instance, psz_name):
'''Stop the named broadcast.
@param p_instance: the instance.
@param psz_name: the name of the broadcast.
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_vlm_stop_media', None) or \
_Cfunction('libvlc_vlm_stop_media', ((1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p)
return f(p_instance, psz_name)
def libvlc_vlm_pause_media(p_instance, psz_name):
'''Pause the named broadcast.
@param p_instance: the instance.
@param psz_name: the name of the broadcast.
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_vlm_pause_media', None) or \
_Cfunction('libvlc_vlm_pause_media', ((1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p)
return f(p_instance, psz_name)
def libvlc_vlm_seek_media(p_instance, psz_name, f_percentage):
'''Seek in the named broadcast.
@param p_instance: the instance.
@param psz_name: the name of the broadcast.
@param f_percentage: the percentage to seek to.
@return: 0 on success, -1 on error.
'''
f = _Cfunctions.get('libvlc_vlm_seek_media', None) or \
_Cfunction('libvlc_vlm_seek_media', ((1,), (1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_float)
return f(p_instance, psz_name, f_percentage)
def libvlc_vlm_show_media(p_instance, psz_name):
    '''Return information about the named media as a JSON
    string representation.
    This function is mainly intended for debugging use.
    If you want programmatic access to the state of a
    vlm_media_instance_t, please use the corresponding
    libvlc_vlm_get_media_instance_xxx() functions.
    Currently there are no such functions available for
    vlm_media_t though.
@param p_instance: the instance.
    @param psz_name: the name of the media; if the name is an empty string, all media are described.
@return: string with information about named media, or None on error.
'''
f = _Cfunctions.get('libvlc_vlm_show_media', None) or \
_Cfunction('libvlc_vlm_show_media', ((1,), (1,),), string_result,
ctypes.c_void_p, Instance, ctypes.c_char_p)
return f(p_instance, psz_name)
def libvlc_vlm_get_media_instance_position(p_instance, psz_name, i_instance):
'''Get vlm_media instance position by name or instance id.
@param p_instance: a libvlc instance.
@param psz_name: name of vlm media instance.
@param i_instance: instance id.
    @return: position as float, or -1.0 on error.
'''
f = _Cfunctions.get('libvlc_vlm_get_media_instance_position', None) or \
_Cfunction('libvlc_vlm_get_media_instance_position', ((1,), (1,), (1,),), None,
ctypes.c_float, Instance, ctypes.c_char_p, ctypes.c_int)
return f(p_instance, psz_name, i_instance)
def libvlc_vlm_get_media_instance_time(p_instance, psz_name, i_instance):
'''Get vlm_media instance time by name or instance id.
@param p_instance: a libvlc instance.
@param psz_name: name of vlm media instance.
@param i_instance: instance id.
@return: time as integer or -1 on error.
'''
f = _Cfunctions.get('libvlc_vlm_get_media_instance_time', None) or \
_Cfunction('libvlc_vlm_get_media_instance_time', ((1,), (1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_int)
return f(p_instance, psz_name, i_instance)
def libvlc_vlm_get_media_instance_length(p_instance, psz_name, i_instance):
'''Get vlm_media instance length by name or instance id.
@param p_instance: a libvlc instance.
@param psz_name: name of vlm media instance.
@param i_instance: instance id.
@return: length of media item or -1 on error.
'''
f = _Cfunctions.get('libvlc_vlm_get_media_instance_length', None) or \
_Cfunction('libvlc_vlm_get_media_instance_length', ((1,), (1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_int)
return f(p_instance, psz_name, i_instance)
def libvlc_vlm_get_media_instance_rate(p_instance, psz_name, i_instance):
'''Get vlm_media instance playback rate by name or instance id.
@param p_instance: a libvlc instance.
@param psz_name: name of vlm media instance.
@param i_instance: instance id.
@return: playback rate or -1 on error.
'''
f = _Cfunctions.get('libvlc_vlm_get_media_instance_rate', None) or \
_Cfunction('libvlc_vlm_get_media_instance_rate', ((1,), (1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_int)
return f(p_instance, psz_name, i_instance)
def libvlc_vlm_get_media_instance_title(p_instance, psz_name, i_instance):
'''Get vlm_media instance title number by name or instance id.
@param p_instance: a libvlc instance.
@param psz_name: name of vlm media instance.
@param i_instance: instance id.
@return: title as number or -1 on error.
@bug: will always return 0.
'''
f = _Cfunctions.get('libvlc_vlm_get_media_instance_title', None) or \
_Cfunction('libvlc_vlm_get_media_instance_title', ((1,), (1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_int)
return f(p_instance, psz_name, i_instance)
def libvlc_vlm_get_media_instance_chapter(p_instance, psz_name, i_instance):
'''Get vlm_media instance chapter number by name or instance id.
@param p_instance: a libvlc instance.
@param psz_name: name of vlm media instance.
@param i_instance: instance id.
@return: chapter as number or -1 on error.
@bug: will always return 0.
'''
f = _Cfunctions.get('libvlc_vlm_get_media_instance_chapter', None) or \
_Cfunction('libvlc_vlm_get_media_instance_chapter', ((1,), (1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_int)
return f(p_instance, psz_name, i_instance)
def libvlc_vlm_get_media_instance_seekable(p_instance, psz_name, i_instance):
    '''Is the named vlm media instance seekable?
@param p_instance: a libvlc instance.
@param psz_name: name of vlm media instance.
@param i_instance: instance id.
@return: 1 if seekable, 0 if not, -1 if media does not exist.
@bug: will always return 0.
'''
f = _Cfunctions.get('libvlc_vlm_get_media_instance_seekable', None) or \
_Cfunction('libvlc_vlm_get_media_instance_seekable', ((1,), (1,), (1,),), None,
ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_int)
return f(p_instance, psz_name, i_instance)
def libvlc_vlm_get_event_manager(p_instance):
'''Get libvlc_event_manager from a vlm media.
The p_event_manager is immutable, so you don't have to hold the lock.
@param p_instance: a libvlc instance.
@return: libvlc_event_manager.
'''
f = _Cfunctions.get('libvlc_vlm_get_event_manager', None) or \
_Cfunction('libvlc_vlm_get_event_manager', ((1,),), class_result(EventManager),
ctypes.c_void_p, Instance)
return f(p_instance)
# 5 function(s) blacklisted:
# libvlc_audio_output_get_device_type
# libvlc_audio_output_set_device_type
# libvlc_dialog_set_callbacks
# libvlc_printerr
# libvlc_set_exit_handler
# 47 function(s) not wrapped as methods:
# libvlc_audio_equalizer_get_amp_at_index
# libvlc_audio_equalizer_get_band_count
# libvlc_audio_equalizer_get_band_frequency
# libvlc_audio_equalizer_get_preamp
# libvlc_audio_equalizer_get_preset_count
# libvlc_audio_equalizer_get_preset_name
# libvlc_audio_equalizer_new
# libvlc_audio_equalizer_new_from_preset
# libvlc_audio_equalizer_release
# libvlc_audio_equalizer_set_amp_at_index
# libvlc_audio_equalizer_set_preamp
# libvlc_audio_output_device_list_release
# libvlc_audio_output_list_release
# libvlc_chapter_descriptions_release
# libvlc_clearerr
# libvlc_clock
# libvlc_dialog_dismiss
# libvlc_dialog_get_context
# libvlc_dialog_post_action
# libvlc_dialog_post_login
# libvlc_dialog_set_context
# libvlc_errmsg
# libvlc_event_type_name
# libvlc_free
# libvlc_get_changeset
# libvlc_get_compiler
# libvlc_get_version
# libvlc_log_get_context
# libvlc_log_get_object
# libvlc_media_discoverer_list_release
# libvlc_media_get_codec_description
# libvlc_media_slaves_release
# libvlc_media_tracks_release
# libvlc_module_description_list_release
# libvlc_new
# libvlc_renderer_discoverer_event_manager
# libvlc_renderer_discoverer_list_release
# libvlc_renderer_discoverer_release
# libvlc_renderer_discoverer_start
# libvlc_renderer_discoverer_stop
# libvlc_renderer_item_flags
# libvlc_renderer_item_icon_uri
# libvlc_renderer_item_name
# libvlc_renderer_item_type
# libvlc_title_descriptions_release
# libvlc_track_description_list_release
# libvlc_vprinterr
# Start of footer.py #
# Backward compatibility
def callbackmethod(callback):
"""Now obsolete @callbackmethod decorator."""
return callback
# libvlc_free is not present in some versions of libvlc. If it is not
# in the library, then emulate it by calling libc.free
if not hasattr(dll, 'libvlc_free'):
# need to find the free function in the C runtime. This is
# platform specific.
# For Linux and MacOSX
libc_path = find_library('c')
if libc_path:
libc = ctypes.CDLL(libc_path)
libvlc_free = libc.free
else:
        # On win32, it is impossible to guess the proper lib to call
        # (msvcrt, mingw...). Just ignore the call: it will leak memory,
        # but will not prevent the application from running.
def libvlc_free(p):
pass
# ensure argtypes is right, because default type of int won't
# work on 64-bit systems
libvlc_free.argtypes = [ ctypes.c_void_p ]
# Version functions
def _dot2int(v):
'''(INTERNAL) Convert 'i.i.i[.i]' str to int.
'''
t = [int(i) for i in v.split('.')]
if len(t) == 3:
t.append(0)
elif len(t) != 4:
raise ValueError('"i.i.i[.i]": %r' % (v,))
if min(t) < 0 or max(t) > 255:
raise ValueError('[0..255]: %r' % (v,))
i = t.pop(0)
while t:
i = (i << 8) + t.pop(0)
return i
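# For example, the packing yields one byte per version component:
#   _dot2int('3.0.0')   == 0x03000000
#   _dot2int('2.2.6.0') == 0x02020600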
def hex_version():
"""Return the version of these bindings in hex or 0 if unavailable.
"""
try:
return _dot2int(__version__)
except (NameError, ValueError):
return 0
def libvlc_hex_version():
"""Return the libvlc version in hex or 0 if unavailable.
"""
try:
return _dot2int(bytes_to_str(libvlc_get_version()).split()[0])
except ValueError:
return 0
def debug_callback(event, *args, **kwds):
'''Example callback, useful for debugging.
'''
l = ['event %s' % (event.type,)]
if args:
l.extend(map(str, args))
if kwds:
l.extend(sorted('%s=%s' % t for t in kwds.items()))
print('Debug callback (%s)' % ', '.join(l))
if __name__ == '__main__':
try:
from msvcrt import getch
except ImportError:
import termios
import tty
def getch(): # getchar(), getc(stdin) #PYCHOK flake
fd = sys.stdin.fileno()
old = termios.tcgetattr(fd)
try:
tty.setraw(fd)
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old)
return ch
def end_callback(event):
print('End of media stream (event %s)' % event.type)
sys.exit(0)
echo_position = False
def pos_callback(event, player):
if echo_position:
sys.stdout.write('\r%s to %.2f%% (%.2f%%)' % (event.type,
event.u.new_position * 100,
player.get_position() * 100))
sys.stdout.flush()
def print_version():
"""Print version of this vlc.py and of the libvlc"""
try:
print('Build date: %s (%#x)' % (build_date, hex_version()))
print('LibVLC version: %s (%#x)' % (bytes_to_str(libvlc_get_version()), libvlc_hex_version()))
print('LibVLC compiler: %s' % bytes_to_str(libvlc_get_compiler()))
if plugin_path:
print('Plugin path: %s' % plugin_path)
        except Exception:
print('Error: %s' % sys.exc_info()[1])
if sys.argv[1:] and '-h' not in sys.argv[1:] and '--help' not in sys.argv[1:]:
movie = os.path.expanduser(sys.argv.pop())
if not os.access(movie, os.R_OK):
print('Error: %s file not readable' % movie)
sys.exit(1)
# Need --sub-source=marq in order to use marquee below
instance = Instance(["--sub-source=marq"] + sys.argv[1:])
try:
media = instance.media_new(movie)
except (AttributeError, NameError) as e:
print('%s: %s (%s %s vs LibVLC %s)' % (e.__class__.__name__, e,
sys.argv[0], __version__,
libvlc_get_version()))
sys.exit(1)
player = instance.media_player_new()
player.set_media(media)
player.play()
# Some marquee examples. Marquee requires '--sub-source marq' in the
# Instance() call above, see <http://www.videolan.org/doc/play-howto/en/ch04.html>
player.video_set_marquee_int(VideoMarqueeOption.Enable, 1)
player.video_set_marquee_int(VideoMarqueeOption.Size, 24) # pixels
player.video_set_marquee_int(VideoMarqueeOption.Position, Position.Bottom)
if False: # only one marquee can be specified
player.video_set_marquee_int(VideoMarqueeOption.Timeout, 5000) # millisec, 0==forever
t = media.get_mrl() # movie
else: # update marquee text periodically
player.video_set_marquee_int(VideoMarqueeOption.Timeout, 0) # millisec, 0==forever
player.video_set_marquee_int(VideoMarqueeOption.Refresh, 1000) # millisec (or sec?)
##t = '$L / $D or $P at $T'
t = '%Y-%m-%d %H:%M:%S'
player.video_set_marquee_string(VideoMarqueeOption.Text, str_to_bytes(t))
# Some event manager examples. Note, the callback can be any Python
# callable and does not need to be decorated. Optionally, specify
# any number of positional and/or keyword arguments to be passed
# to the callback (in addition to the first one, an Event instance).
event_manager = player.event_manager()
event_manager.event_attach(EventType.MediaPlayerEndReached, end_callback)
event_manager.event_attach(EventType.MediaPlayerPositionChanged, pos_callback, player)
def mspf():
"""Milliseconds per frame"""
return int(1000 // (player.get_fps() or 25))
def print_info():
"""Print information about the media"""
try:
print_version()
media = player.get_media()
print('State: %s' % player.get_state())
print('Media: %s' % bytes_to_str(media.get_mrl()))
print('Track: %s/%s' % (player.video_get_track(), player.video_get_track_count()))
print('Current time: %s/%s' % (player.get_time(), media.get_duration()))
print('Position: %s' % player.get_position())
print('FPS: %s (%d ms)' % (player.get_fps(), mspf()))
print('Rate: %s' % player.get_rate())
print('Video size: %s' % str(player.video_get_size(0))) # num=0
print('Scale: %s' % player.video_get_scale())
print('Aspect ratio: %s' % player.video_get_aspect_ratio())
            # print('Window: %s' % player.get_hwnd())
except Exception:
print('Error: %s' % sys.exc_info()[1])
def sec_forward():
"""Go forward one sec"""
player.set_time(player.get_time() + 1000)
def sec_backward():
"""Go backward one sec"""
player.set_time(player.get_time() - 1000)
def frame_forward():
"""Go forward one frame"""
player.set_time(player.get_time() + mspf())
def frame_backward():
"""Go backward one frame"""
player.set_time(player.get_time() - mspf())
def print_help():
"""Print help"""
print('Single-character commands:')
for k, m in sorted(keybindings.items()):
m = (m.__doc__ or m.__name__).splitlines()[0]
print(' %s: %s.' % (k, m.rstrip('.')))
print('0-9: go to that fraction of the movie')
def quit_app():
"""Stop and exit"""
sys.exit(0)
def toggle_echo_position():
"""Toggle echoing of media position"""
global echo_position
echo_position = not echo_position
keybindings = {
' ': player.pause,
'+': sec_forward,
'-': sec_backward,
'.': frame_forward,
',': frame_backward,
'f': player.toggle_fullscreen,
'i': print_info,
'p': toggle_echo_position,
'q': quit_app,
'?': print_help,
}
print('Press q to quit, ? to get help.%s' % os.linesep)
while True:
k = getch()
print('> %s' % k)
if k in keybindings:
keybindings[k]()
elif k.isdigit():
# jump to fraction of the movie.
player.set_position(float('0.'+k))
else:
print('Usage: %s [options] <movie_filename>' % sys.argv[0])
print('Once launched, type ? for help.')
print('')
print_version()
|
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the ""Software""), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# --------------------------------------------------------------------------
import os
from azure.identity import ClientSecretCredential
from azure.schemaregistry import SchemaRegistryClient
from azure.schemaregistry.serializer.avroserializer import SchemaRegistryAvroSerializer
TENANT_ID=os.environ['SCHEMA_REGISTRY_AZURE_TENANT_ID']
CLIENT_ID=os.environ['SCHEMA_REGISTRY_AZURE_CLIENT_ID']
CLIENT_SECRET=os.environ['SCHEMA_REGISTRY_AZURE_CLIENT_SECRET']
SCHEMA_REGISTRY_ENDPOINT=os.environ['SCHEMA_REGISTRY_ENDPOINT']
SCHEMA_GROUP=os.environ['SCHEMA_REGISTRY_GROUP']
SCHEMA_STRING = """
{"namespace": "example.avro",
"type": "record",
"name": "User",
"fields": [
{"name": "name", "type": "string"},
{"name": "favorite_number", "type": ["int", "null"]},
{"name": "favorite_color", "type": ["string", "null"]}
]
}"""
token_credential = ClientSecretCredential(
tenant_id=TENANT_ID,
client_id=CLIENT_ID,
client_secret=CLIENT_SECRET
)
def serialize(serializer):
dict_data_ben = {"name": u"Ben", "favorite_number": 7, "favorite_color": u"red"}
dict_data_alice = {"name": u"Alice", "favorite_number": 15, "favorite_color": u"green"}
    # The schema will be automatically registered with the Schema Registry and cached locally.
payload_ben = serializer.serialize(dict_data_ben, SCHEMA_STRING)
# The second call won't trigger a service call.
payload_alice = serializer.serialize(dict_data_alice, SCHEMA_STRING)
print('Encoded bytes are: ', payload_ben)
print('Encoded bytes are: ', payload_alice)
return [payload_ben, payload_alice]
def deserialize(serializer, bytes_payload):
# serializer.deserialize would extract the schema id from the payload,
# retrieve schema from Schema Registry and cache the schema locally.
    # If the schema id is in the local cache, the call won't trigger a service call.
dict_data = serializer.deserialize(bytes_payload)
print('Deserialized data is: ', dict_data)
return dict_data
if __name__ == '__main__':
schema_registry = SchemaRegistryClient(endpoint=SCHEMA_REGISTRY_ENDPOINT, credential=token_credential)
serializer = SchemaRegistryAvroSerializer(schema_registry, SCHEMA_GROUP)
bytes_data_ben, bytes_data_alice = serialize(serializer)
dict_data_ben = deserialize(serializer, bytes_data_ben)
dict_data_alice = deserialize(serializer, bytes_data_alice)
serializer.close()
|
import sys
from PyQt5 import QtWidgets, QtCore, QtGui
class MyWidget(QtWidgets.QWidget):
def __init__(self):
super().__init__()
# Hiding the window title
self.setWindowFlag(QtCore.Qt.FramelessWindowHint)
# Will not be displayed
self.setWindowTitle('-- Light_Manager_v0.01 --')
# Changing the color to Black
self.setStyleSheet('background-color: rgb(10,10,10)')
'''
WIDGETS
'''
# Setting up the Title Label
self.title = QtWidgets.QLabel('Title', self)
self.title.setStyleSheet('color: white')
        # Setting up the close button (named closeButton so it does not
        # shadow the inherited QWidget.close() method)
        self.closeButton = QtWidgets.QPushButton('close', self)
        self.closeButton.setStyleSheet('color: white')
# Setting up the Icon
self.icon = QtWidgets.QLabel('ICON', self)
self.icon.setStyleSheet('color: white')
# Setting up the loading bar
self.loadBar = QtWidgets.QLabel('LOADING BAR', self)
self.loadBar.setStyleSheet('color: white')
# Setting up the loading infos
self.infoTop = QtWidgets.QLabel('Info top', self)
self.infoTop.setStyleSheet('color: white')
self.infoBot = QtWidgets.QLabel('Info bottom', self)
self.infoBot.setStyleSheet('color: white')
'''
LAYOUT
'''
self.mainLayout = QtWidgets.QVBoxLayout(self)
# Title and close button
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.addWidget(self.title)
        self.horizontalLayout.addWidget(self.closeButton)
        # Adding the horizontal layout to the top of the main layout
self.mainLayout.addLayout(self.horizontalLayout)
# Adding the other widgets to the vertical layout
self.mainLayout.addWidget(self.icon)
self.mainLayout.addWidget(self.loadBar)
self.mainLayout.addWidget(self.infoTop)
self.mainLayout.addWidget(self.infoBot)
if __name__ == '__main__':
app = QtWidgets.QApplication([])
widget = MyWidget()
widget.resize(800,500)
widget.show()
sys.exit(app.exec())
|
# Copyright (c) 2017- Salas Lin (leVirve)
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import grad
import onegan
def adversarial_ce_loss(x, value: float):
''' x: output tensor of discriminator
value: float
'''
label = torch.zeros_like(x).fill_(value)
return nn.functional.binary_cross_entropy(x, label)
def adversarial_ls_loss(x, value: float):
''' x: output tensor of discriminator
value: float
'''
label = torch.zeros_like(x).fill_(value)
return nn.functional.mse_loss(x, label)
def adversarial_w_loss(x, value: bool):
''' x: output tensor of discriminator
value: True -> -1, False -> 1
'''
return -torch.mean(x) if value else torch.mean(x)
def gradient_penalty(dnet, target, pred):
w = torch.rand(target.size(0), 1, 1, 1, device=onegan.device()).expand_as(target)
    # interpolate between real and fake samples as a fresh leaf tensor that
    # requires grad (torch.tensor(tensor, ...) is deprecated for this purpose)
    interp = (w * target + (1 - w) * pred).detach().requires_grad_(True)
output = dnet(interp)
grads = grad(outputs=output, inputs=interp,
grad_outputs=torch.ones(output.size(), device=onegan.device()),
create_graph=True, retain_graph=True)[0]
return ((grads.view(grads.size(0), -1).norm(dim=1) - 1) ** 2).mean()
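# A hedged sketch of a full WGAN-GP discriminator objective built from the
# helpers above; `dnet`, `real` and `fake` come from the surrounding training
# loop, and the penalty weight 10 follows the WGAN-GP paper. Not used
# elsewhere in this module.
def _example_wgan_gp_d_loss(dnet, real, fake, penalty_weight=10.0):
    return (adversarial_w_loss(dnet(real), True)
            + adversarial_w_loss(dnet(fake), False)
            + penalty_weight * gradient_penalty(dnet, real, fake.detach()))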
def conditional_input(source, another, conditional):
return torch.cat((source, another), dim=1) if conditional else another
class FocalLoss2d(nn.Module):
def __init__(self, gamma=2, weight=None, size_average=True, ignore_index=255):
super(FocalLoss2d, self).__init__()
self.gamma = gamma
self.nll_loss = nn.NLLLoss(weight, size_average, ignore_index)
def forward(self, inputs, targets):
return self.nll_loss((1 - F.softmax(inputs, dim=1)) ** self.gamma * F.log_softmax(inputs, dim=1), targets)
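# A short hedged usage sketch for FocalLoss2d (shapes are illustrative;
# the inputs are raw, unnormalized class scores, since the softmax happens
# inside forward()):
#
#   criterion = FocalLoss2d(gamma=2)
#   logits = torch.randn(4, 21, 64, 64)          # (N, C, H, W)
#   target = torch.randint(0, 21, (4, 64, 64))   # (N, H, W) class indices
#   loss = criterion(logits, target)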
|
from collections.abc import Hashable
from copy import deepcopy
from itertools import chain, product
from functools import partial
import warnings
import numpy as np
from numpy import ma
import pandas as pd
from pandas.api.types import is_categorical_dtype
import pytest
from scipy import sparse
from boltons.iterutils import research, remap, default_exit
from anndata import AnnData, Raw, concat
from anndata._core.index import _subset
from anndata._core import merge
from anndata.tests import helpers
from anndata.tests.helpers import assert_equal, gen_adata
from anndata.utils import asarray
@pytest.fixture(
params=[asarray, sparse.csr_matrix, sparse.csc_matrix],
ids=["np_array", "scipy_csr", "scipy_csc"],
)
def array_type(request):
return request.param
@pytest.fixture(params=["inner", "outer"])
def join_type(request):
return request.param
@pytest.fixture(params=[0, np.nan, np.pi])
def fill_val(request):
return request.param
@pytest.fixture(params=[0, 1])
def axis(request):
return request.param
@pytest.fixture(params=list(merge.MERGE_STRATEGIES.keys()))
def merge_strategy(request):
return request.param
def fix_known_differences(orig, result, backwards_compat=True):
"""
    Helper function for reducing AnnData objects to only the elements we expect
    to be equivalent after concatenation.
    Only for the case where orig is the ground-truth result of what the
    concatenation should be.
    If backwards_compat, checks against what `AnnData.concatenate` could do.
    Otherwise checks against `concat`.
"""
orig = orig.copy()
result = result.copy()
result.strings_to_categoricals() # Should this be implicit in concatenation?
# TODO
# * merge varm, varp similar to uns
# * merge obsp, but some information should be lost
del orig.obsp # TODO
if backwards_compat:
del orig.varm
del orig.varp
result.obs.drop(columns=["batch"], inplace=True)
    # Possibly need to fix this: ordered categoricals lose orderedness
for k, dtype in orig.obs.dtypes.items():
if is_categorical_dtype(dtype) and dtype.ordered:
result.obs[k] = result.obs[k].astype(dtype)
return orig, result
@pytest.mark.parametrize(
["concat_func", "backwards_compat"],
[
(partial(concat, merge="unique"), False),
(lambda x, **kwargs: x[0].concatenate(x[1:], **kwargs), True),
],
)
def test_concatenate_roundtrip(join_type, array_type, concat_func, backwards_compat):
adata = gen_adata((100, 10), X_type=array_type)
remaining = adata.obs_names
subsets = []
while len(remaining) > 0:
n = min(len(remaining), np.random.choice(50))
subset_idx = np.random.choice(remaining, n, replace=False)
subsets.append(adata[subset_idx])
remaining = remaining.difference(subset_idx)
result = concat_func(subsets, join=join_type, uns_merge="same", index_unique=None)
# Correcting for known differences
orig, result = fix_known_differences(
adata, result, backwards_compat=backwards_compat
)
assert_equal(result[orig.obs_names].copy(), orig)
def test_concatenate_dense():
# dense data
X1 = np.array([[1, 2, 3], [4, 5, 6]])
X2 = np.array([[1, 2, 3], [4, 5, 6]])
X3 = np.array([[1, 2, 3], [4, 5, 6]])
adata1 = AnnData(
X1,
dict(obs_names=["s1", "s2"], anno1=["c1", "c2"]),
dict(var_names=["a", "b", "c"], annoA=[0, 1, 2]),
obsm=dict(X_1=X1, X_2=X2, X_3=X3),
layers=dict(Xs=X1),
)
adata2 = AnnData(
X2,
dict(obs_names=["s3", "s4"], anno1=["c3", "c4"]),
dict(var_names=["d", "c", "b"], annoA=[0, 1, 2]),
obsm=dict(X_1=X1, X_2=X2, X_3=X3),
layers={"Xs": X2},
)
adata3 = AnnData(
X3,
dict(obs_names=["s1", "s2"], anno2=["d3", "d4"]),
dict(var_names=["d", "c", "b"], annoB=[0, 1, 2]),
obsm=dict(X_1=X1, X_2=X2),
layers=dict(Xs=X3),
)
# inner join
adata = adata1.concatenate(adata2, adata3)
X_combined = [[2, 3], [5, 6], [3, 2], [6, 5], [3, 2], [6, 5]]
assert adata.X.astype(int).tolist() == X_combined
assert adata.layers["Xs"].astype(int).tolist() == X_combined
assert adata.obs_keys() == ["anno1", "anno2", "batch"]
assert adata.var_keys() == ["annoA-0", "annoA-1", "annoB-2"]
assert adata.var.values.tolist() == [[1, 2, 2], [2, 1, 1]]
assert adata.obsm_keys() == ["X_1", "X_2"]
assert adata.obsm["X_1"].tolist() == np.concatenate([X1, X1, X1]).tolist()
# with batch_key and batch_categories
adata = adata1.concatenate(adata2, adata3, batch_key="batch1")
assert adata.obs_keys() == ["anno1", "anno2", "batch1"]
adata = adata1.concatenate(adata2, adata3, batch_categories=["a1", "a2", "a3"])
assert adata.obs["batch"].cat.categories.tolist() == ["a1", "a2", "a3"]
assert adata.var_names.tolist() == ["b", "c"]
# outer join
adata = adata1.concatenate(adata2, adata3, join="outer")
X_ref = np.array(
[
[1.0, 2.0, 3.0, np.nan],
[4.0, 5.0, 6.0, np.nan],
[np.nan, 3.0, 2.0, 1.0],
[np.nan, 6.0, 5.0, 4.0],
[np.nan, 3.0, 2.0, 1.0],
[np.nan, 6.0, 5.0, 4.0],
]
)
np.testing.assert_equal(adata.X, X_ref)
var_ma = ma.masked_invalid(adata.var.values.tolist())
var_ma_ref = ma.masked_invalid(
np.array(
[
[0.0, np.nan, np.nan],
[1.0, 2.0, 2.0],
[2.0, 1.0, 1.0],
[np.nan, 0.0, 0.0],
]
)
)
assert np.array_equal(var_ma.mask, var_ma_ref.mask)
assert np.allclose(var_ma.compressed(), var_ma_ref.compressed())
def test_concatenate_layers(array_type, join_type):
adatas = []
for _ in range(5):
a = array_type(sparse.random(100, 200, format="csr"))
adatas.append(AnnData(X=a, layers={"a": a}))
merged = adatas[0].concatenate(adatas[1:], join=join_type)
assert_equal(merged.X, merged.layers["a"])
@pytest.fixture
def obsm_adatas():
def gen_index(n):
return [f"cell{i}" for i in range(n)]
return [
AnnData(
X=sparse.csr_matrix((3, 5)),
obs=pd.DataFrame(index=gen_index(3)),
obsm={
"dense": np.arange(6).reshape(3, 2),
"sparse": sparse.csr_matrix(np.arange(6).reshape(3, 2)),
"df": pd.DataFrame(
{
"a": np.arange(3),
"b": list("abc"),
"c": pd.Categorical(list("aab")),
},
index=gen_index(3),
),
},
),
AnnData(
X=sparse.csr_matrix((4, 10)),
obs=pd.DataFrame(index=gen_index(4)),
obsm=dict(
dense=np.arange(12).reshape(4, 3),
df=pd.DataFrame(dict(a=np.arange(3, 7)), index=gen_index(4)),
),
),
AnnData(
X=sparse.csr_matrix((2, 100)),
obs=pd.DataFrame(index=gen_index(2)),
obsm={
"sparse": np.arange(8).reshape(2, 4),
"dense": np.arange(4, 8).reshape(2, 2),
"df": pd.DataFrame(
{
"a": np.arange(7, 9),
"b": list("cd"),
"c": pd.Categorical(list("ab")),
},
index=gen_index(2),
),
},
),
]
def test_concatenate_obsm_inner(obsm_adatas):
adata = obsm_adatas[0].concatenate(obsm_adatas[1:], join="inner")
assert set(adata.obsm.keys()) == {"dense", "df"}
assert adata.obsm["dense"].shape == (9, 2)
assert adata.obsm["dense"].tolist() == [
[0, 1],
[2, 3],
[4, 5],
[0, 1],
[3, 4],
[6, 7],
[9, 10],
[4, 5],
[6, 7],
]
assert adata.obsm["df"].columns == ["a"]
assert adata.obsm["df"]["a"].tolist() == list(range(9))
# fmt: off
true_df = (
pd.concat([a.obsm["df"] for a in obsm_adatas], join="inner")
.reset_index(drop=True)
)
# fmt: on
cur_df = adata.obsm["df"].reset_index(drop=True)
pd.testing.assert_frame_equal(true_df, cur_df)
def test_concatenate_obsm_outer(obsm_adatas, fill_val):
outer = obsm_adatas[0].concatenate(
obsm_adatas[1:], join="outer", fill_value=fill_val
)
inner = obsm_adatas[0].concatenate(obsm_adatas[1:], join="inner")
for k, inner_v in inner.obsm.items():
assert np.array_equal(
_subset(outer.obsm[k], (slice(None), slice(None, inner_v.shape[1]))),
inner_v,
)
assert set(outer.obsm.keys()) == {"dense", "df", "sparse"}
assert isinstance(outer.obsm["dense"], np.ndarray)
np.testing.assert_equal(
outer.obsm["dense"],
np.array(
[
[0, 1, fill_val],
[2, 3, fill_val],
[4, 5, fill_val],
[0, 1, 2],
[3, 4, 5],
[6, 7, 8],
[9, 10, 11],
[4, 5, fill_val],
[6, 7, fill_val],
]
),
)
assert isinstance(outer.obsm["sparse"], sparse.spmatrix)
np.testing.assert_equal(
outer.obsm["sparse"].toarray(),
np.array(
[
[0, 1, fill_val, fill_val],
[2, 3, fill_val, fill_val],
[4, 5, fill_val, fill_val],
[fill_val, fill_val, fill_val, fill_val],
[fill_val, fill_val, fill_val, fill_val],
[fill_val, fill_val, fill_val, fill_val],
[fill_val, fill_val, fill_val, fill_val],
[0, 1, 2, 3],
[4, 5, 6, 7],
]
),
)
# fmt: off
true_df = (
pd.concat([a.obsm["df"] for a in obsm_adatas], join="outer")
.reset_index(drop=True)
)
# fmt: on
cur_df = outer.obsm["df"].reset_index(drop=True)
pd.testing.assert_frame_equal(true_df, cur_df)
def test_concat_annot_join(obsm_adatas, join_type):
adatas = [
AnnData(sparse.csr_matrix(a.shape), obs=a.obsm["df"], var=a.var)
for a in obsm_adatas
]
pd.testing.assert_frame_equal(
concat(adatas, join=join_type).obs,
pd.concat([a.obs for a in adatas], join=join_type),
)
def test_concatenate_layers_misaligned(array_type, join_type):
adatas = []
for _ in range(5):
a = array_type(sparse.random(100, 200, format="csr"))
adata = AnnData(X=a, layers={"a": a})
adatas.append(
adata[:, np.random.choice(adata.var_names, 150, replace=False)].copy()
)
merged = adatas[0].concatenate(adatas[1:], join=join_type)
assert_equal(merged.X, merged.layers["a"])
def test_concatenate_layers_outer(array_type, fill_val):
# Testing that issue #368 is fixed
a = AnnData(
X=np.ones((10, 20)),
layers={"a": array_type(sparse.random(10, 20, format="csr"))},
)
b = AnnData(X=np.ones((10, 20)))
c = a.concatenate(b, join="outer", fill_value=fill_val, batch_categories=["a", "b"])
np.testing.assert_array_equal(
asarray(c[c.obs["batch"] == "b"].layers["a"]), fill_val
)
def test_concatenate_fill_value(fill_val):
def get_obs_els(adata):
return {
"X": adata.X,
**{f"layer_{k}": adata.layers[k] for k in adata.layers},
**{f"obsm_{k}": adata.obsm[k] for k in adata.obsm},
}
adata1 = gen_adata((10, 10))
adata1.obsm = {
k: v for k, v in adata1.obsm.items() if not isinstance(v, pd.DataFrame)
}
adata2 = gen_adata((10, 5))
adata2.obsm = {
k: v[:, : v.shape[1] // 2]
for k, v in adata2.obsm.items()
if not isinstance(v, pd.DataFrame)
}
adata3 = gen_adata((7, 3))
adata3.obsm = {
k: v[:, : v.shape[1] // 3]
for k, v in adata3.obsm.items()
if not isinstance(v, pd.DataFrame)
}
joined = adata1.concatenate([adata2, adata3], join="outer", fill_value=fill_val)
ptr = 0
for orig in [adata1, adata2, adata3]:
cur = joined[ptr : ptr + orig.n_obs]
cur_els = get_obs_els(cur)
orig_els = get_obs_els(orig)
for k, cur_v in cur_els.items():
orig_v = orig_els.get(k, sparse.csr_matrix((orig.n_obs, 0)))
assert_equal(cur_v[:, : orig_v.shape[1]], orig_v)
np.testing.assert_equal(asarray(cur_v[:, orig_v.shape[1] :]), fill_val)
ptr += orig.n_obs
def test_concatenate_dense_duplicates():
X1 = np.array([[1, 2, 3], [4, 5, 6]])
X2 = np.array([[1, 2, 3], [4, 5, 6]])
X3 = np.array([[1, 2, 3], [4, 5, 6]])
# inner join duplicates
adata1 = AnnData(
X1,
dict(obs_names=["s1", "s2"], anno1=["c1", "c2"]),
dict(
var_names=["a", "b", "c"],
annoA=[0, 1, 2],
annoB=[1.1, 1.0, 2.0],
annoC=[1.1, 1.0, 2.0],
annoD=[2.1, 2.0, 3.0],
),
)
adata2 = AnnData(
X2,
dict(obs_names=["s3", "s4"], anno1=["c3", "c4"]),
dict(
var_names=["a", "b", "c"],
annoA=[0, 1, 2],
annoB=[1.1, 1.0, 2.0],
annoC=[1.1, 1.0, 2.0],
annoD=[2.1, 2.0, 3.0],
),
)
adata3 = AnnData(
X3,
dict(obs_names=["s1", "s2"], anno2=["d3", "d4"]),
dict(
var_names=["a", "b", "c"],
annoA=[0, 1, 2],
annoB=[1.1, 1.0, 2.0],
annoD=[2.1, 2.0, 3.1],
),
)
adata = adata1.concatenate(adata2, adata3)
assert adata.var_keys() == [
"annoA",
"annoB",
"annoC-0",
"annoD-0",
"annoC-1",
"annoD-1",
"annoD-2",
]
def test_concatenate_sparse():
# sparse data
from scipy.sparse import csr_matrix
X1 = csr_matrix([[0, 2, 3], [0, 5, 6]])
X2 = csr_matrix([[0, 2, 3], [0, 5, 6]])
X3 = csr_matrix([[1, 2, 0], [0, 5, 6]])
adata1 = AnnData(
X1,
dict(obs_names=["s1", "s2"], anno1=["c1", "c2"]),
dict(var_names=["a", "b", "c"]),
layers=dict(Xs=X1),
)
adata2 = AnnData(
X2,
dict(obs_names=["s3", "s4"], anno1=["c3", "c4"]),
dict(var_names=["d", "c", "b"]),
layers=dict(Xs=X2),
)
adata3 = AnnData(
X3,
dict(obs_names=["s5", "s6"], anno2=["d3", "d4"]),
dict(var_names=["d", "c", "b"]),
layers=dict(Xs=X3),
)
# inner join
adata = adata1.concatenate(adata2, adata3)
X_combined = [[2, 3], [5, 6], [3, 2], [6, 5], [0, 2], [6, 5]]
assert adata.X.toarray().astype(int).tolist() == X_combined
assert adata.layers["Xs"].toarray().astype(int).tolist() == X_combined
# outer join
adata = adata1.concatenate(adata2, adata3, join="outer")
assert adata.X.toarray().tolist() == [
[0.0, 2.0, 3.0, 0.0],
[0.0, 5.0, 6.0, 0.0],
[0.0, 3.0, 2.0, 0.0],
[0.0, 6.0, 5.0, 0.0],
[0.0, 0.0, 2.0, 1.0],
[0.0, 6.0, 5.0, 0.0],
]
def test_concatenate_mixed():
X1 = sparse.csr_matrix(np.array([[1, 2, 0], [4, 0, 6], [0, 0, 9]]))
X2 = sparse.csr_matrix(np.array([[0, 2, 3], [4, 0, 0], [7, 0, 9]]))
X3 = sparse.csr_matrix(np.array([[1, 0, 3], [0, 0, 6], [0, 8, 0]]))
X4 = np.array([[0, 2, 3], [4, 0, 0], [7, 0, 9]])
adata1 = AnnData(
X1,
dict(obs_names=["s1", "s2", "s3"], anno1=["c1", "c2", "c3"]),
dict(var_names=["a", "b", "c"], annoA=[0, 1, 2]),
layers=dict(counts=X1),
)
adata2 = AnnData(
X2,
dict(obs_names=["s4", "s5", "s6"], anno1=["c3", "c4", "c5"]),
dict(var_names=["d", "c", "b"], annoA=[0, 1, 2]),
layers=dict(counts=X4), # sic
)
adata3 = AnnData(
X3,
dict(obs_names=["s7", "s8", "s9"], anno2=["d3", "d4", "d5"]),
dict(var_names=["d", "c", "b"], annoA=[0, 2, 3], annoB=[0, 1, 2]),
layers=dict(counts=X3),
)
adata4 = AnnData(
X4,
dict(obs_names=["s4", "s5", "s6"], anno1=["c3", "c4", "c5"]),
dict(var_names=["d", "c", "b"], annoA=[0, 1, 2]),
layers=dict(counts=X2), # sic
)
adata_all = AnnData.concatenate(adata1, adata2, adata3, adata4)
assert isinstance(adata_all.X, sparse.csr_matrix)
assert isinstance(adata_all.layers["counts"], sparse.csr_matrix)
def test_concatenate_with_raw():
# dense data
X1 = np.array([[1, 2, 3], [4, 5, 6]])
X2 = np.array([[1, 2, 3], [4, 5, 6]])
X3 = np.array([[1, 2, 3], [4, 5, 6]])
X4 = np.array([[1, 2, 3, 4], [5, 6, 7, 8]])
adata1 = AnnData(
X1,
dict(obs_names=["s1", "s2"], anno1=["c1", "c2"]),
dict(var_names=["a", "b", "c"], annoA=[0, 1, 2]),
layers=dict(Xs=X1),
)
adata2 = AnnData(
X2,
dict(obs_names=["s3", "s4"], anno1=["c3", "c4"]),
dict(var_names=["d", "c", "b"], annoA=[0, 1, 2]),
layers=dict(Xs=X2),
)
adata3 = AnnData(
X3,
dict(obs_names=["s1", "s2"], anno2=["d3", "d4"]),
dict(var_names=["d", "c", "b"], annoB=[0, 1, 2]),
layers=dict(Xs=X3),
)
adata4 = AnnData(
X4,
dict(obs_names=["s1", "s2"], anno1=["c1", "c2"]),
dict(var_names=["a", "b", "c", "z"], annoA=[0, 1, 2, 3]),
layers=dict(Xs=X4),
)
adata1.raw = adata1
adata2.raw = adata2
adata3.raw = adata3
adata_all = AnnData.concatenate(adata1, adata2, adata3)
assert isinstance(adata_all.raw, Raw)
assert set(adata_all.raw.var_names) == {"b", "c"}
assert_equal(adata_all.raw.to_adata().obs, adata_all.obs)
assert np.array_equal(adata_all.raw.X, adata_all.X)
adata_all = AnnData.concatenate(adata1, adata2, adata3, join="outer")
assert isinstance(adata_all.raw, Raw)
assert set(adata_all.raw.var_names) == set("abcd")
assert_equal(adata_all.raw.to_adata().obs, adata_all.obs)
assert np.array_equal(np.nan_to_num(adata_all.raw.X), np.nan_to_num(adata_all.X))
adata3.raw = adata4
adata_all = AnnData.concatenate(adata1, adata2, adata3, join="outer")
assert isinstance(adata_all.raw, Raw)
assert set(adata_all.raw.var_names) == set("abcdz")
assert set(adata_all.var_names) == set("abcd")
assert not np.array_equal(
np.nan_to_num(adata_all.raw.X), np.nan_to_num(adata_all.X)
)
del adata3.raw
with pytest.warns(
UserWarning,
match=(
"Only some AnnData objects have `.raw` attribute, "
"not concatenating `.raw` attributes."
),
):
adata_all = AnnData.concatenate(adata1, adata2, adata3)
assert adata_all.raw is None
del adata1.raw
del adata2.raw
assert all(_adata.raw is None for _adata in (adata1, adata2, adata3))
adata_all = AnnData.concatenate(adata1, adata2, adata3)
assert adata_all.raw is None
def test_pairwise_concat(axis, array_type):
dim_sizes = [[100, 200, 50], [50, 50, 50]]
if axis:
dim_sizes.reverse()
Ms, Ns = dim_sizes
dim = ("obs", "var")[axis]
alt = ("var", "obs")[axis]
dim_attr = f"{dim}p"
alt_attr = f"{alt}p"
def gen_dim_array(m):
return array_type(sparse.random(m, m, format="csr", density=0.1))
adatas = {
k: AnnData(
**{
"X": sparse.csr_matrix((m, n)),
"obsp": {"arr": gen_dim_array(m)},
"varp": {"arr": gen_dim_array(n)},
}
)
for k, m, n in zip("abc", Ms, Ns)
}
w_pairwise = concat(adatas, axis=axis, label="orig", pairwise=True)
wo_pairwise = concat(adatas, axis=axis, label="orig", pairwise=False)
# Check that argument controls whether elements are included
assert getattr(wo_pairwise, dim_attr) == {}
assert getattr(w_pairwise, dim_attr) != {}
# Check values of included elements
full_inds = np.arange(w_pairwise.shape[axis])
groups = getattr(w_pairwise, dim).groupby("orig").indices
for k, inds in groups.items():
orig_arr = getattr(adatas[k], dim_attr)["arr"]
full_arr = getattr(w_pairwise, dim_attr)["arr"]
# Check original values are intact
assert_equal(orig_arr, _subset(full_arr, (inds, inds)))
# Check that entries are filled with zeroes
assert_equal(
sparse.csr_matrix((len(inds), len(full_inds) - len(inds))),
_subset(full_arr, (inds, np.setdiff1d(full_inds, inds))),
)
assert_equal(
sparse.csr_matrix((len(full_inds) - len(inds), len(inds))),
_subset(full_arr, (np.setdiff1d(full_inds, inds), inds)),
)
# Check that argument does not affect alternative axis
assert "arr" in getattr(
concat(adatas, axis=axis, pairwise=False, merge="first"), alt_attr
)
def test_nan_merge(axis, join_type, array_type):
# concat_dim = ("obs", "var")[axis]
alt_dim = ("var", "obs")[axis]
mapping_attr = f"{alt_dim}m"
adata_shape = (20, 10)
arr = array_type(
sparse.random(adata_shape[1 - axis], 10, density=0.1, format="csr")
)
arr_nan = arr.copy()
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=sparse.SparseEfficiencyWarning)
for _ in range(10):
arr_nan[
np.random.choice(arr.shape[0]), np.random.choice(arr.shape[1])
] = np.nan
_data = {"X": sparse.csr_matrix(adata_shape), mapping_attr: {"arr": arr_nan}}
orig1 = AnnData(**_data)
orig2 = AnnData(**_data)
result = concat([orig1, orig2], axis=axis, merge="same")
assert_equal(getattr(orig1, mapping_attr), getattr(result, mapping_attr))
orig_nonan = AnnData(
**{"X": sparse.csr_matrix(adata_shape), mapping_attr: {"arr": arr}}
)
result_nonan = concat([orig1, orig_nonan], axis=axis, merge="same")
assert len(getattr(result_nonan, mapping_attr)) == 0
def test_merge_unique():
from anndata._core.merge import merge_unique
# Simple cases
assert merge_unique([{"a": "b"}, {"a": "b"}]) == {"a": "b"}
assert merge_unique([{"a": {"b": "c"}}, {"a": {"b": "c"}}]) == {"a": {"b": "c"}}
assert merge_unique([{"a": {"b": "c"}}, {"a": {"b": "d"}}]) == {}
assert merge_unique([{"a": {"b": "c", "d": "e"}}, {"a": {"b": "c", "d": "f"}}]) == {
"a": {"b": "c"}
}
assert merge_unique(
[{"a": {"b": {"c": {"d": "e"}}}}, {"a": {"b": {"c": {"d": "e"}}}}]
) == {"a": {"b": {"c": {"d": "e"}}}}
assert (
merge_unique(
[
{"a": {"b": {"c": {"d": "e"}}}},
{"a": {"b": {"c": {"d": "f"}}}},
{"a": {"b": {"c": {"d": "e"}}}},
]
)
== {}
)
assert merge_unique([{"a": 1}, {"b": 2}]) == {"a": 1, "b": 2}
assert merge_unique([{"a": 1}, {"b": 2}, {"a": 1, "b": {"c": 2, "d": 3}}]) == {
"a": 1
}
# Test equivalency between arrays and lists
assert list(
merge_unique([{"a": np.ones(5)}, {"a": list(np.ones(5))}])["a"]
) == list(np.ones(5))
assert merge_unique([{"a": np.ones(5)}, {"a": list(np.ones(4))}]) == {}
def test_merge_same():
from anndata._core.merge import merge_same
# Same as unique for a number of cases:
assert merge_same([{"a": "b"}, {"a": "b"}]) == {"a": "b"}
assert merge_same([{"a": {"b": "c"}}, {"a": {"b": "c"}}]) == {"a": {"b": "c"}}
assert merge_same([{"a": {"b": "c"}}, {"a": {"b": "d"}}]) == {}
assert merge_same([{"a": {"b": "c", "d": "e"}}, {"a": {"b": "c", "d": "f"}}]) == {
"a": {"b": "c"}
}
assert merge_same([{"a": {"b": "c"}, "d": "e"}, {"a": {"b": "c"}, "d": 2}]) == {
"a": {"b": "c"}
}
assert merge_same(
[{"a": {"b": {"c": {"d": "e"}}}}, {"a": {"b": {"c": {"d": "e"}}}}]
) == {"a": {"b": {"c": {"d": "e"}}}}
assert merge_same([{"a": 1}, {"b": 2}]) == {}
assert merge_same([{"a": 1}, {"b": 2}, {"a": 1, "b": {"c": 2, "d": 3}}]) == {}
# Test equivalency between arrays and lists
assert list(merge_same([{"a": np.ones(5)}, {"a": list(np.ones(5))}])["a"]) == list(
np.ones(5)
)
def test_merge_first():
from anndata._core.merge import merge_first
assert merge_first([{"a": "b"}, {"a": "b"}]) == {"a": "b"}
assert merge_first([{"a": {"b": "c"}}, {"a": {"b": "c"}}]) == {"a": {"b": "c"}}
assert merge_first([{"a": 1}, {"a": 2}]) == {"a": 1}
assert merge_first([{"a": 1}, {"a": {"b": {"c": {"d": "e"}}}}]) == {"a": 1}
assert merge_first([{"a": {"b": {"c": {"d": "e"}}}}, {"a": 1}]) == {
"a": {"b": {"c": {"d": "e"}}}
}
# Helpers for test_concatenate_uns
def uns_ad(uns):
return AnnData(np.zeros((10, 10)), uns=uns)
def map_values(mapping, path, key, old_parent, new_parent, new_items):
ret = default_exit(path, key, old_parent, new_parent, new_items)
for k, v in ret.items():
if isinstance(v, Hashable) and v in mapping:
ret[k] = mapping[v]
return ret
def permute_nested_values(dicts: "List[dict]", gen_val: "Callable[[int], Any]"):
"""
    This function permutes the values of a nested mapping, for testing that our
    merge methods work regardless of the value types.
    Assumes the initial dictionary had integers for values.
"""
dicts = deepcopy(dicts)
initial_values = [
x[1] for x in research(dicts, query=lambda p, k, v: isinstance(v, int))
]
mapping = {k: gen_val(k) for k in initial_values}
return [remap(d, exit=partial(map_values, mapping)) for d in dicts]
def gen_df(n):
return helpers.gen_typed_df(n)
def gen_array(n):
return np.random.randn(n)
def gen_list(n):
return list(gen_array(n))
def gen_sparse(n):
return sparse.random(np.random.randint(1, 100), np.random.randint(1, 100))
def gen_something(n):
options = [gen_df, gen_array, gen_list, gen_sparse]
return np.random.choice(options)(n)
def gen_concat_params(unss, compat2result):
value_generators = [
lambda x: x,
gen_df,
gen_array,
gen_list,
gen_sparse,
gen_something,
]
for gen, (mode, result) in product(value_generators, compat2result.items()):
yield pytest.param(unss, mode, result, gen)
@pytest.mark.parametrize(
["unss", "merge_strategy", "result", "value_gen"],
chain(
gen_concat_params(
[{"a": 1}, {"a": 2}],
{None: {}, "first": {"a": 1}, "unique": {}, "same": {}, "only": {}},
),
gen_concat_params(
[{"a": 1}, {"b": 2}],
{
None: {},
"first": {"a": 1, "b": 2},
"unique": {"a": 1, "b": 2},
"same": {},
"only": {"a": 1, "b": 2},
},
),
gen_concat_params(
[
{"a": {"b": 1, "c": {"d": 3}}},
{"a": {"b": 1, "c": {"e": 4}}},
],
{
None: {},
"first": {"a": {"b": 1, "c": {"d": 3, "e": 4}}},
"unique": {"a": {"b": 1, "c": {"d": 3, "e": 4}}},
"same": {"a": {"b": 1}},
"only": {"a": {"c": {"d": 3, "e": 4}}},
},
),
gen_concat_params(
[
{"a": 1},
{"a": 1, "b": 2},
{"a": 1, "b": {"b.a": 1}, "c": 3},
{"d": 4},
],
{
None: {},
"first": {"a": 1, "b": 2, "c": 3, "d": 4},
"unique": {"a": 1, "c": 3, "d": 4},
"same": {},
"only": {"c": 3, "d": 4},
},
),
gen_concat_params(
[{"a": i} for i in range(15)],
{None: {}, "first": {"a": 0}, "unique": {}, "same": {}, "only": {}},
),
gen_concat_params(
[{"a": 1} for i in range(10)] + [{"a": 2}],
{None: {}, "first": {"a": 1}, "unique": {}, "same": {}, "only": {}},
),
),
)
def test_concatenate_uns(unss, merge_strategy, result, value_gen):
"""
Test that concatenation works out for different strategies and sets of values.
Params
------
unss
Set of patterns for values in uns.
    merge_strategy
Strategy to use for merging uns.
result
Pattern we expect to see for the given input and strategy.
value_gen
Maps values in unss and results to another set of values. This is for checking that
we're comparing values correctly. For example `[{"a": 1}, {"a": 1}]` may get mapped
to `[{"a": [1, 2, 3]}, {"a": [1, 2, 3]}]`.
"""
# So we can see what the initial pattern was meant to be
print(merge_strategy, "\n", unss, "\n", result)
result, *unss = permute_nested_values([result] + unss, value_gen)
adatas = [uns_ad(uns) for uns in unss]
assert_equal(
adatas[0].concatenate(adatas[1:], uns_merge=merge_strategy).uns,
result,
elem_name="uns",
)
def test_transposed_concat(array_type, axis, join_type, merge_strategy, fill_val):
lhs = gen_adata((10, 10), X_type=array_type)
rhs = gen_adata((10, 12), X_type=array_type)
a = concat([lhs, rhs], axis=axis, join=join_type, merge=merge_strategy)
b = concat(
[lhs.T, rhs.T], axis=abs(axis - 1), join=join_type, merge=merge_strategy
).T
assert_equal(a, b)
def test_batch_key(axis):
"""Test that concat only adds a label if the key is provided"""
def get_annot(adata):
return getattr(adata, ("obs", "var")[axis])
lhs = gen_adata((10, 10))
rhs = gen_adata((10, 12))
# There is probably a prettier way to do this
annot = get_annot(concat([lhs, rhs], axis=axis))
assert (
list(
annot.columns.difference(
get_annot(lhs).columns.union(get_annot(rhs).columns)
)
)
== []
)
batch_annot = get_annot(concat([lhs, rhs], axis=axis, label="batch"))
assert list(
batch_annot.columns.difference(
get_annot(lhs).columns.union(get_annot(rhs).columns)
)
) == ["batch"]
def test_concat_categories_from_mapping():
mapping = {
"a": gen_adata((10, 10)),
"b": gen_adata((10, 10)),
}
keys = list(mapping.keys())
adatas = list(mapping.values())
mapping_call = partial(concat, mapping)
iter_call = partial(concat, adatas, keys=keys)
assert_equal(mapping_call(), iter_call())
assert_equal(mapping_call(label="batch"), iter_call(label="batch"))
assert_equal(mapping_call(index_unique="-"), iter_call(index_unique="-"))
assert_equal(
mapping_call(label="group", index_unique="+"),
iter_call(label="group", index_unique="+"),
)
def test_concat_names(axis):
def get_annot(adata):
return getattr(adata, ("obs", "var")[axis])
lhs = gen_adata((10, 10))
rhs = gen_adata((10, 10))
assert not get_annot(concat([lhs, rhs], axis=axis)).index.is_unique
assert get_annot(concat([lhs, rhs], axis=axis, index_unique="-")).index.is_unique
def test_concat_size_0_dim():
# https://github.com/theislab/anndata/issues/526
a = gen_adata((5, 10))
b = gen_adata((5, 0))
assert concat([a, b], axis=0).shape == (10, 0)
assert concat([a, b], axis=1).shape == (5, 10)
def test_concatenate_size_0_dim():
# https://github.com/theislab/anndata/issues/526
a = gen_adata((5, 10))
b = gen_adata((5, 0))
    # Mostly testing that this doesn't error, but verify the shapes as well
    assert a.concatenate([b]).shape == (10, 0)
    assert b.concatenate([a]).shape == (10, 0)
# Leaving out for now. See definition of these values for explanation
# def test_concatenate_uns_types():
# from anndata._core.merge import UNS_STRATEGIES, UNS_STRATEGIES_TYPE
# assert set(UNS_STRATEGIES.keys()) == set(UNS_STRATEGIES_TYPE.__args__)
|
import os
import cv2
import numpy as np
class Loader:
def __init__(self, root_path) -> None:
self.root_path = root_path
    def get_alpha_data(self, file_path):
        if self.root_path:
            file_path = os.path.join(self.root_path, file_path)
        # Read the alpha matte as a single-channel image
        return cv2.imread(file_path, cv2.IMREAD_GRAYSCALE)
class VideoMatte240KLoader():
def __init__(self, root_path, pha='pha', fgr='fgr') -> None:
self.root_path = root_path
self.pha = pha
self.fgr = fgr
def get_data(self, file_path):
if (self.root_path):
pha_file_path = os.path.join(self.root_path, 'train', self.pha, file_path)
fgr_file_path = os.path.join(self.root_path, 'train', self.fgr, file_path)
pha_cap = cv2.VideoCapture(pha_file_path)
fgr_cap = cv2.VideoCapture(fgr_file_path)
            flag = True
            while True:
                ret, pha_frame = pha_cap.read()
                ret2, fgr_frame = fgr_cap.read()
                if not (ret and ret2):
                    # One stream is exhausted or failed to decode
                    print(ret, ret2)
                    break
                gray = pha_frame[..., 0]
                # Report clips whose alpha frames are not 1080 px tall, once per file
                if gray.shape[0] != 1080 and flag:
                    print(file_path)
                    flag = False
            pha_cap.release()
            fgr_cap.release()
class RWP636Loader():
    def __init__(self, root_path, pha='pha') -> None:
        self.root_path = root_path
        self.pha = pha
    def get_data(self, file_path):
        if self.root_path:
            pha_file_path = os.path.join(self.root_path, 'train', self.pha, file_path)
            return pha_file_path
if __name__ == '__main__':
    files = os.listdir(r'E:\CVDataset\VideoMatte240K\train\pha')
    loader = VideoMatte240KLoader(r'E:\CVDataset\VideoMatte240K')
    for f in files:
        loader.get_data(f)
|
#!/usr/bin/env python
import time
print "This demo will build 4 stacks of 40 cubes in the scene. Watch out!"
#print "\x1b!0|"
for x in range(0, 4):
for y in range(0,40):
print "\x1b!1;{}.0;{}.0;-1.2;0.1;0.1;0.1|".format(x,y)
print "Done."
|
class TicTacToe():
def __init__(self):
# Initialising the game board to empty strings
self.board = {
'1': ' ', '2': ' ', '3': ' ',
'4': ' ', '5': ' ', '6': ' ',
'7': ' ', '8': ' ', '9': ' '
}
# The x for the left and right columns
self.LRx = ' X'
# The x for the centre column
self.Cx = ' X '
# The o for the left and right columns
self.LRo = ' O'
# The o for the centre column
self.Co = ' O '
self.boardEnd = '------------------------------------'
def start_game(self):
"""
This method is used to start the game after the class has been created
:return: Nothing is returned
"""
# Prints the board showing the numbers for the input
print(' 1 | 2 | 3 ')
print('---+---+---')
print(' 4 | 5 | 6 ')
print('---+---+---')
print(' 7 | 8 | 9 ')
print(self.boardEnd)
print("Type 'exit' to exit")
# Player 1 starts
player = 1
# Symbol is X for Player 1
symbol = 'X'
# The number of plays (cannot be greater than 9)
moves = 0
# To check if there is a winner
won = False
# Switch symbol depending on the player (1 = X, 2 = O)
if player != 1:
symbol = 'O'
while not won:
if self.board['1'] == self.LRx and self.board['2'] == self.Cx and self.board['3'] == self.LRx:
print('Player 1 wins!')
break
if moves > 8:
break
while player == 1 and won == False:
# Check if tie
if moves > 8:
break
# Display the board with any changes
print(self.board['1'] + ' |' + self.board['2'] + '|' + self.board['3'])
print('---+---+---')
print(self.board['4'] + ' |' + self.board['5'] + '|' + self.board['6'])
print('---+---+---')
print(self.board['7'] + ' |' + self.board['8'] + '|' + self.board['9'])
# Check if there is a winner
# For player 1
# First row
if self.board['1'] == self.LRx and self.board['2'] == self.Cx and self.board['3'] == self.LRx:
print('Player 1 wins!')
won = True
break
# Second row
elif self.board['4'] == self.LRx and self.board['5'] == self.Cx and self.board['6'] == self.LRx:
print('Player 1 wins!')
won = True
break
# Third row
elif self.board['7'] == self.LRx and self.board['8'] == self.Cx and self.board['9'] == self.LRx:
print('Player 1 wins!')
won = True
break
# First column
elif self.board['1'] == self.LRx and self.board['4'] == self.LRx and self.board['7'] == self.LRx:
print('Player 1 wins!')
won = True
break
# Second column
                elif self.board['2'] == self.Cx and self.board['5'] == self.Cx and self.board['8'] == self.Cx:
print('Player 1 wins!')
won = True
break
# Third column
elif self.board['3'] == self.LRx and self.board['6'] == self.LRx and self.board['9'] == self.LRx:
print('Player 1 wins!')
won = True
break
# Diagonal - top left to bottom right
elif self.board['1'] == self.LRx and self.board['5'] == self.Cx and self.board['9'] == self.LRx:
print('Player 1 wins!')
won = True
break
# Diagonal - bottom left to top right
elif self.board['7'] == self.LRx and self.board['5'] == self.Cx and self.board['3'] == self.LRx:
print('Player 1 wins!')
won = True
break
# For player 2
# First row
elif self.board['1'] == self.LRo and self.board['2'] == self.Co and self.board['3'] == self.LRo:
print('Player 2 wins!')
won = True
break
# Second row
elif self.board['4'] == self.LRo and self.board['5'] == self.Co and self.board['6'] == self.LRo:
print('Player 2 wins!')
won = True
break
# Third row
elif self.board['7'] == self.LRo and self.board['8'] == self.Co and self.board['9'] == self.LRo:
print('Player 2 wins!')
won = True
break
# First column
elif self.board['1'] == self.LRo and self.board['4'] == self.LRo and self.board['7'] == self.LRo:
print('Player 2 wins!')
won = True
break
# Second column
                elif self.board['2'] == self.Co and self.board['5'] == self.Co and self.board['8'] == self.Co:
print('Player 2 wins!')
won = True
break
# Third column
elif self.board['3'] == self.LRo and self.board['6'] == self.LRo and self.board['9'] == self.LRo:
print('Player 2 wins!')
won = True
break
# Diagonal - top left to bottom right
elif self.board['1'] == self.LRo and self.board['5'] == self.Co and self.board['9'] == self.LRo:
print('Player 2 wins!')
won = True
break
# Diagonal - bottom left to top right
elif self.board['7'] == self.LRo and self.board['5'] == self.Co and self.board['3'] == self.LRo:
print('Player 2 wins!')
won = True
break
else:
                    # Take input for the number of the square to mark
ip = input('Player 1:\n')
moves += 1
                    # Try the input: if it names a free square between 1 and 9, place the mark
                    try:
                        if ip in self.board and self.board[ip].strip() == '':
# Check to see which 'X' to insert (LRx or Cx)
if ip == '2' or ip == '5' or ip == '8':
self.board[ip] = self.Cx
# Check if the board is full
if moves > 8:
break
# Change player
player = 2
print(self.boardEnd)
else:
self.board[ip] = self.LRx
# Check if the board is full
if moves > 8:
break
# Change player
player = 2
print(self.boardEnd)
# Check if that box is taken
elif self.board[ip] == ' X' or self.board[ip] == ' X ' or self.board[ip] == ' O' or self.board[ip] == ' O ':
print('That spot is taken')
moves -= 1
# If the input is not from the board (1-9) or exit
except KeyError as identifier:
if "exit" in ip:
exit(1)
else:
print('INVALID')
moves -= 1
break
while player == 2 and won == False:
# Check if tie
if moves > 8:
break
print(self.board['1'] + ' |' + self.board['2'] + '|' + self.board['3'])
print('---+---+---')
print(self.board['4'] + ' |' + self.board['5'] + '|' + self.board['6'])
print('---+---+---')
print(self.board['7'] + ' |' + self.board['8'] + '|' + self.board['9'])
# Check if there is a winner
# For player 1
# First row
if self.board['1'] == self.LRx and self.board['2'] == self.Cx and self.board['3'] == self.LRx:
print('Player 1 wins!')
won = True
break
# Second row
elif self.board['4'] == self.LRx and self.board['5'] == self.Cx and self.board['6'] == self.LRx:
print('Player 1 wins!')
won = True
break
# Third row
elif self.board['7'] == self.LRx and self.board['8'] == self.Cx and self.board['9'] == self.LRx:
print('Player 1 wins!')
won = True
break
# First column
elif self.board['1'] == self.LRx and self.board['4'] == self.LRx and self.board['7'] == self.LRx:
print('Player 1 wins!')
won = True
break
# Second column
                elif self.board['2'] == self.Cx and self.board['5'] == self.Cx and self.board['8'] == self.Cx:
print('Player 1 wins!')
won = True
break
# Third column
elif self.board['3'] == self.LRx and self.board['6'] == self.LRx and self.board['9'] == self.LRx:
print('Player 1 wins!')
won = True
break
# Diagonal - top left to bottom right
elif self.board['1'] == self.LRx and self.board['5'] == self.Cx and self.board['9'] == self.LRx:
print('Player 1 wins!')
won = True
break
# Diagonal - bottom left to top right
elif self.board['7'] == self.LRx and self.board['5'] == self.Cx and self.board['3'] == self.LRx:
print('Player 1 wins!')
won = True
break
# For player 2
# First row
elif self.board['1'] == self.LRo and self.board['2'] == self.Co and self.board['3'] == self.LRo:
print('Player 2 wins!')
won = True
break
# Second row
elif self.board['4'] == self.LRo and self.board['5'] == self.Co and self.board['6'] == self.LRo:
print('Player 2 wins!')
won = True
break
# Third row
elif self.board['7'] == self.LRo and self.board['8'] == self.Co and self.board['9'] == self.LRo:
print('Player 2 wins!')
won = True
break
# First column
elif self.board['1'] == self.LRo and self.board['4'] == self.LRo and self.board['7'] == self.LRo:
print('Player 2 wins!')
won = True
break
# Second column
                elif self.board['2'] == self.Co and self.board['5'] == self.Co and self.board['8'] == self.Co:
print('Player 2 wins!')
won = True
break
# Third column
elif self.board['3'] == self.LRo and self.board['6'] == self.LRo and self.board['9'] == self.LRo:
print('Player 2 wins!')
won = True
break
# Diagonal - top left to bottom right
elif self.board['1'] == self.LRo and self.board['5'] == self.Co and self.board['9'] == self.LRo:
print('Player 2 wins!')
won = True
break
# Diagonal - bottom left to top right
                elif self.board['7'] == self.LRo and self.board['5'] == self.Co and self.board['3'] == self.LRo:
print('Player 2 wins!')
won = True
break
else:
                    # Take input for the number of the square to mark
ip = input('Player 2:\n')
moves += 1
                    # Try the input: if it names a free square between 1 and 9, place the mark
                    try:
                        # Check if that box is empty
                        if ip in self.board and self.board[ip].strip() == '':
# Check to see which 'O' to insert (LRo or Co)
if ip == '2' or ip == '5' or ip == '8':
self.board[ip] = self.Co
# Check if the board is full
if moves > 8:
break
# Change player
player = 1
print(self.boardEnd)
else:
self.board[ip] = self.LRo
# Check if the board is full
if moves > 8:
break
# Change player
player = 1
print(self.boardEnd)
# Check if that box is taken
elif self.board[ip] == ' X' or self.board[ip] == ' X ' or self.board[ip] == ' O' or self.board[ip] == ' O ':
print('That spot is taken')
moves -= 1
# If the input is not from the board (1-9)
except KeyError as identifier:
if "exit" in ip:
return None
else:
print('INVALID')
moves -= 1
break
print('DONE')
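# Illustrative usage (a sketch, not part of the original script): instantiate
# the class and start the game; moves are read from stdin as square numbers 1-9.
if __name__ == '__main__':
    game = TicTacToe()
    game.start_game()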
|
# Copyright (c) 2021, Xu Chen, FUNLab, Xiamen University
# All rights reserved.
import torch
import torch.nn as nn
import torch.nn.functional as F
class EncoderDecoder(nn.Module):
    def __init__(self, n_in_chs, n_out_chs):
super(EncoderDecoder, self).__init__()
self.n_in_chs = n_in_chs
self.n_out_chs = n_out_chs
# Encoder
self.conv1 = nn.Conv2d(n_in_chs, 128, 3, padding=1)
self.conv2 = nn.Conv2d(128, 64, 3, padding=1)
self.pool = nn.MaxPool2d(2, 2)
# Decoder
self.t_conv1 = nn.ConvTranspose2d(64, 128, 2, stride=2)
self.t_conv2 = nn.ConvTranspose2d(128, n_out_chs, 2, stride=2)
def forward(self, x):
x = F.relu(self.conv1(x))
x = self.pool(x)
x = F.relu(self.conv2(x))
x = self.pool(x)
x = F.relu(self.t_conv1(x))
x = torch.sigmoid(self.t_conv2(x))
return x
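# A minimal shape check (an added sketch; the channel sizes here are arbitrary):
# the two 2x2 max-pools halve H and W twice, and the two stride-2 transposed
# convolutions restore them, so sides that are multiples of 4 round-trip.
if __name__ == '__main__':
    model = EncoderDecoder(n_in_chs=4, n_out_chs=2)
    x = torch.randn(1, 4, 64, 64)
    assert model(x).shape == (1, 2, 64, 64)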
|
from rest_framework import permissions
class IsOwnerOrReadOnly(permissions.BasePermission):
"""
Custom permission to only allow owners of an object to edit it.
"""
def has_object_permission(self, request, view, obj):
# Read permissions are allowed to any request,
# so we'll always allow GET, HEAD or OPTIONS requests.
if request.method in permissions.SAFE_METHODS:
return True
# Write permissions are only allowed to the owner of the snippet.
return obj.owner == request.user
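# Example wiring (a sketch, not from this project; the Snippet model and
# serializer names are hypothetical): combine this permission with the standard
# IsAuthenticatedOrReadOnly in a DRF generic view.
#
#     from rest_framework import generics
#     class SnippetDetail(generics.RetrieveUpdateDestroyAPIView):
#         queryset = Snippet.objects.all()
#         serializer_class = SnippetSerializer
#         permission_classes = [permissions.IsAuthenticatedOrReadOnly,
#                               IsOwnerOrReadOnly]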
|
"""
Re-add triggers to update history.update_time when contents are changed.
"""
import logging
from sqlalchemy import MetaData
from galaxy.model.triggers import (
drop_timestamp_triggers,
install_timestamp_triggers,
)
log = logging.getLogger(__name__)
metadata = MetaData()
def upgrade(migrate_engine):
print(__doc__)
metadata.bind = migrate_engine
metadata.reflect()
install_timestamp_triggers(migrate_engine)
def downgrade(migrate_engine):
metadata.bind = migrate_engine
metadata.reflect()
drop_timestamp_triggers(migrate_engine)
|
from app import db
class Product(db.Model):
id = db.Column(db.BigInteger, primary_key=True, autoincrement=True)
plu = db.Column(db.BigInteger)
name = db.Column(db.String(128))
buying_price = db.Column(db.Float, nullable=True)
selling_price = db.Column(db.Float, nullable=True)
discount = db.Column(db.Float, nullable=True)
transaction_id = db.Column(
db.BigInteger, db.ForeignKey("transaction.id"), nullable=True
)
@property
def serialized(self):
return {
"id": self.id,
"plu": self.plu,
"name": self.name,
"buying_price": self.buying_price,
"selling_price": self.selling_price,
"discount": self.discount,
"transaction_id": self.transaction_id,
}
class Transaction(db.Model):
id = db.Column(db.BigInteger, primary_key=True, autoincrement=True)
date_time = db.Column(db.DateTime, nullable=True)
receipt_number = db.Column(db.BigInteger, nullable=True)
total_amount = db.Column(db.Float, nullable=True)
products = db.relationship("Product", backref="contains")
@property
def serialized(self):
return {
"id": self.id,
"date_time": self.date_time,
"receipt_number": self.receipt_number,
"total_amount": self.total_amount,
}
class ProductInfo(db.Model):
plu = db.Column(db.BigInteger, primary_key=True)
name = db.Column(db.String(128))
buying_price = db.Column(db.Float, nullable=True)
selling_price = db.Column(db.Float, nullable=True)
@property
def serialized(self):
return {
"plu": self.plu,
"name": self.name,
"buying_price": self.buying_price,
"selling_price": self.selling_price,
}
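# Illustrative sketch (not in the original module; assumes an application
# context and an initialized database). A Product attaches to a Transaction
# through the "products" relationship declared above:
#
#     t = Transaction(receipt_number=1, total_amount=9.99)
#     t.products.append(Product(plu=42, name="apple", selling_price=9.99))
#     db.session.add(t)
#     db.session.commit()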
|
import math
import torch
import torch.nn as nn
def rgb_to_hls(image: torch.Tensor) -> torch.Tensor:
r"""Convert a RGB image to HLS.
The image data is assumed to be in the range of (0, 1).
Args:
image (torch.Tensor): RGB image to be converted to HLS with shape :math:`(*, 3, H, W)`.
Returns:
torch.Tensor: HLS version of the image with shape :math:`(*, 3, H, W)`.
Example:
>>> input = torch.rand(2, 3, 4, 5)
>>> output = rgb_to_hls(input) # 2x3x4x5
"""
if not isinstance(image, torch.Tensor):
raise TypeError("Input type is not a torch.Tensor. Got {}".format(type(image)))
if len(image.shape) < 3 or image.shape[-3] != 3:
raise ValueError("Input size must have a shape of (*, 3, H, W). Got {}".format(image.shape))
r: torch.Tensor = image[..., 0, :, :]
g: torch.Tensor = image[..., 1, :, :]
b: torch.Tensor = image[..., 2, :, :]
maxc: torch.Tensor = image.max(-3)[0]
minc: torch.Tensor = image.min(-3)[0]
imax: torch.Tensor = image.max(-3)[1]
l: torch.Tensor = (maxc + minc) / 2 # luminance
deltac: torch.Tensor = maxc - minc
s: torch.Tensor = torch.where(
l < 0.5, deltac / (maxc + minc), deltac / (torch.tensor(2.0) - (maxc + minc))
) # saturation
hi: torch.Tensor = torch.zeros_like(deltac)
hi[imax == 0] = (((g - b) / deltac) % 6)[imax == 0]
hi[imax == 1] = (((b - r) / deltac) + 2)[imax == 1]
hi[imax == 2] = (((r - g) / deltac) + 4)[imax == 2]
h: torch.Tensor = 2.0 * math.pi * (60.0 * hi) / 360.0 # hue [0, 2*pi]
image_hls: torch.Tensor = torch.stack([h, l, s], dim=-3)
# JIT indexing is not supported before 1.6.0 https://github.com/pytorch/pytorch/issues/38962
# image_hls[torch.isnan(image_hls)] = 0.
image_hls = torch.where(
torch.isnan(image_hls), torch.tensor(0.0, device=image_hls.device, dtype=image_hls.dtype), image_hls
)
return image_hls
def hls_to_rgb(image: torch.Tensor) -> torch.Tensor:
r"""Convert a HLS image to RGB.
The image data is assumed to be in the range of (0, 1).
Args:
image (torch.Tensor): HLS image to be converted to RGB with shape :math:`(*, 3, H, W)`.
Returns:
torch.Tensor: RGB version of the image with shape :math:`(*, 3, H, W)`.
Example:
>>> input = torch.rand(2, 3, 4, 5)
>>> output = hls_to_rgb(input) # 2x3x4x5
"""
if not isinstance(image, torch.Tensor):
raise TypeError("Input type is not a torch.Tensor. Got {}".format(type(image)))
if len(image.shape) < 3 or image.shape[-3] != 3:
raise ValueError("Input size must have a shape of (*, 3, H, W). Got {}".format(image.shape))
h: torch.Tensor = image[..., 0, :, :] * 360 / (2 * math.pi)
l: torch.Tensor = image[..., 1, :, :]
s: torch.Tensor = image[..., 2, :, :]
kr = (0 + h / 30) % 12
kg = (8 + h / 30) % 12
kb = (4 + h / 30) % 12
a = s * torch.min(l, torch.tensor(1.0) - l)
ones_k = torch.ones_like(kr)
fr: torch.Tensor = l - a * torch.max(
torch.min(torch.min(kr - torch.tensor(3.0), torch.tensor(9.0) - kr), ones_k), -1 * ones_k
)
fg: torch.Tensor = l - a * torch.max(
torch.min(torch.min(kg - torch.tensor(3.0), torch.tensor(9.0) - kg), ones_k), -1 * ones_k
)
fb: torch.Tensor = l - a * torch.max(
torch.min(torch.min(kb - torch.tensor(3.0), torch.tensor(9.0) - kb), ones_k), -1 * ones_k
)
out: torch.Tensor = torch.stack([fr, fg, fb], dim=-3)
return out
class RgbToHls(nn.Module):
r"""Convert an image from RGB to HLS.
The image data is assumed to be in the range of (0, 1).
Returns:
torch.Tensor: HLS version of the image.
Shape:
- image: :math:`(*, 3, H, W)`
- output: :math:`(*, 3, H, W)`
Examples:
>>> input = torch.rand(2, 3, 4, 5)
>>> hls = RgbToHls()
>>> output = hls(input) # 2x3x4x5
"""
def forward(self, image: torch.Tensor) -> torch.Tensor:
return rgb_to_hls(image)
class HlsToRgb(nn.Module):
r"""Convert an image from HLS to RGB.
The image data is assumed to be in the range of (0, 1).
Returns:
torch.Tensor: RGB version of the image.
Shape:
- input: :math:`(*, 3, H, W)`
- output: :math:`(*, 3, H, W)`
Reference:
https://en.wikipedia.org/wiki/HSL_and_HSV
Examples:
>>> input = torch.rand(2, 3, 4, 5)
>>> rgb = HlsToRgb()
>>> output = rgb(input) # 2x3x4x5
"""
def forward(self, image: torch.Tensor) -> torch.Tensor:
return hls_to_rgb(image)
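# A quick round-trip sketch (not part of the original module): converting
# RGB -> HLS -> RGB should approximately reproduce inputs in the (0, 1) range.
if __name__ == "__main__":
    img = torch.rand(2, 3, 4, 5)
    assert torch.allclose(hls_to_rgb(rgb_to_hls(img)), img, atol=1e-4)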
|
# -*- coding: utf-8 -*-
"""DNA Center Get Sync Result for Virtual Account data model.
Copyright (c) 2019 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import fastjsonschema
import json
from dnacentersdk.exceptions import MalformedRequest
from builtins import *
class JSONSchemaValidator0A9C988445Cb91C8(object):
"""Get Sync Result for Virtual Account request schema definition."""
def __init__(self):
super(JSONSchemaValidator0A9C988445Cb91C8, self).__init__()
self._validator = fastjsonschema.compile(json.loads(
'''{
"additionalProperties": false,
"type": "object"
}'''.replace("\n" + ' ' * 16, '')
))
def validate(self, request):
try:
self._validator(request)
except fastjsonschema.exceptions.JsonSchemaException as e:
raise MalformedRequest(
'{} is invalid. Reason: {}'.format(request, e.message)
)
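# Usage sketch (illustrative): the compiled schema is an object type with
# "additionalProperties": false, so an empty dict validates and any extra
# key raises MalformedRequest.
if __name__ == '__main__':
    validator = JSONSchemaValidator0A9C988445Cb91C8()
    validator.validate({})  # passes
    try:
        validator.validate({'unexpected': 1})
    except MalformedRequest:
        pass  # rejected, as the schema forbids additional properties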
|
# Moves a servo based on the accelerations of the Y axis
from Adafruit_I2C import Adafruit_I2C
from time import sleep
import Adafruit_BBIO.PWM as PWM
# initializes the i2c library and wakes up the IMU (MPU6050)
i2caddr = 0x68
i2c = Adafruit_I2C(i2caddr)
i2c.write8(0x6B, 0)
# sets up servo - from Adafruit tutorial
servoAddr = "P8_13"
duty_min = 3
duty_max = 14.5
duty_span = duty_max - duty_min
PWM.start(servoAddr, duty_max, 60.0)
# Utility function to convert a number in one range to another range
def map(x, min, max, newmin, newmax):
x = x - min
max = max - min
x = x / max
newspan = newmax - newmin
x = x * newspan
x = x + newmin
return x
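# Worked example (added illustration): zero acceleration maps to the midpoint
# of the duty-cycle span, (duty_min + duty_max) / 2 = 8.75.
assert abs(map(0.0, -2, 2, duty_min, duty_max) - (duty_min + duty_max) / 2) < 1e-9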
# for loop = laziness
for i in range (0, 10000):
# reads in y axis accelerations and merges into one number
b = i2c.readS8(0x3D)
s = i2c.readU8(0x3E)
rawaccel = b * 256 + s
# converts raw reading into g's according to mode (+- 2 g's)
g = rawaccel / 16384.
# maps g's to the duty cycle range
duty = map(g, -2, 2, duty_min, duty_max)
# calls PWM & moves the servo
PWM.set_duty_cycle(servoAddr, duty)
# cleanup time
PWM.stop(servoAddr)
PWM.cleanup()
i2c.write8(0x6B, 0x40)
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.10.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta2DeploymentStatus(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'available_replicas': 'int',
'collision_count': 'int',
'conditions': 'list[V1beta2DeploymentCondition]',
'observed_generation': 'int',
'ready_replicas': 'int',
'replicas': 'int',
'unavailable_replicas': 'int',
'updated_replicas': 'int'
}
attribute_map = {
'available_replicas': 'availableReplicas',
'collision_count': 'collisionCount',
'conditions': 'conditions',
'observed_generation': 'observedGeneration',
'ready_replicas': 'readyReplicas',
'replicas': 'replicas',
'unavailable_replicas': 'unavailableReplicas',
'updated_replicas': 'updatedReplicas'
}
def __init__(self, available_replicas=None, collision_count=None, conditions=None, observed_generation=None, ready_replicas=None, replicas=None, unavailable_replicas=None, updated_replicas=None):
"""
V1beta2DeploymentStatus - a model defined in Swagger
"""
self._available_replicas = None
self._collision_count = None
self._conditions = None
self._observed_generation = None
self._ready_replicas = None
self._replicas = None
self._unavailable_replicas = None
self._updated_replicas = None
self.discriminator = None
if available_replicas is not None:
self.available_replicas = available_replicas
if collision_count is not None:
self.collision_count = collision_count
if conditions is not None:
self.conditions = conditions
if observed_generation is not None:
self.observed_generation = observed_generation
if ready_replicas is not None:
self.ready_replicas = ready_replicas
if replicas is not None:
self.replicas = replicas
if unavailable_replicas is not None:
self.unavailable_replicas = unavailable_replicas
if updated_replicas is not None:
self.updated_replicas = updated_replicas
@property
def available_replicas(self):
"""
Gets the available_replicas of this V1beta2DeploymentStatus.
Total number of available pods (ready for at least minReadySeconds) targeted by this deployment.
:return: The available_replicas of this V1beta2DeploymentStatus.
:rtype: int
"""
return self._available_replicas
@available_replicas.setter
def available_replicas(self, available_replicas):
"""
Sets the available_replicas of this V1beta2DeploymentStatus.
Total number of available pods (ready for at least minReadySeconds) targeted by this deployment.
:param available_replicas: The available_replicas of this V1beta2DeploymentStatus.
:type: int
"""
self._available_replicas = available_replicas
@property
def collision_count(self):
"""
Gets the collision_count of this V1beta2DeploymentStatus.
Count of hash collisions for the Deployment. The Deployment controller uses this field as a collision avoidance mechanism when it needs to create the name for the newest ReplicaSet.
:return: The collision_count of this V1beta2DeploymentStatus.
:rtype: int
"""
return self._collision_count
@collision_count.setter
def collision_count(self, collision_count):
"""
Sets the collision_count of this V1beta2DeploymentStatus.
Count of hash collisions for the Deployment. The Deployment controller uses this field as a collision avoidance mechanism when it needs to create the name for the newest ReplicaSet.
:param collision_count: The collision_count of this V1beta2DeploymentStatus.
:type: int
"""
self._collision_count = collision_count
@property
def conditions(self):
"""
Gets the conditions of this V1beta2DeploymentStatus.
Represents the latest available observations of a deployment's current state.
:return: The conditions of this V1beta2DeploymentStatus.
:rtype: list[V1beta2DeploymentCondition]
"""
return self._conditions
@conditions.setter
def conditions(self, conditions):
"""
Sets the conditions of this V1beta2DeploymentStatus.
Represents the latest available observations of a deployment's current state.
:param conditions: The conditions of this V1beta2DeploymentStatus.
:type: list[V1beta2DeploymentCondition]
"""
self._conditions = conditions
@property
def observed_generation(self):
"""
Gets the observed_generation of this V1beta2DeploymentStatus.
The generation observed by the deployment controller.
:return: The observed_generation of this V1beta2DeploymentStatus.
:rtype: int
"""
return self._observed_generation
@observed_generation.setter
def observed_generation(self, observed_generation):
"""
Sets the observed_generation of this V1beta2DeploymentStatus.
The generation observed by the deployment controller.
:param observed_generation: The observed_generation of this V1beta2DeploymentStatus.
:type: int
"""
self._observed_generation = observed_generation
@property
def ready_replicas(self):
"""
Gets the ready_replicas of this V1beta2DeploymentStatus.
Total number of ready pods targeted by this deployment.
:return: The ready_replicas of this V1beta2DeploymentStatus.
:rtype: int
"""
return self._ready_replicas
@ready_replicas.setter
def ready_replicas(self, ready_replicas):
"""
Sets the ready_replicas of this V1beta2DeploymentStatus.
Total number of ready pods targeted by this deployment.
:param ready_replicas: The ready_replicas of this V1beta2DeploymentStatus.
:type: int
"""
self._ready_replicas = ready_replicas
@property
def replicas(self):
"""
Gets the replicas of this V1beta2DeploymentStatus.
Total number of non-terminated pods targeted by this deployment (their labels match the selector).
:return: The replicas of this V1beta2DeploymentStatus.
:rtype: int
"""
return self._replicas
@replicas.setter
def replicas(self, replicas):
"""
Sets the replicas of this V1beta2DeploymentStatus.
Total number of non-terminated pods targeted by this deployment (their labels match the selector).
:param replicas: The replicas of this V1beta2DeploymentStatus.
:type: int
"""
self._replicas = replicas
@property
def unavailable_replicas(self):
"""
Gets the unavailable_replicas of this V1beta2DeploymentStatus.
Total number of unavailable pods targeted by this deployment. This is the total number of pods that are still required for the deployment to have 100% available capacity. They may either be pods that are running but not yet available or pods that still have not been created.
:return: The unavailable_replicas of this V1beta2DeploymentStatus.
:rtype: int
"""
return self._unavailable_replicas
@unavailable_replicas.setter
def unavailable_replicas(self, unavailable_replicas):
"""
Sets the unavailable_replicas of this V1beta2DeploymentStatus.
Total number of unavailable pods targeted by this deployment. This is the total number of pods that are still required for the deployment to have 100% available capacity. They may either be pods that are running but not yet available or pods that still have not been created.
:param unavailable_replicas: The unavailable_replicas of this V1beta2DeploymentStatus.
:type: int
"""
self._unavailable_replicas = unavailable_replicas
@property
def updated_replicas(self):
"""
Gets the updated_replicas of this V1beta2DeploymentStatus.
Total number of non-terminated pods targeted by this deployment that have the desired template spec.
:return: The updated_replicas of this V1beta2DeploymentStatus.
:rtype: int
"""
return self._updated_replicas
@updated_replicas.setter
def updated_replicas(self, updated_replicas):
"""
Sets the updated_replicas of this V1beta2DeploymentStatus.
Total number of non-terminated pods targeted by this deployment that have the desired template spec.
:param updated_replicas: The updated_replicas of this V1beta2DeploymentStatus.
:type: int
"""
self._updated_replicas = updated_replicas
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1beta2DeploymentStatus):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
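# Illustrative usage (not part of the generated file): to_dict walks
# swagger_types, so every declared attribute appears in the output, with None
# for fields that were never set.
if __name__ == '__main__':
    status = V1beta2DeploymentStatus(replicas=3, ready_replicas=2)
    d = status.to_dict()
    assert d['replicas'] == 3 and d['ready_replicas'] == 2
    assert d['collision_count'] is None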
|
#!/usr/bin/env python
from __future__ import print_function
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import codecs
import os
import sys
import re
here = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open
return codecs.open(os.path.join(here, *parts), 'r').read()
def find_version(*file_paths):
version_file = read(*file_paths)
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
long_description = read('README.md')
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errcode = pytest.main(self.test_args)
sys.exit(errcode)
sdict = {
'name': 'redis_natives',
'version': find_version('redis_natives', '__init__.py'),
'description': 'Exposes Redis entities as native Python datatypes. Simple, plain but powerful. Supports namespacing, indexing, and some more.',
'long_description': long_description,
'url': 'http://github.com/cridenour/redis-natives-py',
'download_url' : 'http://github.com/downloads/cridenour/redis-natives-py/redis-natives-py-%s.zip' % find_version('redis_natives', '__init__.py'),
'author' : 'Peter Geil',
'author_email' : 'code@petergeil.name',
'maintainer' : 'Chris Ridenour',
'maintainer_email' : 'chrisridenour@gmail.com',
'keywords' : ['Redis', 'key-value store', 'redis-py', 'datatypes', 'natives', 'helper'],
'license' : 'BSD',
'packages' : ['redis_natives'],
'include_package_data': True,
'requires': ['redis (>=2.4)'],
'test_suite' : 'redis_natives.tests',
'tests_require': ['pytest'],
'cmdclass': {'test': PyTest},
'classifiers' : [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Database'],
'extras_require': {
'testing': ['pytest'],
}
}
setup(**sdict)
|
from io import BytesIO
from base64 import b64encode
from PIL import Image
import requests
from ebedke.utils import http
from ebedke import settings
VISION_API_ROOT = "https://vision.googleapis.com/v1/images:annotate"
def load_img(url: str) -> Image.Image:
image = http.get_bytes(url)
return Image.open(BytesIO(image))
def ocr_image(img_bytes: bytes, langHint: str = "hu") -> str:
img_request = {"requests": [{
"image": {"content": b64encode(img_bytes).decode('ascii')},
"features": [{"type": "DOCUMENT_TEXT_DETECTION"}],
"imageContext": {"languageHints": [langHint]}
}]}
response = requests.post(VISION_API_ROOT, json=img_request,
params={'key': settings.google_token},
headers={'Content-Type': 'application/json'},
timeout=10)
if response.status_code != 200 or response.json().get('error'):
print("[ebedke] Google OCR error", response.text)
return ""
text_content = response.json()['responses'][0]['textAnnotations'][0]['description']
if text_content and isinstance(text_content, str):
return text_content
else:
return ""
|
try:
frozenset
except NameError:
# Import from the sets module for python 2.3
from sets import Set as set
from sets import ImmutableSet as frozenset
try:
from collections import deque
except ImportError:
from utils import deque
from constants import contentModelFlags, spaceCharacters
from constants import entitiesWindows1252, entities
from constants import asciiLowercase, asciiLetters, asciiUpper2Lower
from constants import digits, hexDigits, EOF
from inputstream import HTMLInputStream
# Group entities by their first character, for faster lookups
entitiesByFirstChar = {}
for e in entities:
entitiesByFirstChar.setdefault(e[0], []).append(e)
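# For example, every entity name beginning with "a" (such as "amp;") lands in
# entitiesByFirstChar["a"], so entity lookahead only scans names that share the
# first consumed character instead of the whole entity table.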
class HTMLTokenizer(object):
""" This class takes care of tokenizing HTML.
* self.currentToken
Holds the token that is currently being processed.
* self.state
Holds a reference to the method to be invoked... XXX
* self.states
Holds a mapping between states and methods that implement the state.
* self.stream
Points to HTMLInputStream object.
"""
# XXX need to fix documentation
def __init__(self, stream, encoding=None, parseMeta=True, useChardet=True,
lowercaseElementName=True, lowercaseAttrName=True):
self.stream = HTMLInputStream(stream, encoding, parseMeta, useChardet)
#Perform case conversions?
self.lowercaseElementName = lowercaseElementName
self.lowercaseAttrName = lowercaseAttrName
self.states = {
"data":self.dataState,
"entityData":self.entityDataState,
"tagOpen":self.tagOpenState,
"closeTagOpen":self.closeTagOpenState,
"tagName":self.tagNameState,
"beforeAttributeName":self.beforeAttributeNameState,
"attributeName":self.attributeNameState,
"afterAttributeName":self.afterAttributeNameState,
"beforeAttributeValue":self.beforeAttributeValueState,
"attributeValueDoubleQuoted":self.attributeValueDoubleQuotedState,
"attributeValueSingleQuoted":self.attributeValueSingleQuotedState,
"attributeValueUnQuoted":self.attributeValueUnQuotedState,
"afterAttributeValue":self.afterAttributeValueState,
"bogusComment":self.bogusCommentState,
"markupDeclarationOpen":self.markupDeclarationOpenState,
"commentStart":self.commentStartState,
"commentStartDash":self.commentStartDashState,
"comment":self.commentState,
"commentEndDash":self.commentEndDashState,
"commentEnd":self.commentEndState,
"doctype":self.doctypeState,
"beforeDoctypeName":self.beforeDoctypeNameState,
"doctypeName":self.doctypeNameState,
"afterDoctypeName":self.afterDoctypeNameState,
"beforeDoctypePublicIdentifier":self.beforeDoctypePublicIdentifierState,
"doctypePublicIdentifierDoubleQuoted":self.doctypePublicIdentifierDoubleQuotedState,
"doctypePublicIdentifierSingleQuoted":self.doctypePublicIdentifierSingleQuotedState,
"afterDoctypePublicIdentifier":self.afterDoctypePublicIdentifierState,
"beforeDoctypeSystemIdentifier":self.beforeDoctypeSystemIdentifierState,
"doctypeSystemIdentifierDoubleQuoted":self.doctypeSystemIdentifierDoubleQuotedState,
"doctypeSystemIdentifierSingleQuoted":self.doctypeSystemIdentifierSingleQuotedState,
"afterDoctypeSystemIdentifier":self.afterDoctypeSystemIdentifierState,
"bogusDoctype":self.bogusDoctypeState
}
# Setup the initial tokenizer state
self.contentModelFlag = contentModelFlags["PCDATA"]
self.escapeFlag = False
self.lastFourChars = []
self.state = self.states["data"]
# The current token being created
self.currentToken = None
def __iter__(self):
""" This is where the magic happens.
        We do our usual processing through the states and when we have a token
to return we yield the token which pauses processing until the next token
is requested.
"""
self.tokenQueue = deque([])
# Start processing. When EOF is reached self.state will return False
# instead of True and the loop will terminate.
while self.state():
while self.stream.errors:
yield {"type": "ParseError", "data": self.stream.errors.pop(0)}
while self.tokenQueue:
yield self.tokenQueue.popleft()
    # Below are various helper functions used by the tokenizer states.
def processSolidusInTag(self):
"""If the next character is a '>', convert the currentToken into
an EmptyTag
"""
rv = False
# We need to consume another character to make sure it's a ">"
data = self.stream.char()
if self.currentToken["type"] == "StartTag" and data == u">":
self.currentToken["type"] = "EmptyTag"
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"EOF following solidus"})
self.state = self.states["data"]
self.emitCurrentToken()
rv = True
else:
self.tokenQueue.append({"type": "ParseError", "data":
"incorrectly-placed-solidus"})
            # The character we just consumed needs to be put back on the stack
            # so it doesn't get lost...
self.stream.unget(data)
return rv
def consumeNumberEntity(self, isHex):
"""This function returns either U+FFFD or the character based on the
decimal or hexadecimal representation. It also discards ";" if present.
        If it is not present, a "ParseError" token is appended to self.tokenQueue.
"""
allowed = digits
radix = 10
if isHex:
allowed = hexDigits
radix = 16
charStack = []
# Consume all the characters that are in range while making sure we
# don't hit an EOF.
c = self.stream.char()
while c in allowed and c is not EOF:
charStack.append(c)
c = self.stream.char()
# Convert the set of characters consumed to an int.
charAsInt = int("".join(charStack), radix)
if charAsInt == 13:
self.tokenQueue.append({"type": "ParseError", "data":
"incorrect-cr-newline-entity"})
charAsInt = 10
elif 127 < charAsInt < 160:
# If the integer is between 127 and 160 (so 128 and bigger and 159
# and smaller) we need to do the "windows trick".
self.tokenQueue.append({"type": "ParseError", "data":
"illegal-windows-1252-entity"})
charAsInt = entitiesWindows1252[charAsInt - 128]
# 0 is not a good number, neither are illegal Unicode code points (higher than 0x10FFFF) or surrogate characters (in the range 0xD800 to 0xDFFF).
if 0 < charAsInt and charAsInt <= 1114111 and not (55296 <= charAsInt and charAsInt <= 57343):
try:
# XXX We should have a separate function that does "int" to
# "unicodestring" conversion since this doesn't always work
# according to hsivonen. Also, unichr has a limitation of 65535
char = unichr(charAsInt)
except:
try:
char = eval("u'\\U%08x'" % charAsInt)
except:
self.tokenQueue.append({"type": "ParseError", "data":
"cant-convert-numeric-entity",
"datavars": {"charAsInt": charAsInt}})
else:
char = u"\uFFFD"
self.tokenQueue.append({"type": "ParseError", "data":
"illegal-codepoint-for-numeric-entity",
"datavars": {"charAsInt": charAsInt}})
# Discard the ; if present. Otherwise, put it back on the stream and
# append a ParseError to the token queue.
if c != u";":
self.tokenQueue.append({"type": "ParseError", "data":
"numeric-entity-without-semicolon"})
self.stream.unget(c)
return char
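# Illustrative resolutions (assumed, consistent with the logic above):
#   "&#65;"  -> u"A"   (decimal)
#   "&#x41;" -> u"A"   (hexadecimal; the leading "x" was consumed by the caller)
#   "&#128;" -> remapped through entitiesWindows1252, with a ParseError queued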
def consumeEntity(self, allowedChar=None, fromAttribute=False):
char = None
charStack = [self.stream.char()]
if charStack[0] in spaceCharacters or charStack[0] in (EOF, "<", "&")\
or (allowedChar is not None and allowedChar == charStack[0]):
self.stream.unget(charStack)
elif charStack[0] == u"#":
# We might have a number entity here.
charStack.extend([self.stream.char(), self.stream.char()])
if EOF in charStack[:2]:
# If we reach the end of the file put everything up to EOF
# back in the queue
charStack = charStack[:charStack.index(EOF)]
self.stream.unget(charStack)
self.tokenQueue.append({"type": "ParseError", "data":
"expected-numeric-entity-but-got-eof"})
else:
if charStack[1].lower() == u"x" \
and charStack[2] in hexDigits:
# Hexadecimal entity detected.
self.stream.unget(charStack[2])
char = self.consumeNumberEntity(True)
elif charStack[1] in digits:
# Decimal entity detected.
self.stream.unget(charStack[1:])
char = self.consumeNumberEntity(False)
else:
# No number entity detected.
self.stream.unget(charStack)
self.tokenQueue.append({"type": "ParseError", "data":
"expected-numeric-entity"})
else:
# At this point in the process we might have a named entity. Entities
# are stored in the global variable "entities".
#
# Consume characters and compare these to a substring of the
# entity names in the list until the substring no longer matches.
filteredEntityList = entitiesByFirstChar.get(charStack[0], [])
def entitiesStartingWith(name):
return [e for e in filteredEntityList if e.startswith(name)]
while charStack[-1] != EOF and\
entitiesStartingWith("".join(charStack)):
charStack.append(self.stream.char())
# At this point we have a string that starts with some characters
# that may match an entity
entityName = None
# Try to find the longest entity the string will match to take care
# of "&noti" for instance.
for entityLength in xrange(len(charStack)-1,1,-1):
possibleEntityName = "".join(charStack[:entityLength])
if possibleEntityName in entities:
entityName = possibleEntityName
break
if entityName is not None:
if entityName[-1] != ";":
self.tokenQueue.append({"type": "ParseError", "data":
"named-entity-without-semicolon"})
if entityName[-1] != ";" and fromAttribute and \
(charStack[entityLength] in asciiLetters
or charStack[entityLength] in digits):
self.stream.unget(charStack)
else:
char = entities[entityName]
self.stream.unget(charStack[entityLength:])
else:
self.tokenQueue.append({"type": "ParseError", "data":
"expected-named-entity"})
self.stream.unget(charStack)
return char
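# Illustrative resolutions (assumed, consistent with the longest-match logic
# above): "&amp;" yields u"&"; for "&noti" the longest known entity "&not"
# is matched and the trailing "i" is put back on the stream.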
def processEntityInAttribute(self, allowedChar):
"""This method replaces the need for "entityInAttributeValueState".
"""
entity = self.consumeEntity(allowedChar=allowedChar, fromAttribute=True)
if entity:
self.currentToken["data"][-1][1] += entity
else:
self.currentToken["data"][-1][1] += u"&"
def emitCurrentToken(self):
"""This method is a generic handler for emitting the tags. It also sets
the state to "data" because that's what's needed after a token has been
emitted.
"""
token = self.currentToken
# Add token to the queue to be yielded
if (token["type"] in ("StartTag", "EndTag", "EmptyTag")):
if self.lowercaseElementName:
token["name"] = token["name"].translate(asciiUpper2Lower)
if token["type"] == "EndTag" and token["data"]:
self.tokenQueue.append({"type":"ParseError",
"data":"attributes-in-end-tag"})
self.tokenQueue.append(token)
self.state = self.states["data"]
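# A queued token is a plain dict, e.g. (illustrative):
#   {"type": "StartTag", "name": "div", "data": [[u"class", u"x"]]}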
# Below are the various tokenizer states.
# XXX AT Perhaps we should have Hixie run some evaluation on billions of
# documents to figure out what the order of the various if and elif
# statements should be.
def dataState(self):
data = self.stream.char()
# Keep a charbuffer to handle the escapeFlag
if self.contentModelFlag in\
(contentModelFlags["CDATA"], contentModelFlags["RCDATA"]):
if len(self.lastFourChars) == 4:
self.lastFourChars.pop(0)
self.lastFourChars.append(data)
# The rest of the logic
if data == "&" and self.contentModelFlag in\
(contentModelFlags["PCDATA"], contentModelFlags["RCDATA"]) and not\
self.escapeFlag:
self.state = self.states["entityData"]
elif data == "-" and self.contentModelFlag in\
(contentModelFlags["CDATA"], contentModelFlags["RCDATA"]) and not\
self.escapeFlag and "".join(self.lastFourChars) == "<!--":
self.escapeFlag = True
self.tokenQueue.append({"type": "Characters", "data":data})
elif (data == "<" and (self.contentModelFlag == contentModelFlags["PCDATA"]
or (self.contentModelFlag in
(contentModelFlags["CDATA"],
contentModelFlags["RCDATA"]) and
self.escapeFlag == False))):
self.state = self.states["tagOpen"]
elif data == ">" and self.contentModelFlag in\
(contentModelFlags["CDATA"], contentModelFlags["RCDATA"]) and\
self.escapeFlag and "".join(self.lastFourChars)[1:] == "-->":
self.escapeFlag = False
self.tokenQueue.append({"type": "Characters", "data":data})
elif data == EOF:
# Tokenization ends.
return False
elif data in spaceCharacters:
# Directly after emitting a token you switch back to the "data
# state". At that point spaceCharacters are important so they are
# emitted separately.
self.tokenQueue.append({"type": "SpaceCharacters", "data":
data + self.stream.charsUntil(spaceCharacters, True)})
# No need to update lastFourChars here, since the first space will
# have already broken any <!-- or --> sequences
else:
chars = self.stream.charsUntil(("&", "<", ">", "-"))
self.tokenQueue.append({"type": "Characters", "data":
data + chars})
self.lastFourChars += chars[-4:]
self.lastFourChars = self.lastFourChars[-4:]
return True
def entityDataState(self):
entity = self.consumeEntity()
if entity:
self.tokenQueue.append({"type": "Characters", "data": entity})
else:
self.tokenQueue.append({"type": "Characters", "data": u"&"})
self.state = self.states["data"]
return True
def tagOpenState(self):
data = self.stream.char()
if self.contentModelFlag == contentModelFlags["PCDATA"]:
if data == u"!":
self.state = self.states["markupDeclarationOpen"]
elif data == u"/":
self.state = self.states["closeTagOpen"]
elif data in asciiLetters:
self.currentToken =\
{"type": "StartTag", "name": data, "data": []}
self.state = self.states["tagName"]
elif data == u">":
# XXX In theory it could be something besides a tag name. But
# do we really care?
self.tokenQueue.append({"type": "ParseError", "data":
"expected-tag-name-but-got-right-bracket"})
self.tokenQueue.append({"type": "Characters", "data": u"<>"})
self.state = self.states["data"]
elif data == u"?":
# XXX In theory it could be something besides a tag name. But
# do we really care?
self.tokenQueue.append({"type": "ParseError", "data":
"expected-tag-name-but-got-question-mark"})
self.stream.unget(data)
self.state = self.states["bogusComment"]
else:
# XXX
self.tokenQueue.append({"type": "ParseError", "data":
"expected-tag-name"})
self.tokenQueue.append({"type": "Characters", "data": u"<"})
self.stream.unget(data)
self.state = self.states["data"]
else:
# We know the content model flag is set to either RCDATA or CDATA
# now because this state can never be entered with the PLAINTEXT
# flag.
if data == u"/":
self.state = self.states["closeTagOpen"]
else:
self.tokenQueue.append({"type": "Characters", "data": u"<"})
self.stream.unget(data)
self.state = self.states["data"]
return True
def closeTagOpenState(self):
if (self.contentModelFlag in (contentModelFlags["RCDATA"],
contentModelFlags["CDATA"])):
if self.currentToken:
charStack = []
# So far we know that "</" has been consumed. We now need to know
# whether the next few characters match the name of last emitted
# start tag which also happens to be the currentToken. We also need
# to have the character directly after the characters that could
# match the start tag name.
for x in xrange(len(self.currentToken["name"]) + 1):
charStack.append(self.stream.char())
# Make sure we don't get hit by EOF
if charStack[-1] == EOF:
break
# Since this is just for checking. We put the characters back on
# the stack.
self.stream.unget(charStack)
if self.currentToken \
and self.currentToken["name"].lower() == "".join(charStack[:-1]).lower() \
and charStack[-1] in (spaceCharacters |
frozenset((u">", u"/", u"<", EOF))):
# Because the characters are correct we can safely switch to
# PCDATA mode now. This also means we don't have to do it when
# emitting the end tag token.
self.contentModelFlag = contentModelFlags["PCDATA"]
else:
self.tokenQueue.append({"type": "Characters", "data": u"</"})
self.state = self.states["data"]
# Need to return here since we don't want the rest of the
# method to be walked through.
return True
data = self.stream.char()
if data in asciiLetters:
self.currentToken = {"type":"EndTag", "name":data, "data":[]}
self.state = self.states["tagName"]
elif data == u">":
self.tokenQueue.append({"type": "ParseError", "data":
"expected-closing-tag-but-got-right-bracket"})
self.state = self.states["data"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"expected-closing-tag-but-got-eof"})
self.tokenQueue.append({"type": "Characters", "data": u"</"})
self.state = self.states["data"]
else:
# XXX data can be _'_...
self.tokenQueue.append({"type": "ParseError", "data":
"expected-closing-tag-but-got-char",
"datavars": {"data": data}})
self.stream.unget(data)
self.state = self.states["bogusComment"]
return True
def tagNameState(self):
data = self.stream.char()
if data in spaceCharacters:
self.state = self.states["beforeAttributeName"]
elif data in asciiLetters:
self.currentToken["name"] += data +\
self.stream.charsUntil(asciiLetters, True)
elif data == u">":
self.emitCurrentToken()
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-tag-name"})
self.emitCurrentToken()
elif data == u"/":
self.processSolidusInTag()
self.state = self.states["beforeAttributeName"]
else:
self.currentToken["name"] += data
return True
def beforeAttributeNameState(self):
data = self.stream.char()
if data in spaceCharacters:
self.stream.charsUntil(spaceCharacters, True)
elif data in asciiLetters:
self.currentToken["data"].append([data, ""])
self.state = self.states["attributeName"]
elif data == u">":
self.emitCurrentToken()
elif data == u"/":
self.processSolidusInTag()
elif data == u"'" or data == u'"' or data == u"=":
self.tokenQueue.append({"type": "ParseError", "data":
"invalid-character-in-attribute-name"})
self.currentToken["data"].append([data, ""])
self.state = self.states["attributeName"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"expected-attribute-name-but-got-eof"})
self.emitCurrentToken()
else:
self.currentToken["data"].append([data, ""])
self.state = self.states["attributeName"]
return True
def attributeNameState(self):
data = self.stream.char()
leavingThisState = True
emitToken = False
if data == u"=":
self.state = self.states["beforeAttributeValue"]
elif data in asciiLetters:
self.currentToken["data"][-1][0] += data +\
self.stream.charsUntil(asciiLetters, True)
leavingThisState = False
elif data == u">":
# XXX If we emit here the attributes are converted to a dict
# without being checked and when the code below runs we error
# because data is a dict not a list
emitToken = True
elif data in spaceCharacters:
self.state = self.states["afterAttributeName"]
elif data == u"/":
if not self.processSolidusInTag():
self.state = self.states["beforeAttributeName"]
elif data == u"'" or data == u'"':
self.tokenQueue.append({"type": "ParseError", "data":
"invalid-character-in-attribute-name"})
self.currentToken["data"][-1][0] += data
leavingThisState = False
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-attribute-name"})
self.state = self.states["data"]
emitToken = True
else:
self.currentToken["data"][-1][0] += data
leavingThisState = False
if leavingThisState:
# Attributes are not dropped at this stage. That happens when the
# start tag token is emitted so values can still be safely appended
# to attributes, but we do want to report the parse error in time.
if self.lowercaseAttrName:
self.currentToken["data"][-1][0] = (
self.currentToken["data"][-1][0].translate(asciiUpper2Lower))
for name, value in self.currentToken["data"][:-1]:
if self.currentToken["data"][-1][0] == name:
self.tokenQueue.append({"type": "ParseError", "data":
"duplicate-attribute"})
break
# XXX Fix for above XXX
if emitToken:
self.emitCurrentToken()
return True
def afterAttributeNameState(self):
data = self.stream.char()
if data in spaceCharacters:
self.stream.charsUntil(spaceCharacters, True)
elif data == u"=":
self.state = self.states["beforeAttributeValue"]
elif data == u">":
self.emitCurrentToken()
elif data in asciiLetters:
self.currentToken["data"].append([data, ""])
self.state = self.states["attributeName"]
elif data == u"/":
if not self.processSolidusInTag():
self.state = self.states["beforeAttributeName"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"expected-end-of-tag-but-got-eof"})
self.emitCurrentToken()
else:
self.currentToken["data"].append([data, ""])
self.state = self.states["attributeName"]
return True
def beforeAttributeValueState(self):
data = self.stream.char()
if data in spaceCharacters:
self.stream.charsUntil(spaceCharacters, True)
elif data == u"\"":
self.state = self.states["attributeValueDoubleQuoted"]
elif data == u"&":
self.state = self.states["attributeValueUnQuoted"]
self.stream.unget(data)
elif data == u"'":
self.state = self.states["attributeValueSingleQuoted"]
elif data == u">":
self.emitCurrentToken()
elif data == u"=":
self.tokenQueue.append({"type": "ParseError", "data":
"equals-in-unquoted-attribute-value"})
self.currentToken["data"][-1][1] += data
self.state = self.states["attributeValueUnQuoted"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"expected-attribute-value-but-got-eof"})
self.emitCurrentToken()
else:
self.currentToken["data"][-1][1] += data
self.state = self.states["attributeValueUnQuoted"]
return True
def attributeValueDoubleQuotedState(self):
data = self.stream.char()
if data == "\"":
self.state = self.states["afterAttributeValue"]
elif data == u"&":
self.processEntityInAttribute(u'"')
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-attribute-value-double-quote"})
self.emitCurrentToken()
else:
self.currentToken["data"][-1][1] += data +\
self.stream.charsUntil(("\"", u"&"))
return True
def attributeValueSingleQuotedState(self):
data = self.stream.char()
if data == "'":
self.state = self.states["afterAttributeValue"]
elif data == u"&":
self.processEntityInAttribute(u"'")
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-attribute-value-single-quote"})
self.emitCurrentToken()
else:
self.currentToken["data"][-1][1] += data +\
self.stream.charsUntil(("'", u"&"))
return True
def attributeValueUnQuotedState(self):
data = self.stream.char()
if data in spaceCharacters:
self.state = self.states["beforeAttributeName"]
elif data == u"&":
self.processEntityInAttribute(None)
elif data == u">":
self.emitCurrentToken()
elif data == u'"' or data == u"'" or data == u"=":
self.tokenQueue.append({"type": "ParseError", "data":
"unexpected-character-in-unquoted-attribute-value"})
self.currentToken["data"][-1][1] += data
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-attribute-value-no-quotes"})
self.emitCurrentToken()
else:
self.currentToken["data"][-1][1] += data + self.stream.charsUntil( \
frozenset(("&", ">", "<", "=", "'", '"')) | spaceCharacters)
return True
def afterAttributeValueState(self):
data = self.stream.char()
if data in spaceCharacters:
self.state = self.states["beforeAttributeName"]
elif data == u">":
self.emitCurrentToken()
self.state = self.states["data"]
elif data == u"/":
if not self.processSolidusInTag():
self.state = self.states["beforeAttributeName"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"unexpected-EOF-after-attribute-value"})
self.emitCurrentToken()
self.stream.unget(data)
self.state = self.states["data"]
else:
self.tokenQueue.append({"type": "ParseError", "data":
"unexpected-character-after-attribute-value"})
self.stream.unget(data)
self.state = self.states["beforeAttributeName"]
return True
def bogusCommentState(self):
# Make a new comment token and give it as value all the characters
# until the first > or EOF (charsUntil checks for EOF automatically)
# and emit it.
self.tokenQueue.append(
{"type": "Comment", "data": self.stream.charsUntil((u">"))})
# Eat the character directly after the bogus comment which is either a
# ">" or an EOF.
self.stream.char()
self.state = self.states["data"]
return True
def markupDeclarationOpenState(self):
charStack = [self.stream.char(), self.stream.char()]
if charStack == [u"-", u"-"]:
self.currentToken = {"type": "Comment", "data": u""}
self.state = self.states["commentStart"]
else:
for x in xrange(5):
charStack.append(self.stream.char())
# Put in explicit EOF check
if (EOF not in charStack and
"".join(charStack).upper() == u"DOCTYPE"):
self.currentToken = {"type":"Doctype", "name":u"",
"publicId":None, "systemId":None, "correct":True}
self.state = self.states["doctype"]
else:
self.tokenQueue.append({"type": "ParseError", "data":
"expected-dashes-or-doctype"})
self.stream.unget(charStack)
self.state = self.states["bogusComment"]
return True
def commentStartState(self):
data = self.stream.char()
if data == "-":
self.state = self.states["commentStartDash"]
elif data == ">":
self.tokenQueue.append({"type": "ParseError", "data":
"incorrect-comment"})
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-comment"})
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
else:
self.currentToken["data"] += data + self.stream.charsUntil(u"-")
self.state = self.states["comment"]
return True
def commentStartDashState(self):
data = self.stream.char()
if data == "-":
self.state = self.states["commentEnd"]
elif data == ">":
self.tokenQueue.append({"type": "ParseError", "data":
"incorrect-comment"})
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-comment"})
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
else:
self.currentToken["data"] += "-" + data + self.stream.charsUntil(u"-")
self.state = self.states["comment"]
return True
def commentState(self):
data = self.stream.char()
if data == u"-":
self.state = self.states["commentEndDash"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-comment"})
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
else:
self.currentToken["data"] += data + self.stream.charsUntil(u"-")
return True
def commentEndDashState(self):
data = self.stream.char()
if data == u"-":
self.state = self.states["commentEnd"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-comment-end-dash"})
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
else:
self.currentToken["data"] += u"-" + data +\
self.stream.charsUntil(u"-")
# Consume the next character which is either a "-" or an EOF as
# well so if there's a "-" directly after the "-" we go nicely to
# the "comment end state" without emitting a ParseError() there.
self.stream.char()
return True
def commentEndState(self):
data = self.stream.char()
if data == u">":
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
elif data == u"-":
self.tokenQueue.append({"type": "ParseError", "data":
"unexpected-dash-after-double-dash-in-comment"})
self.currentToken["data"] += data
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-comment-double-dash"})
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
else:
# XXX
self.tokenQueue.append({"type": "ParseError", "data":
"unexpected-char-in-comment"})
self.currentToken["data"] += u"--" + data
self.state = self.states["comment"]
return True
def doctypeState(self):
data = self.stream.char()
if data in spaceCharacters:
self.state = self.states["beforeDoctypeName"]
else:
self.tokenQueue.append({"type": "ParseError", "data":
"need-space-after-doctype"})
self.stream.unget(data)
self.state = self.states["beforeDoctypeName"]
return True
def beforeDoctypeNameState(self):
data = self.stream.char()
if data in spaceCharacters:
pass
elif data == u">":
self.tokenQueue.append({"type": "ParseError", "data":
"expected-doctype-name-but-got-right-bracket"})
self.currentToken["correct"] = False
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"expected-doctype-name-but-got-eof"})
self.currentToken["correct"] = False
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
else:
self.currentToken["name"] = data
self.state = self.states["doctypeName"]
return True
def doctypeNameState(self):
data = self.stream.char()
if data in spaceCharacters:
self.state = self.states["afterDoctypeName"]
elif data == u">":
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-doctype-name"})
self.currentToken["correct"] = False
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
else:
self.currentToken["name"] += data
return True
def afterDoctypeNameState(self):
data = self.stream.char()
if data in spaceCharacters:
pass
elif data == u">":
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
elif data == EOF:
self.currentToken["correct"] = False
self.stream.unget(data)
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-doctype"})
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
else:
charStack = [data]
for x in xrange(5):
charStack.append(self.stream.char())
if EOF not in charStack and\
"".join(charStack).translate(asciiUpper2Lower) == "public":
self.state = self.states["beforeDoctypePublicIdentifier"]
elif EOF not in charStack and\
"".join(charStack).translate(asciiUpper2Lower) == "system":
self.state = self.states["beforeDoctypeSystemIdentifier"]
else:
self.stream.unget(charStack)
self.tokenQueue.append({"type": "ParseError", "data":
"expected-space-or-right-bracket-in-doctype", "datavars":
{"data": data}})
self.currentToken["correct"] = False
self.state = self.states["bogusDoctype"]
return True
def beforeDoctypePublicIdentifierState(self):
data = self.stream.char()
if data in spaceCharacters:
pass
elif data == "\"":
self.currentToken["publicId"] = u""
self.state = self.states["doctypePublicIdentifierDoubleQuoted"]
elif data == "'":
self.currentToken["publicId"] = u""
self.state = self.states["doctypePublicIdentifierSingleQuoted"]
elif data == ">":
self.tokenQueue.append({"type": "ParseError", "data":
"unexpected-end-of-doctype"})
self.currentToken["correct"] = False
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-doctype"})
self.currentToken["correct"] = False
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
else:
self.tokenQueue.append({"type": "ParseError", "data":
"unexpected-char-in-doctype"})
self.currentToken["correct"] = False
self.state = self.states["bogusDoctype"]
return True
def doctypePublicIdentifierDoubleQuotedState(self):
data = self.stream.char()
if data == "\"":
self.state = self.states["afterDoctypePublicIdentifier"]
elif data == ">":
self.tokenQueue.append({"type": "ParseError", "data":
"unexpected-end-of-doctype"})
self.currentToken["correct"] = False
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-doctype"})
self.currentToken["correct"] = False
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
else:
self.currentToken["publicId"] += data
return True
def doctypePublicIdentifierSingleQuotedState(self):
data = self.stream.char()
if data == "'":
self.state = self.states["afterDoctypePublicIdentifier"]
elif data == ">":
self.tokenQueue.append({"type": "ParseError", "data":
"unexpected-end-of-doctype"})
self.currentToken["correct"] = False
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-doctype"})
self.currentToken["correct"] = False
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
else:
self.currentToken["publicId"] += data
return True
def afterDoctypePublicIdentifierState(self):
data = self.stream.char()
if data in spaceCharacters:
pass
elif data == "\"":
self.currentToken["systemId"] = u""
self.state = self.states["doctypeSystemIdentifierDoubleQuoted"]
elif data == "'":
self.currentToken["systemId"] = u""
self.state = self.states["doctypeSystemIdentifierSingleQuoted"]
elif data == ">":
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-doctype"})
self.currentToken["correct"] = False
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
else:
self.tokenQueue.append({"type": "ParseError", "data":
"unexpected-char-in-doctype"})
self.currentToken["correct"] = False
self.state = self.states["bogusDoctype"]
return True
def beforeDoctypeSystemIdentifierState(self):
data = self.stream.char()
if data in spaceCharacters:
pass
elif data == "\"":
self.currentToken["systemId"] = u""
self.state = self.states["doctypeSystemIdentifierDoubleQuoted"]
elif data == "'":
self.currentToken["systemId"] = u""
self.state = self.states["doctypeSystemIdentifierSingleQuoted"]
elif data == ">":
self.tokenQueue.append({"type": "ParseError", "data":
"unexpected-char-in-doctype"})
self.currentToken["correct"] = False
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-doctype"})
self.currentToken["correct"] = False
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
else:
self.tokenQueue.append({"type": "ParseError", "data":
"unexpected-char-in-doctype"})
self.currentToken["correct"] = False
self.state = self.states["bogusDoctype"]
return True
def doctypeSystemIdentifierDoubleQuotedState(self):
data = self.stream.char()
if data == "\"":
self.state = self.states["afterDoctypeSystemIdentifier"]
elif data == ">":
self.tokenQueue.append({"type": "ParseError", "data":
"unexpected-end-of-doctype"})
self.currentToken["correct"] = False
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-doctype"})
self.currentToken["correct"] = False
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
else:
self.currentToken["systemId"] += data
return True
def doctypeSystemIdentifierSingleQuotedState(self):
data = self.stream.char()
if data == "'":
self.state = self.states["afterDoctypeSystemIdentifier"]
elif data == ">":
self.tokenQueue.append({"type": "ParseError", "data":
"unexpected-end-of-doctype"})
self.currentToken["correct"] = False
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-doctype"})
self.currentToken["correct"] = False
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
else:
self.currentToken["systemId"] += data
return True
def afterDoctypeSystemIdentifierState(self):
data = self.stream.char()
if data in spaceCharacters:
pass
elif data == ">":
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
elif data == EOF:
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-doctype"})
self.currentToken["correct"] = False
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
else:
self.tokenQueue.append({"type": "ParseError", "data":
"unexpected-char-in-doctype"})
self.state = self.states["bogusDoctype"]
return True
def bogusDoctypeState(self):
data = self.stream.char()
if data == u">":
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
elif data == EOF:
# XXX EMIT
self.stream.unget(data)
self.tokenQueue.append({"type": "ParseError", "data":
"eof-in-bogus-doctype"})
self.tokenQueue.append(self.currentToken)
self.state = self.states["data"]
else:
pass
return True
|
import numpy as np
import torch
import torchvision
import matplotlib.pyplot as plt
import pickle
from scipy.stats import norm
import re
import json
import os
from layers import Linear, Conv2d
from networks import FFNN, ConvMedBig, MyResnet, myNet, EfficientNet
from itertools import combinations
from PIL import Image
from warnings import warn
def get_network(device, dataset, net_name, input_size, input_channel, n_class, net_config=None, net_dim=None):
if net_name.startswith('ffnn_'):
tokens = net_name.split('_')
sizes = [int(x) for x in tokens[1:]]
net = FFNN(device, dataset, sizes, n_class, input_size, input_channel, net_dim=net_dim).to(device)
elif net_name.startswith('convmedbig_'):
tokens = net_name.split('_')
obj = ConvMedBig
assert tokens[0] == 'convmedbig'
width1 = int(tokens[2])
width2 = int(tokens[3])
width3 = int(tokens[4])
linear_size = int(tokens[5])
net = obj(device, dataset, n_class, input_size, input_channel, width1, width2, width3, linear_size=linear_size).to(device)
elif net_name.startswith('resnet'):
tokens = net_name.split('_')
net = MyResnet(device, dataset, [1, 2], n_class, input_size, input_channel, block=tokens[1], net_dim=net_dim).to(device)
elif net_name.startswith('myresnet'):
tokens = net_name.split('_')
if "p" in tokens[-1]:
pool = tokens[-1][1:]
tokens = tokens[:-1]
if pool == "g":
pooling = "global"
elif pool == "n":
pooling = None
else:
pooling = int(pool)
else:
pooling = "global"
if "w" in tokens[-1]:
widen = tokens[-1][1:]
tokens = tokens[:-1]
widen = int(widen)
else:
widen = 1
n_blocks = list(map(int, tokens[2:]))
net = MyResnet(device, dataset, n_blocks, n_class, input_size, input_channel, block=tokens[1], in_planes=16,
net_dim=net_dim, pooling=pooling, widen_factor=widen).to(device)
elif net_name.startswith("myNetMax"):
if net_config is None:
net_config = parse_net_config(net_name, n_class, input_size, input_channel)
net_config["max"] = True
net_config["scale_width"] = False
net = myNet(device, dataset, net_dim=net_dim, **net_config).to(device)
elif net_name.startswith("myNet"):
if net_config is None:
net_config = parse_net_config(net_name, n_class, input_size, input_channel)
net = myNet(device, dataset, net_dim=net_dim, **net_config).to(device)
elif net_name.startswith("efficientnet-"):
tokens = net_name.split("_")
pretrained = "pre" in tokens
adv = "adv" in tokens
net = EfficientNet(device, dataset, tokens[0], input_size, input_channel, n_class, pretrained, adv)
else:
assert False, 'Unknown network!'
net.determine_dims(torch.randn((2, input_channel, input_size, input_size), dtype=torch.float).to(device))
return net
def parse_net_config(net_name, n_class, input_size, input_channel):
tokens = [x.split("_") for x in net_name.split("__")[1:]]
conv_widths = [0]
kernel_size = [0]
strides = [1]
depth_conv = [0]
paddings = None
pool = False
bn = re.match("myNet-BN.*",net_name) is not None
if len(tokens) == 1:
linear_sizes = tokens[0]
elif len(tokens) == 3:
conv_widths, kernel_size, linear_sizes = tokens
elif len(tokens) == 4:
conv_widths, kernel_size, depth_conv, linear_sizes = tokens
elif len(tokens) == 5:
conv_widths, kernel_size, strides, depth_conv, linear_sizes = tokens
elif len(tokens) == 6:
conv_widths, kernel_size, strides, paddings, depth_conv, linear_sizes = tokens
elif len(tokens) == 7:
conv_widths, kernel_size, strides, paddings, depth_conv, linear_sizes, pool = tokens
# pool arrives as a list of string tokens; interpret a "1" flag as True
pool = pool == ["1"]
else:
raise RuntimeError("Cant read net configuration")
conv_widths = [int(x) for x in conv_widths]
kernel_size = [int(x) for x in kernel_size]
strides = [int(x) for x in strides]
paddings = None if paddings is None else [int(x) for x in paddings]
if conv_widths[0] == 0:
conv_widths = []
kernel_size = []
strides = []
linear_sizes = [int(x) for x in linear_sizes]
depth_conv = int(depth_conv[0]) if int(depth_conv[0]) > 0 else None
net_config = {"n_class": n_class, "input_size": input_size, "input_channel": input_channel,
"conv_widths": conv_widths, "kernel_sizes": kernel_size, "linear_sizes": linear_sizes,
"depth_conv": depth_conv, "paddings": paddings, "strides": strides, "pool": pool, "bn": bn}
return net_config
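# Illustrative (assumed) encodings handled by parse_net_config:
#   "myNet__200_200"          -> purely linear net with sizes [200, 200]
#   "myNet__16_32__3_3__100"  -> conv widths [16, 32], 3x3 kernels, linear [100]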
def get_net(device, dataset, net_name, input_size, input_channel, n_class, load_model=None, net_config=None, balance_factor=1, net_dim=None):
net = get_network(device, dataset, net_name, input_size, input_channel, n_class, net_config=net_config, net_dim=net_dim).to(device) #, feature_extract=-1).to(device)
if n_class == 1 and isinstance(net.blocks[-1], Linear) and net.blocks[-1].bias is not None:
net.blocks[-1].linear.bias.data = torch.tensor(-norm.ppf(balance_factor/(balance_factor+1)),
dtype=net.blocks[-1].linear.bias.data.dtype).view(net.blocks[-1].linear.bias.data.shape)
if load_model is not None:
if "crown-ibp" in load_model or "LiRPA" in load_model:
net = load_CROWNIBP_net_state(net, load_model)
else:
net = load_net_state(net, load_model)
init_slopes(net, device, trainable=False)
return net
def load_net_state(net, load_model):
state_dict_load = torch.load(load_model)
state_dict_new = net.state_dict()
try:
missing_keys, unexpected_keys = net.load_state_dict(state_dict_load, strict=False)
print("net loaded from %s. %d missing_keys. %d unexpected_keys" % (load_model, len(missing_keys), len(unexpected_keys)))
except:
counter = 0
for k_load, v_load in state_dict_load.items():
if k_load in state_dict_new and all(v_load.squeeze().shape == np.array(state_dict_new[k_load].squeeze().shape)):
state_dict_new.update({k_load: v_load.view(state_dict_new[k_load].shape)})
counter += 1
net.load_state_dict(state_dict_new, strict=False)
print("%d/%d parameters loaded from from %s" % (counter, len(state_dict_new), load_model))
return net
def match_keys(new_dict, loaded_dict):
new_dict_keys = list(new_dict.keys())
new_dict_type = np.array([("bn." if ".bn" in x else "")+re.match(r".*\.([a-z,_]+)$", x).group(1) for x in new_dict_keys])
new_dict_shape = [tuple(x.size()) if len(x.size())>0 else (1,) for x in new_dict.values()]
unloaded = np.ones(len(new_dict_keys), dtype=int)
loaded_dict_type = [("bn." if "bn" in x else "")+re.match(r".*\.([a-z,_]+)$", x).group(1) for x in loaded_dict.keys()]
if all([not "bn" in x for x in loaded_dict_type]) and any([not "bn" in x for x in new_dict_type]):
bn_ids = [re.match(r"(.*)\.(running_mean)$", x).group(1) for x in loaded_dict.keys() if "running_mean" in x]
loaded_dict_type = ["bn."+ x if re.match(r"(.*)\.[a-z,_]+$", y).group(1) in bn_ids else x for x,y in zip(loaded_dict_type,loaded_dict.keys())]
for i, (k,v) in enumerate(loaded_dict.items()):
matched_key_ids = (unloaded
*(new_dict_type == loaded_dict_type[i])
*(np.array([x == (tuple(v.size()) if len(v.size())>0 else (1,)) for x in new_dict_shape]))
).nonzero()[0]
if len(matched_key_ids)==0:
warn(f"Model only partially loaded. Failed at [{i+1:d}/{len(loaded_dict_type):d}]")
break
matched_key_idx = matched_key_ids[0] if isinstance(matched_key_ids, np.ndarray) else int(matched_key_ids)
matched_key = new_dict_keys[matched_key_idx]
key_match = re.match(r"(.*[0-9]+\.)([a-z]*\.){0,1}([bn.]{0,1}[a-z,_]+)$", matched_key)
matched_keys = [x for x in new_dict_keys if x.startswith(key_match.group(1)) and x.endswith(key_match.group(3))]
for j, x in enumerate(new_dict_keys):
if x in matched_keys and j in matched_key_ids:
unloaded[j] = 0
new_dict.update({x: v})
return new_dict
def load_CROWNIBP_net_state(net, load_model):
checkpoint = torch.load(load_model)
state_dict_new = net.state_dict()
if isinstance(checkpoint["state_dict"], list):
checkpoint["state_dict"] = checkpoint["state_dict"][0]
state_dict_load = match_keys(state_dict_new, checkpoint["state_dict"])
try:
missing_keys, unexpected_keys = net.load_state_dict(state_dict_load, strict=False)
assert len([x for x in missing_keys if "weight" in x or "bias" in x]) == 0 and len(unexpected_keys) == 0
print("net loaded from %s" % load_model)
except:
counter = 0
for k_load, v_load in state_dict_load.items():
if k_load in state_dict_new and v_load.shape == state_dict_new[k_load].shape:
state_dict_new.update({k_load: v_load})
counter += 1
net.load_state_dict(state_dict_new, strict=False)
print("%d/%d parameters loaded from from %s" % (counter, len(state_dict_load), load_model))
return net
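# Samples i.i.d. standard-Cauchy noise on the GPU; the clamp guards against
# the distribution's heavy tails producing inf/NaN downstream.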
def my_cauchy(*shape):
return torch.clamp(torch.FloatTensor(*shape).cuda().cauchy_(), -1e7, 1e7)
class Scheduler:
def __init__(self, start, end, n_steps, warmup, power=1):
self.start = start
self.end = end
self.n_steps = n_steps
self.warmup = warmup
self.curr_steps = 0
self.power = power
def advance_time(self, k_steps):
self.curr_steps += k_steps
def get(self):
if self.n_steps == self.warmup:
return self.end
if self.curr_steps < self.warmup:
return self.start
elif self.curr_steps > self.n_steps:
return self.end
inter_factor = (self.curr_steps - self.warmup) / float(self.n_steps - self.warmup)
inter_factor = np.power(inter_factor, 1/self.power)
return self.start + (self.end - self.start) * inter_factor
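# Usage sketch (assumed, not from the original training loop):
#   sched = Scheduler(start=0.0, end=8 / 255, n_steps=10000, warmup=1000)
#   sched.advance_time(5500)
#   sched.get()  # halfway through the ramp -> 4/255 for power=1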
class Statistics:
def __init__(self, window_size, tb_writer, log_dir=None, post_fix=None):
self.window_size = window_size
self.tb_writer = tb_writer
self.values = {}
self.steps = 0
self.log_dir = log_dir
self.post_fix = "" if post_fix is None else post_fix
def update_post_fix(self, post_fix=""):
self.post_fix = post_fix
def report(self, metric_name, value):
metric_name = metric_name + self.post_fix
if metric_name not in self.values:
self.values[metric_name] = []
self.values[metric_name] += [value]
def export_to_json(self, file, epoch=None):
epoch = self.steps if epoch is None else epoch
data = {"epoch_%d/%s"%(epoch, k): np.mean(v) for k, v in self.values.items() if len(v)>0}
data = self.parse_keys(data)
with open(file, 'a') as f:
json.dump(data, f, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None,
indent="\t", separators=None, default=None, sort_keys=True)
def report_hist(self, metric_name, values):
metric_name = metric_name + self.post_fix
self.tb_writer.add_histogram(metric_name, values.view(-1).detach().cpu().numpy(), self.steps)
def print_stats(self):
print('==============')
for k, v in self.values.items():
print('%s: %.5lf' % (k, np.mean(v)))
def get(self, k, no_post_fix=False):
k = k if no_post_fix else k+self.post_fix
return np.mean(self.values[k])
def update_tb(self, epoch=None):
if self.log_dir is not None:
self.export_to_json(os.path.join(self.log_dir, "training_log.json"), epoch=epoch)
for k, v in self.values.items():
if len(v) == 0: continue
self.tb_writer.add_scalar(k, np.mean(v), self.steps if epoch is None else epoch)
self.values[k] = []
self.steps += 1
def parse_keys(self, data_old):
data_new = {}
for k, v in data_old.items():
data_new = self.add_to_leveled_dict(data_new, k, v)
return data_new
def add_to_leveled_dict(self, data_dict, label, data):
if "/" not in label:
data_dict[label] = data
return data_dict
labels = label.split("/")
if labels[0] in data_dict:
data_dict[labels[0]] = self.add_to_leveled_dict(data_dict[labels[0]],"/".join(labels[1:]),data)
else:
data_dict[labels[0]] = self.add_to_leveled_dict({},"/".join(labels[1:]),data)
return data_dict
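# Minimal usage sketch (assumed; tb_writer would typically be a
# torch.utils.tensorboard SummaryWriter):
#   stats = Statistics(window_size=100, tb_writer=writer, log_dir="logs")
#   stats.report("train/loss", loss.item())
#   stats.update_tb(epoch=epoch)  # flushes running means to TensorBoard/JSON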
def init_slopes(net, device, trainable=False):
for param_name, param_value in net.named_parameters():
if 'deepz' in param_name:
param_value.data = -torch.ones(param_value.size()).to(device)
param_value.requires_grad_(trainable)
def count_vars(args, net):
var_count = 0
var_count_t = 0
var_count_relu = 0
for p_name, params in net.named_parameters():
if "weight" in p_name or "bias" in p_name:
var_count += int(params.numel())
var_count_t += int(params.numel() * params.requires_grad)
elif "deepz_lambda" in p_name:
var_count_relu += int(params.numel())
args.var_count = var_count
args.var_count_t = var_count_t
args.var_count_relu = var_count_relu
print('Number of parameters: ', var_count)
def write_config(args, file_path):
f = open(file_path, 'w+')
for param in [param for param in dir(args) if not param[0] == "_"]:
f.write("{:<30} {}\n".format(param + ":", str(getattr(args, param))))
f.close()
class AdvAttack:
def __init__(self, eps=2./255, n_steps=1, step_size=1.25, adv_type="pgd"):
self.eps = eps
self.n_steps = n_steps
self.step_size = step_size
self.adv_type = adv_type
def update_params(self, eps=None, n_steps=None, step_size=None, adv_type=None):
self.eps = self.eps if eps is None else eps
self.n_steps = self.n_steps if n_steps is None else n_steps
self.step_size = self.step_size if step_size is None else step_size
self.adv_type = self.adv_type if adv_type is None else adv_type
def get_params(self):
return self.eps, self.n_steps, self.step_size, self.adv_type
def get_lp_loss(blocks, p=1, input_size=1, scale_conv=True):
lp_loss = 0
N = input_size
for block in blocks:
if isinstance(block,Conv2d):
conv = block.conv
N = max(np.floor((N + 2 * conv.padding[0] - conv.dilation[0] * (conv.kernel_size[0] - 1) - 1) /
conv.stride[0]), 0.0) + 1
lp_loss += block.weight.abs().pow(p).sum() * ((N * N) if scale_conv else 1)
elif isinstance(block, Linear):
lp_loss += block.weight.abs().pow(p).sum()
return lp_loss
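# Worked example (illustrative): a 3x3 conv with padding 1, stride 2 on a
# 32x32 input gives N = floor((32 + 2 - 2 - 1) / 2) + 1 = 16, so with
# scale_conv=True its weight penalty is scaled by 16 * 16 spatial positions.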
class MyVisionDataset(torchvision.datasets.vision.VisionDataset):
def __init__(self, root, train=True, transform=None, target_transform=None, data=None, targets=None, classes=None,
class_to_idx=None, orig_idx=None, sample_weights=None, yield_weights=True, loader=None):
super(MyVisionDataset, self).__init__(root, transform=transform, target_transform=target_transform)
self.train = train # training set or test set
self.loader = loader
self.data = data.cpu().detach().numpy() if isinstance(data, torch.Tensor) else data
if isinstance(data, torch.Tensor) and self.data.ndim == 4 and self.data.shape[2] == self.data.shape[3]:
self.data = self.data.transpose((0, 2, 3, 1)) # convert to HWC
self.targets = targets.cpu().detach().tolist() if isinstance(targets, torch.Tensor) else targets
self.targets = self.targets.tolist() if isinstance(self.targets, np.ndarray) else self.targets
sample_weights = len(targets)*[1.] if sample_weights is None else sample_weights
self.sample_weights = sample_weights.cpu().detach().tolist() if isinstance(sample_weights, torch.Tensor) else sample_weights
self.sample_weights = self.sample_weights.tolist() if isinstance(self.sample_weights, np.ndarray) else self.sample_weights
self.yield_weights = yield_weights
if orig_idx is None:
self.orig_idx = np.arange(0, len(targets))
else:
self.orig_idx = orig_idx
self.classes = classes
self.class_to_idx = class_to_idx
@staticmethod
def from_idx(dataset, idx, sample_weights=None, train=True):
new_len = len(idx) # For debug mode
new_data = np.array([v[0] for v,i in zip(dataset.samples[:new_len],idx) if i]) if not hasattr(dataset,"data") \
else dataset.data[:new_len][idx]
new_targets = np.array(dataset.targets)[:new_len][idx].tolist()
new_weights = None if (not hasattr(dataset,"sample_weights") or dataset.sample_weights is None) else \
np.array(dataset.sample_weights)[:new_len][idx].tolist()
yield_weights = False if not hasattr(dataset,"yield_weights") else dataset.yield_weights
old_orig_idx = dataset.orig_idx if hasattr(dataset,"orig_idx") else np.arange(0,len(dataset))
new_orig_idx = old_orig_idx[:new_len][idx]
loader = dataset.loader if hasattr(dataset, "loader") else None
new_dataset = MyVisionDataset(dataset.root, train, dataset.transform, dataset.target_transform, new_data,
new_targets, dataset.classes, dataset.class_to_idx, new_orig_idx, new_weights,
yield_weights, loader)
if sample_weights is not None:
new_dataset.set_weights(sample_weights)
return new_dataset
@staticmethod
def from_idx_and_targets(dataset, idx, new_targets, classes, sample_weights=None):
new_len = len(idx) # For debug mode
new_data = np.array([v[0] for v,i in zip(dataset.samples[:new_len],idx) if i]) if not hasattr(dataset,"data") \
else dataset.data[:new_len][idx]
assert new_data.shape[0] == len(new_targets)
new_targets = new_targets.cpu().detach().numpy() if isinstance(new_targets,torch.Tensor) else new_targets
new_weights = None if (not hasattr(dataset,"sample_weights") or dataset.sample_weights is None) else \
np.array(dataset.sample_weights)[:new_len][idx].tolist()
yield_weights = False if not hasattr(dataset,"yield_weights") else dataset.yield_weights
class_to_idx = {int(k) : classes[i] for i,k in enumerate(np.unique(np.array(new_targets)))}
old_orig_idx = dataset.orig_idx if hasattr(dataset,"orig_idx") else np.arange(0,len(dataset))
new_orig_idx = old_orig_idx[:new_len][idx]
loader = dataset.loader if hasattr(dataset,"loader") else None
new_dataset = MyVisionDataset(dataset.root, dataset.train, dataset.transform, dataset.target_transform, new_data,
new_targets, classes, class_to_idx, new_orig_idx, new_weights, yield_weights, loader)
if sample_weights is not None:
new_dataset.set_weights(sample_weights)
return new_dataset
def set_weights(self, sample_weights=None, yield_weights=True):
sample_weights = len(self.targets) * [1.] if sample_weights is None else sample_weights
self.sample_weights = sample_weights.cpu().detach().tolist() if isinstance(sample_weights, torch.Tensor) else sample_weights
self.sample_weights = self.sample_weights.tolist() if isinstance(self.sample_weights, np.ndarray) else self.sample_weights
self.yield_weights = yield_weights
def __getitem__(self, index):
img, target = self.data[index], self.targets[index]
# doing this so that it is consistent with all other datasets
# to return a PIL Image
if self.loader is not None:
img = self.loader(img)
else:
img = Image.fromarray(img)
if self.transform is not None:
img = self.transform(img)
if self.target_transform is not None:
target = self.target_transform(target)
if self.yield_weights:
weight = self.sample_weights[index]
return img, target, weight
return img, target
def __len__(self):
return len(self.data)
def get_scaled_eps(args, layers, relu_ids, curr_layer_idx, j):
if args.eps_scaling_mode == "COLT":
eps = args.eps_factor ** (len(layers) - 2 - j) * (args.start_eps_factor * args.train_eps)
elif args.eps_scaling_mode == "depth":
depth = len([x for x in relu_ids if x > curr_layer_idx])
eps = args.eps_factor ** depth * (args.start_eps_factor * args.train_eps)
elif args.eps_scaling_mode in ["none", "None", None]:
eps = args.train_eps
else:
assert False, "eps scaling mode %s is not known" % args.eps_scaling_mode
return eps
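# Worked example (illustrative): with eps_scaling_mode="depth", eps_factor=0.5,
# start_eps_factor=1.0, train_eps=8/255 and two ReLU layers after the current
# one, eps = 0.5 ** 2 * (1.0 * 8/255) = 2/255.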
def reset_params(args, net, dtype, reset=True):
""" Resets DeepZ slope parameters to the original values. """
relu_params = []
for param_name, param_value in net.named_parameters():
if 'deepz' in param_name:
relu_params.append(param_value)
if args.test_domain == 'zono_iter' and reset:
param_value.data = -torch.ones(param_value.size()).to(param_value.device, dtype=dtype)
param_value.requires_grad_(True)
else:
param_value.requires_grad_(False)
return relu_params
def get_layers(train_mode, cnet, n_attack_layers=1, min_layer=-2, base_layers=True, protected_layers=0):
### return n layers to be attacked plus one previous "warm up" layer prepended
### if base_layers use [-2,-1] to prepend, else use layers in the order of their occurrence
relaxed_net = cnet.relaxed_net if "relaxed_net" in dir(cnet) else cnet.module.relaxed_net
relu_ids = relaxed_net.get_relu_ids()
if "COLT" in train_mode:
attack_layers = [x for x in ([-1] + relu_ids) if x >= min_layer]
if protected_layers > 0:
attack_layers = attack_layers[:-min(protected_layers,len(attack_layers))]
warmup_layers = ([-2,-1] if min_layer>=0 else [-2]) \
if base_layers \
else ([-2] + [x for x in ([-1] + relu_ids) if x < min_layer])[-max(1,1+n_attack_layers-len(attack_layers)):]
layers = warmup_layers + attack_layers
layers = layers[:min(n_attack_layers+1, len(layers))]
assert len(layers) >= 2, "Not enough layers remaining for COLT training"
elif "adv" in train_mode:
layers = [-1, -1]
elif "nat" in train_mode:
layers = [-2, -2]
elif "diffAI" in train_mode:
layers = [-2, -2]
else:
raise RuntimeError(f"Unknown train mode {train_mode:}")
return layers
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
from neutron_lib.api.definitions import portbindings
from neutron_lib import constants as const
OVN_SG_EXT_ID_KEY = 'neutron:security_group_id'
OVN_SG_RULE_EXT_ID_KEY = 'neutron:security_group_rule_id'
OVN_ML2_MECH_DRIVER_NAME = 'ovn'
OVN_NETWORK_NAME_EXT_ID_KEY = 'neutron:network_name'
OVN_NETWORK_MTU_EXT_ID_KEY = 'neutron:mtu'
OVN_PORT_NAME_EXT_ID_KEY = 'neutron:port_name'
OVN_PORT_FIP_EXT_ID_KEY = 'neutron:port_fip'
OVN_ROUTER_NAME_EXT_ID_KEY = 'neutron:router_name'
OVN_ROUTER_AZ_HINTS_EXT_ID_KEY = 'neutron:availability_zone_hints'
OVN_ROUTER_IS_EXT_GW = 'neutron:is_ext_gw'
OVN_GW_PORT_EXT_ID_KEY = 'neutron:gw_port_id'
OVN_SUBNET_EXT_ID_KEY = 'neutron:subnet_id'
OVN_SUBNET_EXT_IDS_KEY = 'neutron:subnet_ids'
OVN_PHYSNET_EXT_ID_KEY = 'neutron:provnet-physical-network'
OVN_NETTYPE_EXT_ID_KEY = 'neutron:provnet-network-type'
OVN_SEGID_EXT_ID_KEY = 'neutron:provnet-segmentation-id'
OVN_PROJID_EXT_ID_KEY = 'neutron:project_id'
OVN_DEVID_EXT_ID_KEY = 'neutron:device_id'
OVN_CIDRS_EXT_ID_KEY = 'neutron:cidrs'
OVN_FIP_EXT_ID_KEY = 'neutron:fip_id'
OVN_FIP_PORT_EXT_ID_KEY = 'neutron:fip_port_id'
OVN_FIP_EXT_MAC_KEY = 'neutron:fip_external_mac'
OVN_FIP_NET_ID = 'neutron:fip_network_id'
OVN_REV_NUM_EXT_ID_KEY = 'neutron:revision_number'
OVN_QOS_POLICY_EXT_ID_KEY = 'neutron:qos_policy_id'
OVN_SG_IDS_EXT_ID_KEY = 'neutron:security_group_ids'
OVN_DEVICE_OWNER_EXT_ID_KEY = 'neutron:device_owner'
OVN_LIVENESS_CHECK_EXT_ID_KEY = 'neutron:liveness_check_at'
METADATA_LIVENESS_CHECK_EXT_ID_KEY = 'neutron:metadata_liveness_check_at'
OVN_PORT_BINDING_PROFILE = portbindings.PROFILE
OVN_PORT_BINDING_PROFILE_PARAMS = [{'parent_name': str,
'tag': int},
{'vtep-physical-switch': str,
'vtep-logical-switch': str}]
MIGRATING_ATTR = 'migrating_to'
OVN_ROUTER_PORT_OPTION_KEYS = ['router-port', 'nat-addresses']
OVN_GATEWAY_CHASSIS_KEY = 'redirect-chassis'
OVN_CHASSIS_REDIRECT = 'chassisredirect'
OVN_GATEWAY_NAT_ADDRESSES_KEY = 'nat-addresses'
OVN_DROP_PORT_GROUP_NAME = 'neutron_pg_drop'
OVN_ROUTER_PORT_GW_MTU_OPTION = 'gateway_mtu'
OVN_PROVNET_PORT_NAME_PREFIX = 'provnet-'
# Agent extension constants
OVN_AGENT_DESC_KEY = 'neutron:description'
OVN_AGENT_METADATA_SB_CFG_KEY = 'neutron:ovn-metadata-sb-cfg'
OVN_AGENT_METADATA_DESC_KEY = 'neutron:description-metadata'
OVN_AGENT_METADATA_ID_KEY = 'neutron:ovn-metadata-id'
OVN_CONTROLLER_AGENT = 'OVN Controller agent'
OVN_CONTROLLER_GW_AGENT = 'OVN Controller Gateway agent'
OVN_METADATA_AGENT = 'OVN Metadata agent'
# OVN ACLs have priorities. The highest priority ACL that matches is the one
# that takes effect. Our choice of priority numbers is arbitrary, but it
# leaves room above and below the ACLs we create. We only need two priorities.
# The first is for all the things we allow. The second is for dropping traffic
# by default.
ACL_PRIORITY_ALLOW = 1002
ACL_PRIORITY_DROP = 1001
ACL_ACTION_DROP = 'drop'
ACL_ACTION_ALLOW_RELATED = 'allow-related'
ACL_ACTION_ALLOW = 'allow'
# When an OVN L3 gateway is created, it needs to be bound to a chassis. In
# case a chassis is not found OVN_GATEWAY_INVALID_CHASSIS will be set in
# the options column of the Logical Router. This value is used to detect
# unhosted router gateways to schedule.
OVN_GATEWAY_INVALID_CHASSIS = 'neutron-ovn-invalid-chassis'
# NOTE(lucasagomes): These options were last synced from
# https://github.com/ovn-org/ovn/blob/feb5d6e81d5a0290aa3618a229c860d01200422e/lib/ovn-l7.h
#
# NOTE(lucasagomes): Whenever we update these lists please also update
# the related documentation at doc/source/ovn/dhcp_opts.rst
#
# Mapping between Neutron option names and OVN ones
SUPPORTED_DHCP_OPTS_MAPPING = {
4: {'arp-timeout': 'arp_cache_timeout',
'tcp-keepalive': 'tcp_keepalive_interval',
'netmask': 'netmask',
'router': 'router',
'dns-server': 'dns_server',
'log-server': 'log_server',
'lpr-server': 'lpr_server',
'domain-name': 'domain_name',
'swap-server': 'swap_server',
'policy-filter': 'policy_filter',
'router-solicitation': 'router_solicitation',
'nis-server': 'nis_server',
'ntp-server': 'ntp_server',
'server-id': 'server_id',
'tftp-server': 'tftp_server',
'classless-static-route': 'classless_static_route',
'ms-classless-static-route': 'ms_classless_static_route',
'ip-forward-enable': 'ip_forward_enable',
'router-discovery': 'router_discovery',
'ethernet-encap': 'ethernet_encap',
'default-ttl': 'default_ttl',
'tcp-ttl': 'tcp_ttl',
'mtu': 'mtu',
'lease-time': 'lease_time',
'T1': 'T1',
'T2': 'T2',
'bootfile-name': 'bootfile_name',
'wpad': 'wpad',
'path-prefix': 'path_prefix',
'tftp-server-address': 'tftp_server_address',
'server-ip-address': 'tftp_server_address',
'1': 'netmask',
'3': 'router',
'6': 'dns_server',
'7': 'log_server',
'9': 'lpr_server',
'15': 'domain_name',
'16': 'swap_server',
'21': 'policy_filter',
'32': 'router_solicitation',
'35': 'arp_cache_timeout',
'38': 'tcp_keepalive_interval',
'41': 'nis_server',
'42': 'ntp_server',
'54': 'server_id',
'66': 'tftp_server',
'121': 'classless_static_route',
'249': 'ms_classless_static_route',
'19': 'ip_forward_enable',
'31': 'router_discovery',
'36': 'ethernet_encap',
'23': 'default_ttl',
'37': 'tcp_ttl',
'26': 'mtu',
'51': 'lease_time',
'58': 'T1',
'59': 'T2',
'67': 'bootfile_name',
'252': 'wpad',
'210': 'path_prefix',
'150': 'tftp_server_address'},
6: {'server-id': 'server_id',
'dns-server': 'dns_server',
'domain-search': 'domain_search',
'ia-addr': 'ip_addr',
'2': 'server_id',
'5': 'ia_addr',
'24': 'domain_search',
'23': 'dns_server'},
}
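# Example (illustrative): SUPPORTED_DHCP_OPTS_MAPPING[4]['dns-server'] and
# SUPPORTED_DHCP_OPTS_MAPPING[4]['6'] both resolve to the OVN option
# 'dns_server'.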
# Special option for disabling DHCP via extra DHCP options
DHCP_DISABLED_OPT = 'dhcp_disabled'
DHCPV6_STATELESS_OPT = 'dhcpv6_stateless'
# When setting global DHCP options, these options will be ignored
# as they are required for basic network functions and will be
# set by Neutron.
GLOBAL_DHCP_OPTS_PROHIBIT_LIST = {
4: ['server_id', 'lease_time', 'mtu', 'router', 'server_mac',
'dns_server', 'classless_static_route'],
6: ['dhcpv6_stateless', 'dns_server', 'server_id']}
CHASSIS_DATAPATH_NETDEV = 'netdev'
CHASSIS_IFACE_DPDKVHOSTUSER = 'dpdkvhostuser'
OVN_IPV6_ADDRESS_MODES = {
const.IPV6_SLAAC: const.IPV6_SLAAC,
const.DHCPV6_STATEFUL: const.DHCPV6_STATEFUL.replace('-', '_'),
const.DHCPV6_STATELESS: const.DHCPV6_STATELESS.replace('-', '_')
}
DB_MAX_RETRIES = 60
DB_INITIAL_RETRY_INTERVAL = 0.5
DB_MAX_RETRY_INTERVAL = 1
TXN_COMMITTED = 'committed'
INITIAL_REV_NUM = -1
ACL_EXPECTED_COLUMNS_NBDB = (
'external_ids', 'direction', 'log', 'priority',
'name', 'action', 'severity', 'match')
# Resource types
TYPE_NETWORKS = 'networks'
TYPE_PORTS = 'ports'
TYPE_SECURITY_GROUP_RULES = 'security_group_rules'
TYPE_ROUTERS = 'routers'
TYPE_ROUTER_PORTS = 'router_ports'
TYPE_SECURITY_GROUPS = 'security_groups'
TYPE_FLOATINGIPS = 'floatingips'
TYPE_SUBNETS = 'subnets'
_TYPES_PRIORITY_ORDER = (
TYPE_NETWORKS,
TYPE_SECURITY_GROUPS,
TYPE_SUBNETS,
TYPE_ROUTERS,
TYPE_PORTS,
TYPE_ROUTER_PORTS,
TYPE_FLOATINGIPS,
TYPE_SECURITY_GROUP_RULES)
# The order in which the resources should be created or updated by the
# maintenance task: Root ones first and leafs at the end.
MAINTENANCE_CREATE_UPDATE_TYPE_ORDER = {
t: n for n, t in enumerate(_TYPES_PRIORITY_ORDER, 1)}
# The order in which the resources should be deleted by the maintenance
# task: Leaf ones first and roots at the end.
MAINTENANCE_DELETE_TYPE_ORDER = {
t: n for n, t in enumerate(reversed(_TYPES_PRIORITY_ORDER), 1)}
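# A hedged sketch (illustrative only): the maintenance task could sort its
# pending work with the two order maps above; `pending` is a hypothetical
# list of (resource_type, row) tuples.
def _sort_for_create_update(pending):
    return sorted(pending, key=lambda p: MAINTENANCE_CREATE_UPDATE_TYPE_ORDER[p[0]])
def _sort_for_delete(pending):
    return sorted(pending, key=lambda p: MAINTENANCE_DELETE_TYPE_ORDER[p[0]])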
# The addresses field to set in the logical switch port which has a
# peer router port (connecting to the logical router).
DEFAULT_ADDR_FOR_LSP_WITH_PEER = 'router'
# FIP ACTIONS
FIP_ACTION_ASSOCIATE = 'fip_associate'
FIP_ACTION_DISASSOCIATE = 'fip_disassociate'
# Loadbalancer constants
LRP_PREFIX = "lrp-"
RE_PORT_FROM_GWC = re.compile(r'(%s)([\w-]+)_([\w-]+)' % LRP_PREFIX)
LB_VIP_PORT_PREFIX = "ovn-lb-vip-"
LB_EXT_IDS_LS_REFS_KEY = 'ls_refs'
LB_EXT_IDS_LR_REF_KEY = 'lr_ref'
LB_EXT_IDS_POOL_PREFIX = 'pool_'
LB_EXT_IDS_LISTENER_PREFIX = 'listener_'
LB_EXT_IDS_MEMBER_PREFIX = 'member_'
LB_EXT_IDS_VIP_KEY = 'neutron:vip'
LB_EXT_IDS_VIP_FIP_KEY = 'neutron:vip_fip'
LB_EXT_IDS_VIP_PORT_ID_KEY = 'neutron:vip_port_id'
# Hash Ring constants
HASH_RING_NODES_TIMEOUT = 60
HASH_RING_TOUCH_INTERVAL = 30
HASH_RING_CACHE_TIMEOUT = 30
HASH_RING_ML2_GROUP = 'mechanism_driver'
# Maximum chassis count where a gateway port can be hosted
MAX_GW_CHASSIS = 5
UNKNOWN_ADDR = 'unknown'
PORT_CAP_SWITCHDEV = 'switchdev'
# The name of the port security group attribute is currently not in neutron nor
# neutron-lib api definitions or constants. To avoid importing the extension
# code directly we keep a copy here.
PORT_SECURITYGROUPS = 'security_groups'
# TODO(lucasagomes): Create constants for other LSP types
LSP_TYPE_LOCALNET = 'localnet'
LSP_TYPE_VIRTUAL = 'virtual'
LSP_TYPE_EXTERNAL = 'external'
LSP_OPTIONS_VIRTUAL_PARENTS_KEY = 'virtual-parents'
LSP_OPTIONS_VIRTUAL_IP_KEY = 'virtual-ip'
HA_CHASSIS_GROUP_DEFAULT_NAME = 'default_ha_chassis_group'
HA_CHASSIS_GROUP_HIGHEST_PRIORITY = 32767
# OVN igmp options
MCAST_SNOOP = 'mcast_snoop'
MCAST_FLOOD_UNREGISTERED = 'mcast_flood_unregistered'
EXTERNAL_PORT_TYPES = (portbindings.VNIC_DIRECT,
portbindings.VNIC_DIRECT_PHYSICAL,
portbindings.VNIC_MACVTAP)
NEUTRON_AVAILABILITY_ZONES = 'neutron-availability-zones'
OVN_CMS_OPTIONS = 'ovn-cms-options'
CMS_OPT_CHASSIS_AS_GW = 'enable-chassis-as-gw'
CMS_OPT_AVAILABILITY_ZONES = 'availability-zones'
# OVN vlan transparency option
VLAN_PASSTHRU = 'vlan-passthru'
|
# Generated by Django 2.1.15 on 2020-09-16 20:20
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0009_alter_user_last_name_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('email', models.EmailField(max_length=255, unique=True)),
('name', models.CharField(max_length=255)),
('is_active', models.BooleanField(default=True)),
('is_staff', models.BooleanField(default=False)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'abstract': False,
},
),
]
|
## INFO ########################################################################
## ##
## COUBLET ##
## ======= ##
## ##
## Cross-platform desktop client to follow posts from COUB ##
## Version: 0.6.93.172 (20140814) ##
## ##
## File: widgets/media.py ##
## ##
## Designed and written by Peter Varo. Copyright (c) 2014 ##
## License agreement is provided in the LICENSE file ##
## For more info visit: https://github.com/petervaro/coub ##
## ##
## Copyright (c) 2014 Coub Ltd and/or its suppliers and licensors, ##
## 5 Themistokli Dervi Street, Elenion Building, 1066 Nicosia, Cyprus. ##
## All rights reserved. COUB (TM) is a trademark of Coub Ltd. ##
## http://coub.com ##
## ##
######################################################################## INFO ##
# Import python modules
from textwrap import fill
# Import PyQt5 modules
from PyQt5.QtGui import QPixmap
from PyQt5.QtCore import Qt, QUrl
from PyQt5.QtMultimediaWidgets import QVideoWidget
from PyQt5.QtWidgets import QWidget, QVBoxLayout, QLabel
from PyQt5.QtMultimedia import QMediaContent, QMediaPlayer
#------------------------------------------------------------------------------#
class CoubletMediaPlayerWidget(QWidget):
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
def __init__(self, width, height, thumb_file, video_file, audio_file=None,
loop_video=False, loop_audio=False, error_font=None,
error_color=None, error_background=None, parent=None):
super().__init__(parent)
# Restrict size
self.setFixedSize(width, height)
# Store static values
self._error_font = error_font
self._error_color = error_color
self._error_background = error_background
# Create thumbnail preview
self._thumb = thumb = QLabel(self)
thumb.setPixmap(QPixmap(thumb_file).scaled(width, height))
# Create video player and its content
self._video = video = QVideoWidget(self)
video.setFixedSize(width, height)
# Set video player file
self._video_player = video_player = QMediaPlayer(None, QMediaPlayer.VideoSurface)
video_player.setVideoOutput(video)
video_player.error.connect(lambda: self.set_error(self.get_error()))
video_player.setMedia(QMediaContent(QUrl.fromLocalFile(video_file)))
# Set looping for video
if loop_video:
self._loop_video = False
video_player.stateChanged.connect(self.on_video_player_state_changed)
        # Set up a separate player for the audio file, if any
if audio_file:
self._audio_player = audio_player = QMediaPlayer(None, QMediaPlayer.StreamPlayback)
audio_player.error.connect(lambda: self.set_error(self.get_error()))
# Store MediaContent, otherwise it will be GC'd after stop()
self._audio = QMediaContent(QUrl(audio_file))
audio_player.setMedia(self._audio)
        # Set looping for audio
if loop_audio:
self._loop_audio = False
audio_player.stateChanged.connect(self.on_audio_player_state_changed)
# Make sure all flags are set and
# only the proper widgets are visible
self.stop()
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
def on_video_player_state_changed(self, event):
# If playing => has to be looped => start over!
if self._loop_video:
self._video_player.play()
# If paused
else:
# Reset looping
self._loop_video = True
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
def on_audio_player_state_changed(self, event):
# If playing => has to be looped => start over!
if self._loop_audio:
self._audio_player.play()
# If paused
else:
# Reset looping
self._loop_audio = True
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
def set_error(self, message):
try:
self._loop_audio = False
except AttributeError:
pass
self._loop_video = False
self._video.hide()
self._thumb.hide()
layout = QVBoxLayout()
error_label = QLabel(fill('ERROR: {}'.format(message.upper()), width=32))
if self._error_font:
error_label.setFont(self._error_font)
if self._error_color:
error_label.setPalette(self._error_color)
if self._error_background:
self.setPalette(self._error_background)
self.setAutoFillBackground(True)
layout.addWidget(error_label, alignment=Qt.AlignHCenter)
self.setLayout(layout)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
def get_error(self):
message = self._video_player.errorString()
if message:
return '{!r} @video'.format(message)
try:
message = self._audio_player.errorString()
if message:
return '{!r} @audio'.format(message)
except AttributeError:
pass
return 'unknown'
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
def play(self):
if self._stopped:
self._stopped = False
self._video.show()
self._thumb.hide()
try:
self._loop_audio = True
self._audio_player.play()
except AttributeError:
pass
self._loop_video = True
self._video_player.play()
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
def pause(self):
try:
self._loop_audio = False
self._audio_player.pause()
except AttributeError:
pass
self._loop_video = False
self._video_player.pause()
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
def stop(self):
self._stopped = True
self._thumb.show()
self._video.hide()
try:
self._loop_audio = False
self._audio_player.stop()
except AttributeError:
pass
self._loop_video = False
self._video_player.stop()
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
def state(self):
return self._video_player.state()
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
def set_volume(self, volume):
try:
self._audio_player.setVolume(volume)
except AttributeError:
pass
self._video_player.setVolume(volume)
|
# To get the current date and time we need to use the datetime library
from datetime import datetime
# The now function returns current date and time
today = datetime.now()
# use the day, month, year, hour, minute and second attributes
# to display only part of the date
# All these attributes are integers
# Convert them to strings before concatenating them to another string
print('Day: ' + str(today.day))
print('Month: ' + str(today.month))
print('Year: ' + str(today.year))
print('Hour: ' + str(today.hour))
print('Minute: ' + str(today.minute))
print('Second: ' + str(today.second))
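# A hedged aside (not part of the original snippet): strftime can format the
# same parts in a single call; note that %d and %m are zero-padded.
print(today.strftime('Day: %d, Month: %m, Year: %Y'))
print(today.strftime('Time: %H:%M:%S'))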
|
import torch
import torch.nn as nn
import torch.nn.functional as F
class Generator(nn.Module):
def __init__ (self, noise_size=201, cube_resolution=32):
super(Generator, self).__init__()
self.noise_size = noise_size
self.cube_resolution = cube_resolution
self.gen_conv1 = torch.nn.ConvTranspose3d(self.noise_size, 256, kernel_size=[4,4,4], stride=[2,2,2], padding=1)
self.gen_conv2 = torch.nn.ConvTranspose3d(256, 128, kernel_size=[4,4,4], stride=[2,2,2], padding=1)
self.gen_conv3 = torch.nn.ConvTranspose3d(128, 64, kernel_size=[4,4,4], stride=[2,2,2], padding=1)
self.gen_conv4 = torch.nn.ConvTranspose3d(64, 32, kernel_size=[4,4,4], stride=[2,2,2], padding=1)
self.gen_conv5 = torch.nn.ConvTranspose3d(32, 1, kernel_size=[4,4,4], stride=[2,2,2], padding=1)
self.gen_bn1 = nn.BatchNorm3d(256)
self.gen_bn2 = nn.BatchNorm3d(128)
self.gen_bn3 = nn.BatchNorm3d(64)
self.gen_bn4 = nn.BatchNorm3d(32)
def forward(self, x, condition):
condition_tensor = condition * torch.ones([x.shape[0],1], device=x.device)
x = torch.cat([x, condition_tensor], dim=1)
x = x.view(x.shape[0],self.noise_size,1,1,1)
x = F.relu(self.gen_bn1(self.gen_conv1(x)))
x = F.relu(self.gen_bn2(self.gen_conv2(x)))
x = F.relu(self.gen_bn3(self.gen_conv3(x)))
x = F.relu(self.gen_bn4(self.gen_conv4(x)))
x = self.gen_conv5(x)
x = torch.sigmoid(x)
return x.squeeze()
class Discriminator(nn.Module):
def __init__ (self, cube_resolution=32):
super(Discriminator, self).__init__()
self.cube_resolution = cube_resolution
self.disc_conv1 = torch.nn.Conv3d(2, 32, kernel_size=[4,4,4], stride=[2,2,2], padding=1)
self.disc_conv2 = torch.nn.Conv3d(32, 64, kernel_size=[4,4,4], stride=[2,2,2], padding=1)
self.disc_conv3 = torch.nn.Conv3d(64, 128, kernel_size=[4,4,4], stride=[2,2,2], padding=1)
self.disc_conv4 = torch.nn.Conv3d(128, 256, kernel_size=[4,4,4], stride=[2,2,2], padding=1)
self.disc_conv5 = torch.nn.Conv3d(256, 1, kernel_size=[4,4,4], stride=[2,2,2], padding=1)
self.disc_bn1 = nn.BatchNorm3d(32)
self.disc_bn2 = nn.BatchNorm3d(64)
self.disc_bn3 = nn.BatchNorm3d(128)
self.disc_bn4 = nn.BatchNorm3d(256)
self.LRelu = nn.LeakyReLU(0.2, True)
def forward(self, x, condition):
x = x.unsqueeze(1)
condition_tensor = condition * torch.ones_like(x, device=x.device)
x = torch.cat([x, condition_tensor], dim=1)
x = self.LRelu(self.disc_bn1(self.disc_conv1(x)))
x = self.LRelu(self.disc_bn2(self.disc_conv2(x)))
x = self.LRelu(self.disc_bn3(self.disc_conv3(x)))
x = self.LRelu(self.disc_bn4(self.disc_conv4(x)))
x = self.disc_conv5(x)
x = torch.sigmoid(x)
return x.squeeze()
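# A hedged usage sketch (not part of the original module); shapes assume the
# defaults above: 200 noise dims plus 1 condition slot (noise_size=201) and a
# 32^3 voxel cube.
if __name__ == "__main__":
    gen = Generator()
    disc = Discriminator()
    noise = torch.randn(4, 200)    # batch of 4; the condition is appended in forward()
    condition = 1.0                # scalar condition broadcast by both networks
    fake = gen(noise, condition)   # -> (4, 32, 32, 32)
    score = disc(fake, condition)  # -> (4,)
    print(fake.shape, score.shape)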
|
from datetime import datetime
from io import BytesIO
from io import TextIOWrapper
import os
from pathlib import Path
import sys
import tarfile
from typing import Dict
from typing import Iterable
from typing import Optional
from typing import Union
import zipfile
import yaml
class Archiver:
def __init__(self, file, mode="r"):
if Path(file).suffix == ".tar":
self.type = "tar"
elif Path(file).suffix == ".tgz" or Path(file).suffixes == [".tar", ".gz"]:
self.type = "tar"
if mode == "w":
mode = "w:gz"
elif Path(file).suffix == ".tbz2" or Path(file).suffixes == [".tar", ".bz2"]:
self.type = "tar"
if mode == "w":
mode = "w:bz2"
elif Path(file).suffix == ".txz" or Path(file).suffixes == [".tar", ".xz"]:
self.type = "tar"
if mode == "w":
mode = "w:xz"
elif Path(file).suffix == ".zip":
self.type = "zip"
else:
raise ValueError(f"Cannot detect archive format: type={file}")
if self.type == "tar":
self.fopen = tarfile.open(file, mode=mode)
elif self.type == "zip":
self.fopen = zipfile.ZipFile(file, mode=mode)
else:
raise ValueError(f"Not supported: type={type}")
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.fopen.close()
def close(self):
self.fopen.close()
def __iter__(self):
if self.type == "tar":
return iter(self.fopen)
elif self.type == "zip":
return iter(self.fopen.infolist())
else:
raise ValueError(f"Not supported: type={self.type}")
def add(self, filename, arcname=None):
if self.type == "tar":
return self.fopen.add(filename, arcname)
elif self.type == "zip":
return self.fopen.write(filename, arcname)
else:
raise ValueError(f"Not supported: type={self.type}")
def addfile(self, info, fileobj):
if self.type == "tar":
return self.fopen.addfile(info, fileobj)
elif self.type == "zip":
return self.fopen.writestr(info, fileobj.read())
else:
raise ValueError(f"Not supported: type={self.type}")
def generate_info(self, name, size) -> Union[tarfile.TarInfo, zipfile.ZipInfo]:
"""Generate TarInfo using system information"""
if self.type == "tar":
tarinfo = tarfile.TarInfo(str(name))
if os.name == "posix":
tarinfo.gid = os.getgid()
tarinfo.uid = os.getuid()
tarinfo.mtime = datetime.now().timestamp()
tarinfo.size = size
# Keep mode as default
return tarinfo
elif self.type == "zip":
zipinfo = zipfile.ZipInfo(str(name), datetime.now().timetuple()[:6])
zipinfo.file_size = size
return zipinfo
else:
raise ValueError(f"Not supported: type={self.type}")
def get_name_from_info(self, info):
if self.type == "tar":
assert isinstance(info, tarfile.TarInfo), type(info)
return info.name
elif self.type == "zip":
assert isinstance(info, zipfile.ZipInfo), type(info)
return info.filename
else:
raise ValueError(f"Not supported: type={self.type}")
def extract(self, info, path=None):
if self.type == "tar":
return self.fopen.extract(info, path)
elif self.type == "zip":
return self.fopen.extract(info, path)
else:
raise ValueError(f"Not supported: type={self.type}")
def extractfile(self, info, mode="r"):
if self.type == "tar":
f = self.fopen.extractfile(info)
if mode == "r":
return TextIOWrapper(f)
else:
return f
elif self.type == "zip":
if mode == "rb":
mode = "r"
return self.fopen.open(info, mode)
else:
raise ValueError(f"Not supported: type={self.type}")
def find_path_and_change_it_recursive(value, src: str, tgt: str):
if isinstance(value, dict):
return {
k: find_path_and_change_it_recursive(v, src, tgt) for k, v in value.items()
}
elif isinstance(value, (list, tuple)):
return [find_path_and_change_it_recursive(v, src, tgt) for v in value]
elif isinstance(value, str) and Path(value) == Path(src):
return tgt
else:
return value
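def _demo_rewrite_path():
    # A hedged example (illustrative only): rewriting one path inside a nested
    # config, as unpack() does for each file extracted from the archive.
    conf = {"model": "exp/model.pth", "epochs": [1, 2]}
    return find_path_and_change_it_recursive(conf, "exp/model.pth", "out/exp/model.pth")
    # -> {"model": "out/exp/model.pth", "epochs": [1, 2]}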
def get_dict_from_cache(meta: Union[Path, str]) -> Optional[Dict[str, str]]:
meta = Path(meta)
outpath = meta.parent.parent
if not meta.exists():
return None
with meta.open("r", encoding="utf-8") as f:
d = yaml.safe_load(f)
assert isinstance(d, dict), type(d)
yaml_files = d["yaml_files"]
files = d["files"]
assert isinstance(yaml_files, dict), type(yaml_files)
assert isinstance(files, dict), type(files)
retval = {}
for key, value in list(yaml_files.items()) + list(files.items()):
if not (outpath / value).exists():
return None
retval[key] = str(outpath / value)
return retval
def unpack(
input_archive: Union[Path, str],
outpath: Union[Path, str],
use_cache: bool = True,
) -> Dict[str, str]:
"""Scan all files in the archive file and return as a dict of files.
Examples:
tarfile:
model.pth
some1.file
some2.file
>>> unpack("tarfile", "out")
{'asr_model_file': 'out/model.pth'}
"""
input_archive = Path(input_archive)
outpath = Path(outpath)
with Archiver(input_archive) as archive:
for info in archive:
if Path(archive.get_name_from_info(info)).name == "meta.yaml":
if (
use_cache
and (outpath / Path(archive.get_name_from_info(info))).exists()
):
retval = get_dict_from_cache(
outpath / Path(archive.get_name_from_info(info))
)
if retval is not None:
return retval
d = yaml.safe_load(archive.extractfile(info))
assert isinstance(d, dict), type(d)
yaml_files = d["yaml_files"]
files = d["files"]
assert isinstance(yaml_files, dict), type(yaml_files)
assert isinstance(files, dict), type(files)
break
else:
raise RuntimeError("Format error: not found meta.yaml")
for info in archive:
fname = archive.get_name_from_info(info)
outname = outpath / fname
outname.parent.mkdir(parents=True, exist_ok=True)
if fname in set(yaml_files.values()):
d = yaml.safe_load(archive.extractfile(info))
# Rewrite yaml
for info2 in archive:
name = archive.get_name_from_info(info2)
d = find_path_and_change_it_recursive(d, name, str(outpath / name))
with outname.open("w", encoding="utf-8") as f:
yaml.safe_dump(d, f)
else:
archive.extract(info, path=outpath)
retval = {}
for key, value in list(yaml_files.items()) + list(files.items()):
retval[key] = str(outpath / value)
return retval
def _to_relative_or_resolve(f):
# Resolve to avoid symbolic link
p = Path(f).resolve()
try:
# Change to relative if it can
p = p.relative_to(Path(".").resolve())
except ValueError:
pass
return str(p)
def pack(
files: Dict[str, Union[str, Path]],
yaml_files: Dict[str, Union[str, Path]],
outpath: Union[str, Path],
option: Iterable[Union[str, Path]] = (),
):
for v in list(files.values()) + list(yaml_files.values()) + list(option):
if not Path(v).exists():
raise FileNotFoundError(f"No such file or directory: {v}")
files = {k: _to_relative_or_resolve(v) for k, v in files.items()}
yaml_files = {k: _to_relative_or_resolve(v) for k, v in yaml_files.items()}
option = [_to_relative_or_resolve(v) for v in option]
meta_objs = dict(
files=files,
yaml_files=yaml_files,
timestamp=datetime.now().timestamp(),
python=sys.version,
)
try:
import torch
meta_objs.update(torch=torch.__version__)
except ImportError:
pass
try:
import espnet
meta_objs.update(espnet=espnet.__version__)
except ImportError:
pass
Path(outpath).parent.mkdir(parents=True, exist_ok=True)
with Archiver(outpath, mode="w") as archive:
# Write packed/meta.yaml
fileobj = BytesIO(yaml.safe_dump(meta_objs).encode())
info = archive.generate_info("meta.yaml", fileobj.getbuffer().nbytes)
archive.addfile(info, fileobj=fileobj)
for f in list(yaml_files.values()) + list(files.values()) + list(option):
archive.add(f)
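# A hedged usage sketch (illustrative only; the file names are hypothetical):
# pack a model file and its config, then unpack them into another directory.
if __name__ == "__main__":
    pack(
        files={"asr_model_file": "model.pth"},
        yaml_files={"asr_train_config": "config.yaml"},
        outpath="packed.tgz",
    )
    print(unpack("packed.tgz", "out"))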
|
import _plotly_utils.basevalidators
class XValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self, plotly_name="x", parent_name="scattermapbox.marker.colorbar", **kwargs
):
super(XValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
max=kwargs.pop("max", 3),
min=kwargs.pop("min", -2),
**kwargs
)
|
from mosaic.simulation.parameter import Parameter
from mosaic.simulation.scenario import WorkflowListTask
def get_configuration_DummyClassifier():
DummyClassifier = WorkflowListTask(is_ordered=False, name="DummyClassifier",
tasks=["DummyClassifier__strategy"])
sampler = {
"DummyClassifier__strategy": Parameter("DummyClassifier__strategy",
["stratified", "most_frequent", "prior", "uniform"], "choice", "string"),
}
rules = []
return DummyClassifier, sampler, rules
|
import csv
import requests
df = open("bridgeData3.csv",'r').readlines()
fin = open('final.csv','r').readlines()
finCsv = fin[1:]
# url = https://b2ptc.herokuapp.com/bridges
finalCsv = df[1:]
obj = {}
for i in finalCsv:
x = i.split(',')
obj[x[1]] = {'bridge_name':x[0],'proj_code':x[1],'before_img':x[2],'after_img':x[3]}
for key in obj:
    print(obj[key])
# for i in finCsv:
# x = i.split(',')
# if x[6] in obj:
# y= obj[x[6]] +{'myname':'bill'}
# print(y)
# for i in finalCsv:
# x = i.split(',')
# requests.put(url+x[0],data={before:x[2],after:x[3]})
# pull each id,before image and after from df
# for each data item do a put request with the id as the param id
# and then put the before and after image in an dict and place it as the data for the put request
|
#!/usr/bin/env python
"""RandDisc.py: Randomize a playlist to a collection of folders."""
# Author: Jeroen Lodder (https://github.com/Jeroen6/randdisc)
# License: Public domain
# Version: 0.1
#
# How does it work?
#
# 0. Note: it moves files. (see simulate)
#
# 1. Set "source", "destination", "tracks" and "discs" as you please.
# An example could be:
# Takes files from "/playlist", move them to "collection/"
# with "10" tracks per disc with a maximum of "20" discs.
#
# 2. Run it.
# The script will take a random file from source,
# and move it to a folder named CD## with a track number prepended.
# That's it!
import os, shutil
import random
import sys
# Settings:
# Source file directory (no folders!), must end with /
source = "playlist/"
# Destination directory, must end with /
destination = "USB/"
# maximum tracks per disc folder
tracks = 99
# maximum disc folders
discs = 99
# Simulate (set to 1 to not move any files)
sim = 0
# Program:
# Get list of all files in source
files = os.listdir(source)
files.sort()
# Loop until reached max discs
d = 1
while (d <= discs):
t = 1
# Loop until reached max tracks on disc
while (t <= tracks):
        # Loop until out of source files
if (len(files) == 0):
sys.exit('Done! Out of files!')
        # Pick a random file from the list
item = random.choice(files)
files.remove(item)
# Debug: Output the filename with disc and track number
        #print(str(d) + " - " + str(t) + " - " + item)
#
# The moving part
# Compile the destination part (destination/CD##/file)
destdir = destination + "CD" + str(d).zfill(len(str(discs))) + "/"
# Prepend track number
filename = str(t).zfill(len(str(tracks))) + " - " + item
# Make the destination directory
if not os.path.exists(os.path.dirname(destdir)):
os.makedirs(os.path.dirname(destdir))
# Move the file
src = source + item
dst = destdir + filename
if not sim:
shutil.move(src,dst)
        print(src + "; " + dst)
# Increment track counter
t = t + 1
# Increment disc counter
d = d + 1
# It also lets you know when you have selected too many files.
if (len(files) > 0):
if not (d <= discs):
sys.exit('Reached disc limit!')
sys.exit('Done')
|
from __future__ import absolute_import, unicode_literals
from celery import Celery
from django.conf import settings
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sending_scheduler.settings')
app = Celery('sending_scheduler')
app.config_from_object(settings, namespace='CELERY')
# Load task modules from all registered Django apps.
app.autodiscover_tasks()
|
from flask_mail import Message
from flask import render_template
from . import mail
def mail_message(subject,template,to,**kwargs):
sender_email = '10silvianjoki@gmail.com'
email = Message(subject, sender=sender_email, recipients=[to])
email.body= render_template(template + ".txt",**kwargs)
email.html = render_template(template + ".html",**kwargs)
mail.send(email)
|
## 1. Lists ##
row_2 = ['Instagram', 0.0, 'USD', 2161558, 4.5]
row_3 = ['Clash of Clans', 0.0, 'USD', 2130805, 4.5]
## 2. Indexing ##
row_1 = ['Facebook', 0.0, 'USD', 2974676, 3.5]
row_2 = ['Instagram', 0.0, 'USD', 2161558, 4.5]
row_3 = ['Clash of Clans', 0.0, 'USD', 2130805, 4.5]
# get individual ratings
ratings_1 = row_1[3]
ratings_2 = row_2[3]
ratings_3 = row_3[3]
# compute average
total = ratings_1 + ratings_2 + ratings_3
average = total / 3
## 3. Negative Indexing ##
row_1 = ['Facebook', 0.0, 'USD', 2974676, 3.5]
row_2 = ['Instagram', 0.0, 'USD', 2161558, 4.5]
row_3 = ['Clash of Clans', 0.0, 'USD', 2130805, 4.5]
# get individual ratings with negative index
rating_1 = row_1[-1]
rating_2 = row_2[-1]
rating_3 = row_3[-1]
# compute average_rating
total_rating = rating_1 + rating_2 + rating_3
average_rating = total_rating / 3
## 4. Retrieving Multiple List Elements ##
row_1 = ['Facebook', 0.0, 'USD', 2974676, 3.5]
row_2 = ['Instagram', 0.0, 'USD', 2161558, 4.5]
row_3 = ['Clash of Clans', 0.0, 'USD', 2130805, 4.5]
row_4 = ['Temple Run', 0.0, 'USD', 1724546, 4.5]
row_5 = ['Pandora - Music & Radio', 0.0, 'USD', 1126879, 4.0]
# get individual ratings for fb, insta and pandora
fb_rating_data = [row_1[0], row_1[3], row_1[-1]]
insta_rating_data = [row_2[0], row_2[3], row_2[4]]
pandora_rating_data = [row_5[0], row_5[3], row_5[4]]
# compute the average rating
avg_rating = (fb_rating_data[2] + insta_rating_data[2] + pandora_rating_data[2]) / 3
## 5. List Slicing ##
row_1 = ['Facebook', 0.0, 'USD', 2974676, 3.5]
row_2 = ['Instagram', 0.0, 'USD', 2161558, 4.5]
row_3 = ['Clash of Clans', 0.0, 'USD', 2130805, 4.5]
row_4 = ['Temple Run', 0.0, 'USD', 1724546, 4.5]
row_5 = ['Pandora - Music & Radio', 0.0, 'USD', 1126879, 4.0]
first_4_fb = row_1[:4]
last_3_fb = row_1[-3:]
pandora_3_4 = row_5[2:4]
print(first_4_fb)
print(last_3_fb)
print(pandora_3_4)
## 6. List of Lists ##
row_1 = ['Facebook', 0.0, 'USD', 2974676, 3.5]
row_2 = ['Instagram', 0.0, 'USD', 2161558, 4.5]
row_3 = ['Clash of Clans', 0.0, 'USD', 2130805, 4.5]
row_4 = ['Temple Run', 0.0, 'USD', 1724546, 4.5]
row_5 = ['Pandora - Music & Radio', 0.0, 'USD', 1126879, 4.0]
# collect data set
app_data_set = [row_1, row_2, row_3, row_4, row_5]
# computes average ratings
avg_rating = (app_data_set[0][-1] + app_data_set[1][-1] + app_data_set[2][-1] + app_data_set[3][-1] + app_data_set[4][-1]) / 5
## 7. Opening a File ##
from csv import reader
opened_file = open('AppleStore.csv')
read_file = reader(opened_file)
apps_data = list(read_file)
# print results
print(len(apps_data))
print(apps_data[0])
print(apps_data[1:3])
## 8. Repetitive Processes ##
row_1 = ['Facebook', 0.0, 'USD', 2974676, 3.5]
row_2 = ['Instagram', 0.0, 'USD', 2161558, 4.5]
row_3 = ['Clash of Clans', 0.0, 'USD', 2130805, 4.5]
row_4 = ['Temple Run', 0.0, 'USD', 1724546, 4.5]
row_5 = ['Pandora - Music & Radio', 0.0, 'USD', 1126879, 4.0]
app_data_set = [row_1, row_2, row_3, row_4, row_5]
for row in app_data_set:
print(row)
## 9. For Loops ##
row_1 = ['Facebook', 0.0, 'USD', 2974676, 3.5]
row_2 = ['Instagram', 0.0, 'USD', 2161558, 4.5]
row_3 = ['Clash of Clans', 0.0, 'USD', 2130805, 4.5]
row_4 = ['Temple Run', 0.0, 'USD', 1724546, 4.5]
row_5 = ['Pandora - Music & Radio', 0.0, 'USD', 1126879, 4.0]
app_data_set = [row_1, row_2, row_3, row_4, row_5]
# initialize sum
rating_sum = 0
# loop through each row of the dataset and sum the ratings
for row in app_data_set:
rating = row[-1]
rating_sum += rating
print(rating_sum)
# computes avg_rating
avg_rating = rating_sum / len(app_data_set)
## 10. The Average App Rating ##
opened_file = open('AppleStore.csv')
from csv import reader
read_file = reader(opened_file)
apps_data = list(read_file)
rating_sum = 0
for row in apps_data[1:]:
rating = float(row[7])
rating_sum += rating
avg_rating = rating_sum / len(apps_data[1:])
## 11. Alternative Way to Compute an Average ##
opened_file = open('AppleStore.csv')
from csv import reader
read_file = reader(opened_file)
apps_data = list(read_file)
all_ratings = []
for row in apps_data[1:]:
rating = float(row[7])
all_ratings.append(rating)
avg_rating = sum(all_ratings) / len(all_ratings)
avg_rating
|
# -*- coding: utf-8 -*-
# File: trainers.py
import sys
import os
import tensorflow as tf
import multiprocessing as mp
from ..callbacks import RunOp, CallbackFactory
from ..tfutils.sesscreate import NewSessionCreator
from ..utils import logger
from ..utils.argtools import map_arg
from ..utils.develop import HIDE_DOC, log_deprecated
from ..tfutils import get_global_step_var
from ..tfutils.distributed import get_distributed_session_creator
from ..tfutils.tower import TrainTowerContext
from ..input_source import QueueInput, FeedfreeInput
from ..graph_builder.training import (
SyncMultiGPUParameterServerBuilder,
SyncMultiGPUReplicatedBuilder,
AsyncMultiGPUBuilder)
from ..graph_builder.distributed import DistributedReplicatedBuilder, DistributedParameterServerBuilder
from ..graph_builder.utils import override_to_local_variable
from .tower import SingleCostTrainer
__all__ = ['NoOpTrainer', 'SimpleTrainer',
'QueueInputTrainer',
'SyncMultiGPUTrainer',
'SyncMultiGPUTrainerReplicated',
'SyncMultiGPUTrainerParameterServer',
'AsyncMultiGPUTrainer',
'DistributedTrainerParameterServer',
'DistributedTrainerReplicated',
'HorovodTrainer']
def _int_to_range(x):
if isinstance(x, int):
assert x > 0, "Argument cannot be {}!".format(x)
return list(range(x))
return x
class SimpleTrainer(SingleCostTrainer):
"""
Single-GPU single-cost single-tower trainer.
"""
def _setup_graph(self, input, get_cost_fn, get_opt_fn):
logger.info("Building graph for a single training tower ...")
with TrainTowerContext(''):
grads = self._make_get_grad_fn(input, get_cost_fn, get_opt_fn)()
opt = get_opt_fn()
self.train_op = opt.apply_gradients(grads, name='min_op')
return []
class NoOpTrainer(SimpleTrainer):
"""
A special trainer that builds the graph (if given a tower function)
and does nothing in each step.
It is used to only run the callbacks.
Note that `steps_per_epoch` and `max_epochs` are still valid options.
"""
def run_step(self):
pass
# Only exists for type check & back-compatibility
class QueueInputTrainer(SimpleTrainer):
def _setup_graph(self, input, get_cost_fn, get_opt_fn):
assert isinstance(input, QueueInput), input
return super(QueueInputTrainer, self)._setup_graph(input, get_cost_fn, get_opt_fn)
class SyncMultiGPUTrainerParameterServer(SingleCostTrainer):
__doc__ = SyncMultiGPUParameterServerBuilder.__doc__
devices = None
"""
List of GPU ids.
"""
@map_arg(gpus=_int_to_range)
def __init__(self, gpus, ps_device=None):
"""
Args:
gpus ([int]): list of GPU ids.
ps_device: either 'gpu' or 'cpu', where variables are stored.
The default value is subject to change.
"""
self.devices = gpus
if ps_device is None:
ps_device = 'gpu' if len(gpus) <= 2 else 'cpu'
self._builder = SyncMultiGPUParameterServerBuilder(gpus, ps_device)
super(SyncMultiGPUTrainerParameterServer, self).__init__()
def _setup_graph(self, input, get_cost_fn, get_opt_fn):
if len(self.devices) > 1:
assert isinstance(input, FeedfreeInput), input
self.train_op = self._builder.build(
self._make_get_grad_fn(input, get_cost_fn, get_opt_fn), get_opt_fn)
return []
def SyncMultiGPUTrainer(gpus):
"""
Return a default multi-GPU trainer, if you don't care about the details.
It may not be the most efficient one for your task.
Args:
gpus (list[int]): list of GPU ids.
"""
return SyncMultiGPUTrainerParameterServer(gpus, ps_device='cpu')
class AsyncMultiGPUTrainer(SingleCostTrainer):
__doc__ = AsyncMultiGPUBuilder.__doc__
devices = None
"""
List of GPU ids.
"""
@map_arg(gpus=_int_to_range)
def __init__(self, gpus, scale_gradient=True):
"""
Args:
gpus ([int]): list of GPU ids.
scale_gradient (bool): if True, will scale each gradient by ``1.0/nr_gpu``.
"""
self.devices = gpus
self._builder = AsyncMultiGPUBuilder(gpus, scale_gradient)
super(AsyncMultiGPUTrainer, self).__init__()
def _setup_graph(self, input, get_cost_fn, get_opt_fn):
if len(self.devices) > 1:
assert isinstance(input, FeedfreeInput), input
self.train_op = self._builder.build(
self._make_get_grad_fn(input, get_cost_fn, get_opt_fn), get_opt_fn)
return []
class SyncMultiGPUTrainerReplicated(SingleCostTrainer):
__doc__ = SyncMultiGPUReplicatedBuilder.__doc__
devices = None
"""
List of GPU ids.
"""
@map_arg(gpus=_int_to_range)
def __init__(self, gpus, average=True, mode=None, use_nccl=None):
"""
Args:
gpus (int or [int]): list of GPU ids.
average (bool): whether to average or sum gradients.
mode (str or None): Gradient aggregation mode.
Supported values: ['nccl', 'hierarchical', 'cpu'].
Default to pick automatically by heuristics.
These modes may have slight (within 5%) differences in speed.
"hierarchical" mode was designed for DGX-like 8GPU machines.
use_nccl: deprecated option
"""
self.devices = gpus
if use_nccl is not None:
mode = 'nccl' if use_nccl else None
log_deprecated("use_nccl option", "Use the `mode` option instead!", "2019-01-31")
if mode is None:
mode = 'hierarchical' if len(gpus) == 8 else 'nccl'
mode = mode.lower()
self._builder = SyncMultiGPUReplicatedBuilder(gpus, average, mode)
super(SyncMultiGPUTrainerReplicated, self).__init__()
def _setup_graph(self, input, get_cost_fn, get_opt_fn):
if len(self.devices) > 1:
assert isinstance(input, FeedfreeInput), input
self.train_op, post_init_op = self._builder.build(
self._make_get_grad_fn(input, get_cost_fn, get_opt_fn), get_opt_fn)
cb = RunOp(
post_init_op,
run_before=True, run_as_trigger=True, verbose=True)
return [cb]
class DistributedTrainerBase(SingleCostTrainer):
devices = None
def __init__(self, gpus, server):
super(DistributedTrainerBase, self).__init__()
self.devices = gpus
self.server = server
self.job_name = server.server_def.job_name
logger.info("Distributed training on cluster:\n" + str(server.server_def.cluster))
def join(self):
logger.info("Calling server.join() on {}:{}".format(self.job_name, self.server.server_def.task_index))
logger.info("Kill me with 'kill {}'".format(os.getpid()))
self.server.join() # this function will never return tensorflow#4713
raise RuntimeError("This is a bug. Server.join() for should never return!")
@HIDE_DOC
def initialize(self, session_creator, session_init):
if not isinstance(session_creator, NewSessionCreator) or \
session_creator.user_provided_config:
raise ValueError(
"You are not allowed to set session_creator or session_config for distributed training! "
"To use a custom session config, pass it to tf.train.Server.")
super(DistributedTrainerBase, self).initialize(
get_distributed_session_creator(self.server), session_init)
class DistributedTrainerParameterServer(DistributedTrainerBase):
__doc__ = DistributedParameterServerBuilder.__doc__
@map_arg(gpus=_int_to_range)
def __init__(self, gpus, server, caching_device='cpu'):
"""
Args:
gpus ([int]): list of GPU ids.
server (tf.train.Server): the server with ps and workers.
caching_device (str): either 'cpu' or 'gpu'. The device to cache variables copied from PS
"""
super(DistributedTrainerParameterServer, self).__init__(gpus, server)
assert self.job_name in ['ps', 'worker'], self.job_name
if self.job_name == 'ps':
self.join()
self._builder = DistributedParameterServerBuilder(gpus, server, caching_device)
self.is_chief = self._builder.is_chief
def _setup_graph(self, input, get_cost_fn, get_opt_fn):
assert isinstance(input, FeedfreeInput), input
self.train_op = self._builder.build(
self._make_get_grad_fn(input, get_cost_fn, get_opt_fn), get_opt_fn)
return []
class DistributedTrainerReplicated(DistributedTrainerBase):
__doc__ = DistributedReplicatedBuilder.__doc__
@map_arg(gpus=_int_to_range)
def __init__(self, gpus, server):
"""
Args:
gpus (list[int]): list of GPU ids.
server (tf.train.Server): the server with ps and workers.
"""
super(DistributedTrainerReplicated, self).__init__(gpus, server)
assert self.job_name in ['ps', 'worker'], self.job_name
if self.job_name == 'ps':
self.join()
self._builder = DistributedReplicatedBuilder(gpus, server)
self.is_chief = self._builder.is_chief
def _setup_input(self, inputs_desc, input):
with override_to_local_variable():
get_global_step_var() # gs should be local
# input source may create variable (queue size summary)
# TODO This is not good because we don't know from here
# whether something should be global or local. We now assume
# they should be local.
assert not input.setup_done()
return input.setup(inputs_desc)
def _setup_graph(self, input, get_cost_fn, get_opt_fn):
assert isinstance(input, FeedfreeInput), input
self.train_op, initial_sync_op, model_sync_op = self._builder.build(
self._make_get_grad_fn(input, get_cost_fn, get_opt_fn), get_opt_fn)
callbacks = []
# Initial syncing vars from PS
cb = RunOp(lambda: initial_sync_op,
run_before=True, run_as_trigger=False, verbose=True)
cb.chief_only = False
callbacks.append(cb)
# Sync model_variables to PS, only chief needs to do this
if model_sync_op:
cb = RunOp(lambda: model_sync_op,
run_before=False, run_as_trigger=True, verbose=True)
logger.warn("For efficiency, local MODEL_VARIABLES are only synced to PS once "
"every epoch. Be careful if you save the model more frequently than this.")
callbacks.append(cb)
return callbacks
@property
def _main_tower_vs_name(self):
return "tower0"
class HorovodTrainer(SingleCostTrainer):
"""
Horovod trainer, support both multi-GPU and distributed training.
To use for multi-GPU training:
.. code-block:: bash
# First, change trainer to HorovodTrainer(), then
CUDA_VISIBLE_DEVICES=0,1,2,3 NCCL_DEBUG=INFO mpirun -np 4 --output-filename mylog python train.py
To use for distributed training:
.. code-block:: bash
# First, change trainer to HorovodTrainer(), then
mpirun -np 8 -H server1:4,server2:4 \\
-bind-to none -map-by slot \\
--output-filename mylog -x NCCL_DEBUG=INFO -x LD_LIBRARY_PATH \\
python train.py
# Add other environment variables you need by -x, e.g. PYTHONPATH, PATH.
# If using all GPUs, you can always skip the `CUDA_VISIBLE_DEVICES` option.
# There are other MPI options that can potentially improve performance especially on special hardwares.
Note:
1. To reach the maximum speed in your system, there are many options to tune
for Horovod installation and in the MPI command line.
See Horovod docs for details.
2. Due to a TF bug, you must not initialize CUDA context before the trainer starts training.
Therefore TF functions like `is_gpu_available()` or `list_local_devices()`
must be avoided.
        3. MPI does not like `fork()`. If your dataflow contains multiprocessing, it may cause problems.
        4. MPI sometimes fails to kill all processes in the end. Be sure to check it afterwards.
        5. Keep in mind that there is one process running the script per GPU, therefore:
+ Make sure your InputSource has reasonable randomness.
+ If your data processing is heavy, doing it in a separate dedicated process might be
a better choice than doing them repeatedly in each process.
+ You need to make sure log directories in each process won't conflict.
You can set it only for the chief process, or set a different one for each process.
+ Callbacks have an option to be run only in the chief process, or in all processes.
See :meth:`callback.set_chief_only()`. Most callbacks have a reasonable
default already, but certain callbacks may not behave properly by default. Report an issue if you find any.
+ You can use Horovod API such as `hvd.rank()` to know which process you are and choose
different code path. Chief process has rank 0.
        6. Due to these caveats, see
`ResNet-Horovod <https://github.com/tensorpack/benchmarks/tree/master/ResNet-Horovod>`_
for a full example which has handled these common issues.
This example can train ImageNet in roughly an hour following the paper's setup.
"""
def __init__(self, average=True):
"""
Args:
average (bool): whether to average or sum the gradients across processes.
"""
if 'pyarrow' in sys.modules:
logger.warn("Horovod and pyarrow may conflict due to pyarrow bugs. "
"Uninstall pyarrow and use msgpack instead.")
# lazy import
import horovod.tensorflow as _hvd
global hvd
hvd = _hvd
hvd.init()
self.is_chief = hvd.rank() == 0
self._local_rank = hvd.local_rank()
self._average = average
logger.info("[HorovodTrainer] local rank={}".format(self._local_rank))
super(HorovodTrainer, self).__init__()
def allreduce(self, grads):
if hvd.size() == 1:
return grads
# copied from https://github.com/uber/horovod/blob/master/horovod/tensorflow/__init__.py
averaged_gradients = []
with tf.name_scope("HVDAllReduce"):
for grad, var in grads:
if grad is not None:
avg_grad = hvd.allreduce(grad, average=self._average)
averaged_gradients.append((avg_grad, var))
else:
averaged_gradients.append((None, var))
return averaged_gradients
def _setup_graph(self, input, get_cost_fn, get_opt_fn):
with TrainTowerContext(''):
grads = self._make_get_grad_fn(input, get_cost_fn, get_opt_fn)()
grads = self.allreduce(grads)
opt = get_opt_fn()
self.train_op = opt.apply_gradients(grads, name='min_op')
def broadcast(self):
logger.info("Running horovod broadcast ...")
# the op will be created later in initialize()
self.trainer._broadcast_op.run()
cb = CallbackFactory(trigger=broadcast).set_chief_only(False)
return [cb]
@HIDE_DOC
def initialize(self, session_creator, session_init):
# broadcast_op should be the last setup_graph: it needs to be created
# "right before" the graph is finalized,
# because it needs to capture all the variables (which may be created by callbacks).
with tf.name_scope('horovod_broadcast'):
self._broadcast_op = hvd.broadcast_global_variables(0)
# it's important that our NewSessionCreator does not finalize the graph
if not isinstance(session_creator, NewSessionCreator):
raise ValueError(
"session_creator has to be `NewSessionCreator` for horovod training! ")
# NOTE It will fail if GPU was already detected before initializing the session
# https://github.com/tensorflow/tensorflow/issues/8136
session_creator.config.gpu_options.visible_device_list = str(self._local_rank)
try:
session_creator.config.inter_op_parallelism_threads = mp.cpu_count() // hvd.local_size()
except AttributeError: # old horovod does not have local_size
pass
super(HorovodTrainer, self).initialize(session_creator, session_init)
        # This broadcast belongs to the "initialize" stage
# It should not be delayed to the "before_train" stage.
# TODO:
# 1. a allgather helper to concat strings
# 2. check variables on each rank match each other, print warnings, and broadcast the common set.
logger.info("Broadcasting initialized variables ...")
self.sess.run(self._broadcast_op)
# for lazy import
hvd = None
|
from django.contrib import admin
# Register your models here.
from .models import Event, Location, Schedule, Slot, Booking
admin.site.register(Event)
admin.site.register(Location)
admin.site.register(Schedule)
admin.site.register(Slot)
admin.site.register(Booking)
|
import subprocess
import collections
import glob
import inspect
import os
import random
import re
import shutil
import tempfile
import time
from contextlib import contextmanager
from getpass import getpass
import sys
import psutil
import requests
from pathlib import Path
from cloudmesh.common.console import Console
import pyfiglet
import socket
import platform
try:
collectionsAbc = collections.abc
except AttributeError:
collectionsAbc = collections
@contextmanager
def tempdir(*args, **kwargs):
"""A contextmanager to work in an auto-removed temporary directory
Arguments are passed through to tempfile.mkdtemp
example:
>>> with tempdir() as path:
... pass
"""
d = tempfile.mkdtemp(*args, **kwargs)
try:
yield d
finally:
shutil.rmtree(d)
def check_root(dryrun=False, terminate=True):
"""
check if I am the root user. If not, simply exits the program.
:param dryrun: if set to true, does not terminate if not root user
:type dryrun: bool
:param terminate: terminates if not root user and dryrun is False
:type terminate: bool
"""
uid = os.getuid()
if uid == 0:
Console.ok("You are executing as a root user")
else:
Console.error("You do not run as root")
if terminate and not dryrun:
sys.exit()
def exponential_backoff(fn, sleeptime_s_max=30 * 60):
"""
Calls `fn` until it returns True, with an exponentially increasing wait
time between calls
    :param fn: the function to be called that returns True or False
    :type fn: object
    :param sleeptime_s_max: the maximum sleep time in seconds
    :type sleeptime_s_max: int
    :return: True if fn returned True; False once the wait time would exceed sleeptime_s_max
"""
sleeptime_ms = 500
while True:
if fn():
return True
else:
print('Sleeping {} ms'.format(sleeptime_ms))
time.sleep(sleeptime_ms / 1000.0)
sleeptime_ms *= 2
if sleeptime_ms / 1000.0 > sleeptime_s_max:
return False
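def _wait_for_ready_file(path="/tmp/ready"):
    # A hedged usage example (illustrative only): poll for a hypothetical
    # readiness file, doubling the sleep between checks and waiting at most
    # roughly 60 s between attempts.
    return exponential_backoff(lambda: os.path.isfile(path), sleeptime_s_max=60)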
def download(source, destination, force=False):
"""
Downloads the file from source to destination
For large files, see cloudmesh.common.Shell.download
:param source: The http source
:param destination: The destination in the file system
:param force: If True the file will be downloaded even if
it already exists
"""
if os.path.isfile(destination) and not force:
Console.warning(f"File {destination} already exists. "
"Skipping download ...")
else:
directory = os.path.dirname(destination)
Path(directory).mkdir(parents=True, exist_ok=True)
r = requests.get(source, allow_redirects=True)
open(destination, 'wb').write(r.content)
def search(lines, pattern):
"""
return all lines that match the pattern
#TODO: we need an example
:param lines:
:param pattern:
:return:
"""
p = pattern.replace("*", ".*")
test = re.compile(p)
result = []
for l in lines: # noqa: E741
if test.search(l):
result.append(l)
return result
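def _demo_search():
    # A hedged example for the TODO above (illustrative only): '*' in the
    # pattern is translated to '.*', so "cloud*" matches the first two lines.
    return search(["cloudmesh.yaml", "cloud.bak", "readme.md"], "cloud*")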
def grep(pattern, filename):
"""Very simple grep that returns the first matching line in a file.
String matching only, does not do REs as currently implemented.
"""
try:
# for line in file
# if line matches pattern:
# return line
return next((L for L in open(filename) if L.find(pattern) >= 0))
except StopIteration:
return ''
def is_local(host):
"""
Checks if the host is the localhost
:param host: The hostname or ip
:return: True if the host is the localhost
"""
return host in ["127.0.0.1",
"localhost",
socket.gethostname(),
# just in case socket.gethostname() does not work we also try the following:
platform.node(),
socket.gethostbyaddr(socket.gethostname())[0]
]
# noinspection PyPep8
def is_gitbash():
"""
returns True if you run in a Windows gitbash
:return: True if gitbash
"""
try:
exepath = os.environ['EXEPATH']
return "Git" in exepath
except:
return False
def is_powershell():
"""
True if you run in powershell
:return: True if you run in powershell
"""
# psutil.Process(parent_pid).name() returns -
# cmd.exe for CMD
# powershell.exe for powershell
# bash.exe for git bash
return (psutil.Process(os.getppid()).name() == "powershell.exe")
def is_cmd_exe():
"""
return True if you run in a Windows CMD
:return: True if you run in CMD
"""
if is_gitbash():
return False
else:
try:
return os.environ['OS'] == 'Windows_NT'
except:
return False
def path_expand(text):
""" returns a string with expanded variable.
:param text: the path to be expanded, which can include ~ and environment variables
:param text: string
"""
result = os.path.expandvars(os.path.expanduser(text))
if result.startswith("./"):
result = result.replace(".", os.getcwd(), 1)
if is_gitbash() or is_cmd_exe():
result = result.replace("/", "\\")
return result
def convert_from_unicode(data):
"""
converts unicode data to a string
:param data: the data to convert
:return:
"""
# if isinstance(data, basestring):
if isinstance(data, str):
return str(data)
elif isinstance(data, collectionsAbc.Mapping):
return dict(map(convert_from_unicode, data.items()))
elif isinstance(data, collectionsAbc.Iterable):
return type(data)(map(convert_from_unicode, data))
else:
return data
def yn_choice(message, default='y', tries=None):
"""asks for a yes/no question.
:param tries: the number of tries
:param message: the message containing the question
:param default: the default answer
"""
# http://stackoverflow.com/questions/3041986/python-command-line-yes-no-input"""
choices = 'Y/n' if default.lower() in ('y', 'yes') else 'y/N'
if tries is None:
choice = input(f"{message} ({choices}) ")
values = ('y', 'yes', '') if default == 'y' else ('y', 'yes')
return True if choice.strip().lower() in values else False
else:
while tries > 0:
choice = input(f"{message} ({choices}) ('q' to discard)")
choice = choice.strip().lower()
if choice in ['y', 'yes']:
return True
elif choice in ['n', 'no', 'q']:
return False
else:
print("Invalid input...")
tries -= 1
def str_banner(txt=None, c="-", prefix="#", debug=True, label=None,
color="BLUE", padding=False,
figlet=False, font="big"):
"""
prints a banner of the form with a frame of # around the txt::
# --------------------------
# txt
# --------------------------
:param color: prints in the given color
:param label: adds a label
:param debug: prints only if debug is true
:param txt: a text message to be printed
:type txt: string
    :param c: the character used for the frame line
:type c: character
"""
output = ""
if debug:
output = "\n"
output += prefix + " " + 70 * c + "\n"
if padding:
output += prefix + "\n"
if label is not None:
output += prefix + " " + label + "\n"
output += prefix + " " + 70 * c + "\n"
if txt is not None:
if figlet:
txt = pyfiglet.figlet_format(txt, font=font)
for line in txt.splitlines():
output += prefix + " " + line + "\n"
if padding:
output += prefix + "\n"
output += prefix + " " + 70 * c + "\n"
return output
def banner(txt=None, c="-", prefix="#", debug=True, label=None,
color="BLUE", padding=False,
figlet=False, font="big"):
"""
prints a banner of the form with a frame of # around the txt::
# --------------------------
# txt
# --------------------------
:param color: prints in the given color
:param label: adds a label
:param debug: prints only if debug is true
:param txt: a text message to be printed
:type txt: string
    :param c: the character used for the frame line
:type c: character
    :param padding: adds additional comment lines around the text so the banner is larger
:type padding: bool
"""
output = str_banner(txt=txt, c=c, prefix=prefix, debug=debug, label=label,
color=color, padding=padding, figlet=figlet, font=font)
Console.cprint(color, "", output)
# noinspection PyPep8Naming
def HEADING(txt=None, c="#", color="HEADER"):
"""
Prints a message to stdout with #### surrounding it. This is useful for
pytests to better distinguish them.
:param c: uses the given char to wrap the header
:param txt: a text message to be printed
:type txt: string
"""
frame = inspect.getouterframes(inspect.currentframe())
filename = frame[1][1].replace(os.getcwd(), "")
line = frame[1][2] - 1
method = frame[1][3]
if txt is None:
msg = "{} {} {}".format(method, filename, line)
else:
msg = "{}\n {} {} {}".format(txt, method, filename, line)
print()
banner(msg, c=c, color=color)
# noinspection PyPep8Naming
def FUNCTIONNAME():
"""
    Returns the name of the calling function.
"""
frame = inspect.getouterframes(inspect.currentframe())
filename = frame[1][1].replace(os.getcwd(), "")
line = frame[1][2] - 1
method = frame[1][3]
return method
def backup_name(filename):
"""
:param filename: given a filename creates a backup name of the form
filename.bak.1. If the filename already exists
the number will be increased as much as needed so
the file does not exist in the given location.
                     The filename can contain a path and is expanded
with ~ and environment variables.
:type filename: string
:rtype: string
"""
location = path_expand(filename)
n = 0
found = True
backup = None
while found:
n += 1
backup = "{0}.bak.{1}".format(location, n)
found = os.path.isfile(backup)
return backup
def auto_create_version(class_name, version, filename="__init__.py"):
"""
creates a version number in the __init__.py file.
    It can be accessed with __version__
:param class_name:
:param version:
:param filename:
:return:
"""
version_filename = Path(
"{classname}/{filename}".format(classname=class_name,
filename=filename))
with open(version_filename, "r") as f:
content = f.read()
if content != '__version__ = "{0}"'.format(version):
banner("Updating version to {0}".format(version))
with open(version_filename, "w") as text_file:
text_file.write('__version__ = "{0:s}"'.format(version))
def auto_create_requirements(requirements):
"""
    creates a requirements.txt file from the requirements in the list. If the file
    exists, it gets changed only if the
requirements in the list are different from the existing file
:param requirements: the requirements in a list
"""
banner("Creating requirements.txt file")
try:
with open("requirements.txt", "r") as f:
file_content = f.read()
except:
file_content = ""
setup_requirements = '\n'.join(requirements)
if setup_requirements != file_content:
with open("requirements.txt", "w") as text_file:
text_file.write(setup_requirements)
def copy_files(files_glob, source_dir, dest_dir):
"""
copies the files to the destination
:param files_glob: `*.yaml`
:param source_dir: source directory
:param dest_dir: destination directory
"""
files = glob.iglob(os.path.join(source_dir, files_glob))
for filename in files:
if os.path.isfile(filename):
shutil.copy2(filename, dest_dir)
def readfile(filename, mode='r'):
"""
returns the content of a file
    :param filename: the filename
    :param mode: the read mode, 'r' (text) or 'rb' (binary)
    :return: the content of the file
"""
if mode != 'r' and mode != 'rb':
Console.error(f"incorrect mode : expected 'r' or 'rb' given {mode}")
else:
with open(path_expand(filename), mode) as f:
content = f.read()
return content
def writefile(filename, content):
"""
writes the content into the file
:param filename: the filename
    :param content: the content
:return:
"""
with open(path_expand(filename), 'w') as outfile:
outfile.write(content)
outfile.truncate()
def writefd(filename, content, mode='w', flags=os.O_RDWR | os.O_CREAT, mask=0o600):
"""
    writes the content into the file and controls permissions
:param filename: the full or relative path to the filename
:param content: the content being written
:param mode: the write mode ('w') or write bytes mode ('wb')
:param flags: the os flags that determine the permissions for the file
:param mask: the mask that the permissions will be applied to
"""
if mode != 'w' and mode != 'wb':
Console.error(f"incorrect mode : expected 'w' or 'wb' given {mode}")
with os.fdopen(os.open(filename, flags, mask), mode) as outfile:
outfile.write(content)
outfile.truncate()
def sudo_readfile(filename, split=True, trim=False):
"""
Reads the content of the file as sudo and returns the result
:param filename: the filename
:type filename: str
    :param split: if True, returns a list of lines
:type split: bool
:param trim: trim trailing whitespace. This is useful to
prevent empty string entries when splitting by '\n'
:type trim: bool
:return: the content
:rtype: str or list
"""
result = subprocess.getoutput(f"sudo cat {filename}")
if trim:
result = result.rstrip()
if split:
result = result.split('\n')
return result
# Reference: http://interactivepython.org/runestone/static/everyday/2013/01/3_password.html
def generate_password(length=8, lower=True, upper=True, number=True):
"""
generates a simple password. We should not really use this in production.
:param length: the length of the password
    :param lower: True if lower case characters are allowed
:param upper: True if upper case characters are allowed
:param number: True if numbers are allowed
:return:
"""
lletters = "abcdefghijklmnopqrstuvwxyz"
uletters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
# This doesn't guarantee both lower and upper cases will show up
alphabet = lletters + uletters
digit = "0123456789"
mypw = ""
def _random_character(texts):
return texts[random.randrange(len(texts))]
if not lower:
alphabet = uletters
elif not upper:
alphabet = lletters
for i in range(length):
# last half length will be filled with numbers
if number and i >= int(length / 2):
mypw = mypw + _random_character(digit)
else:
mypw = mypw + _random_character(alphabet)
return mypw
def str_bool(value):
return str(value).lower() in ['yes', '1', 'y', 'true', 't']
def get_password(prompt):
from cloudmesh.common.systeminfo import os_is_windows
try:
if os_is_windows() and is_gitbash():
continuing = True
while continuing:
sys.stdout.write(prompt)
sys.stdout.flush()
subprocess.check_call(["stty", "-echo"])
password = input()
subprocess.check_call(["stty", "echo"])
sys.stdout.write('Please retype the password:\n')
sys.stdout.flush()
subprocess.check_call(["stty", "-echo"])
password2 = input()
subprocess.check_call(["stty", "echo"])
if password == password2:
continuing = False
else:
Console.error('Passwords do not match\n')
return password
else:
continuing = True
while continuing:
password = getpass(prompt)
password2 = getpass('Please retype the password:\n')
if password == password2:
continuing = False
else:
Console.error('Passwords do not match\n')
return password
except KeyboardInterrupt:
#Console.error('Detected Ctrl + C. Quitting...')
if is_gitbash():
subprocess.check_call(["stty", "echo"])
raise ValueError('Detected Ctrl + C. Quitting...')
|
#!/usr/bin/env python3.7
# -*- coding: utf-8 -*-
"""This is a simple cloud program running docker containers. The program provides
an API to view and monitor services running in the cloud.
Core components:
- User-defined Docker bridge network
- Service registry (consul) container
- Service discovery container (registrator)
- TCP proxy (HAProxy) container, configured dynamically using consul-template
- Gateway: optional, used for communication between docker networks
- Additional web servers: can be created or scaled manually
"""
from simplecloud import docker_client, docker_api_client, logger
from simplecloud.network import BridgeNetwork, OpenVSwitchNetwork, OvsException
from simplecloud.registrator import Registrator
import requests
import base64
import traceback
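# Minimal usage sketch (hypothetical values; MyCloud and MyCloudService are
# defined below):
#
#   cloud = MyCloud(subnet='172.20.0.0/16', network_name='demo',
#                   initial_services=[{'image': 'nginx', 'name': 'web',
#                                      'port': 80}])
#   cloud.scale_service('web', 3)
#   cloud.cleanup()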
proxy_configs = {
'mode': ('tcp', 'http'),
'balance': (
'roundrobin', 'static-rr', 'leastconn',
'first', 'source', 'uri', 'url_param', 'rdp-cookie'
)
}
def _check_alive_container(container):
    try:
        container.reload()
        return container.status == 'running'
    except Exception:
        # the container is gone or the docker daemon is unreachable
        return False
def _stop_container(container):
    try:
        logger.info(f'Stopping container {container.id}')
        container.remove(force=True)
    except Exception:
        # already stopped or removed; treat as success
        pass
    return True
class MyCloudService:
def __init__(self, image, name, network, port,
init_scale=1, command=None, is_ovs=False):
self.image = image
self.name = name
self.port = port
self.command = command
self.network = network
self.ovs = is_ovs
self.containers = []
self.idx = 1
self.mode = None
self.balance = None
# start the current services with a number of running containers
self.start(init_scale)
@property
def size(self):
self.reload()
return len(self.containers)
def _create_container(self):
host_config = docker_api_client.create_host_config(
auto_remove=True,
port_bindings={
self.port: None
}
)
pending_container = docker_api_client.create_container(
image=self.image,
name=f'{self.name}_{self.idx:02d}_{self.network.name}',
command=self.command,
detach=True,
host_config=host_config,
ports=[self.port],
environment={
'SERVICE_NAME': self.name,
'SERVICE_ID': f'{self.name}_{self.idx:02d}'
}
)
container = docker_client.containers.get(pending_container)
self.idx += 1
return container
def _run_container(self):
container = self._create_container()
# order of operation may affect how registrator works
if self.network.ovs:
container.start()
self.network.add_container(container, register=True)
else:
self.network.add_container(container)
container.start()
return container
def info(self):
_info = {
"Image": self.image,
"Service name": self.name,
"Port": self.port,
"Number of containers": self.size,
"Containers": [
{c.id[:12]: c.name} for c in self.containers
],
"Mode": self.mode or 'tcp',
"LB algorithm": self.balance or 'roundrobin'
}
return _info
def start(self, scale):
"""Start the service with an initial number of containers"""
for _ in range(scale):
try:
container = self._run_container()
self.containers.append(container)
except Exception as e:
logger.error(e)
def reload(self):
"""Refresh the docker client for up-to-date containers status"""
self.containers = list(filter(_check_alive_container, self.containers))
def scale(self, new_size):
"""Scale up or down the current service"""
if new_size < 1:
return False
cur_size = self.size
if new_size == cur_size:
return True
elif new_size < cur_size:
# stop some running containers
for container in self.containers[new_size:]:
try:
self.network.remove_container(container.id)
_stop_container(container)
                except OvsException:
                    pass
self.reload()
else:
# start new containers
for _ in range(new_size - cur_size):
try:
container = self._run_container()
self.containers.append(container)
except Exception as e:
logger.error(e)
return True
def stop(self):
"""Stop all containers"""
for container in self.containers:
try:
self.network.remove_container(container.id)
_stop_container(container)
            except OvsException:
                pass
self.containers = []
def __str__(self):
return f'Service: {self.name}'
class CloudException(Exception):
pass
class MyCloud:
def __init__(self, subnet=None, network_name=None, ovs=False, proxy_ip=None,
gateway_ip=None, initial_services=None, entrypoint=None):
self.running = True
# declare variables for network stuff
self.proxy_ip = proxy_ip
self.gateway_ip = gateway_ip
reservations = {
'proxy': proxy_ip,
'gateway': gateway_ip
}
if not proxy_ip:
reservations.pop('proxy')
if not gateway_ip:
reservations.pop('gateway')
if ovs:
self.network = OpenVSwitchNetwork(
network_name,
subnet,
reservations
)
else:
self.network = BridgeNetwork(
network_name,
subnet,
reservations
)
self.registry_ip = self.network.reserve_ip('registry')
logger.debug(f'registry ip requested: {self.registry_ip}')
# create variables for important containers
self.registry_name = "service-registry-%s" % network_name
self.registrator_name = "service-registrator-%s" % network_name
self.proxy_name = "proxy-%s" % network_name
self.proxy_entrypoint = entrypoint
self.registry = None
self.registrator = None
self.proxy = None
self.services = {}
self.used_ports = set()
try:
self.create_registry()
self.create_proxy()
self.proxy.start()
self.network.add_container(self.proxy, reservation='proxy')
self.proxy.exec_run('/root/entry/custom-entrypoint.sh')
logger.info("Proxy has been started")
self.registry.start()
self.network.add_container(self.registry, reservation='registry')
logger.info("Service registry has been started")
if self.network.ovs:
self.network.registrator = Registrator(self.registry)
else:
self.create_registrator()
if self.registrator:
self.network.add_container(self.registrator)
self.registrator.start()
logger.info("Service registrator has been started")
if initial_services:
self.initialize_services(initial_services)
except Exception as e:
logger.error(''.join(
traceback.format_exception(
type(e), e, e.__traceback__)))
self.cleanup()
raise CloudException
def create_registry(self):
host_config = docker_api_client.create_host_config(
restart_policy={
"Name": "on-failure",
"MaximumRetryCount": 10
}
)
container = docker_api_client.create_container(
image="citelab/consul-server:latest",
command=["-bootstrap", f'-advertise={self.registry_ip}'],
name=self.registry_name,
host_config=host_config,
detach=True
)
self.registry = docker_client.containers.get(container)
def create_registrator(self):
host_config = docker_api_client.create_host_config(
restart_policy={
"Name": "on-failure",
"MaximumRetryCount": 10
},
binds=[
"/var/run/docker.sock:/tmp/docker.sock"
]
)
container = docker_api_client.create_container(
image="citelab/registrator:latest",
command=["-internal",
"-explicit",
"-network=%s" % self.network.name,
"-retry-attempts=10",
"-retry-interval=1000",
"consul://%s:8500" % self.registry_ip],
name=self.registrator_name,
volumes=["/tmp/docker.sock"],
host_config=host_config,
detach=True
)
self.registrator = docker_client.containers.get(container)
def create_proxy(self):
if self.proxy_entrypoint:
proxy_binds = ["%s:/root/entry/custom-entrypoint.sh" % self.proxy_entrypoint]
proxy_volumes = ["/root/entry/custom-entrypoint.sh"]
proxy_entrypoint = "/root/entry/custom-entrypoint.sh"
else:
proxy_binds = []
proxy_volumes = []
proxy_entrypoint = None
host_config = docker_api_client.create_host_config(
restart_policy={
"Name": "on-failure",
"MaximumRetryCount": 10
},
binds=proxy_binds,
privileged=True
)
container = docker_api_client.create_container(
image="citelab/haproxy:latest",
entrypoint=proxy_entrypoint,
command=[
"consul-template",
"-config=/tmp/haproxy.conf",
"-consul=%s:8500" % self.registry_ip,
"-log-level=debug"
],
volumes=proxy_volumes,
name=self.proxy_name,
host_config=host_config,
detach=True
)
self.proxy = docker_client.containers.get(container)
@property
def _registry_public_ip(self):
self.registry.reload()
return self.registry.attrs['NetworkSettings']['Networks']['bridge']['IPAddress']
def registry_update(self, service, key, value=None, action='put'):
if service not in self.services:
return False
        if key not in proxy_configs:
            return False
        # only validate the value for 'put'; 'delete' passes value=None
        if action == 'put' and value not in proxy_configs[key]:
            return False
# craft uri from arguments
if self.network.ovs:
uri = 'http://%s:8500/v1/kv/service/%s/%s' % (self._registry_public_ip, service, key)
else:
uri = 'http://%s:8500/v1/kv/service/%s/%s' % (self.registry_ip, service, key)
if action == 'put' and value is not None:
resp = requests.put(uri, data=value)
if resp.json(): # success
setattr(self.services[service], key, value)
return True
return False
elif action == 'delete':
resp = requests.delete(uri)
if resp.json():
setattr(self.services[service], key, None)
return True
return False
else:
return False
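    # Usage sketch: switch a service to HTTP mode with least-connections
    # balancing; accepted keys and values are defined in proxy_configs above.
    #
    #   cloud.registry_update('web', 'mode', 'http')
    #   cloud.registry_update('web', 'balance', 'leastconn')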
    def registry_get(self, service, key):
        if service not in self.services:
            return False
        if key not in proxy_configs:
            return False
        # craft uri from arguments
        if self.network.ovs:
            uri = 'http://%s:8500/v1/kv/service/%s/%s' % (self._registry_public_ip, service, key)
        else:
            uri = 'http://%s:8500/v1/kv/service/%s/%s' % (self.registry_ip, service, key)
        resp = requests.get(uri)
        # return default values if the key does not exist
        if resp.status_code == 404:
            return 'tcp' if key == 'mode' else 'roundrobin'
        else:
            value = resp.json()[0]['Value']
            # consul returns the value base64-encoded
            return base64.b64decode(value).decode()
def start_service(self, image, name, port, scale=1, command=None):
if name in self.services:
logger.warning(f"Service {name} already exists")
return
if port in self.used_ports:
logger.warning(f"Port {port} has already been used!")
return
new_service = MyCloudService(
image, name, self.network,
port, scale, command)
self.services[name] = new_service
self.used_ports.add(port)
def initialize_services(self, services_list):
for service in services_list:
self.start_service(**service)
def stop_service(self, name):
old_service = self.services.pop(name, None)
if old_service:
old_service.stop()
self.used_ports.remove(old_service.port)
logger.info(f"Removed service: {old_service.name}")
return True
logger.warning(f"Service {name} does not exist")
return False
def list_services(self):
return self.services.keys()
def info_service(self, name):
if name in self.services:
return self.services[name].info()
else:
return {}
def scale_service(self, name, size):
if name in self.services:
return self.services[name].scale(size)
else:
return False
def _update(self):
self.network.reload()
        for container in (self.registry, self.registrator, self.proxy):
            if container:  # registrator is None on OVS networks
                container.reload()
for service in self.services.values():
service.reload()
def cleanup(self):
logger.debug("Cleaning up everything")
self.network.stop_listening()
for container in (self.registry, self.registrator, self.proxy):
try:
self.network.remove_container(container.id)
_stop_container(container)
            except Exception:
                continue
for service in self.services.values():
service.stop()
try:
self.network.remove()
        except Exception:
            pass
self.running = False
logger.debug("Removed running services and docker network")
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Detection config template."""
from configs import base_config
from hyperparameters import params_dict
# pylint: disable=line-too-long
# For ResNet, this freezes the variables of the first conv1 and conv2_x
# layers [1], which leads to higher training speed and slightly better testing
# accuracy. The intuition is that the low-level architecture (e.g., ResNet-50)
# is able to capture low-level features such as edges; therefore, it does not
# need to be fine-tuned for the detection task.
# Note that we need the trailing `/` to avoid an incorrect match.
# [1]: https://github.com/facebookresearch/Detectron/blob/master/detectron/core/config.py#L198
RESNET_FROZEN_VAR_PREFIX = r'(resnet\d+)\/(conv2d(|_([1-9]|10))|batch_normalization(|_([1-9]|10)))\/'
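# A sketch of the intended consumption (assumed usage, not defined in this
# file): variables whose names match the prefix are excluded from training,
# e.g. frozen = [v for v in variables if re.match(RESNET_FROZEN_VAR_PREFIX, v.name)]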
DETECTION_CFG = params_dict.ParamsDict(base_config.BASE_CFG)
DETECTION_CFG.override({
'architecture': {
# Note that `num_classes` is the total number of classes including
# one background classes whose index is 0.
'num_classes': 91
},
'eval': {
'type': 'box',
'eval_samples': 5000,
'use_json_file': True,
'val_json_file': '',
'per_category_metrics': False,
},
'anchor': {
'num_scales': 3,
'aspect_ratios': [1.0, 2.0, 0.5],
'anchor_size': 4.0,
},
'fpn': {
'fpn_feat_dims': 256,
'use_separable_conv': False,
'use_batch_norm': True,
},
'nasfpn': {
'fpn_feat_dims': 256,
'num_repeats': 5,
'use_separable_conv': False,
'init_drop_connect_rate': None,
'block_fn': 'conv',
},
'postprocess': {
'apply_nms': True,
'use_batched_nms': False,
'max_total_size': 100,
'nms_iou_threshold': 0.5,
'score_threshold': 0.05,
'pre_nms_num_boxes': 5000,
},
}, is_strict=False)
# pylint: enable=line-too-long
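# Example (sketch): downstream experiment configs can override individual
# fields in the same way, e.g.
#   DETECTION_CFG.override({'architecture': {'num_classes': 21}}, is_strict=False)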
|
#!/usr/bin/env python
############################################################################
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
############################################################################
# This script is intended for use where HBase/Phoenix is loaded from the HBase
# classpath; therefore the HBASE_DIR environment variable must be configured
# for this script to execute
import os
import subprocess
import sys
import phoenix_utils
phoenix_utils.setPath()
args = phoenix_utils.shell_quote(sys.argv[1:])
# HBase configuration folder path (where hbase-site.xml reside) for
# HBase/Phoenix client side property override
hbase_config_path = os.getenv('HBASE_CONF_DIR', phoenix_utils.current_dir)
hbase_path = os.getenv('HBASE_DIR')
java_home = os.getenv('JAVA_HOME')
# load hbase-env.??? to extract JAVA_HOME, HBASE_PID_DIR, HBASE_LOG_DIR
hbase_env_path = None
hbase_env_cmd = None
if os.name == 'posix':
hbase_env_path = os.path.join(hbase_config_path, 'hbase-env.sh')
hbase_env_cmd = ['bash', '-c', 'source %s && env' % hbase_env_path]
elif os.name == 'nt':
hbase_env_path = os.path.join(hbase_config_path, 'hbase-env.cmd')
hbase_env_cmd = ['cmd.exe', '/c', 'call %s & set' % hbase_env_path]
if not hbase_env_path or not hbase_env_cmd:
print >> sys.stderr, "hbase-env file unknown on platform %s" % os.name
sys.exit(-1)
hbase_env = {}
if os.path.isfile(hbase_env_path):
p = subprocess.Popen(hbase_env_cmd, stdout = subprocess.PIPE)
for x in p.stdout:
(k, _, v) = x.partition('=')
hbase_env[k.strip()] = v.strip()
if hbase_env.has_key('JAVA_HOME'):
java_home = hbase_env['JAVA_HOME']
if java_home:
java = os.path.join(java_home, 'bin', 'java')
else:
java = 'java'
print "HBASE_DIR environment variable is currently set to: " + hbase_path
# Get the HBase classpath
hbasecp, stderr = subprocess.Popen(hbase_path + "/bin/hbase classpath",
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE).communicate()
java_cmd = java +' -cp "' + hbasecp + os.pathsep + phoenix_utils.pherf_conf_path + os.pathsep + phoenix_utils.hbase_conf_dir + os.pathsep + phoenix_utils.phoenix_pherf_jar + \
'" -Dlog4j.configuration=file:' + \
os.path.join(phoenix_utils.current_dir, "log4j.properties") + \
" org.apache.phoenix.pherf.Pherf " + args
os.execl("/bin/sh", "/bin/sh", "-c", java_cmd)
|
import panel as pn
from bokeh.document import Document
from holoviews import opts
from panel.pane import HoloViews, Markdown
from panel.template.fast.list import FastListDarkTheme, FastListTemplate
from panel.tests.template.fast.test_fast_grid_template import (
INFO, _create_hvplot, _fast_button_card, _sidebar_items)
ACCENT_COLOR = "purple"
opts.defaults(opts.Ellipse(line_width=3, color=ACCENT_COLOR))
def test_template_theme_parameter():
template = FastListTemplate(title="Fast", theme="dark")
# Not '#3f3f3f' which is for the Vanilla theme
doc = template.server_doc(Document())
    assert doc.theme._json['attrs']['Figure']['background_fill_color'] == "#181818"
assert isinstance(template._get_theme(), FastListDarkTheme)
def test_accepts_colors_by_name():
template = FastListTemplate(
accent_base_color="red",
header_background="green",
header_color="white",
header_accent_base_color="blue",
)
template._update_vars()
def test_app():
app = FastListTemplate(
title="FastListTemplate w. #ORSOME colors",
site="Panel",
accent_base_color=ACCENT_COLOR,
header_background=ACCENT_COLOR,
header_accent_base_color="#FFFFFF",
main_layout="",
shadow=True,
)
app.main[:] = [
Markdown(INFO, sizing_mode="stretch_both"),
HoloViews(_create_hvplot(), sizing_mode="stretch_both"),
_fast_button_card(),
HoloViews(_create_hvplot(), sizing_mode="stretch_both"),
]
app.sidebar.extend(_sidebar_items())
return app
if __name__.startswith("bokeh"):
pn.extension(sizing_mode="stretch_width")
test_app().servable()
|
import uuid
from copy import deepcopy
from datetime import date, timedelta
from decimal import Decimal
from unittest import mock
from unittest.mock import ANY, MagicMock, Mock, call, patch
import graphene
import pytest
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import ValidationError
from freezegun import freeze_time
from measurement.measures import Weight
from prices import Money, TaxedMoney
from ....account.models import CustomerEvent
from ....core.anonymize import obfuscate_email
from ....core.notify_events import NotifyEventType
from ....core.prices import quantize_price
from ....core.taxes import TaxError, zero_taxed_money
from ....discount.models import OrderDiscount
from ....order import FulfillmentStatus, OrderStatus
from ....order import events as order_events
from ....order.error_codes import OrderErrorCode
from ....order.events import order_replacement_created
from ....order.models import Order, OrderEvent
from ....order.notifications import get_default_order_payload
from ....payment import ChargeStatus, PaymentError
from ....payment.models import Payment
from ....plugins.manager import PluginsManager
from ....shipping.models import ShippingMethod, ShippingMethodChannelListing
from ....warehouse.models import Allocation, Stock
from ....warehouse.tests.utils import get_available_quantity_for_stock
from ...order.mutations.orders import (
clean_order_cancel,
clean_order_capture,
clean_refund_payment,
try_payment_action,
)
from ...payment.types import PaymentChargeStatusEnum
from ...tests.utils import assert_no_permission, get_graphql_content
from ..utils import validate_draft_order
@pytest.fixture
def orders_query_with_filter():
query = """
query ($filter: OrderFilterInput!, ) {
orders(first: 5, filter:$filter) {
totalCount
edges {
node {
id
}
}
}
}
"""
return query
@pytest.fixture
def draft_orders_query_with_filter():
query = """
query ($filter: OrderDraftFilterInput!, ) {
draftOrders(first: 5, filter:$filter) {
totalCount
edges {
node {
id
}
}
}
}
"""
return query
@pytest.fixture
def orders(customer_user, channel_USD, channel_PLN):
return Order.objects.bulk_create(
[
Order(
user=customer_user,
status=OrderStatus.CANCELED,
token=uuid.uuid4(),
channel=channel_USD,
),
Order(
user=customer_user,
status=OrderStatus.UNFULFILLED,
token=uuid.uuid4(),
channel=channel_USD,
),
Order(
user=customer_user,
status=OrderStatus.PARTIALLY_FULFILLED,
token=uuid.uuid4(),
channel=channel_USD,
),
Order(
user=customer_user,
status=OrderStatus.FULFILLED,
token=uuid.uuid4(),
channel=channel_PLN,
),
Order(
user=customer_user,
status=OrderStatus.DRAFT,
token=uuid.uuid4(),
channel=channel_PLN,
),
Order(
user=customer_user,
status=OrderStatus.UNCONFIRMED,
token=uuid.uuid4(),
channel=channel_PLN,
),
]
)
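# Note: the six orders above are split evenly between channel_USD and
# channel_PLN; test_orders_with_channel below relies on the three non-draft
# channel_USD orders.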
def test_orderline_query(staff_api_client, permission_manage_orders, fulfilled_order):
order = fulfilled_order
query = """
query OrdersQuery {
orders(first: 1) {
edges {
node {
lines {
thumbnail(size: 540) {
url
}
variant {
id
}
quantity
allocations {
id
quantity
warehouse {
id
}
}
unitPrice {
currency
gross {
amount
}
}
totalPrice {
currency
gross {
amount
}
}
}
}
}
}
}
"""
line = order.lines.first()
line.save()
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(query)
content = get_graphql_content(response)
order_data = content["data"]["orders"]["edges"][0]["node"]
first_order_data_line = order_data["lines"][0]
variant_id = graphene.Node.to_global_id("ProductVariant", line.variant.pk)
assert first_order_data_line["thumbnail"] is None
assert first_order_data_line["variant"]["id"] == variant_id
assert first_order_data_line["quantity"] == line.quantity
assert first_order_data_line["unitPrice"]["currency"] == line.unit_price.currency
expected_unit_price = Money(
amount=str(first_order_data_line["unitPrice"]["gross"]["amount"]),
currency="USD",
)
assert first_order_data_line["totalPrice"]["currency"] == line.unit_price.currency
assert expected_unit_price == line.unit_price.gross
expected_total_price = Money(
amount=str(first_order_data_line["totalPrice"]["gross"]["amount"]),
currency="USD",
)
assert expected_total_price == line.unit_price.gross * line.quantity
allocation = line.allocations.first()
allocation_id = graphene.Node.to_global_id("Allocation", allocation.pk)
warehouse_id = graphene.Node.to_global_id(
"Warehouse", allocation.stock.warehouse.pk
)
assert first_order_data_line["allocations"] == [
{
"id": allocation_id,
"quantity": allocation.quantity_allocated,
"warehouse": {"id": warehouse_id},
}
]
def test_order_line_with_allocations(
staff_api_client,
permission_manage_orders,
order_with_lines,
):
# given
order = order_with_lines
query = """
query OrdersQuery {
orders(first: 1) {
edges {
node {
lines {
id
allocations {
id
quantity
warehouse {
id
}
}
}
}
}
}
}
"""
staff_api_client.user.user_permissions.add(permission_manage_orders)
# when
response = staff_api_client.post_graphql(query)
# then
content = get_graphql_content(response)
lines = content["data"]["orders"]["edges"][0]["node"]["lines"]
for line in lines:
_, _id = graphene.Node.from_global_id(line["id"])
order_line = order.lines.get(pk=_id)
allocations_from_query = {
allocation["quantity"] for allocation in line["allocations"]
}
allocations_from_db = set(
order_line.allocations.values_list("quantity_allocated", flat=True)
)
assert allocations_from_query == allocations_from_db
ORDERS_QUERY = """
query OrdersQuery {
orders(first: 1) {
edges {
node {
number
canFinalize
status
channel {
slug
}
languageCodeEnum
statusDisplay
paymentStatus
paymentStatusDisplay
userEmail
isPaid
shippingPrice {
gross {
amount
}
}
shippingTaxRate
lines {
id
unitPrice{
gross{
amount
}
}
unitDiscount{
amount
}
undiscountedUnitPrice{
gross{
amount
}
}
}
discounts{
id
valueType
value
reason
amount{
amount
}
}
fulfillments {
fulfillmentOrder
}
payments{
id
}
subtotal {
net {
amount
}
}
total {
net {
amount
}
}
availableShippingMethods {
id
price {
amount
}
minimumOrderPrice {
amount
currency
}
type
}
shippingMethod{
id
}
}
}
}
}
"""
def test_order_query(
staff_api_client, permission_manage_orders, fulfilled_order, shipping_zone
):
# given
order = fulfilled_order
net = Money(amount=Decimal("10"), currency="USD")
    gross = Money(amount=net.amount * Decimal("1.23"), currency="USD").quantize()
shipping_price = TaxedMoney(net=net, gross=gross)
order.shipping_price = shipping_price
shipping_tax_rate = Decimal("0.23")
order.shipping_tax_rate = shipping_tax_rate
order.save()
staff_api_client.user.user_permissions.add(permission_manage_orders)
# when
response = staff_api_client.post_graphql(ORDERS_QUERY)
content = get_graphql_content(response)
# then
order_data = content["data"]["orders"]["edges"][0]["node"]
assert order_data["number"] == str(order.pk)
assert order_data["channel"]["slug"] == order.channel.slug
assert order_data["canFinalize"] is True
assert order_data["status"] == order.status.upper()
assert order_data["statusDisplay"] == order.get_status_display()
payment_charge_status = PaymentChargeStatusEnum.NOT_CHARGED
assert order_data["paymentStatus"] == payment_charge_status.name
assert (
order_data["paymentStatusDisplay"]
== dict(ChargeStatus.CHOICES)[payment_charge_status.value]
)
assert order_data["isPaid"] == order.is_fully_paid()
assert order_data["userEmail"] == order.user_email
assert order_data["languageCodeEnum"] == order.language_code.upper()
expected_price = Money(
amount=str(order_data["shippingPrice"]["gross"]["amount"]), currency="USD"
)
assert expected_price == shipping_price.gross
assert order_data["shippingTaxRate"] == float(shipping_tax_rate)
assert len(order_data["lines"]) == order.lines.count()
fulfillment = order.fulfillments.first().fulfillment_order
fulfillment_order = order_data["fulfillments"][0]["fulfillmentOrder"]
assert fulfillment_order == fulfillment
assert len(order_data["payments"]) == order.payments.count()
expected_methods = ShippingMethod.objects.applicable_shipping_methods(
price=order.get_subtotal().gross,
weight=order.get_total_weight(),
country_code=order.shipping_address.country.code,
channel_id=order.channel_id,
)
assert len(order_data["availableShippingMethods"]) == (expected_methods.count())
method = order_data["availableShippingMethods"][0]
expected_method = expected_methods.first()
expected_shipping_price = expected_method.channel_listings.get(
channel_id=order.channel_id
)
assert float(expected_shipping_price.price.amount) == method["price"]["amount"]
assert float(expected_shipping_price.minimum_order_price.amount) == (
method["minimumOrderPrice"]["amount"]
)
assert expected_method.type.upper() == method["type"]
def test_order_query_shipping_method_channel_listing_does_not_exist(
staff_api_client,
permission_manage_orders,
order_with_lines,
):
# given
order = order_with_lines
order.status = OrderStatus.UNFULFILLED
order.save()
shipping_method = order.shipping_method
ShippingMethodChannelListing.objects.filter(
shipping_method=shipping_method, channel=order.channel
).delete()
staff_api_client.user.user_permissions.add(permission_manage_orders)
# when
response = staff_api_client.post_graphql(ORDERS_QUERY)
content = get_graphql_content(response)
# then
order_data = content["data"]["orders"]["edges"][0]["node"]
assert order_data["shippingMethod"]["id"] == graphene.Node.to_global_id(
"ShippingMethod", order.shipping_method.id
)
def test_order_discounts_query(
staff_api_client,
permission_manage_orders,
draft_order_with_fixed_discount_order,
):
# given
order = draft_order_with_fixed_discount_order
order.status = OrderStatus.UNFULFILLED
order.save()
discount = order.discounts.get()
staff_api_client.user.user_permissions.add(permission_manage_orders)
# when
response = staff_api_client.post_graphql(ORDERS_QUERY)
content = get_graphql_content(response)
# then
order_data = content["data"]["orders"]["edges"][0]["node"]
discounts_data = order_data.get("discounts")
assert len(discounts_data) == 1
discount_data = discounts_data[0]
_, discount_id = graphene.Node.from_global_id(discount_data["id"])
assert int(discount_id) == discount.id
assert discount_data["valueType"] == discount.value_type.upper()
assert discount_data["value"] == discount.value
assert discount_data["amount"]["amount"] == discount.amount_value
assert discount_data["reason"] == discount.reason
def test_order_line_discount_query(
staff_api_client,
permission_manage_orders,
draft_order_with_fixed_discount_order,
):
# given
order = draft_order_with_fixed_discount_order
order.status = OrderStatus.UNFULFILLED
order.save()
unit_discount_value = Decimal("5.0")
line = order.lines.first()
line.unit_discount = Money(unit_discount_value, currency=order.currency)
line.unit_price -= line.unit_discount
line.save()
line_with_discount_id = graphene.Node.to_global_id("OrderLine", line.pk)
staff_api_client.user.user_permissions.add(permission_manage_orders)
# when
response = staff_api_client.post_graphql(ORDERS_QUERY)
content = get_graphql_content(response)
# then
order_data = content["data"]["orders"]["edges"][0]["node"]
lines_data = order_data.get("lines")
line_with_discount = [
line for line in lines_data if line["id"] == line_with_discount_id
][0]
unit_gross_amount = quantize_price(
Decimal(line_with_discount["unitPrice"]["gross"]["amount"]),
currency=order.currency,
)
unit_discount_amount = quantize_price(
Decimal(line_with_discount["unitDiscount"]["amount"]), currency=order.currency
)
undiscounted_unit_price = quantize_price(
Decimal(line_with_discount["undiscountedUnitPrice"]["gross"]["amount"]),
currency=order.currency,
)
expected_unit_price_gross_amount = quantize_price(
line.unit_price.gross.amount, currency=order.currency
)
expected_unit_discount_amount = quantize_price(
line.unit_discount.amount, currency=order.currency
)
expected_undiscounted_unit_price = quantize_price(
line.undiscounted_unit_price.gross.amount, currency=order.currency
)
assert unit_gross_amount == expected_unit_price_gross_amount
assert unit_discount_amount == expected_unit_discount_amount
assert undiscounted_unit_price == expected_undiscounted_unit_price
def test_order_query_in_pln_channel(
staff_api_client,
permission_manage_orders,
order_with_lines_channel_PLN,
shipping_zone,
channel_PLN,
):
shipping_zone.channels.add(channel_PLN)
order = order_with_lines_channel_PLN
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(ORDERS_QUERY)
content = get_graphql_content(response)
order_data = content["data"]["orders"]["edges"][0]["node"]
assert order_data["number"] == str(order.pk)
assert order_data["channel"]["slug"] == order.channel.slug
assert order_data["canFinalize"] is True
assert order_data["status"] == order.status.upper()
assert order_data["statusDisplay"] == order.get_status_display()
payment_charge_status = PaymentChargeStatusEnum.NOT_CHARGED
assert order_data["paymentStatus"] == payment_charge_status.name
assert (
order_data["paymentStatusDisplay"]
== dict(ChargeStatus.CHOICES)[payment_charge_status.value]
)
assert order_data["isPaid"] == order.is_fully_paid()
assert order_data["userEmail"] == order.user_email
expected_price = Money(
amount=str(order_data["shippingPrice"]["gross"]["amount"]),
currency=channel_PLN.currency_code,
)
assert expected_price == order.shipping_price.gross
assert len(order_data["lines"]) == order.lines.count()
assert len(order_data["payments"]) == order.payments.count()
expected_methods = ShippingMethod.objects.applicable_shipping_methods(
price=order.get_subtotal().gross,
weight=order.get_total_weight(),
country_code=order.shipping_address.country.code,
channel_id=order.channel_id,
)
assert len(order_data["availableShippingMethods"]) == (expected_methods.count())
method = order_data["availableShippingMethods"][0]
expected_method = expected_methods.first()
expected_shipping_price = expected_method.channel_listings.get(
channel_id=order.channel_id
)
assert float(expected_shipping_price.price.amount) == method["price"]["amount"]
assert float(expected_shipping_price.minimum_order_price.amount) == (
method["minimumOrderPrice"]["amount"]
)
assert expected_method.type.upper() == method["type"]
ORDERS_QUERY_SHIPPING_METHODS = """
query OrdersQuery {
orders(first: 1) {
edges {
node {
availableShippingMethods {
name
}
}
}
}
}
"""
def test_order_query_without_available_shipping_methods(
staff_api_client,
permission_manage_orders,
order,
shipping_method_channel_PLN,
channel_USD,
):
order.channel = channel_USD
order.shipping_method = shipping_method_channel_PLN
order.save()
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(ORDERS_QUERY_SHIPPING_METHODS)
content = get_graphql_content(response)
order_data = content["data"]["orders"]["edges"][0]["node"]
assert len(order_data["availableShippingMethods"]) == 0
@pytest.mark.parametrize("minimum_order_weight_value", [0, 2, None])
def test_order_available_shipping_methods_with_weight_based_shipping_method(
staff_api_client,
order_line,
shipping_method_weight_based,
permission_manage_orders,
minimum_order_weight_value,
):
shipping_method = shipping_method_weight_based
order = order_line.order
if minimum_order_weight_value is not None:
weight = Weight(kg=minimum_order_weight_value)
shipping_method.minimum_order_weight = weight
order.weight = weight
order.save(update_fields=["weight"])
else:
shipping_method.minimum_order_weight = minimum_order_weight_value
shipping_method.save(update_fields=["minimum_order_weight"])
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(ORDERS_QUERY_SHIPPING_METHODS)
content = get_graphql_content(response)
order_data = content["data"]["orders"]["edges"][0]["node"]
shipping_methods = [
method["name"] for method in order_data["availableShippingMethods"]
]
assert shipping_method.name in shipping_methods
def test_order_available_shipping_methods_weight_method_with_higher_minimal_weight(
staff_api_client, order_line, shipping_method_weight_based, permission_manage_orders
):
order = order_line.order
shipping_method = shipping_method_weight_based
weight_value = 5
shipping_method.minimum_order_weight = Weight(kg=weight_value)
shipping_method.save(update_fields=["minimum_order_weight"])
order.weight = Weight(kg=1)
order.save(update_fields=["weight"])
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(ORDERS_QUERY_SHIPPING_METHODS)
content = get_graphql_content(response)
order_data = content["data"]["orders"]["edges"][0]["node"]
shipping_methods = [
method["name"] for method in order_data["availableShippingMethods"]
]
assert shipping_method.name not in shipping_methods
def test_order_query_shipping_zones_with_available_shipping_methods(
staff_api_client,
permission_manage_orders,
fulfilled_order,
shipping_zone,
):
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(ORDERS_QUERY_SHIPPING_METHODS)
content = get_graphql_content(response)
order_data = content["data"]["orders"]["edges"][0]["node"]
assert len(order_data["availableShippingMethods"]) == 1
def test_order_query_shipping_zones_without_channel(
staff_api_client,
permission_manage_orders,
fulfilled_order,
shipping_zone,
channel_USD,
):
channel_USD.shipping_zones.clear()
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(ORDERS_QUERY_SHIPPING_METHODS)
content = get_graphql_content(response)
order_data = content["data"]["orders"]["edges"][0]["node"]
assert len(order_data["availableShippingMethods"]) == 0
def test_order_query_shipping_methods_excluded_postal_codes(
staff_api_client,
permission_manage_orders,
order_with_lines_channel_PLN,
channel_PLN,
):
order = order_with_lines_channel_PLN
order.shipping_method.postal_code_rules.create(start="HB3", end="HB6")
order.shipping_address.postal_code = "HB5"
order.shipping_address.save(update_fields=["postal_code"])
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(ORDERS_QUERY_SHIPPING_METHODS)
content = get_graphql_content(response)
order_data = content["data"]["orders"]["edges"][0]["node"]
assert order_data["availableShippingMethods"] == []
@pytest.mark.parametrize(
"expected_price_type, expected_price, display_gross_prices",
(("gross", 13, True), ("net", 10, False)),
)
def test_order_available_shipping_methods_query(
expected_price_type,
expected_price,
display_gross_prices,
monkeypatch,
staff_api_client,
permission_manage_orders,
fulfilled_order,
shipping_zone,
site_settings,
):
query = """
query OrdersQuery {
orders(first: 1) {
edges {
node {
availableShippingMethods {
id
price {
amount
}
type
}
}
}
}
}
"""
shipping_method = shipping_zone.shipping_methods.first()
shipping_price = shipping_method.channel_listings.get(
channel_id=fulfilled_order.channel_id
).price
taxed_price = TaxedMoney(net=Money(10, "USD"), gross=Money(13, "USD"))
apply_taxes_to_shipping_mock = Mock(return_value=taxed_price)
monkeypatch.setattr(
PluginsManager, "apply_taxes_to_shipping", apply_taxes_to_shipping_mock
)
site_settings.display_gross_prices = display_gross_prices
site_settings.save()
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(query)
content = get_graphql_content(response)
order_data = content["data"]["orders"]["edges"][0]["node"]
method = order_data["availableShippingMethods"][0]
apply_taxes_to_shipping_mock.assert_called_once_with(shipping_price, ANY)
assert expected_price == method["price"]["amount"]
def test_order_query_customer(api_client):
query = """
query OrdersQuery {
orders(first: 1) {
edges {
node {
id
}
}
}
}
"""
response = api_client.post_graphql(query)
assert_no_permission(response)
def test_order_query_gift_cards(
staff_api_client, permission_manage_orders, order_with_lines, gift_card
):
query = """
query OrderQuery($id: ID!) {
order(id: $id) {
giftCards {
displayCode
currentBalance {
amount
}
}
}
}
"""
order_with_lines.gift_cards.add(gift_card)
order_id = graphene.Node.to_global_id("Order", order_with_lines.id)
variables = {"id": order_id}
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(query, variables)
content = get_graphql_content(response)
gift_card_data = content["data"]["order"]["giftCards"][0]
assert gift_card.display_code == gift_card_data["displayCode"]
assert (
gift_card.current_balance.amount == gift_card_data["currentBalance"]["amount"]
)
def test_order_query_shows_non_draft_orders(
staff_api_client, permission_manage_orders, orders
):
query = """
query OrdersQuery {
orders(first: 10) {
edges {
node {
id
}
}
}
}
"""
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(query)
edges = get_graphql_content(response)["data"]["orders"]["edges"]
assert len(edges) == Order.objects.non_draft().count()
ORDER_CONFIRM_MUTATION = """
mutation orderConfirm($id: ID!) {
orderConfirm(id: $id) {
orderErrors {
field
code
}
order {
status
}
}
}
"""
@patch("saleor.plugins.manager.PluginsManager.notify")
@patch("saleor.payment.gateway.capture")
def test_order_confirm(
capture_mock,
mocked_notify,
staff_api_client,
order_unconfirmed,
permission_manage_orders,
payment_txn_preauth,
):
payment_txn_preauth.order = order_unconfirmed
payment_txn_preauth.save()
staff_api_client.user.user_permissions.add(permission_manage_orders)
assert not OrderEvent.objects.exists()
response = staff_api_client.post_graphql(
ORDER_CONFIRM_MUTATION,
{"id": graphene.Node.to_global_id("Order", order_unconfirmed.id)},
)
order_data = get_graphql_content(response)["data"]["orderConfirm"]["order"]
assert order_data["status"] == OrderStatus.UNFULFILLED.upper()
order_unconfirmed.refresh_from_db()
assert order_unconfirmed.status == OrderStatus.UNFULFILLED
assert OrderEvent.objects.count() == 2
assert OrderEvent.objects.filter(
order=order_unconfirmed,
user=staff_api_client.user,
type=order_events.OrderEvents.CONFIRMED,
).exists()
assert OrderEvent.objects.filter(
order=order_unconfirmed,
user=staff_api_client.user,
type=order_events.OrderEvents.PAYMENT_CAPTURED,
parameters__amount=payment_txn_preauth.get_total().amount,
).exists()
capture_mock.assert_called_once_with(payment_txn_preauth, ANY)
expected_payload = {
"order": get_default_order_payload(order_unconfirmed, ""),
"recipient_email": order_unconfirmed.user.email,
"requester_user_id": staff_api_client.user.id,
"site_name": "mirumee.com",
"domain": "mirumee.com",
}
mocked_notify.assert_called_once_with(
NotifyEventType.ORDER_CONFIRMED, expected_payload
)
def test_order_confirm_unfulfilled(staff_api_client, order, permission_manage_orders):
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(
ORDER_CONFIRM_MUTATION, {"id": graphene.Node.to_global_id("Order", order.id)}
)
content = get_graphql_content(response)["data"]["orderConfirm"]
errors = content["orderErrors"]
order.refresh_from_db()
assert order.status == OrderStatus.UNFULFILLED
assert content["order"] is None
assert len(errors) == 1
assert errors[0]["field"] == "id"
assert errors[0]["code"] == OrderErrorCode.INVALID.name
def test_order_confirm_no_products_in_order(
staff_api_client, order_unconfirmed, permission_manage_orders
):
staff_api_client.user.user_permissions.add(permission_manage_orders)
order_unconfirmed.lines.set([])
response = staff_api_client.post_graphql(
ORDER_CONFIRM_MUTATION,
{"id": graphene.Node.to_global_id("Order", order_unconfirmed.id)},
)
content = get_graphql_content(response)["data"]["orderConfirm"]
errors = content["orderErrors"]
order_unconfirmed.refresh_from_db()
assert order_unconfirmed.is_unconfirmed()
assert content["order"] is None
assert len(errors) == 1
assert errors[0]["field"] == "id"
assert errors[0]["code"] == OrderErrorCode.INVALID.name
@patch("saleor.payment.gateway.capture")
def test_order_confirm_wont_call_capture_for_non_active_payment(
capture_mock,
staff_api_client,
order_unconfirmed,
permission_manage_orders,
payment_txn_preauth,
):
payment_txn_preauth.order = order_unconfirmed
payment_txn_preauth.is_active = False
payment_txn_preauth.save()
staff_api_client.user.user_permissions.add(permission_manage_orders)
assert not OrderEvent.objects.exists()
response = staff_api_client.post_graphql(
ORDER_CONFIRM_MUTATION,
{"id": graphene.Node.to_global_id("Order", order_unconfirmed.id)},
)
order_data = get_graphql_content(response)["data"]["orderConfirm"]["order"]
assert order_data["status"] == OrderStatus.UNFULFILLED.upper()
order_unconfirmed.refresh_from_db()
assert order_unconfirmed.status == OrderStatus.UNFULFILLED
assert OrderEvent.objects.count() == 1
assert OrderEvent.objects.filter(
order=order_unconfirmed,
user=staff_api_client.user,
type=order_events.OrderEvents.CONFIRMED,
).exists()
assert not capture_mock.called
def test_orders_with_channel(
staff_api_client, permission_manage_orders, orders, channel_USD
):
query = """
query OrdersQuery($channel: String) {
orders(first: 10, channel: $channel) {
edges {
node {
id
}
}
}
}
"""
staff_api_client.user.user_permissions.add(permission_manage_orders)
variables = {"channel": channel_USD.slug}
response = staff_api_client.post_graphql(query, variables)
edges = get_graphql_content(response)["data"]["orders"]["edges"]
assert len(edges) == 3
def test_orders_without_channel(staff_api_client, permission_manage_orders, orders):
query = """
query OrdersQuery {
orders(first: 10) {
edges {
node {
id
}
}
}
}
"""
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(query)
edges = get_graphql_content(response)["data"]["orders"]["edges"]
assert len(edges) == Order.objects.non_draft().count()
def test_draft_order_query(staff_api_client, permission_manage_orders, orders):
query = """
query DraftOrdersQuery {
draftOrders(first: 10) {
edges {
node {
id
}
}
}
}
"""
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(query)
edges = get_graphql_content(response)["data"]["draftOrders"]["edges"]
assert len(edges) == Order.objects.drafts().count()
def test_nested_order_events_query(
staff_api_client,
permission_manage_orders,
fulfilled_order,
fulfillment,
staff_user,
warehouse,
):
query = """
query OrdersQuery {
orders(first: 1) {
edges {
node {
events {
date
type
user {
email
}
message
email
emailType
amount
quantity
composedId
orderNumber
fulfilledItems {
quantity
orderLine {
productName
variantName
}
}
paymentId
paymentGateway
warehouse {
name
}
}
}
}
}
}
"""
event = order_events.fulfillment_fulfilled_items_event(
order=fulfilled_order,
user=staff_user,
fulfillment_lines=fulfillment.lines.all(),
)
event.parameters.update(
{
"message": "Example note",
"email_type": order_events.OrderEventsEmails.PAYMENT,
"amount": "80.00",
"quantity": "10",
"composed_id": "10-10",
"warehouse": warehouse.pk,
}
)
event.save()
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(query)
content = get_graphql_content(response)
data = content["data"]["orders"]["edges"][0]["node"]["events"][0]
assert data["message"] == event.parameters["message"]
assert data["amount"] == float(event.parameters["amount"])
assert data["emailType"] == "PAYMENT_CONFIRMATION"
assert data["quantity"] == int(event.parameters["quantity"])
assert data["composedId"] == event.parameters["composed_id"]
assert data["user"]["email"] == staff_user.email
assert data["type"] == "FULFILLMENT_FULFILLED_ITEMS"
assert data["date"] == event.date.isoformat()
assert data["orderNumber"] == str(fulfilled_order.pk)
assert data["fulfilledItems"] == [
{
"quantity": line.quantity,
"orderLine": {
"productName": line.order_line.product_name,
"variantName": line.order_line.variant_name,
},
}
for line in fulfillment.lines.all()
]
assert data["paymentId"] is None
assert data["paymentGateway"] is None
assert data["warehouse"]["name"] == warehouse.name
def test_related_order_events_query(
staff_api_client, permission_manage_orders, order, payment_dummy, staff_user
):
query = """
query OrdersQuery {
orders(first: 2) {
edges {
node {
id
events {
relatedOrder{
id
}
}
}
}
}
}
"""
new_order = deepcopy(order)
new_order.id = None
new_order.token = None
new_order.save()
related_order_id = graphene.Node.to_global_id("Order", new_order.id)
order_replacement_created(
original_order=order, replace_order=new_order, user=staff_user
)
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(query)
content = get_graphql_content(response)
data = content["data"]["orders"]["edges"]
for order_data in data:
events_data = order_data["node"]["events"]
if order_data["node"]["id"] != related_order_id:
assert events_data[0]["relatedOrder"]["id"] == related_order_id
def test_payment_information_order_events_query(
staff_api_client, permission_manage_orders, order, payment_dummy, staff_user
):
query = """
query OrdersQuery {
orders(first: 1) {
edges {
node {
events {
type
user {
email
}
message
email
emailType
amount
quantity
composedId
orderNumber
lines {
quantity
}
paymentId
paymentGateway
}
}
}
}
}
"""
amount = order.total.gross.amount
order_events.payment_captured_event(
order=order, user=staff_user, amount=amount, payment=payment_dummy
)
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(query)
content = get_graphql_content(response)
data = content["data"]["orders"]["edges"][0]["node"]["events"][0]
assert data["message"] is None
assert Money(str(data["amount"]), "USD") == order.total.gross
assert data["emailType"] is None
assert data["quantity"] is None
assert data["composedId"] is None
assert data["lines"] is None
assert data["user"]["email"] == staff_user.email
assert data["type"] == "PAYMENT_CAPTURED"
assert data["orderNumber"] == str(order.pk)
assert data["paymentId"] == payment_dummy.token
assert data["paymentGateway"] == payment_dummy.gateway
def test_non_staff_user_cannot_see_his_order(user_api_client, order):
query = """
query OrderQuery($id: ID!) {
order(id: $id) {
number
}
}
"""
ID = graphene.Node.to_global_id("Order", order.id)
variables = {"id": ID}
response = user_api_client.post_graphql(query, variables)
assert_no_permission(response)
def test_query_order_as_app(app_api_client, permission_manage_orders, order):
query = """
query OrderQuery($id: ID!) {
order(id: $id) {
token
}
}
"""
ID = graphene.Node.to_global_id("Order", order.id)
variables = {"id": ID}
response = app_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
order_data = content["data"]["order"]
assert order_data["token"] == order.token
DRAFT_ORDER_CREATE_MUTATION = """
mutation draftCreate(
$user: ID, $discount: PositiveDecimal, $lines: [OrderLineCreateInput],
$shippingAddress: AddressInput, $shippingMethod: ID, $voucher: ID,
$customerNote: String, $channel: ID, $redirectUrl: String
) {
draftOrderCreate(
input: {user: $user, discount: $discount,
lines: $lines, shippingAddress: $shippingAddress,
shippingMethod: $shippingMethod, voucher: $voucher,
channel: $channel,
redirectUrl: $redirectUrl,
customerNote: $customerNote}) {
orderErrors {
field
code
variants
message
}
order {
discount {
amount
}
discountName
redirectUrl
lines {
productName
productSku
quantity
}
status
voucher {
code
}
customerNote
}
}
}
"""
def test_draft_order_create(
staff_api_client,
permission_manage_orders,
staff_user,
customer_user,
product_without_shipping,
shipping_method,
variant,
voucher,
channel_USD,
graphql_address_data,
):
variant_0 = variant
query = DRAFT_ORDER_CREATE_MUTATION
# Ensure no events were created yet
assert not OrderEvent.objects.exists()
user_id = graphene.Node.to_global_id("User", customer_user.id)
variant_0_id = graphene.Node.to_global_id("ProductVariant", variant_0.id)
variant_1 = product_without_shipping.variants.first()
variant_1.quantity = 2
variant_1.save()
variant_1_id = graphene.Node.to_global_id("ProductVariant", variant_1.id)
discount = "10"
customer_note = "Test note"
variant_list = [
{"variantId": variant_0_id, "quantity": 2},
{"variantId": variant_1_id, "quantity": 1},
]
shipping_address = graphql_address_data
shipping_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)
voucher_id = graphene.Node.to_global_id("Voucher", voucher.id)
channel_id = graphene.Node.to_global_id("Channel", channel_USD.id)
redirect_url = "https://www.example.com"
variables = {
"user": user_id,
"discount": discount,
"lines": variant_list,
"shippingAddress": shipping_address,
"shippingMethod": shipping_id,
"voucher": voucher_id,
"customerNote": customer_note,
"channel": channel_id,
"redirectUrl": redirect_url,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
assert not content["data"]["draftOrderCreate"]["orderErrors"]
data = content["data"]["draftOrderCreate"]["order"]
assert data["status"] == OrderStatus.DRAFT.upper()
assert data["voucher"]["code"] == voucher.code
assert data["customerNote"] == customer_note
assert data["redirectUrl"] == redirect_url
order = Order.objects.first()
assert order.user == customer_user
# billing address should be copied
assert order.billing_address.pk != customer_user.default_billing_address.pk
assert (
order.billing_address.as_data()
== customer_user.default_billing_address.as_data()
)
assert order.shipping_method == shipping_method
assert order.shipping_address.first_name == graphql_address_data["firstName"]
# Ensure the correct event was created
created_draft_event = OrderEvent.objects.get(
type=order_events.OrderEvents.DRAFT_CREATED
)
assert created_draft_event.user == staff_user
assert created_draft_event.parameters == {}
def test_draft_order_create_with_inactive_channel(
staff_api_client,
permission_manage_orders,
staff_user,
customer_user,
product_without_shipping,
shipping_method,
variant,
voucher,
channel_USD,
graphql_address_data,
):
variant_0 = variant
query = DRAFT_ORDER_CREATE_MUTATION
# Ensure no events were created yet
assert not OrderEvent.objects.exists()
user_id = graphene.Node.to_global_id("User", customer_user.id)
channel_USD.is_active = False
channel_USD.save()
variant_0_id = graphene.Node.to_global_id("ProductVariant", variant_0.id)
variant_1 = product_without_shipping.variants.first()
variant_1.quantity = 2
variant_1.save()
variant_1_id = graphene.Node.to_global_id("ProductVariant", variant_1.id)
discount = "10"
customer_note = "Test note"
variant_list = [
{"variantId": variant_0_id, "quantity": 2},
{"variantId": variant_1_id, "quantity": 1},
]
shipping_address = graphql_address_data
shipping_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)
voucher_id = graphene.Node.to_global_id("Voucher", voucher.id)
channel_id = graphene.Node.to_global_id("Channel", channel_USD.id)
variables = {
"user": user_id,
"discount": discount,
"lines": variant_list,
"shippingAddress": shipping_address,
"shippingMethod": shipping_id,
"voucher": voucher_id,
"customerNote": customer_note,
"channel": channel_id,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
assert not content["data"]["draftOrderCreate"]["orderErrors"]
data = content["data"]["draftOrderCreate"]["order"]
assert data["status"] == OrderStatus.DRAFT.upper()
assert data["voucher"]["code"] == voucher.code
assert data["customerNote"] == customer_note
order = Order.objects.first()
assert order.user == customer_user
# billing address should be copied
assert order.billing_address.pk != customer_user.default_billing_address.pk
assert (
order.billing_address.as_data()
== customer_user.default_billing_address.as_data()
)
assert order.shipping_method == shipping_method
assert order.shipping_address.first_name == graphql_address_data["firstName"]
# Ensure the correct event was created
created_draft_event = OrderEvent.objects.get(
type=order_events.OrderEvents.DRAFT_CREATED
)
assert created_draft_event.user == staff_user
assert created_draft_event.parameters == {}
def test_draft_order_create_variant_with_0_price(
staff_api_client,
permission_manage_orders,
staff_user,
customer_user,
product_without_shipping,
shipping_method,
variant,
voucher,
graphql_address_data,
channel_USD,
):
variant_0 = variant
query = DRAFT_ORDER_CREATE_MUTATION
# Ensure no events were created yet
assert not OrderEvent.objects.exists()
user_id = graphene.Node.to_global_id("User", customer_user.id)
variant_0_id = graphene.Node.to_global_id("ProductVariant", variant_0.id)
channel_id = graphene.Node.to_global_id("Channel", channel_USD.id)
variant_1 = product_without_shipping.variants.first()
variant_1.quantity = 2
variant.price = Money(0, "USD")
variant_1.save()
variant_1_id = graphene.Node.to_global_id("ProductVariant", variant_1.id)
variant_list = [
{"variantId": variant_0_id, "quantity": 2},
{"variantId": variant_1_id, "quantity": 1},
]
shipping_address = graphql_address_data
shipping_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)
variables = {
"user": user_id,
"lines": variant_list,
"shippingAddress": shipping_address,
"shippingMethod": shipping_id,
"channel": channel_id,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
assert not content["data"]["draftOrderCreate"]["orderErrors"]
data = content["data"]["draftOrderCreate"]["order"]
assert data["status"] == OrderStatus.DRAFT.upper()
order = Order.objects.first()
assert order.user == customer_user
# billing address should be copied
assert order.billing_address.pk != customer_user.default_billing_address.pk
assert (
order.billing_address.as_data()
== customer_user.default_billing_address.as_data()
)
assert order.shipping_method == shipping_method
assert order.shipping_address.first_name == graphql_address_data["firstName"]
# Ensure the correct event was created
created_draft_event = OrderEvent.objects.get(
type=order_events.OrderEvents.DRAFT_CREATED
)
assert created_draft_event.user == staff_user
assert created_draft_event.parameters == {}
@patch("saleor.graphql.order.mutations.draft_orders.add_variant_to_order")
def test_draft_order_create_tax_error(
add_variant_to_order_mock,
staff_api_client,
permission_manage_orders,
staff_user,
customer_user,
product_without_shipping,
shipping_method,
variant,
voucher,
graphql_address_data,
channel_USD,
):
variant_0 = variant
err_msg = "Test error"
add_variant_to_order_mock.side_effect = TaxError(err_msg)
query = DRAFT_ORDER_CREATE_MUTATION
# Ensure no events were created yet
assert not OrderEvent.objects.exists()
channel_id = graphene.Node.to_global_id("Channel", channel_USD.id)
user_id = graphene.Node.to_global_id("User", customer_user.id)
variant_0_id = graphene.Node.to_global_id("ProductVariant", variant_0.id)
variant_1 = product_without_shipping.variants.first()
variant_1.quantity = 2
variant_1.save()
variant_1_id = graphene.Node.to_global_id("ProductVariant", variant_1.id)
discount = "10"
customer_note = "Test note"
variant_list = [
{"variantId": variant_0_id, "quantity": 2},
{"variantId": variant_1_id, "quantity": 1},
]
shipping_address = graphql_address_data
shipping_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)
voucher_id = graphene.Node.to_global_id("Voucher", voucher.id)
variables = {
"user": user_id,
"discount": discount,
"lines": variant_list,
"shippingAddress": shipping_address,
"shippingMethod": shipping_id,
"voucher": voucher_id,
"customerNote": customer_note,
"channel": channel_id,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["draftOrderCreate"]
errors = data["orderErrors"]
assert not data["order"]
assert len(errors) == 1
assert errors[0]["code"] == OrderErrorCode.TAX_ERROR.name
assert errors[0]["message"] == f"Unable to calculate taxes - {err_msg}"
order_count = Order.objects.all().count()
assert order_count == 0
def test_draft_order_create_with_voucher_not_assigned_to_order_channel(
staff_api_client,
permission_manage_orders,
staff_user,
customer_user,
shipping_method,
variant,
voucher,
channel_USD,
graphql_address_data,
):
query = DRAFT_ORDER_CREATE_MUTATION
channel_id = graphene.Node.to_global_id("Channel", channel_USD.id)
user_id = graphene.Node.to_global_id("User", customer_user.id)
variant_id = graphene.Node.to_global_id("ProductVariant", variant.id)
discount = "10"
customer_note = "Test note"
variant_list = [
{"variantId": variant_id, "quantity": 2},
]
shipping_address = graphql_address_data
shipping_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)
voucher_id = graphene.Node.to_global_id("Voucher", voucher.id)
voucher.channel_listings.all().delete()
variables = {
"user": user_id,
"discount": discount,
"lines": variant_list,
"shippingAddress": shipping_address,
"shippingMethod": shipping_id,
"voucher": voucher_id,
"customerNote": customer_note,
"channel": channel_id,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
error = content["data"]["draftOrderCreate"]["orderErrors"][0]
assert error["code"] == OrderErrorCode.NOT_AVAILABLE_IN_CHANNEL.name
assert error["field"] == "voucher"
def test_draft_order_create_with_product_and_variant_not_assigned_to_order_channel(
staff_api_client,
permission_manage_orders,
customer_user,
shipping_method,
variant,
channel_USD,
graphql_address_data,
):
query = DRAFT_ORDER_CREATE_MUTATION
user_id = graphene.Node.to_global_id("User", customer_user.id)
variant_id = graphene.Node.to_global_id("ProductVariant", variant.id)
discount = "10"
customer_note = "Test note"
variant_list = [
{"variantId": variant_id, "quantity": 2},
]
shipping_address = graphql_address_data
shipping_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)
channel_id = graphene.Node.to_global_id("Channel", channel_USD.id)
variant.product.channel_listings.all().delete()
variant.channel_listings.all().delete()
variables = {
"user": user_id,
"discount": discount,
"lines": variant_list,
"shippingAddress": shipping_address,
"shippingMethod": shipping_id,
"customerNote": customer_note,
"channel": channel_id,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
error = content["data"]["draftOrderCreate"]["orderErrors"][0]
assert error["code"] == OrderErrorCode.PRODUCT_NOT_PUBLISHED.name
assert error["field"] == "lines"
assert error["variants"] == [variant_id]
def test_draft_order_create_with_variant_not_assigned_to_order_channel(
staff_api_client,
permission_manage_orders,
customer_user,
shipping_method,
variant,
channel_USD,
graphql_address_data,
):
query = DRAFT_ORDER_CREATE_MUTATION
user_id = graphene.Node.to_global_id("User", customer_user.id)
variant_id = graphene.Node.to_global_id("ProductVariant", variant.id)
discount = "10"
customer_note = "Test note"
variant_list = [
{"variantId": variant_id, "quantity": 2},
]
shipping_address = graphql_address_data
shipping_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)
channel_id = graphene.Node.to_global_id("Channel", channel_USD.id)
variant.channel_listings.all().delete()
variables = {
"user": user_id,
"discount": discount,
"lines": variant_list,
"shippingAddress": shipping_address,
"shippingMethod": shipping_id,
"customerNote": customer_note,
"channel": channel_id,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
error = content["data"]["draftOrderCreate"]["orderErrors"][0]
assert error["code"] == OrderErrorCode.NOT_AVAILABLE_IN_CHANNEL.name
assert error["field"] == "lines"
assert error["variants"] == [variant_id]
def test_draft_order_create_without_channel(
staff_api_client,
permission_manage_orders,
staff_user,
customer_user,
product_without_shipping,
shipping_method,
variant,
voucher,
graphql_address_data,
):
variant_0 = variant
query = DRAFT_ORDER_CREATE_MUTATION
user_id = graphene.Node.to_global_id("User", customer_user.id)
variant_0_id = graphene.Node.to_global_id("ProductVariant", variant_0.id)
variant_1 = product_without_shipping.variants.first()
variant_1.quantity = 2
variant_1.save()
variant_1_id = graphene.Node.to_global_id("ProductVariant", variant_1.id)
variant_list = [
{"variantId": variant_0_id, "quantity": 2},
{"variantId": variant_1_id, "quantity": 1},
]
variables = {
"user": user_id,
"lines": variant_list,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
error = content["data"]["draftOrderCreate"]["orderErrors"][0]
assert error["code"] == OrderErrorCode.REQUIRED.name
assert error["field"] == "channel"
def test_draft_order_create_with_negative_quantity_line(
staff_api_client,
permission_manage_orders,
staff_user,
customer_user,
product_without_shipping,
shipping_method,
channel_USD,
variant,
voucher,
graphql_address_data,
):
variant_0 = variant
query = DRAFT_ORDER_CREATE_MUTATION
user_id = graphene.Node.to_global_id("User", customer_user.id)
variant_0_id = graphene.Node.to_global_id("ProductVariant", variant_0.id)
variant_1 = product_without_shipping.variants.first()
variant_1.quantity = 2
variant_1.save()
channel_id = graphene.Node.to_global_id("Channel", channel_USD.id)
variant_1_id = graphene.Node.to_global_id("ProductVariant", variant_1.id)
variant_list = [
{"variantId": variant_0_id, "quantity": -2},
{"variantId": variant_1_id, "quantity": 1},
]
variables = {
"user": user_id,
"lines": variant_list,
"channel": channel_id,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
error = content["data"]["draftOrderCreate"]["orderErrors"][0]
assert error["code"] == OrderErrorCode.ZERO_QUANTITY.name
assert error["field"] == "quantity"
def test_draft_order_create_with_channel_with_unpublished_product(
staff_api_client,
permission_manage_orders,
staff_user,
customer_user,
product_without_shipping,
shipping_method,
variant,
voucher,
graphql_address_data,
channel_USD,
):
variant_0 = variant
query = DRAFT_ORDER_CREATE_MUTATION
# Ensure no events were created yet
assert not OrderEvent.objects.exists()
user_id = graphene.Node.to_global_id("User", customer_user.id)
variant_0_id = graphene.Node.to_global_id("ProductVariant", variant_0.id)
variant_1 = product_without_shipping.variants.first()
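    # Unpublish the second variant's product in the channel to trigger the error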
channel_listing = variant_1.product.channel_listings.get()
channel_listing.is_published = False
channel_listing.save()
variant_1.quantity = 2
variant_1.save()
variant_1_id = graphene.Node.to_global_id("ProductVariant", variant_1.id)
discount = "10"
customer_note = "Test note"
variant_list = [
{"variantId": variant_0_id, "quantity": 2},
{"variantId": variant_1_id, "quantity": 1},
]
shipping_address = graphql_address_data
shipping_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)
channel_id = graphene.Node.to_global_id("Channel", channel_USD.id)
voucher_id = graphene.Node.to_global_id("Voucher", voucher.id)
variables = {
"user": user_id,
"discount": discount,
"channel": channel_id,
"lines": variant_list,
"shippingAddress": shipping_address,
"shippingMethod": shipping_id,
"voucher": voucher_id,
"customerNote": customer_note,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
error = content["data"]["draftOrderCreate"]["orderErrors"][0]
assert error["field"] == "lines"
assert error["code"] == OrderErrorCode.PRODUCT_NOT_PUBLISHED.name
assert error["variants"] == [variant_1_id]
def test_draft_order_create_with_channel_with_unpublished_product_by_date(
staff_api_client,
permission_manage_orders,
staff_user,
customer_user,
product_without_shipping,
shipping_method,
variant,
voucher,
graphql_address_data,
channel_USD,
):
variant_0 = variant
query = DRAFT_ORDER_CREATE_MUTATION
# Ensure no events were created yet
assert not OrderEvent.objects.exists()
next_day = date.today() + timedelta(days=1)
user_id = graphene.Node.to_global_id("User", customer_user.id)
variant_0_id = graphene.Node.to_global_id("ProductVariant", variant_0.id)
variant_1 = product_without_shipping.variants.first()
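    # A future publication date means the product is not yet published in the channel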
channel_listing = variant_1.product.channel_listings.get()
channel_listing.publication_date = next_day
channel_listing.save()
variant_1.quantity = 2
variant_1.save()
variant_1_id = graphene.Node.to_global_id("ProductVariant", variant_1.id)
discount = "10"
customer_note = "Test note"
variant_list = [
{"variantId": variant_0_id, "quantity": 2},
{"variantId": variant_1_id, "quantity": 1},
]
shipping_address = graphql_address_data
shipping_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)
channel_id = graphene.Node.to_global_id("Channel", channel_USD.id)
voucher_id = graphene.Node.to_global_id("Voucher", voucher.id)
variables = {
"user": user_id,
"discount": discount,
"channel": channel_id,
"lines": variant_list,
"shippingAddress": shipping_address,
"shippingMethod": shipping_id,
"voucher": voucher_id,
"customerNote": customer_note,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
error = content["data"]["draftOrderCreate"]["orderErrors"][0]
assert error["field"] == "lines"
assert error["code"] == "PRODUCT_NOT_PUBLISHED"
assert error["variants"] == [variant_1_id]
def test_draft_order_create_with_channel(
staff_api_client,
permission_manage_orders,
staff_user,
customer_user,
product_without_shipping,
shipping_method,
variant,
voucher,
graphql_address_data,
channel_USD,
):
variant_0 = variant
query = DRAFT_ORDER_CREATE_MUTATION
# Ensure no events were created yet
assert not OrderEvent.objects.exists()
user_id = graphene.Node.to_global_id("User", customer_user.id)
variant_0_id = graphene.Node.to_global_id("ProductVariant", variant_0.id)
variant_1 = product_without_shipping.variants.first()
variant_1.quantity = 2
variant_1.save()
variant_1_id = graphene.Node.to_global_id("ProductVariant", variant_1.id)
discount = "10"
customer_note = "Test note"
variant_list = [
{"variantId": variant_0_id, "quantity": 2},
{"variantId": variant_1_id, "quantity": 1},
]
shipping_address = graphql_address_data
shipping_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)
channel_id = graphene.Node.to_global_id("Channel", channel_USD.id)
voucher_id = graphene.Node.to_global_id("Voucher", voucher.id)
variables = {
"user": user_id,
"discount": discount,
"channel": channel_id,
"lines": variant_list,
"shippingAddress": shipping_address,
"shippingMethod": shipping_id,
"voucher": voucher_id,
"customerNote": customer_note,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
assert not content["data"]["draftOrderCreate"]["orderErrors"]
data = content["data"]["draftOrderCreate"]["order"]
assert data["status"] == OrderStatus.DRAFT.upper()
assert data["voucher"]["code"] == voucher.code
assert data["customerNote"] == customer_note
order = Order.objects.first()
assert order.user == customer_user
assert order.channel.id == channel_USD.id
# billing address should be copied
assert order.billing_address.pk != customer_user.default_billing_address.pk
assert (
order.billing_address.as_data()
== customer_user.default_billing_address.as_data()
)
assert order.shipping_method == shipping_method
assert order.shipping_address.first_name == graphql_address_data["firstName"]
# Ensure the correct event was created
created_draft_event = OrderEvent.objects.get(
type=order_events.OrderEvents.DRAFT_CREATED
)
assert created_draft_event.user == staff_user
assert created_draft_event.parameters == {}
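
# Mutation used by the draft-order update tests below; exercises the voucher,
# customer note and channel inputs of draftOrderUpdate.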
DRAFT_UPDATE_QUERY = """
mutation draftUpdate(
$id: ID!,
$voucher: ID,
$channel: ID,
$customerNote: String
) {
draftOrderUpdate(
id: $id,
input: {
voucher: $voucher,
                customerNote: $customerNote,
                channel: $channel
}) {
orderErrors {
field
code
message
}
order {
userEmail
channel {
id
}
}
}
}
"""
def test_draft_order_update_existing_channel_id(
staff_api_client, permission_manage_orders, order_with_lines, channel_PLN
):
order = order_with_lines
order.status = OrderStatus.DRAFT
order.save()
query = DRAFT_UPDATE_QUERY
order_id = graphene.Node.to_global_id("Order", order.id)
channel_id = graphene.Node.to_global_id("Channel", channel_PLN.id)
variables = {
"id": order_id,
"channel": channel_id,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
error = content["data"]["draftOrderUpdate"]["orderErrors"][0]
assert error["code"] == OrderErrorCode.NOT_EDITABLE.name
assert error["field"] == "channel"
def test_draft_order_update_voucher_not_available(
staff_api_client, permission_manage_orders, order_with_lines, voucher
):
order = order_with_lines
order.status = OrderStatus.DRAFT
order.save()
assert order.voucher is None
query = DRAFT_UPDATE_QUERY
order_id = graphene.Node.to_global_id("Order", order.id)
voucher_id = graphene.Node.to_global_id("Voucher", voucher.id)
voucher.channel_listings.all().delete()
variables = {
"id": order_id,
"voucher": voucher_id,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
error = content["data"]["draftOrderUpdate"]["orderErrors"][0]
assert error["code"] == OrderErrorCode.NOT_AVAILABLE_IN_CHANNEL.name
assert error["field"] == "voucher"
DRAFT_ORDER_UPDATE_MUTATION = """
mutation draftUpdate(
$id: ID!, $voucher: ID!, $customerNote: String, $shippingAddress: AddressInput
) {
draftOrderUpdate(id: $id,
input: {
voucher: $voucher,
customerNote: $customerNote,
shippingAddress: $shippingAddress,
}) {
orderErrors {
field
message
code
}
order {
userEmail
}
}
}
"""
def test_draft_order_update(
staff_api_client, permission_manage_orders, draft_order, voucher
):
order = draft_order
assert not order.voucher
assert not order.customer_note
query = DRAFT_ORDER_UPDATE_MUTATION
order_id = graphene.Node.to_global_id("Order", order.id)
voucher_id = graphene.Node.to_global_id("Voucher", voucher.id)
customer_note = "Test customer note"
variables = {
"id": order_id,
"voucher": voucher_id,
"customerNote": customer_note,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["draftOrderUpdate"]
assert not data["orderErrors"]
order.refresh_from_db()
assert order.voucher
assert order.customer_note == customer_note
def test_draft_order_update_with_non_draft_order(
staff_api_client, permission_manage_orders, order_with_lines, voucher
):
order = order_with_lines
query = DRAFT_ORDER_UPDATE_MUTATION
order_id = graphene.Node.to_global_id("Order", order.id)
voucher_id = graphene.Node.to_global_id("Voucher", voucher.id)
customer_note = "Test customer note"
variables = {"id": order_id, "voucher": voucher_id, "customerNote": customer_note}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
error = content["data"]["draftOrderUpdate"]["orderErrors"][0]
assert error["field"] == "id"
assert error["code"] == OrderErrorCode.INVALID.name
@patch("saleor.graphql.order.mutations.draft_orders.update_order_prices")
def test_draft_order_update_tax_error(
update_order_prices_mock,
staff_api_client,
permission_manage_orders,
draft_order,
voucher,
graphql_address_data,
):
err_msg = "Test error"
update_order_prices_mock.side_effect = TaxError(err_msg)
order = draft_order
assert not order.voucher
assert not order.customer_note
query = DRAFT_ORDER_UPDATE_MUTATION
order_id = graphene.Node.to_global_id("Order", order.id)
voucher_id = graphene.Node.to_global_id("Voucher", voucher.id)
customer_note = "Test customer note"
variables = {
"id": order_id,
"voucher": voucher_id,
"customerNote": customer_note,
"shippingAddress": graphql_address_data,
}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["draftOrderUpdate"]
errors = data["orderErrors"]
assert len(errors) == 1
assert errors[0]["code"] == OrderErrorCode.TAX_ERROR.name
assert errors[0]["message"] == f"Unable to calculate taxes - {err_msg}"
order.refresh_from_db()
assert not order.voucher
assert not order.customer_note
def test_draft_order_update_doing_nothing_generates_no_events(
staff_api_client, permission_manage_orders, order_with_lines
):
assert not OrderEvent.objects.exists()
query = """
mutation draftUpdate($id: ID!) {
draftOrderUpdate(id: $id, input: {}) {
errors {
field
message
}
}
}
"""
order_id = graphene.Node.to_global_id("Order", order_with_lines.id)
response = staff_api_client.post_graphql(
query, {"id": order_id}, permissions=[permission_manage_orders]
)
get_graphql_content(response)
    # Ensure no event was created
assert not OrderEvent.objects.exists()
def test_draft_order_delete(
staff_api_client, permission_manage_orders, order_with_lines
):
order = order_with_lines
query = """
mutation draftDelete($id: ID!) {
draftOrderDelete(id: $id) {
order {
id
}
}
}
"""
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
with pytest.raises(order._meta.model.DoesNotExist):
order.refresh_from_db()
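
# Query fetching the canFinalize flag of a single order.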
ORDER_CAN_FINALIZE_QUERY = """
query OrderQuery($id: ID!){
order(id: $id){
canFinalize
}
}
"""
def test_can_finalize_order(staff_api_client, permission_manage_orders, draft_order):
order_id = graphene.Node.to_global_id("Order", draft_order.id)
variables = {"id": order_id}
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(ORDER_CAN_FINALIZE_QUERY, variables)
content = get_graphql_content(response)
assert content["data"]["order"]["canFinalize"] is True
def test_can_finalize_order_invalid_shipping_method_set(
staff_api_client, permission_manage_orders, draft_order
):
order_id = graphene.Node.to_global_id("Order", draft_order.id)
draft_order.channel.shipping_zones.clear()
variables = {"id": order_id}
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(ORDER_CAN_FINALIZE_QUERY, variables)
content = get_graphql_content(response)
assert content["data"]["order"]["canFinalize"] is False
def test_can_finalize_order_no_order_lines(
staff_api_client, permission_manage_orders, order
):
order.status = OrderStatus.DRAFT
order.save(update_fields=["status"])
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(ORDER_CAN_FINALIZE_QUERY, variables)
content = get_graphql_content(response)
assert content["data"]["order"]["canFinalize"] is False
def test_can_finalize_order_product_unavailable_for_purchase(
staff_api_client, permission_manage_orders, draft_order
):
# given
order = draft_order
order.status = OrderStatus.DRAFT
order.save(update_fields=["status"])
product = order.lines.first().variant.product
product.channel_listings.update(available_for_purchase=None)
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
staff_api_client.user.user_permissions.add(permission_manage_orders)
# when
response = staff_api_client.post_graphql(ORDER_CAN_FINALIZE_QUERY, variables)
# then
content = get_graphql_content(response)
assert content["data"]["order"]["canFinalize"] is False
def test_can_finalize_order_product_available_for_purchase_from_tomorrow(
staff_api_client, permission_manage_orders, draft_order
):
# given
order = draft_order
order.status = OrderStatus.DRAFT
order.save(update_fields=["status"])
product = order.lines.first().variant.product
product.channel_listings.update(
available_for_purchase=date.today() + timedelta(days=1)
)
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
staff_api_client.user.user_permissions.add(permission_manage_orders)
# when
response = staff_api_client.post_graphql(ORDER_CAN_FINALIZE_QUERY, variables)
# then
content = get_graphql_content(response)
assert content["data"]["order"]["canFinalize"] is False
def test_validate_draft_order(draft_order):
# should not raise any errors
assert validate_draft_order(draft_order, "US") is None
def test_validate_draft_order_wrong_shipping(draft_order):
order = draft_order
shipping_zone = order.shipping_method.shipping_zone
shipping_zone.countries = ["DE"]
shipping_zone.save()
assert order.shipping_address.country.code not in shipping_zone.countries
with pytest.raises(ValidationError) as e:
validate_draft_order(order, "US")
msg = "Shipping method is not valid for chosen shipping address"
assert e.value.error_dict["shipping"][0].message == msg
def test_validate_draft_order_no_order_lines(order, shipping_method):
order.shipping_method = shipping_method
with pytest.raises(ValidationError) as e:
validate_draft_order(order, "US")
msg = "Could not create order without any products."
assert e.value.error_dict["lines"][0].message == msg
def test_validate_draft_order_non_existing_variant(draft_order):
order = draft_order
line = order.lines.first()
variant = line.variant
variant.delete()
line.refresh_from_db()
assert line.variant is None
with pytest.raises(ValidationError) as e:
validate_draft_order(order, "US")
msg = "Could not create orders with non-existing products."
assert e.value.error_dict["lines"][0].message == msg
def test_validate_draft_order_with_unpublished_product(draft_order):
order = draft_order
line = order.lines.first()
variant = line.variant
product_channel_listing = variant.product.channel_listings.get()
product_channel_listing.is_published = False
product_channel_listing.save(update_fields=["is_published"])
line.refresh_from_db()
with pytest.raises(ValidationError) as e:
validate_draft_order(order, "US")
msg = "Can't finalize draft with unpublished product."
error = e.value.error_dict["lines"][0]
assert error.message == msg
assert error.code == OrderErrorCode.PRODUCT_NOT_PUBLISHED
def test_validate_draft_order_with_unavailable_for_purchase_product(draft_order):
order = draft_order
line = order.lines.first()
variant = line.variant
variant.product.channel_listings.update(available_for_purchase=None)
line.refresh_from_db()
with pytest.raises(ValidationError) as e:
validate_draft_order(order, "US")
msg = "Can't finalize draft with product unavailable for purchase."
error = e.value.error_dict["lines"][0]
assert error.message == msg
assert error.code == OrderErrorCode.PRODUCT_UNAVAILABLE_FOR_PURCHASE
def test_validate_draft_order_with_product_available_for_purchase_in_future(
draft_order,
):
order = draft_order
line = order.lines.first()
variant = line.variant
variant.product.channel_listings.update(
available_for_purchase=date.today() + timedelta(days=2)
)
line.refresh_from_db()
with pytest.raises(ValidationError) as e:
validate_draft_order(order, "US")
msg = "Can't finalize draft with product unavailable for purchase."
error = e.value.error_dict["lines"][0]
assert error.message == msg
assert error.code == OrderErrorCode.PRODUCT_UNAVAILABLE_FOR_PURCHASE
def test_validate_draft_order_out_of_stock_variant(draft_order):
order = draft_order
line = order.lines.first()
variant = line.variant
stock = variant.stocks.get()
stock.quantity = 0
stock.save(update_fields=["quantity"])
with pytest.raises(ValidationError) as e:
validate_draft_order(order, "US")
msg = "Insufficient product stock: SKU_AA"
assert e.value.error_dict["lines"][0].message == msg
def test_validate_draft_order_no_shipping_address(draft_order):
order = draft_order
order.shipping_address = None
with pytest.raises(ValidationError) as e:
validate_draft_order(order, "US")
error = e.value.error_dict["order"][0]
assert error.message == "Can't finalize draft with no shipping address."
assert error.code.value == OrderErrorCode.ORDER_NO_SHIPPING_ADDRESS.value
def test_validate_draft_order_no_billing_address(draft_order):
order = draft_order
order.billing_address = None
with pytest.raises(ValidationError) as e:
validate_draft_order(order, "US")
error = e.value.error_dict["order"][0]
assert error.message == "Can't finalize draft with no billing address."
assert error.code.value == OrderErrorCode.BILLING_ADDRESS_NOT_SET.value
def test_validate_draft_order_no_shipping_method(draft_order):
order = draft_order
order.shipping_method = None
with pytest.raises(ValidationError) as e:
validate_draft_order(order, "US")
error = e.value.error_dict["shipping"][0]
assert error.message == "Shipping method is required."
assert error.code.value == OrderErrorCode.SHIPPING_METHOD_REQUIRED.value
def test_validate_draft_order_no_shipping_method_shipping_not_required(draft_order):
order = draft_order
order.shipping_method = None
required_mock = Mock(return_value=False)
order.is_shipping_required = required_mock
assert validate_draft_order(order, "US") is None
def test_validate_draft_order_no_shipping_address_no_method_shipping_not_required(
draft_order,
):
order = draft_order
order.shipping_method = None
order.shipping_address = None
required_mock = Mock(return_value=False)
order.is_shipping_required = required_mock
assert validate_draft_order(order, "US") is None
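
# Mutation completing a draft order; returns the new status and any order errors.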
DRAFT_ORDER_COMPLETE_MUTATION = """
mutation draftComplete($id: ID!) {
draftOrderComplete(id: $id) {
orderErrors {
field
code
variants
}
order {
status
}
}
}
"""
def test_draft_order_complete(
staff_api_client,
permission_manage_orders,
staff_user,
draft_order,
):
order = draft_order
# Ensure no events were created
assert not OrderEvent.objects.exists()
    # Ensure no allocations were created
assert not Allocation.objects.filter(order_line__order=order).exists()
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
response = staff_api_client.post_graphql(
DRAFT_ORDER_COMPLETE_MUTATION, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["draftOrderComplete"]["order"]
order.refresh_from_db()
assert data["status"] == order.status.upper()
for line in order.lines.all():
allocation = line.allocations.get()
assert allocation.quantity_allocated == line.quantity_unfulfilled
    # Ensure there are only 2 events with correct types
event_params = {
"user": staff_user,
"type__in": [
order_events.OrderEvents.PLACED_FROM_DRAFT,
order_events.OrderEvents.CONFIRMED,
],
"parameters": {},
}
matching_events = OrderEvent.objects.filter(**event_params)
assert matching_events.count() == 2
assert matching_events[0].type != matching_events[1].type
assert not OrderEvent.objects.exclude(**event_params).exists()
def test_draft_order_complete_with_inactive_channel(
staff_api_client,
permission_manage_orders,
staff_user,
draft_order,
):
order = draft_order
channel = order.channel
channel.is_active = False
channel.save()
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
response = staff_api_client.post_graphql(
DRAFT_ORDER_COMPLETE_MUTATION, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["draftOrderComplete"]
assert data["orderErrors"][0]["code"] == OrderErrorCode.CHANNEL_INACTIVE.name
assert data["orderErrors"][0]["field"] == "channel"
def test_draft_order_complete_with_unavailable_variant(
staff_api_client,
permission_manage_orders,
staff_user,
draft_order,
):
order = draft_order
variant = order.lines.first().variant
variant.channel_listings.filter(channel=order.channel).delete()
order_id = graphene.Node.to_global_id("Order", order.id)
variant_id = graphene.Node.to_global_id("ProductVariant", variant.id)
variables = {"id": order_id}
response = staff_api_client.post_graphql(
DRAFT_ORDER_COMPLETE_MUTATION, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["draftOrderComplete"]
assert (
data["orderErrors"][0]["code"] == OrderErrorCode.NOT_AVAILABLE_IN_CHANNEL.name
)
assert data["orderErrors"][0]["field"] == "lines"
assert data["orderErrors"][0]["variants"] == [variant_id]
def test_draft_order_complete_channel_without_shipping_zones(
staff_api_client,
permission_manage_orders,
staff_user,
draft_order,
):
order = draft_order
order.channel.shipping_zones.clear()
# Ensure no events were created
assert not OrderEvent.objects.exists()
    # Ensure no allocations were created
assert not Allocation.objects.filter(order_line__order=order).exists()
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
response = staff_api_client.post_graphql(
DRAFT_ORDER_COMPLETE_MUTATION, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["draftOrderComplete"]
assert len(data["orderErrors"]) == 1
assert (
data["orderErrors"][0]["code"]
== OrderErrorCode.SHIPPING_METHOD_NOT_APPLICABLE.name
)
assert data["orderErrors"][0]["field"] == "shipping"
def test_draft_order_complete_product_without_inventory_tracking(
staff_api_client,
shipping_method,
permission_manage_orders,
staff_user,
draft_order_without_inventory_tracking,
):
order = draft_order_without_inventory_tracking
order.shipping_method = shipping_method
order.save()
# Ensure no events were created
assert not OrderEvent.objects.exists()
    # Ensure no allocations were created
assert not Allocation.objects.filter(order_line__order=order).exists()
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
response = staff_api_client.post_graphql(
DRAFT_ORDER_COMPLETE_MUTATION, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["draftOrderComplete"]["order"]
assert not content["data"]["draftOrderComplete"]["orderErrors"]
order.refresh_from_db()
assert data["status"] == order.status.upper()
assert not Allocation.objects.filter(order_line__order=order).exists()
    # Ensure there are only 2 events with correct types
event_params = {
"user": staff_user,
"type__in": [
order_events.OrderEvents.PLACED_FROM_DRAFT,
order_events.OrderEvents.CONFIRMED,
],
"parameters": {},
}
matching_events = OrderEvent.objects.filter(**event_params)
assert matching_events.count() == 2
assert matching_events[0].type != matching_events[1].type
assert not OrderEvent.objects.exclude(**event_params).exists()
def test_draft_order_complete_not_available_shipping_method(
staff_api_client,
permission_manage_orders,
staff_user,
draft_order,
):
# given
order = draft_order
order.channel.shipping_zones.clear()
# Ensure no events were created
assert not OrderEvent.objects.exists()
    # Ensure no allocations were created
assert not Allocation.objects.filter(order_line__order=order).exists()
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
# when
response = staff_api_client.post_graphql(
DRAFT_ORDER_COMPLETE_MUTATION, variables, permissions=[permission_manage_orders]
)
# then
    content = get_graphql_content(response)
    data = content["data"]["draftOrderComplete"]
assert len(data["orderErrors"]) == 1
assert (
data["orderErrors"][0]["code"]
== OrderErrorCode.SHIPPING_METHOD_NOT_APPLICABLE.name
)
assert data["orderErrors"][0]["field"] == "shipping"
def test_draft_order_complete_out_of_stock_variant(
staff_api_client, permission_manage_orders, staff_user, draft_order
):
order = draft_order
# Ensure no events were created
assert not OrderEvent.objects.exists()
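    # Raise the first line's quantity above the available stock
    # to force an insufficient stock error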
line_1, _ = order.lines.order_by("-quantity").all()
stock_1 = Stock.objects.get(product_variant=line_1.variant)
line_1.quantity = get_available_quantity_for_stock(stock_1) + 1
line_1.save(update_fields=["quantity"])
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
response = staff_api_client.post_graphql(
DRAFT_ORDER_COMPLETE_MUTATION, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
error = content["data"]["draftOrderComplete"]["orderErrors"][0]
order.refresh_from_db()
assert order.status == OrderStatus.DRAFT
assert error["field"] == "lines"
assert error["code"] == OrderErrorCode.INSUFFICIENT_STOCK.name
def test_draft_order_complete_existing_user_email_updates_user_field(
staff_api_client, draft_order, customer_user, permission_manage_orders
):
order = draft_order
order.user_email = customer_user.email
order.user = None
order.save()
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
response = staff_api_client.post_graphql(
DRAFT_ORDER_COMPLETE_MUTATION, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
assert "errors" not in content
order.refresh_from_db()
assert order.user == customer_user
def test_draft_order_complete_anonymous_user_email_sets_user_field_null(
staff_api_client, draft_order, permission_manage_orders
):
order = draft_order
order.user_email = "anonymous@example.com"
order.user = None
order.save()
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
response = staff_api_client.post_graphql(
DRAFT_ORDER_COMPLETE_MUTATION, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
assert "errors" not in content
order.refresh_from_db()
assert order.user is None
def test_draft_order_complete_anonymous_user_no_email(
staff_api_client, draft_order, permission_manage_orders
):
order = draft_order
order.user_email = ""
order.user = None
order.save()
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
response = staff_api_client.post_graphql(
DRAFT_ORDER_COMPLETE_MUTATION, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["draftOrderComplete"]["order"]
assert data["status"] == OrderStatus.UNFULFILLED.upper()
def test_draft_order_complete_drops_shipping_address(
staff_api_client,
permission_manage_orders,
staff_user,
draft_order,
address,
):
order = draft_order
order.shipping_address = address.get_copy()
order.billing_address = address.get_copy()
order.save()
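    # With no line requiring shipping, completing the draft should drop
    # the shipping address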
order.lines.update(is_shipping_required=False)
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
response = staff_api_client.post_graphql(
DRAFT_ORDER_COMPLETE_MUTATION, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["draftOrderComplete"]["order"]
order.refresh_from_db()
assert data["status"] == order.status.upper()
assert order.shipping_address is None
def test_draft_order_complete_unavailable_for_purchase(
staff_api_client, permission_manage_orders, staff_user, draft_order
):
# given
order = draft_order
# Ensure no events were created
assert not OrderEvent.objects.exists()
product = order.lines.first().variant.product
product.channel_listings.update(
available_for_purchase=date.today() + timedelta(days=5)
)
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
# when
response = staff_api_client.post_graphql(
DRAFT_ORDER_COMPLETE_MUTATION, variables, permissions=[permission_manage_orders]
)
# then
content = get_graphql_content(response)
error = content["data"]["draftOrderComplete"]["orderErrors"][0]
order.refresh_from_db()
assert order.status == OrderStatus.DRAFT
assert error["field"] == "lines"
assert error["code"] == OrderErrorCode.PRODUCT_UNAVAILABLE_FOR_PURCHASE.name
ORDER_LINES_CREATE_MUTATION = """
mutation OrderLinesCreate($orderId: ID!, $variantId: ID!, $quantity: Int!) {
orderLinesCreate(id: $orderId,
input: [{variantId: $variantId, quantity: $quantity}]) {
orderErrors {
field
code
message
variants
}
orderLines {
id
quantity
productSku
}
order {
total {
gross {
amount
}
}
}
}
}
"""
@pytest.mark.parametrize("status", (OrderStatus.DRAFT, OrderStatus.UNCONFIRMED))
def test_order_lines_create(
status,
order_with_lines,
permission_manage_orders,
staff_api_client,
variant_with_many_stocks,
):
query = ORDER_LINES_CREATE_MUTATION
order = order_with_lines
order.status = status
order.save(update_fields=["status"])
variant = variant_with_many_stocks
quantity = 1
order_id = graphene.Node.to_global_id("Order", order.id)
variant_id = graphene.Node.to_global_id("ProductVariant", variant.id)
variables = {"orderId": order_id, "variantId": variant_id, "quantity": quantity}
# mutation should fail without proper permissions
response = staff_api_client.post_graphql(query, variables)
assert_no_permission(response)
assert not OrderEvent.objects.exists()
# assign permissions
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(query, variables)
assert OrderEvent.objects.count() == 1
assert OrderEvent.objects.last().type == order_events.OrderEvents.ADDED_PRODUCTS
content = get_graphql_content(response)
data = content["data"]["orderLinesCreate"]
assert data["orderLines"][0]["productSku"] == variant.sku
assert data["orderLines"][0]["quantity"] == quantity
# mutation should fail when quantity is lower than 1
variables = {"orderId": order_id, "variantId": variant_id, "quantity": 0}
response = staff_api_client.post_graphql(query, variables)
content = get_graphql_content(response)
data = content["data"]["orderLinesCreate"]
assert data["orderErrors"]
assert data["orderErrors"][0]["field"] == "quantity"
assert data["orderErrors"][0]["variants"] == [variant_id]
def test_order_lines_create_with_unavailable_variant(
draft_order, permission_manage_orders, staff_api_client
):
query = ORDER_LINES_CREATE_MUTATION
order = draft_order
channel = order.channel
line = order.lines.first()
variant = line.variant
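    # Clearing the listing price makes the variant unavailable in the order's channel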
variant.channel_listings.filter(channel=channel).update(price_amount=None)
quantity = 1
order_id = graphene.Node.to_global_id("Order", order.id)
variant_id = graphene.Node.to_global_id("ProductVariant", variant.id)
variables = {"orderId": order_id, "variantId": variant_id, "quantity": quantity}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
error = content["data"]["orderLinesCreate"]["orderErrors"][0]
assert error["code"] == OrderErrorCode.NOT_AVAILABLE_IN_CHANNEL.name
assert error["field"] == "input"
assert error["variants"] == [variant_id]
@pytest.mark.parametrize("status", (OrderStatus.DRAFT, OrderStatus.UNCONFIRMED))
def test_order_lines_create_with_existing_variant(
status,
order_with_lines,
permission_manage_orders,
staff_api_client,
):
query = ORDER_LINES_CREATE_MUTATION
order = order_with_lines
order.status = status
order.save(update_fields=["status"])
line = order.lines.first()
variant = line.variant
old_quantity = line.quantity
quantity = 1
order_id = graphene.Node.to_global_id("Order", order.id)
variant_id = graphene.Node.to_global_id("ProductVariant", variant.id)
variables = {"orderId": order_id, "variantId": variant_id, "quantity": quantity}
# mutation should fail without proper permissions
response = staff_api_client.post_graphql(query, variables)
assert_no_permission(response)
assert not OrderEvent.objects.exists()
# assign permissions
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(query, variables)
assert OrderEvent.objects.count() == 1
assert OrderEvent.objects.last().type == order_events.OrderEvents.ADDED_PRODUCTS
content = get_graphql_content(response)
data = content["data"]["orderLinesCreate"]
assert data["orderLines"][0]["productSku"] == variant.sku
assert data["orderLines"][0]["quantity"] == old_quantity + quantity
@pytest.mark.parametrize("status", (OrderStatus.DRAFT, OrderStatus.UNCONFIRMED))
def test_order_lines_create_with_product_and_variant_not_assigned_to_channel(
status, order_with_lines, permission_manage_orders, staff_api_client, variant
):
query = ORDER_LINES_CREATE_MUTATION
order = order_with_lines
order.status = status
order.save(update_fields=["status"])
line = order.lines.first()
assert variant != line.variant
order_id = graphene.Node.to_global_id("Order", order.id)
variant_id = graphene.Node.to_global_id("ProductVariant", variant.id)
variables = {"orderId": order_id, "variantId": variant_id, "quantity": 1}
variant.product.channel_listings.all().delete()
variant.channel_listings.all().delete()
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
error = content["data"]["orderLinesCreate"]["orderErrors"][0]
assert error["code"] == OrderErrorCode.PRODUCT_NOT_PUBLISHED.name
assert error["field"] == "input"
assert error["variants"] == [variant_id]
@pytest.mark.parametrize("status", (OrderStatus.DRAFT, OrderStatus.UNCONFIRMED))
def test_order_lines_create_with_variant_not_assigned_to_channel(
status,
order_with_lines,
staff_api_client,
permission_manage_orders,
customer_user,
shipping_method,
variant,
channel_USD,
graphql_address_data,
):
query = ORDER_LINES_CREATE_MUTATION
order = order_with_lines
order.status = status
order.save(update_fields=["status"])
line = order.lines.first()
assert variant != line.variant
order_id = graphene.Node.to_global_id("Order", order.id)
variant_id = graphene.Node.to_global_id("ProductVariant", variant.id)
variables = {"orderId": order_id, "variantId": variant_id, "quantity": 1}
variant.channel_listings.all().delete()
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
error = content["data"]["orderLinesCreate"]["orderErrors"][0]
assert error["code"] == OrderErrorCode.NOT_AVAILABLE_IN_CHANNEL.name
assert error["field"] == "input"
assert error["variants"] == [variant_id]
def test_invalid_order_when_creating_lines(
order_with_lines, staff_api_client, permission_manage_orders
):
query = ORDER_LINES_CREATE_MUTATION
order = order_with_lines
line = order.lines.first()
variant = line.variant
order_id = graphene.Node.to_global_id("Order", order.id)
variant_id = graphene.Node.to_global_id("ProductVariant", variant.id)
variables = {"orderId": order_id, "variantId": variant_id, "quantity": 1}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderLinesCreate"]
assert data["orderErrors"]
ORDER_LINE_UPDATE_MUTATION = """
mutation OrderLineUpdate($lineId: ID!, $quantity: Int!) {
orderLineUpdate(id: $lineId, input: {quantity: $quantity}) {
errors {
field
message
}
orderLine {
id
quantity
}
order {
total {
gross {
amount
}
}
}
}
}
"""
@pytest.mark.parametrize("status", (OrderStatus.DRAFT, OrderStatus.UNCONFIRMED))
def test_order_line_update(
status,
order_with_lines,
permission_manage_orders,
staff_api_client,
staff_user,
):
query = ORDER_LINE_UPDATE_MUTATION
order = order_with_lines
order.status = status
order.save(update_fields=["status"])
line = order.lines.first()
new_quantity = 1
removed_quantity = 2
line_id = graphene.Node.to_global_id("OrderLine", line.id)
variables = {"lineId": line_id, "quantity": new_quantity}
# Ensure the line has the expected quantity
assert line.quantity == 3
# No event should exist yet
assert not OrderEvent.objects.exists()
# mutation should fail without proper permissions
response = staff_api_client.post_graphql(query, variables)
assert_no_permission(response)
# assign permissions
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(query, variables)
content = get_graphql_content(response)
data = content["data"]["orderLineUpdate"]
assert data["orderLine"]["quantity"] == new_quantity
removed_items_event = OrderEvent.objects.last() # type: OrderEvent
assert removed_items_event.type == order_events.OrderEvents.REMOVED_PRODUCTS
assert removed_items_event.user == staff_user
assert removed_items_event.parameters == {
"lines": [{"quantity": removed_quantity, "line_pk": line.pk, "item": str(line)}]
}
# mutation should fail when quantity is lower than 1
variables = {"lineId": line_id, "quantity": 0}
response = staff_api_client.post_graphql(query, variables)
content = get_graphql_content(response)
data = content["data"]["orderLineUpdate"]
assert data["errors"]
assert data["errors"][0]["field"] == "quantity"
def test_invalid_order_when_updating_lines(
order_with_lines, staff_api_client, permission_manage_orders
):
query = ORDER_LINE_UPDATE_MUTATION
order = order_with_lines
line = order.lines.first()
line_id = graphene.Node.to_global_id("OrderLine", line.id)
variables = {"lineId": line_id, "quantity": 1}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderLineUpdate"]
assert data["errors"]
QUERY_GET_FIRST_EVENT = """
query OrdersQuery {
orders(first: 1) {
edges {
node {
events {
lines {
quantity
orderLine {
id
}
}
fulfilledItems {
id
}
}
}
}
}
}
"""
def test_retrieving_event_lines_with_deleted_line(
staff_api_client, order_with_lines, staff_user, permission_manage_orders
):
order = order_with_lines
lines = order_with_lines.lines.all()
quantities_per_lines = [(line.quantity, line) for line in lines]
# Create the test event
order_events.order_added_products_event(
order=order, user=staff_user, order_lines=quantities_per_lines
)
# Delete a line
deleted_line = lines.first()
deleted_line.delete()
# Prepare the query
staff_api_client.user.user_permissions.add(permission_manage_orders)
# Send the query and retrieve the data
content = get_graphql_content(staff_api_client.post_graphql(QUERY_GET_FIRST_EVENT))
data = content["data"]["orders"]["edges"][0]["node"]["events"][0]
    # Check that every line is returned and that the deleted one resolves to None
assert len(data["lines"]) == len(quantities_per_lines)
for expected_data, received_line in zip(quantities_per_lines, data["lines"]):
quantity, line = expected_data
if line is deleted_line:
assert received_line["orderLine"] is None
else:
assert received_line["orderLine"] is not None
assert received_line["orderLine"]["id"] == graphene.Node.to_global_id(
"OrderLine", line.pk
)
assert received_line["quantity"] == quantity
def test_retrieving_event_lines_with_missing_line_pk_in_data(
staff_api_client, order_with_lines, staff_user, permission_manage_orders
):
order = order_with_lines
line = order_with_lines.lines.first()
quantities_per_lines = [(line.quantity, line)]
# Create the test event
event = order_events.order_added_products_event(
order=order, user=staff_user, order_lines=quantities_per_lines
)
del event.parameters["lines"][0]["line_pk"]
event.save(update_fields=["parameters"])
# Prepare the query
staff_api_client.user.user_permissions.add(permission_manage_orders)
# Send the query and retrieve the data
content = get_graphql_content(staff_api_client.post_graphql(QUERY_GET_FIRST_EVENT))
data = content["data"]["orders"]["edges"][0]["node"]["events"][0]
    # Check the line is returned and resolves to None since its pk is missing
received_line = data["lines"][0]
assert len(data["lines"]) == 1
assert received_line["quantity"] == line.quantity
assert received_line["orderLine"] is None
ORDER_LINE_DELETE_MUTATION = """
mutation OrderLineDelete($id: ID!) {
orderLineDelete(id: $id) {
errors {
field
message
}
orderLine {
id
}
order {
id
}
}
}
"""
@pytest.mark.parametrize("status", (OrderStatus.DRAFT, OrderStatus.UNCONFIRMED))
def test_order_line_remove(
status, order_with_lines, permission_manage_orders, staff_api_client
):
query = ORDER_LINE_DELETE_MUTATION
order = order_with_lines
order.status = status
order.save(update_fields=["status"])
line = order.lines.first()
line_id = graphene.Node.to_global_id("OrderLine", line.id)
variables = {"id": line_id}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderLineDelete"]
assert OrderEvent.objects.count() == 1
assert OrderEvent.objects.last().type == order_events.OrderEvents.REMOVED_PRODUCTS
assert data["orderLine"]["id"] == line_id
assert line not in order.lines.all()
def test_invalid_order_when_removing_lines(
staff_api_client, order_with_lines, permission_manage_orders
):
query = ORDER_LINE_DELETE_MUTATION
order = order_with_lines
line = order.lines.first()
line_id = graphene.Node.to_global_id("OrderLine", line.id)
variables = {"id": line_id}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderLineDelete"]
assert data["errors"]
ORDER_UPDATE_MUTATION = """
mutation orderUpdate($id: ID!, $email: String, $address: AddressInput) {
orderUpdate(
id: $id, input: {
userEmail: $email,
shippingAddress: $address,
billingAddress: $address}) {
orderErrors {
field
code
}
order {
userEmail
}
}
}
"""
@patch("saleor.plugins.base_plugin.BasePlugin.order_updated")
def test_order_update(
plugin_mock,
staff_api_client,
permission_manage_orders,
order_with_lines,
graphql_address_data,
):
order = order_with_lines
order.user = None
order.save()
email = "not_default@example.com"
    assert order.user_email != email
    assert order.shipping_address.first_name != graphql_address_data["firstName"]
    assert order.billing_address.last_name != graphql_address_data["lastName"]
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id, "email": email, "address": graphql_address_data}
response = staff_api_client.post_graphql(
ORDER_UPDATE_MUTATION, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
assert not content["data"]["orderUpdate"]["orderErrors"]
data = content["data"]["orderUpdate"]["order"]
assert data["userEmail"] == email
order.refresh_from_db()
order.shipping_address.refresh_from_db()
order.billing_address.refresh_from_db()
assert order.shipping_address.first_name == graphql_address_data["firstName"]
assert order.billing_address.last_name == graphql_address_data["lastName"]
assert order.user_email == email
assert order.user is None
assert order.status == OrderStatus.UNFULFILLED
assert plugin_mock.called is True
@patch("saleor.plugins.base_plugin.BasePlugin.order_updated")
def test_order_update_with_draft_order(
plugin_mock,
staff_api_client,
permission_manage_orders,
draft_order,
graphql_address_data,
):
order = draft_order
order.user = None
order.save()
email = "not_default@example.com"
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id, "email": email, "address": graphql_address_data}
response = staff_api_client.post_graphql(
ORDER_UPDATE_MUTATION, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
error = content["data"]["orderUpdate"]["orderErrors"][0]
assert error["field"] == "id"
assert error["code"] == OrderErrorCode.INVALID.name
assert plugin_mock.called is False
def test_order_update_anonymous_user_no_user_email(
staff_api_client, order_with_lines, permission_manage_orders, graphql_address_data
):
order = order_with_lines
order.user = None
order.save()
query = """
mutation orderUpdate(
$id: ID!, $address: AddressInput) {
orderUpdate(
id: $id, input: {
shippingAddress: $address,
billingAddress: $address}) {
errors {
field
message
}
order {
id
}
}
}
"""
first_name = "Test fname"
last_name = "Test lname"
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id, "address": graphql_address_data}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
get_graphql_content(response)
order.refresh_from_db()
order.shipping_address.refresh_from_db()
order.billing_address.refresh_from_db()
assert order.shipping_address.first_name != first_name
assert order.billing_address.last_name != last_name
assert order.status == OrderStatus.UNFULFILLED
def test_order_update_user_email_existing_user(
staff_api_client,
order_with_lines,
customer_user,
permission_manage_orders,
graphql_address_data,
):
order = order_with_lines
order.user = None
order.save()
query = """
mutation orderUpdate(
$id: ID!, $email: String, $address: AddressInput) {
orderUpdate(
id: $id, input: {
userEmail: $email, shippingAddress: $address,
billingAddress: $address}) {
errors {
field
message
}
order {
userEmail
}
}
}
"""
email = customer_user.email
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id, "address": graphql_address_data, "email": email}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
assert not content["data"]["orderUpdate"]["errors"]
data = content["data"]["orderUpdate"]["order"]
assert data["userEmail"] == email
order.refresh_from_db()
order.shipping_address.refresh_from_db()
order.billing_address.refresh_from_db()
assert order.shipping_address.first_name == graphql_address_data["firstName"]
assert order.billing_address.last_name == graphql_address_data["lastName"]
assert order.user_email == email
assert order.user == customer_user
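
# Mutation attaching a note to an order; returns the created event.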
ORDER_ADD_NOTE_MUTATION = """
mutation addNote($id: ID!, $message: String!) {
orderAddNote(order: $id, input: {message: $message}) {
orderErrors {
field
message
code
}
order {
id
}
event {
user {
email
}
message
}
}
}
"""
def test_order_add_note_as_staff_user(
staff_api_client, permission_manage_orders, order_with_lines, staff_user
):
"""We are testing that adding a note to an order as a staff user is doing the
expected behaviors."""
order = order_with_lines
assert not order.events.all()
order_id = graphene.Node.to_global_id("Order", order.id)
message = "nuclear note"
variables = {"id": order_id, "message": message}
response = staff_api_client.post_graphql(
ORDER_ADD_NOTE_MUTATION, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderAddNote"]
assert data["order"]["id"] == order_id
assert data["event"]["user"]["email"] == staff_user.email
assert data["event"]["message"] == message
order.refresh_from_db()
assert order.status == OrderStatus.UNFULFILLED
# Ensure the correct order event was created
event = order.events.get()
assert event.type == order_events.OrderEvents.NOTE_ADDED
assert event.user == staff_user
assert event.parameters == {"message": message}
    # Ensure no customer events were created as it was a staff action
assert not CustomerEvent.objects.exists()
@pytest.mark.parametrize(
"message",
(
"",
" ",
),
)
def test_order_add_note_fail_on_empty_message(
staff_api_client, permission_manage_orders, order_with_lines, message
):
order_id = graphene.Node.to_global_id("Order", order_with_lines.id)
variables = {"id": order_id, "message": message}
response = staff_api_client.post_graphql(
ORDER_ADD_NOTE_MUTATION, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderAddNote"]
assert data["orderErrors"][0]["field"] == "message"
assert data["orderErrors"][0]["code"] == OrderErrorCode.REQUIRED.name
MUTATION_ORDER_CANCEL = """
mutation cancelOrder($id: ID!) {
orderCancel(id: $id) {
order {
status
}
orderErrors{
field
code
}
}
}
"""
@patch("saleor.graphql.order.mutations.orders.cancel_order")
@patch("saleor.graphql.order.mutations.orders.clean_order_cancel")
def test_order_cancel(
mock_clean_order_cancel,
mock_cancel_order,
staff_api_client,
permission_manage_orders,
order_with_lines,
):
order = order_with_lines
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
response = staff_api_client.post_graphql(
MUTATION_ORDER_CANCEL, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderCancel"]
assert not data["orderErrors"]
mock_clean_order_cancel.assert_called_once_with(order)
mock_cancel_order.assert_called_once_with(
order=order, user=staff_api_client.user, manager=ANY
)
@patch("saleor.graphql.order.mutations.orders.cancel_order")
@patch("saleor.graphql.order.mutations.orders.clean_order_cancel")
def test_order_cancel_as_app(
mock_clean_order_cancel,
mock_cancel_order,
app_api_client,
permission_manage_orders,
order_with_lines,
):
order = order_with_lines
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
response = app_api_client.post_graphql(
MUTATION_ORDER_CANCEL, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderCancel"]
assert not data["orderErrors"]
mock_clean_order_cancel.assert_called_once_with(order)
mock_cancel_order.assert_called_once_with(
order=order, user=AnonymousUser(), manager=ANY
)
@mock.patch("saleor.plugins.manager.PluginsManager.notify")
def test_order_capture(
mocked_notify,
staff_api_client,
permission_manage_orders,
payment_txn_preauth,
staff_user,
):
order = payment_txn_preauth.order
query = """
mutation captureOrder($id: ID!, $amount: PositiveDecimal!) {
orderCapture(id: $id, amount: $amount) {
order {
paymentStatus
paymentStatusDisplay
isPaid
totalCaptured {
amount
}
}
}
}
"""
order_id = graphene.Node.to_global_id("Order", order.id)
amount = float(payment_txn_preauth.total)
variables = {"id": order_id, "amount": amount}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderCapture"]["order"]
order.refresh_from_db()
assert data["paymentStatus"] == PaymentChargeStatusEnum.FULLY_CHARGED.name
payment_status_display = dict(ChargeStatus.CHOICES).get(ChargeStatus.FULLY_CHARGED)
assert data["paymentStatusDisplay"] == payment_status_display
assert data["isPaid"]
assert data["totalCaptured"]["amount"] == float(amount)
event_captured, event_order_fully_paid = order.events.all()
assert event_captured.type == order_events.OrderEvents.PAYMENT_CAPTURED
assert event_captured.user == staff_user
assert event_captured.parameters == {
"amount": str(amount),
"payment_gateway": "mirumee.payments.dummy",
"payment_id": "",
}
assert event_order_fully_paid.type == order_events.OrderEvents.ORDER_FULLY_PAID
assert event_order_fully_paid.user == staff_user
payment = Payment.objects.get()
expected_payment_payload = {
"order": get_default_order_payload(order),
"recipient_email": order.get_customer_email(),
"payment": {
"created": payment.created,
"modified": payment.modified,
"charge_status": payment.charge_status,
"total": payment.total,
"captured_amount": payment.captured_amount,
"currency": payment.currency,
},
"site_name": "mirumee.com",
"domain": "mirumee.com",
}
mocked_notify.assert_called_once_with(
NotifyEventType.ORDER_PAYMENT_CONFIRMATION, expected_payment_payload
)
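# GraphQL mutation shared by the mark-as-paid tests below.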
MUTATION_MARK_ORDER_AS_PAID = """
mutation markPaid($id: ID!, $transaction: String) {
orderMarkAsPaid(id: $id, transactionReference: $transaction) {
errors {
field
message
}
orderErrors {
field
message
code
}
order {
isPaid
events{
transactionReference
}
}
}
}
"""
def test_paid_order_mark_as_paid(
staff_api_client, permission_manage_orders, payment_txn_preauth
):
order = payment_txn_preauth.order
query = MUTATION_MARK_ORDER_AS_PAID
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
errors = content["data"]["orderMarkAsPaid"]["errors"]
msg = "Orders with payments can not be manually marked as paid."
assert errors[0]["message"] == msg
assert errors[0]["field"] == "payment"
order_errors = content["data"]["orderMarkAsPaid"]["orderErrors"]
assert order_errors[0]["code"] == OrderErrorCode.PAYMENT_ERROR.name
def test_order_mark_as_paid_with_external_reference(
staff_api_client, permission_manage_orders, order_with_lines, staff_user
):
transaction_reference = "searchable-id"
order = order_with_lines
query = MUTATION_MARK_ORDER_AS_PAID
assert not order.is_fully_paid()
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id, "transaction": transaction_reference}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderMarkAsPaid"]["order"]
order.refresh_from_db()
assert data["isPaid"] is True
assert len(data["events"]) == 1
assert data["events"][0]["transactionReference"] == transaction_reference
assert order.is_fully_paid()
event_order_paid = order.events.first()
assert event_order_paid.type == order_events.OrderEvents.ORDER_MARKED_AS_PAID
assert event_order_paid.user == staff_user
event_reference = event_order_paid.parameters.get("transaction_reference")
assert event_reference == transaction_reference
order_payments = order.payments.filter(
transactions__searchable_key=transaction_reference
)
assert order_payments.count() == 1
def test_order_mark_as_paid(
staff_api_client, permission_manage_orders, order_with_lines, staff_user
):
order = order_with_lines
query = MUTATION_MARK_ORDER_AS_PAID
assert not order.is_fully_paid()
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderMarkAsPaid"]["order"]
order.refresh_from_db()
assert data["isPaid"] is True is order.is_fully_paid()
event_order_paid = order.events.first()
assert event_order_paid.type == order_events.OrderEvents.ORDER_MARKED_AS_PAID
assert event_order_paid.user == staff_user
def test_order_mark_as_paid_no_billing_address(
staff_api_client, permission_manage_orders, order_with_lines, staff_user
):
order = order_with_lines
order.billing_address = None
order.save()
query = MUTATION_MARK_ORDER_AS_PAID
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderMarkAsPaid"]["orderErrors"]
assert data[0]["code"] == OrderErrorCode.BILLING_ADDRESS_NOT_SET.name
ORDER_VOID = """
mutation voidOrder($id: ID!) {
orderVoid(id: $id) {
order {
paymentStatus
paymentStatusDisplay
}
errors {
field
message
}
orderErrors {
field
message
code
}
}
}
"""
def test_order_void(
staff_api_client, permission_manage_orders, payment_txn_preauth, staff_user
):
order = payment_txn_preauth.order
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
response = staff_api_client.post_graphql(
ORDER_VOID, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderVoid"]["order"]
assert data["paymentStatus"] == PaymentChargeStatusEnum.NOT_CHARGED.name
payment_status_display = dict(ChargeStatus.CHOICES).get(ChargeStatus.NOT_CHARGED)
assert data["paymentStatusDisplay"] == payment_status_display
event_payment_voided = order.events.last()
assert event_payment_voided.type == order_events.OrderEvents.PAYMENT_VOIDED
assert event_payment_voided.user == staff_user
@patch.object(PluginsManager, "void_payment")
def test_order_void_payment_error(
mock_void_payment, staff_api_client, permission_manage_orders, payment_txn_preauth
):
msg = "Oops! Something went wrong."
order = payment_txn_preauth.order
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
mock_void_payment.side_effect = ValueError(msg)
response = staff_api_client.post_graphql(
ORDER_VOID, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
errors = content["data"]["orderVoid"]["errors"]
assert errors[0]["field"] == "payment"
assert errors[0]["message"] == msg
order_errors = content["data"]["orderVoid"]["orderErrors"]
assert order_errors[0]["code"] == OrderErrorCode.PAYMENT_ERROR.name
mock_void_payment.assert_called_once()
def test_order_refund(staff_api_client, permission_manage_orders, payment_txn_captured):
order = payment_txn_captured.order
query = """
mutation refundOrder($id: ID!, $amount: PositiveDecimal!) {
orderRefund(id: $id, amount: $amount) {
order {
paymentStatus
paymentStatusDisplay
isPaid
status
}
}
}
"""
order_id = graphene.Node.to_global_id("Order", order.id)
amount = float(payment_txn_captured.total)
variables = {"id": order_id, "amount": amount}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderRefund"]["order"]
order.refresh_from_db()
assert data["status"] == order.status.upper()
assert data["paymentStatus"] == PaymentChargeStatusEnum.FULLY_REFUNDED.name
payment_status_display = dict(ChargeStatus.CHOICES).get(ChargeStatus.FULLY_REFUNDED)
assert data["paymentStatusDisplay"] == payment_status_display
assert data["isPaid"] is False
refund_order_event = order.events.filter(
type=order_events.OrderEvents.PAYMENT_REFUNDED
).first()
assert refund_order_event.parameters["amount"] == str(amount)
@pytest.mark.parametrize(
"requires_amount, mutation_name",
((True, "orderRefund"), (False, "orderVoid"), (True, "orderCapture")),
)
def test_clean_payment_without_payment_associated_to_order(
staff_api_client, permission_manage_orders, order, requires_amount, mutation_name
):
assert not OrderEvent.objects.exists()
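# Build the mutation dynamically: orderRefund and orderCapture take an
# amount argument, orderVoid does not.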
additional_arguments = ", amount: 2" if requires_amount else ""
query = """
mutation %(mutationName)s($id: ID!) {
%(mutationName)s(id: $id %(args)s) {
errors {
field
message
}
}
}
""" % {
"mutationName": mutation_name,
"args": additional_arguments,
}
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"id": order_id}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
errors = get_graphql_content(response)["data"][mutation_name].get("errors")
message = "There's no payment associated with the order."
assert errors, "expected an error"
assert errors == [{"field": "payment", "message": message}]
assert not OrderEvent.objects.exists()
def test_try_payment_action_generates_event(order, staff_user, payment_dummy):
message = "The payment did a oopsie!"
assert not OrderEvent.objects.exists()
def _test_operation():
raise PaymentError(message)
with pytest.raises(ValidationError) as exc:
try_payment_action(
order=order, user=staff_user, payment=payment_dummy, func=_test_operation
)
assert exc.value.args[0]["payment"].message == message
error_event = OrderEvent.objects.get() # type: OrderEvent
assert error_event.type == order_events.OrderEvents.PAYMENT_FAILED
assert error_event.user == staff_user
assert error_event.parameters == {
"message": message,
"gateway": payment_dummy.gateway,
"payment_id": payment_dummy.token,
}
def test_clean_order_refund_payment():
payment = MagicMock(spec=Payment)
payment.can_refund.return_value = False
with pytest.raises(ValidationError) as e:
clean_refund_payment(payment)
assert e.value.error_dict["payment"][0].code == OrderErrorCode.CANNOT_REFUND
def test_clean_order_capture():
with pytest.raises(ValidationError) as e:
clean_order_capture(None)
msg = "There's no payment associated with the order."
assert e.value.error_dict["payment"][0].message == msg
@pytest.mark.parametrize(
"status",
[
FulfillmentStatus.RETURNED,
FulfillmentStatus.REFUNDED_AND_RETURNED,
FulfillmentStatus.REFUNDED,
FulfillmentStatus.CANCELED,
FulfillmentStatus.REPLACED,
],
)
def test_clean_order_cancel(status, fulfillment):
order = fulfillment.order
fulfillment.status = status
fulfillment.save()
# Shouldn't raise any errors
assert clean_order_cancel(order) is None
def test_clean_order_cancel_draft_order(
fulfilled_order_with_all_cancelled_fulfillments,
):
order = fulfilled_order_with_all_cancelled_fulfillments
order.status = OrderStatus.DRAFT
order.save()
with pytest.raises(ValidationError) as e:
clean_order_cancel(order)
assert e.value.error_dict["order"][0].code == OrderErrorCode.CANNOT_CANCEL_ORDER
def test_clean_order_cancel_canceled_order(
fulfilled_order_with_all_cancelled_fulfillments,
):
order = fulfilled_order_with_all_cancelled_fulfillments
order.status = OrderStatus.CANCELED
order.save()
with pytest.raises(ValidationError) as e:
clean_order_cancel(order)
assert e.value.error_dict["order"][0].code == OrderErrorCode.CANNOT_CANCEL_ORDER
def test_clean_order_cancel_order_with_fulfillment(
fulfilled_order_with_cancelled_fulfillment,
):
order = fulfilled_order_with_cancelled_fulfillment
order.status = OrderStatus.CANCELED
order.save()
with pytest.raises(ValidationError) as e:
clean_order_cancel(order)
assert e.value.error_dict["order"][0].code == OrderErrorCode.CANNOT_CANCEL_ORDER
ORDER_UPDATE_SHIPPING_QUERY = """
mutation orderUpdateShipping($order: ID!, $shippingMethod: ID) {
orderUpdateShipping(
order: $order, input: {shippingMethod: $shippingMethod}) {
orderErrors {
field
code
message
}
order {
id
}
}
}
"""
@pytest.mark.parametrize("status", [OrderStatus.UNCONFIRMED, OrderStatus.DRAFT])
def test_order_update_shipping(
status,
staff_api_client,
permission_manage_orders,
order_with_lines,
shipping_method,
staff_user,
):
order = order_with_lines
order.status = status
order.save()
assert order.shipping_method != shipping_method
query = ORDER_UPDATE_SHIPPING_QUERY
order_id = graphene.Node.to_global_id("Order", order.id)
method_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)
variables = {"order": order_id, "shippingMethod": method_id}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderUpdateShipping"]
assert data["order"]["id"] == order_id
order.refresh_from_db()
shipping_total = shipping_method.channel_listings.get(
channel_id=order.channel_id
).get_total()
shipping_price = TaxedMoney(shipping_total, shipping_total)
assert order.status == status
assert order.shipping_method == shipping_method
assert order.shipping_price_net == shipping_price.net
assert order.shipping_price_gross == shipping_price.gross
assert order.shipping_tax_rate == Decimal("0.0")
assert order.shipping_method_name == shipping_method.name
def test_order_update_shipping_tax_included(
staff_api_client,
permission_manage_orders,
order_with_lines,
shipping_method,
staff_user,
vatlayer,
):
order = order_with_lines
order.status = OrderStatus.UNCONFIRMED
order.save(update_fields=["status"])
address = order_with_lines.shipping_address
address.country = "DE"
address.save()
query = ORDER_UPDATE_SHIPPING_QUERY
order_id = graphene.Node.to_global_id("Order", order.id)
method_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)
variables = {"order": order_id, "shippingMethod": method_id}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderUpdateShipping"]
assert data["order"]["id"] == order_id
order.refresh_from_db()
shipping_total = shipping_method.channel_listings.get(
channel_id=order.channel_id
).get_total()
assert order.status == OrderStatus.UNCONFIRMED
assert order.shipping_method == shipping_method
assert order.shipping_price_gross == shipping_total
assert order.shipping_tax_rate == Decimal("0.19")
assert order.shipping_method_name == shipping_method.name
def test_order_update_shipping_clear_shipping_method(
staff_api_client, permission_manage_orders, order, staff_user, shipping_method
):
order.shipping_method = shipping_method
order.status = OrderStatus.UNCONFIRMED
order.save(update_fields=["status"])
shipping_total = shipping_method.channel_listings.get(
channel_id=order.channel_id,
).get_total()
shipping_price = TaxedMoney(shipping_total, shipping_total)
order.shipping_price = shipping_price
order.shipping_method_name = "Example shipping"
order.save()
query = ORDER_UPDATE_SHIPPING_QUERY
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"order": order_id, "shippingMethod": None}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderUpdateShipping"]
assert data["order"]["id"] == order_id
order.refresh_from_db()
assert order.shipping_method is None
assert order.shipping_price == zero_taxed_money(order.currency)
assert order.shipping_method_name is None
def test_order_update_shipping_shipping_required(
staff_api_client, permission_manage_orders, order_with_lines, staff_user
):
order = order_with_lines
order.status = OrderStatus.UNCONFIRMED
order.save(update_fields=["status"])
query = ORDER_UPDATE_SHIPPING_QUERY
order_id = graphene.Node.to_global_id("Order", order.id)
variables = {"order": order_id, "shippingMethod": None}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderUpdateShipping"]
assert data["orderErrors"][0]["field"] == "shippingMethod"
assert data["orderErrors"][0]["message"] == (
"Shipping method is required for this order."
)
@pytest.mark.parametrize(
"status",
[
OrderStatus.UNFULFILLED,
OrderStatus.FULFILLED,
OrderStatus.PARTIALLY_RETURNED,
OrderStatus.RETURNED,
OrderStatus.CANCELED,
],
)
def test_order_update_shipping_not_editable_order(
status,
staff_api_client,
permission_manage_orders,
order_with_lines,
shipping_method,
staff_user,
):
order = order_with_lines
order.status = status
order.save()
assert order.shipping_method != shipping_method
query = ORDER_UPDATE_SHIPPING_QUERY
order_id = graphene.Node.to_global_id("Order", order.id)
method_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)
variables = {"order": order_id, "shippingMethod": method_id}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderUpdateShipping"]
assert data["orderErrors"][0]["field"] == "id"
assert data["orderErrors"][0]["code"] == OrderErrorCode.NOT_EDITABLE.name
def test_order_update_shipping_no_shipping_address(
staff_api_client,
permission_manage_orders,
order_with_lines,
shipping_method,
staff_user,
):
order = order_with_lines
order.status = OrderStatus.UNCONFIRMED
order.save(update_fields=["status"])
order.shipping_address = None
order.save()
query = ORDER_UPDATE_SHIPPING_QUERY
order_id = graphene.Node.to_global_id("Order", order.id)
method_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)
variables = {"order": order_id, "shippingMethod": method_id}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderUpdateShipping"]
assert data["orderErrors"][0]["field"] == "order"
assert data["orderErrors"][0]["message"] == (
"Cannot choose a shipping method for an order without" " the shipping address."
)
def test_order_update_shipping_incorrect_shipping_method(
staff_api_client,
permission_manage_orders,
order_with_lines,
shipping_method,
staff_user,
):
order = order_with_lines
order.status = OrderStatus.UNCONFIRMED
order.save(update_fields=["status"])
zone = shipping_method.shipping_zone
zone.countries = ["DE"]
zone.save()
assert order.shipping_address.country.code not in zone.countries
query = ORDER_UPDATE_SHIPPING_QUERY
order_id = graphene.Node.to_global_id("Order", order.id)
method_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)
variables = {"order": order_id, "shippingMethod": method_id}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderUpdateShipping"]
assert data["orderErrors"][0]["field"] == "shippingMethod"
assert data["orderErrors"][0]["message"] == (
"Shipping method cannot be used with this order."
)
def test_order_update_shipping_shipping_zone_without_channels(
staff_api_client,
permission_manage_orders,
order_with_lines,
shipping_method,
staff_user,
):
order = order_with_lines
order.status = OrderStatus.UNCONFIRMED
order.save(update_fields=["status"])
order.channel.shipping_zones.clear()
query = ORDER_UPDATE_SHIPPING_QUERY
order_id = graphene.Node.to_global_id("Order", order.id)
method_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)
variables = {"order": order_id, "shippingMethod": method_id}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderUpdateShipping"]
errors = data["orderErrors"]
assert len(errors) == 1
assert errors[0]["field"] == "shippingMethod"
assert errors[0]["code"] == OrderErrorCode.SHIPPING_METHOD_NOT_APPLICABLE.name
def test_order_update_shipping_excluded_shipping_method_postal_code(
staff_api_client,
permission_manage_orders,
order_unconfirmed,
staff_user,
shipping_method_excluded_by_postal_code,
):
order = order_unconfirmed
order.shipping_method = shipping_method_excluded_by_postal_code
shipping_total = shipping_method_excluded_by_postal_code.channel_listings.get(
channel_id=order.channel_id,
).get_total()
shipping_price = TaxedMoney(shipping_total, shipping_total)
order.shipping_price = shipping_price
order.shipping_method_name = "Example shipping"
order.save()
query = ORDER_UPDATE_SHIPPING_QUERY
order_id = graphene.Node.to_global_id("Order", order.id)
method_id = graphene.Node.to_global_id(
"ShippingMethod", shipping_method_excluded_by_postal_code.id
)
variables = {"order": order_id, "shippingMethod": method_id}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderUpdateShipping"]
assert data["orderErrors"][0]["field"] == "shippingMethod"
assert data["orderErrors"][0]["message"] == (
"Shipping method cannot be used with this order."
)
def test_draft_order_clear_shipping_method(
staff_api_client, draft_order, permission_manage_orders
):
assert draft_order.shipping_method
query = ORDER_UPDATE_SHIPPING_QUERY
order_id = graphene.Node.to_global_id("Order", draft_order.id)
variables = {"order": order_id, "shippingMethod": None}
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderUpdateShipping"]
assert data["order"]["id"] == order_id
draft_order.refresh_from_db()
assert draft_order.shipping_method is None
assert draft_order.shipping_price == zero_taxed_money(draft_order.currency)
assert draft_order.shipping_method_name is None
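# The anonymous and no-permission tests below all assert the same masking
# pattern on personal data. A minimal sketch of that pattern, as a
# hypothetical reference helper (the tests spell the expressions out inline):
def _expected_mask(value, visible=1):
    # Keep the first `visible` characters and replace the rest with dots,
    # e.g. _expected_mask("John") == "J..."; phone assertions use visible=3.
    value = str(value)
    return value[:visible] + "." * (len(value) - visible)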
ORDER_BY_TOKEN_QUERY = """
query OrderByToken($token: UUID!) {
orderByToken(token: $token) {
id
shippingAddress {
firstName
lastName
streetAddress1
streetAddress2
phone
}
billingAddress {
firstName
lastName
streetAddress1
streetAddress2
phone
}
userEmail
}
}
"""
def test_order_by_token_query_by_anonymous_user(api_client, order):
# given
query = ORDER_BY_TOKEN_QUERY
order.billing_address.street_address_2 = "test"
order.billing_address.save()
order_id = graphene.Node.to_global_id("Order", order.id)
# when
response = api_client.post_graphql(query, {"token": order.token})
# then
content = get_graphql_content(response)
data = content["data"]["orderByToken"]
assert data["id"] == order_id
assert data["shippingAddress"]["firstName"] == order.shipping_address.first_name[
0
] + "." * (len(order.shipping_address.first_name) - 1)
assert data["shippingAddress"]["lastName"] == order.shipping_address.last_name[
0
] + "." * (len(order.shipping_address.last_name) - 1)
assert data["shippingAddress"][
"streetAddress1"
] == order.shipping_address.street_address_1[0] + "." * (
len(order.shipping_address.street_address_1) - 1
)
assert data["shippingAddress"][
"streetAddress2"
] == order.shipping_address.street_address_2[0] + "." * (
len(order.shipping_address.street_address_2) - 1
)
assert data["shippingAddress"]["phone"] == str(order.shipping_address.phone)[
:3
] + "." * (len(str(order.shipping_address.phone)) - 3)
assert data["billingAddress"]["firstName"] == order.billing_address.first_name[
0
] + "." * (len(order.billing_address.first_name) - 1)
assert data["billingAddress"]["lastName"] == order.billing_address.last_name[
0
] + "." * (len(order.billing_address.last_name) - 1)
assert data["billingAddress"][
"streetAddress1"
] == order.billing_address.street_address_1[0] + "." * (
len(order.billing_address.street_address_1) - 1
)
assert data["billingAddress"][
"streetAddress2"
] == order.billing_address.street_address_2[0] + "." * (
len(order.billing_address.street_address_2) - 1
)
assert data["billingAddress"]["phone"] == str(order.billing_address.phone)[
:3
] + "." * (len(str(order.billing_address.phone)) - 3)
assert data["userEmail"] == obfuscate_email(order.user_email)
def test_order_by_token_query_by_order_owner(user_api_client, order):
# given
query = ORDER_BY_TOKEN_QUERY
order.user = user_api_client.user
order.save()
order_id = graphene.Node.to_global_id("Order", order.id)
# when
response = user_api_client.post_graphql(query, {"token": order.token})
# then
content = get_graphql_content(response)
data = content["data"]["orderByToken"]
assert data["id"] == order_id
assert data["shippingAddress"]["firstName"] == order.shipping_address.first_name
assert data["shippingAddress"]["lastName"] == order.shipping_address.last_name
assert (
data["shippingAddress"]["streetAddress1"]
== order.shipping_address.street_address_1
)
assert (
data["shippingAddress"]["streetAddress2"]
== order.shipping_address.street_address_2
)
assert data["shippingAddress"]["phone"] == order.shipping_address.phone
assert data["billingAddress"]["firstName"] == order.billing_address.first_name
assert data["billingAddress"]["lastName"] == order.billing_address.last_name
assert (
data["billingAddress"]["streetAddress1"]
== order.billing_address.street_address_1
)
assert (
data["billingAddress"]["streetAddress2"]
== order.billing_address.street_address_2
)
assert data["billingAddress"]["phone"] == order.billing_address.phone
assert data["userEmail"] == order.user_email
def test_order_by_token_query_by_superuser(superuser_api_client, order):
# given
query = ORDER_BY_TOKEN_QUERY
order_id = graphene.Node.to_global_id("Order", order.id)
# when
response = superuser_api_client.post_graphql(query, {"token": order.token})
# then
content = get_graphql_content(response)
data = content["data"]["orderByToken"]
assert data["id"] == order_id
assert data["shippingAddress"]["firstName"] == order.shipping_address.first_name
assert data["shippingAddress"]["lastName"] == order.shipping_address.last_name
assert (
data["shippingAddress"]["streetAddress1"]
== order.shipping_address.street_address_1
)
assert (
data["shippingAddress"]["streetAddress2"]
== order.shipping_address.street_address_2
)
assert data["shippingAddress"]["phone"] == order.shipping_address.phone
assert data["billingAddress"]["firstName"] == order.billing_address.first_name
assert data["billingAddress"]["lastName"] == order.billing_address.last_name
assert (
data["billingAddress"]["streetAddress1"]
== order.billing_address.street_address_1
)
assert (
data["billingAddress"]["streetAddress2"]
== order.billing_address.street_address_2
)
assert data["billingAddress"]["phone"] == order.billing_address.phone
assert data["userEmail"] == order.user_email
def test_order_by_token_query_by_staff_with_permission(
staff_api_client, permission_manage_orders, order, customer_user
):
# given
query = ORDER_BY_TOKEN_QUERY
staff_user = staff_api_client.user
staff_user.user_permissions.add(permission_manage_orders)
order.user = customer_user
order.save()
order_id = graphene.Node.to_global_id("Order", order.id)
# when
response = staff_api_client.post_graphql(query, {"token": order.token})
# then
content = get_graphql_content(response)
data = content["data"]["orderByToken"]
assert data["id"] == order_id
assert data["shippingAddress"]["firstName"] == order.shipping_address.first_name
assert data["shippingAddress"]["lastName"] == order.shipping_address.last_name
assert (
data["shippingAddress"]["streetAddress1"]
== order.shipping_address.street_address_1
)
assert (
data["shippingAddress"]["streetAddress2"]
== order.shipping_address.street_address_2
)
assert data["shippingAddress"]["phone"] == order.shipping_address.phone
assert data["billingAddress"]["firstName"] == order.billing_address.first_name
assert data["billingAddress"]["lastName"] == order.billing_address.last_name
assert (
data["billingAddress"]["streetAddress1"]
== order.billing_address.street_address_1
)
assert (
data["billingAddress"]["streetAddress2"]
== order.billing_address.street_address_2
)
assert data["billingAddress"]["phone"] == order.billing_address.phone
assert data["userEmail"] == order.user_email
def test_order_by_token_query_by_staff_no_permission(
staff_api_client, order, customer_user
):
# given
query = ORDER_BY_TOKEN_QUERY
order.shipping_address.street_address_2 = "test"
order.shipping_address.save()
order.user = customer_user
order.save()
order_id = graphene.Node.to_global_id("Order", order.id)
# when
response = staff_api_client.post_graphql(query, {"token": order.token})
# then
content = get_graphql_content(response)
data = content["data"]["orderByToken"]
assert data["id"] == order_id
assert data["shippingAddress"]["firstName"] == order.shipping_address.first_name[
0
] + "." * (len(order.shipping_address.first_name) - 1)
assert data["shippingAddress"]["lastName"] == order.shipping_address.last_name[
0
] + "." * (len(order.shipping_address.last_name) - 1)
assert data["shippingAddress"][
"streetAddress1"
] == order.shipping_address.street_address_1[0] + "." * (
len(order.shipping_address.street_address_1) - 1
)
assert data["shippingAddress"][
"streetAddress2"
] == order.shipping_address.street_address_2[0] + "." * (
len(order.shipping_address.street_address_2) - 1
)
assert data["shippingAddress"]["phone"] == str(order.shipping_address.phone)[
:3
] + "." * (len(str(order.shipping_address.phone)) - 3)
assert data["billingAddress"]["firstName"] == order.billing_address.first_name[
0
] + "." * (len(order.billing_address.first_name) - 1)
assert data["billingAddress"]["lastName"] == order.billing_address.last_name[
0
] + "." * (len(order.billing_address.last_name) - 1)
assert data["billingAddress"][
"streetAddress1"
] == order.billing_address.street_address_1[0] + "." * (
len(order.billing_address.street_address_1) - 1
)
assert data["billingAddress"][
"streetAddress2"
] == order.billing_address.street_address_2[0] + "." * (
len(order.billing_address.street_address_2) - 1
)
assert data["billingAddress"]["phone"] == str(order.billing_address.phone)[
:3
] + "." * (len(str(order.billing_address.phone)) - 3)
def test_order_by_token_query_by_app(
app_api_client, order, customer_user, permission_manage_orders
):
# given
query = ORDER_BY_TOKEN_QUERY
order.user = customer_user
order.save()
app_api_client.app.permissions.add(permission_manage_orders)
order_id = graphene.Node.to_global_id("Order", order.id)
# when
response = app_api_client.post_graphql(query, {"token": order.token})
# then
content = get_graphql_content(response)
data = content["data"]["orderByToken"]
assert data["id"] == order_id
assert data["shippingAddress"]["firstName"] == order.shipping_address.first_name
assert data["shippingAddress"]["lastName"] == order.shipping_address.last_name
assert (
data["shippingAddress"]["streetAddress1"]
== order.shipping_address.street_address_1
)
assert (
data["shippingAddress"]["streetAddress2"]
== order.shipping_address.street_address_2
)
assert data["shippingAddress"]["phone"] == order.shipping_address.phone
assert data["billingAddress"]["firstName"] == order.billing_address.first_name
assert data["billingAddress"]["lastName"] == order.billing_address.last_name
assert (
data["billingAddress"]["streetAddress1"]
== order.billing_address.street_address_1
)
assert (
data["billingAddress"]["streetAddress2"]
== order.billing_address.street_address_2
)
assert data["billingAddress"]["phone"] == order.billing_address.phone
assert data["userEmail"] == order.user_email
def test_order_by_token_query_by_app_no_perm(
app_api_client, order, customer_user, permission_manage_orders
):
# given
query = ORDER_BY_TOKEN_QUERY
order.user = customer_user
order.save()
order_id = graphene.Node.to_global_id("Order", order.id)
# when
response = app_api_client.post_graphql(query, {"token": order.token})
# then
content = get_graphql_content(response)
data = content["data"]["orderByToken"]
assert data["id"] == order_id
assert data["shippingAddress"]["firstName"] == order.shipping_address.first_name[
0
] + "." * (len(order.shipping_address.first_name) - 1)
assert data["shippingAddress"]["lastName"] == order.shipping_address.last_name[
0
] + "." * (len(order.shipping_address.last_name) - 1)
assert data["shippingAddress"][
"streetAddress1"
] == order.shipping_address.street_address_1[0] + "." * (
len(order.shipping_address.street_address_1) - 1
)
assert data["shippingAddress"]["streetAddress2"] == ""
assert data["shippingAddress"]["phone"] == str(order.shipping_address.phone)[
:3
] + "." * (len(str(order.shipping_address.phone)) - 3)
assert data["billingAddress"]["firstName"] == order.billing_address.first_name[
0
] + "." * (len(order.billing_address.first_name) - 1)
assert data["billingAddress"]["lastName"] == order.billing_address.last_name[
0
] + "." * (len(order.billing_address.last_name) - 1)
assert data["billingAddress"][
"streetAddress1"
] == order.billing_address.street_address_1[0] + "." * (
len(order.billing_address.street_address_1) - 1
)
assert data["billingAddress"]["streetAddress2"] == ""
assert data["billingAddress"]["phone"] == str(order.billing_address.phone)[
:3
] + "." * (len(str(order.billing_address.phone)) - 3)
def test_order_by_token_user_restriction(api_client, order):
query = """
query OrderByToken($token: UUID!) {
orderByToken(token: $token) {
user {
id
}
}
}
"""
response = api_client.post_graphql(query, {"token": order.token})
assert_no_permission(response)
def test_order_by_token_events_restriction(api_client, order):
query = """
query OrderByToken($token: UUID!) {
orderByToken(token: $token) {
events {
id
}
}
}
"""
response = api_client.post_graphql(query, {"token": order.token})
assert_no_permission(response)
def test_authorized_access_to_order_by_token(
user_api_client, staff_api_client, customer_user, order, permission_manage_users
):
query = """
query OrderByToken($token: UUID!) {
orderByToken(token: $token) {
user {
id
}
}
}
"""
variables = {"token": order.token}
customer_user_id = graphene.Node.to_global_id("User", customer_user.id)
response = user_api_client.post_graphql(query, variables)
content = get_graphql_content(response)
assert content["data"]["orderByToken"]["user"]["id"] == customer_user_id
response = staff_api_client.post_graphql(
query, variables, permissions=[permission_manage_users]
)
content = get_graphql_content(response)
assert content["data"]["orderByToken"]["user"]["id"] == customer_user_id
def test_query_draft_order_by_token_with_requester_as_customer(
user_api_client, draft_order
):
draft_order.user = user_api_client.user
draft_order.save(update_fields=["user"])
query = ORDER_BY_TOKEN_QUERY
response = user_api_client.post_graphql(query, {"token": draft_order.token})
content = get_graphql_content(response)
assert not content["data"]["orderByToken"]
def test_query_draft_order_by_token_as_anonymous_customer(api_client, draft_order):
query = ORDER_BY_TOKEN_QUERY
response = api_client.post_graphql(query, {"token": draft_order.token})
content = get_graphql_content(response)
assert not content["data"]["orderByToken"]
def test_query_order_without_addresses(order, user_api_client, channel_USD):
# given
query = ORDER_BY_TOKEN_QUERY
order = Order.objects.create(
token=str(uuid.uuid4()),
channel=channel_USD,
user=user_api_client.user,
)
# when
response = user_api_client.post_graphql(query, {"token": order.token})
# then
content = get_graphql_content(response)
data = content["data"]["orderByToken"]
assert data["userEmail"] == user_api_client.user.email
assert data["billingAddress"] is None
assert data["shippingAddress"] is None
def test_order_query_address_without_order_user(
staff_api_client, permission_manage_orders, channel_USD, address
):
query = ORDER_BY_TOKEN_QUERY
shipping_address = address.get_copy()
billing_address = address.get_copy()
token = str(uuid.uuid4())
Order.objects.create(
channel=channel_USD,
shipping_address=shipping_address,
billing_address=billing_address,
token=token,
)
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(query, {"token": token})
content = get_graphql_content(response)
order = content["data"]["orderByToken"]
assert order["shippingAddress"] is not None
assert order["billingAddress"] is not None
MUTATION_ORDER_BULK_CANCEL = """
mutation CancelManyOrders($ids: [ID]!) {
orderBulkCancel(ids: $ids) {
count
orderErrors{
field
code
}
}
}
"""
@patch("saleor.graphql.order.bulk_mutations.orders.cancel_order")
def test_order_bulk_cancel(
mock_cancel_order,
staff_api_client,
order_list,
fulfilled_order_with_all_cancelled_fulfillments,
permission_manage_orders,
address,
):
orders = order_list
orders.append(fulfilled_order_with_all_cancelled_fulfillments)
expected_count = sum(order.can_cancel() for order in orders)
variables = {
"ids": [graphene.Node.to_global_id("Order", order.id) for order in orders],
}
response = staff_api_client.post_graphql(
MUTATION_ORDER_BULK_CANCEL, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderBulkCancel"]
assert data["count"] == expected_count
assert not data["orderErrors"]
calls = [
call(order=order, user=staff_api_client.user, manager=ANY) for order in orders
]
mock_cancel_order.assert_has_calls(calls, any_order=True)
assert mock_cancel_order.call_count == expected_count
@patch("saleor.graphql.order.bulk_mutations.orders.cancel_order")
def test_order_bulk_cancel_as_app(
mock_cancel_order,
app_api_client,
order_list,
fulfilled_order_with_all_cancelled_fulfillments,
permission_manage_orders,
address,
):
orders = order_list
orders.append(fulfilled_order_with_all_cancelled_fulfillments)
expected_count = sum(order.can_cancel() for order in orders)
variables = {
"ids": [graphene.Node.to_global_id("Order", order.id) for order in orders],
}
response = app_api_client.post_graphql(
MUTATION_ORDER_BULK_CANCEL, variables, permissions=[permission_manage_orders]
)
content = get_graphql_content(response)
data = content["data"]["orderBulkCancel"]
assert data["count"] == expected_count
assert not data["orderErrors"]
calls = [call(order=order, user=AnonymousUser(), manager=ANY) for order in orders]
mock_cancel_order.assert_has_calls(calls, any_order=True)
assert mock_cancel_order.call_count == expected_count
def test_order_query_with_filter_channels_with_one_channel(
orders_query_with_filter,
staff_api_client,
permission_manage_orders,
orders,
channel_USD,
):
# given
channel_id = graphene.Node.to_global_id("Channel", channel_USD.pk)
variables = {"filter": {"channels": [channel_id]}}
# when
response = staff_api_client.post_graphql(
orders_query_with_filter, variables, permissions=(permission_manage_orders,)
)
# then
content = get_graphql_content(response)
orders = content["data"]["orders"]["edges"]
assert len(orders) == 3
def test_order_query_with_filter_channels_without_channel(
orders_query_with_filter,
staff_api_client,
permission_manage_orders,
orders,
):
# given
variables = {"filter": {"channels": []}}
# when
response = staff_api_client.post_graphql(
orders_query_with_filter, variables, permissions=(permission_manage_orders,)
)
# then
content = get_graphql_content(response)
orders = content["data"]["orders"]["edges"]
assert len(orders) == 5
def test_order_query_with_filter_channels_with_many_channel(
orders_query_with_filter,
staff_api_client,
permission_manage_orders,
orders,
channel_USD,
channel_PLN,
other_channel_USD,
):
# given
Order.objects.create(channel=other_channel_USD)
channel_usd_id = graphene.Node.to_global_id("Channel", channel_USD.pk)
channel_pln_id = graphene.Node.to_global_id("Channel", channel_PLN.pk)
variables = {"filter": {"channels": [channel_pln_id, channel_usd_id]}}
# when
response = staff_api_client.post_graphql(
orders_query_with_filter, variables, permissions=(permission_manage_orders,)
)
# then
content = get_graphql_content(response)
orders = content["data"]["orders"]["edges"]
assert len(orders) == 5
assert Order.objects.non_draft().count() == 6
def test_order_query_with_filter_channels_with_empty_channel(
orders_query_with_filter,
staff_api_client,
permission_manage_orders,
orders,
other_channel_USD,
):
# given
channel_id = graphene.Node.to_global_id("Channel", other_channel_USD.pk)
variables = {"filter": {"channels": [channel_id]}}
# when
response = staff_api_client.post_graphql(
orders_query_with_filter, variables, permissions=(permission_manage_orders,)
)
# then
content = get_graphql_content(response)
orders = content["data"]["orders"]["edges"]
assert len(orders) == 0
@pytest.mark.parametrize(
"orders_filter, count",
[
(
{
"created": {
"gte": str(date.today() - timedelta(days=3)),
"lte": str(date.today()),
}
},
1,
),
({"created": {"gte": str(date.today() - timedelta(days=3))}}, 1),
({"created": {"lte": str(date.today())}}, 2),
({"created": {"lte": str(date.today() - timedelta(days=3))}}, 1),
({"created": {"gte": str(date.today() + timedelta(days=1))}}, 0),
],
)
def test_order_query_with_filter_created(
orders_filter,
count,
orders_query_with_filter,
staff_api_client,
permission_manage_orders,
channel_USD,
):
Order.objects.create(channel=channel_USD)
with freeze_time("2012-01-14"):
Order.objects.create(channel=channel_USD)
variables = {"filter": orders_filter}
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(orders_query_with_filter, variables)
content = get_graphql_content(response)
orders = content["data"]["orders"]["edges"]
assert len(orders) == count
@pytest.mark.parametrize(
"orders_filter, count, payment_status",
[
({"paymentStatus": "FULLY_CHARGED"}, 1, ChargeStatus.FULLY_CHARGED),
({"paymentStatus": "NOT_CHARGED"}, 2, ChargeStatus.NOT_CHARGED),
({"paymentStatus": "PARTIALLY_CHARGED"}, 1, ChargeStatus.PARTIALLY_CHARGED),
({"paymentStatus": "PARTIALLY_REFUNDED"}, 1, ChargeStatus.PARTIALLY_REFUNDED),
({"paymentStatus": "FULLY_REFUNDED"}, 1, ChargeStatus.FULLY_REFUNDED),
({"paymentStatus": "FULLY_CHARGED"}, 0, ChargeStatus.FULLY_REFUNDED),
({"paymentStatus": "NOT_CHARGED"}, 1, ChargeStatus.FULLY_REFUNDED),
],
)
def test_order_query_with_filter_payment_status(
orders_filter,
count,
payment_status,
orders_query_with_filter,
staff_api_client,
payment_dummy,
permission_manage_orders,
channel_PLN,
):
payment_dummy.charge_status = payment_status
payment_dummy.save()
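# Clearing the pk and saving again clones the payment; the clone is attached
# to a fresh PLN-channel order and reset to NOT_CHARGED.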
payment_dummy.id = None
payment_dummy.order = Order.objects.create(channel=channel_PLN)
payment_dummy.charge_status = ChargeStatus.NOT_CHARGED
payment_dummy.save()
variables = {"filter": orders_filter}
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(orders_query_with_filter, variables)
content = get_graphql_content(response)
orders = content["data"]["orders"]["edges"]
assert len(orders) == count
@pytest.mark.parametrize(
"orders_filter, count, status",
[
({"status": "UNFULFILLED"}, 2, OrderStatus.UNFULFILLED),
({"status": "UNCONFIRMED"}, 1, OrderStatus.UNCONFIRMED),
({"status": "PARTIALLY_FULFILLED"}, 1, OrderStatus.PARTIALLY_FULFILLED),
({"status": "FULFILLED"}, 1, OrderStatus.FULFILLED),
({"status": "CANCELED"}, 1, OrderStatus.CANCELED),
],
)
def test_order_query_with_filter_status(
orders_filter,
count,
status,
orders_query_with_filter,
staff_api_client,
payment_dummy,
permission_manage_orders,
order,
channel_USD,
):
order.status = status
order.save()
Order.objects.create(channel=channel_USD)
variables = {"filter": orders_filter}
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(orders_query_with_filter, variables)
content = get_graphql_content(response)
orders = content["data"]["orders"]["edges"]
order_id = graphene.Node.to_global_id("Order", order.pk)
orders_ids_from_response = [o["node"]["id"] for o in orders]
assert len(orders) == count
assert order_id in orders_ids_from_response
@pytest.mark.parametrize(
"orders_filter, user_field, user_value",
[
({"customer": "admin"}, "email", "admin@example.com"),
({"customer": "John"}, "first_name", "johnny"),
({"customer": "Snow"}, "last_name", "snow"),
],
)
def test_order_query_with_filter_customer_fields(
orders_filter,
user_field,
user_value,
orders_query_with_filter,
staff_api_client,
permission_manage_orders,
customer_user,
channel_USD,
):
setattr(customer_user, user_field, user_value)
customer_user.save()
customer_user.refresh_from_db()
order = Order(user=customer_user, token=str(uuid.uuid4()), channel=channel_USD)
Order.objects.bulk_create(
[order, Order(token=str(uuid.uuid4()), channel=channel_USD)]
)
variables = {"filter": orders_filter}
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(orders_query_with_filter, variables)
content = get_graphql_content(response)
orders = content["data"]["orders"]["edges"]
order_id = graphene.Node.to_global_id("Order", order.pk)
assert len(orders) == 1
assert orders[0]["node"]["id"] == order_id
@pytest.mark.parametrize(
"orders_filter, user_field, user_value",
[
({"customer": "admin"}, "email", "admin@example.com"),
({"customer": "John"}, "first_name", "johnny"),
({"customer": "Snow"}, "last_name", "snow"),
],
)
def test_draft_order_query_with_filter_customer_fields(
orders_filter,
user_field,
user_value,
draft_orders_query_with_filter,
staff_api_client,
permission_manage_orders,
customer_user,
channel_USD,
):
setattr(customer_user, user_field, user_value)
customer_user.save()
customer_user.refresh_from_db()
order = Order(
status=OrderStatus.DRAFT,
user=customer_user,
token=str(uuid.uuid4()),
channel=channel_USD,
)
Order.objects.bulk_create(
[
order,
Order(
token=str(uuid.uuid4()), status=OrderStatus.DRAFT, channel=channel_USD
),
]
)
variables = {"filter": orders_filter}
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(draft_orders_query_with_filter, variables)
content = get_graphql_content(response)
orders = content["data"]["draftOrders"]["edges"]
order_id = graphene.Node.to_global_id("Order", order.pk)
assert len(orders) == 1
assert orders[0]["node"]["id"] == order_id
@pytest.mark.parametrize(
"orders_filter, count",
[
(
{
"created": {
"gte": str(date.today() - timedelta(days=3)),
"lte": str(date.today()),
}
},
1,
),
({"created": {"gte": str(date.today() - timedelta(days=3))}}, 1),
({"created": {"lte": str(date.today())}}, 2),
({"created": {"lte": str(date.today() - timedelta(days=3))}}, 1),
({"created": {"gte": str(date.today() + timedelta(days=1))}}, 0),
],
)
def test_draft_order_query_with_filter_created(
orders_filter,
count,
draft_orders_query_with_filter,
staff_api_client,
permission_manage_orders,
channel_USD,
):
Order.objects.create(status=OrderStatus.DRAFT, channel=channel_USD)
with freeze_time("2012-01-14"):
Order.objects.create(status=OrderStatus.DRAFT, channel=channel_USD)
variables = {"filter": orders_filter}
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(draft_orders_query_with_filter, variables)
content = get_graphql_content(response)
orders = content["data"]["draftOrders"]["edges"]
assert len(orders) == count
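# Query shared by the order-sorting tests below; results are checked against
# the expected permutation of the created orders.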
QUERY_ORDER_WITH_SORT = """
query ($sort_by: OrderSortingInput!) {
orders(first:5, sortBy: $sort_by) {
edges{
node{
number
}
}
}
}
"""
@pytest.mark.parametrize(
"order_sort, result_order",
[
({"field": "NUMBER", "direction": "ASC"}, [0, 1, 2, 3]),
({"field": "NUMBER", "direction": "DESC"}, [3, 2, 1, 0]),
({"field": "CREATION_DATE", "direction": "ASC"}, [1, 0, 2, 3]),
({"field": "CREATION_DATE", "direction": "DESC"}, [3, 2, 0, 1]),
({"field": "CUSTOMER", "direction": "ASC"}, [2, 0, 1, 3]),
({"field": "CUSTOMER", "direction": "DESC"}, [3, 1, 0, 2]),
({"field": "FULFILLMENT_STATUS", "direction": "ASC"}, [2, 1, 0, 3]),
({"field": "FULFILLMENT_STATUS", "direction": "DESC"}, [3, 0, 1, 2]),
],
)
def test_query_orders_with_sort(
order_sort,
result_order,
staff_api_client,
permission_manage_orders,
address,
channel_USD,
):
created_orders = []
with freeze_time("2017-01-14"):
created_orders.append(
Order.objects.create(
token=str(uuid.uuid4()),
billing_address=address,
status=OrderStatus.PARTIALLY_FULFILLED,
total=TaxedMoney(net=Money(10, "USD"), gross=Money(13, "USD")),
channel=channel_USD,
)
)
with freeze_time("2012-01-14"):
address2 = address.get_copy()
address2.first_name = "Walter"
address2.save()
created_orders.append(
Order.objects.create(
token=str(uuid.uuid4()),
billing_address=address2,
status=OrderStatus.FULFILLED,
total=TaxedMoney(net=Money(100, "USD"), gross=Money(130, "USD")),
channel=channel_USD,
)
)
address3 = address.get_copy()
address3.last_name = "Alice"
address3.save()
created_orders.append(
Order.objects.create(
token=str(uuid.uuid4()),
billing_address=address3,
status=OrderStatus.CANCELED,
total=TaxedMoney(net=Money(20, "USD"), gross=Money(26, "USD")),
channel=channel_USD,
)
)
created_orders.append(
Order.objects.create(
token=str(uuid.uuid4()),
billing_address=None,
status=OrderStatus.UNCONFIRMED,
total=TaxedMoney(net=Money(60, "USD"), gross=Money(80, "USD")),
channel=channel_USD,
)
)
variables = {"sort_by": order_sort}
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(QUERY_ORDER_WITH_SORT, variables)
content = get_graphql_content(response)
orders = content["data"]["orders"]["edges"]
for position, order_index in enumerate(result_order):
    assert orders[position]["node"]["number"] == str(created_orders[order_index].pk)
QUERY_DRAFT_ORDER_WITH_SORT = """
query ($sort_by: OrderSortingInput!) {
draftOrders(first:5, sortBy: $sort_by) {
edges{
node{
number
}
}
}
}
"""
@pytest.mark.parametrize(
"draft_order_sort, result_order",
[
({"field": "NUMBER", "direction": "ASC"}, [0, 1, 2]),
({"field": "NUMBER", "direction": "DESC"}, [2, 1, 0]),
({"field": "CREATION_DATE", "direction": "ASC"}, [1, 0, 2]),
({"field": "CREATION_DATE", "direction": "DESC"}, [2, 0, 1]),
({"field": "CUSTOMER", "direction": "ASC"}, [2, 0, 1]),
({"field": "CUSTOMER", "direction": "DESC"}, [1, 0, 2]),
],
)
def test_query_draft_orders_with_sort(
draft_order_sort,
result_order,
staff_api_client,
permission_manage_orders,
address,
channel_USD,
):
created_orders = []
with freeze_time("2017-01-14"):
created_orders.append(
Order.objects.create(
token=str(uuid.uuid4()),
billing_address=address,
status=OrderStatus.DRAFT,
total=TaxedMoney(net=Money(10, "USD"), gross=Money(13, "USD")),
channel=channel_USD,
)
)
with freeze_time("2012-01-14"):
address2 = address.get_copy()
address2.first_name = "Walter"
address2.save()
created_orders.append(
Order.objects.create(
token=str(uuid.uuid4()),
billing_address=address2,
status=OrderStatus.DRAFT,
total=TaxedMoney(net=Money(100, "USD"), gross=Money(130, "USD")),
channel=channel_USD,
)
)
address3 = address.get_copy()
address3.last_name = "Alice"
address3.save()
created_orders.append(
Order.objects.create(
token=str(uuid.uuid4()),
billing_address=address3,
status=OrderStatus.DRAFT,
total=TaxedMoney(net=Money(20, "USD"), gross=Money(26, "USD")),
channel=channel_USD,
)
)
variables = {"sort_by": draft_order_sort}
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(QUERY_DRAFT_ORDER_WITH_SORT, variables)
content = get_graphql_content(response)
draft_orders = content["data"]["draftOrders"]["edges"]
for position, order_index in enumerate(result_order):
    assert draft_orders[position]["node"]["number"] == str(
        created_orders[order_index].pk
    )
@pytest.mark.parametrize(
"orders_filter, count",
[
({"search": "test_discount"}, 2),
({"search": "test_discount1"}, 1),
({"search": "translated_discount1_name"}, 1),
({"search": "user"}, 2),
({"search": "user1@example.com"}, 1),
({"search": "test@example.com"}, 1),
({"search": "Leslie"}, 1),
({"search": "Wade"}, 1),
({"search": ""}, 3),
({"search": "ExternalID"}, 1),
],
)
def test_orders_query_with_filter_search(
orders_filter,
count,
orders_query_with_filter,
staff_api_client,
permission_manage_orders,
customer_user,
channel_USD,
):
orders = Order.objects.bulk_create(
[
Order(
user=customer_user,
token=str(uuid.uuid4()),
user_email="test@example.com",
channel=channel_USD,
),
Order(
token=str(uuid.uuid4()),
user_email="user1@example.com",
channel=channel_USD,
),
Order(
token=str(uuid.uuid4()),
user_email="user2@example.com",
channel=channel_USD,
),
]
)
OrderDiscount.objects.bulk_create(
[
OrderDiscount(
order=orders[0],
name="test_discount1",
value=Decimal("1"),
amount_value=Decimal("1"),
translated_name="translated_discount1_name",
),
OrderDiscount(
order=orders[2],
name="test_discount2",
value=Decimal("10"),
amount_value=Decimal("10"),
translated_name="translated_discount2_name",
),
]
)
order_with_payment = orders[1]
payment = Payment.objects.create(order=order_with_payment)
payment.transactions.create(
gateway_response={}, is_success=True, searchable_key="ExternalID"
)
variables = {"filter": orders_filter}
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(orders_query_with_filter, variables)
content = get_graphql_content(response)
assert content["data"]["orders"]["totalCount"] == count
def test_orders_query_with_filter_search_by_global_payment_id(
orders_query_with_filter,
staff_api_client,
permission_manage_orders,
customer_user,
channel_USD,
):
orders = Order.objects.bulk_create(
[
Order(
user=customer_user,
token=str(uuid.uuid4()),
channel=channel_USD,
user_email="test@example.com",
),
Order(
token=str(uuid.uuid4()),
channel=channel_USD,
user_email="user1@example.com",
),
]
)
OrderDiscount.objects.create(
order=orders[0],
name="test_discount1",
value=Decimal("1"),
amount_value=Decimal("1"),
translated_name="translated_discount1_name",
)
order_with_payment = orders[0]
payment = Payment.objects.create(order=order_with_payment)
global_id = graphene.Node.to_global_id("Payment", payment.pk)
variables = {"filter": {"search": global_id}}
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(orders_query_with_filter, variables)
content = get_graphql_content(response)
assert content["data"]["orders"]["totalCount"] == 1
def test_orders_query_with_filter_search_by_id(
orders_query_with_filter, order, staff_api_client, permission_manage_orders
):
variables = {"filter": {"search": order.pk}}
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(orders_query_with_filter, variables)
content = get_graphql_content(response)
assert content["data"]["orders"]["totalCount"] == 1
@pytest.mark.parametrize(
"draft_orders_filter, count",
[
({"search": "test_discount"}, 2),
({"search": "test_discount1"}, 1),
({"search": "translated_discount1_name"}, 1),
({"search": "user"}, 2),
({"search": "user1@example.com"}, 1),
({"search": "test@example.com"}, 1),
({"search": "Leslie"}, 1),
({"search": "Wade"}, 1),
({"search": ""}, 3),
],
)
def test_draft_orders_query_with_filter_search(
draft_orders_filter,
count,
draft_orders_query_with_filter,
staff_api_client,
permission_manage_orders,
customer_user,
channel_USD,
):
orders = Order.objects.bulk_create(
[
Order(
user=customer_user,
token=str(uuid.uuid4()),
user_email="test@example.com",
status=OrderStatus.DRAFT,
channel=channel_USD,
),
Order(
token=str(uuid.uuid4()),
user_email="user1@example.com",
status=OrderStatus.DRAFT,
channel=channel_USD,
),
Order(
token=str(uuid.uuid4()),
user_email="user2@example.com",
status=OrderStatus.DRAFT,
channel=channel_USD,
),
]
)
OrderDiscount.objects.bulk_create(
[
OrderDiscount(
order=orders[0],
name="test_discount1",
value=Decimal("1"),
amount_value=Decimal("1"),
translated_name="translated_discount1_name",
),
OrderDiscount(
order=orders[2],
name="test_discount2",
value=Decimal("10"),
amount_value=Decimal("10"),
translated_name="translated_discount2_name",
),
]
)
variables = {"filter": draft_orders_filter}
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(draft_orders_query_with_filter, variables)
content = get_graphql_content(response)
assert content["data"]["draftOrders"]["totalCount"] == count
def test_draft_orders_query_with_filter_search_by_id(
draft_orders_query_with_filter,
draft_order,
staff_api_client,
permission_manage_orders,
):
variables = {"filter": {"search": draft_order.pk}}
staff_api_client.user.user_permissions.add(permission_manage_orders)
response = staff_api_client.post_graphql(draft_orders_query_with_filter, variables)
content = get_graphql_content(response)
assert content["data"]["draftOrders"]["totalCount"] == 1
QUERY_GET_VARIANTS_FROM_ORDER = """
{
me{
orders(first:10){
edges{
node{
lines{
variant{
id
}
}
}
}
}
}
}
"""
def test_get_variant_from_order_line_variant_published_as_customer(
user_api_client, order_line
):
# given
# when
response = user_api_client.post_graphql(QUERY_GET_VARIANTS_FROM_ORDER, {})
# then
content = get_graphql_content(response)
orders = content["data"]["me"]["orders"]["edges"]
assert orders[0]["node"]["lines"][0]["variant"]["id"]
def test_get_variant_from_order_line_variant_published_as_admin(
staff_api_client, order_line, permission_manage_products
):
# given
order = order_line.order
order.user = staff_api_client.user
order.save()
# when
response = staff_api_client.post_graphql(
QUERY_GET_VARIANTS_FROM_ORDER,
{},
permissions=(permission_manage_products,),
check_no_permissions=False,
)
# then
content = get_graphql_content(response)
orders = content["data"]["me"]["orders"]["edges"]
assert orders[0]["node"]["lines"][0]["variant"]["id"]
def test_get_variant_from_order_line_variant_not_published_as_customer(
user_api_client, order_line
):
# given
product = order_line.variant.product
product.channel_listings.update(is_published=False)
# when
response = user_api_client.post_graphql(QUERY_GET_VARIANTS_FROM_ORDER, {})
# then
content = get_graphql_content(response)
orders = content["data"]["me"]["orders"]["edges"]
assert orders[0]["node"]["lines"][0]["variant"] is None
def test_get_variant_from_order_line_variant_not_published_as_admin(
staff_api_client, order_line, permission_manage_products
):
# given
order = order_line.order
order.user = staff_api_client.user
order.save()
product = order_line.variant.product
product.channel_listings.update(is_published=False)
# when
response = staff_api_client.post_graphql(
QUERY_GET_VARIANTS_FROM_ORDER,
{},
permissions=(permission_manage_products,),
check_no_permissions=False,
)
# then
content = get_graphql_content(response)
orders = content["data"]["me"]["orders"]["edges"]
assert orders[0]["node"]["lines"][0]["variant"]["id"]
def test_get_variant_from_order_line_variant_not_assigned_to_channel_as_customer(
user_api_client, order_line
):
# given
product = order_line.variant.product
product.channel_listings.all().delete()
# when
response = user_api_client.post_graphql(QUERY_GET_VARIANTS_FROM_ORDER, {})
# then
content = get_graphql_content(response)
orders = content["data"]["me"]["orders"]["edges"]
assert orders[0]["node"]["lines"][0]["variant"] is None
def test_get_variant_from_order_line_variant_not_assigned_to_channel_as_admin(
staff_api_client, order_line, permission_manage_products
):
# given
order = order_line.order
order.user = staff_api_client.user
order.save()
product = order_line.variant.product
product.channel_listings.all().delete()
# when
response = staff_api_client.post_graphql(
QUERY_GET_VARIANTS_FROM_ORDER,
{},
permissions=(permission_manage_products,),
check_no_permissions=False,
)
# then
content = get_graphql_content(response)
orders = content["data"]["me"]["orders"]["edges"]
assert orders[0]["node"]["lines"][0]["variant"]["id"]
def test_get_variant_from_order_line_variant_not_visible_in_listings_as_customer(
user_api_client, order_line
):
# given
product = order_line.variant.product
product.channel_listings.update(visible_in_listings=False)
# when
response = user_api_client.post_graphql(QUERY_GET_VARIANTS_FROM_ORDER, {})
# then
content = get_graphql_content(response)
orders = content["data"]["me"]["orders"]["edges"]
assert orders[0]["node"]["lines"][0]["variant"]["id"]
def test_get_variant_from_order_line_variant_not_visible_in_listings_as_admin(
staff_api_client, order_line, permission_manage_products
):
# given
order = order_line.order
order.user = staff_api_client.user
order.save()
product = order_line.variant.product
product.channel_listings.update(visible_in_listings=False)
# when
response = staff_api_client.post_graphql(
QUERY_GET_VARIANTS_FROM_ORDER,
{},
permissions=(permission_manage_products,),
check_no_permissions=False,
)
# then
content = get_graphql_content(response)
orders = content["data"]["me"]["orders"]["edges"]
assert orders[0]["node"]["lines"][0]["variant"]["id"]
def test_get_variant_from_order_line_variant_not_exists_as_customer(
user_api_client, order_line
):
# given
order_line.variant = None
order_line.save()
# when
response = user_api_client.post_graphql(QUERY_GET_VARIANTS_FROM_ORDER, {})
# then
content = get_graphql_content(response)
orders = content["data"]["me"]["orders"]["edges"]
assert orders[0]["node"]["lines"][0]["variant"] is None
def test_get_variant_from_order_line_variant_not_exists_as_staff(
staff_api_client, order_line, permission_manage_products
):
# given
order = order_line.order
order.user = staff_api_client.user
order.save()
order_line.variant = None
order_line.save()
# when
response = staff_api_client.post_graphql(
QUERY_GET_VARIANTS_FROM_ORDER,
{},
permissions=(permission_manage_products,),
check_no_permissions=False,
)
# then
content = get_graphql_content(response)
orders = content["data"]["me"]["orders"]["edges"]
assert orders[0]["node"]["lines"][0]["variant"] is None
|
from league_api.api import ApiType
class Summoner(ApiType):
profileIconId: int = None # ID of the summoner icon associated with the summoner.
name: str = None # Summoner name.
puuid: str = None # Encrypted PUUID. Exact length of 78 characters.
summonerLevel: int = None # Summoner level associated with the summoner.
revisionDate: int = None # Date summoner was last modified specified as epoch milliseconds. The following events will update this timestamp: profile icon change, playing the tutorial or advanced tutorial, finishing a game, summoner name change
id: str = None # Encrypted summoner ID. Max length 63 characters.
accountId: str = None # Encrypted account ID. Max length 56 characters.
@property
def profile_icon_id(self):
return self.profileIconId
@profile_icon_id.setter
def profile_icon_id(self, value):
self.profileIconId = value
@property
def summoner_level(self):
return self.summonerLevel
@summoner_level.setter
def summoner_level(self, value):
self.summonerLevel = value
@property
def revision_date(self):
return self.revisionDate
@revision_date.setter
def revision_date(self, value):
self.revisionDate = value
@property
def account_id(self):
return self.accountId
@account_id.setter
def account_id(self, value):
self.accountId = value
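# A minimal usage sketch (hypothetical values; assumes ApiType allows plain
# attribute assignment):
#
#     s = Summoner()
#     s.profile_icon_id = 588       # writes through to the camelCase field
#     assert s.profileIconId == 588
#     s.summonerLevel = 30
#     assert s.summoner_level == 30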
|
import os
from wikipedia import summary, DisambiguationError, PageError
from ..help import add_help_item
from userbot import BOTLOG, BOTLOG_CHATID
from userbot.events import register
@register(outgoing=True, pattern=r"^\.wiki (.*)")
async def wiki(wiki_q):
""" For .google command, fetch content from Wikipedia. """
match = wiki_q.pattern_match.group(1)
    try:
        result = summary(match)
    except DisambiguationError as error:
        await wiki_q.edit(f"Disambiguated page found.\n\n{error}")
        return
    except PageError as pageerror:
        await wiki_q.edit(f"Page not found.\n\n{pageerror}")
        return
if len(result) >= 4096:
        with open("output.txt", "w+") as file:
            file.write(result)
await wiki_q.client.send_file(
wiki_q.chat_id,
"output.txt",
reply_to=wiki_q.id,
caption="`Output too large, sending as file`",
)
if os.path.exists("output.txt"):
os.remove("output.txt")
return
await wiki_q.edit("**Search:**\n`" + match + "`\n\n**Result:**\n" + result)
if BOTLOG:
await wiki_q.client.send_message(
BOTLOG_CHATID, f"Wiki query {match} was executed successfully")
add_help_item(
".wiki",
"Misc",
"Searches wikipedia for the query.",
"""
`.wiki (query)`
"""
)
|
from opentrons import robot, containers, instruments
robot.head_speed(x=18000, y=18000, z=5000, a=700, b=700)
#Deck setup
tiprack_1000 = containers.load("tiprack-1000ul-H", "B3")
source_row = containers.load("FluidX_24_5ml", "A1", "acid")
source_col = containers.load("FluidX_24_5ml", "A2", "amine")
source_trough4row = containers.load("trough-12row", "C2")
destination_QC = containers.load("96-PCR-flat", "C1", "QC")
destination_screen = containers.load("Labcyte_384PP", "D1", "384_Screen")
trash = containers.load("point", "C3")
#Pipettes SetUp
p1000 = instruments.Pipette(
    name='eppendorf1000_no_min',
axis='b',
trash_container=trash,
tip_racks=[tiprack_1000],
max_volume=1000,
min_volume=0,
channels=1,
)
row_loc_list = ['A1', 'A2', 'A3', 'A4', 'A5', 'A6', 'B1', 'B2', 'B3', 'B4', 'B5', 'B6']
col_loc_list = ['A1', 'A2', 'A3', 'A4', 'A5', 'A6', 'B1', 'B2']
volume_to_dispense = 20
location_QC_solvent = 'A5'
volume_QC_solvent = 100
volume_to_take_out = 30
# Do protocol
n=0
m=208
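# n indexes the 96-well QC plate from well 0; m = 208 is presumably the first
# destination well index on the 384-well screen plate (an assumption based on
# the Labcyte_384PP well ordering).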
for i, x in enumerate(row_loc_list):
source_location = x
p1000.pick_up_tip()
p1000.transfer(volume_to_dispense, source_row.wells(source_location), destination_QC.wells(n).bottom(1), blow_out=True, new_tip = 'never')
p1000.transfer(volume_to_take_out, source_row.wells(source_location), destination_screen.wells(m).bottom(1), blow_out=True, new_tip = 'never')
p1000.drop_tip()
m=m+1
n=n+1
for i, x in enumerate(col_loc_list):
source_location = x
p1000.pick_up_tip()
p1000.transfer(volume_to_dispense, source_col.wells(source_location), destination_QC.wells(n).bottom(1), blow_out=True, new_tip = 'never')
p1000.transfer(volume_to_take_out, source_col.wells(source_location), destination_screen.wells(m).bottom(1), blow_out=True, new_tip = 'never')
p1000.drop_tip()
m=m+1
n=n+1
p1000.distribute(volume_QC_solvent, source_trough4row.wells(location_QC_solvent), [x.top() for x in destination_QC.wells(0, to=n-1)])
robot.home()
|
import argparse
import numpy as np
import pickle
import os
import random
from torch.utils.data import DataLoader
import torch
from transformers import BertConfig
from model import MidiBert
from finetune_trainer import FinetuneTrainer
from finetune_dataset import FinetuneDataset
from matplotlib import pyplot as plt
this_dir = os.path.dirname(os.path.realpath(__file__))+'/'
def get_args():
parser = argparse.ArgumentParser(description='')
### mode ###
parser.add_argument('--task', choices=['melody', 'velocity', 'composer', 'emotion', 'reduction'], required=True)
### path setup ###
parser.add_argument('--dict_file', type=str, default=this_dir+'../../dict/CP.pkl')
parser.add_argument('--name', type=str, default='')
parser.add_argument('--ckpt', default=this_dir+'result/finetune/pretrain_model.ckpt')
### parameter setting ###
parser.add_argument('--num_workers', type=int, default=5)
parser.add_argument('--class_num', type=int)
parser.add_argument('--batch_size', type=int, default=12)
parser.add_argument('--max_seq_len', type=int, default=512, help='all sequences are padded to `max_seq_len`')
parser.add_argument('--hs', type=int, default=768)
parser.add_argument("--index_layer", type=int, default=12, help="number of layers")
parser.add_argument('--epochs', type=int, default=10, help='number of training epochs')
parser.add_argument('--lr', type=float, default=2e-5, help='initial learning rate')
parser.add_argument('--nopretrain', action="store_true") # default: false
### cuda ###
parser.add_argument("--cpu", action="store_true") # default=False
parser.add_argument("--cuda_devices", type=int, nargs='+', default=[0,1,2,3], help="CUDA device ids")
args = parser.parse_args()
if args.task == 'melody':
args.class_num = 4
elif args.task == 'velocity':
args.class_num = 7
elif args.task == 'composer':
args.class_num = 8
elif args.task == 'emotion':
args.class_num = 4
elif args.task == 'reduction':
args.class_num = 3 #(0 = padding, 1 = keep, 2 = discard)
return args
def load_data(dataset, task):
data_root = this_dir+'../../data/CP/'
if dataset == 'emotion':
dataset = 'emopia'
if dataset not in ['pop909', 'composer', 'emopia','custom_reduction']:
print(f'Dataset {dataset} not supported')
exit(1)
X_train = np.load(os.path.join(data_root, f'{dataset}_train.npy'), allow_pickle=True)
X_val = np.load(os.path.join(data_root, f'{dataset}_valid.npy'), allow_pickle=True)
X_test = np.load(os.path.join(data_root, f'{dataset}_test.npy'), allow_pickle=True)
print('X_train: {}, X_valid: {}, X_test: {}'.format(X_train.shape, X_val.shape, X_test.shape))
if dataset == 'pop909':
y_train = np.load(os.path.join(data_root, f'{dataset}_train_{task[:3]}ans.npy'), allow_pickle=True)
y_val = np.load(os.path.join(data_root, f'{dataset}_valid_{task[:3]}ans.npy'), allow_pickle=True)
y_test = np.load(os.path.join(data_root, f'{dataset}_test_{task[:3]}ans.npy'), allow_pickle=True)
else:
y_train = np.load(os.path.join(data_root, f'{dataset}_train_ans.npy'), allow_pickle=True)
y_val = np.load(os.path.join(data_root, f'{dataset}_valid_ans.npy'), allow_pickle=True)
y_test = np.load(os.path.join(data_root, f'{dataset}_test_ans.npy'), allow_pickle=True)
print('y_train: {}, y_valid: {}, y_test: {}'.format(y_train.shape, y_val.shape, y_test.shape))
return X_train, X_val, X_test, y_train, y_val, y_test
def main():
# set seed
seed = 2021
torch.manual_seed(seed) # cpu
torch.cuda.manual_seed(seed) # current gpu
torch.cuda.manual_seed_all(seed) # all gpu
np.random.seed(seed)
random.seed(seed)
# argument
args = get_args()
print("Loading Dictionary")
with open(args.dict_file, 'rb') as f:
e2w, w2e = pickle.load(f)
print("\nLoading Dataset")
if args.task == 'melody' or args.task == 'velocity':
dataset = 'pop909'
seq_class = False
elif args.task == 'composer':
dataset = 'composer'
seq_class = True
elif args.task == 'emotion':
dataset = 'emopia'
seq_class = True
elif args.task == 'reduction':
dataset = 'custom_reduction'
seq_class = False
X_train, X_val, X_test, y_train, y_val, y_test = load_data(dataset, args.task)
#TODO
trainset = FinetuneDataset(X=X_train, y=y_train)
validset = FinetuneDataset(X=X_val, y=y_val)
testset = FinetuneDataset(X=X_test, y=y_test)
train_loader = DataLoader(trainset, batch_size=args.batch_size, num_workers=args.num_workers, shuffle=True)
print(" len of train_loader",len(train_loader))
valid_loader = DataLoader(validset, batch_size=args.batch_size, num_workers=args.num_workers)
print(" len of valid_loader",len(valid_loader))
test_loader = DataLoader(testset, batch_size=args.batch_size, num_workers=args.num_workers)
print(" len of valid_loader",len(test_loader))
print("\nBuilding BERT model")
configuration = BertConfig(max_position_embeddings=args.max_seq_len,
position_embedding_type='relative_key_query',
hidden_size=args.hs)
midibert = MidiBert(bertConfig=configuration, e2w=e2w, w2e=w2e)
best_mdl = ''
if not args.nopretrain:
best_mdl = args.ckpt
print(" Loading pre-trained model from", best_mdl.split('/')[-1])
checkpoint = torch.load(best_mdl, map_location='cpu')
midibert.load_state_dict(checkpoint['state_dict'])
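    # Map the 1-based CLI layer index into a negative hidden-state index, e.g.
    # the default --index_layer 12 becomes -1, i.e. the last transformer layer.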
index_layer = int(args.index_layer)-13
print("\nCreating Finetune Trainer using index layer", index_layer)
trainer = FinetuneTrainer(midibert, train_loader, valid_loader, test_loader, index_layer, args.lr, args.class_num,
args.hs, y_test.shape, args.cpu, args.cuda_devices, None, seq_class)
print("\nTraining Start")
save_dir = os.path.join(this_dir+'result/finetune/', args.task + '_' + args.name)
os.makedirs(save_dir, exist_ok=True)
filename = os.path.join(save_dir, 'model.ckpt')
print(" save model at {}".format(filename))
best_acc, best_epoch = 0, 0
bad_cnt = 0
train_accs, valid_accs = [], []
with open(os.path.join(save_dir, 'log'), 'a') as outfile:
outfile.write("Loading pre-trained model from " + best_mdl.split('/')[-1] + '\n')
for epoch in range(args.epochs):
train_loss, train_acc = trainer.train()
valid_loss, valid_acc = trainer.valid()
test_loss, test_acc, _ = trainer.test()
is_best = valid_acc >= best_acc
best_acc = max(valid_acc, best_acc)
if is_best:
bad_cnt, best_epoch = 0, epoch
else:
bad_cnt += 1
print('epoch: {}/{} | Train Loss: {} | Train acc: {} | Valid Loss: {} | Valid acc: {} | Test loss: {} | Test acc: {}'.format(
epoch+1, args.epochs, train_loss, train_acc, valid_loss, valid_acc, test_loss, test_acc))
train_accs.append(train_acc)
valid_accs.append(valid_acc)
trainer.save_checkpoint(epoch, train_acc, valid_acc,
valid_loss, train_loss, is_best, filename)
outfile.write('Epoch {}: train_loss={}, valid_loss={}, test_loss={}, train_acc={}, valid_acc={}, test_acc={}\n'.format(
epoch+1, train_loss, valid_loss, test_loss, train_acc, valid_acc, test_acc))
if bad_cnt > 10:
print('valid acc not improving for 10 epochs')
break
# draw figure valid_acc & train_acc
plt.figure()
plt.plot(train_accs)
plt.plot(valid_accs)
plt.title(f'{args.task} task accuracy (w/ pre-training)')
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.legend(['train','valid'], loc='upper left')
plt.savefig(f'{this_dir}acc_{args.task}_scratch.jpg')
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-11 14:44
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('ubigeo', '0007_ubigeocontinente_continente_id'),
]
operations = [
migrations.RenameField(
model_name='ubigeopais',
old_name='id_pais',
new_name='alpha3',
),
]
|
import _plotly_utils.basevalidators
class SizeValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self, plotly_name='size', parent_name='pie.hoverlabel.font', **kwargs
):
super(SizeValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=True,
edit_type='none',
min=1,
role='style',
**kwargs
)
|
# Generated by Django 2.2.24 on 2021-11-19 10:43
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('collect', '0015_auto_20211109_1123'),
]
operations = [
migrations.RenameField(
model_name='collectactivity',
old_name='type',
new_name='collect_type',
),
]
|
"""
Operations for a ``Number`` class
"""
import cake
import operator
# Add divmod to the operator interface so evaluate() can look it up by name
operator.divmod = divmod
def evaluate(N, O, *, return_class = None, func: str = 'add'):
"""
Evaluate 2 tokens, if implementing in custom class, N will be self/current value
Parameters
----------
N: :class:`~typing.Any`
First token
O: :class:`~typing.Any`
Second token
return_class: :class:`~typing.Callable`
A function or class to be returned after evaluating the tokens, else returns the evaluated tokens
func: :class:`str`
The name of the operation, check out the ``operator`` module
"""
if hasattr(O, 'value'):
O = O.value
if hasattr(O, 'get_value'):
O = O.get_value()
if isinstance(func, str):
func = getattr(operator, func)
if cake.compare_any(N, O, type=(cake.Unknown, cake.Equation)):
try:
return func(N, O) if not return_class else return_class(func(N, O))
except (ValueError, TypeError):
return func(O, N) if not return_class else return_class(func(O, N))
if cake.compare_any(N, O, type=cake.Expression):
if hasattr(N, 'expression'):
if isinstance(O, cake.Expression):
O = O.expression
return cake.Expression(f'({N.expression}) + {O}')
if isinstance(N, cake.Expression):
N = N.expression
return cake.Expression(f'({O.expression}) + {N}')
try:
return func(O, N) if not return_class else return_class(func(O, N))
    except Exception as e:
        raise cake.InvalidObject(f'Cannot apply {func.__name__} to type {N.__class__.__name__} with type {O.__class__.__name__}') from e
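# A minimal usage sketch with plain ints (hedged: cake token types would take
# the Unknown/Expression branches above instead of the fallback):
#
#     evaluate(3, 4, func='mul')                    # -> 12
#     evaluate(3, 4, func='divmod')                 # -> (1, 1), i.e. divmod(4, 3);
#                                                   #    the fallback applies func(O, N)
#     evaluate(3, 4, return_class=str, func='add')  # -> '7'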
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'depot_tools/bot_update',
'depot_tools/gclient',
'depot_tools/git',
'recipe_engine/path',
'recipe_engine/properties',
'recipe_engine/python',
'recipe_engine/raw_io',
'webrtc',
]
def RunSteps(api):
api.gclient.set_config('webrtc')
step_result = api.python(
'check roll status',
api.path['build'].join('scripts', 'tools', 'pycurl.py'),
args=['https://webrtc-roll-cr-rev-status.appspot.com/status'],
stdout=api.raw_io.output(),
step_test_data=lambda: api.raw_io.test_api.stream_output(
'1', stream='stdout')
)
step_result.presentation.logs['stdout'] = step_result.stdout.splitlines()
if step_result.stdout.strip() != '1':
step_result.presentation.step_text = 'Rolling deactivated'
return
else:
step_result.presentation.step_text = 'Rolling activated'
api.bot_update.ensure_checkout(force=True)
api.gclient.runhooks()
# Enforce a clean state.
api.git(
'checkout', '-f', 'master',
cwd=api.path['checkout'],
)
api.git(
'clean', '-ffd',
cwd=api.path['checkout'],
)
# Run the roll script. It will take care of branch creation, modifying DEPS,
# uploading etc. It will also delete any previous roll branch.
api.python(
'autoroll chromium_revision',
api.path['checkout'].join('tools', 'autoroller',
'roll_chromium_revision.py'),
['--clean', '--verbose'],
cwd=api.path['checkout'],
)
def GenTests(api):
yield (
api.test('rolling_activated') +
api.properties.generic(mastername='client.webrtc.fyi',
buildername='Auto-roll - WebRTC DEPS')
)
yield (api.test('rolling_deactivated') +
api.properties.generic(mastername='client.webrtc.fyi',
buildername='Auto-roll - WebRTC DEPS') +
api.override_step_data('check roll status',
api.raw_io.stream_output('0', stream='stdout'))
)
|
from . import db
from flask_login import UserMixin
from . import login_manager
from werkzeug.security import generate_password_hash,check_password_hash
from datetime import datetime
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
class User(UserMixin,db.Model):
''' class for user model'''
__tablename__ = 'users'
id = db.Column(db.Integer,primary_key = True)
username = db.Column(db.String(255),index = True)
email = db.Column(db.String(255),unique = True,index = True)
bio = db.Column(db.String(255))
pass_secure = db.Column(db.String(255))
@property
def password(self):
raise AttributeError('You cannot read the password attribute')
@password.setter
def password(self, password):
self.pass_secure = generate_password_hash(password)
def verify_password(self,password):
return check_password_hash(self.pass_secure,password)
def __repr__(self):
return f'User {self.username}'
class Pitch(db.Model):
__tablename__ = 'pitches'
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(250), nullable=False)
content= db.Column(db.Text, nullable=False)
category = db.Column(db.String)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)
user = db.relationship("User", foreign_keys=user_id)
def save_pitch(self):
db.session.add(self)
db.session.commit()
@classmethod
def get_pitches(cls, category):
pitches = cls.query.filter_by(category=category).all()
return pitches
@classmethod
def get_all_pitches(cls):
pitches = cls.query.all()
return pitches
@classmethod
def get_pitch(cls, id):
pitch = cls.query.filter_by(id=id).first()
return pitch
class Comment(db.Model):
__tablename__ = 'comments'
id = db.Column(db.Integer, primary_key=True)
comment = db.Column(db.String(1500))
user_id = db.Column(db.Integer, db.ForeignKey("users.id"))
user = db.relationship("User", foreign_keys=user_id)
pitch_id = db.Column(db.Integer, db.ForeignKey("pitches.id"))
def save_comment(self):
db.session.add(self)
db.session.commit()
@classmethod
def get_comments(cls, pitch):
comments = cls.query.filter_by(pitch_id=pitch).all()
return comments
|
#!/usr/bin/env python
from __future__ import print_function
import numpy as np
import cv2
import tensorflow as tf
import threading
import sys
import time
import os
def MakeDir(path):
try:
os.makedirs(path)
except:
pass
lab = False
load_model = False
train = True
test_display = True
test_write_video = True
path_work_dir = "~/rl_3d/"
vizdoom_path = "~/ViZDoom/"
vizdoom_scenario = vizdoom_path + "scenarios/simpler_basic.wad"
if (lab):
from env_lab import EnvLab
model_path = path_work_dir + "model_lab_a3c/"
else:
from env_vizdoom import EnvVizDoom
model_path = path_work_dir + "model_vizdoom_a3c/"
learning_rate = 0.00025
device = "/cpu:0"
num_workers = 3
t_max = 30
frame_repeat = 10 # 4
gamma = 0.99
step_num = int(2.5e5)
save_each = 0.01 * step_num
step_load = 100
entropy_beta = 0.01
grad_norm_clip = 40.0
global_scope_name = "global"
step = 0
train_scores = []
lock = threading.Lock()
start_time = 0
# Global.
env = None
MakeDir(model_path)
model_name = model_path + "a3c"
def PrintStat(elapsed_time, step, step_num, train_scores):
steps_per_s = 1.0 * step / elapsed_time
steps_per_m = 60.0 * step / elapsed_time
steps_per_h = 3600.0 * step / elapsed_time
steps_remain = step_num - step
remain_h = int(steps_remain / steps_per_h)
remain_m = int((steps_remain - remain_h * steps_per_h) / steps_per_m)
remain_s = int((steps_remain - remain_h * steps_per_h - remain_m * steps_per_m) / steps_per_s)
elapsed_h = int(elapsed_time / 3600)
elapsed_m = int((elapsed_time - elapsed_h * 3600) / 60)
elapsed_s = int((elapsed_time - elapsed_h * 3600 - elapsed_m * 60))
print("{}% | Steps: {}/{}, {:.2f}M step/h, {:02}:{:02}:{:02}/{:02}:{:02}:{:02}".format(
100.0 * step / step_num, step, step_num, steps_per_h / 1e6,
elapsed_h, elapsed_m, elapsed_s, remain_h, remain_m, remain_s), file=sys.stderr)
mean_train = 0
std_train = 0
min_train = 0
max_train = 0
if (len(train_scores) > 0):
train_scores = np.array(train_scores)
mean_train = train_scores.mean()
std_train = train_scores.std()
min_train = train_scores.min()
max_train = train_scores.max()
print("Episodes: {} Rewards: mean: {:.2f}, std: {:.2f}, min: {:.2f}, max: {:.2f}".format(
len(train_scores), mean_train, std_train, min_train, max_train), file=sys.stderr)
channels = 3
resolution = (40, 40, channels)
def Preprocess(frame):
if (channels == 1):
frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
frame = cv2.resize(frame, (resolution[1], resolution[0]))
return np.reshape(frame, resolution)
class ACNet(object):
def __init__(self, num_actions, scope, trainer):
with tf.variable_scope(scope):
self.inputs = tf.placeholder(shape=[None] + list(resolution), dtype=tf.float32)
conv1 = tf.contrib.layers.conv2d(self.inputs, num_outputs=16, kernel_size=[3, 3], stride=[2, 2])
conv2 = tf.contrib.layers.conv2d(conv1, num_outputs=32, kernel_size=[3, 3], stride=[2, 2])
conv2_flat = tf.contrib.layers.flatten(conv2)
hidden = tf.contrib.layers.fully_connected(conv2_flat, 256)
# Recurrent network for temporal dependencies
# Introduce a "fake" batch dimension of 1 after flatten so that we can do LSTM over time dim
rnn_in = tf.expand_dims(hidden, [0])
lstm_size = 256
lstm_cell = tf.contrib.rnn.BasicLSTMCell(lstm_size, state_is_tuple=True)
step_size = tf.shape(self.inputs)[:1]
c_init = np.zeros((1, lstm_cell.state_size.c), dtype=np.float32)
h_init = np.zeros((1, lstm_cell.state_size.h), dtype=np.float32)
self.state_init = [c_init, h_init]
self.rnn_state = self.state_init
c_in = tf.placeholder(shape=[1, lstm_cell.state_size.c], dtype=tf.float32)
h_in = tf.placeholder(shape=[1, lstm_cell.state_size.h], dtype=tf.float32)
self.state_in = (c_in, h_in)
state_in = tf.contrib.rnn.LSTMStateTuple(c_in, h_in)
lstm_outputs, lstm_state = tf.nn.dynamic_rnn(lstm_cell, rnn_in, initial_state=state_in,
sequence_length=step_size, time_major=False)
lstm_c, lstm_h = lstm_state
rnn_out = tf.reshape(lstm_outputs, [-1, lstm_size])
self.state_out = (lstm_c[:1, :], lstm_h[:1, :])
# Output layers for policy and value estimations
self.policy = tf.contrib.layers.fully_connected(rnn_out, num_actions, activation_fn=tf.nn.softmax,
weights_initializer=self.normalized_columns_initializer(0.01),
biases_initializer=None)
self.value = tf.contrib.layers.fully_connected(rnn_out, 1, activation_fn=None,
weights_initializer=self.normalized_columns_initializer(1.0),
biases_initializer=None)
# Only the worker network need ops for loss functions and gradient updating.
if (scope != global_scope_name):
self.actions = tf.placeholder(shape=[None], dtype=tf.int32)
actions_onehot = tf.one_hot(self.actions, num_actions, dtype=tf.float32)
self.target_v = tf.placeholder(shape=[None], dtype=tf.float32)
self.advantages = tf.placeholder(shape=[None], dtype=tf.float32)
responsible_outputs = tf.reduce_sum(self.policy * actions_onehot, [1])
# Loss functions
value_loss = 0.5 * tf.reduce_sum(tf.square(self.target_v - tf.reshape(self.value, [-1])))
entropy = -tf.reduce_sum(self.policy * tf.log(self.policy))
policy_loss = -tf.reduce_sum(tf.log(responsible_outputs) * self.advantages)
self.loss = 0.5 * value_loss + policy_loss - entropy * entropy_beta
# Get gradients from local network using local losses
local_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope)
self.gradients = tf.gradients(self.loss, local_vars)
                if (grad_norm_clip is not None):
grads, _ = tf.clip_by_global_norm(self.gradients, grad_norm_clip)
else:
grads = self.gradients
# Apply local gradients to global network
global_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, global_scope_name)
self.apply_grads = trainer.apply_gradients(zip(grads, global_vars))
# Used to initialize weights for policy and value output layers
def normalized_columns_initializer(self, std = 1.0):
def _initializer(shape, dtype=None, partition_info=None):
out = np.random.randn(*shape).astype(np.float32)
out *= std / np.sqrt(np.square(out).sum(axis=0, keepdims=True))
return tf.constant(out)
return _initializer
def Train(self, sess, discounted_rewards, states, actions, advantages):
states = states / 255.0
self.ResetLstm()
feed_dict = {self.target_v : discounted_rewards,
self.inputs : np.stack(states, axis=0),
self.actions : actions,
self.advantages : advantages,
self.state_in[0] : self.rnn_state[0],
self.state_in[1] : self.rnn_state[1]}
_ = sess.run([self.apply_grads], feed_dict=feed_dict)
def ResetLstm(self):
self.rnn_state = self.state_init
def GetAction(self, sess, state):
state = state / 255.0
a_dist, v, self.rnn_state = sess.run([self.policy, self.value, self.state_out],
feed_dict={self.inputs: [state],
self.state_in[0]: self.rnn_state[0],
self.state_in[1]: self.rnn_state[1]})
a = np.random.choice(a_dist[0], p=a_dist[0])
a = np.argmax(a_dist == a)
return a, v[0, 0]
def GetValue(self, sess, state):
state = state / 255.0
v = sess.run([self.value],
feed_dict={self.inputs: [state],
self.state_in[0]: self.rnn_state[0],
self.state_in[1]: self.rnn_state[1]})
return v[0][0, 0]
class Worker(object):
def __init__(self, number, num_actions, trainer, model_name):
self.name = "worker_" + str(number)
self.number = number
self.model_name = model_name
# Create the local copy of the network and the tensorflow op to copy global paramters to local network
self.local_ac = ACNet(num_actions, self.name, trainer)
self.update_target_graph = self.update_target(global_scope_name, self.name)
if (lab):
self.env = EnvLab(80, 80, 60, "seekavoid_arena_01")
else:
self.env = EnvVizDoom(vizdoom_scenario)
# Copies one set of variables to another.
# Used to set worker network parameters to those of global network.
def update_target(self, from_scope, to_scope):
from_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, from_scope)
to_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, to_scope)
op_holder = []
for from_var, to_var in zip(from_vars, to_vars):
op_holder.append(to_var.assign(from_var))
return op_holder
# Calculate discounted returns.
def Discount(self, x, gamma):
        for idx in reversed(range(len(x) - 1)):
x[idx] += x[idx + 1] * gamma
return x
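    # Worked example: Discount([1.0, 1.0, 1.0], 0.9) mutates the list in place
    # to [2.71, 1.9, 1.0]; each entry becomes r_t + gamma * G_{t+1}.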
def Start(self, session, saver, coord):
worker_process = lambda: self.Process(session, saver, coord)
thread = threading.Thread(target=worker_process)
thread.start()
global start_time
start_time = time.time()
return thread
def Train(self, episode_buffer, sess, bootstrap_value):
episode_buffer = np.array(episode_buffer)
states = episode_buffer[:, 0]
actions = episode_buffer[:, 1]
rewards = episode_buffer[:, 2]
values = episode_buffer[:, 3]
# Here we take the rewards and values from the episode_buffer, and use them to
# generate the advantage and discounted returns.
# The advantage function uses "Generalized Advantage Estimation"
rewards_plus = np.asarray(rewards.tolist() + [bootstrap_value])
discounted_rewards = self.Discount(rewards_plus, gamma)[:-1]
value_plus = np.asarray(values.tolist() + [bootstrap_value])
advantages = rewards + gamma * value_plus[1:] - value_plus[:-1]
advantages = self.Discount(advantages, gamma)
# Update the global network using gradients from loss
# Generate network statistics to periodically save
self.local_ac.Train(sess, discounted_rewards, states, actions, advantages)
def Process(self, sess, saver, coord):
global step, train_scores, start_time, lock
print("Starting worker " + str(self.number))
while (not coord.should_stop()):
sess.run(self.update_target_graph)
episode_buffer = []
episode_reward = 0
self.env.Reset()
s = self.env.Observation()
s = Preprocess(s)
self.local_ac.ResetLstm()
while (self.env.IsRunning()):
# Take an action using probabilities from policy network output.
a, v = self.local_ac.GetAction(sess, s)
r = self.env.Act(a, frame_repeat)
finished = not self.env.IsRunning()
if (not finished):
s1 = self.env.Observation()
s1 = Preprocess(s1)
else:
s1 = None
episode_buffer.append([s, a, r, v])
episode_reward += r
s = s1
lock.acquire()
step += 1
if (step % save_each == 0):
model_name_curr = self.model_name + "_{:04}".format(int(step / save_each))
print("\nSaving the network weigths to:", model_name_curr, file=sys.stderr)
saver.save(sess, model_name_curr)
PrintStat(time.time() - start_time, step, step_num, train_scores)
train_scores = []
if (step == step_num):
coord.request_stop()
lock.release()
# If the episode hasn't ended, but the experience buffer is full, then we
# make an update step using that experience rollout.
if (len(episode_buffer) == t_max or (finished and len(episode_buffer) > 0)):
# Since we don't know what the true final return is,
# we "bootstrap" from our current value estimation.
if (not finished):
v1 = self.local_ac.GetValue(sess, s)
self.Train(episode_buffer, sess, v1)
episode_buffer = []
sess.run(self.update_target_graph)
else:
self.Train(episode_buffer, sess, 0.0)
lock.acquire()
train_scores.append(episode_reward)
lock.release()
class Agent(object):
def __init__(self):
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
config.log_device_placement = False
config.allow_soft_placement = True
self.session = tf.Session(config=config)
with tf.device(device):
# Global network
self.global_net = ACNet(env.NumActions(), global_scope_name, None)
if (train):
trainer = tf.train.RMSPropOptimizer(learning_rate)
workers = []
                for i in range(num_workers):
workers.append(Worker(i, env.NumActions(), trainer, model_name))
saver = tf.train.Saver(max_to_keep=100)
if (load_model):
model_name_curr = model_name + "_{:04}".format(step_load)
print("Loading model from: ", model_name_curr)
saver.restore(self.session, model_name_curr)
else:
self.session.run(tf.global_variables_initializer())
if (train):
coord = tf.train.Coordinator()
# Start the "work" process for each worker in a separate thread.
worker_threads = []
for worker in workers:
thread = worker.Start(self.session, saver, coord)
worker_threads.append(thread)
coord.join(worker_threads)
def Reset(self):
self.global_net.ResetLstm()
def Act(self, state):
action, _ = self.global_net.GetAction(self.session, state)
return action
def Test(agent):
if (test_write_video):
size = (640, 480)
fps = 30.0
fourcc = cv2.VideoWriter_fourcc(*'XVID') # cv2.cv.CV_FOURCC(*'XVID')
out_video = cv2.VideoWriter(path_work_dir + "test.avi", fourcc, fps, size)
reward_total = 0
num_episodes = 30
while (num_episodes != 0):
if (not env.IsRunning()):
env.Reset()
agent.Reset()
print("Total reward: {}".format(reward_total))
reward_total = 0
num_episodes -= 1
state_raw = env.Observation()
state = Preprocess(state_raw)
action = agent.Act(state)
        for _ in range(frame_repeat):
if (test_display):
cv2.imshow("frame-test", state_raw)
cv2.waitKey(20)
if (test_write_video):
out_video.write(state_raw)
reward = env.Act(action, 1)
reward_total += reward
if (not env.IsRunning()):
break
state_raw = env.Observation()
if __name__ == '__main__':
if (lab):
env = EnvLab(80, 80, 60, "seekavoid_arena_01")
else:
env = EnvVizDoom(vizdoom_scenario)
agent = Agent()
Test(agent)
|
class APIEndPoints:
__GET_GEOLOCATION_API = 'https://api.weather.com/v3/location/search?apiKey=d522aa97197fd864d36b418f39ebb323&format=json&language=en-IN&locationType=locale&query={name_of_place}'
__GET_WEATHER_DATA = 'https://api.weather.com/v2/turbo/vt1dailyForecast?apiKey=d522aa97197fd864d36b418f39ebb323&format=json&geocode={geo_location}&language=en-IN&units=m'
@classmethod
def geolocation_api(cls, name_of_place) -> str:
name_of_place = str(name_of_place).replace(' ', '%20')
return cls.__GET_GEOLOCATION_API.format(name_of_place=name_of_place)
@classmethod
def weather_api(cls, latitude, longitude) -> str:
geo_location = f'{latitude}%2C{longitude}'
return cls.__GET_WEATHER_DATA.format(geo_location=geo_location)
@staticmethod
def get_lat_long(json_data):
return json_data['location']['latitude'][0], json_data['location']['longitude'][0]
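# A minimal usage sketch (hedged: assumes `requests` is available and the
# endpoints return the JSON shape read by get_lat_long):
#
#     import requests
#     geo = requests.get(APIEndPoints.geolocation_api('New Delhi')).json()
#     lat, lon = APIEndPoints.get_lat_long(geo)
#     forecast = requests.get(APIEndPoints.weather_api(lat, lon)).json()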
|
# plots.py
"""Volume 1A: QR 2 (Least Squares and Computing Eigenvalues). Plotting file."""
from __future__ import print_function
import matplotlib
matplotlib.rcParams = matplotlib.rc_params_from_file('../../matplotlibrc')
from matplotlib import pyplot as plt
from functools import wraps
from sys import stdout
import os
def _save(filename):
"""Decorator for saving, clearing, and closing figures automatically."""
try:
name, extension = filename.split(".")
except (ValueError, TypeError) as e:
raise ValueError("Invalid file name '{}'".format(filename))
if extension not in {"pdf", "png"}:
raise ValueError("Invalid file extension '{}'".format(extension))
if not os.path.isdir("figures"):
os.mkdir("figures")
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
print("{:.<40}".format(filename), end='')
stdout.flush()
plt.clf()
out = func(*args, **kwargs)
plt.savefig("figures/"+filename, format=extension)
print("done.")
return out
except Exception as e:
print("\n\t", e, sep='')
finally:
plt.clf()
plt.close('all')
return wrapper
return decorator
# Figures =====================================================================
import numpy as np
from scipy import linalg as la
from scipy.stats import linregress
@_save("line_fit_example.pdf")
def line():
x = np.linspace(0, 10, 20)
y = .5*x - 3 + np.random.randn(20)
a, b = linregress(x, y)[:2]
plt.plot(x, y, 'k*', label="Data Points")
plt.plot(x, a*x + b, 'b-', lw=2, label="Least Squares Fit")
plt.legend(loc="upper left")
@_save("circle_fit_example.pdf")
def circle():
"""Load the data from circle.npy. Use least squares to calculate the circle
that best fits the data.
Plot the original data points the least squares circle together.
"""
x, y = np.load("circle.npy").T
A = np.column_stack((2*x, 2*y, np.ones_like(x)))
b = x**2 + y**2
c1, c2, c3 = la.lstsq(A, b)[0]
r = np.sqrt(c1**2 + c2**2 + c3)
theta = np.linspace(0, 2*np.pi, 200)
plt.plot(r*np.cos(theta)+c1, r*np.sin(theta)+c2, '-', lw=2)
plt.plot(x, y, 'k*')
plt.axis("equal")
# =============================================================================
def draw_all():
line()
circle()
if __name__ == "__main__":
draw_all()
|
import os
import tensorflow as tf
from nets import nets_factory
import time
from dl.step1_cnn import Step1CNN
from dl.step2_cnn import Step2CNN
from dl.util import get_labels_to_names
import GPUtil as GPU
import django
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "main.settings")
django.setup()
from goods.models import ExportAction
def load_step3_one(config, model_dir, export):
time0 = time.time()
traintype_modeldir = os.path.join(model_dir, str(export.pk))
checkpoint = tf.train.latest_checkpoint(traintype_modeldir)
tf.logging.info('begin loading step3 model: {}-{}'.format(export.train_action.traintype, checkpoint))
labels_to_names = get_labels_to_names(os.path.join(traintype_modeldir, 'labels.txt'))
network_fn = nets_factory.get_network_fn(
export.model_name,
num_classes=len(labels_to_names),
is_training=False)
image_size = network_fn.default_image_size
time1 = time.time()
_graph = tf.Graph()
with _graph.as_default():
input_image_path = tf.placeholder(dtype=tf.string, name='input_image')
image_string = tf.read_file(input_image_path)
image = tf.image.decode_jpeg(image_string, channels=3, name='image_tensor')
image = tf.image.convert_image_dtype(image, dtype=tf.float32)
image = tf.expand_dims(image, 0)
images = tf.image.resize_bilinear(image, [image_size, image_size], align_corners=False)
images = tf.subtract(images, 0.5)
images = tf.multiply(images, 2.0)
logits, _ = network_fn(images)
probabilities = tf.nn.softmax(logits, name='detection_classes')
time2 = time.time()
variables_to_restore = tf.global_variables()
saver = tf.train.Saver(variables_to_restore)
session = tf.Session(config=config)
saver.restore(session, checkpoint)
time3 = time.time()
tf.logging.info('end loading: %.2f, %.2f, %.2f, %.2f' % (time3 - time0, time1 - time0, time2 - time1, time3 - time2))
return session
def load_all(model_dir, traintype_to_session):
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
time0 = time.time()
export1s = ExportAction.objects.filter(train_action__action='T1').filter(checkpoint_prefix__gt=0).order_by(
'-update_time')[:1]
step1_cnn = Step1CNN(os.path.join(model_dir, str(export1s[0].pk)))
step1_cnn.load(config)
traintype_to_session[0] = step1_cnn._session
GPU.showUtilization(True)
time1 = time.time()
export2s = ExportAction.objects.filter(train_action__action='T2').filter(checkpoint_prefix__gt=0).order_by(
'-update_time')[:1]
step2_cnn = Step2CNN(os.path.join(model_dir, str(export2s[0].pk)), export2s[0].model_name)
step2_cnn.load(config)
traintype_to_session[0] = step2_cnn._session
GPU.showUtilization(True)
time2 = time.time()
export3s = ExportAction.objects.filter(train_action__action='T3').filter(checkpoint_prefix__gt=0).order_by(
'-update_time')
for export in export3s:
traintype = export.train_action.traintype
if traintype not in traintype_to_session:
session = load_step3_one(config, model_dir, export)
traintype_to_session[traintype] = session
GPU.showUtilization(True)
time3 = time.time()
tf.logging.info('loading finish: %.2f, %.2f, %.2f, %.2f' % (time3 - time0, time1 - time0, time2 - time1, time3 - time2))
def main(_):
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
tf.logging.set_verbosity('INFO')
traintype_to_session = {}
model_dir = '/home/src/goodsdl/dl/model'
load_all(model_dir, traintype_to_session)
time0 = time.time()
image_path = '/home/src/goodsdl/images/test_1.jpg'
test_traintype = 7
if test_traintype in traintype_to_session:
probabilities = traintype_to_session[test_traintype].run(
traintype_to_session[test_traintype].graph.get_tensor_by_name('detection_classes:0'),
feed_dict={traintype_to_session[test_traintype].graph.get_tensor_by_name('input_image:0'): image_path}
)
time1 = time.time()
tf.logging.info('test image: %.2f' % (time1 - time0))
if __name__ == '__main__':
tf.app.run()
|
import base64
import hashlib
import hmac
import json
import typing as t
from datetime import datetime, timedelta, timezone
def hmac_data(key: bytes, data: bytes) -> bytes:
return hmac.new(key, data, hashlib.sha256).digest()
class WebhookVerificationError(Exception):
pass
class Webhook:
_whsecret: bytes
_enc_key: t.Optional[bytes]
def __init__(self, whsecret: str, *, enc_key: t.Optional[str] = None):
self._whsecret = base64.b64decode(whsecret)
self._enc_key = base64.b64decode(enc_key) if enc_key is not None else None
def verify(self, data: t.Union[bytes, str], headers: t.Dict[str, str]) -> t.Dict[str, t.Any]:
data = data if isinstance(data, str) else data.decode()
headers = {k.lower(): v for (k, v) in headers.items()}
msg_id = headers.get("svix-id")
msg_signature = headers.get("svix-signature")
msg_timestamp = headers.get("svix-timestamp")
if not (msg_id and msg_timestamp and msg_signature):
raise WebhookVerificationError("Missing required headers")
self.__verify_timestamp(msg_timestamp)
to_sign = f"{msg_id}.{msg_timestamp}.{data}".encode()
expected_sig = hmac_data(self._whsecret, to_sign)
passed_sigs = msg_signature.split(" ")
for versioned_sig in passed_sigs:
(version, signature) = versioned_sig.split(",")
if version != "v1":
continue
sig_bytes = base64.b64decode(signature)
if hmac.compare_digest(expected_sig, sig_bytes):
return json.loads(data)
raise WebhookVerificationError("No matching signature found")
def __verify_timestamp(self, timestamp_header: str) -> None:
webhook_tolerance = timedelta(minutes=5)
now = datetime.now(tz=timezone.utc)
try:
timestamp = datetime.fromtimestamp(float(timestamp_header), tz=timezone.utc)
except Exception:
raise WebhookVerificationError("Invalid Signature Headers")
if timestamp < (now - webhook_tolerance):
raise WebhookVerificationError("Message timestamp too old")
if timestamp > (now + webhook_tolerance):
raise WebhookVerificationError("Message timestamp too new")
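# A minimal usage sketch (the secret and all header values below are
# hypothetical placeholders):
#
#     wh = Webhook("MfKQ9r8GKYqrTwjUPD8ILPZIo2LaLaSw")
#     payload = wh.verify(request_body, {
#         "svix-id": "msg_p5jXN8AQM9LWM0D4loKWxJek",
#         "svix-timestamp": "1614265330",
#         "svix-signature": "v1,g0hM9SsE+OTPJTGt/tmIKtSyZlE3uFJELVlNIOLJ1OE=",
#     })  # returns the parsed JSON body, or raises WebhookVerificationError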
|
"""
Usage:
# Create train data:
python xml_to_csv.py -i [PATH_TO_IMAGES_FOLDER]/train -o [PATH_TO_ANNOTATIONS_FOLDER]/train_labels.csv
# Create test data:
python xml_to_csv.py -i [PATH_TO_IMAGES_FOLDER]/test -o [PATH_TO_ANNOTATIONS_FOLDER]/test_labels.csv
"""
import os
import glob
import pandas as pd
import argparse
import xml.etree.ElementTree as ET
def xml_to_csv(path):
"""Iterates through all .xml files (generated by labelImg) in a given directory and combines them in a single Pandas datagrame.
Parameters:
----------
path : {str}
The path containing the .xml files
Returns
-------
Pandas DataFrame
The produced dataframe
"""
classes_names = []
xml_list = []
for xml_file in glob.glob(path + "/*.xml"):
tree = ET.parse(xml_file)
root = tree.getroot()
for member in root.findall("object"):
classes_names.append(member[0].text)
file_name = xml_file.replace("xml", "jpg")
file_name = file_name.replace("data/images/train/", "")
file_name = file_name.replace("data/images/test/", "")
value = (
file_name,
int(root.find("size")[0].text),
int(root.find("size")[1].text),
member[0].text,
int(member[4][0].text),
int(member[4][1].text),
int(member[4][2].text),
int(member[4][3].text),
)
if member[0].text == "logo":
xml_list.append(value)
column_name = [
"filename",
"width",
"height",
"class",
"xmin",
"ymin",
"xmax",
"ymax",
]
xml_df = pd.DataFrame(xml_list, columns=column_name)
classes_names = list(set(classes_names))
classes_names.sort()
return xml_df, classes_names
def main():
# Initiate argument parser
parser = argparse.ArgumentParser(
description="Sample TensorFlow XML-to-CSV converter"
)
parser.add_argument(
"-i",
"--inputDir",
help="Path to the folder where the input .xml files are stored",
type=str,
)
parser.add_argument(
"-o", "--outputFile", help="Name of output .csv file (including path)", type=str
)
parser.add_argument(
"-l",
"--labelMapDir",
help="Directory path to save label_map.pbtxt file is specified.",
type=str,
default="",
)
args = parser.parse_args()
if args.inputDir is None:
args.inputDir = os.getcwd()
if args.outputFile is None:
args.outputFile = args.inputDir + "/labels.csv"
assert os.path.isdir(args.inputDir)
os.makedirs(os.path.dirname(args.outputFile), exist_ok=True)
xml_df, classes_names = xml_to_csv(args.inputDir)
    xml_df.to_csv(args.outputFile, index=False)
print("Successfully converted xml to csv.")
if args.labelMapDir:
os.makedirs(args.labelMapDir, exist_ok=True)
label_map_path = os.path.join(args.labelMapDir, "label_map.pbtxt")
print("Generate `{}`".format(label_map_path))
# Create the `label_map.pbtxt` file
pbtxt_content = ""
for i, class_name in enumerate(classes_names):
pbtxt_content = (
pbtxt_content
+ "item {{\n id: {0}\n name: '{1}'\n}}\n\n".format(
i + 1, class_name
)
)
pbtxt_content = pbtxt_content.strip()
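        # Example of the generated format (one item per class found in the XMLs):
        #
        #   item {
        #     id: 1
        #     name: 'logo'
        #   }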
with open(label_map_path, "w") as f:
f.write(pbtxt_content)
if __name__ == "__main__":
main()
|
# Generated by Django 2.0.7 on 2018-07-31 11:52
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='DeviceInfo',
fields=[
('devid', models.AutoField(primary_key=True, serialize=False, verbose_name='设备ID')),
('devip', models.CharField(max_length=16, verbose_name='设备IP地址')),
('devname', models.CharField(max_length=32, verbose_name='设备名称')),
('devnamealias', models.CharField(max_length=32, verbose_name='设备别名')),
('ostype', models.CharField(blank=True, max_length=64, null=True, verbose_name='操作系统类型')),
('cpusize', models.FloatField(blank=True, null=True, verbose_name='CPU大小(GHz)')),
('cpucorecount', models.PositiveSmallIntegerField(blank=True, null=True, verbose_name='CPU核数')),
('memsize', models.IntegerField(blank=True, null=True, verbose_name='内存大小(GB)')),
('disksize', models.FloatField(blank=True, null=True, verbose_name='磁盘容量(GB)')),
('location', models.CharField(blank=True, max_length=64, null=True, verbose_name='机房位置')),
('devdesc', models.CharField(blank=True, max_length=256, null=True, verbose_name='设备描述')),
('pdid', models.IntegerField(blank=True, null=True, verbose_name='项目ID')),
('customer1', models.CharField(blank=True, max_length=256, null=True, verbose_name='自定义字段1')),
('customer2', models.CharField(blank=True, max_length=256, null=True, verbose_name='自定义字段2')),
('customer3', models.CharField(blank=True, max_length=256, null=True, verbose_name='自定义字段3')),
('customer4', models.CharField(blank=True, max_length=256, null=True, verbose_name='自定义字段4')),
('customer5', models.CharField(blank=True, max_length=256, null=True, verbose_name='自定义字段5')),
('customer6', models.CharField(blank=True, max_length=256, null=True, verbose_name='自定义字段6')),
('customer7', models.CharField(blank=True, max_length=256, null=True, verbose_name='自定义字段7')),
('customer8', models.CharField(blank=True, max_length=256, null=True, verbose_name='自定义字段8')),
],
options={
'db_table': 't_device_info',
'managed': True,
'verbose_name_plural': '设备信息表',
'verbose_name': '设备信息表',
},
),
migrations.CreateModel(
name='ProjectInfo',
fields=[
('pid', models.AutoField(primary_key=True, serialize=False, verbose_name='项目ID')),
('projname', models.CharField(max_length=16, verbose_name='项目名称')),
('appsystem', models.CharField(max_length=64, unique=True, verbose_name='应用系统')),
('projdesc', models.CharField(blank=True, max_length=256, null=True, verbose_name='项目描述')),
('projcontactname', models.CharField(blank=True, max_length=10, null=True, verbose_name='项目联系人姓名')),
('projcontactphone', models.CharField(blank=True, max_length=16, null=True, verbose_name='项目联系人电话')),
('projcontactemail', models.EmailField(blank=True, max_length=256, null=True, verbose_name='项目联系人邮箱')),
('appcontactname', models.CharField(blank=True, max_length=10, null=True, verbose_name='应用联系人姓名')),
('appcontactphone', models.CharField(blank=True, max_length=16, null=True, verbose_name='应用联系人电话')),
('appcontactemail', models.EmailField(blank=True, max_length=256, null=True, verbose_name='应用联系人邮箱')),
('groupname', models.CharField(blank=True, max_length=32, null=True, verbose_name='小组名称')),
('customer1', models.CharField(blank=True, max_length=256, null=True, verbose_name='自定义字段1')),
('customer2', models.CharField(blank=True, max_length=256, null=True, verbose_name='自定义字段2')),
('customer3', models.CharField(blank=True, max_length=256, null=True, verbose_name='自定义字段3')),
('customer4', models.CharField(blank=True, max_length=256, null=True, verbose_name='自定义字段4')),
('customer5', models.CharField(blank=True, max_length=256, null=True, verbose_name='自定义字段5')),
],
options={
'db_table': 't_project_info',
'verbose_name_plural': '项目信息表',
'managed': True,
'verbose_name': '项目信息表',
},
),
migrations.CreateModel(
name='Relations',
fields=[
('rid', models.AutoField(primary_key=True, serialize=False, verbose_name='关系ID')),
('localip', models.CharField(max_length=16, verbose_name='本端设备')),
('upip', models.CharField(blank=True, max_length=16, null=True, verbose_name='上联设备')),
('updesc', models.CharField(blank=True, max_length=256, null=True, verbose_name='上联描述')),
('downip', models.CharField(blank=True, max_length=16, null=True, verbose_name='下联设备')),
('downdesc', models.CharField(blank=True, max_length=256, null=True, verbose_name='下联描述')),
],
options={
'db_table': 't_relations',
'managed': True,
'verbose_name_plural': '关系表',
'verbose_name': '关系表',
},
),
migrations.CreateModel(
name='SoftwareInfo',
fields=[
('sid', models.AutoField(primary_key=True, serialize=False, verbose_name='软件ID')),
('sip', models.CharField(max_length=16, verbose_name='IP地址')),
('sname', models.CharField(max_length=64, verbose_name='软件名称')),
('stype', models.CharField(blank=True, max_length=16, null=True, verbose_name='软件类型')),
('sport', models.CharField(blank=True, max_length=6, null=True, verbose_name='软件端口')),
('sversion', models.CharField(blank=True, max_length=16, null=True, verbose_name='版本')),
('spath', models.CharField(blank=True, max_length=128, null=True, verbose_name='路径')),
('sdesc', models.CharField(blank=True, max_length=256, null=True, verbose_name='软件描述')),
('customer1', models.CharField(blank=True, max_length=256, null=True, verbose_name='自定义字段1')),
('customer2', models.CharField(blank=True, max_length=256, null=True, verbose_name='自定义字段2')),
('customer3', models.CharField(blank=True, max_length=256, null=True, verbose_name='自定义字段3')),
('customer4', models.CharField(blank=True, max_length=256, null=True, verbose_name='自定义字段4')),
('customer5', models.CharField(blank=True, max_length=256, null=True, verbose_name='自定义字段5')),
],
options={
'db_table': 't_software_info',
'managed': True,
'verbose_name_plural': '软件信息表',
'verbose_name': '软件信息表',
},
),
migrations.AlterUniqueTogether(
name='projectinfo',
unique_together={('pid', 'appsystem')},
),
]
|
"""Anonymous functions - lambda."""
#lambda [arg1 [,arg2,.....argn]]:expression
#!/usr/bin/python
# Function definition
# sum1 = lambda a, b: a + b
#
# # Sum as a function
# print("sum1 : ", sum(10, 20))
# print("sum1 : ", sum(20, 20))
def key1(x):
return x[1]
a = [(1, 2), (3, 1), (5, 10), (11, -3)]
a.sort(key=key1)
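# The same sort expressed with an anonymous function, which is what this
# module is about:
a.sort(key=lambda x: x[1])
print(a)  # [(11, -3), (3, 1), (1, 2), (5, 10)]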
|
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import csv
import io
import json
import os
import re
from ast import literal_eval
import requests
import semver
from ..utils import dir_exists, file_exists, read_file, write_file
from .config import load_config
from .constants import NOT_CHECKS, REPO_CHOICES, REPO_OPTIONS_MAP, VERSION_BUMP, get_root, set_root
from .git import get_latest_tag
# match integration's version within the __about__.py module
VERSION = re.compile(r'__version__ *= *(?:[\'"])(.+?)(?:[\'"])')
def format_commit_id(commit_id):
if commit_id:
if commit_id.isdigit():
return f'PR #{commit_id}'
else:
return f'commit hash `{commit_id}`'
return commit_id
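# Examples: format_commit_id('1234') -> 'PR #1234',
# format_commit_id('abc123f') -> 'commit hash `abc123f`'; falsy input is
# returned unchanged.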
def get_current_agent_version():
release_data = requests.get('https://raw.githubusercontent.com/DataDog/datadog-agent/master/release.json').json()
versions = set()
for version in release_data:
parts = version.split('.')
if len(parts) > 1:
versions.add((int(parts[0]), int(parts[1])))
most_recent = sorted(versions)[-1]
return f"{most_recent[0]}.{most_recent[1]}"
def is_package(d):
return file_exists(os.path.join(d, 'setup.py'))
def normalize_package_name(package_name):
return re.sub(r'[-_. ]+', '_', package_name).lower()
def string_to_toml_type(s):
if s.isdigit():
s = int(s)
elif s == 'true':
s = True
elif s == 'false':
s = False
elif s.startswith('['):
s = literal_eval(s)
return s
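# Examples: string_to_toml_type('3') -> 3, string_to_toml_type('true') -> True,
# string_to_toml_type("['a', 'b']") -> ['a', 'b']; anything else is returned
# as the original string.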
def get_check_file(check_name):
return os.path.join(get_root(), check_name, 'datadog_checks', check_name, check_name + '.py')
def get_readme_file(check_name):
return os.path.join(get_root(), check_name, 'README.md')
def check_root():
"""Check if root has already been set."""
existing_root = get_root()
if existing_root:
return True
root = os.getenv('DDEV_ROOT', '')
if root and os.path.isdir(root):
set_root(root)
return True
return False
def initialize_root(config, agent=False, core=False, extras=False, here=False):
"""Initialize root directory based on config and options"""
if check_root():
return
repo_choice = 'core' if core else 'extras' if extras else 'agent' if agent else config.get('repo', 'core')
config['repo_choice'] = repo_choice
config['repo_name'] = REPO_CHOICES.get(repo_choice, repo_choice)
message = None
# TODO: remove this legacy fallback lookup in any future major version bump
legacy_option = None if repo_choice == 'agent' else config.get(repo_choice)
root = os.path.expanduser(legacy_option or config.get('repos', {}).get(repo_choice, ''))
if here or not dir_exists(root):
if not here:
repo = 'datadog-agent' if repo_choice == 'agent' else f'integrations-{repo_choice}'
message = f'`{repo}` directory `{root}` does not exist, defaulting to the current location.'
root = os.getcwd()
set_root(root)
return message
def complete_set_root(args):
"""Set the root directory within the context of a cli completion operation."""
if check_root():
return
config = load_config()
kwargs = {REPO_OPTIONS_MAP[arg]: True for arg in args if arg in REPO_OPTIONS_MAP}
initialize_root(config, **kwargs)
def complete_testable_checks(ctx, args, incomplete):
complete_set_root(args)
return sorted(k for k in get_testable_checks() if k.startswith(incomplete))
def complete_valid_checks(ctx, args, incomplete):
complete_set_root(args)
return [k for k in get_valid_checks() if k.startswith(incomplete)]
def get_version_file(check_name):
if check_name == 'datadog_checks_base':
return os.path.join(get_root(), check_name, 'datadog_checks', 'base', '__about__.py')
elif check_name == 'datadog_checks_dev':
return os.path.join(get_root(), check_name, 'datadog_checks', 'dev', '__about__.py')
elif check_name == 'datadog_checks_downloader':
return os.path.join(get_root(), check_name, 'datadog_checks', 'downloader', '__about__.py')
else:
return os.path.join(get_root(), check_name, 'datadog_checks', check_name, '__about__.py')
def is_agent_check(check_name):
package_root = os.path.join(get_root(), check_name, 'datadog_checks', check_name, '__init__.py')
if not file_exists(package_root):
return False
contents = read_file(package_root)
# Anything more than the version must be a subclass of the base class
return contents.count('import ') > 1
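# Illustrative note (added for clarity): an __init__.py that only re-exports the
# version, e.g. "from .__about__ import __version__", contains a single import
# and is therefore not counted as an agent check; importing the check class as
# well pushes the count above one.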
def code_coverage_enabled(check_name):
if check_name in ('datadog_checks_base', 'datadog_checks_dev', 'datadog_checks_downloader'):
return True
return is_agent_check(check_name)
def get_manifest_file(check_name):
return os.path.join(get_root(), check_name, 'manifest.json')
def get_tox_file(check_name):
return os.path.join(get_root(), check_name, 'tox.ini')
def get_metadata_file(check_name):
return os.path.join(get_root(), check_name, 'metadata.csv')
def get_saved_views(check_name):
paths = load_manifest(check_name).get('assets', {}).get('saved_views', {})
views = []
for path in paths.values():
view = os.path.join(get_root(), check_name, *path.split('/'))
views.append(view)
return sorted(views)
def get_config_file(check_name):
return os.path.join(get_data_directory(check_name), 'conf.yaml.example')
def get_config_spec(check_name):
if check_name == 'agent':
return os.path.join(get_root(), 'pkg', 'config', 'conf_spec.yaml')
else:
path = load_manifest(check_name).get('assets', {}).get('configuration', {}).get('spec', '')
return os.path.join(get_root(), check_name, *path.split('/'))
def get_default_config_spec(check_name):
return os.path.join(get_root(), check_name, 'assets', 'configuration', 'spec.yaml')
def get_assets_directory(check_name):
return os.path.join(get_root(), check_name, 'assets')
def get_data_directory(check_name):
if check_name == 'agent':
return os.path.join(get_root(), 'pkg', 'config')
else:
return os.path.join(get_root(), check_name, 'datadog_checks', check_name, 'data')
def get_check_directory(check_name):
return os.path.join(get_root(), check_name, 'datadog_checks', check_name)
def get_test_directory(check_name):
return os.path.join(get_root(), check_name, 'tests')
def get_config_files(check_name):
"""TODO: Remove this function when all specs are finished"""
if check_name == 'agent':
return [os.path.join(get_root(), 'pkg', 'config', 'config_template.yaml')]
files = []
if check_name in NOT_CHECKS:
return files
root = get_root()
auto_conf = os.path.join(root, check_name, 'datadog_checks', check_name, 'data', 'auto_conf.yaml')
if file_exists(auto_conf):
files.append(auto_conf)
default_yaml = os.path.join(root, check_name, 'datadog_checks', check_name, 'data', 'conf.yaml.default')
if file_exists(default_yaml):
files.append(default_yaml)
example_yaml = os.path.join(root, check_name, 'datadog_checks', check_name, 'data', 'conf.yaml.example')
if file_exists(example_yaml):
files.append(example_yaml)
return sorted(files)
def get_check_files(check_name, file_suffix='.py', abs_file_path=True, include_dirs=None):
"""Return generator of filenames from within a given check.
By default, only includes files within 'datadog_checks' and 'tests' directories, this
can be expanded by adding to the `include_dirs` arg.
"""
base_dirs = ['datadog_checks', 'tests']
if include_dirs is not None:
base_dirs += include_dirs
bases = [os.path.join(get_root(), check_name, base) for base in base_dirs]
for base in bases:
for root, _, files in os.walk(base):
for f in files:
if f.endswith(file_suffix):
if abs_file_path:
yield os.path.join(root, f)
else:
yield f
def get_valid_checks():
return {path for path in os.listdir(get_root()) if file_exists(get_version_file(path))}
def get_valid_integrations():
return {path for path in os.listdir(get_root()) if file_exists(get_manifest_file(path))}
def get_testable_checks():
return {path for path in os.listdir(get_root()) if file_exists(get_tox_file(path))}
def get_metric_sources():
return {path for path in os.listdir(get_root()) if file_exists(get_metadata_file(path))}
def read_metric_data_file(check_name):
return read_file(os.path.join(get_root(), check_name, 'metadata.csv'))
def read_metadata_rows(metadata_file):
"""
Iterate over the rows of a `metadata.csv` file.
"""
with io.open(metadata_file, 'r', encoding='utf-8') as f:
reader = csv.DictReader(f, delimiter=',')
        # Force the header row to be consumed so `line_no` below starts at 2
        reader._fieldnames = reader.fieldnames
for line_no, row in enumerate(reader, 2):
yield line_no, row
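# Hypothetical usage (the check and column names are illustrative only): each
# row is a dict keyed by the CSV header, and line numbers start at 2 because
# line 1 is the header.
# for line_no, row in read_metadata_rows(get_metadata_file('nginx')):
#     print(line_no, row['metric_name'])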
def read_version_file(check_name):
return read_file(get_version_file(check_name))
def get_version_string(check_name, tag_prefix='v', pattern=None):
"""
Get the version string for the given check.
"""
# Check the version file of the integration if available
# Otherwise, get the latest SemVer git tag for the project
if check_name:
version = VERSION.search(read_version_file(check_name))
if version:
return version.group(1)
else:
return get_latest_tag(pattern=pattern, tag_prefix=tag_prefix)
def load_manifest(check_name):
"""
Load the manifest file into a dictionary
"""
manifest_path = get_manifest_file(check_name)
if file_exists(manifest_path):
return json.loads(read_file(manifest_path).strip())
return {}
def load_saved_views(path):
"""
    Load a saved views file into a dictionary
"""
if file_exists(path):
return json.loads(read_file(path).strip())
return {}
def write_manifest(manifest, check_name):
manifest_path = get_manifest_file(check_name)
write_file(manifest_path, f'{json.dumps(manifest, indent=2)}\n')
def get_bump_function(changelog_types):
minor_bump = False
for changelog_type in changelog_types:
bump_function = VERSION_BUMP.get(changelog_type)
if bump_function is semver.bump_major:
return bump_function
elif bump_function is semver.bump_minor:
minor_bump = True
return semver.bump_minor if minor_bump else semver.bump_patch
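# Illustrative behavior, assuming VERSION_BUMP maps changelog types to semver
# bump functions (e.g. a breaking change to bump_major, a feature to
# bump_minor): a single major entry wins immediately, otherwise any minor entry
# upgrades the default patch bump to a minor bump.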
def parse_agent_req_file(contents):
"""
Returns a dictionary mapping {check-package-name --> pinned_version} from the
given file contents. We can assume lines are in the form:
datadog-active-directory==1.1.1; sys_platform == 'win32'
"""
catalog = {}
for line in contents.splitlines():
toks = line.split('==', 1)
if len(toks) != 2 or not toks[0] or not toks[1]:
# if we get here, the requirements file is garbled but let's stay
# resilient
continue
name, other = toks
version = other.split(';')
catalog[name] = version[0]
return catalog
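# Illustrative parse (added for clarity): the environment marker after ';' is
# discarded, keeping only the pinned version.
assert parse_agent_req_file(
    "datadog-active-directory==1.1.1; sys_platform == 'win32'"
) == {'datadog-active-directory': '1.1.1'}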
def parse_version_parts(version):
if not isinstance(version, str):
return []
return [int(v) for v in version.split('.') if v.isdigit()]
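# Illustrative behavior (added for clarity): non-numeric segments are dropped
# and non-string input yields an empty list.
assert parse_version_parts('1.2.3') == [1, 2, 3]
assert parse_version_parts('1.2.3rc1') == [1, 2]
assert parse_version_parts(None) == []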
def has_e2e(check):
for path, _, files in os.walk(get_test_directory(check)):
for fn in files:
if fn.startswith('test_') and fn.endswith('.py'):
with open(os.path.join(path, fn)) as test_file:
if 'pytest.mark.e2e' in test_file.read():
return True
return False
def is_tile_only(check):
config_file = get_config_file(check)
return not os.path.exists(config_file)
def find_legacy_signature(check):
"""
Validate that the given check does not use the legacy agent signature (contains agentConfig)
"""
for path, _, files in os.walk(get_check_directory(check)):
for f in files:
if f.endswith('.py'):
with open(os.path.join(path, f)) as test_file:
for num, line in enumerate(test_file):
if "__init__" in line and "agentConfig" in line:
return str(f), num
return None
|
class Solution:
def lengthOfLongestSubstring(self, s: str) -> int:
return len_of_longest_substring_no_repeat(s)
def len_of_longest_substring_no_repeat(s):
    # Sliding window over a set: grow the right edge while characters are
    # unique, shrink the left edge when a duplicate is hit.
    n = len(s)
    res = 0
    sub_str = set()  # characters currently inside the window
    l = 0
    r = 0
    while r < n:
        if s[r] not in sub_str:
            sub_str.add(s[r])
            res = max(res, len(sub_str))
            r += 1
        else:
            sub_str.remove(s[l])
            l += 1
    return res
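# Quick sanity checks (illustrative, not part of the original snippet):
assert len_of_longest_substring_no_repeat('abcabcbb') == 3  # 'abc'
assert len_of_longest_substring_no_repeat('bbbbb') == 1     # 'b'
assert len_of_longest_substring_no_repeat('pwwkew') == 3    # 'wke'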
def len_of_longest_substring_no_repeat_dict(s):
    # Optimized variant: remember each character's last index so the left edge
    # can jump directly past the previous occurrence instead of moving one
    # step at a time.
    n = len(s)
    if n == 0:
        return 0
    d = dict()  # character -> index of its most recent occurrence
    m = 0
    l = 0
    r = 0
    while r < n:
        if s[r] in d:
            l = max(l, d[s[r]] + 1)
        d[s[r]] = r
        m = max(m, r - l + 1)
        r += 1
    return m
print(len_of_longest_substring_no_repeat_dict('abcabcbb'))  # -> 3
|