id (int64, 0–6k) | code (string, lengths 4k–8k) | code_compressed (list, lengths 0–44) |
---|---|---|
0 | #-*- coding: utf-8 -*-
from vi import utils
from vi.widgets import ListWidget, EditWidget
from vi.priorityqueue import actionDelegateSelector, ModuleWidgetSelector
from flare.i18n import translate
from vi.config import conf
from vi.pane import Pane
from flare.button import Button
class ContextAction(Button):
def __init__(self, module, handler, actionName, *args, **kwargs):
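# actionName is expected in the form "context.<module>.<var1,var2,...>"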
dsc = actionName.split(".", 3)
assert dsc[0] == "context", u"Invalid definition!"
mod = dsc[1]
vars = dsc[2].split(",")
assert mod in conf["modules"], "The module '%s' must provide an adminInfo when run in a context action" % mod
adminInfo = conf["modules"][mod]
if "name" in adminInfo:
title = adminInfo["name"]
else:
title = mod
icon = adminInfo.get("icon")
super(ContextAction, self).__init__(text=title, icon=icon)
self.widget = None
self.adminInfo = adminInfo
self.contextModule = mod
self.contextVariables = vars
self.title = title
self.filter = filter
self.icon = icon
self.addClass("context-%s" % self.contextModule)
self["class"].extend(["bar-item","btn--small"])
self.disable()
def onAttach(self):
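# Resolve the owning widget: list widgets toggle this action on selection changes, edit widgets enable it immediately in "edit" mode.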
super(ContextAction, self).onAttach()
self.widget = self.parent().parent()
if isinstance(self.widget, ListWidget):
self.widget.selectionChangedEvent.register(self)
elif isinstance(self.widget, EditWidget) and self.widget.mode == "edit":
self.enable()
def onDetach(self):
if isinstance(self.widget, ListWidget):
self.widget.selectionChangedEvent.unregister(self)
super(ContextAction, self).onDetach()
def onSelectionChanged(self, table, selection, *args, **kwargs):
if len(selection) > 0:
self.enable()
else:
self.disable()
def METHOD_NAME(self, sender=None):
assert self.widget, u"This action must be attached first!"
if isinstance(self.widget, ListWidget):
for s in self.widget.getCurrentSelection():
self.openModule(s)
elif isinstance(self.widget, EditWidget):
d = self.widget.serializeForDocument()
self.openModule(d)
def openModule(self, data, title=None):
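# Build a view title from the configured title bones, merge the widget and adminInfo contexts, resolve context variables from the entry, then open a view for the context module.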
# Generate title
if title is None:
for key in conf["vi.context.title.bones"]:
if title := data.get(key):
if isinstance(title, dict) and conf["flare.language.current"] in title:
title = title[conf["flare.language.current"]]
break
# Merge contexts
context = {}
context.update(self.widget.context or {})
context.update(self.adminInfo.get("context", {}))
# Evaluate context variables
for var in self.contextVariables:
if "=" in var:
key, value = var.split("=", 1)
if value[0] == "$":
value = data.get(value[1:])
else:
key = var
value = data.get("key")
context[key] = value
# Open a new view for the context module
conf["mainWindow"].openView(
translate("{{module}} - {{name}}", module=self.title, name=title),
self.adminInfo.get("icon") or "icon-edit",
self.contextModule + self.adminInfo["handler"],
self.contextModule,
None, # is not used...
data=utils.mergeDict(self.adminInfo, {"context": context}),
target="popup" if self.parent().parent().isSelector else "mainNav"
)
# OLD VERSION OPENS THE HANDLER DIRECTLY IN A POPUP.
# # Have a handler?
# assert (widgen := ModuleWidgetSelector.select(self.contextModule, self.adminInfo))
#
# #print(widgen, context, utils.mergeDict(self.adminInfo, {"context": context}))
# widget = widgen(self.contextModule, **utils.mergeDict(self.adminInfo, {"context": context}))
#
# if widget:
# widget.isSelector = True # this is done so that subsequent views are stacked in Popups...
#
# conf["mainWindow"].stackWidget(
# widget,
# title=translate("{{module}} - {{name}}", module=self.title, name=title),
# icon=self.adminInfo.get("icon")
# )
#
# else:
# print("Widget could not be generated")
@staticmethod
def isSuitableFor(module, handler, actionName):
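# Suitable only for known modules, "context." action names, and users with root or "<module>-view" access.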
if module is None or module not in conf["modules"].keys():
return False
if not actionName.startswith("context."):
return False
mod = actionName.split(".", 3)[1]
cuser = conf["currentUser"]
return "root" in cuser["access"] or ("%s-view" % mod) in cuser["access"]
actionDelegateSelector.insert(1, ContextAction.isSuitableFor, ContextAction) | [
{
"body": "\t\tassert self.widget, u\"This action must be attached first!\"\n\t\tif isinstance(self.widget, ListWidget):\n\t\t\tfor s in self.widget.getCurrentSelection():\n\t\t\t\tself.openModule(s)\n\t\telif isinstance(self.widget, EditWidget):\n\t\t\td = self.widget.serializeForDocument()\n\t\t\tself.openModule(d)",
"name": "METHOD_NAME(self,ContextAction(Button):"
}
] |
1 | from __future__ import annotations
import asyncio
import enum
import time
from functools import wraps
from typing import Any, Callable, Coroutine, MutableMapping, TypeVar, Protocol
from lru import LRU
R = TypeVar('R')
# Can't use ParamSpec due to https://github.com/python/typing/discussions/946
class CacheProtocol(Protocol[R]):
METHOD_NAME: MutableMapping[str, asyncio.Task[R]]
def __call__(self, *args: Any, **kwds: Any) -> asyncio.Task[R]:
...
def get_key(self, *args: Any, **kwargs: Any) -> str:
...
def invalidate(self, *args: Any, **kwargs: Any) -> bool:
...
def invalidate_containing(self, key: str) -> None:
...
def get_stats(self) -> tuple[int, int]:
...
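# dict subclass whose entries expire after the given TTL; stale keys are purged lazily on access.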
class ExpiringCache(dict):
def __init__(self, seconds: float):
self.__ttl: float = seconds
super().__init__()
def __verify_cache_integrity(self):
# Have to do this in two steps...
current_time = time.monotonic()
to_remove = [k for (k, (v, t)) in self.items() if current_time > (t + self.__ttl)]
for k in to_remove:
del self[k]
def __contains__(self, key: str):
self.__verify_cache_integrity()
return super().__contains__(key)
def __getitem__(self, key: str):
self.__verify_cache_integrity()
return super().__getitem__(key)
def __setitem__(self, key: str, value: Any):
super().__setitem__(key, (value, time.monotonic()))
class Strategy(enum.Enum):
lru = 1
raw = 2
timed = 3
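# Illustrative usage sketch for the decorator factory below (the example function name is an assumption):
#
#     @METHOD_NAME(maxsize=256, strategy=Strategy.lru)
#     async def fetch_profile(user_id: int) -> dict:
#         ...
#
#     # Equal arguments reuse the cached asyncio.Task; fetch_profile.invalidate(user_id)
#     # evicts one entry, and fetch_profile.invalidate_containing('fetch_profile') clears matching keys.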
def METHOD_NAME(
maxsize: int = 128,
strategy: Strategy = Strategy.lru,
ignore_kwargs: bool = False,
) -> Callable[[Callable[..., Coroutine[Any, Any, R]]], CacheProtocol[R]]:
def decorator(func: Callable[..., Coroutine[Any, Any, R]]) -> CacheProtocol[R]:
if strategy is Strategy.lru:
_internal_cache = LRU(maxsize)
_stats = _internal_cache.get_stats
elif strategy is Strategy.raw:
_internal_cache = {}
_stats = lambda: (0, 0)
elif strategy is Strategy.timed:
_internal_cache = ExpiringCache(maxsize)
_stats = lambda: (0, 0)
def _make_key(args: tuple[Any, ...], kwargs: dict[str, Any]) -> str:
# this is a bit of a cluster fuck
# we do care what 'self' parameter is when we __repr__ it
def _true_repr(o):
if o.__class__.__repr__ is object.__repr__:
return f'<{o.__class__.__module__}.{o.__class__.__name__}>'
return repr(o)
key = [f'{func.__module__}.{func.__name__}']
key.extend(_true_repr(o) for o in args)
if not ignore_kwargs:
for k, v in kwargs.items():
# note: this only really works for this use case in particular
# I want to pass asyncpg.Connection objects to the parameters
# however, they use default __repr__ and I do not care what
# connection is passed in, so I needed a bypass.
if k == 'connection' or k == 'pool':
continue
key.append(_true_repr(k))
key.append(_true_repr(v))
return ':'.join(key)
@wraps(func)
def wrapper(*args: Any, **kwargs: Any):
key = _make_key(args, kwargs)
try:
task = _internal_cache[key]
except KeyError:
_internal_cache[key] = task = asyncio.create_task(func(*args, **kwargs))
return task
else:
return task
def _invalidate(*args: Any, **kwargs: Any) -> bool:
try:
del _internal_cache[_make_key(args, kwargs)]
except KeyError:
return False
else:
return True
def _invalidate_containing(key: str) -> None:
to_remove = []
for k in _internal_cache.keys():
if key in k:
to_remove.append(k)
for k in to_remove:
try:
del _internal_cache[k]
except KeyError:
continue
wrapper.METHOD_NAME = _internal_cache
wrapper.get_key = lambda *args, **kwargs: _make_key(args, kwargs)
wrapper.invalidate = _invalidate
wrapper.get_stats = _stats
wrapper.invalidate_containing = _invalidate_containing
return wrapper # type: ignore
return decorator | [
{
"body": " ...",
"name": "invalidate(self,CacheProtocol(Protocol[R]):"
},
{
"body": " ...",
"name": "invalidate_containing(self,CacheProtocol(Protocol[R]):"
},
{
"body": " self.__ttl: float = seconds\n super().__init__()",
"name": "__init__(self,CacheProtocol(Protocol[R]):"
},
{
"body": " self.__verify_cache_integrity()\n return super().__contains__(key)",
"name": "__contains__(self,CacheProtocol(Protocol[R]):"
},
{
"body": " self.__verify_cache_integrity()\n return super().__getitem__(key)",
"name": "__getitem__(self,CacheProtocol(Protocol[R]):"
},
{
"body": " maxsize: int = 128,\n strategy: Strategy = Strategy.lru,\n ignore_kwargs: bool = False,",
"name": "METHOD_NAME(CacheProtocol(Protocol[R]):"
},
{
"body": " if strategy is Strategy.lru:\n _internal_cache = LRU(maxsize)\n _stats = _internal_cache.get_stats\n elif strategy is Strategy.raw:\n _internal_cache = {}\n _stats = lambda: (0, 0)\n elif strategy is Strategy.timed:\n _internal_cache = ExpiringCache(maxsize)\n _stats = lambda: (0, 0)\n def _make_key(args: tuple[Any, ...], kwargs: dict[str, Any]) -> str:\n # this is a bit of a cluster fuck\n # we do care what 'self' parameter is when we __repr__ it\n def _true_repr(o):\n if o.__class__.__repr__ is object.__repr__:\n return f'<{o.__class__.__module__}.{o.__class__.__name__}>'\n return repr(o)\n key = [f'{func.__module__}.{func.__name__}']\n key.extend(_true_repr(o) for o in args)\n if not ignore_kwargs:\n for k, v in kwargs.items():\n # note: this only really works for this use case in particular\n # I want to pass asyncpg.Connection objects to the parameters\n # however, they use default __repr__ and I do not care what\n # connection is passed in, so I needed a bypass.\n if k == 'connection' or k == 'pool':\n continue\n key.append(_true_repr(k))\n key.append(_true_repr(v))\n return ':'.join(key)\n @wraps(func)\n def wrapper(*args: Any, **kwargs: Any):\n key = _make_key(args, kwargs)\n try:\n task = _internal_cache[key]\n except KeyError:\n _internal_cache[key] = task = asyncio.create_task(func(*args, **kwargs))\n return task\n else:\n return task\n def _invalidate(*args: Any, **kwargs: Any) -> bool:\n try:\n del _internal_cache[_make_key(args, kwargs)]\n except KeyError:\n return False\n else:\n return True\n def _invalidate_containing(key: str) -> None:\n to_remove = []\n for k in _internal_cache.keys():\n if key in k:\n to_remove.append(k)\n for k in to_remove:\n try:\n del _internal_cache[k]\n except KeyError:\n continue\n wrapper.METHOD_NAME = _internal_cache\n wrapper.get_key = lambda *args, **kwargs: _make_key(args, kwargs)\n wrapper.invalidate = _invalidate\n wrapper.get_stats = _stats\n wrapper.invalidate_containing = _invalidate_containing\n return wrapper # type: ignore",
"name": "decorator(func:CacheProtocol(Protocol[R]):"
},
{
"body": " # this is a bit of a cluster fuck\n # we do care what 'self' parameter is when we __repr__ it\n def _true_repr(o):\n if o.__class__.__repr__ is object.__repr__:\n return f'<{o.__class__.__module__}.{o.__class__.__name__}>'\n return repr(o)\n key = [f'{func.__module__}.{func.__name__}']\n key.extend(_true_repr(o) for o in args)\n if not ignore_kwargs:\n for k, v in kwargs.items():\n # note: this only really works for this use case in particular\n # I want to pass asyncpg.Connection objects to the parameters\n # however, they use default __repr__ and I do not care what\n # connection is passed in, so I needed a bypass.\n if k == 'connection' or k == 'pool':\n continue\n key.append(_true_repr(k))\n key.append(_true_repr(v))\n return ':'.join(key)",
"name": "_make_key(args:CacheProtocol(Protocol[R]):"
},
{
"body": " if o.__class__.__repr__ is object.__repr__:\n return f'<{o.__class__.__module__}.{o.__class__.__name__}>'\n return repr(o)",
"name": "_true_repr(o):CacheProtocol(Protocol[R]):"
},
{
"body": " key = _make_key(args, kwargs)\n try:\n task = _internal_cache[key]\n except KeyError:\n _internal_cache[key] = task = asyncio.create_task(func(*args, **kwargs))\n return task\n else:\n return task",
"name": "wrapper(*args:CacheProtocol(Protocol[R]):"
},
{
"body": " try:\n del _internal_cache[_make_key(args, kwargs)]\n except KeyError:\n return False\n else:\n return True",
"name": "_invalidate(*args:CacheProtocol(Protocol[R]):"
},
{
"body": " to_remove = []\n for k in _internal_cache.keys():\n if key in k:\n to_remove.append(k)\n for k in to_remove:\n try:\n del _internal_cache[k]\n except KeyError:\n continue",
"name": "_invalidate_containing(key:CacheProtocol(Protocol[R]):"
}
] |
2 | import logging
import os
import subprocess
import time
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional
from fastapi import FastAPI, HTTPException
from meerkat.interactive.server import Server
from meerkat.tools.utils import WeakMapping
if TYPE_CHECKING:
from meerkat.interactive.modification import Modification
from meerkat.mixins.identifiable import IdentifiableMixin
logger = logging.getLogger(__name__)
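# Module-level state for an interactive Meerkat session: API/frontend info, secrets, an optional LLM client, registries of identifiable objects, and modification/progress queues.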
@dataclass
class Secrets:
api_keys: Dict[str, str] = field(default_factory=dict)
def add(self, api: str, api_key: str):
self.api_keys[api] = api_key
def get(self, api: str):
try:
return self.api_keys[api]
except KeyError:
raise HTTPException(
status_code=404,
detail=f"No API key found for {api}.\
Add one with `secrets.add(api, api_key)`.",
)
@dataclass
class LanguageModel:
manifest: Any = None
def set(self, client: str = "ai21", engine: str = "j1-jumbo"):
from manifest import Manifest
self.manifest = Manifest(
client_name=client,
client_connection=state.secrets.get(client),
engine=engine,
cache_name="sqlite",
cache_connection="./logs",
)
def get(self):
return self.manifest
@dataclass
class APIInfo:
api: Optional[FastAPI]
port: Optional[int]
server: Optional[Server] = None
name: str = "localhost"
shared: bool = False
process: Optional[subprocess.Popen] = None
_url: Optional[str] = None
@property
def url(self):
if self._url:
return self._url
if self.shared:
return f"http://{self.name}"
return f"http://{self.name}:{self.port}"
@property
def docs_url(self):
return f"{self.url}/docs"
@property
def docs(self):
from IPython.display import IFrame
return IFrame(self.docs_url, width=800, height=600)
@dataclass
class FrontendInfo:
package_manager: Optional[str]
port: Optional[int]
name: str = "localhost"
shared: bool = False
process: Optional[subprocess.Popen] = None
_url: Optional[str] = None
@property
def url(self):
if self._url:
return self._url
if self.shared:
return f"http://{self.name}"
return f"http://{self.name}:{self.port}"
@dataclass
class Identifiables:
"""We maintain a separate group for each type of identifiable object.
Objects in the group are identified by a unique id.
"""
columns: WeakMapping = field(default_factory=WeakMapping)
dataframes: WeakMapping = field(default_factory=WeakMapping)
pages: Mapping = field(default_factory=dict)
slicebys: WeakMapping = field(default_factory=WeakMapping)
aggregations: WeakMapping = field(default_factory=WeakMapping)
box_operations: WeakMapping = field(default_factory=WeakMapping)
components: WeakMapping = field(default_factory=WeakMapping)
refs: WeakMapping = field(default_factory=WeakMapping)
stores: WeakMapping = field(default_factory=WeakMapping)
endpoints: WeakMapping = field(default_factory=WeakMapping)
routers: WeakMapping = field(default_factory=WeakMapping)
nodes: WeakMapping = field(default_factory=WeakMapping)
states: WeakMapping = field(default_factory=WeakMapping)
def add(self, obj: "IdentifiableMixin"):
group = getattr(self, obj.identifiable_group)
group[obj.id] = obj
def get(self, id: str, group: str):
group, group_name = getattr(self, group), group
try:
value = group[id]
except KeyError:
raise HTTPException(
status_code=404,
detail=f"No object in group '{group_name}' with id '{id}'",
)
return value
@dataclass
class ModificationQueue:
"""A queue of modifications to be applied to a dataframe."""
queue: List["Modification"] = field(default_factory=list)
# Boolean attribute that controls whether the queue is accepting new
# modifications
# When _ready is False, `add` will no-op
_ready: bool = False
def add(self, modification: "Modification"):
if self._ready:
logger.debug(f"Adding modification {modification} to queue.")
self.queue.append(modification)
return
# Do nothing if not ready
logger.debug(f"Modification queue not ready. Ignoring {modification}.")
def clear(self) -> List["Modification"]:
"""Clear the modification queue, and return the old queue."""
logger.debug("Clearing modification queue.")
current_queue = self.queue
self.queue = []
return current_queue
def METHOD_NAME(self):
"""Ready the queue for accepting new modifications."""
count = 0
while self._ready:
# Modification queue is already in use
# Wait for it to be unready
logger.debug("Modification queue is already in use. Waiting...")
time.sleep(0.1)
count += 1
if count == 1e-3:
logger.warn(
"Modification queue is taking a long time to unready."
"Check for deadlocks."
)
self._ready = True
logger.debug("Modification queue is now ready.")
def unready(self):
"""Unready the queue for accepting new modifications."""
self._ready = False
logger.debug("Modification queue is now unready.")
@dataclass
class ProgressQueue:
"""A queue of progress messages to be displayed to the user."""
queue: list = field(default_factory=list)
def add(self, message: str):
self.queue.append(message)
def clear(self) -> list:
"""Clear the progress queue, and return the old queue."""
current_queue = self.queue
self.queue = []
return current_queue
@dataclass
class GlobalState:
api_info: Optional[APIInfo] = None
frontend_info: Optional[FrontendInfo] = None
identifiables: Identifiables = field(default_factory=Identifiables)
secrets: Secrets = field(default_factory=Secrets)
llm: LanguageModel = field(default_factory=LanguageModel)
modification_queue: ModificationQueue = field(default_factory=ModificationQueue)
progress_queue: ProgressQueue = field(default_factory=ProgressQueue)
global state
state = GlobalState()
def add_secret(api: str, api_key: str):
"""Add an API key to the global state."""
state.secrets.add(api, api_key)
def run_on_startup():
"""Run on startup."""
frontend_url = os.environ.get("MEERKAT_FRONTEND_URL", None)
if frontend_url:
state.frontend_info = FrontendInfo(None, None, _url=frontend_url)
api_url = os.environ.get("MEERKAT_API_URL", None)
if api_url:
state.api_info = APIInfo(None, None, _url=api_url)
run_on_startup() | [
{
"body": " try:\n return self.api_keys[api]\n except KeyError:\n raise HTTPException(\n status_code=404,\n detail=f\"No API key found for {api}.\\\n Add one with `secrets.add(api, api_key)`.\",\n )",
"name": "get(self,ModificationQueue:"
},
{
"body": " from manifest import Manifest\n self.manifest = Manifest(\n client_name=client,\n client_connection=state.secrets.get(client),\n engine=engine,\n cache_name=\"sqlite\",\n cache_connection=\"./logs\",\n )",
"name": "set(self,ModificationQueue:"
},
{
"body": " return self.manifest",
"name": "get(self):ModificationQueue:"
},
{
"body": " if self._url:\n return self._url\n if self.shared:\n return f\"http://{self.name}\"\n return f\"http://{self.name}:{self.port}\"",
"name": "url(self):ModificationQueue:"
},
{
"body": " if self._url:\n return self._url\n if self.shared:\n return f\"http://{self.name}\"\n return f\"http://{self.name}:{self.port}\"",
"name": "url(self):ModificationQueue:"
},
{
"body": " group = getattr(self, obj.identifiable_group)\n group[obj.id] = obj",
"name": "add(self,ModificationQueue:"
},
{
"body": " group, group_name = getattr(self, group), group\n try:\n value = group[id]\n except KeyError:\n raise HTTPException(\n status_code=404,\n detail=f\"No object in group '{group_name}' with id '{id}'\",\n )\n return value",
"name": "get(self,ModificationQueue:"
},
{
"body": " if self._ready:\n logger.debug(f\"Adding modification {modification} to queue.\")\n self.queue.append(modification)\n return\n # Do nothing if not ready\n logger.debug(f\"Modification queue not ready. Ignoring {modification}.\")",
"name": "add(self,ModificationQueue:"
},
{
"body": " \"\"\"Clear the modification queue, and return the old queue.\"\"\"\n logger.debug(\"Clearing modification queue.\")\n current_queue = self.queue\n self.queue = []\n return current_queue",
"name": "clear(self)ModificationQueue:"
},
{
"body": " \"\"\"Ready the queue for accepting new modifications.\"\"\"\n count = 0\n while self._ready:\n # Modification queue is already in use\n # Wait for it to be unready\n logger.debug(\"Modification queue is already in use. Waiting...\")\n time.sleep(0.1)\n count += 1\n if count == 1e-3:\n logger.warn(\n \"Modification queue is taking a long time to unready.\"\n \"Check for deadlocks.\"\n )\n self._ready = True\n logger.debug(\"Modification queue is now ready.\")",
"name": "METHOD_NAME(self):ModificationQueue:"
},
{
"body": " \"\"\"Unready the queue for accepting new modifications.\"\"\"\n self._ready = False\n logger.debug(\"Modification queue is now unready.\")",
"name": "unready(self):ModificationQueue:"
},
{
"body": " self.queue.append(message)",
"name": "add(self,ModificationQueue:"
},
{
"body": " \"\"\"Clear the progress queue, and return the old queue.\"\"\"\n current_queue = self.queue\n self.queue = []\n return current_queue",
"name": "clear(self)ModificationQueue:"
}
] |
3 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkiot.endpoint import endpoint_data
class UpdateSubscribeRelationRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Iot', '2018-01-20', 'UpdateSubscribeRelation')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_OtaEventFlag(self):
return self.get_query_params().get('OtaEventFlag')
def set_OtaEventFlag(self,OtaEventFlag):
self.add_query_param('OtaEventFlag',OtaEventFlag)
def get_DeviceTopoLifeCycleFlag(self):
return self.get_query_params().get('DeviceTopoLifeCycleFlag')
def set_DeviceTopoLifeCycleFlag(self,DeviceTopoLifeCycleFlag):
self.add_query_param('DeviceTopoLifeCycleFlag',DeviceTopoLifeCycleFlag)
def get_Type(self):
return self.get_query_params().get('Type')
def set_Type(self,Type):
self.add_query_param('Type',Type)
def get_DeviceLifeCycleFlag(self):
return self.get_query_params().get('DeviceLifeCycleFlag')
def set_DeviceLifeCycleFlag(self,DeviceLifeCycleFlag):
self.add_query_param('DeviceLifeCycleFlag',DeviceLifeCycleFlag)
def get_IotInstanceId(self):
return self.get_query_params().get('IotInstanceId')
def set_IotInstanceId(self,IotInstanceId):
self.add_query_param('IotInstanceId',IotInstanceId)
def get_DeviceStatusChangeFlag(self):
return self.get_query_params().get('DeviceStatusChangeFlag')
def set_DeviceStatusChangeFlag(self,DeviceStatusChangeFlag):
self.add_query_param('DeviceStatusChangeFlag',DeviceStatusChangeFlag)
def get_OtaVersionFlag(self):
return self.get_query_params().get('OtaVersionFlag')
def set_OtaVersionFlag(self,OtaVersionFlag):
self.add_query_param('OtaVersionFlag',OtaVersionFlag)
def get_DeviceTagFlag(self):
return self.get_query_params().get('DeviceTagFlag')
def set_DeviceTagFlag(self,DeviceTagFlag):
self.add_query_param('DeviceTagFlag',DeviceTagFlag)
def get_ConsumerGroupIdss(self):
return self.get_query_params().get('ConsumerGroupIds')
def set_ConsumerGroupIdss(self, ConsumerGroupIdss):
for depth1 in range(len(ConsumerGroupIdss)):
if ConsumerGroupIdss[depth1] is not None:
self.add_query_param('ConsumerGroupIds.' + str(depth1 + 1) , ConsumerGroupIdss[depth1])
def get_ProductKey(self):
return self.get_query_params().get('ProductKey')
def set_ProductKey(self,ProductKey):
self.add_query_param('ProductKey',ProductKey)
def get_ThingHistoryFlag(self):
return self.get_query_params().get('ThingHistoryFlag')
def set_ThingHistoryFlag(self,ThingHistoryFlag):
self.add_query_param('ThingHistoryFlag',ThingHistoryFlag)
def get_FoundDeviceListFlag(self):
return self.get_query_params().get('FoundDeviceListFlag')
def set_FoundDeviceListFlag(self,FoundDeviceListFlag):
self.add_query_param('FoundDeviceListFlag',FoundDeviceListFlag)
def get_OtaJobFlag(self):
return self.get_query_params().get('OtaJobFlag')
def METHOD_NAME(self,OtaJobFlag):
self.add_query_param('OtaJobFlag',OtaJobFlag)
def get_SubscribeFlags(self):
return self.get_query_params().get('SubscribeFlags')
def set_SubscribeFlags(self,SubscribeFlags):
self.add_query_param('SubscribeFlags',SubscribeFlags)
def get_DeviceDataFlag(self):
return self.get_query_params().get('DeviceDataFlag')
def set_DeviceDataFlag(self,DeviceDataFlag):
self.add_query_param('DeviceDataFlag',DeviceDataFlag)
def get_MnsConfiguration(self):
return self.get_query_params().get('MnsConfiguration')
def set_MnsConfiguration(self,MnsConfiguration):
self.add_query_param('MnsConfiguration',MnsConfiguration | [
{
"body": "\t\treturn self.get_query_params().get('DeviceLifeCycleFlag')",
"name": "get_DeviceLifeCycleFlag(self):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('DeviceLifeCycleFlag',DeviceLifeCycleFlag)",
"name": "set_DeviceLifeCycleFlag(self,DeviceLifeCycleFlag):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('DeviceStatusChangeFlag')",
"name": "get_DeviceStatusChangeFlag(self):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('DeviceStatusChangeFlag',DeviceStatusChangeFlag)",
"name": "set_DeviceStatusChangeFlag(self,DeviceStatusChangeFlag):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('DeviceTagFlag')",
"name": "get_DeviceTagFlag(self):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('DeviceTagFlag',DeviceTagFlag)",
"name": "set_DeviceTagFlag(self,DeviceTagFlag):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ConsumerGroupIds')",
"name": "get_ConsumerGroupIdss(self):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ProductKey')",
"name": "get_ProductKey(self):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ProductKey',ProductKey)",
"name": "set_ProductKey(self,ProductKey):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ThingHistoryFlag')",
"name": "get_ThingHistoryFlag(self):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ThingHistoryFlag',ThingHistoryFlag)",
"name": "set_ThingHistoryFlag(self,ThingHistoryFlag):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('FoundDeviceListFlag')",
"name": "get_FoundDeviceListFlag(self):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('FoundDeviceListFlag',FoundDeviceListFlag)",
"name": "set_FoundDeviceListFlag(self,FoundDeviceListFlag):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('OtaJobFlag',OtaJobFlag)",
"name": "METHOD_NAME(self,OtaJobFlag):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('SubscribeFlags')",
"name": "get_SubscribeFlags(self):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('SubscribeFlags',SubscribeFlags)",
"name": "set_SubscribeFlags(self,SubscribeFlags):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('DeviceDataFlag')",
"name": "get_DeviceDataFlag(self):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('DeviceDataFlag',DeviceDataFlag)",
"name": "set_DeviceDataFlag(self,DeviceDataFlag):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('MnsConfiguration')",
"name": "get_MnsConfiguration(self):UpdateSubscribeRelationRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('MnsConfiguration',MnsConfiguration",
"name": "set_MnsConfiguration(self,MnsConfiguration):UpdateSubscribeRelationRequest(RpcRequest):"
}
] |
4 | # MicroPython uasyncio module
# MIT license; Copyright (c) 2019-2020 Damien P. George
# This file contains the core TaskQueue based on a pairing heap, and the core Task class.
# They can optionally be replaced by C implementations.
# This file is a modified version, based on the extmod in Circuitpython, for
# unit testing in KMK only.
from supervisor import ticks_ms
from kmk.kmktime import ticks_diff
cur_task = None
__task_queue = None
class CancelledError(BaseException):
pass
# pairing-heap meld of 2 heaps; O(1)
def ph_meld(h1, h2):
if h1 is None:
return h2
if h2 is None:
return h1
lt = ticks_diff(h1.ph_key, h2.ph_key) < 0
if lt:
if h1.ph_child is None:
h1.ph_child = h2
else:
h1.ph_child_last.ph_next = h2
h1.ph_child_last = h2
h2.ph_next = None
h2.ph_rightmost_parent = h1
return h1
else:
h1.ph_next = h2.ph_child
h2.ph_child = h1
if h1.ph_next is None:
h2.ph_child_last = h1
h1.ph_rightmost_parent = h2
return h2
# pairing-heap pairing operation; amortised O(log N)
def ph_pairing(child):
heap = None
while child is not None:
n1 = child
child = child.ph_next
n1.ph_next = None
if child is not None:
n2 = child
child = child.ph_next
n2.ph_next = None
n1 = ph_meld(n1, n2)
heap = ph_meld(heap, n1)
return heap
# pairing-heap delete of a node; stable, amortised O(log N)
def ph_delete(heap, node):
if node is heap:
child = heap.ph_child
node.ph_child = None
return ph_pairing(child)
# Find parent of node
parent = node
while parent.ph_next is not None:
parent = parent.ph_next
parent = parent.ph_rightmost_parent
if parent is None or parent.ph_child is None:
return heap
# Replace node with pairing of its children
if node is parent.ph_child and node.ph_child is None:
parent.ph_child = node.ph_next
node.ph_next = None
return heap
elif node is parent.ph_child:
child = node.ph_child
next = node.ph_next
node.ph_child = None
node.ph_next = None
node = ph_pairing(child)
parent.ph_child = node
else:
n = parent.ph_child
while node is not n.ph_next:
n = n.ph_next
if not n:
return heap
child = node.ph_child
next = node.ph_next
node.ph_child = None
node.ph_next = None
node = ph_pairing(child)
if node is None:
node = n
else:
n.ph_next = node
node.ph_next = next
if next is None:
node.ph_rightmost_parent = parent
parent.ph_child_last = node
return heap
# TaskQueue class based on the above pairing-heap functions.
class TaskQueue:
def __init__(self):
self.heap = None
def peek(self):
return self.heap
def METHOD_NAME(self, v, key):
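# Insert task v into the heap, ordered by key (a ticks_ms deadline); push_head passes the current time.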
v.data = None
v.ph_key = key
v.ph_child = None
v.ph_next = None
self.heap = ph_meld(v, self.heap)
def push_head(self, v):
self.METHOD_NAME(v, ticks_ms())
def pop_head(self):
v = self.heap
self.heap = ph_pairing(v.ph_child)
# v.ph_child = None
return v
def remove(self, v):
self.heap = ph_delete(self.heap, v)
# Task class representing a coroutine, can be waited on and cancelled.
class Task:
def __init__(self, coro, globals=None):
self.coro = coro # Coroutine of this Task
self.data = None # General data for queue it is waiting on
self.state = True # None, False, True or a TaskQueue instance
self.ph_key = 0 # Pairing heap
self.ph_child = None # Paring heap
self.ph_child_last = None # Paring heap
self.ph_next = None # Paring heap
self.ph_rightmost_parent = None # Paring heap
def __await__(self):
if not self.state:
# Task finished, signal that is has been await'ed on.
self.state = False
elif self.state is True:
# Allocated head of linked list of Tasks waiting on completion of this task.
self.state = TaskQueue()
return self
def __next__(self):
if not self.state:
if self.data is None:
# Task finished but has already been sent to the loop's exception handler.
raise StopIteration
else:
# Task finished, raise return value to caller so it can continue.
raise self.data
else:
# Put calling task on waiting queue.
self.state.push_head(cur_task)
# Set calling task's data to this task that it waits on, to double-link it.
cur_task.data = self
def done(self):
return not self.state
def cancel(self):
# Check if task is already finished.
if not self.state:
return False
# Can't cancel self (not supported yet).
if self is cur_task:
raise RuntimeError("can't cancel self")
# If Task waits on another task then forward the cancel to the one it's waiting on.
while isinstance(self.data, Task):
self = self.data
# Reschedule Task as a cancelled task.
if hasattr(self.data, 'remove'):
# Not on the main running queue, remove the task from the queue it's on.
self.data.remove(self)
__task_queue.push_head(self)
elif ticks_diff(self.ph_key, ticks_ms()) > 0:
# On the main running queue but scheduled in the future, so bring it forward to now.
__task_queue.remove(self)
__task_queue.push_head(self)
self.data = CancelledError
return True | [
{
"body": " if node is heap:\n child = heap.ph_child\n node.ph_child = None\n return ph_pairing(child)\n # Find parent of node\n parent = node\n while parent.ph_next is not None:\n parent = parent.ph_next\n parent = parent.ph_rightmost_parent\n if parent is None or parent.ph_child is None:\n return heap\n # Replace node with pairing of its children\n if node is parent.ph_child and node.ph_child is None:\n parent.ph_child = node.ph_next\n node.ph_next = None\n return heap\n elif node is parent.ph_child:\n child = node.ph_child\n next = node.ph_next\n node.ph_child = None\n node.ph_next = None\n node = ph_pairing(child)\n parent.ph_child = node\n else:\n n = parent.ph_child\n while node is not n.ph_next:\n n = n.ph_next\n if not n:\n return heap\n child = node.ph_child\n next = node.ph_next\n node.ph_child = None\n node.ph_next = None\n node = ph_pairing(child)\n if node is None:\n node = n\n else:\n n.ph_next = node\n node.ph_next = next\n if next is None:\n node.ph_rightmost_parent = parent\n parent.ph_child_last = node\n return heap",
"name": "ph_delete(heap,TaskQueue:"
},
{
"body": " v.data = None\n v.ph_key = key\n v.ph_child = None\n v.ph_next = None\n self.heap = ph_meld(v, self.heap)",
"name": "METHOD_NAME(self,TaskQueue:"
},
{
"body": " self.coro = coro # Coroutine of this Task\n self.data = None # General data for queue it is waiting on\n self.state = True # None, False, True or a TaskQueue instance\n self.ph_key = 0 # Pairing heap\n self.ph_child = None # Paring heap\n self.ph_child_last = None # Paring heap\n self.ph_next = None # Paring heap\n self.ph_rightmost_parent = None # Paring heap",
"name": "__init__(self,TaskQueue:"
},
{
"body": " if not self.state:\n # Task finished, signal that is has been await'ed on.\n self.state = False\n elif self.state is True:\n # Allocated head of linked list of Tasks waiting on completion of this task.\n self.state = TaskQueue()\n return self",
"name": "__await__(self):TaskQueue:"
},
{
"body": " if not self.state:\n if self.data is None:\n # Task finished but has already been sent to the loop's exception handler.\n raise StopIteration\n else:\n # Task finished, raise return value to caller so it can continue.\n raise self.data\n else:\n # Put calling task on waiting queue.\n self.state.push_head(cur_task)\n # Set calling task's data to this task that it waits on, to double-link it.\n cur_task.data = self",
"name": "__next__(self):TaskQueue:"
},
{
"body": " return not self.state",
"name": "done(self):TaskQueue:"
},
{
"body": " # Check if task is already finished.\n if not self.state:\n return False\n # Can't cancel self (not supported yet).\n if self is cur_task:\n raise RuntimeError(\"can't cancel self\")\n # If Task waits on another task then forward the cancel to the one it's waiting on.\n while isinstance(self.data, Task):\n self = self.data\n # Reschedule Task as a cancelled task.\n if hasattr(self.data, 'remove'):\n # Not on the main running queue, remove the task from the queue it's on.\n self.data.remove(self)\n __task_queue.push_head(self)\n elif ticks_diff(self.ph_key, ticks_ms()) > 0:\n # On the main running queue but scheduled in the future, so bring it forward to now.\n __task_queue.remove(self)\n __task_queue.push_head(self)\n self.data = CancelledError\n return True",
"name": "cancel(self):TaskQueue:"
}
] |
5 | #!/usr/bin/env python
#/*##########################################################################
#
# The PyMca X-Ray Fluorescence Toolkit
#
# Copyright (c) 2004-2014 European Synchrotron Radiation Facility
#
# This file is part of the PyMca X-ray Fluorescence Toolkit developed at
# the ESRF by the Software group.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
#############################################################################*/
__author__ = "V.A. Sole - ESRF Data Analysis"
__contact__ = "sole@esrf.fr"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
"""
This class just makes explicit the Specfile methods called from
PyMca.
It can be used to wrap other formats as specfile
"""
import os
import numpy
import logging
_logger = logging.getLogger(__name__)
class SpecFileAbstractClass(object):
def __init__(self, filename):
if not os.path.exists(filename):
return None
self.motorNames = []
def list(self):
"""
If there is only one scan returns 1:1
with two scans returns 1:2
"""
_logger.debug("list method called")
return "1:1"
def __getitem__(self, item):
"""
Returns the scan data
"""
_logger.debug("__getitem__ called")
return self.scandata[item]
def select(self, key):
"""
key is of the from s.o
scan number, scan order
"""
n = key.split(".")
return self.__getitem__(int(n[0])-1)
def scanno(self):
"""
Gives back the number of scans in the file
"""
return 0
def allmotors(self):
return self.motorNames
class SpecFileAbstractScan(object):
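# Wraps a 2-D data array (or a list of MCA spectra) so it can be read through the Specfile scan API.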
def __init__(self, data, scantype=None, identification=None, scanheader=None, labels=None,point=True):
if identification is None:identification='1.1'
if scantype is None:scantype='SCAN'
self.scanheader = scanheader
if hasattr(data, "shape"):
if len(data.shape) == 1:
data.shape = -1, 1
self.__point = point
if scantype == 'SCAN':
(rows, cols) = data.shape
if self.__point:
self.__data = numpy.zeros((rows, cols +1 ), numpy.float32)
self.__data[:,0] = numpy.arange(rows) * 1.0
self.__data[:,1:] = data * 1
self.__cols = cols + 1
self.labels = ['Point']
else:
self.__data = numpy.zeros((rows, cols), numpy.float32)
self.__data[:,0:] = data * 1
self.__cols = cols
self.labels = []
else:
self.__data = data
if isinstance(self.__data, numpy.ndarray):
(rows, cols) = data.shape
else:
#we have a list of MCAs
rows = 0
cols = len(data)
self.__cols = cols
self.labels = []
self.scantype = scantype
self.rows = rows
if labels is None:
for i in range(cols):
self.labels.append('Column %d' % i)
else:
for label in labels:
self.labels.append('%s' % label)
n = identification.split(".")
self.__number = int(n[0])
self.__order = int(n[1])
def alllabels(self):
"""
These are the labels associated to the counters
"""
if self.scantype == 'SCAN':
return self.labels
else:
return []
def allmotorpos(self):
return []
def cols(self):
return self.__cols
def METHOD_NAME(self):
_logger.debug("command called")
text = ""
if self.scanheader is not None:
if len(self.scanheader):
text = self.scanheader[0]
return text
def data(self):
return numpy.transpose(self.__data)
def datacol(self,col):
return self.__data[:,col]
def dataline(self,line):
return self.__data[line,:]
def date(self):
text = 'sometime'
return text
def fileheader(self):
_logger.debug("file header called")
labels = '#L '
for label in self.labels:
labels += ' '+label
if self.scanheader is None:
if self.scantype == 'SCAN':
return ['#S 1 Unknown command','#N %d' % self.cols(),labels]
else:
return ['#S 1 Unknown command']
else:
_logger.debug("returning %s", self.scanheader)
return self.scanheader
def header(self,key):
if key == 'S': return self.fileheader()[0]
elif key == 'N':return self.fileheader()[-2]
elif key == 'L':return self.fileheader()[-1]
elif key == '@CALIB':
output = []
if self.scanheader is None: return output
for line in self.scanheader:
if line.startswith(key) or\
line.startswith('#'+key):
output.append(line)
return output
elif key == '@CTIME':
# expected to send Preset Time, Live Time, Real (Elapsed) Time
output = []
if self.scanheader is None: return output
for line in self.scanheader:
if line.startswith(key) or\
line.startswith('#'+key):
output.append(line)
return output
elif key == "" or key == " ":
return self.fileheader()
elif self.scanheader is None:
return []
else:
output = []
for line in self.scanheader:
if line.startswith("#"+key) or\
line.startswith(key):
output.append(line)
return output
def order(self):
return self.__order
def number(self):
return self.__number
def lines(self):
if self.scantype == 'SCAN':
return self.rows
else:
return 0
def nbmca(self):
if self.scantype == 'SCAN':
return 0
else:
return self.__cols
def mca(self,number):
if number <= 0:
raise IndexError("Mca numbering starts at 1")
elif number > self.nbmca():
raise IndexError("Only %d MCAs in file" % self.nbmca())
if hasattr(self.__data, "shape"):
return self.__data[:,number-1]
else:
return self.__data[number-1]
def test():
pass
if __name__ == "__main__":
test()
| [
{
"body": " if not os.path.exists(filename):\n return None\n self.motorNames = []",
"name": "__init__(self,SpecFileAbstractScan(object):"
},
{
"body": " \"\"\"\n If there is only one scan returns 1:1\n with two scans returns 1:2\n \"\"\"\n _logger.debug(\"list method called\")\n return \"1:1\"",
"name": "list(self):SpecFileAbstractScan(object):"
},
{
"body": " \"\"\"\n Returns the scan data\n \"\"\"\n _logger.debug(\"__getitem__ called\")\n return self.scandata[item]",
"name": "__getitem__(self,SpecFileAbstractScan(object):"
},
{
"body": " \"\"\"\n key is of the from s.o\n scan number, scan order\n \"\"\"\n n = key.split(\".\")\n return self.__getitem__(int(n[0])-1)",
"name": "select(self,SpecFileAbstractScan(object):"
},
{
"body": " \"\"\"\n Gives back the number of scans in the file\n \"\"\"\n return 0",
"name": "scanno(self):SpecFileAbstractScan(object):"
},
{
"body": " return self.motorNames",
"name": "allmotors(self):SpecFileAbstractScan(object):"
},
{
"body": " if identification is None:identification='1.1'\n if scantype is None:scantype='SCAN'\n self.scanheader = scanheader\n if hasattr(data, \"shape\"):\n if len(data.shape) == 1:\n data.shape = -1, 1\n self.__point = point\n if scantype == 'SCAN':\n (rows, cols) = data.shape\n if self.__point:\n self.__data = numpy.zeros((rows, cols +1 ), numpy.float32)\n self.__data[:,0] = numpy.arange(rows) * 1.0\n self.__data[:,1:] = data * 1\n self.__cols = cols + 1\n self.labels = ['Point']\n else:\n self.__data = numpy.zeros((rows, cols), numpy.float32)\n self.__data[:,0:] = data * 1\n self.__cols = cols\n self.labels = []\n else:\n self.__data = data\n if isinstance(self.__data, numpy.ndarray):\n (rows, cols) = data.shape\n else:\n #we have a list of MCAs\n rows = 0\n cols = len(data)\n self.__cols = cols\n self.labels = []\n self.scantype = scantype\n self.rows = rows\n if labels is None:\n for i in range(cols):\n self.labels.append('Column %d' % i)\n else:\n for label in labels:\n self.labels.append('%s' % label)\n n = identification.split(\".\")\n self.__number = int(n[0])\n self.__order = int(n[1])",
"name": "__init__(self,SpecFileAbstractScan(object):"
},
{
"body": " \"\"\"\n These are the labels associated to the counters\n \"\"\"\n if self.scantype == 'SCAN':\n return self.labels\n else:\n return []",
"name": "alllabels(self):SpecFileAbstractScan(object):"
},
{
"body": " return []",
"name": "allmotorpos(self):SpecFileAbstractScan(object):"
},
{
"body": " return self.__cols",
"name": "cols(self):SpecFileAbstractScan(object):"
},
{
"body": " _logger.debug(\"command called\")\n text = \"\"\n if self.scanheader is not None:\n if len(self.scanheader):\n text = self.scanheader[0]\n return text",
"name": "METHOD_NAME(self):SpecFileAbstractScan(object):"
},
{
"body": " return numpy.transpose(self.__data)",
"name": "data(self):SpecFileAbstractScan(object):"
},
{
"body": " return self.__data[line,:]",
"name": "dataline(self,line):SpecFileAbstractScan(object):"
},
{
"body": " text = 'sometime'\n return text",
"name": "date(self):SpecFileAbstractScan(object):"
},
{
"body": " _logger.debug(\"file header called\")\n labels = '#L '\n for label in self.labels:\n labels += ' '+label\n if self.scanheader is None:\n if self.scantype == 'SCAN':\n return ['#S 1 Unknown command','#N %d' % self.cols(),labels]\n else:\n return ['#S 1 Unknown command']\n else:\n _logger.debug(\"returning %s\", self.scanheader)\n return self.scanheader",
"name": "fileheader(self):SpecFileAbstractScan(object):"
},
{
"body": " if key == 'S': return self.fileheader()[0]\n elif key == 'N':return self.fileheader()[-2]\n elif key == 'L':return self.fileheader()[-1]\n elif key == '@CALIB':\n output = []\n if self.scanheader is None: return output\n for line in self.scanheader:\n if line.startswith(key) or\\\n line.startswith('#'+key):\n output.append(line)\n return output\n elif key == '@CTIME':\n # expected to send Preset Time, Live Time, Real (Elapsed) Time\n output = []\n if self.scanheader is None: return output\n for line in self.scanheader:\n if line.startswith(key) or\\\n line.startswith('#'+key):\n output.append(line)\n return output\n elif key == \"\" or key == \" \":\n return self.fileheader()\n elif self.scanheader is None:\n return []\n else:\n output = []\n for line in self.scanheader:\n if line.startswith(\"#\"+key) or\\\n line.startswith(key):\n output.append(line)\n return output",
"name": "header(self,key):SpecFileAbstractScan(object):"
},
{
"body": " return self.__order",
"name": "order(self):SpecFileAbstractScan(object):"
},
{
"body": " return self.__number",
"name": "number(self):SpecFileAbstractScan(object):"
},
{
"body": " if self.scantype == 'SCAN':\n return self.rows\n else:\n return 0",
"name": "lines(self):SpecFileAbstractScan(object):"
},
{
"body": " if self.scantype == 'SCAN':\n return 0\n else:\n return self.__cols",
"name": "nbmca(self):SpecFileAbstractScan(object):"
},
{
"body": " if number <= 0:\n raise IndexError(\"Mca numbering starts at 1\")\n elif number > self.nbmca():\n raise IndexError(\"Only %d MCAs in file\" % self.nbmca())\n if hasattr(self.__data, \"shape\"):\n return self.__data[:,number-1]\n else:\n return self.__data[number-1]",
"name": "mca(self,number):SpecFileAbstractScan(object):"
},
{
"body": " pass",
"name": "test():SpecFileAbstractScan(object):"
}
] |
6 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkunimkt.endpoint import endpoint_data
class ListSlotRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'UniMkt', '2018-12-12', 'ListSlot')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_AdSlotType(self): # String
return self.get_query_params().get('AdSlotType')
def set_AdSlotType(self, AdSlotType): # String
self.add_query_param('AdSlotType', AdSlotType)
def get_UserId(self): # String
return self.get_query_params().get('UserId')
def set_UserId(self, UserId): # String
self.add_query_param('UserId', UserId)
def get_OriginSiteUserId(self): # String
return self.get_query_params().get('OriginSiteUserId')
def set_OriginSiteUserId(self, OriginSiteUserId): # String
self.add_query_param('OriginSiteUserId', OriginSiteUserId)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_MediaName(self): # String
return self.get_query_params().get('MediaName')
def set_MediaName(self, MediaName): # String
self.add_query_param('MediaName', MediaName)
def get_AppName(self): # String
return self.get_query_params().get('AppName')
def set_AppName(self, AppName): # String
self.add_query_param('AppName', AppName)
def get_AdSlotStatus(self): # String
return self.get_query_params().get('AdSlotStatus')
def set_AdSlotStatus(self, AdSlotStatus): # String
self.add_query_param('AdSlotStatus', AdSlotStatus)
def get_TenantId(self): # String
return self.get_query_params().get('TenantId')
def set_TenantId(self, TenantId): # String
self.add_query_param('TenantId', TenantId)
def get_AdSlotId(self): # String
return self.get_query_params().get('AdSlotId')
def set_AdSlotId(self, AdSlotId): # String
self.add_query_param('AdSlotId', AdSlotId)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_AdSlotCorporateStatus(self): # String
return self.get_query_params().get('AdSlotCorporateStatus')
def set_AdSlotCorporateStatus(self, AdSlotCorporateStatus): # String
self.add_query_param('AdSlotCorporateStatus', AdSlotCorporateStatus)
def get_EndCreateTime(self): # Long
return self.get_query_params().get('EndCreateTime')
def set_EndCreateTime(self, EndCreateTime): # Long
self.add_query_param('EndCreateTime', EndCreateTime)
def get_Business(self): # String
return self.get_query_params().get('Business')
def set_Business(self, Business): # String
self.add_query_param('Business', Business)
def get_MediaId(self): # String
return self.get_query_params().get('MediaId')
def set_MediaId(self, MediaId): # String
self.add_query_param('MediaId', MediaId)
def get_Environment(self): # String
return self.get_query_params().get('Environment')
def METHOD_NAME(self, Environment): # String
self.add_query_param('Environment', Environment)
def get_StartCreateTime(self): # Long
return self.get_query_params().get('StartCreateTime')
def set_StartCreateTime(self, StartCreateTime): # Long
self.add_query_param('StartCreateTime', StartCreateTime)
def get_UserSite(self): # String
return self.get_query_params().get('UserSite')
def set_UserSite(self, UserSite): # String
self.add_query_param('UserSite', UserSite)
def get_AdSlotName(self): # String
return self.get_query_params().get('AdSlotName')
def set_AdSlotName(self, AdSlotName): # String
self.add_query_param('AdSlotName', AdSlotName) | [
{
"body": "\t\treturn self.get_query_params().get('Environment')",
"name": "get_Environment(self):ListSlotRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('Environment', Environment)",
"name": "METHOD_NAME(self,ListSlotRequest(RpcRequest):"
}
] |
7 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class CreateLoadBalancerHTTPSListenerRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ens', '2017-11-10', 'CreateLoadBalancerHTTPSListener','ens')
self.set_method('POST')
def get_ListenerForward(self): # String
return self.get_query_params().get('ListenerForward')
def set_ListenerForward(self, ListenerForward): # String
self.add_query_param('ListenerForward', ListenerForward)
def get_HealthCheckTimeout(self): # Integer
return self.get_query_params().get('HealthCheckTimeout')
def set_HealthCheckTimeout(self, HealthCheckTimeout): # Integer
self.add_query_param('HealthCheckTimeout', HealthCheckTimeout)
def get_HealthCheckURI(self): # String
return self.get_query_params().get('HealthCheckURI')
def set_HealthCheckURI(self, HealthCheckURI): # String
self.add_query_param('HealthCheckURI', HealthCheckURI)
def get_HealthCheck(self): # String
return self.get_query_params().get('HealthCheck')
def set_HealthCheck(self, HealthCheck): # String
self.add_query_param('HealthCheck', HealthCheck)
def get_Cookie(self): # String
return self.get_query_params().get('Cookie')
def set_Cookie(self, Cookie): # String
self.add_query_param('Cookie', Cookie)
def get_HealthCheckMethod(self): # String
return self.get_query_params().get('HealthCheckMethod')
def set_HealthCheckMethod(self, HealthCheckMethod): # String
self.add_query_param('HealthCheckMethod', HealthCheckMethod)
def get_HealthCheckDomain(self): # String
return self.get_query_params().get('HealthCheckDomain')
def set_HealthCheckDomain(self, HealthCheckDomain): # String
self.add_query_param('HealthCheckDomain', HealthCheckDomain)
def get_RequestTimeout(self): # Integer
return self.get_query_params().get('RequestTimeout')
def set_RequestTimeout(self, RequestTimeout): # Integer
self.add_query_param('RequestTimeout', RequestTimeout)
def get_LoadBalancerId(self): # String
return self.get_query_params().get('LoadBalancerId')
def set_LoadBalancerId(self, LoadBalancerId): # String
self.add_query_param('LoadBalancerId', LoadBalancerId)
def get_HealthCheckInterval(self): # Integer
return self.get_query_params().get('HealthCheckInterval')
def set_HealthCheckInterval(self, HealthCheckInterval): # Integer
self.add_query_param('HealthCheckInterval', HealthCheckInterval)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def METHOD_NAME(self): # Integer
return self.get_query_params().get('UnhealthyThreshold')
def set_UnhealthyThreshold(self, UnhealthyThreshold): # Integer
self.add_query_param('UnhealthyThreshold', UnhealthyThreshold)
def get_HealthyThreshold(self): # Integer
return self.get_query_params().get('HealthyThreshold')
def set_HealthyThreshold(self, HealthyThreshold): # Integer
self.add_query_param('HealthyThreshold', HealthyThreshold)
def get_Scheduler(self): # String
return self.get_query_params().get('Scheduler')
def set_Scheduler(self, Scheduler): # String
self.add_query_param('Scheduler', Scheduler)
def get_ForwardPort(self): # Integer
return self.get_query_params().get('ForwardPort')
def set_ForwardPort(self, ForwardPort): # Integer
self.add_query_param('ForwardPort', ForwardPort)
def get_CookieTimeout(self): # Integer
return self.get_query_params().get('CookieTimeout')
def set_CookieTimeout(self, CookieTimeout): # Integer
self.add_query_param('CookieTimeout', CookieTimeout)
def get_StickySessionType(self): # String
return self.get_query_params().get('StickySessionType')
def set_StickySessionType(self, StickySessionType): # String
self.add_query_param('StickySessionType', StickySessionType)
def get_ListenerPort(self): # Integer
return self.get_query_params().get('ListenerPort')
def set_ListenerPort(self, ListenerPort): # Integer
self.add_query_param('ListenerPort', ListenerPort)
def get_ServerCertificateId(self): # String
return self.get_query_params().get('ServerCertificateId')
def set_ServerCertificateId(self, ServerCertificateId): # String
self.add_query_param('ServerCertificateId', ServerCertificateId)
def get_IdleTimeout(self): # Integer
return self.get_query_params().get('IdleTimeout')
def set_IdleTimeout(self, IdleTimeout): # Integer
self.add_query_param('IdleTimeout', IdleTimeout)
def get_HealthCheckConnectPort(self): # Integer
return self.get_query_params().get('HealthCheckConnectPort')
def set_HealthCheckConnectPort(self, HealthCheckConnectPort): # Integer
self.add_query_param('HealthCheckConnectPort', HealthCheckConnectPort)
def get_HealthCheckHttpCode(self): # String
return self.get_query_params().get('HealthCheckHttpCode')
def set_HealthCheckHttpCode(self, HealthCheckHttpCode): # String
self.add_query_param('HealthCheckHttpCode', HealthCheckHttpCode) | [
{
"body": "\t\treturn self.get_query_params().get('ListenerForward')",
"name": "get_ListenerForward(self):CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ListenerForward', ListenerForward)",
"name": "set_ListenerForward(self,CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('HealthCheckTimeout')",
"name": "get_HealthCheckTimeout(self):CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('HealthCheckTimeout', HealthCheckTimeout)",
"name": "set_HealthCheckTimeout(self,CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('HealthCheckURI')",
"name": "get_HealthCheckURI(self):CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('HealthCheckURI', HealthCheckURI)",
"name": "set_HealthCheckURI(self,CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('HealthCheck')",
"name": "get_HealthCheck(self):CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('HealthCheck', HealthCheck)",
"name": "set_HealthCheck(self,CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('HealthCheckMethod')",
"name": "get_HealthCheckMethod(self):CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('HealthCheckMethod', HealthCheckMethod)",
"name": "set_HealthCheckMethod(self,CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('HealthCheckDomain')",
"name": "get_HealthCheckDomain(self):CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('HealthCheckDomain', HealthCheckDomain)",
"name": "set_HealthCheckDomain(self,CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('RequestTimeout')",
"name": "get_RequestTimeout(self):CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('RequestTimeout', RequestTimeout)",
"name": "set_RequestTimeout(self,CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('HealthCheckInterval')",
"name": "get_HealthCheckInterval(self):CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('HealthCheckInterval', HealthCheckInterval)",
"name": "set_HealthCheckInterval(self,CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('Description')",
"name": "get_Description(self):CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('Description', Description)",
"name": "set_Description(self,CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('UnhealthyThreshold')",
"name": "METHOD_NAME(self):CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('HealthyThreshold')",
"name": "get_HealthyThreshold(self):CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('CookieTimeout')",
"name": "get_CookieTimeout(self):CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('StickySessionType')",
"name": "get_StickySessionType(self):CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('StickySessionType', StickySessionType)",
"name": "set_StickySessionType(self,CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ListenerPort')",
"name": "get_ListenerPort(self):CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ListenerPort', ListenerPort)",
"name": "set_ListenerPort(self,CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ServerCertificateId')",
"name": "get_ServerCertificateId(self):CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ServerCertificateId', ServerCertificateId)",
"name": "set_ServerCertificateId(self,CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('HealthCheckConnectPort')",
"name": "get_HealthCheckConnectPort(self):CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('HealthCheckConnectPort', HealthCheckConnectPort)",
"name": "set_HealthCheckConnectPort(self,CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('HealthCheckHttpCode')",
"name": "get_HealthCheckHttpCode(self):CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('HealthCheckHttpCode', HealthCheckHttpCode)",
"name": "set_HealthCheckHttpCode(self,CreateLoadBalancerHTTPSListenerRequest(RpcRequest):"
}
] |
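A minimal usage sketch for the listener request class above, assuming the standard aliyunsdkcore AcsClient flow; the credentials, region and every listener value below are placeholders rather than real resources.

from aliyunsdkcore.client import AcsClient

# Placeholder credentials and region; substitute real values before running.
client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')

request = CreateLoadBalancerHTTPSListenerRequest()
request.set_LoadBalancerId('lb-xxxxxxxx')         # placeholder load balancer ID
request.set_ListenerPort(443)                     # HTTPS port exposed by the listener
request.set_ForwardPort(80)                       # backend port traffic is forwarded to
request.set_ServerCertificateId('cert-xxxxxxxx')  # placeholder server certificate ID
request.set_HealthCheck('on')

# Sends the RPC call; the SDK returns the raw JSON response body.
print(client.do_action_with_exception(request))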
8 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdksmartag.endpoint import endpoint_data
class ModifyACLRuleRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Smartag', '2018-03-13', 'ModifyACLRule','smartag')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_DpiGroupIdss(self): # RepeatList
return self.get_query_params().get('DpiGroupIds')
def set_DpiGroupIdss(self, DpiGroupIds): # RepeatList
pass
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_SourcePortRange(self): # String
return self.get_query_params().get('SourcePortRange')
def set_SourcePortRange(self, SourcePortRange): # String
self.add_query_param('SourcePortRange', SourcePortRange)
def get_SourceCidr(self): # String
return self.get_query_params().get('SourceCidr')
def set_SourceCidr(self, SourceCidr): # String
self.add_query_param('SourceCidr', SourceCidr)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_Type(self): # String
return self.get_query_params().get('Type')
def METHOD_NAME(self, Type): # String
self.add_query_param('Type', Type)
def get_DestCidr(self): # String
return self.get_query_params().get('DestCidr')
def set_DestCidr(self, DestCidr): # String
self.add_query_param('DestCidr', DestCidr)
def get_DpiSignatureIdss(self): # RepeatList
return self.get_query_params().get('DpiSignatureIds')
def set_DpiSignatureIdss(self, DpiSignatureIds): # RepeatList
pass
def get_Direction(self): # String
return self.get_query_params().get('Direction')
def set_Direction(self, Direction): # String
self.add_query_param('Direction', Direction)
def get_Policy(self): # String
return self.get_query_params().get('Policy')
def set_Policy(self, Policy): # String
self.add_query_param('Policy', Policy)
def get_AclId(self): # String
return self.get_query_params().get('AclId')
def set_AclId(self, AclId): # String
self.add_query_param('AclId', AclId)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_IpProtocol(self): # String
return self.get_query_params().get('IpProtocol')
def set_IpProtocol(self, IpProtocol): # String
self.add_query_param('IpProtocol', IpProtocol)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_Priority(self): # Integer
return self.get_query_params().get('Priority')
def set_Priority(self, Priority): # Integer
self.add_query_param('Priority', Priority)
def get_AcrId(self): # String
return self.get_query_params().get('AcrId')
def set_AcrId(self, AcrId): # String
self.add_query_param('AcrId', AcrId)
def get_DestPortRange(self): # String
return self.get_query_params().get('DestPortRange')
def set_DestPortRange(self, DestPortRange): # String
self.add_query_param('DestPortRange', DestPortRange)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name) | [
{
"body": "\t\treturn self.get_query_params().get('Description')",
"name": "get_Description(self):ModifyACLRuleRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('Description', Description)",
"name": "set_Description(self,ModifyACLRuleRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('Type', Type)",
"name": "METHOD_NAME(self,ModifyACLRuleRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ResourceOwnerAccount')",
"name": "get_ResourceOwnerAccount(self):ModifyACLRuleRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)",
"name": "set_ResourceOwnerAccount(self,ModifyACLRuleRequest(RpcRequest):"
}
] |
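The same client pattern applies to the ACL rule modification above; IDs and rule values here are placeholders. Note that the two RepeatList setters in the class body are written as pass, so as shown they never add DpiGroupIds or DpiSignatureIds to the query.

import json
from aliyunsdkcore.client import AcsClient

client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-shanghai')  # placeholders

request = ModifyACLRuleRequest()
request.set_AclId('acl-xxxxxxxx')        # placeholder ACL ID
request.set_AcrId('acr-xxxxxxxx')        # placeholder ACL rule ID
request.set_Policy('accept')
request.set_IpProtocol('tcp')
request.set_SourceCidr('10.0.0.0/24')
request.set_DestCidr('0.0.0.0/0')
request.set_DestPortRange('443/443')
request.set_Direction('out')
request.set_Priority(1)

body = client.do_action_with_exception(request)  # raw JSON bytes
print(json.loads(body))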
9 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkidaas_doraemon.endpoint import endpoint_data
class VerifyUserAuthenticationRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'idaas-doraemon', '2021-05-20', 'VerifyUserAuthentication')
self.set_protocol_type('https')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_LogParams(self): # String
return self.get_query_params().get('LogParams')
def METHOD_NAME(self, LogParams): # String
self.add_query_param('LogParams', LogParams)
def get_ClientExtendParamsJson(self): # String
return self.get_query_params().get('ClientExtendParamsJson')
def set_ClientExtendParamsJson(self, ClientExtendParamsJson): # String
self.add_query_param('ClientExtendParamsJson', ClientExtendParamsJson)
def get_UserId(self): # String
return self.get_query_params().get('UserId')
def set_UserId(self, UserId): # String
self.add_query_param('UserId', UserId)
def get_LogTag(self): # String
return self.get_query_params().get('LogTag')
def set_LogTag(self, LogTag): # String
self.add_query_param('LogTag', LogTag)
def get_ServerExtendParamsJson(self): # String
return self.get_query_params().get('ServerExtendParamsJson')
def set_ServerExtendParamsJson(self, ServerExtendParamsJson): # String
self.add_query_param('ServerExtendParamsJson', ServerExtendParamsJson)
def get_RequireBindHashBase64(self): # String
return self.get_query_params().get('RequireBindHashBase64')
def set_RequireBindHashBase64(self, RequireBindHashBase64): # String
self.add_query_param('RequireBindHashBase64', RequireBindHashBase64)
def get_AuthenticationContext(self): # String
return self.get_query_params().get('AuthenticationContext')
def set_AuthenticationContext(self, AuthenticationContext): # String
self.add_query_param('AuthenticationContext', AuthenticationContext)
def get_RequireChallengeBase64(self): # String
return self.get_query_params().get('RequireChallengeBase64')
def set_RequireChallengeBase64(self, RequireChallengeBase64): # String
self.add_query_param('RequireChallengeBase64', RequireChallengeBase64)
def get_AuthenticatorType(self): # String
return self.get_query_params().get('AuthenticatorType')
def set_AuthenticatorType(self, AuthenticatorType): # String
self.add_query_param('AuthenticatorType', AuthenticatorType)
def get_ClientExtendParamsJsonSign(self): # String
return self.get_query_params().get('ClientExtendParamsJsonSign')
def set_ClientExtendParamsJsonSign(self, ClientExtendParamsJsonSign): # String
self.add_query_param('ClientExtendParamsJsonSign', ClientExtendParamsJsonSign)
def get_UserSourceIp(self): # String
return self.get_query_params().get('UserSourceIp')
def set_UserSourceIp(self, UserSourceIp): # String
self.add_query_param('UserSourceIp', UserSourceIp)
def get_ApplicationExternalId(self): # String
return self.get_query_params().get('ApplicationExternalId')
def set_ApplicationExternalId(self, ApplicationExternalId): # String
self.add_query_param('ApplicationExternalId', ApplicationExternalId) | [
{
"body": "\t\treturn self.get_query_params().get('LogParams')",
"name": "get_LogParams(self):VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('LogParams', LogParams)",
"name": "METHOD_NAME(self,VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ClientExtendParamsJson')",
"name": "get_ClientExtendParamsJson(self):VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ClientExtendParamsJson', ClientExtendParamsJson)",
"name": "set_ClientExtendParamsJson(self,VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('UserId')",
"name": "get_UserId(self):VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('LogTag')",
"name": "get_LogTag(self):VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ServerExtendParamsJson')",
"name": "get_ServerExtendParamsJson(self):VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ServerExtendParamsJson', ServerExtendParamsJson)",
"name": "set_ServerExtendParamsJson(self,VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('RequireBindHashBase64')",
"name": "get_RequireBindHashBase64(self):VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('RequireBindHashBase64', RequireBindHashBase64)",
"name": "set_RequireBindHashBase64(self,VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('AuthenticationContext')",
"name": "get_AuthenticationContext(self):VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('AuthenticationContext', AuthenticationContext)",
"name": "set_AuthenticationContext(self,VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('RequireChallengeBase64')",
"name": "get_RequireChallengeBase64(self):VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('AuthenticatorType')",
"name": "get_AuthenticatorType(self):VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('AuthenticatorType', AuthenticatorType)",
"name": "set_AuthenticatorType(self,VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ClientExtendParamsJsonSign')",
"name": "get_ClientExtendParamsJsonSign(self):VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ClientExtendParamsJsonSign', ClientExtendParamsJsonSign)",
"name": "set_ClientExtendParamsJsonSign(self,VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('UserSourceIp')",
"name": "get_UserSourceIp(self):VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ApplicationExternalId')",
"name": "get_ApplicationExternalId(self):VerifyUserAuthenticationRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ApplicationExternalId', ApplicationExternalId)",
"name": "set_ApplicationExternalId(self,VerifyUserAuthenticationRequest(RpcRequest):"
}
] |
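A short sketch for the verification request above, again following the common AcsClient flow; the constructor already forces HTTPS via set_protocol_type, and every field value below is a placeholder chosen only for illustration.

from aliyunsdkcore.client import AcsClient

client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')  # placeholders

request = VerifyUserAuthenticationRequest()
request.set_ApplicationExternalId('app-demo')   # placeholder application ID
request.set_UserId('user-123')                  # placeholder user ID
request.set_AuthenticatorType('FIDO2')          # placeholder authenticator type
request.set_AuthenticationContext('{}')         # placeholder context JSON
request.set_UserSourceIp('203.0.113.10')        # documentation-range IP

print(client.do_action_with_exception(request))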
10 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""`cssmin` - A Python port of the YUI CSS compressor."""
"""
Home page: https://github.com/zacharyvoase/cssmin
License: BSD: https://github.com/zacharyvoase/cssmin/blob/master/LICENSE
Original author: Zachary Voase
Modified for inclusion into web2py by: Ross Peoples <ross.peoples@gmail.com>
"""
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import re
__version__ = '0.1.4'
def remove_comments(css):
"""Remove all CSS comment blocks."""
iemac = False
preserve = False
comment_start = css.find("/*")
while comment_start >= 0:
# Preserve comments that look like `/*!...*/`.
        # Slicing is used to make sure we don't get an IndexError.
preserve = css[comment_start + 2:comment_start + 3] == "!"
comment_end = css.find("*/", comment_start + 2)
if comment_end < 0:
if not preserve:
css = css[:comment_start]
break
elif comment_end >= (comment_start + 2):
if css[comment_end - 1] == "\\":
# This is an IE Mac-specific comment; leave this one and the
# following one alone.
comment_start = comment_end + 2
iemac = True
elif iemac:
comment_start = comment_end + 2
iemac = False
elif not preserve:
css = css[:comment_start] + css[comment_end + 2:]
else:
comment_start = comment_end + 2
comment_start = css.find("/*", comment_start)
return css
def remove_unnecessary_whitespace(css):
"""Remove unnecessary whitespace characters."""
def pseudoclasscolon(css):
"""
Prevents 'p :link' from becoming 'p:link'.
Translates 'p :link' into 'p ___PSEUDOCLASSCOLON___link'; this is
translated back again later.
"""
regex = re.compile(r"(^|\})(([^\{\:])+\:)+([^\{]*\{)")
match = regex.search(css)
while match:
css = ''.join([
css[:match.start()],
match.group().replace(":", "___PSEUDOCLASSCOLON___"),
css[match.end():]])
match = regex.search(css)
return css
css = pseudoclasscolon(css)
# Remove spaces from before things.
css = re.sub(r"\s+([!{};:>+\(\)\],])", r"\1", css)
# If there is a `@charset`, then only allow one, and move to the beginning.
css = re.sub(r"^(.*)(@charset \"[^\"]*\";)", r"\2\1", css)
css = re.sub(r"^(\s*@charset [^;]+;\s*)+", r"\1", css)
# Put the space back in for a few cases, such as `@media screen` and
# `(-webkit-min-device-pixel-ratio:0)`.
css = re.sub(r"\band\(", "and (", css)
# Put the colons back.
css = css.replace('___PSEUDOCLASSCOLON___', ':')
# Remove spaces from after things.
css = re.sub(r"([!{}:;>+\(\[,])\s+", r"\1", css)
return css
def remove_unnecessary_semicolons(css):
"""Remove unnecessary semicolons."""
return re.sub(r";+\}", "}", css)
def remove_empty_rules(css):
"""Remove empty rules."""
return re.sub(r"[^\}\{]+\{\}", "", css)
def normalize_rgb_colors_to_hex(css):
"""Convert `rgb(51,102,153)` to `#336699`."""
regex = re.compile(r"rgb\s*\(\s*([0-9,\s]+)\s*\)")
match = regex.search(css)
while match:
colors = map(lambda s: s.strip(), match.group(1).split(","))
hexcolor = '#%.2x%.2x%.2x' % tuple(map(int, colors))
css = css.replace(match.group(), hexcolor)
match = regex.search(css)
return css
def condense_zero_units(css):
"""Replace `0(px, em, %, etc)` with `0`."""
return re.sub(r"([\s:])(0)(px|em|%|in|cm|mm|pc|pt|ex)", r"\1\2", css)
def condense_multidimensional_zeros(css):
"""Replace `:0 0 0 0;`, `:0 0 0;` etc. with `:0;`."""
css = css.replace(":0 0 0 0;", ":0;")
css = css.replace(":0 0 0;", ":0;")
css = css.replace(":0 0;", ":0;")
# Revert `background-position:0;` to the valid `background-position:0 0;`.
css = css.replace("background-position:0;", "background-position:0 0;")
return css
def condense_floating_points(css):
"""Replace `0.6` with `.6` where possible."""
return re.sub(r"(:|\s)0+\.(\d+)", r"\1.\2", css)
def condense_hex_colors(css):
"""Shorten colors from #AABBCC to #ABC where possible."""
regex = re.compile(r"([^\"'=\s])(\s*)#([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])")
match = regex.search(css)
while match:
first = match.group(3) + match.group(5) + match.group(7)
second = match.group(4) + match.group(6) + match.group(8)
if first.lower() == second.lower():
css = css.replace(
match.group(), match.group(1) + match.group(2) + '#' + first)
match = regex.search(css, match.end() - 3)
else:
match = regex.search(css, match.end())
return css
def METHOD_NAME(css):
"""Condense multiple adjacent whitespace characters into one."""
return re.sub(r"\s+", " ", css)
def condense_semicolons(css):
"""Condense multiple adjacent semicolon characters into one."""
return re.sub(r";;+", ";", css)
def wrap_css_lines(css, line_length):
"""Wrap the lines of the given CSS to an approximate length."""
lines = []
line_start = 0
for i, char in enumerate(css):
# It's safe to break after `}` characters.
if char == '}' and (i - line_start >= line_length):
lines.append(css[line_start:i + 1])
line_start = i + 1
if line_start < len(css):
lines.append(css[line_start:])
return '\n'.join(lines)
def cssmin(css, wrap=None):
css = remove_comments(css)
css = METHOD_NAME(css)
# A pseudo class for the Box Model Hack
# (see http://tantek.com/CSS/Examples/boxmodelhack.html)
css = css.replace('"\\"}\\""', "___PSEUDOCLASSBMH___")
css = remove_unnecessary_whitespace(css)
css = remove_unnecessary_semicolons(css)
css = condense_zero_units(css)
css = condense_multidimensional_zeros(css)
css = condense_floating_points(css)
css = normalize_rgb_colors_to_hex(css)
css = condense_hex_colors(css)
if wrap is not None:
css = wrap_css_lines(css, wrap)
css = css.replace("___PSEUDOCLASSBMH___", '"\\"}\\""')
css = condense_semicolons(css)
return css.strip()
def main():
import optparse
import sys
p = optparse.OptionParser(
prog="cssmin", version=__version__,
usage="%prog [--wrap N]",
description="""Reads raw CSS from stdin, and writes compressed CSS to stdout.""")
p.add_option(
'-w', '--wrap', type='int', default=None, metavar='N',
help="Wrap output to approximately N chars per line.")
options, args = p.parse_args()
sys.stdout.write(cssmin(sys.stdin.read(), wrap=options.wrap))
if __name__ == '__main__':
main() | [
{
"body": " \"\"\"Remove all CSS comment blocks.\"\"\"\n iemac = False\n preserve = False\n comment_start = css.find(\"/*\")\n while comment_start >= 0:\n # Preserve comments that look like `/*!...*/`.\n # Slicing is used to make sure we don\"t get an IndexError.\n preserve = css[comment_start + 2:comment_start + 3] == \"!\"\n comment_end = css.find(\"*/\", comment_start + 2)\n if comment_end < 0:\n if not preserve:\n css = css[:comment_start]\n break\n elif comment_end >= (comment_start + 2):\n if css[comment_end - 1] == \"\\\\\":\n # This is an IE Mac-specific comment; leave this one and the\n # following one alone.\n comment_start = comment_end + 2\n iemac = True\n elif iemac:\n comment_start = comment_end + 2\n iemac = False\n elif not preserve:\n css = css[:comment_start] + css[comment_end + 2:]\n else:\n comment_start = comment_end + 2\n comment_start = css.find(\"/*\", comment_start)\n return css",
"name": "remove_comments(css):"
},
{
"body": " \"\"\"Condense multiple adjacent whitespace characters into one.\"\"\"\n return re.sub(r\"\\s+\", \" \", css)",
"name": "METHOD_NAME(css):"
},
{
"body": " \"\"\"Condense multiple adjacent semicolon characters into one.\"\"\"\n return re.sub(r\";;+\", \";\", css)",
"name": "condense_semicolons(css):"
}
] |
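A quick demonstration of the minifier defined above on an in-memory stylesheet; the input CSS is invented purely for illustration.

raw_css = '''
/* a comment that will be stripped */
p {
    color: rgb(51, 102, 153);
    margin: 0px 0px 0px 0px;
}
.empty {}
'''

# Expected result (roughly): p{color:#369;margin:0}
print(cssmin(raw_css))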
11 | from io import StringIO as TextIO
from io import BytesIO as BytesIO
from typing import Any, AnyStr, Callable, Generic, IO, List, Optional, Text, Tuple, TypeVar, Union, overload
from typing_extensions import Final
import sys
_T = TypeVar("_T")
class FDCapture(Generic[AnyStr]):
def __init__(self, targetfd: int, tmpfile: Optional[IO[AnyStr]] = ..., now: bool = ..., patchsys: bool = ...) -> None: ...
def start(self) -> None: ...
def done(self) -> IO[AnyStr]: ...
def writeorg(self, data: AnyStr) -> None: ...
class StdCaptureFD:
def __init__(
self,
out: Union[bool, IO[str]] = ...,
err: Union[bool, IO[str]] = ...,
mixed: bool = ...,
in_: bool = ...,
patchsys: bool = ...,
now: bool = ...,
) -> None: ...
@classmethod
def call(cls, func: Callable[..., _T], *args: Any, **kwargs: Any) -> Tuple[_T, str, str]: ...
def reset(self) -> Tuple[str, str]: ...
def suspend(self) -> Tuple[str, str]: ...
def startall(self) -> None: ...
def resume(self) -> None: ...
def done(self, save: bool = ...) -> Tuple[IO[str], IO[str]]: ...
def readouterr(self) -> Tuple[str, str]: ...
class StdCapture:
def __init__(
self,
out: Union[bool, IO[str]] = ...,
err: Union[bool, IO[str]] = ...,
in_: bool = ...,
mixed: bool = ...,
now: bool = ...,
) -> None: ...
@classmethod
def call(cls, func: Callable[..., _T], *args: Any, **kwargs: Any) -> Tuple[_T, str, str]: ...
def reset(self) -> Tuple[str, str]: ...
def suspend(self) -> Tuple[str, str]: ...
def startall(self) -> None: ...
def resume(self) -> None: ...
def done(self, save: bool = ...) -> Tuple[IO[str], IO[str]]: ...
def readouterr(self) -> Tuple[IO[str], IO[str]]: ...
# XXX: The type here is not exactly right. If f is IO[bytes] and
# encoding is not None, returns some weird hybrid, not exactly IO[bytes].
def dupfile(
f: IO[AnyStr],
mode: Optional[str] = ...,
buffering: int = ...,
raising: bool = ...,
encoding: Optional[str] = ...,
) -> IO[AnyStr]: ...
def get_terminal_width() -> int: ...
def ansi_print(
text: Union[str, Text],
esc: Union[Union[str, Text], Tuple[Union[str, Text], ...]],
file: Optional[IO[Any]] = ...,
newline: bool = ...,
flush: bool = ...,
) -> None: ...
def saferepr(obj, maxsize: int = ...) -> str: ...
class TerminalWriter:
stringio: TextIO
encoding: Final[str]
hasmarkup: bool
def __init__(self, file: Optional[IO[str]] = ..., stringio: bool = ..., encoding: Optional[str] = ...) -> None: ...
@property
def fullwidth(self) -> int: ...
@fullwidth.setter
def fullwidth(self, value: int) -> None: ...
@property
def chars_on_current_line(self) -> int: ...
@property
def width_of_current_line(self) -> int: ...
def markup(
self,
text: str,
*,
black: int = ..., red: int = ..., green: int = ..., yellow: int = ..., blue: int = ..., purple: int = ...,
cyan: int = ..., white: int = ..., Black: int = ..., Red: int = ..., Green: int = ..., Yellow: int = ...,
Blue: int = ..., Purple: int = ..., Cyan: int = ..., White: int = ..., bold: int = ..., light: int = ...,
blink: int = ..., invert: int = ...,
) -> str: ...
def sep(
self,
sepchar: str,
title: Optional[str] = ...,
fullwidth: Optional[int] = ...,
*,
black: int = ..., red: int = ..., green: int = ..., yellow: int = ..., blue: int = ..., purple: int = ...,
cyan: int = ..., white: int = ..., Black: int = ..., Red: int = ..., Green: int = ..., Yellow: int = ...,
Blue: int = ..., Purple: int = ..., Cyan: int = ..., White: int = ..., bold: int = ..., light: int = ...,
blink: int = ..., invert: int = ...,
) -> None: ...
def METHOD_NAME(
self,
msg: str,
*,
black: int = ..., red: int = ..., green: int = ..., yellow: int = ..., blue: int = ..., purple: int = ...,
cyan: int = ..., white: int = ..., Black: int = ..., Red: int = ..., Green: int = ..., Yellow: int = ...,
Blue: int = ..., Purple: int = ..., Cyan: int = ..., White: int = ..., bold: int = ..., light: int = ...,
blink: int = ..., invert: int = ...,
) -> None: ...
def line(
self,
s: str = ...,
*,
black: int = ..., red: int = ..., green: int = ..., yellow: int = ..., blue: int = ..., purple: int = ...,
cyan: int = ..., white: int = ..., Black: int = ..., Red: int = ..., Green: int = ..., Yellow: int = ...,
Blue: int = ..., Purple: int = ..., Cyan: int = ..., White: int = ..., bold: int = ..., light: int = ...,
blink: int = ..., invert: int = ...,
) -> None: ...
def reline(
self,
line: str,
*,
black: int = ..., red: int = ..., green: int = ..., yellow: int = ..., blue: int = ..., purple: int = ...,
cyan: int = ..., white: int = ..., Black: int = ..., Red: int = ..., Green: int = ..., Yellow: int = ...,
Blue: int = ..., Purple: int = ..., Cyan: int = ..., White: int = ..., bold: int = ..., light: int = ...,
blink: int = ..., invert: int = ...,
) -> None: ... | [
{
"body": " self,\n out: Union[bool, IO[str]] = ...,\n err: Union[bool, IO[str]] = ...,\n mixed: bool = ...,\n in_: bool = ...,\n patchsys: bool = ...,\n now: bool = ...,",
"name": "__init__(TerminalWriter:"
},
{
"body": " self,\n out: Union[bool, IO[str]] = ...,\n err: Union[bool, IO[str]] = ...,\n in_: bool = ...,\n mixed: bool = ...,\n now: bool = ...,",
"name": "__init__(TerminalWriter:"
},
{
"body": " self,\n sepchar: str,\n title: Optional[str] = ...,\n fullwidth: Optional[int] = ...,\n *,\n black: int = ..., red: int = ..., green: int = ..., yellow: int = ..., blue: int = ..., purple: int = ...,\n cyan: int = ..., white: int = ..., Black: int = ..., Red: int = ..., Green: int = ..., Yellow: int = ...,\n Blue: int = ..., Purple: int = ..., Cyan: int = ..., White: int = ..., bold: int = ..., light: int = ...,\n blink: int = ..., invert: int = ...,",
"name": "sep(TerminalWriter:"
},
{
"body": " self,\n msg: str,\n *,\n black: int = ..., red: int = ..., green: int = ..., yellow: int = ..., blue: int = ..., purple: int = ...,\n cyan: int = ..., white: int = ..., Black: int = ..., Red: int = ..., Green: int = ..., Yellow: int = ...,\n Blue: int = ..., Purple: int = ..., Cyan: int = ..., White: int = ..., bold: int = ..., light: int = ...,\n blink: int = ..., invert: int = ...,",
"name": "METHOD_NAME(TerminalWriter:"
}
] |
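These stubs appear to describe the py library's capture and terminal helpers. The sketch below assumes the stubbed classes correspond to py.io at runtime, which is an assumption, since the stub itself never names its import path.

import py  # assumption: TerminalWriter and friends live under py.io at runtime

tw = py.io.TerminalWriter()
tw.sep('=', 'section title', bold=True)  # separator line with a centered title
tw.line('hello world', green=True)       # a full line, optionally with markup
print(py.io.get_terminal_width())        # matches get_terminal_width() above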
12 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkhbase.endpoint import endpoint_data
class CreateMultiZoneClusterRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'HBase', '2019-01-01', 'CreateMultiZoneCluster','hbase')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ArchVersion(self):
return self.get_query_params().get('ArchVersion')
def set_ArchVersion(self,ArchVersion):
self.add_query_param('ArchVersion',ArchVersion)
def get_ClusterName(self):
return self.get_query_params().get('ClusterName')
def set_ClusterName(self,ClusterName):
self.add_query_param('ClusterName',ClusterName)
def get_EngineVersion(self):
return self.get_query_params().get('EngineVersion')
def set_EngineVersion(self,EngineVersion):
self.add_query_param('EngineVersion',EngineVersion)
def get_LogDiskType(self):
return self.get_query_params().get('LogDiskType')
def set_LogDiskType(self,LogDiskType):
self.add_query_param('LogDiskType',LogDiskType)
def get_ResourceGroupId(self):
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self,ResourceGroupId):
self.add_query_param('ResourceGroupId',ResourceGroupId)
def get_PrimaryVSwitchId(self):
return self.get_query_params().get('PrimaryVSwitchId')
def set_PrimaryVSwitchId(self,PrimaryVSwitchId):
self.add_query_param('PrimaryVSwitchId',PrimaryVSwitchId)
def get_LogInstanceType(self):
return self.get_query_params().get('LogInstanceType')
def set_LogInstanceType(self,LogInstanceType):
self.add_query_param('LogInstanceType',LogInstanceType)
def get_AutoRenewPeriod(self):
return self.get_query_params().get('AutoRenewPeriod')
def set_AutoRenewPeriod(self,AutoRenewPeriod):
self.add_query_param('AutoRenewPeriod',AutoRenewPeriod)
def get_Period(self):
return self.get_query_params().get('Period')
def set_Period(self,Period):
self.add_query_param('Period',Period)
def get_LogNodeCount(self):
return self.get_query_params().get('LogNodeCount')
def METHOD_NAME(self,LogNodeCount):
self.add_query_param('LogNodeCount',LogNodeCount)
def get_SecurityIPList(self):
return self.get_query_params().get('SecurityIPList')
def set_SecurityIPList(self,SecurityIPList):
self.add_query_param('SecurityIPList',SecurityIPList)
def get_PeriodUnit(self):
return self.get_query_params().get('PeriodUnit')
def set_PeriodUnit(self,PeriodUnit):
self.add_query_param('PeriodUnit',PeriodUnit)
def get_CoreDiskType(self):
return self.get_query_params().get('CoreDiskType')
def set_CoreDiskType(self,CoreDiskType):
self.add_query_param('CoreDiskType',CoreDiskType)
def get_ArbiterZoneId(self):
return self.get_query_params().get('ArbiterZoneId')
def set_ArbiterZoneId(self,ArbiterZoneId):
self.add_query_param('ArbiterZoneId',ArbiterZoneId)
def get_ClientToken(self):
return self.get_query_params().get('ClientToken')
def set_ClientToken(self,ClientToken):
self.add_query_param('ClientToken',ClientToken)
def get_MultiZoneCombination(self):
return self.get_query_params().get('MultiZoneCombination')
def set_MultiZoneCombination(self,MultiZoneCombination):
self.add_query_param('MultiZoneCombination',MultiZoneCombination)
def get_PrimaryZoneId(self):
return self.get_query_params().get('PrimaryZoneId')
def set_PrimaryZoneId(self,PrimaryZoneId):
self.add_query_param('PrimaryZoneId',PrimaryZoneId)
def get_Engine(self):
return self.get_query_params().get('Engine')
def set_Engine(self,Engine):
self.add_query_param('Engine',Engine)
def get_StandbyVSwitchId(self):
return self.get_query_params().get('StandbyVSwitchId')
def set_StandbyVSwitchId(self,StandbyVSwitchId):
self.add_query_param('StandbyVSwitchId',StandbyVSwitchId)
def get_StandbyZoneId(self):
return self.get_query_params().get('StandbyZoneId')
def set_StandbyZoneId(self,StandbyZoneId):
self.add_query_param('StandbyZoneId',StandbyZoneId)
def get_MasterInstanceType(self):
return self.get_query_params().get('MasterInstanceType')
def set_MasterInstanceType(self,MasterInstanceType):
self.add_query_param('MasterInstanceType',MasterInstanceType)
def get_CoreNodeCount(self):
return self.get_query_params().get('CoreNodeCount')
def set_CoreNodeCount(self,CoreNodeCount):
self.add_query_param('CoreNodeCount',CoreNodeCount)
def get_LogDiskSize(self):
return self.get_query_params().get('LogDiskSize')
def set_LogDiskSize(self,LogDiskSize):
self.add_query_param('LogDiskSize',LogDiskSize)
def get_CoreInstanceType(self):
return self.get_query_params().get('CoreInstanceType')
def set_CoreInstanceType(self,CoreInstanceType):
self.add_query_param('CoreInstanceType',CoreInstanceType)
def get_CoreDiskSize(self):
return self.get_query_params().get('CoreDiskSize')
def set_CoreDiskSize(self,CoreDiskSize):
self.add_query_param('CoreDiskSize',CoreDiskSize)
def get_VpcId(self):
return self.get_query_params().get('VpcId')
def set_VpcId(self,VpcId):
self.add_query_param('VpcId',VpcId)
def get_PayType(self):
return self.get_query_params().get('PayType')
def set_PayType(self,PayType):
self.add_query_param('PayType',PayType)
def get_ArbiterVSwitchId(self):
return self.get_query_params().get('ArbiterVSwitchId')
def set_ArbiterVSwitchId(self,ArbiterVSwitchId):
		self.add_query_param('ArbiterVSwitchId',ArbiterVSwitchId) | [
{
"body": "\t\treturn self.get_query_params().get('ArchVersion')",
"name": "get_ArchVersion(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ArchVersion',ArchVersion)",
"name": "set_ArchVersion(self,ArchVersion):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ClusterName')",
"name": "get_ClusterName(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ClusterName',ClusterName)",
"name": "set_ClusterName(self,ClusterName):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('EngineVersion')",
"name": "get_EngineVersion(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('EngineVersion',EngineVersion)",
"name": "set_EngineVersion(self,EngineVersion):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('LogDiskType')",
"name": "get_LogDiskType(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ResourceGroupId')",
"name": "get_ResourceGroupId(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ResourceGroupId',ResourceGroupId)",
"name": "set_ResourceGroupId(self,ResourceGroupId):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('PrimaryVSwitchId')",
"name": "get_PrimaryVSwitchId(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('PrimaryVSwitchId',PrimaryVSwitchId)",
"name": "set_PrimaryVSwitchId(self,PrimaryVSwitchId):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('LogInstanceType')",
"name": "get_LogInstanceType(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('LogInstanceType',LogInstanceType)",
"name": "set_LogInstanceType(self,LogInstanceType):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('Period')",
"name": "get_Period(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('LogNodeCount')",
"name": "get_LogNodeCount(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('LogNodeCount',LogNodeCount)",
"name": "METHOD_NAME(self,LogNodeCount):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('SecurityIPList')",
"name": "get_SecurityIPList(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('SecurityIPList',SecurityIPList)",
"name": "set_SecurityIPList(self,SecurityIPList):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('PeriodUnit')",
"name": "get_PeriodUnit(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('PeriodUnit',PeriodUnit)",
"name": "set_PeriodUnit(self,PeriodUnit):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('CoreDiskType')",
"name": "get_CoreDiskType(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ClientToken')",
"name": "get_ClientToken(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ClientToken',ClientToken)",
"name": "set_ClientToken(self,ClientToken):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('MultiZoneCombination')",
"name": "get_MultiZoneCombination(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('MultiZoneCombination',MultiZoneCombination)",
"name": "set_MultiZoneCombination(self,MultiZoneCombination):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('PrimaryZoneId')",
"name": "get_PrimaryZoneId(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('PrimaryZoneId',PrimaryZoneId)",
"name": "set_PrimaryZoneId(self,PrimaryZoneId):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('Engine')",
"name": "get_Engine(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('MasterInstanceType')",
"name": "get_MasterInstanceType(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('MasterInstanceType',MasterInstanceType)",
"name": "set_MasterInstanceType(self,MasterInstanceType):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('CoreNodeCount')",
"name": "get_CoreNodeCount(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('CoreNodeCount',CoreNodeCount)",
"name": "set_CoreNodeCount(self,CoreNodeCount):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('LogDiskSize')",
"name": "get_LogDiskSize(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('CoreInstanceType')",
"name": "get_CoreInstanceType(self):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('CoreInstanceType',CoreInstanceType)",
"name": "set_CoreInstanceType(self,CoreInstanceType):CreateMultiZoneClusterRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('CoreDiskSize')",
"name": "get_CoreDiskSize(self):CreateMultiZoneClusterRequest(RpcRequest):"
}
] |
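One more sketch in the same AcsClient pattern, this time for the multi-zone HBase cluster request; every identifier below is a placeholder, and set_ClientToken is included only to show the idempotency token field the class exposes.

import uuid
from aliyunsdkcore.client import AcsClient

client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')  # placeholders

request = CreateMultiZoneClusterRequest()
request.set_ClusterName('hbase-demo')          # placeholder cluster name
request.set_Engine('hbaseue')                  # placeholder engine value
request.set_EngineVersion('2.0')               # placeholder version
request.set_PayType('Postpaid')                # placeholder billing type
request.set_PrimaryZoneId('cn-hangzhou-h')     # placeholder zones and vswitches
request.set_PrimaryVSwitchId('vsw-primary')
request.set_StandbyZoneId('cn-hangzhou-i')
request.set_StandbyVSwitchId('vsw-standby')
request.set_ArbiterZoneId('cn-hangzhou-j')
request.set_ArbiterVSwitchId('vsw-arbiter')
request.set_ClientToken(str(uuid.uuid4()))     # idempotency token

print(client.do_action_with_exception(request))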
13 | from plugin import plugin
# General values for connect 4 game and board
numberRows = 6
numberColumns = 7
numToWin = 4
GameBoard = [[0] * numberColumns for j in range(numberRows)]
def restartBoard():
for i in range(numberRows):
for j in range(numberColumns):
GameBoard[i][j] = str(' ')
# Function to check if the column is open
def checkIfFree(c):
if whatsAtPos(0, c) == ' ':
return True
else:
return False
# Function that calls all win conditions
def checkForWin(c):
for i in range(numberRows):
if whatsAtPos(i, c) != ' ':
row = i
if checkHorizWin(row, c, whatsAtPos(row, c)) or checkVertWin(row, c, whatsAtPos(row, c)) \
or checkDiagWin(row, c, whatsAtPos(row, c)):
return True
break
return False
# Place token at the lowest available row in the selected column
def placeToken(p, c):
startIndex = numberRows - 1
stopIndex = -1
step = -1
# Loop through column to find top most available row to place token
for i in range(startIndex, stopIndex, step):
if whatsAtPos(i, c) == ' ':
GameBoard[i][c] = str(p)
break
# Check for a horizontal win
def checkHorizWin(r, c, p):
# Temp row and col values to manipulate throughout function
row = r
col = c
# Count matching tokens to the right. Stop when at end of board
rightCounter = 0
while col < numberColumns:
if whatsAtPos(row, col) == p:
rightCounter += 1
else:
row = r
break
col += 1
# Count matching tokens to the left. Stop when at end of board
leftCounter = 0
col = c
while col >= 0:
# break if at first column
if col == 0:
break
col -= 1
if whatsAtPos(row, col) == p:
leftCounter += 1
else:
break
# Add left and right together to check if numToWin was reached
if leftCounter + rightCounter >= numToWin:
print("Congrats, player ", p, " you win horizontally!\n")
return True
else:
return False
def checkVertWin(r, c, p):
winCheck = False
counter = 0
if r > numberRows - numToWin:
return False
for i in range(r, numberRows, 1):
if whatsAtPos(i, c) == p:
counter += 1
else:
counter = 0
if counter == numToWin:
winCheck = True
print("Congrats, player ", p, ", you win vertically!\n")
break
return winCheck
def checkDiagWin(r, c, p):
row = r
col = c
upRight = 0
while row >= 0 and col <= numberColumns:
if whatsAtPos(row, col) == p:
upRight += 1
else:
row = r
col = c
break
        # If the column is the last column on the board, break the loop
if col == numberColumns - 1 or row == 0:
row = r
col = c
break
row -= 1
col += 1
downLeft = 0
while row < numberRows - 1 and col > 0:
row += 1
col -= 1
if whatsAtPos(row, col) == p:
downLeft += 1
else:
row = r
col = c
break
if upRight + downLeft >= numToWin:
print('Congrats! You won diagonally!')
return True
upLeft = 0
while row >= 0 and col >= 0:
if whatsAtPos(row, col) == p:
upLeft += 1
else:
row = r
col = c
break
if col == 0 or row == 0:
row = r
col = c
break
row -= 1
col -= 1
downRight = 0
while row < numberRows - 1 and col < numberColumns - 1:
row += 1
col += 1
if whatsAtPos(row, col) == p:
downRight += 1
else:
break
if downRight + upLeft >= numToWin:
print("Congrats, player ", p, " you win diagonally!\n")
return True
return False
# Function to return value of gameboard location
def whatsAtPos(r, c):
if not GameBoard[r][c]:
return ' '
else:
return str(GameBoard[r][c])
# Check to see if players tied
def checkTie():
startIndex = 0
# players have not tied if there is still an empty place in the first row
for i in range(startIndex, numberColumns, 1):
if checkIfFree(i):
return False
    # If there is no space left and checkForWin has already passed, the players tied
print('Tie game! Thanks for playing!\n')
return True
# Function to print the gameboard
def printBoard():
ss = ''
startIndex = 0
# Create column headers (1-7)
for i in range(startIndex, numberColumns, 1):
ss += '|'
ss = ss + str(i + 1)
ss += '|'
ss += '\n'
# Create current GameBoard
startIndex = 0
startIndex_j = 0
for i in range(startIndex, numberRows, 1):
for j in range(startIndex_j, numberColumns, 1):
ss += '|'
ss = ss + str(whatsAtPos(i, j))
ss += '|'
ss += '\n'
print(ss)
@plugin("connect_four")
def METHOD_NAME(jarvis, s):
# Welcome message and rules explanation
print('Welcome to Connect Four! This is a two player game.\n')
print('Enter numbers to place your token in the corresponding column!\n')
print('Match four of your tokens in a row to win. Good Luck!\n')
playerTracker = 0
playAgainFlag = 'y'
while playAgainFlag == 'y':
restartBoard()
printBoard()
while True:
# Make sure column is numeric. If not then ask user for numeric input again instead of throwing error.
notNumericInputFlag = True
while notNumericInputFlag == True:
try:
column = int(input('Pick a column (1-7):\n'))
notNumericInputFlag = False
except ValueError:
print("Enter a valid numeric input.")
column -= 1
# Make sure column is inbounds
            while column < 0 or column > numberColumns - 1:
print('Out of bounds. Pick another column.')
printBoard()
column = int(input('Pick a column (1-7):\n'))
column -= 1
# Make sure column is empty
while not checkIfFree(column):
print('Column is full. Pick another.\n')
printBoard()
column = int(input('Pick a column (1-7):\n'))
column -= 1
# get the players turn and place token now that conditions are met
if playerTracker % 2 == 0:
placeToken("X", column)
else:
placeToken("O", column)
# print updated gameboard
printBoard()
# Check for a win on the last move
if checkForWin(column):
break
# Make sure no one tied with the last move
if checkTie():
break
# increment player tracker
playerTracker += 1
        playAgainFlag = input('Would you like to play again? (Y/N)\n')
playAgainFlag = playAgainFlag.strip()
playAgainFlag = playAgainFlag.lower()
while playAgainFlag != 'n' and playAgainFlag != 'y':
playAgainFlag = input('Please enter Y or N\n')
playAgainFlag = playAgainFlag.strip()
playAgainFlag = playAgainFlag.lower()
print('Thanks for playing!\n')
if __name__ == "__main__":
METHOD_NAME() | [
{
"body": " # Temp row and col values to manipulate throughout function\n row = r\n col = c\n # Count matching tokens to the right. Stop when at end of board\n rightCounter = 0\n while col < numberColumns:\n if whatsAtPos(row, col) == p:\n rightCounter += 1\n else:\n row = r\n break\n col += 1\n # Count matching tokens to the left. Stop when at end of board\n leftCounter = 0\n col = c\n while col >= 0:\n # break if at first column\n if col == 0:\n break\n col -= 1\n if whatsAtPos(row, col) == p:\n leftCounter += 1\n else:\n break\n # Add left and right together to check if numToWin was reached\n if leftCounter + rightCounter >= numToWin:\n print(\"Congrats, player \", p, \" you win horizontally!\\n\")\n return True\n else:\n return False",
"name": "checkHorizWin(r,"
},
{
"body": " winCheck = False\n counter = 0\n if r > numberRows - numToWin:\n return False\n for i in range(r, numberRows, 1):\n if whatsAtPos(i, c) == p:\n counter += 1\n else:\n counter = 0\n if counter == numToWin:\n winCheck = True\n print(\"Congrats, player \", p, \", you win vertically!\\n\")\n break\n return winCheck",
"name": "checkVertWin(r,"
},
{
"body": " row = r\n col = c\n upRight = 0\n while row >= 0 and col <= numberColumns:\n if whatsAtPos(row, col) == p:\n upRight += 1\n else:\n row = r\n col = c\n break\n # If the column is he last column on the board, break the loop\n if col == numberColumns - 1 or row == 0:\n row = r\n col = c\n break\n row -= 1\n col += 1\n downLeft = 0\n while row < numberRows - 1 and col > 0:\n row += 1\n col -= 1\n if whatsAtPos(row, col) == p:\n downLeft += 1\n else:\n row = r\n col = c\n break\n if upRight + downLeft >= numToWin:\n print('Congrats! You won diagonally!')\n return True\n upLeft = 0\n while row >= 0 and col >= 0:\n if whatsAtPos(row, col) == p:\n upLeft += 1\n else:\n row = r\n col = c\n break\n if col == 0 or row == 0:\n row = r\n col = c\n break\n row -= 1\n col -= 1\n downRight = 0\n while row < numberRows - 1 and col < numberColumns - 1:\n row += 1\n col += 1\n if whatsAtPos(row, col) == p:\n downRight += 1\n else:\n break\n if downRight + upLeft >= numToWin:\n print(\"Congrats, player \", p, \" you win diagonally!\\n\")\n return True\n return False",
"name": "checkDiagWin(r,"
},
{
"body": " startIndex = 0\n # players have not tied if there is still an empty place in the first row\n for i in range(startIndex, numberColumns, 1):\n if checkIfFree(i):\n return False\n # If there is no space left and checkForWin already passed the players tied\n print('Tie game! Thanks for playing!\\n')\n return True",
"name": "checkTie():"
},
{
"body": " # Welcome message and rules explanation\n print('Welcome to Connect Four! This is a two player game.\\n')\n print('Enter numbers to place your token in the corresponding column!\\n')\n print('Match four of your tokens in a row to win. Good Luck!\\n')\n playerTracker = 0\n playAgainFlag = 'y'\n while playAgainFlag == 'y':\n restartBoard()\n printBoard()\n while True:\n # Make sure column is numeric. If not then ask user for numeric input again instead of throwing error.\n notNumericInputFlag = True\n while notNumericInputFlag == True:\n try:\n column = int(input('Pick a column (1-7):\\n'))\n notNumericInputFlag = False\n except ValueError:\n print(\"Enter a valid numeric input.\")\n column -= 1\n # Make sure column is inbounds\n while column < 0 or column > numberColumns:\n print('Out of bounds. Pick another column.')\n printBoard()\n column = int(input('Pick a column (1-7):\\n'))\n column -= 1\n # Make sure column is empty\n while not checkIfFree(column):\n print('Column is full. Pick another.\\n')\n printBoard()\n column = int(input('Pick a column (1-7):\\n'))\n column -= 1\n # get the players turn and place token now that conditions are met\n if playerTracker % 2 == 0:\n placeToken(\"X\", column)\n else:\n placeToken(\"O\", column)\n # print updated gameboard\n printBoard()\n # Check for a win on the last move\n if checkForWin(column):\n break\n # Make sure no one tied with the last move\n if checkTie():\n break\n # increment player tracker\n playerTracker += 1\n playAgainFlag = input('Would you like the play again? (Y/N)\\n')\n playAgainFlag = playAgainFlag.strip()\n playAgainFlag = playAgainFlag.lower()\n while playAgainFlag != 'n' and playAgainFlag != 'y':\n playAgainFlag = input('Please enter Y or N\\n')\n playAgainFlag = playAgainFlag.strip()\n playAgainFlag = playAgainFlag.lower()\n print('Thanks for playing!\\n')",
"name": "METHOD_NAME(jarvis,"
}
] |
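A tiny sketch that drives the Connect Four board helpers above directly, outside the Jarvis plugin entry point (which expects a jarvis object and an argument string); it shows how placeToken, printBoard and the win/availability checks fit together.

restartBoard()           # reset the 6x7 board to blanks
placeToken('X', 3)       # X drops to the bottom of column index 3 (shown as column 4)
placeToken('O', 3)       # O lands on top of it
placeToken('X', 4)
printBoard()
print(checkForWin(3))    # False: no four-in-a-row yet
print(checkIfFree(3))    # True: column 3 still has open rows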
14 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkcloudapi.endpoint import endpoint_data
class CreateApiRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'CloudAPI', '2016-07-14', 'CreateApi','apigateway')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_WebSocketApiType(self): # String
return self.get_query_params().get('WebSocketApiType')
def set_WebSocketApiType(self, WebSocketApiType): # String
self.add_query_param('WebSocketApiType', WebSocketApiType)
def get_ErrorCodeSamples(self): # String
return self.get_query_params().get('ErrorCodeSamples')
def set_ErrorCodeSamples(self, ErrorCodeSamples): # String
self.add_query_param('ErrorCodeSamples', ErrorCodeSamples)
def get_AppCodeAuthType(self): # String
return self.get_query_params().get('AppCodeAuthType')
def set_AppCodeAuthType(self, AppCodeAuthType): # String
self.add_query_param('AppCodeAuthType', AppCodeAuthType)
def get_Description(self): # String
return self.get_query_params().get('Description')
def set_Description(self, Description): # String
self.add_query_param('Description', Description)
def get_DisableInternet(self): # Boolean
return self.get_query_params().get('DisableInternet')
def set_DisableInternet(self, DisableInternet): # Boolean
self.add_query_param('DisableInternet', DisableInternet)
def get_BackendId(self): # String
return self.get_query_params().get('BackendId')
def set_BackendId(self, BackendId): # String
self.add_query_param('BackendId', BackendId)
def get_ConstantParameters(self): # String
return self.get_query_params().get('ConstantParameters')
def METHOD_NAME(self, ConstantParameters): # String
self.add_query_param('ConstantParameters', ConstantParameters)
def get_AuthType(self): # String
return self.get_query_params().get('AuthType')
def set_AuthType(self, AuthType): # String
self.add_query_param('AuthType', AuthType)
def get_AllowSignatureMethod(self): # String
return self.get_query_params().get('AllowSignatureMethod')
def set_AllowSignatureMethod(self, AllowSignatureMethod): # String
self.add_query_param('AllowSignatureMethod', AllowSignatureMethod)
def get_ServiceParameters(self): # String
return self.get_query_params().get('ServiceParameters')
def set_ServiceParameters(self, ServiceParameters): # String
self.add_query_param('ServiceParameters', ServiceParameters)
def get_FailResultSample(self): # String
return self.get_query_params().get('FailResultSample')
def set_FailResultSample(self, FailResultSample): # String
self.add_query_param('FailResultSample', FailResultSample)
def get_SystemParameters(self): # String
return self.get_query_params().get('SystemParameters')
def set_SystemParameters(self, SystemParameters): # String
self.add_query_param('SystemParameters', SystemParameters)
def get_ServiceParametersMap(self): # String
return self.get_query_params().get('ServiceParametersMap')
def set_ServiceParametersMap(self, ServiceParametersMap): # String
self.add_query_param('ServiceParametersMap', ServiceParametersMap)
def get_SecurityToken(self): # String
return self.get_query_params().get('SecurityToken')
def set_SecurityToken(self, SecurityToken): # String
self.add_query_param('SecurityToken', SecurityToken)
def get_OpenIdConnectConfig(self): # String
return self.get_query_params().get('OpenIdConnectConfig')
def set_OpenIdConnectConfig(self, OpenIdConnectConfig): # String
self.add_query_param('OpenIdConnectConfig', OpenIdConnectConfig)
def get_RequestParameters(self): # String
return self.get_query_params().get('RequestParameters')
def set_RequestParameters(self, RequestParameters): # String
self.add_query_param('RequestParameters', RequestParameters)
def get_ResultDescriptions(self): # String
return self.get_query_params().get('ResultDescriptions')
def set_ResultDescriptions(self, ResultDescriptions): # String
self.add_query_param('ResultDescriptions', ResultDescriptions)
def get_Visibility(self): # String
return self.get_query_params().get('Visibility')
def set_Visibility(self, Visibility): # String
self.add_query_param('Visibility', Visibility)
def get_GroupId(self): # String
return self.get_query_params().get('GroupId')
def set_GroupId(self, GroupId): # String
self.add_query_param('GroupId', GroupId)
def get_ServiceConfig(self): # String
return self.get_query_params().get('ServiceConfig')
def set_ServiceConfig(self, ServiceConfig): # String
self.add_query_param('ServiceConfig', ServiceConfig)
def get_ResultType(self): # String
return self.get_query_params().get('ResultType')
def set_ResultType(self, ResultType): # String
self.add_query_param('ResultType', ResultType)
def get_ApiName(self): # String
return self.get_query_params().get('ApiName')
def set_ApiName(self, ApiName): # String
self.add_query_param('ApiName', ApiName)
def get_ResultSample(self): # String
return self.get_query_params().get('ResultSample')
def set_ResultSample(self, ResultSample): # String
self.add_query_param('ResultSample', ResultSample)
def get_BackendEnable(self): # Boolean
return self.get_query_params().get('BackendEnable')
def set_BackendEnable(self, BackendEnable): # Boolean
self.add_query_param('BackendEnable', BackendEnable)
def get_ForceNonceCheck(self): # Boolean
return self.get_query_params().get('ForceNonceCheck')
def set_ForceNonceCheck(self, ForceNonceCheck): # Boolean
self.add_query_param('ForceNonceCheck', ForceNonceCheck)
def get_RequestConfig(self): # String
return self.get_query_params().get('RequestConfig')
def set_RequestConfig(self, RequestConfig): # String
self.add_query_param('RequestConfig', RequestConfig)
def get_ResultBodyModel(self): # String
return self.get_query_params().get('ResultBodyModel')
def set_ResultBodyModel(self, ResultBodyModel): # String
self.add_query_param('ResultBodyModel', ResultBodyModel) | [
{
"body": "\t\treturn self.get_query_params().get('Description')",
"name": "get_Description(self):CreateApiRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('Description', Description)",
"name": "set_Description(self,CreateApiRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ConstantParameters')",
"name": "get_ConstantParameters(self):CreateApiRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ConstantParameters', ConstantParameters)",
"name": "METHOD_NAME(self,CreateApiRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('AllowSignatureMethod')",
"name": "get_AllowSignatureMethod(self):CreateApiRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('AllowSignatureMethod', AllowSignatureMethod)",
"name": "set_AllowSignatureMethod(self,CreateApiRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ServiceParameters')",
"name": "get_ServiceParameters(self):CreateApiRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ServiceParameters', ServiceParameters)",
"name": "set_ServiceParameters(self,CreateApiRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('FailResultSample')",
"name": "get_FailResultSample(self):CreateApiRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('FailResultSample', FailResultSample)",
"name": "set_FailResultSample(self,CreateApiRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('SystemParameters')",
"name": "get_SystemParameters(self):CreateApiRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('SystemParameters', SystemParameters)",
"name": "set_SystemParameters(self,CreateApiRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ServiceParametersMap')",
"name": "get_ServiceParametersMap(self):CreateApiRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ServiceParametersMap', ServiceParametersMap)",
"name": "set_ServiceParametersMap(self,CreateApiRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('SecurityToken')",
"name": "get_SecurityToken(self):CreateApiRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('SecurityToken', SecurityToken)",
"name": "set_SecurityToken(self,CreateApiRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('OpenIdConnectConfig')",
"name": "get_OpenIdConnectConfig(self):CreateApiRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('OpenIdConnectConfig', OpenIdConnectConfig)",
"name": "set_OpenIdConnectConfig(self,CreateApiRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('RequestParameters')",
"name": "get_RequestParameters(self):CreateApiRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('RequestParameters', RequestParameters)",
"name": "set_RequestParameters(self,CreateApiRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ResultDescriptions')",
"name": "get_ResultDescriptions(self):CreateApiRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ResultDescriptions', ResultDescriptions)",
"name": "set_ResultDescriptions(self,CreateApiRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ServiceConfig')",
"name": "get_ServiceConfig(self):CreateApiRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ServiceConfig', ServiceConfig)",
"name": "set_ServiceConfig(self,CreateApiRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ResultSample')",
"name": "get_ResultSample(self):CreateApiRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ResultSample', ResultSample)",
"name": "set_ResultSample(self,CreateApiRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('RequestConfig')",
"name": "get_RequestConfig(self):CreateApiRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('RequestConfig', RequestConfig)",
"name": "set_RequestConfig(self,CreateApiRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ResultBodyModel')",
"name": "get_ResultBodyModel(self):CreateApiRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ResultBodyModel', ResultBodyModel)",
"name": "set_ResultBodyModel(self,CreateApiRequest(RpcRequest):"
}
] |
15 | from pathlib import Path
import pytest
from click.testing import CliRunner
from ggshield.__main__ import cli
from ggshield.verticals.hmsl.crypto import hash_string
from tests.unit.conftest import assert_invoke_exited_with, assert_invoke_ok
RESULTS_CONTENT = (
'{"hint": "f7f17c88638b42465b6c620a0c7648ef470e611c1fdf90166aac613601799f81", "payload": '
'"KzzhJEq/pM2RaWav9NAvjw45Gfp/26UGDDzYiDNuOCup0PfoAHNViOGX14/a7uUNWLk53zGIar3s2xOW/xxzMEgTT2owjH52gGalhwKBfTY="}\n'
'{"hint": "71d27eee3aa6c1751110ea338f23a5cfe11da717ea27453e7fe09e1594c3f8e7", "payload": '
'"zmrGtuhTtgxNkk9SA250HTxXQ+mfoJQlZ76CPx50juK4XFCCTbFNv6ZeahGRQqW4+vf92DEwpGTHVzjiQEF6JebJsoRuaMQDSntHQ17z0UU="}\n'
'{"hint": "f1b2fcaf134f3a08513ec6603ee3281511f349166fea5ef3356dd62051a76aa8", "payload": '
'"Qzp7zRkFeIlPhiy6VMeUyo4vaCJAFmuqDQITH4WFC1BH51eHDNcL1UOw5u8dKmBWRJMfY7Zh7atyTl++hbsYDnIItJi8LFO5Yyzj+xte+ik="}\n'
'{"hint": "89740ad4cd63fa9a7637325a7bef91c0ba93d0a45bbf687beb76bacaf5fa8da3", "payload": '
'"kUlYx2lO5dOtFAM7XPT7uyk5v81ajJeg7Uepq1D4oyWQcf3ijMRThqsMrkKkUXSXHcAL182yCAgbub/NDF2wFA+Lyr5qBdb3qBBFLztfFz0="}\n'
'{"hint": "3be2d605a3d143bfea373887ed16e7935be0e3c189cbee4d343c92ed6c89fdb8", "payload": '
'"GZIG82jOLH5gXB5NNJt7NyfUOQUpk720wA3LItmVrXKCIK2PursytFkg/pPtzBXyPifNZtsOaNf5an+5Pz3mVysVMoCF9dXGFt1AFRi8lXk="}\n'
'{"hint": "16787b637f7787685909539f65cc100b591d8c8d1074d0e5491aab33f364c86b", "payload": '
'"4XgUM9pXWrLbQ8tH0AH7Za3u7tObAmlDXBSgwS+IE2m/NeDn3y7KF5H7yPB/faFDfKFirNiijhEfkBgfCz+FmZhDLCCzsga6hZN0S9he6EM="}\n'
'{"hint": "e9ecc350e213860e9472057339443c830581c53e2b4dfb3aaa7e5fa4a854d5a3", "payload": '
'"UDIP09t3tSk2IyQhxnJmF2gaDxhOY4zgrGpOzLeakIOZEmRxlyXYfdN3uFuTutnfdT7ZY+2Am2Q0Vst0L3EfuvomNdx/yL3desUApHq5o5I="}\n'
'{"hint": "31ded0b51235ebde7d5fa10685d33b95e8a20a4e284220351812ca98ed20836b", "payload": '
'"+FuUB48xvYQg1VTf1Jvyif14T8rLJETu3L0y2SJa7fJ+P7HDTDf/ESH8pLjJmadyNB3vl3t8KS3VH+lveCae53yVY66LncUCwuXVKd9s7G0="}\n'
'{"hint": "19b9ba15c838c44d8965ac2300718fd8f9e2a038ff3ca7b3982fae50ec4afbfa", "payload": '
'"YKk5NCIkiS5tmab2lXO1V2mpsPbRC+vAsz+TNHroEcpo8b0YhEjy6SCUXWkYMm2mBUFz3Kmvkqqd59Pdj4EXmvqrl1yNV2LlCCoJGD91SUY="}\n'
'{"hint": "23ef947812513a59de504af2e291f9bbab287b941e0551f442e63f19f979679d", "payload": '
'"0XmzWJNyq3gHbeqb5+T5xSjuwP1qFdrIbvsW4K5Spk+Yn2mfBs92Z3ipEngis2nZMNS+K99h/sh3+hvqTH5T5Z0p/YnCd2f+1E4suGEbVnA="}\n'
'{"hint": "9c9e78a410131e548c733e08b1de9a3dcccbe5cda970cb6ad740655b7741e7b3", "payload": '
'"WDmh3FQvY+i5DO+6bWeOkY5J78jHBHCsEFjl9u1PEpftDS5Htzcc/dQqrzFcYvBwU+RbPLag2z/w7PBW+m472D9R1OExamCWs6MjN65j3L0="}\n'
)
RESULTS_CLEARTEXT_CONTENT = (
'{"hash": "743d9fde380b7064cc6a8d3071184fc47905cf7440e5615cd46c7b6cbfb46d47", '
'"count": 14, "url": "https://github.com/edly-io/devstack/commit/ccfc9c2d63c29'
'17be60a9fd2a4c36ff3a8b9bb8c#diff-e45e45baeda1c1e73482975a664062aa56f20c03dd9d64a827aba57775bed0d3L158"}'
)
@pytest.fixture
def mapping_path(cli_fs_runner, tmp_path: Path):
"""Prepare a mapping file"""
mapping_path = tmp_path / "mapping.txt"
secrets = ["foo", "bar", "password", "1234"]
mapping = {hash_string(secret): secret for secret in secrets}
mapping_path.write_text(
"\n".join(f"{key}:{value}" for key, value in mapping.items())
)
return mapping_path
@pytest.fixture
def results_path(mapping_path: Path):
"""Prepare a results file"""
results_path = mapping_path.parent / "results.txt"
results_path.write_text(RESULTS_CONTENT)
return results_path
@pytest.fixture
def full_hash_result(mapping_path: Path):
"""Prepare a results file"""
results_path = mapping_path.parent / "results.txt"
results_path.write_text(RESULTS_CLEARTEXT_CONTENT)
return results_path
@pytest.mark.parametrize(
"command",
[
["hmsl", "decrypt"],
["hmsl", "decrypt", "none.txt"],
["hmsl", "decrypt", "-m", "none.txt"],
["hmsl", "decrypt", "-m", "none.txt", "void.txt"],
],
)
def METHOD_NAME(cli_fs_runner: CliRunner, command) -> None:
"""
GIVEN a cli
WHEN running on non-existing files or other issues
THEN the return code is 2
"""
result = cli_fs_runner.invoke(cli, command)
assert_invoke_exited_with(result, 2)
def test_hmsl_decrypt_default_behavior(
cli_fs_runner: CliRunner, mapping_path, results_path: Path
) -> None:
"""
GIVEN some secrets
WHEN running the decrypt command on a file
THEN the secrets are correctly decrypted
"""
result = cli_fs_runner.invoke(
cli, ["hmsl", "decrypt", "-m", str(mapping_path), str(results_path)]
)
assert_invoke_ok(result)
assert result.output.count("> Secret ") == 1
assert 'Secret name: "foo"' in result.output
def test_hmsl_decrypt_full_hashes_behavior(
cli_fs_runner: CliRunner, mapping_path, full_hash_result: Path
) -> None:
"""
GIVEN a some full hashes response
WHEN running the decrypt command on a file
THEN the command accepts the decrypted payloads seamlessly
"""
result = cli_fs_runner.invoke(
cli, ["hmsl", "decrypt", "-m", str(mapping_path), str(full_hash_result)]
)
assert_invoke_ok(result)
assert result.output.count("> Secret ") == 1
assert 'Secret name: "password"' in result.output | [
{
"body": " \"\"\"\n GIVEN a cli\n WHEN running on non-existing files or other issues\n THEN the return code is 2\n \"\"\"\n result = cli_fs_runner.invoke(cli, command)\n assert_invoke_exited_with(result, 2)",
"name": "METHOD_NAME(cli_fs_runner:"
}
] |
16 | ## @file
# process FD generation
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
from . import Region
from . import Fv
import Common.LongFilePathOs as os
from io import BytesIO
import sys
from struct import *
from .GenFdsGlobalVariable import GenFdsGlobalVariable
from CommonDataClass.FdfClass import FDClassObject
from Common import EdkLogger
from Common.BuildToolError import *
from Common.Misc import SaveFileOnChange
from Common.DataType import BINARY_FILE_TYPE_FV
## generate FD
#
#
class FD(FDClassObject):
## The constructor
#
# @param self The object pointer
#
def __init__(self):
FDClassObject.__init__(self)
## GenFd() method
#
# Generate FD
#
# @retval string Generated FD file name
#
def METHOD_NAME (self, Flag = False):
if self.FdUiName.upper() + 'fd' in GenFdsGlobalVariable.ImageBinDict:
return GenFdsGlobalVariable.ImageBinDict[self.FdUiName.upper() + 'fd']
#
# Print Information
#
FdFileName = os.path.join(GenFdsGlobalVariable.FvDir, self.FdUiName + '.fd')
if not Flag:
GenFdsGlobalVariable.InfLogger("\nFd File Name:%s (%s)" %(self.FdUiName, FdFileName))
Offset = 0x00
for item in self.BlockSizeList:
Offset = Offset + item[0] * item[1]
if Offset != self.Size:
EdkLogger.error("GenFds", GENFDS_ERROR, 'FD %s Size not consistent with block array' % self.FdUiName)
GenFdsGlobalVariable.VerboseLogger('Following Fv will be add to Fd !!!')
for FvObj in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
GenFdsGlobalVariable.VerboseLogger(FvObj)
HasCapsuleRegion = False
for RegionObj in self.RegionList:
if RegionObj.RegionType == 'CAPSULE':
HasCapsuleRegion = True
break
if HasCapsuleRegion:
TempFdBuffer = BytesIO()
PreviousRegionStart = -1
PreviousRegionSize = 1
for RegionObj in self.RegionList :
if RegionObj.RegionType == 'CAPSULE':
continue
if RegionObj.Offset + RegionObj.Size <= PreviousRegionStart:
pass
elif RegionObj.Offset <= PreviousRegionStart or (RegionObj.Offset >=PreviousRegionStart and RegionObj.Offset < PreviousRegionStart + PreviousRegionSize):
pass
elif RegionObj.Offset > PreviousRegionStart + PreviousRegionSize:
if not Flag:
GenFdsGlobalVariable.InfLogger('Padding region starting from offset 0x%X, with size 0x%X' %(PreviousRegionStart + PreviousRegionSize, RegionObj.Offset - (PreviousRegionStart + PreviousRegionSize)))
PadRegion = Region.Region()
PadRegion.Offset = PreviousRegionStart + PreviousRegionSize
PadRegion.Size = RegionObj.Offset - PadRegion.Offset
if not Flag:
PadRegion.AddToBuffer(TempFdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict)
PreviousRegionStart = RegionObj.Offset
PreviousRegionSize = RegionObj.Size
#
# Call each region's AddToBuffer function
#
if PreviousRegionSize > self.Size:
pass
GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function')
RegionObj.AddToBuffer (TempFdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict)
FdBuffer = BytesIO()
PreviousRegionStart = -1
PreviousRegionSize = 1
for RegionObj in self.RegionList :
if RegionObj.Offset + RegionObj.Size <= PreviousRegionStart:
EdkLogger.error("GenFds", GENFDS_ERROR,
'Region offset 0x%X in wrong order with Region starting from 0x%X, size 0x%X\nRegions in FDF must have offsets appear in ascending order.'\
% (RegionObj.Offset, PreviousRegionStart, PreviousRegionSize))
elif RegionObj.Offset <= PreviousRegionStart or (RegionObj.Offset >=PreviousRegionStart and RegionObj.Offset < PreviousRegionStart + PreviousRegionSize):
EdkLogger.error("GenFds", GENFDS_ERROR,
'Region offset 0x%X overlaps with Region starting from 0x%X, size 0x%X' \
% (RegionObj.Offset, PreviousRegionStart, PreviousRegionSize))
elif RegionObj.Offset > PreviousRegionStart + PreviousRegionSize:
if not Flag:
GenFdsGlobalVariable.InfLogger('Padding region starting from offset 0x%X, with size 0x%X' %(PreviousRegionStart + PreviousRegionSize, RegionObj.Offset - (PreviousRegionStart + PreviousRegionSize)))
PadRegion = Region.Region()
PadRegion.Offset = PreviousRegionStart + PreviousRegionSize
PadRegion.Size = RegionObj.Offset - PadRegion.Offset
if not Flag:
PadRegion.AddToBuffer(FdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict)
PreviousRegionStart = RegionObj.Offset
PreviousRegionSize = RegionObj.Size
#
# Verify current region fits within allocated FD section Size
#
if PreviousRegionStart + PreviousRegionSize > self.Size:
EdkLogger.error("GenFds", GENFDS_ERROR,
'FD %s size too small to fit region with offset 0x%X and size 0x%X'
% (self.FdUiName, PreviousRegionStart, PreviousRegionSize))
#
# Call each region's AddToBuffer function
#
GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function')
RegionObj.AddToBuffer (FdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict, Flag=Flag)
#
# Write the buffer contents to Fd file
#
GenFdsGlobalVariable.VerboseLogger('Write the buffer contents to Fd file')
if not Flag:
SaveFileOnChange(FdFileName, FdBuffer.getvalue())
FdBuffer.close()
GenFdsGlobalVariable.ImageBinDict[self.FdUiName.upper() + 'fd'] = FdFileName
return FdFileName
## generate flash map file
#
# @param self The object pointer
#
def GenFlashMap (self):
pass
| [
{
"body": " if self.FdUiName.upper() + 'fd' in GenFdsGlobalVariable.ImageBinDict:\n return GenFdsGlobalVariable.ImageBinDict[self.FdUiName.upper() + 'fd']\n #\n # Print Information\n #\n FdFileName = os.path.join(GenFdsGlobalVariable.FvDir, self.FdUiName + '.fd')\n if not Flag:\n GenFdsGlobalVariable.InfLogger(\"\\nFd File Name:%s (%s)\" %(self.FdUiName, FdFileName))\n Offset = 0x00\n for item in self.BlockSizeList:\n Offset = Offset + item[0] * item[1]\n if Offset != self.Size:\n EdkLogger.error(\"GenFds\", GENFDS_ERROR, 'FD %s Size not consistent with block array' % self.FdUiName)\n GenFdsGlobalVariable.VerboseLogger('Following Fv will be add to Fd !!!')\n for FvObj in GenFdsGlobalVariable.FdfParser.Profile.FvDict:\n GenFdsGlobalVariable.VerboseLogger(FvObj)\n HasCapsuleRegion = False\n for RegionObj in self.RegionList:\n if RegionObj.RegionType == 'CAPSULE':\n HasCapsuleRegion = True\n break\n if HasCapsuleRegion:\n TempFdBuffer = BytesIO()\n PreviousRegionStart = -1\n PreviousRegionSize = 1\n for RegionObj in self.RegionList :\n if RegionObj.RegionType == 'CAPSULE':\n continue\n if RegionObj.Offset + RegionObj.Size <= PreviousRegionStart:\n pass\n elif RegionObj.Offset <= PreviousRegionStart or (RegionObj.Offset >=PreviousRegionStart and RegionObj.Offset < PreviousRegionStart + PreviousRegionSize):\n pass\n elif RegionObj.Offset > PreviousRegionStart + PreviousRegionSize:\n if not Flag:\n GenFdsGlobalVariable.InfLogger('Padding region starting from offset 0x%X, with size 0x%X' %(PreviousRegionStart + PreviousRegionSize, RegionObj.Offset - (PreviousRegionStart + PreviousRegionSize)))\n PadRegion = Region.Region()\n PadRegion.Offset = PreviousRegionStart + PreviousRegionSize\n PadRegion.Size = RegionObj.Offset - PadRegion.Offset\n if not Flag:\n PadRegion.AddToBuffer(TempFdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict)\n PreviousRegionStart = RegionObj.Offset\n PreviousRegionSize = RegionObj.Size\n #\n # Call each region's AddToBuffer function\n #\n if PreviousRegionSize > self.Size:\n pass\n GenFdsGlobalVariable.VerboseLogger('Call each region\\'s AddToBuffer function')\n RegionObj.AddToBuffer (TempFdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict)\n FdBuffer = BytesIO()\n PreviousRegionStart = -1\n PreviousRegionSize = 1\n for RegionObj in self.RegionList :\n if RegionObj.Offset + RegionObj.Size <= PreviousRegionStart:\n EdkLogger.error(\"GenFds\", GENFDS_ERROR,\n 'Region offset 0x%X in wrong order with Region starting from 0x%X, size 0x%X\\nRegions in FDF must have offsets appear in ascending order.'\\\n % (RegionObj.Offset, PreviousRegionStart, PreviousRegionSize))\n elif RegionObj.Offset <= PreviousRegionStart or (RegionObj.Offset >=PreviousRegionStart and RegionObj.Offset < PreviousRegionStart + PreviousRegionSize):\n EdkLogger.error(\"GenFds\", GENFDS_ERROR,\n 'Region offset 0x%X overlaps with Region starting from 0x%X, size 0x%X' \\\n % (RegionObj.Offset, PreviousRegionStart, PreviousRegionSize))\n elif RegionObj.Offset > PreviousRegionStart + PreviousRegionSize:\n if not Flag:\n GenFdsGlobalVariable.InfLogger('Padding region starting from offset 0x%X, with size 0x%X' %(PreviousRegionStart + PreviousRegionSize, RegionObj.Offset - (PreviousRegionStart + PreviousRegionSize)))\n PadRegion = Region.Region()\n PadRegion.Offset = PreviousRegionStart + PreviousRegionSize\n PadRegion.Size = RegionObj.Offset - PadRegion.Offset\n if not Flag:\n 
PadRegion.AddToBuffer(FdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict)\n PreviousRegionStart = RegionObj.Offset\n PreviousRegionSize = RegionObj.Size\n #\n # Verify current region fits within allocated FD section Size\n #\n if PreviousRegionStart + PreviousRegionSize > self.Size:\n EdkLogger.error(\"GenFds\", GENFDS_ERROR,\n 'FD %s size too small to fit region with offset 0x%X and size 0x%X'\n % (self.FdUiName, PreviousRegionStart, PreviousRegionSize))\n #\n # Call each region's AddToBuffer function\n #\n GenFdsGlobalVariable.VerboseLogger('Call each region\\'s AddToBuffer function')\n RegionObj.AddToBuffer (FdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict, Flag=Flag)\n #\n # Write the buffer contents to Fd file\n #\n GenFdsGlobalVariable.VerboseLogger('Write the buffer contents to Fd file')\n if not Flag:\n SaveFileOnChange(FdFileName, FdBuffer.getvalue())\n FdBuffer.close()\n GenFdsGlobalVariable.ImageBinDict[self.FdUiName.upper() + 'fd'] = FdFileName\n return FdFileName",
"name": "METHOD_NAMEFD(FDClassObject):"
}
] |
17 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class DescribeSnapshotsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'DescribeSnapshots','ecs')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ResourceOwnerId(self): # Long
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self, ResourceOwnerId): # Long
self.add_query_param('ResourceOwnerId', ResourceOwnerId)
def get_Filter2Value(self): # String
return self.get_query_params().get('Filter.2.Value')
def set_Filter2Value(self, Filter2Value): # String
self.add_query_param('Filter.2.Value', Filter2Value)
def get_SnapshotIds(self): # String
return self.get_query_params().get('SnapshotIds')
def set_SnapshotIds(self, SnapshotIds): # String
self.add_query_param('SnapshotIds', SnapshotIds)
def get_Usage(self): # String
return self.get_query_params().get('Usage')
def METHOD_NAME(self, Usage): # String
self.add_query_param('Usage', Usage)
def get_SnapshotLinkId(self): # String
return self.get_query_params().get('SnapshotLinkId')
def set_SnapshotLinkId(self, SnapshotLinkId): # String
self.add_query_param('SnapshotLinkId', SnapshotLinkId)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_Filter1Key(self): # String
return self.get_query_params().get('Filter.1.Key')
def set_Filter1Key(self, Filter1Key): # String
self.add_query_param('Filter.1.Key', Filter1Key)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
def get_DryRun(self): # Boolean
return self.get_query_params().get('DryRun')
def set_DryRun(self, DryRun): # Boolean
self.add_query_param('DryRun', DryRun)
def get_Filter1Value(self): # String
return self.get_query_params().get('Filter.1.Value')
def set_Filter1Value(self, Filter1Value): # String
self.add_query_param('Filter.1.Value', Filter1Value)
def get_OwnerId(self): # Long
return self.get_query_params().get('OwnerId')
def set_OwnerId(self, OwnerId): # Long
self.add_query_param('OwnerId', OwnerId)
def get_InstanceId(self): # String
return self.get_query_params().get('InstanceId')
def set_InstanceId(self, InstanceId): # String
self.add_query_param('InstanceId', InstanceId)
def get_MaxResults(self): # Integer
return self.get_query_params().get('MaxResults')
def set_MaxResults(self, MaxResults): # Integer
self.add_query_param('MaxResults', MaxResults)
def get_Status(self): # String
return self.get_query_params().get('Status')
def set_Status(self, Status): # String
self.add_query_param('Status', Status)
def get_SnapshotName(self): # String
return self.get_query_params().get('SnapshotName')
def set_SnapshotName(self, SnapshotName): # String
self.add_query_param('SnapshotName', SnapshotName)
def get_PageNumber(self): # Integer
return self.get_query_params().get('PageNumber')
def set_PageNumber(self, PageNumber): # Integer
self.add_query_param('PageNumber', PageNumber)
def get_NextToken(self): # String
return self.get_query_params().get('NextToken')
def set_NextToken(self, NextToken): # String
self.add_query_param('NextToken', NextToken)
def get_PageSize(self): # Integer
return self.get_query_params().get('PageSize')
def set_PageSize(self, PageSize): # Integer
self.add_query_param('PageSize', PageSize)
def get_DiskId(self): # String
return self.get_query_params().get('DiskId')
def set_DiskId(self, DiskId): # String
self.add_query_param('DiskId', DiskId)
def get_ResourceOwnerAccount(self): # String
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
def get_OwnerAccount(self): # String
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self, OwnerAccount): # String
self.add_query_param('OwnerAccount', OwnerAccount)
def get_SourceDiskType(self): # String
return self.get_query_params().get('SourceDiskType')
def set_SourceDiskType(self, SourceDiskType): # String
self.add_query_param('SourceDiskType', SourceDiskType)
def get_Filter2Key(self): # String
return self.get_query_params().get('Filter.2.Key')
def set_Filter2Key(self, Filter2Key): # String
self.add_query_param('Filter.2.Key', Filter2Key)
def get_Encrypted(self): # Boolean
return self.get_query_params().get('Encrypted')
def set_Encrypted(self, Encrypted): # Boolean
self.add_query_param('Encrypted', Encrypted)
def get_SnapshotType(self): # String
return self.get_query_params().get('SnapshotType')
def set_SnapshotType(self, SnapshotType): # String
self.add_query_param('SnapshotType', SnapshotType)
def get_KMSKeyId(self): # String
return self.get_query_params().get('KMSKeyId')
def set_KMSKeyId(self, KMSKeyId): # String
self.add_query_param('KMSKeyId', KMSKeyId)
def get_Category(self): # String
return self.get_query_params().get('Category')
def set_Category(self, Category): # String
self.add_query_param('Category', Category) | [
{
"body": "\t\treturn self.get_query_params().get('ResourceOwnerId')",
"name": "get_ResourceOwnerId(self):DescribeSnapshotsRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ResourceOwnerId', ResourceOwnerId)",
"name": "set_ResourceOwnerId(self,DescribeSnapshotsRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('SnapshotIds')",
"name": "get_SnapshotIds(self):DescribeSnapshotsRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('Usage', Usage)",
"name": "METHOD_NAME(self,DescribeSnapshotsRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('SnapshotLinkId')",
"name": "get_SnapshotLinkId(self):DescribeSnapshotsRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ResourceGroupId')",
"name": "get_ResourceGroupId(self):DescribeSnapshotsRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ResourceGroupId', ResourceGroupId)",
"name": "set_ResourceGroupId(self,DescribeSnapshotsRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('SnapshotName')",
"name": "get_SnapshotName(self):DescribeSnapshotsRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ResourceOwnerAccount')",
"name": "get_ResourceOwnerAccount(self):DescribeSnapshotsRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)",
"name": "set_ResourceOwnerAccount(self,DescribeSnapshotsRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('OwnerAccount')",
"name": "get_OwnerAccount(self):DescribeSnapshotsRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('SourceDiskType')",
"name": "get_SourceDiskType(self):DescribeSnapshotsRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('SnapshotType')",
"name": "get_SnapshotType(self):DescribeSnapshotsRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('Category')",
"name": "get_Category(self):DescribeSnapshotsRequest(RpcRequest):"
}
] |
18 | import IMP
import IMP.atom
import IMP.pmi
import IMP.test
import IMP.isd
import IMP.pmi.restraints.proteomics
import IMP.pmi.io
import IMP.pmi.restraints
import IMP.pmi.restraints.basic
import IMP.rmf
import RMF
import math
import sys
class MembraneRestraintPrototype(IMP.Restraint):
def __init__(self,
m,
z_nuisance,
thickness=30.0,
softness=3.0,
plateau=0.0000000001,
linear=0.02):
'''
input a list of particles, the slope and theta of the sigmoid potential
theta is the cutoff distance for a protein-protein contact
'''
IMP.Restraint.__init__(self, m, "MembraneRestraintPrototype_ %1%")
self.set_was_used(True)
self.thickness = thickness
self.z_nuisance = z_nuisance
self.softness = softness
self.plateau = plateau
self.particle_list_below = []
self.particle_list_above = []
self.particle_list_inside = []
self.max_float = sys.float_info.max
self.log_max_float = math.log(self.max_float)
self.linear = linear
def add_particles_below(self, particles):
self.particle_list_below += particles
def add_particles_above(self, particles):
self.particle_list_above += particles
def add_particles_inside(self, particles):
self.particle_list_inside += particles
def score_above(self, z):
argvalue = (z - self.z_slope_center_upper) / self.softness
prob = (1.0 - self.plateau) / (1.0 + math.exp(-argvalue))
return -math.log(prob * self.max_float) + self.log_max_float
def score_below(self, z):
argvalue = (z - self.z_slope_center_lower) / self.softness
prob = (1.0 - self.plateau) / (1.0 + math.exp(argvalue))
return -math.log(prob * self.max_float) + self.log_max_float
def score_inside(self, z):
argvalue = (z - self.z_slope_center_upper) / self.softness
prob1 = 1.0 - (1.0 - self.plateau) / (1.0 + math.exp(-argvalue))
argvalue = (z - self.z_slope_center_lower) / self.softness
prob2 = 1.0 - (1.0 - self.plateau) / (1.0 + math.exp(argvalue))
return (-math.log(prob1 * self.max_float)
- math.log(prob2 * self.max_float)
+ 2 * self.log_max_float)
def unprotected_evaluate(self, da):
z_center = IMP.isd.Nuisance(self.z_nuisance).get_nuisance()
self.z_slope_center_lower = z_center - self.thickness / 2.0
self.z_slope_center_upper = z_center + self.thickness / 2.0
score_above = sum([self.score_above(IMP.core.XYZ(p).get_z())
for p in self.particle_list_above])
score_below = sum([self.score_below(IMP.core.XYZ(p).get_z())
for p in self.particle_list_below])
score_inside = sum([self.score_inside(IMP.core.XYZ(p).get_z())
for p in self.particle_list_inside])
return score_above + score_below + score_inside
def do_get_inputs(self):
particle_list = self.particle_list_above + \
self.particle_list_inside + self.particle_list_below
return particle_list
class MembraneRestraint(IMP.test.TestCase):
def test_inside(self):
m = IMP.Model()
atom = IMP.Particle(m)
d = IMP.core.XYZ.setup_particle(atom)
p = IMP.Particle(m)
z_center = IMP.isd.Nuisance.setup_particle(p)
z_center.set_nuisance(0.0)
r = MembraneRestraintPrototype(m, z_center)
r.add_particles_inside([atom])
r2 = IMP.pmi.MembraneRestraint(
m, z_center.get_particle_index(), 30.0, 3.0, 0.0000000001, 0.02)
r2.set_was_used(True)
r2.add_particles_inside([atom.get_index()])
for z_c in range(-500, 500, 100):
z_center.set_nuisance(z_c)
for z in range(-500, 500, 10):
IMP.core.XYZ(atom).set_z(z)
self.assertAlmostEqual(
r.unprotected_evaluate(None), r2.unprotected_evaluate(None),
delta=1e-4)
self.assertEqual(r2.get_inputs(), [atom, z_center.get_particle()])
def test_above(self):
m = IMP.Model()
atom = IMP.Particle(m)
d = IMP.core.XYZ.setup_particle(atom)
p = IMP.Particle(m)
z_center = IMP.isd.Nuisance.setup_particle(p)
z_center.set_nuisance(0.0)
r = MembraneRestraintPrototype(m, z_center)
r.add_particles_above([atom])
r2 = IMP.pmi.MembraneRestraint(
m, z_center.get_particle_index(), 30.0, 3.0, 0.0000000001, 0.02)
r2.set_was_used(True)
r2.add_particles_above([atom.get_index()])
for z_c in range(-500, 500, 100):
z_center.set_nuisance(z_c)
for z in range(-500, 500, 10):
IMP.core.XYZ(atom).set_z(z)
self.assertAlmostEqual(
r.unprotected_evaluate(None), r2.unprotected_evaluate(None),
delta=1e-4)
def test_below(self):
m = IMP.Model()
atom = IMP.Particle(m)
d = IMP.core.XYZ.setup_particle(atom)
p = IMP.Particle(m)
z_center = IMP.isd.Nuisance.setup_particle(p)
z_center.set_nuisance(0.0)
r = MembraneRestraintPrototype(m, z_center)
r.add_particles_below([atom])
r2 = IMP.pmi.MembraneRestraint(
m, z_center.get_particle_index(), 30.0, 3.0, 0.0000000001, 0.02)
r2.set_was_used(True)
r2.add_particles_below([atom.get_index()])
for z_c in range(-500, 500, 100):
z_center.set_nuisance(z_c)
for z in range(-500, 500, 10):
IMP.core.XYZ(atom).set_z(z)
self.assertAlmostEqual(
r.unprotected_evaluate(None), r2.unprotected_evaluate(None),
delta=1e-4)
def METHOD_NAME(self):
m = IMP.Model()
s = IMP.pmi.topology.System(m)
st = s.create_state()
len_helix = 40
mol = st.create_molecule("helix",sequence='A'*len_helix, chain_id='A')
mol.add_representation(mol,
resolutions=[1],
ideal_helix=True)
hier = s.build()
mr = IMP.pmi.restraints.basic.MembraneRestraint(hier,
objects_inside=[(11,30,'helix')],
objects_above=[(1,10,'helix')],
objects_below = [(31,40,'helix')])
p_inside = mr.get_particles_inside()
self.assertEqual(len(p_inside), 20)
p_above = mr.get_particles_above()
self.assertEqual(len(p_above), 10)
p_below = mr.get_particles_below()
self.assertEqual(len(p_below), 10)
if __name__ == '__main__':
IMP.test.main() | [
{
"body": " m,\n z_nuisance,\n thickness=30.0,\n softness=3.0,\n plateau=0.0000000001,\n linear=0.02):\n '''\n input a list of particles, the slope and theta of the sigmoid potential\n theta is the cutoff distance for a protein-protein contact\n '''\n IMP.Restraint.__init__(self, m, \"MembraneRestraintPrototype_ %1%\")\n self.set_was_used(True)\n self.thickness = thickness\n self.z_nuisance = z_nuisance\n self.softness = softness\n self.plateau = plateau\n self.particle_list_below = []\n self.particle_list_above = []\n self.particle_list_inside = []\n self.max_float = sys.float_info.max\n self.log_max_float = math.log(self.max_float)\n self.linear = linear",
"name": "__init__(self,MembraneRestraint(IMP.test.TestCase):"
},
{
"body": " self.particle_list_below += particles",
"name": "add_particles_below(self,MembraneRestraint(IMP.test.TestCase):"
},
{
"body": " self.particle_list_above += particles",
"name": "add_particles_above(self,MembraneRestraint(IMP.test.TestCase):"
},
{
"body": " self.particle_list_inside += particles",
"name": "add_particles_inside(self,MembraneRestraint(IMP.test.TestCase):"
},
{
"body": " particle_list = self.particle_list_above + \\\n self.particle_list_inside + self.particle_list_below\n return particle_list",
"name": "do_get_inputs(self):MembraneRestraint(IMP.test.TestCase):"
},
{
"body": " m = IMP.Model()\n s = IMP.pmi.topology.System(m)\n st = s.create_state()\n len_helix = 40\n mol = st.create_molecule(\"helix\",sequence='A'*len_helix, chain_id='A')\n mol.add_representation(mol,\n resolutions=[1],\n ideal_helix=True)\n hier = s.build()\n mr = IMP.pmi.restraints.basic.MembraneRestraint(hier,\n objects_inside=[(11,30,'helix')],\n objects_above=[(1,10,'helix')],\n objects_below = [(31,40,'helix')])\n p_inside = mr.get_particles_inside()\n self.assertEqual(len(p_inside), 20)\n p_above = mr.get_particles_above()\n self.assertEqual(len(p_above), 10)\n p_below = mr.get_particles_below()\n self.assertEqual(len(p_below), 10)",
"name": "METHOD_NAME(self):MembraneRestraint(IMP.test.TestCase):"
}
] |
19 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkga.endpoint import endpoint_data
class CreateAcceleratorRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ga', '2019-11-20', 'CreateAccelerator','gaplus')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_ClientToken(self): # String
return self.get_query_params().get('ClientToken')
def set_ClientToken(self, ClientToken): # String
self.add_query_param('ClientToken', ClientToken)
def get_IpSetConfig(self): # Struct
return self.get_query_params().get('IpSetConfig')
def set_IpSetConfig(self, IpSetConfig): # Struct
if IpSetConfig.get('AccessMode') is not None:
self.add_query_param('IpSetConfig.AccessMode', IpSetConfig.get('AccessMode'))
def get_AutoUseCoupon(self): # String
return self.get_query_params().get('AutoUseCoupon')
def set_AutoUseCoupon(self, AutoUseCoupon): # String
self.add_query_param('AutoUseCoupon', AutoUseCoupon)
def get_AutoRenewDuration(self): # Integer
return self.get_query_params().get('AutoRenewDuration')
def set_AutoRenewDuration(self, AutoRenewDuration): # Integer
self.add_query_param('AutoRenewDuration', AutoRenewDuration)
def get_Spec(self): # String
return self.get_query_params().get('Spec')
def set_Spec(self, Spec): # String
self.add_query_param('Spec', Spec)
def get_Duration(self): # Integer
return self.get_query_params().get('Duration')
def set_Duration(self, Duration): # Integer
self.add_query_param('Duration', Duration)
def get_ResourceGroupId(self): # String
return self.get_query_params().get('ResourceGroupId')
def set_ResourceGroupId(self, ResourceGroupId): # String
self.add_query_param('ResourceGroupId', ResourceGroupId)
def get_Tags(self): # RepeatList
return self.get_query_params().get('Tag')
def set_Tags(self, Tag): # RepeatList
for depth1 in range(len(Tag)):
if Tag[depth1].get('Key') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))
if Tag[depth1].get('Value') is not None:
self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
def get_InstanceChargeType(self): # String
return self.get_query_params().get('InstanceChargeType')
def set_InstanceChargeType(self, InstanceChargeType): # String
self.add_query_param('InstanceChargeType', InstanceChargeType)
def get_AutoPay(self): # Boolean
return self.get_query_params().get('AutoPay')
def set_AutoPay(self, AutoPay): # Boolean
self.add_query_param('AutoPay', AutoPay)
def METHOD_NAME(self): # Boolean
return self.get_query_params().get('DryRun')
def set_DryRun(self, DryRun): # Boolean
self.add_query_param('DryRun', DryRun)
def get_PromotionOptionNo(self): # String
return self.get_query_params().get('PromotionOptionNo')
def set_PromotionOptionNo(self, PromotionOptionNo): # String
self.add_query_param('PromotionOptionNo', PromotionOptionNo)
def get_BandwidthBillingType(self): # String
return self.get_query_params().get('BandwidthBillingType')
def set_BandwidthBillingType(self, BandwidthBillingType): # String
self.add_query_param('BandwidthBillingType', BandwidthBillingType)
def get_AutoRenew(self): # Boolean
return self.get_query_params().get('AutoRenew')
def set_AutoRenew(self, AutoRenew): # Boolean
self.add_query_param('AutoRenew', AutoRenew)
def get_Name(self): # String
return self.get_query_params().get('Name')
def set_Name(self, Name): # String
self.add_query_param('Name', Name)
def get_PricingCycle(self): # String
return self.get_query_params().get('PricingCycle')
def set_PricingCycle(self, PricingCycle): # String
self.add_query_param('PricingCycle', PricingCycle) | [
{
"body": "\t\treturn self.get_query_params().get('ClientToken')",
"name": "get_ClientToken(self):CreateAcceleratorRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('Duration')",
"name": "get_Duration(self):CreateAcceleratorRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('ResourceGroupId')",
"name": "get_ResourceGroupId(self):CreateAcceleratorRequest(RpcRequest):"
},
{
"body": "\t\tself.add_query_param('ResourceGroupId', ResourceGroupId)",
"name": "set_ResourceGroupId(self,CreateAcceleratorRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('InstanceChargeType')",
"name": "get_InstanceChargeType(self):CreateAcceleratorRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('DryRun')",
"name": "METHOD_NAME(self):CreateAcceleratorRequest(RpcRequest):"
},
{
"body": "\t\treturn self.get_query_params().get('PromotionOptionNo')",
"name": "get_PromotionOptionNo(self):CreateAcceleratorRequest(RpcRequest):"
}
] |
20 | from __future__ import print_function
## @file
# Utility functions and classes for BaseTools unit tests
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
import base64
import os
import os.path
import random
import shutil
import subprocess
import sys
import unittest
import codecs
TestsDir = os.path.realpath(os.path.split(sys.argv[0])[0])
BaseToolsDir = os.path.realpath(os.path.join(TestsDir, '..'))
CSourceDir = os.path.join(BaseToolsDir, 'Source', 'C')
PythonSourceDir = os.path.join(BaseToolsDir, 'Source', 'Python')
TestTempDir = os.path.join(TestsDir, 'TestTempDir')
if PythonSourceDir not in sys.path:
#
# Allow unit tests to import BaseTools python modules. This is very useful
# for writing unit tests.
#
sys.path.append(PythonSourceDir)
def MakeTheTestSuite(localItems):
tests = []
for name, item in localItems.items():
if isinstance(item, type):
if issubclass(item, unittest.TestCase):
tests.append(unittest.TestLoader().loadTestsFromTestCase(item))
elif issubclass(item, unittest.TestSuite):
tests.append(item())
return lambda: unittest.TestSuite(tests)
def GetBaseToolsPaths():
if sys.platform in ('win32', 'win64'):
return [ os.path.join(BaseToolsDir, 'Bin', sys.platform.title()) ]
else:
uname = os.popen('uname -sm').read().strip()
for char in (' ', '/'):
uname = uname.replace(char, '-')
return [
os.path.join(BaseToolsDir, 'Bin', uname),
os.path.join(BaseToolsDir, 'BinWrappers', uname),
os.path.join(BaseToolsDir, 'BinWrappers', 'PosixLike')
]
BaseToolsBinPaths = GetBaseToolsPaths()
class BaseToolsTest(unittest.TestCase):
def cleanOutDir(self, dir):
for dirItem in os.listdir(dir):
if dirItem in ('.', '..'): continue
dirItem = os.path.join(dir, dirItem)
self.RemoveFileOrDir(dirItem)
def CleanUpTmpDir(self):
if os.path.exists(self.testDir):
self.cleanOutDir(self.testDir)
def HandleTreeDeleteError(self, function, path, excinfo):
os.chmod(path, stat.S_IWRITE)
function(path)
def METHOD_NAME(self, dir):
shutil.rmtree(dir, False, self.HandleTreeDeleteError)
def RemoveFileOrDir(self, path):
if not os.path.exists(path):
return
elif os.path.isdir(path):
self.METHOD_NAME(path)
else:
os.remove(path)
def DisplayBinaryData(self, description, data):
print(description, '(base64 encoded):')
b64data = base64.b64encode(data)
print(b64data)
def DisplayFile(self, fileName):
sys.stdout.write(self.ReadTmpFile(fileName))
sys.stdout.flush()
def FindToolBin(self, toolName):
for binPath in BaseToolsBinPaths:
bin = os.path.join(binPath, toolName)
if os.path.exists(bin):
break
assert os.path.exists(bin)
return bin
def RunTool(self, *args, **kwd):
if 'toolName' in kwd: toolName = kwd['toolName']
else: toolName = None
if 'logFile' in kwd: logFile = kwd['logFile']
else: logFile = None
if toolName is None: toolName = self.toolName
bin = self.FindToolBin(toolName)
if logFile is not None:
logFile = open(os.path.join(self.testDir, logFile), 'w')
popenOut = logFile
else:
popenOut = subprocess.PIPE
args = [toolName] + list(args)
Proc = subprocess.Popen(
args, executable=bin,
stdout=popenOut, stderr=subprocess.STDOUT
)
if logFile is None:
Proc.stdout.read()
return Proc.wait()
def GetTmpFilePath(self, fileName):
return os.path.join(self.testDir, fileName)
def OpenTmpFile(self, fileName, mode = 'r'):
return open(os.path.join(self.testDir, fileName), mode)
def ReadTmpFile(self, fileName):
f = open(self.GetTmpFilePath(fileName), 'r')
data = f.read()
f.close()
return data
def WriteTmpFile(self, fileName, data):
if isinstance(data, bytes):
with open(self.GetTmpFilePath(fileName), 'wb') as f:
f.write(data)
else:
with codecs.open(self.GetTmpFilePath(fileName), 'w', encoding='utf-8') as f:
f.write(data)
def GenRandomFileData(self, fileName, minlen = None, maxlen = None):
if maxlen is None: maxlen = minlen
f = self.OpenTmpFile(fileName, 'w')
f.write(self.GetRandomString(minlen, maxlen))
f.close()
def GetRandomString(self, minlen = None, maxlen = None):
if minlen is None: minlen = 1024
if maxlen is None: maxlen = minlen
return ''.join(
[chr(random.randint(0, 255))
for x in range(random.randint(minlen, maxlen))
])
def setUp(self):
self.savedEnvPath = os.environ['PATH']
self.savedSysPath = sys.path[:]
for binPath in BaseToolsBinPaths:
os.environ['PATH'] = \
os.path.pathsep.join((os.environ['PATH'], binPath))
self.testDir = TestTempDir
if not os.path.exists(self.testDir):
os.mkdir(self.testDir)
else:
self.cleanOutDir(self.testDir)
def tearDown(self):
self.RemoveFileOrDir(self.testDir)
os.environ['PATH'] = self.savedEnvPath
sys.path = self.savedSysPath
| [
{
"body": " tests = []\n for name, item in localItems.items():\n if isinstance(item, type):\n if issubclass(item, unittest.TestCase):\n tests.append(unittest.TestLoader().loadTestsFromTestCase(item))\n elif issubclass(item, unittest.TestSuite):\n tests.append(item())\n return lambda: unittest.TestSuite(tests)",
"name": "MakeTheTestSuite(localItems):BaseToolsTest(unittest.TestCase):"
},
{
"body": " for dirItem in os.listdir(dir):\n if dirItem in ('.', '..'): continue\n dirItem = os.path.join(dir, dirItem)\n self.RemoveFileOrDir(dirItem)",
"name": "cleanOutDir(self,BaseToolsTest(unittest.TestCase):"
},
{
"body": " shutil.rmtree(dir, False, self.HandleTreeDeleteError)",
"name": "METHOD_NAME(self,BaseToolsTest(unittest.TestCase):"
},
{
"body": " if not os.path.exists(path):\n return\n elif os.path.isdir(path):\n self.METHOD_NAME(path)\n else:\n os.remove(path)",
"name": "RemoveFileOrDir(self,BaseToolsTest(unittest.TestCase):"
},
{
"body": " for binPath in BaseToolsBinPaths:\n bin = os.path.join(binPath, toolName)\n if os.path.exists(bin):\n break\n assert os.path.exists(bin)\n return bin",
"name": "FindToolBin(self,BaseToolsTest(unittest.TestCase):"
},
{
"body": " if 'toolName' in kwd: toolName = kwd['toolName']\n else: toolName = None\n if 'logFile' in kwd: logFile = kwd['logFile']\n else: logFile = None\n if toolName is None: toolName = self.toolName\n bin = self.FindToolBin(toolName)\n if logFile is not None:\n logFile = open(os.path.join(self.testDir, logFile), 'w')\n popenOut = logFile\n else:\n popenOut = subprocess.PIPE\n args = [toolName] + list(args)\n Proc = subprocess.Popen(\n args, executable=bin,\n stdout=popenOut, stderr=subprocess.STDOUT\n )\n if logFile is None:\n Proc.stdout.read()\n return Proc.wait()",
"name": "RunTool(self,BaseToolsTest(unittest.TestCase):"
},
{
"body": " if isinstance(data, bytes):\n with open(self.GetTmpFilePath(fileName), 'wb') as f:\n f.write(data)\n else:\n with codecs.open(self.GetTmpFilePath(fileName), 'w', encoding='utf-8') as f:\n f.write(data)",
"name": "WriteTmpFile(self,BaseToolsTest(unittest.TestCase):"
},
{
"body": " self.savedEnvPath = os.environ['PATH']\n self.savedSysPath = sys.path[:]\n for binPath in BaseToolsBinPaths:\n os.environ['PATH'] = \\\n os.path.pathsep.join((os.environ['PATH'], binPath))\n self.testDir = TestTempDir\n if not os.path.exists(self.testDir):\n os.mkdir(self.testDir)\n else:\n self.cleanOutDir(self.testDir)",
"name": "setUp(self):BaseToolsTest(unittest.TestCase):"
}
] |