sentence1
stringlengths
52
3.87M
sentence2
stringlengths
1
47.2k
label
stringclasses
1 value
def enhance(self): """ Function enhance Enhance the object with new item or enhanced items """ self.update({'parameters': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemParameter)}) self.update({'interfaces': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemInterface)}) self.update({'subnets': SubDict(self.api, self.objName, self.payloadObj, self.key, SubItemSubnet)})
Function enhance Enhance the object with new item or enhanced items
entailment
def kong(ctx, namespace, yes): """Update the kong configuration """ m = KongManager(ctx.obj['agile'], namespace=namespace) click.echo(utils.niceJson(m.create_kong(yes)))
Update the kong configuration
entailment
def schedule_task(self): """ Schedules this publish action as a Celery task. """ from .tasks import publish_task publish_task.apply_async(kwargs={'pk': self.pk}, eta=self.scheduled_time)
Schedules this publish action as a Celery task.
entailment
def get_version(self): """ Get the version object for the related object. """ return Version.objects.get( content_type=self.content_type, object_id=self.object_id, version_number=self.publish_version, )
Get the version object for the related object.
entailment
def _publish(self): """ Process a publish action on the related object, returns a boolean if a change is made. Only objects where a version change is needed will be updated. """ obj = self.content_object version = self.get_version() actioned = False # Only update if needed if obj.current_version != version: version = self.get_version() obj.current_version = version obj.save(update_fields=['current_version']) actioned = True return actioned
Process a publish action on the related object, returns a boolean if a change is made. Only objects where a version change is needed will be updated.
entailment
def _unpublish(self): """ Process an unpublish action on the related object, returns a boolean if a change is made. Only objects with a current active version will be updated. """ obj = self.content_object actioned = False # Only update if needed if obj.current_version is not None: obj.current_version = None obj.save(update_fields=['current_version']) actioned = True return actioned
Process an unpublish action on the related object, returns a boolean if a change is made. Only objects with a current active version will be updated.
entailment
def _log_action(self): """ Adds a log entry for this action to the object history in the Django admin. """ if self.publish_version == self.UNPUBLISH_CHOICE: message = 'Unpublished page (scheduled)' else: message = 'Published version {} (scheduled)'.format(self.publish_version) LogEntry.objects.log_action( user_id=self.user.pk, content_type_id=self.content_type.pk, object_id=self.object_id, object_repr=force_text(self.content_object), action_flag=CHANGE, change_message=message )
Adds a log entry for this action to the object history in the Django admin.
entailment
def process_action(self): """ Process the action and update the related object, returns a boolean if a change is made. """ if self.publish_version == self.UNPUBLISH_CHOICE: actioned = self._unpublish() else: actioned = self._publish() # Only log if an action was actually taken if actioned: self._log_action() return actioned
Process the action and update the related object, returns a boolean if a change is made.
entailment
def checkAndCreate(self, key, payload, hostgroupConf, hostgroupParent, puppetClassesId): """ Function checkAndCreate check And Create procedure for an hostgroup - check the hostgroup is not existing - create the hostgroup - Add puppet classes from puppetClassesId - Add params from hostgroupConf @param key: The hostgroup name or ID @param payload: The description of the hostgroup @param hostgroupConf: The configuration of the host group from the foreman.conf @param hostgroupParent: The id of the parent hostgroup @param puppetClassesId: The dict of puppet classes ids in foreman @return RETURN: The ItemHostsGroup object of an host """ if key not in self: self[key] = payload oid = self[key]['id'] if not oid: return False # Create Hostgroup classes if 'classes' in hostgroupConf.keys(): classList = list() for c in hostgroupConf['classes']: classList.append(puppetClassesId[c]) if not self[key].checkAndCreateClasses(classList): print("Failed in classes") return False # Set params if 'params' in hostgroupConf.keys(): if not self[key].checkAndCreateParams(hostgroupConf['params']): print("Failed in params") return False return oid
Function checkAndCreate check And Create procedure for an hostgroup - check the hostgroup is not existing - create the hostgroup - Add puppet classes from puppetClassesId - Add params from hostgroupConf @param key: The hostgroup name or ID @param payload: The description of the hostgroup @param hostgroupConf: The configuration of the host group from the foreman.conf @param hostgroupParent: The id of the parent hostgroup @param puppetClassesId: The dict of puppet classes ids in foreman @return RETURN: The ItemHostsGroup object of an host
entailment
def doublewrap(f): ''' a decorator decorator, allowing the decorator to be used as: @decorator(with, arguments, and=kwargs) or @decorator Ref: http://stackoverflow.com/questions/653368/how-to-create-a-python-decorator-that-can-be-used-either-with-or-without-paramet ''' @functools.wraps(f) def new_dec(*args, **kwargs): if len(args) == 1 and len(kwargs) == 0 and callable(args[0]): # actual decorated function return f(args[0]) else: # decorator arguments return lambda realf: f(realf, *args, **kwargs) return new_dec
a decorator decorator, allowing the decorator to be used as: @decorator(with, arguments, and=kwargs) or @decorator Ref: http://stackoverflow.com/questions/653368/how-to-create-a-python-decorator-that-can-be-used-either-with-or-without-paramet
entailment
def deprecated(func, msg=None): """ A decorator which can be used to mark functions as deprecated.It will result in a deprecation warning being shown when the function is used. """ message = msg or "Use of deprecated function '{}`.".format(func.__name__) @functools.wraps(func) def wrapper_func(*args, **kwargs): warnings.warn(message, DeprecationWarning, stacklevel=2) return func(*args, **kwargs) return wrapper_func
A decorator which can be used to mark functions as deprecated.It will result in a deprecation warning being shown when the function is used.
entailment
def create_handler(target: str): """Create a handler for logging to ``target``""" if target == 'stderr': return logging.StreamHandler(sys.stderr) elif target == 'stdout': return logging.StreamHandler(sys.stdout) else: return logging.handlers.WatchedFileHandler(filename=target)
Create a handler for logging to ``target``
entailment
def initialise_logging(level: str, target: str, short_format: bool): """Initialise basic logging facilities""" try: log_level = getattr(logging, level) except AttributeError: raise SystemExit( "invalid log level %r, expected any of 'DEBUG', 'INFO', 'WARNING', 'ERROR' or 'CRITICAL'" % level ) handler = create_handler(target=target) logging.basicConfig( level=log_level, format='%(asctime)-15s (%(process)d) %(message)s' if not short_format else '%(message)s', datefmt='%Y-%m-%d %H:%M:%S', handlers=[handler] )
Initialise basic logging facilities
entailment
def escape_dn_chars(s): """ Escape all DN special characters found in s with a back-slash (see RFC 4514, section 2.4) """ if s: assert isinstance(s, six.string_types) s = s.replace('\\', '\\\\') s = s.replace(',', '\\,') s = s.replace('+', '\\+') s = s.replace('"', '\\"') s = s.replace('<', '\\<') s = s.replace('>', '\\>') s = s.replace(';', '\\;') s = s.replace('=', '\\=') s = s.replace('\000', '\\\000') if s[0] == '#' or s[0] == ' ': s = ''.join(('\\', s)) if s[-1] == ' ': s = ''.join((s[:-1], '\\ ')) return s
Escape all DN special characters found in s with a back-slash (see RFC 4514, section 2.4)
entailment
def str2dn(dn, flags=0): """ This function takes a DN as string as parameter and returns a decomposed DN. It's the inverse to dn2str(). flags describes the format of the dn See also the OpenLDAP man-page ldap_str2dn(3) """ # if python2, we need unicode string if not isinstance(dn, six.text_type): dn = dn.decode("utf_8") assert flags == 0 result, i = _distinguishedName(dn, 0) if result is None: raise tldap.exceptions.InvalidDN("Cannot parse dn") if i != len(dn): raise tldap.exceptions.InvalidDN("Cannot parse dn past %s" % dn[i:]) return result
This function takes a DN as string as parameter and returns a decomposed DN. It's the inverse to dn2str(). flags describes the format of the dn See also the OpenLDAP man-page ldap_str2dn(3)
entailment
def dn2str(dn): """ This function takes a decomposed DN as parameter and returns a single string. It's the inverse to str2dn() but will always return a DN in LDAPv3 format compliant to RFC 4514. """ for rdn in dn: for atype, avalue, dummy in rdn: assert isinstance(atype, six.string_types) assert isinstance(avalue, six.string_types) assert dummy == 1 return ','.join([ '+'.join([ '='.join((atype, escape_dn_chars(avalue or ''))) for atype, avalue, dummy in rdn]) for rdn in dn ])
This function takes a decomposed DN as parameter and returns a single string. It's the inverse to str2dn() but will always return a DN in LDAPv3 format compliant to RFC 4514.
entailment
def explode_dn(dn, notypes=0, flags=0): """ explode_dn(dn [, notypes=0]) -> list This function takes a DN and breaks it up into its component parts. The notypes parameter is used to specify that only the component's attribute values be returned and not the attribute types. """ if not dn: return [] dn_decomp = str2dn(dn, flags) rdn_list = [] for rdn in dn_decomp: if notypes: rdn_list.append('+'.join([ escape_dn_chars(avalue or '') for atype, avalue, dummy in rdn ])) else: rdn_list.append('+'.join([ '='.join((atype, escape_dn_chars(avalue or ''))) for atype, avalue, dummy in rdn ])) return rdn_list
explode_dn(dn [, notypes=0]) -> list This function takes a DN and breaks it up into its component parts. The notypes parameter is used to specify that only the component's attribute values be returned and not the attribute types.
entailment
def explode_rdn(rdn, notypes=0, flags=0): """ explode_rdn(rdn [, notypes=0]) -> list This function takes a RDN and breaks it up into its component parts if it is a multi-valued RDN. The notypes parameter is used to specify that only the component's attribute values be returned and not the attribute types. """ if not rdn: return [] rdn_decomp = str2dn(rdn, flags)[0] if notypes: return [avalue or '' for atype, avalue, dummy in rdn_decomp] else: return ['='.join((atype, escape_dn_chars(avalue or ''))) for atype, avalue, dummy in rdn_decomp]
explode_rdn(rdn [, notypes=0]) -> list This function takes a RDN and breaks it up into its component parts if it is a multi-valued RDN. The notypes parameter is used to specify that only the component's attribute values be returned and not the attribute types.
entailment
def labels(ctx): """Crate or update labels in github """ config = ctx.obj['agile'] repos = config.get('repositories') labels = config.get('labels') if not isinstance(repos, list): raise CommandError( 'You need to specify the "repos" list in the config' ) if not isinstance(labels, dict): raise CommandError( 'You need to specify the "labels" dictionary in the config' ) git = GithubApi() for repo in repos: repo = git.repo(repo) for label, color in labels.items(): if repo.label(label, color): click.echo('Created label "%s" @ %s' % (label, repo)) else: click.echo('Updated label "%s" @ %s' % (label, repo))
Crate or update labels in github
entailment
def get_access_token(self, code): """Get new access token.""" try: self._token = super().fetch_token( MINUT_TOKEN_URL, client_id=self._client_id, client_secret=self._client_secret, code=code, ) # except Exception as e: except MissingTokenError as error: _LOGGER.debug("Token issues: %s", error) return self._token
Get new access token.
entailment
def _request(self, url, request_type='GET', **params): """Send a request to the Minut Point API.""" try: _LOGGER.debug('Request %s %s', url, params) response = self.request( request_type, url, timeout=TIMEOUT.seconds, **params) response.raise_for_status() _LOGGER.debug('Response %s %s %.200s', response.status_code, response.headers['content-type'], response.json()) response = response.json() if 'error' in response: raise OSError(response['error']) return response except OSError as error: _LOGGER.warning('Failed request: %s', error)
Send a request to the Minut Point API.
entailment
def _request_devices(self, url, _type): """Request list of devices.""" res = self._request(url) return res.get(_type) if res else {}
Request list of devices.
entailment
def read_sensor(self, device_id, sensor_uri): """Return sensor value based on sensor_uri.""" url = MINUT_DEVICES_URL + "/{device_id}/{sensor_uri}".format( device_id=device_id, sensor_uri=sensor_uri) res = self._request(url, request_type='GET', data={'limit': 1}) if not res.get('values'): return None return res.get('values')[-1].get('value')
Return sensor value based on sensor_uri.
entailment
def _register_webhook(self, webhook_url, events): """Register webhook.""" response = self._request( MINUT_WEBHOOKS_URL, request_type='POST', json={ 'url': webhook_url, 'events': events, }, ) return response
Register webhook.
entailment
def remove_webhook(self): """Remove webhook.""" if self._webhook.get('hook_id'): self._request( "{}/{}".format(MINUT_WEBHOOKS_URL, self._webhook['hook_id']), request_type='DELETE', )
Remove webhook.
entailment
def update_webhook(self, webhook_url, webhook_id, events=None): """Register webhook (if it doesn't exit).""" hooks = self._request(MINUT_WEBHOOKS_URL, request_type='GET')['hooks'] try: self._webhook = next( hook for hook in hooks if hook['url'] == webhook_url) _LOGGER.debug("Webhook: %s", self._webhook) except StopIteration: # Not found if events is None: events = [e for v in EVENTS.values() for e in v if e] self._webhook = self._register_webhook(webhook_url, events) _LOGGER.debug("Registered hook: %s", self._webhook) return self._webhook
Register webhook (if it doesn't exit).
entailment
def update(self): """Update all devices from server.""" with self._lock: devices = self._request_devices(MINUT_DEVICES_URL, 'devices') if devices: self._state = { device['device_id']: device for device in devices } _LOGGER.debug("Found devices: %s", list(self._state.keys())) # _LOGGER.debug("Device status: %s", devices) homes = self._request_devices(MINUT_HOMES_URL, 'homes') if homes: self._homes = homes return self.devices
Update all devices from server.
entailment
def _set_alarm(self, status, home_id): """Set alarm satus.""" response = self._request( MINUT_HOMES_URL + "/{}".format(home_id), request_type='PUT', json={'alarm_status': status}) return response.get('alarm_status', '') == status
Set alarm satus.
entailment
def sensor(self, sensor_type): """Update and return sensor value.""" _LOGGER.debug("Reading %s sensor.", sensor_type) return self._session.read_sensor(self.device_id, sensor_type)
Update and return sensor value.
entailment
def device_info(self): """Info about device.""" return { 'connections': {('mac', self.device['device_mac'])}, 'identifieres': self.device['device_id'], 'manufacturer': 'Minut', 'model': 'Point v{}'.format(self.device['hardware_version']), 'name': self.device['description'], 'sw_version': self.device['firmware']['installed'], }
Info about device.
entailment
def device_status(self): """Status of device.""" return { 'active': self.device['active'], 'offline': self.device['offline'], 'last_update': self.last_update, 'battery_level': self.battery_level, }
Status of device.
entailment
def glitter_head(context): """ Template tag which renders the glitter CSS and JavaScript. Any resources which need to be loaded should be added here. This is only shown to users with permission to edit the page. """ user = context.get('user') rendered = '' template_path = 'glitter/include/head.html' if user is not None and user.is_staff: template = context.template.engine.get_template(template_path) rendered = template.render(context) return rendered
Template tag which renders the glitter CSS and JavaScript. Any resources which need to be loaded should be added here. This is only shown to users with permission to edit the page.
entailment
def glitter_startbody(context): """ Template tag which renders the glitter overlay and sidebar. This is only shown to users with permission to edit the page. """ user = context.get('user') path_body = 'glitter/include/startbody.html' path_plus = 'glitter/include/startbody_%s_%s.html' rendered = '' if user is not None and user.is_staff: templates = [path_body] # We've got a page with a glitter object: # - May need a different startbody template # - Check if user has permission to add glitter = context.get('glitter') if glitter is not None: opts = glitter.obj._meta.app_label, glitter.obj._meta.model_name template_path = path_plus % opts templates.insert(0, template_path) template = context.template.engine.select_template(templates) rendered = template.render(context) return rendered
Template tag which renders the glitter overlay and sidebar. This is only shown to users with permission to edit the page.
entailment
def escape_filter_chars(assertion_value, escape_mode=0): """ Replace all special characters found in assertion_value by quoted notation. escape_mode If 0 only special chars mentioned in RFC 4515 are escaped. If 1 all NON-ASCII chars are escaped. If 2 all chars are escaped. """ if isinstance(assertion_value, six.text_type): assertion_value = assertion_value.encode("utf_8") s = [] for c in assertion_value: do_escape = False if str != bytes: # Python 3 pass else: # Python 2 c = ord(c) if escape_mode == 0: if c == ord('\\') or c == ord('*') \ or c == ord('(') or c == ord(')') \ or c == ord('\x00'): do_escape = True elif escape_mode == 1: if c < '0' or c > 'z' or c in "\\*()": do_escape = True elif escape_mode == 2: do_escape = True else: raise ValueError('escape_mode must be 0, 1 or 2.') if do_escape: s.append(b"\\%02x" % c) else: b = None if str != bytes: # Python 3 b = bytes([c]) else: # Python 2 b = chr(c) s.append(b) return b''.join(s)
Replace all special characters found in assertion_value by quoted notation. escape_mode If 0 only special chars mentioned in RFC 4515 are escaped. If 1 all NON-ASCII chars are escaped. If 2 all chars are escaped.
entailment
def filter_format(filter_template, assertion_values): """ filter_template String containing %s as placeholder for assertion values. assertion_values List or tuple of assertion values. Length must match count of %s in filter_template. """ assert isinstance(filter_template, bytes) return filter_template % ( tuple(map(escape_filter_chars, assertion_values)))
filter_template String containing %s as placeholder for assertion values. assertion_values List or tuple of assertion values. Length must match count of %s in filter_template.
entailment
def to_flake8(self, checker_cls: type) -> Flake8Error: """ Args: checker_cls: Class performing the check to be passed back to flake8. """ return Flake8Error( line_number=self.line_number, offset=self.offset, text=self.text, checker_cls=checker_cls, )
Args: checker_cls: Class performing the check to be passed back to flake8.
entailment
def add_chassis(self, chassis): """ :param chassis: chassis object """ res = self._request(RestMethod.post, self.user_url, params={'ip': chassis.ip, 'port': chassis.port}) assert(res.status_code == 201)
:param chassis: chassis object
entailment
def send_command(self, obj, command, *arguments): """ Send command with no output. :param obj: requested object. :param command: command to send. :param arguments: list of command arguments. """ self._perform_command('{}/{}'.format(self.session_url, obj.ref), command, OperReturnType.no_output, *arguments)
Send command with no output. :param obj: requested object. :param command: command to send. :param arguments: list of command arguments.
entailment
def send_command_return(self, obj, command, *arguments): """ Send command with single line output. :param obj: requested object. :param command: command to send. :param arguments: list of command arguments. :return: command output. """ return self._perform_command('{}/{}'.format(self.session_url, obj.ref), command, OperReturnType.line_output, *arguments).json()
Send command with single line output. :param obj: requested object. :param command: command to send. :param arguments: list of command arguments. :return: command output.
entailment
def send_command_return_multilines(self, obj, command, *arguments): """ Send command with no output. :param obj: requested object. :param command: command to send. :param arguments: list of command arguments. :return: list of command output lines. :rtype: list(str) """ return self._perform_command('{}/{}'.format(self.session_url, obj.ref), command, OperReturnType.multiline_output, *arguments).json()
Send command with no output. :param obj: requested object. :param command: command to send. :param arguments: list of command arguments. :return: list of command output lines. :rtype: list(str)
entailment
def get_attributes(self, obj): """ Get all object's attributes. Sends multi-parameter info/config queries and returns the result as dictionary. :param obj: requested object. :returns: dictionary of <name, value> of all attributes returned by the query. :rtype: dict of (str, str) """ return self._get_attributes('{}/{}'.format(self.session_url, obj.ref))
Get all object's attributes. Sends multi-parameter info/config queries and returns the result as dictionary. :param obj: requested object. :returns: dictionary of <name, value> of all attributes returned by the query. :rtype: dict of (str, str)
entailment
def set_attributes(self, obj, **attributes): """ Set attributes. :param obj: requested object. :param attributes: dictionary of {attribute: value} to set """ attributes_url = '{}/{}/attributes'.format(self.session_url, obj.ref) attributes_list = [{u'name': str(name), u'value': str(value)} for name, value in attributes.items()] self._request(RestMethod.patch, attributes_url, headers={'Content-Type': 'application/json'}, data=json.dumps(attributes_list))
Set attributes. :param obj: requested object. :param attributes: dictionary of {attribute: value} to set
entailment
def get_stats(self, obj, stat_name): """ Send CLI command that returns list of integer counters. :param obj: requested object. :param stat_name: statistics command name. :return: list of counters. :rtype: list(int) """ return [int(v) for v in self.send_command_return(obj, stat_name, '?').split()]
Send CLI command that returns list of integer counters. :param obj: requested object. :param stat_name: statistics command name. :return: list of counters. :rtype: list(int)
entailment
def init_common_services(self, with_cloud_account=True, zone_name=None): """ Initialize common service, When 'zone_name' is defined " at $zone_name" is added to service names :param bool with_cloud_account: :param str zone_name: :return: OR tuple(Workflow, Vault), OR tuple(Workflow, Vault, CloudAccount) with services """ zone_names = ZoneConstants(zone_name) type_to_app = lambda t: self.organization.applications[system_application_types.get(t, t)] wf_service = self.organization.service(name=zone_names.DEFAULT_WORKFLOW_SERVICE, application=type_to_app(WORKFLOW_SERVICE_TYPE), environment=self) key_service = self.organization.service(name=zone_names.DEFAULT_CREDENTIAL_SERVICE, application=type_to_app(COBALT_SECURE_STORE_TYPE), environment=self) assert wf_service.running() assert key_service.running() if not with_cloud_account: with self as env: env.add_service(wf_service, force=True) env.add_service(key_service, force=True) return wf_service, key_service cloud_account_service = self.organization.instance(name=zone_names.DEFAULT_CLOUD_ACCOUNT_SERVICE, application=type_to_app(CLOUD_ACCOUNT_TYPE), environment=self, parameters=PROVIDER_CONFIG, destroyInterval=0) # Imidiate adding to env cause CA not to drop destroy interval. Known issue 6132. So, add service as instance with # destroyInterval set to 'never' assert cloud_account_service.running() with self as env: env.add_service(wf_service, force=True) env.add_service(key_service, force=True) env.add_service(cloud_account_service, force=True) return wf_service, key_service, cloud_account_service
Initialize common service, When 'zone_name' is defined " at $zone_name" is added to service names :param bool with_cloud_account: :param str zone_name: :return: OR tuple(Workflow, Vault), OR tuple(Workflow, Vault, CloudAccount) with services
entailment
def clone(self, name=None): """ :param name: new env name :rtype: Environment """ resp = self._router.post_env_clone(env_id=self.environmentId, json=dict(name=name) if name else {}).json() return Environment(self.organization, id=resp['id']).init_router(self._router)
:param name: new env name :rtype: Environment
entailment
def default(self): """ Returns environment marked as default. When Zone is set marked default makes no sense, special env with proper Zone is returned. """ if ZONE_NAME: log.info("Getting or creating default environment for zone with name '{0}'".format(DEFAULT_ENV_NAME())) zone_id = self.organization.zones[ZONE_NAME].id return self.organization.get_or_create_environment(name=DEFAULT_ENV_NAME(), zone=zone_id) def_envs = [env_j["id"] for env_j in self.json() if env_j["isDefault"]] if len(def_envs) > 1: log.warning('Found more than one default environment. Picking last.') return self[def_envs[-1]] elif len(def_envs) == 1: return self[def_envs[0]] raise exceptions.NotFoundError('Unable to get default environment')
Returns environment marked as default. When Zone is set marked default makes no sense, special env with proper Zone is returned.
entailment
def build_urls(self: NodeVisitor, node: inheritance_diagram) -> Mapping[str, str]: """ Builds a mapping of class paths to URLs. """ current_filename = self.builder.current_docname + self.builder.out_suffix urls = {} for child in node: # Another document if child.get("refuri") is not None: uri = child.get("refuri") package_path = child["reftitle"] if uri.startswith("http"): _, _, package_path = uri.partition("#") else: uri = ( pathlib.Path("..") / pathlib.Path(current_filename).parent / pathlib.Path(uri) ) uri = str(uri).replace(os.path.sep, "/") urls[package_path] = uri # Same document elif child.get("refid") is not None: urls[child["reftitle"]] = ( "../" + current_filename + "#" + child.get("refid") ) return urls
Builds a mapping of class paths to URLs.
entailment
def html_visit_inheritance_diagram( self: NodeVisitor, node: inheritance_diagram ) -> None: """ Builds HTML output from an :py:class:`~uqbar.sphinx.inheritance.inheritance_diagram` node. """ inheritance_graph = node["graph"] urls = build_urls(self, node) graphviz_graph = inheritance_graph.build_graph(urls) dot_code = format(graphviz_graph, "graphviz") # TODO: We can perform unflattening here aspect_ratio = inheritance_graph.aspect_ratio if aspect_ratio: aspect_ratio = math.ceil(math.sqrt(aspect_ratio[1] / aspect_ratio[0])) if aspect_ratio > 1: process = subprocess.Popen( ["unflatten", "-l", str(aspect_ratio), "-c", str(aspect_ratio), "-f"], stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE, ) stdout, stderr = process.communicate(dot_code.encode()) dot_code = stdout.decode() render_dot_html(self, node, dot_code, {}, "inheritance", "inheritance") raise SkipNode
Builds HTML output from an :py:class:`~uqbar.sphinx.inheritance.inheritance_diagram` node.
entailment
def latex_visit_inheritance_diagram( self: NodeVisitor, node: inheritance_diagram ) -> None: """ Builds LaTeX output from an :py:class:`~uqbar.sphinx.inheritance.inheritance_diagram` node. """ inheritance_graph = node["graph"] graphviz_graph = inheritance_graph.build_graph() graphviz_graph.attributes["size"] = 6.0 dot_code = format(graphviz_graph, "graphviz") render_dot_latex(self, node, dot_code, {}, "inheritance") raise SkipNode
Builds LaTeX output from an :py:class:`~uqbar.sphinx.inheritance.inheritance_diagram` node.
entailment
def setup(app) -> Dict[str, Any]: """ Sets up Sphinx extension. """ app.setup_extension("sphinx.ext.graphviz") app.add_node( inheritance_diagram, html=(html_visit_inheritance_diagram, None), latex=(latex_visit_inheritance_diagram, None), man=(skip, None), texinfo=(skip, None), text=(skip, None), ) app.add_directive("inheritance-diagram", InheritanceDiagram) return { "version": uqbar.__version__, "parallel_read_safe": True, "parallel_write_safe": True, }
Sets up Sphinx extension.
entailment
def run(configuration: str, level: str, target: str, short_format: bool): """Run the daemon and all its services""" initialise_logging(level=level, target=target, short_format=short_format) logger = logging.getLogger(__package__) logger.info('COBalD %s', cobald.__about__.__version__) logger.info(cobald.__about__.__url__) logger.info('%s %s (%s)', platform.python_implementation(), platform.python_version(), sys.executable) logger.debug(cobald.__file__) logger.info('Using configuration %s', configuration) with load(configuration): logger.info('Starting daemon services...') runtime.accept()
Run the daemon and all its services
entailment
def cli_run(): """Run the daemon from a command line interface""" options = CLI.parse_args() run(options.CONFIGURATION, options.log_level, options.log_target, options.log_journal)
Run the daemon from a command line interface
entailment
def build_body(cls: Type[AN], body: List[ast.stmt]) -> List: """ Note: Return type is probably ``-> List[AN]``, but can't get it to pass. """ act_nodes = [] # type: List[ActNode] for child_node in body: act_nodes += ActNode.build(child_node) return act_nodes
Note: Return type is probably ``-> List[AN]``, but can't get it to pass.
entailment
def build(cls: Type[AN], node: ast.stmt) -> List[AN]: """ Starting at this ``node``, check if it's an act node. If it's a context manager, recurse into child nodes. Returns: List of all act nodes found. """ if node_is_result_assignment(node): return [cls(node, ActNodeType.result_assignment)] if node_is_pytest_raises(node): return [cls(node, ActNodeType.pytest_raises)] if node_is_unittest_raises(node): return [cls(node, ActNodeType.unittest_raises)] token = node.first_token # type: ignore # Check if line marked with '# act' if token.line.strip().endswith('# act'): return [cls(node, ActNodeType.marked_act)] # Recurse (downwards) if it's a context manager if isinstance(node, ast.With): return cls.build_body(node.body) return []
Starting at this ``node``, check if it's an act node. If it's a context manager, recurse into child nodes. Returns: List of all act nodes found.
entailment
def ready(self): """ Checks if organization properly created. Note: New organization must have 'default' environment and two default services running there. Cannot use DEFAULT_ENV_NAME, because zone could be added there. :rtype: bool """ @retry(tries=3, retry_exception=exceptions.NotFoundError) # org init, takes some times def check_init(): env = self.environments['default'] return env.services['Default workflow service'].running(timeout=1) and \ env.services['Default credentials service'].running(timeout=1) return check_init()
Checks if organization properly created. Note: New organization must have 'default' environment and two default services running there. Cannot use DEFAULT_ENV_NAME, because zone could be added there. :rtype: bool
entailment
def create_application(self, name=None, manifest=None): """ Creates application and returns Application object. """ if not manifest: raise exceptions.NotEnoughParams('Manifest not set') if not name: name = 'auto-generated-name' from qubell.api.private.application import Application return Application.new(self, name, manifest, self._router)
Creates application and returns Application object.
entailment
def get_application(self, id=None, name=None): """ Get application object by name or id. """ log.info("Picking application: %s (%s)" % (name, id)) return self.applications[id or name]
Get application object by name or id.
entailment
def get_or_create_application(self, id=None, manifest=None, name=None): """ Get application by id or name. If not found: create with given or generated parameters """ if id: return self.get_application(id=id) elif name: try: app = self.get_application(name=name) except exceptions.NotFoundError: app = self.create_application(name=name, manifest=manifest) return app raise exceptions.NotEnoughParams('Not enough parameters')
Get application by id or name. If not found: create with given or generated parameters
entailment
def application(self, id=None, manifest=None, name=None): """ Smart method. Creates, picks or modifies application. If application found by name or id and manifest not changed: return app. If app found by id, but other parameters differs: change them. If no application found, create. """ modify = False found = False # Try to find application by name or id if name and id: found = self.get_application(id=id) if not found.name == name: modify = True elif id: found = self.get_application(id=id) name = found.name elif name: try: found = self.get_application(name=name) except exceptions.NotFoundError: pass # If found - compare parameters if found: if manifest and not manifest == found.manifest: modify = True # We need to update application if found and modify: found.update(name=name, manifest=manifest) if not found: created = self.create_application(name=name, manifest=manifest) return found or created
Smart method. Creates, picks or modifies application. If application found by name or id and manifest not changed: return app. If app found by id, but other parameters differs: change them. If no application found, create.
entailment
def create_instance(self, application, revision=None, environment=None, name=None, parameters=None, submodules=None, destroyInterval=None, manifestVersion=None): """ Launches instance in application and returns Instance object. """ from qubell.api.private.instance import Instance return Instance.new(self._router, application, revision, environment, name, parameters, submodules, destroyInterval, manifestVersion=manifestVersion)
Launches instance in application and returns Instance object.
entailment
def get_instance(self, id=None, name=None): """ Get instance object by name or id. If application set, search within the application. """ log.info("Picking instance: %s (%s)" % (name, id)) if id: # submodule instances are invisible for lists return Instance(id=id, organization=self).init_router(self._router) return Instance.get(self._router, self, name)
Get instance object by name or id. If application set, search within the application.
entailment
def list_instances_json(self, application=None, show_only_destroyed=False): """ Get list of instances in json format converted to list""" # todo: application should not be parameter here. Application should do its own list, just in sake of code reuse q_filter = {'sortBy': 'byCreation', 'descending': 'true', 'mode': 'short', 'from': '0', 'to': '10000'} if not show_only_destroyed: q_filter['showDestroyed'] = 'false' else: q_filter['showDestroyed'] = 'true' q_filter['showRunning'] = 'false' q_filter['showError'] = 'false' q_filter['showLaunching'] = 'false' if application: q_filter["applicationFilterId"] = application.applicationId resp_json = self._router.get_instances(org_id=self.organizationId, params=q_filter).json() if type(resp_json) == dict: instances = [instance for g in resp_json['groups'] for instance in g['records']] else: # TODO: This is compatibility fix for platform < 37.1 instances = resp_json return instances
Get list of instances in json format converted to list
entailment
def get_or_create_instance(self, id=None, application=None, revision=None, environment=None, name=None, parameters=None, submodules=None, destroyInterval=None): """ Get instance by id or name. If not found: create with given parameters """ try: instance = self.get_instance(id=id, name=name) if name and name != instance.name: instance.rename(name) instance.ready() return instance except exceptions.NotFoundError: return self.create_instance(application, revision, environment, name, parameters, submodules, destroyInterval)
Get instance by id or name. If not found: create with given parameters
entailment
def instance(self, id=None, application=None, name=None, revision=None, environment=None, parameters=None, submodules=None, destroyInterval=None): """ Smart method. It does everything, to return Instance with given parameters within the application. If instance found running and given parameters are actual: return it. If instance found, but parameters differs - reconfigure instance with new parameters. If instance not found: launch instance with given parameters. Return: Instance object. """ instance = self.get_or_create_instance(id, application, revision, environment, name, parameters, submodules, destroyInterval) reconfigure = False # if found: # if revision and revision is not found.revision: # reconfigure = True # if parameters and parameters is not found.parameters: # reconfigure = True # We need to reconfigure instance if reconfigure: instance.reconfigure(revision=revision, parameters=parameters) return instance
Smart method. It does everything, to return Instance with given parameters within the application. If instance found running and given parameters are actual: return it. If instance found, but parameters differs - reconfigure instance with new parameters. If instance not found: launch instance with given parameters. Return: Instance object.
entailment
def create_environment(self, name, default=False, zone=None): """ Creates environment and returns Environment object. """ from qubell.api.private.environment import Environment return Environment.new(organization=self, name=name, zone_id=zone, default=default, router=self._router)
Creates environment and returns Environment object.
entailment
def get_environment(self, id=None, name=None): """ Get environment object by name or id. """ log.info("Picking environment: %s (%s)" % (name, id)) return self.environments[id or name]
Get environment object by name or id.
entailment
def get_or_create_environment(self, id=None, name=None, zone=None, default=False): """ Get environment by id or name. If not found: create with given or generated parameters """ if id: return self.get_environment(id=id) elif name: try: env = self.get_environment(name=name) self._assert_env_and_zone(env, zone) except exceptions.NotFoundError: env = self.create_environment(name=name, zone=zone, default=default) return env else: name = 'auto-generated-env' return self.create_environment(name=name, zone=zone, default=default)
Get environment by id or name. If not found: create with given or generated parameters
entailment
def environment(self, id=None, name=None, zone=None, default=False): """ Smart method. Creates, picks or modifies environment. If environment found by name or id parameters not changed: return env. If env found by id, but other parameters differs: change them. If no environment found, create with given parameters. """ found = False # Try to find environment by name or id if name and id: found = self.get_environment(id=id) elif id: found = self.get_environment(id=id) name = found.name elif name: try: found = self.get_environment(name=name) except exceptions.NotFoundError: pass # If found - compare parameters if found: self._assert_env_and_zone(found, zone) if default and not found.isDefault: found.set_as_default() # TODO: add abilities to change name. if not found: created = self.create_environment(name=name, zone=zone, default=default) return found or created
Smart method. Creates, picks or modifies environment. If environment found by name or id parameters not changed: return env. If env found by id, but other parameters differs: change them. If no environment found, create with given parameters.
entailment
def get_zone(self, id=None, name=None): """ Get zone object by name or id. """ log.info("Picking zone: %s (%s)" % (name, id)) return self.zones[id or name]
Get zone object by name or id.
entailment
def create_role(self, name=None, permissions=""): """ Creates role """ name = name or "autocreated-role" from qubell.api.private.role import Role return Role.new(self._router, organization=self, name=name, permissions=permissions)
Creates role
entailment
def get_role(self, id=None, name=None): """ Get role object by name or id. """ log.info("Picking role: %s (%s)" % (name, id)) return self.roles[id or name]
Get role object by name or id.
entailment
def get_user(self, id=None, name=None, email=None): """ Get user object by email or id. """ log.info("Picking user: %s (%s) (%s)" % (name, email, id)) from qubell.api.private.user import User if email: user = User.get(self._router, organization=self, email=email) else: user = self.users[id or name] return user
Get user object by email or id.
entailment
def invite(self, email, roles=None): """ Send invitation to email with a list of roles :param email: :param roles: None or "ALL" or list of role_names :return: """ if roles is None: role_ids = [self.roles['Guest'].roleId] elif roles == "ALL": role_ids = list([i.id for i in self.roles]) else: if "Guest" not in roles: roles.append('Guest') role_ids = list([i.id for i in self.roles if i.name in roles]) self._router.invite_user(data=json.dumps({ "organizationId": self.organizationId, "email": email, "roles": role_ids}))
Send invitation to email with a list of roles :param email: :param roles: None or "ALL" or list of role_names :return:
entailment
def init(self, access_key=None, secret_key=None): """ Mimics wizard's environment preparation """ if not access_key and not secret_key: self._router.post_init(org_id=self.organizationId, data='{"initCloudAccount": true}') else: self._router.post_init(org_id=self.organizationId, data='{}') ca_data = dict(accessKey=access_key, secretKey=secret_key) self._router.post_init_custom_cloud_account(org_id=self.organizationId, data=json.dumps(ca_data))
Mimics wizard's environment preparation
entailment
def set_applications_from_meta(self, metadata, exclude=None): """ Parses meta and update or create each application :param str metadata: path or url to meta.yml :param list[str] exclude: List of application names, to exclude from meta. This might be need when you use meta as list of dependencies """ if not exclude: exclude = [] if metadata.startswith('http'): meta = yaml.safe_load(requests.get(url=metadata).content) else: # noinspection PyArgumentEqualDefault meta = yaml.safe_load(open(metadata, 'r').read()) applications = [] for app in meta['kit']['applications']: if app['name'] not in exclude: applications.append({ 'name': app['name'], 'url': app['manifest']}) self.restore({'applications': applications})
Parses meta and update or create each application :param str metadata: path or url to meta.yml :param list[str] exclude: List of application names, to exclude from meta. This might be need when you use meta as list of dependencies
entailment
def upload_applications(self, metadata, category=None): """ Mimics get starter-kit and wizard functionality to create components Note: may create component duplicates, not idempotent :type metadata: str :type category: Category :param metadata: url to meta.yml :param category: category """ upload_json = self._router.get_upload(params=dict(metadataUrl=metadata)).json() manifests = [dict(name=app['name'], manifest=app['url']) for app in upload_json['applications']] if not category: category = self.categories['Application'] data = {'categoryId': category.id, 'applications': manifests} self._router.post_application_kits(org_id=self.organizationId, data=json.dumps(data))
Mimics get starter-kit and wizard functionality to create components Note: may create component duplicates, not idempotent :type metadata: str :type category: Category :param metadata: url to meta.yml :param category: category
entailment
def process_response(self, request, response): """Commits and leaves transaction management.""" if tldap.transaction.is_managed(): tldap.transaction.commit() tldap.transaction.leave_transaction_management() return response
Commits and leaves transaction management.
entailment
def line_protocol(name, tags: dict = None, fields: dict = None, timestamp: float = None) -> str: """ Format a report as per InfluxDB line protocol :param name: name of the report :param tags: tags identifying the specific report :param fields: measurements of the report :param timestamp: when the measurement was taken, in **seconds** since the epoch """ output_str = name if tags: output_str += ',' output_str += ','.join('%s=%s' % (key, value) for key, value in sorted(tags.items())) output_str += ' ' output_str += ','.join(('%s=%r' % (key, value)).replace("'", '"') for key, value in sorted(fields.items())) if timestamp is not None: # line protocol requires nanosecond precision, python uses seconds output_str += ' %d' % (timestamp * 1E9) return output_str + '\n'
Format a report as per InfluxDB line protocol :param name: name of the report :param tags: tags identifying the specific report :param fields: measurements of the report :param timestamp: when the measurement was taken, in **seconds** since the epoch
entailment
def block_type(self): """ This gets display on the block header. """ return capfirst(force_text( self.content_block.content_type.model_class()._meta.verbose_name ))
This gets display on the block header.
entailment
def get_default_blocks(self, top=False): """ Return a list of column default block tuples (URL, verbose name). Used for quick add block buttons. """ default_blocks = [] for block_model, block_name in self.glitter_page.default_blocks: block = apps.get_model(block_model) base_url = reverse('block_admin:{}_{}_add'.format( block._meta.app_label, block._meta.model_name, ), kwargs={ 'version_id': self.glitter_page.version.id, }) block_qs = { 'column': self.name, 'top': top, } block_url = '{}?{}'.format(base_url, urlencode(block_qs)) block_text = capfirst(force_text(block._meta.verbose_name)) default_blocks.append((block_url, block_text)) return default_blocks
Return a list of column default block tuples (URL, verbose name). Used for quick add block buttons.
entailment
def add_block_widget(self, top=False): """ Return a select widget for blocks which can be added to this column. """ widget = AddBlockSelect(attrs={ 'class': 'glitter-add-block-select', }, choices=self.add_block_options(top=top)) return widget.render(name='', value=None)
Return a select widget for blocks which can be added to this column.
entailment
def add_block_options(self, top): """ Return a list of URLs and titles for blocks which can be added to this column. All available blocks are grouped by block category. """ from .blockadmin import blocks block_choices = [] # Group all block by category for category in sorted(blocks.site.block_list): category_blocks = blocks.site.block_list[category] category_choices = [] for block in category_blocks: base_url = reverse('block_admin:{}_{}_add'.format( block._meta.app_label, block._meta.model_name, ), kwargs={ 'version_id': self.glitter_page.version.id, }) block_qs = { 'column': self.name, 'top': top, } block_url = '{}?{}'.format(base_url, urlencode(block_qs)) block_text = capfirst(force_text(block._meta.verbose_name)) category_choices.append((block_url, block_text)) category_choices = sorted(category_choices, key=lambda x: x[1]) block_choices.append((category, category_choices)) return block_choices
Return a list of URLs and titles for blocks which can be added to this column. All available blocks are grouped by block category.
entailment
def default_blocks(self): """ Return a list of default block tuples (appname.ModelName, verbose name). Next to the dropdown list of block types, a small number of common blocks which are frequently used can be added immediately to a column with one click. This method defines the list of default blocks. """ # Use the block list provided by settings if it's defined block_list = getattr(settings, 'GLITTER_DEFAULT_BLOCKS', None) if block_list is not None: return block_list # Try and auto fill in default blocks if the apps are installed block_list = [] for block in GLITTER_FALLBACK_BLOCKS: app_name, model_name = block.split('.') try: model_class = apps.get_model(app_name, model_name) verbose_name = capfirst(model_class._meta.verbose_name) block_list.append((block, verbose_name)) except LookupError: # Block isn't installed - don't add it as a quick add default pass return block_list
Return a list of default block tuples (appname.ModelName, verbose name). Next to the dropdown list of block types, a small number of common blocks which are frequently used can be added immediately to a column with one click. This method defines the list of default blocks.
entailment
def has_add_permission(self): """ Returns a boolean if the current user has permission to add another object of the same type which is being viewed/edited. """ has_permission = False if self.user is not None: # We don't check for the object level permission - as the add permission doesn't make # sense on a per object level here. has_permission = self.user.has_perm( '{}.add_{}'.format(self.opts.app_label, self.opts.model_name) ) return has_permission
Returns a boolean if the current user has permission to add another object of the same type which is being viewed/edited.
entailment
def has_change_permission(self): """ Returns a boolean if the current user has permission to change the current object being viewed/edited. """ has_permission = False if self.user is not None: # We check for the object level permission here, even though by default the Django # admin doesn't. If the Django ModelAdmin is extended to allow object level # permissions - then this will work as expected. permission_name = '{}.change_{}'.format(self.opts.app_label, self.opts.model_name) has_permission = ( self.user.has_perm(permission_name) or self.user.has_perm(permission_name, obj=self.obj) ) return has_permission
Returns a boolean if the current user has permission to change the current object being viewed/edited.
entailment
def get_embed_url(self): """ Get correct embed url for Youtube or Vimeo. """ embed_url = None youtube_embed_url = 'https://www.youtube.com/embed/{}' vimeo_embed_url = 'https://player.vimeo.com/video/{}' # Get video ID from url. if re.match(YOUTUBE_URL_RE, self.url): embed_url = youtube_embed_url.format(re.match(YOUTUBE_URL_RE, self.url).group(2)) if re.match(VIMEO_URL_RE, self.url): embed_url = vimeo_embed_url.format(re.match(VIMEO_URL_RE, self.url).group(3)) return embed_url
Get correct embed url for Youtube or Vimeo.
entailment
def save(self, force_insert=False, force_update=False, using=None, update_fields=None): """ Set html field with correct iframe. """ if self.url: iframe_html = '<iframe src="{}" frameborder="0" title="{}" allowfullscreen></iframe>' self.html = iframe_html.format( self.get_embed_url(), self.title ) return super().save(force_insert, force_update, using, update_fields)
Set html field with correct iframe.
entailment
def _get_ip(): """Get IP address for the docker host """ cmd_netstat = ['netstat', '-nr'] p1 = subprocess.Popen(cmd_netstat, stdout=subprocess.PIPE) cmd_grep = ['grep', '^0\.0\.0\.0'] p2 = subprocess.Popen(cmd_grep, stdin=p1.stdout, stdout=subprocess.PIPE) cmd_awk = ['awk', '{ print $2 }'] p3 = subprocess.Popen(cmd_awk, stdin=p2.stdout, stdout=subprocess.PIPE) galaxy_ip = p3.stdout.read() log.debug('Host IP determined to be %s', galaxy_ip) return galaxy_ip
Get IP address for the docker host
entailment
def get_galaxy_connection(history_id=None, obj=True): """ Given access to the configuration dict that galaxy passed us, we try and connect to galaxy's API. First we try connecting to galaxy directly, using an IP address given us by docker (since the galaxy host is the default gateway for docker). Using additional information collected by galaxy like the port it is running on and the application path, we build a galaxy URL and test our connection by attempting to get a history listing. This is done to avoid any nasty network configuration that a SysAdmin has placed between galaxy and us inside docker, like disabling API queries. If that fails, we failover to using the URL the user is accessing through. This will succeed where the previous connection fails under the conditions of REMOTE_USER and galaxy running under uWSGI. """ history_id = history_id or os.environ['HISTORY_ID'] key = os.environ['API_KEY'] ### Customised/Raw galaxy_url ### galaxy_ip = _get_ip() # Substitute $DOCKER_HOST with real IP url = Template(os.environ['GALAXY_URL']).safe_substitute({'DOCKER_HOST': galaxy_ip}) gi = _test_url(url, key, history_id, obj=obj) if gi is not None: return gi ### Failover, fully auto-detected URL ### # Remove trailing slashes app_path = os.environ['GALAXY_URL'].rstrip('/') # Remove protocol+host:port if included app_path = ''.join(app_path.split('/')[3:]) if 'GALAXY_WEB_PORT' not in os.environ: # We've failed to detect a port in the config we were given by # galaxy, so we won't be able to construct a valid URL raise Exception("No port") else: # We should be able to find a port to connect to galaxy on via this # conf var: galaxy_paster_port galaxy_port = os.environ['GALAXY_WEB_PORT'] built_galaxy_url = 'http://%s:%s/%s' % (galaxy_ip.strip(), galaxy_port, app_path.strip()) url = built_galaxy_url.rstrip('/') gi = _test_url(url, key, history_id, obj=obj) if gi is not None: return gi ### Fail ### msg = "Could not connect to a galaxy instance. Please contact your SysAdmin for help with this error" raise Exception(msg)
Given access to the configuration dict that galaxy passed us, we try and connect to galaxy's API. First we try connecting to galaxy directly, using an IP address given us by docker (since the galaxy host is the default gateway for docker). Using additional information collected by galaxy like the port it is running on and the application path, we build a galaxy URL and test our connection by attempting to get a history listing. This is done to avoid any nasty network configuration that a SysAdmin has placed between galaxy and us inside docker, like disabling API queries. If that fails, we failover to using the URL the user is accessing through. This will succeed where the previous connection fails under the conditions of REMOTE_USER and galaxy running under uWSGI.
entailment
def put(filenames, file_type='auto', history_id=None): """ Given filename[s] of any file accessible to the docker instance, this function will upload that file[s] to galaxy using the current history. Does not return anything. """ history_id = history_id or os.environ['HISTORY_ID'] gi = get_galaxy_connection(history_id=history_id) for filename in filenames: log.debug('Uploading gx=%s history=%s localpath=%s ft=%s', gi, history_id, filename, file_type) history = gi.histories.get(history_id) history.upload_dataset(filename, file_type=file_type)
Given filename[s] of any file accessible to the docker instance, this function will upload that file[s] to galaxy using the current history. Does not return anything.
entailment
def get(datasets_identifiers, identifier_type='hid', history_id=None): """ Given the history_id that is displayed to the user, this function will download the file[s] from the history and stores them under /import/ Return value[s] are the path[s] to the dataset[s] stored under /import/ """ history_id = history_id or os.environ['HISTORY_ID'] # The object version of bioblend is to slow in retrieving all datasets from a history # fallback to the non-object path gi = get_galaxy_connection(history_id=history_id, obj=False) for dataset_identifier in datasets_identifiers: file_path = '/import/%s' % dataset_identifier log.debug('Downloading gx=%s history=%s dataset=%s', gi, history_id, dataset_identifier) # Cache the file requests. E.g. in the example of someone doing something # silly like a get() for a Galaxy file in a for-loop, wouldn't want to # re-download every time and add that overhead. if not os.path.exists(file_path): hc = HistoryClient(gi) dc = DatasetClient(gi) history = hc.show_history(history_id, contents=True) datasets = {ds[identifier_type]: ds['id'] for ds in history} if identifier_type == 'hid': dataset_identifier = int(dataset_identifier) dc.download_dataset(datasets[dataset_identifier], file_path=file_path, use_default_filename=False) else: log.debug('Cached, not re-downloading') return file_path
Given the history_id that is displayed to the user, this function will download the file[s] from the history and stores them under /import/ Return value[s] are the path[s] to the dataset[s] stored under /import/
entailment
def get_user_history (history_id=None): """ Get all visible dataset infos of user history. Return a list of dict of each dataset. """ history_id = history_id or os.environ['HISTORY_ID'] gi = get_galaxy_connection(history_id=history_id, obj=False) hc = HistoryClient(gi) history = hc.show_history(history_id, visible=True, contents=True) return history
Get all visible dataset infos of user history. Return a list of dict of each dataset.
entailment
def build_act(cls: Type[_Block], node: ast.stmt, test_func_node: ast.FunctionDef) -> _Block: """ Act block is a single node - either the act node itself, or the node that wraps the act node. """ add_node_parents(test_func_node) # Walk up the parent nodes of the parent node to find test's definition. act_block_node = node while act_block_node.parent != test_func_node: # type: ignore act_block_node = act_block_node.parent # type: ignore return cls([act_block_node], LineType.act)
Act block is a single node - either the act node itself, or the node that wraps the act node.
entailment
def build_arrange(cls: Type[_Block], nodes: List[ast.stmt], max_line_number: int) -> _Block: """ Arrange block is all non-pass and non-docstring nodes before the Act block start. """ return cls(filter_arrange_nodes(nodes, max_line_number), LineType.arrange)
Arrange block is all non-pass and non-docstring nodes before the Act block start.
entailment
def build_assert(cls: Type[_Block], nodes: List[ast.stmt], min_line_number: int) -> _Block: """ Assert block is all nodes that are after the Act node. Note: The filtering is *still* running off the line number of the Act node, when instead it should be using the last line of the Act block. """ return cls(filter_assert_nodes(nodes, min_line_number), LineType._assert)
Assert block is all nodes that are after the Act node. Note: The filtering is *still* running off the line number of the Act node, when instead it should be using the last line of the Act block.
entailment
def get_span(self, first_line_no: int) -> Tuple[int, int]: """ Raises: EmptyBlock: when block has no nodes """ if not self.nodes: raise EmptyBlock('span requested from {} block with no nodes'.format(self.line_type)) return ( get_first_token(self.nodes[0]).start[0] - first_line_no, get_last_token(self.nodes[-1]).start[0] - first_line_no, )
Raises: EmptyBlock: when block has no nodes
entailment
def cli_aliases(self): r"""Developer script aliases. """ scripting_groups = [] aliases = {} for cli_class in self.cli_classes: instance = cli_class() if getattr(instance, "alias", None): scripting_group = getattr(instance, "scripting_group", None) if scripting_group: scripting_groups.append(scripting_group) entry = (scripting_group, instance.alias) if (scripting_group,) in aliases: message = "alias conflict between scripting group" message += " {!r} and {}" message = message.format( scripting_group, aliases[(scripting_group,)].__name__ ) raise Exception(message) if entry in aliases: message = "alias conflict between {} and {}" message = message.format( aliases[entry].__name__, cli_class.__name__ ) raise Exception(message) aliases[entry] = cli_class else: entry = (instance.alias,) if entry in scripting_groups: message = "alias conflict between {}" message += " and scripting group {!r}" message = message.format(cli_class.__name__, instance.alias) raise Exception(message) if entry in aliases: message = "alias conflict be {} and {}" message = message.format(cli_class.__name__, aliases[entry]) raise Exception(message) aliases[(instance.alias,)] = cli_class else: if instance.program_name in scripting_groups: message = "Alias conflict between {}" message += " and scripting group {!r}" message = message.format(cli_class.__name__, instance.program_name) raise Exception(message) aliases[(instance.program_name,)] = cli_class alias_map = {} for key, value in aliases.items(): if len(key) == 1: alias_map[key[0]] = value else: if key[0] not in alias_map: alias_map[key[0]] = {} alias_map[key[0]][key[1]] = value return alias_map
r"""Developer script aliases.
entailment
def cli_program_names(self): r"""Developer script program names. """ program_names = {} for cli_class in self.cli_classes: instance = cli_class() program_names[instance.program_name] = cli_class return program_names
r"""Developer script program names.
entailment
def get_flat_stats(self): """ :return: statistics as flat table {port/strea,/tpld name {group_stat name: value}} """ flat_stats = OrderedDict() for obj, port_stats in self.statistics.items(): flat_obj_stats = OrderedDict() for group_name, group_values in port_stats.items(): for stat_name, stat_value in group_values.items(): full_stat_name = group_name + '_' + stat_name flat_obj_stats[full_stat_name] = stat_value flat_stats[obj.name] = flat_obj_stats return flat_stats
:return: statistics as flat table {port/strea,/tpld name {group_stat name: value}}
entailment
def read_stats(self): """ Read current ports statistics from chassis. :return: dictionary {port name {group name, {stat name: stat value}}} """ self.statistics = TgnObjectsDict() for port in self.session.ports.values(): self.statistics[port] = port.read_port_stats() return self.statistics
Read current ports statistics from chassis. :return: dictionary {port name {group name, {stat name: stat value}}}
entailment