Column        Type      Value range
text          string    lengths 89 to 104k
code_tokens   sequence  token list per sample
avg_line_len  float64   7.91 to 980
score         float64   0 to 630
def metadata_add_endpoints(self):
    """ Metadata: add endpoint to the group """
    metadata = load_mpe_service_metadata(self.args.metadata_file)
    group_name = metadata.get_group_name_nonetrick(self.args.group_name)
    for endpoint in self.args.endpoints:
        metadata.add_endpoint(group_name, endpoint)
    metadata.save_pretty(self.args.metadata_file)
[ "def", "metadata_add_endpoints", "(", "self", ")", ":", "metadata", "=", "load_mpe_service_metadata", "(", "self", ".", "args", ".", "metadata_file", ")", "group_name", "=", "metadata", ".", "get_group_name_nonetrick", "(", "self", ".", "args", ".", "group_name", ")", "for", "endpoint", "in", "self", ".", "args", ".", "endpoints", ":", "metadata", ".", "add_endpoint", "(", "group_name", ",", "endpoint", ")", "metadata", ".", "save_pretty", "(", "self", ".", "args", ".", "metadata_file", ")" ]
avg_line_len: 54.428571
score: 14.857143
def prepare_request(node):
    """ Prepare request to node's API route

    :param Node node: the RAML node object
    """
    if node.resource.method not in AVAILABLE_METHODS:
        raise UnsupportedHTTPMethodError(node.resource.method)

    def request(data=None, json=None, **kwargs):
        """
        Make request to node's API route with the given keyword arguments
        """
        # validate given query parameters
        for key, value in kwargs.items():
            param = next((p for p in node.resource.query_params if p.name == key), None)
            if not param:
                raise UnsupportedQueryParameter(node.resource.path, key)
            if not match_type(value, param.type):
                raise TypeError(
                    "Resource Query Parameter has type '{0}' but expected type '{1}'".format(
                        value.__class__.__name__, param.type))
        response = requests.request(node.resource.method, node.resource.absolute_uri,
                                    params=kwargs, data=data, json=json)
        return response

    return request
[ "def", "prepare_request", "(", "node", ")", ":", "if", "node", ".", "resource", ".", "method", "not", "in", "AVAILABLE_METHODS", ":", "raise", "UnsupportedHTTPMethodError", "(", "node", ".", "resource", ".", "method", ")", "def", "request", "(", "data", "=", "None", ",", "json", "=", "None", ",", "*", "*", "kwargs", ")", ":", "\"\"\"\n Make request to node's API route with the given keyword arguments\n \"\"\"", "# validate given query parameters", "for", "key", ",", "value", "in", "kwargs", ".", "items", "(", ")", ":", "param", "=", "next", "(", "(", "p", "for", "p", "in", "node", ".", "resource", ".", "query_params", "if", "p", ".", "name", "==", "key", ")", ",", "None", ")", "if", "not", "param", ":", "raise", "UnsupportedQueryParameter", "(", "node", ".", "resource", ".", "path", ",", "key", ")", "if", "not", "match_type", "(", "value", ",", "param", ".", "type", ")", ":", "raise", "TypeError", "(", "\"Resource Query Parameter has type '{0}' but expected type '{1}'\"", ".", "format", "(", "value", ".", "__class__", ".", "__name__", ",", "param", ".", "type", ")", ")", "response", "=", "requests", ".", "request", "(", "node", ".", "resource", ".", "method", ",", "node", ".", "resource", ".", "absolute_uri", ",", "params", "=", "kwargs", ",", "data", "=", "data", ",", "json", "=", "json", ")", "return", "response", "return", "request" ]
avg_line_len: 39
score: 20.285714
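A minimal usage sketch for prepare_request above; `node` is assumed to be a RAML tree node whose resource uses method 'get' and declares a per_page query parameter, and all names here are illustrative rather than from the source:

get_users = prepare_request(node)   # returns the inner `request` closure
resp = get_users(per_page=50)       # kwargs are validated against node.resource.query_params
print(resp.status_code)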
def calc_filemap(atlas_properties, subject, atlas_version_tags, worklog,
                 output_path=None, overwrite=False, output_format='mgz',
                 create_directory=False):
    '''
    calc_filemap is a calculator that converts the atlas properties nested-map into a
    single-depth map whose keys are filenames and whose values are the interpolated
    property data.

    Afferent parameters
     @ output_path The directory into which the atlas files should be written. If not
       provided or None then uses the subject's surf directory. If this directory
       doesn't exist, then it uses the subject's directory itself.
     @ overwrite Whether to overwrite existing atlas files. If True, then atlas files
       that already exist will be overwritten. If False, then no files are overwritten.
     @ create_directory Whether to create the output path if it doesn't exist. This is
       False by default.
     @ output_format The desired output format of the files to be written. May be one
       of the following: 'mgz', 'mgh', or either 'curv' or 'morph'.

    Efferent values:
     @ filemap A pimms lazy map whose keys are filenames and whose values are
       interpolated atlas properties.
     @ export_all_fn A function of no arguments that, when called, exports all of the
       files in the filemap to the output_path.
    '''
    if output_path is None:
        output_path = os.path.join(subject.path, 'surf')
        if not os.path.isdir(output_path):
            output_path = subject.path
    output_format = 'mgz' if output_format is None else output_format.lower()
    if output_format.startswith('.'):
        output_format = output_format[1:]
    (fmt, ending) = (('mgh', '.mgz') if output_format == 'mgz' else
                     ('mgh', '.mgh') if output_format == 'mgh' else
                     ('freesurfer_morph', ''))
    # make the filemap...
    worklog('Preparing Filemap...')
    fm = AutoDict()
    for (atl, atldat) in six.iteritems(atlas_properties):
        for (ver, verdat) in six.iteritems(atldat):
            vstr = atlas_version_tags[atl][ver]
            for (h, hdat) in six.iteritems(verdat):
                for m in six.iterkeys(hdat):
                    flnm = '%s.%s_%s%s%s' % (h, atl, m, vstr, ending)
                    flnm = os.path.join(output_path, flnm)
                    fm[flnm] = curry(lambda hdat, m: hdat[m], hdat, m)
    # okay, make that a lazy map:
    filemap = pimms.lazy_map(fm)
    # the function for exporting all properties:
    def export_all():
        '''
        This function will export all files from its associated filemap and return a
        list of the filenames.
        '''
        if not os.path.isdir(output_path):
            if not create_directory:
                raise ValueError('No such path and create_directory is False: %s' % output_path)
            os.makedirs(os.path.abspath(output_path), 0o755)
        filenames = []
        worklog('Extracting Files...')
        wl = worklog.indent()
        for flnm in six.iterkeys(filemap):
            wl(flnm)
            filenames.append(nyio.save(flnm, filemap[flnm], fmt))
        return filenames
    return {'filemap': filemap, 'export_all_fn': export_all}
[ "def", "calc_filemap", "(", "atlas_properties", ",", "subject", ",", "atlas_version_tags", ",", "worklog", ",", "output_path", "=", "None", ",", "overwrite", "=", "False", ",", "output_format", "=", "'mgz'", ",", "create_directory", "=", "False", ")", ":", "if", "output_path", "is", "None", ":", "output_path", "=", "os", ".", "path", ".", "join", "(", "subject", ".", "path", ",", "'surf'", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "output_path", ")", ":", "output_path", "=", "subject", ".", "path", "output_format", "=", "'mgz'", "if", "output_format", "is", "None", "else", "output_format", ".", "lower", "(", ")", "if", "output_format", ".", "startswith", "(", "'.'", ")", ":", "output_format", "=", "output_format", "[", "1", ":", "]", "(", "fmt", ",", "ending", ")", "=", "(", "(", "'mgh'", ",", "'.mgz'", ")", "if", "output_format", "==", "'mgz'", "else", "(", "'mgh'", ",", "'.mgh'", ")", "if", "output_format", "==", "'mgh'", "else", "(", "'freesurfer_morph'", ",", "''", ")", ")", "# make the filemap...", "worklog", "(", "'Preparing Filemap...'", ")", "fm", "=", "AutoDict", "(", ")", "for", "(", "atl", ",", "atldat", ")", "in", "six", ".", "iteritems", "(", "atlas_properties", ")", ":", "for", "(", "ver", ",", "verdat", ")", "in", "six", ".", "iteritems", "(", "atldat", ")", ":", "vstr", "=", "atlas_version_tags", "[", "atl", "]", "[", "ver", "]", "for", "(", "h", ",", "hdat", ")", "in", "six", ".", "iteritems", "(", "verdat", ")", ":", "for", "m", "in", "six", ".", "iterkeys", "(", "hdat", ")", ":", "flnm", "=", "'%s.%s_%s%s%s'", "%", "(", "h", ",", "atl", ",", "m", ",", "vstr", ",", "ending", ")", "flnm", "=", "os", ".", "path", ".", "join", "(", "output_path", ",", "flnm", ")", "fm", "[", "flnm", "]", "=", "curry", "(", "lambda", "hdat", ",", "m", ":", "hdat", "[", "m", "]", ",", "hdat", ",", "m", ")", "# okay, make that a lazy map:", "filemap", "=", "pimms", ".", "lazy_map", "(", "fm", ")", "# the function for exporting all properties:", "def", "export_all", "(", ")", ":", "'''\n This function will export all files from its associated filemap and return a list of the\n filenames.\n '''", "if", "not", "os", ".", "path", ".", "isdir", "(", "output_path", ")", ":", "if", "not", "create_directory", ":", "raise", "ValueError", "(", "'No such path and create_direcotry is False: %s'", "%", "output_path", ")", "os", ".", "makedirs", "(", "os", ".", "path", ".", "abspath", "(", "output_path", ")", ",", "0o755", ")", "filenames", "=", "[", "]", "worklog", "(", "'Extracting Files...'", ")", "wl", "=", "worklog", ".", "indent", "(", ")", "for", "flnm", "in", "six", ".", "iterkeys", "(", "filemap", ")", ":", "wl", "(", "flnm", ")", "filenames", ".", "append", "(", "nyio", ".", "save", "(", "flnm", ",", "filemap", "[", "flnm", "]", ",", "fmt", ")", ")", "return", "filenames", "return", "{", "'filemap'", ":", "filemap", ",", "'export_all_fn'", ":", "export_all", "}" ]
avg_line_len: 47.38806
score: 24.850746
def get_initkwargs(cls, form_list, initial_dict=None,
                   instance_dict=None, condition_dict=None, *args, **kwargs):
    """
    Creates a dict with all needed parameters for the form wizard instances.

    * `form_list` - is a list of forms. The list entries can be single form
      classes or tuples of (`step_name`, `form_class`). If you pass a list
      of forms, the formwizard will convert the class list to
      (`zero_based_counter`, `form_class`). This is needed to access the
      form for a specific step.
    * `initial_dict` - contains a dictionary of initial data dictionaries.
      The key should be equal to the `step_name` in the `form_list` (or
      the str of the zero based counter - if no step_names added in the
      `form_list`)
    * `instance_dict` - contains a dictionary of instance objects. This
      list is only used when `ModelForm`s are used. The key should be equal
      to the `step_name` in the `form_list`. Same rules as for
      `initial_dict` apply.
    * `condition_dict` - contains a dictionary of boolean values or
      callables. If the value for a specific `step_name` is callable it
      will be called with the formwizard instance as the only argument. If
      the return value is true, the step's form will be used.
    """
    kwargs.update({
        'initial_dict': initial_dict or {},
        'instance_dict': instance_dict or {},
        'condition_dict': condition_dict or {},
    })
    init_form_list = SortedDict()

    assert len(form_list) > 0, 'at least one form is needed'

    # walk through the passed form list
    for i, form in enumerate(form_list):
        if isinstance(form, (list, tuple)):
            # if the element is a tuple, add the tuple to the newly created
            # sorted dictionary.
            init_form_list[unicode(form[0])] = form[1]
        else:
            # if not, add the form with a zero based counter as unicode
            init_form_list[unicode(i)] = form

    # walk through the newly created list of forms
    for form in init_form_list.itervalues():
        if issubclass(form, formsets.BaseFormSet):
            # if the element is based on BaseFormSet (FormSet/ModelFormSet)
            # we need to override the form variable.
            form = form.form
        # check if any form contains a FileField, if yes, we need a
        # file_storage added to the formwizard (by subclassing).
        for field in form.base_fields.itervalues():
            if (isinstance(field, forms.FileField)
                    and not hasattr(cls, 'file_storage')):
                raise NoFileStorageConfigured

    # build the kwargs for the formwizard instances
    kwargs['form_list'] = init_form_list
    return kwargs
[ "def", "get_initkwargs", "(", "cls", ",", "form_list", ",", "initial_dict", "=", "None", ",", "instance_dict", "=", "None", ",", "condition_dict", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "kwargs", ".", "update", "(", "{", "'initial_dict'", ":", "initial_dict", "or", "{", "}", ",", "'instance_dict'", ":", "instance_dict", "or", "{", "}", ",", "'condition_dict'", ":", "condition_dict", "or", "{", "}", ",", "}", ")", "init_form_list", "=", "SortedDict", "(", ")", "assert", "len", "(", "form_list", ")", ">", "0", ",", "'at least one form is needed'", "# walk through the passed form list", "for", "i", ",", "form", "in", "enumerate", "(", "form_list", ")", ":", "if", "isinstance", "(", "form", ",", "(", "list", ",", "tuple", ")", ")", ":", "# if the element is a tuple, add the tuple to the new created", "# sorted dictionary.", "init_form_list", "[", "unicode", "(", "form", "[", "0", "]", ")", "]", "=", "form", "[", "1", "]", "else", ":", "# if not, add the form with a zero based counter as unicode", "init_form_list", "[", "unicode", "(", "i", ")", "]", "=", "form", "# walk through the ne created list of forms", "for", "form", "in", "init_form_list", ".", "itervalues", "(", ")", ":", "if", "issubclass", "(", "form", ",", "formsets", ".", "BaseFormSet", ")", ":", "# if the element is based on BaseFormSet (FormSet/ModelFormSet)", "# we need to override the form variable.", "form", "=", "form", ".", "form", "# check if any form contains a FileField, if yes, we need a", "# file_storage added to the formwizard (by subclassing).", "for", "field", "in", "form", ".", "base_fields", ".", "itervalues", "(", ")", ":", "if", "(", "isinstance", "(", "field", ",", "forms", ".", "FileField", ")", "and", "not", "hasattr", "(", "cls", ",", "'file_storage'", ")", ")", ":", "raise", "NoFileStorageConfigured", "# build the kwargs for the formwizard instances", "kwargs", "[", "'form_list'", "]", "=", "init_form_list", "return", "kwargs" ]
avg_line_len: 49.534483
score: 22.637931
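The two form_list shapes that get_initkwargs accepts, sketched with hypothetical names (WizardView, ContactForm1, and ContactForm2 are placeholders, not from the source):

# zero-based step names: keys become '0', '1'
kwargs = WizardView.get_initkwargs([ContactForm1, ContactForm2])
# explicit step names
kwargs = WizardView.get_initkwargs([('start', ContactForm1),
                                    ('details', ContactForm2)])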
def echo_html_fenye_str(rec_num, fenye_num):
    '''
    Generate the pagination navigation HTML.
    '''
    pagination_num = int(math.ceil(rec_num * 1.0 / 10))
    if pagination_num == 1 or pagination_num == 0:
        fenye_str = ''
    elif pagination_num > 1:
        pager_mid, pager_pre, pager_next, pager_last, pager_home = '', '', '', '', ''
        fenye_str = '<ul class="pagination">'
        if fenye_num > 1:
            pager_home = '''<li class="{0}" name='fenye' onclick='change(this);'
                value='{1}'><a>First Page</a></li>'''.format('', 1)
            pager_pre = ''' <li class="{0}" name='fenye' onclick='change(this);'
                value='{1}'><a>Previous Page</a></li>'''.format('', fenye_num - 1)
        if fenye_num > 5:
            cur_num = fenye_num - 4
        else:
            cur_num = 1
        if pagination_num > 10 and cur_num < pagination_num - 10:
            show_num = cur_num + 10
        else:
            show_num = pagination_num + 1
        for num in range(cur_num, show_num):
            if num == fenye_num:
                checkstr = 'active'
            else:
                checkstr = ''
            tmp_str_df = '''<li class="{0}" name='fenye' onclick='change(this);'
                value='{1}'><a>{1}</a></li>'''.format(checkstr, num)
            pager_mid += tmp_str_df
        if fenye_num < pagination_num:
            pager_next = '''<li class="{0}" name='fenye' onclick='change(this);'
                value='{1}'><a>Next Page</a></li>'''.format('', fenye_num + 1)
            pager_last = '''<li class="{0}" name='fenye' onclick='change(this);'
                value='{1}'><a>End Page</a></li>'''.format('', pagination_num)
        fenye_str += pager_home + pager_pre + pager_mid + pager_next + pager_last
        fenye_str += '</ul>'
    else:
        return ''
    return fenye_str
[ "def", "echo_html_fenye_str", "(", "rec_num", ",", "fenye_num", ")", ":", "pagination_num", "=", "int", "(", "math", ".", "ceil", "(", "rec_num", "*", "1.0", "/", "10", ")", ")", "if", "pagination_num", "==", "1", "or", "pagination_num", "==", "0", ":", "fenye_str", "=", "''", "elif", "pagination_num", ">", "1", ":", "pager_mid", ",", "pager_pre", ",", "pager_next", ",", "pager_last", ",", "pager_home", "=", "''", ",", "''", ",", "''", ",", "''", ",", "''", "fenye_str", "=", "'<ul class=\"pagination\">'", "if", "fenye_num", ">", "1", ":", "pager_home", "=", "'''<li class=\"{0}\" name='fenye' onclick='change(this);'\n value='{1}'><a>First Page</a></li>'''", ".", "format", "(", "''", ",", "1", ")", "pager_pre", "=", "''' <li class=\"{0}\" name='fenye' onclick='change(this);'\n value='{1}'><a>Previous Page</a></li>'''", ".", "format", "(", "''", ",", "fenye_num", "-", "1", ")", "if", "fenye_num", ">", "5", ":", "cur_num", "=", "fenye_num", "-", "4", "else", ":", "cur_num", "=", "1", "if", "pagination_num", ">", "10", "and", "cur_num", "<", "pagination_num", "-", "10", ":", "show_num", "=", "cur_num", "+", "10", "else", ":", "show_num", "=", "pagination_num", "+", "1", "for", "num", "in", "range", "(", "cur_num", ",", "show_num", ")", ":", "if", "num", "==", "fenye_num", ":", "checkstr", "=", "'active'", "else", ":", "checkstr", "=", "''", "tmp_str_df", "=", "'''<li class=\"{0}\" name='fenye' onclick='change(this);'\n value='{1}'><a>{1}</a></li>'''", ".", "format", "(", "checkstr", ",", "num", ")", "pager_mid", "+=", "tmp_str_df", "if", "fenye_num", "<", "pagination_num", ":", "pager_next", "=", "'''<li class=\"{0}\" name='fenye' onclick='change(this);'\n value='{1}'><a>Next Page</a></li>'''", ".", "format", "(", "''", ",", "fenye_num", "+", "1", ")", "pager_last", "=", "'''<li class=\"{0}\" name='fenye' onclick='change(this);'\n value='{1}'><a>End Page</a></li>'''", ".", "format", "(", "''", ",", "pagination_num", ")", "fenye_str", "+=", "pager_home", "+", "pager_pre", "+", "pager_mid", "+", "pager_next", "+", "pager_last", "fenye_str", "+=", "'</ul>'", "else", ":", "return", "''", "return", "fenye_str" ]
avg_line_len: 32.907407
score: 26.092593
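A worked call, tracing the arithmetic above:

html = echo_html_fenye_str(95, 3)
# pagination_num == ceil(95 / 10) == 10; fenye_num 3 > 1, so First/Previous
# links are emitted; cur_num == 1 and show_num == 11, so page items 1..10 are
# listed with page 3 marked 'active'; 3 < 10, so Next/End links are appended.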
def _ConvertMapFieldValue(self, value, message, field):
    """Convert map field value for a message map field.

    Args:
      value: A JSON object to convert the map field value.
      message: A protocol message to record the converted data.
      field: The descriptor of the map field to be converted.

    Raises:
      ParseError: In case of convert problems.
    """
    if not isinstance(value, dict):
        raise ParseError(
            'Map field {0} must be in a dict which is {1}.'.format(
                field.name, value))
    key_field = field.message_type.fields_by_name['key']
    value_field = field.message_type.fields_by_name['value']
    for key in value:
        key_value = _ConvertScalarFieldValue(key, key_field, True)
        if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
            self.ConvertMessage(value[key], getattr(
                message, field.name)[key_value])
        else:
            getattr(message, field.name)[key_value] = _ConvertScalarFieldValue(
                value[key], value_field)
[ "def", "_ConvertMapFieldValue", "(", "self", ",", "value", ",", "message", ",", "field", ")", ":", "if", "not", "isinstance", "(", "value", ",", "dict", ")", ":", "raise", "ParseError", "(", "'Map field {0} must be in a dict which is {1}.'", ".", "format", "(", "field", ".", "name", ",", "value", ")", ")", "key_field", "=", "field", ".", "message_type", ".", "fields_by_name", "[", "'key'", "]", "value_field", "=", "field", ".", "message_type", ".", "fields_by_name", "[", "'value'", "]", "for", "key", "in", "value", ":", "key_value", "=", "_ConvertScalarFieldValue", "(", "key", ",", "key_field", ",", "True", ")", "if", "value_field", ".", "cpp_type", "==", "descriptor", ".", "FieldDescriptor", ".", "CPPTYPE_MESSAGE", ":", "self", ".", "ConvertMessage", "(", "value", "[", "key", "]", ",", "getattr", "(", "message", ",", "field", ".", "name", ")", "[", "key_value", "]", ")", "else", ":", "getattr", "(", "message", ",", "field", ".", "name", ")", "[", "key_value", "]", "=", "_ConvertScalarFieldValue", "(", "value", "[", "key", "]", ",", "value_field", ")" ]
avg_line_len: 40.48
score: 18.88
def buffer_to_value(self, obj, buffer, offset, default_endianness=DEFAULT_ENDIANNESS):
    """
    Converts the bytes in ``buffer`` at ``offset`` to a native Python value. Returns
    that value and the number of bytes consumed to create it.

    :param obj: The parent :class:`.PebblePacket` of this field
    :type obj: .PebblePacket
    :param buffer: The buffer from which to extract a value.
    :type buffer: bytes
    :param offset: The offset in the buffer to start at.
    :type offset: int
    :param default_endianness: The default endianness of the value. Used if
           ``endianness`` was not passed to the :class:`Field` constructor.
    :type default_endianness: str
    :return: (value, length)
    :rtype: (:class:`object`, :any:`int`)
    """
    try:
        value, length = struct.unpack_from(str(self.endianness or default_endianness)
                                           + self.struct_format, buffer, offset)[0], \
                        struct.calcsize(self.struct_format)
        if self._enum is not None:
            try:
                return self._enum(value), length
            except ValueError as e:
                raise PacketDecodeError("{}: {}".format(self.type, e))
        else:
            return value, length
    except struct.error as e:
        raise PacketDecodeError("{}: {}".format(self.type, e))
[ "def", "buffer_to_value", "(", "self", ",", "obj", ",", "buffer", ",", "offset", ",", "default_endianness", "=", "DEFAULT_ENDIANNESS", ")", ":", "try", ":", "value", ",", "length", "=", "struct", ".", "unpack_from", "(", "str", "(", "self", ".", "endianness", "or", "default_endianness", ")", "+", "self", ".", "struct_format", ",", "buffer", ",", "offset", ")", "[", "0", "]", ",", "struct", ".", "calcsize", "(", "self", ".", "struct_format", ")", "if", "self", ".", "_enum", "is", "not", "None", ":", "try", ":", "return", "self", ".", "_enum", "(", "value", ")", ",", "length", "except", "ValueError", "as", "e", ":", "raise", "PacketDecodeError", "(", "\"{}: {}\"", ".", "format", "(", "self", ".", "type", ",", "e", ")", ")", "else", ":", "return", "value", ",", "length", "except", "struct", ".", "error", "as", "e", ":", "raise", "PacketDecodeError", "(", "\"{}: {}\"", ".", "format", "(", "self", ".", "type", ",", "e", ")", ")" ]
avg_line_len: 48.931034
score: 23.068966
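The core of the decode step is struct.unpack_from with an endianness prefix prepended to the field's format code; a standalone sketch of that step:

import struct

value = struct.unpack_from('<H', b'\x34\x12', 0)[0]   # little-endian uint16 -> 4660 (0x1234)
length = struct.calcsize('<H')                        # 2 bytes consumed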
def append_func(self, func, *args, **kwargs):
    '''
    Append func with given arguments and keywords.
    '''
    wrapped_func = partial(func, *args, **kwargs)
    self.append(wrapped_func)
[ "def", "append_func", "(", "self", ",", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "wraped_func", "=", "partial", "(", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", "self", ".", "append", "(", "wraped_func", ")" ]
avg_line_len: 34.166667
score: 16.166667
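What append_func stores is just a functools.partial, i.e. a zero-argument callable with the arguments baked in:

from functools import partial

wrapped = partial(print, 'hello', end='!\n')
wrapped()   # prints: hello!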
def cloudshell_model_name(self):
    """Return the name of the CloudShell model"""
    if self.shell_name:
        return "{shell_name}.{resource_model}".format(
            shell_name=self.shell_name,
            resource_model=self.RESOURCE_MODEL.replace(" ", ""))
    else:
        return self.RESOURCE_MODEL
[ "def", "cloudshell_model_name", "(", "self", ")", ":", "if", "self", ".", "shell_name", ":", "return", "\"{shell_name}.{resource_model}\"", ".", "format", "(", "shell_name", "=", "self", ".", "shell_name", ",", "resource_model", "=", "self", ".", "RESOURCE_MODEL", ".", "replace", "(", "\" \"", ",", "\"\"", ")", ")", "else", ":", "return", "self", ".", "RESOURCE_MODEL" ]
avg_line_len: 51.142857
score: 23.571429
def b_fit_score(self, x, y):
    """ Computes the cds statistic from variable 1 to variable 2

    Args:
        x (numpy.ndarray): Variable 1
        y (numpy.ndarray): Variable 2

    Returns:
        float: BF fit score
    """
    x = np.reshape(scale(x), (-1, 1))
    y = np.reshape(scale(y), (-1, 1))
    gp = GaussianProcessRegressor().fit(x, y)
    y_predict = gp.predict(x)
    error = mean_squared_error(y_predict, y)
    return error
[ "def", "b_fit_score", "(", "self", ",", "x", ",", "y", ")", ":", "x", "=", "np", ".", "reshape", "(", "scale", "(", "x", ")", ",", "(", "-", "1", ",", "1", ")", ")", "y", "=", "np", ".", "reshape", "(", "scale", "(", "y", ")", ",", "(", "-", "1", ",", "1", ")", ")", "gp", "=", "GaussianProcessRegressor", "(", ")", ".", "fit", "(", "x", ",", "y", ")", "y_predict", "=", "gp", ".", "predict", "(", "x", ")", "error", "=", "mean_squared_error", "(", "y_predict", ",", "y", ")", "return", "error" ]
avg_line_len: 28.294118
score: 14.117647
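A self-contained sketch of the same fit-score computation on toy data; the arrays are illustrative, and scale, GaussianProcessRegressor, and mean_squared_error are assumed to come from scikit-learn as in the method above:

import numpy as np
from sklearn.preprocessing import scale
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.metrics import mean_squared_error

x = np.linspace(0, 1, 50)
y = x ** 2 + 0.05 * np.random.randn(50)
xs = np.reshape(scale(x), (-1, 1))
ys = np.reshape(scale(y), (-1, 1))
gp = GaussianProcessRegressor().fit(xs, ys)
error = mean_squared_error(gp.predict(xs), ys)   # lower error -> better x -> y fit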
def linkify_hostgroups_hosts(self, hosts):
    """We just search for each hostgroup the id of the hosts and replace the
    names by the found identifiers

    :param hosts: object Hosts
    :type hosts: alignak.objects.host.Hosts
    :return: None
    """
    for hostgroup in self:
        members = hostgroup.get_hosts()
        # The new members identifiers list
        new_members = []
        for member in members:
            # member is a host name
            member = member.strip()
            if not member:
                # void entry, skip this
                continue
            if member == '*':
                # All the hosts identifiers list
                new_members.extend(list(hosts.items.keys()))
            else:
                host = hosts.find_by_name(member)
                if host is not None:
                    new_members.append(host.uuid)
                    if hostgroup.uuid not in host.hostgroups:
                        host.hostgroups.append(hostgroup.uuid)
                else:
                    hostgroup.add_unknown_members(member)

        # Make members unique
        new_members = list(set(new_members))

        # We find the id, we replace the names
        hostgroup.replace_members(new_members)
[ "def", "linkify_hostgroups_hosts", "(", "self", ",", "hosts", ")", ":", "for", "hostgroup", "in", "self", ":", "members", "=", "hostgroup", ".", "get_hosts", "(", ")", "# The new members identifiers list", "new_members", "=", "[", "]", "for", "member", "in", "members", ":", "# member is an host name", "member", "=", "member", ".", "strip", "(", ")", "if", "not", "member", ":", "# void entry, skip this", "continue", "if", "member", "==", "'*'", ":", "# All the hosts identifiers list", "new_members", ".", "extend", "(", "list", "(", "hosts", ".", "items", ".", "keys", "(", ")", ")", ")", "else", ":", "host", "=", "hosts", ".", "find_by_name", "(", "member", ")", "if", "host", "is", "not", "None", ":", "new_members", ".", "append", "(", "host", ".", "uuid", ")", "if", "hostgroup", ".", "uuid", "not", "in", "host", ".", "hostgroups", ":", "host", ".", "hostgroups", ".", "append", "(", "hostgroup", ".", "uuid", ")", "else", ":", "hostgroup", ".", "add_unknown_members", "(", "member", ")", "# Make members unique", "new_members", "=", "list", "(", "set", "(", "new_members", ")", ")", "# We find the id, we replace the names", "hostgroup", ".", "replace_members", "(", "new_members", ")" ]
avg_line_len: 38
score: 13.8
def get_request_data(self, var_name, full_data=False):
    """
    :param var_name:
    :param full_data: If you want `.to_array()` with this data, ready to be sent.
    :return: A tuple of `to_array()` dict and the files
             (:py:func:`InputFile.get_request_files()`).
             Files can be None, if no file was given, but an url or existing `file_id`.

             If `self.media` is an `InputFile` however, the first tuple element (either
             the string, or the dict's `['media']` if `full_data=True`), will be set to
             `attach://{var_name}_media` automatically.
             If `self.thumb` is an `InputFile` however, the first tuple element's
             `['thumb']`, will be set to `attach://{var_name}_thumb` automatically.
    """
    if not full_data:
        raise ArithmeticError('we have a thumbnail, please use `full_data=True`.')
    # end if
    file = {}
    data, file_to_add = super(InputMediaWithThumb, self).get_request_data(
        var_name, full_data=True)
    if file_to_add:
        file.update(file_to_add)
    # end if
    data['thumb'], file_to_add = self.get_inputfile_data(self.thumb, var_name,
                                                         suffix='_thumb')
    if data['thumb'] is None:
        del data['thumb']  # having `'thumb': null` in the json produces errors.
    # end if
    if file_to_add:
        file.update(file_to_add)
    # end if
    return data, (file or None)
[ "def", "get_request_data", "(", "self", ",", "var_name", ",", "full_data", "=", "False", ")", ":", "if", "not", "full_data", ":", "raise", "ArithmeticError", "(", "'we have a thumbnail, please use `full_data=True`.'", ")", "# end if", "file", "=", "{", "}", "data", ",", "file_to_add", "=", "super", "(", "InputMediaWithThumb", ",", "self", ")", ".", "get_request_data", "(", "var_name", ",", "full_data", "=", "True", ")", "if", "file_to_add", ":", "file", ".", "update", "(", "file_to_add", ")", "# end if", "data", "[", "'thumb'", "]", ",", "file_to_add", "=", "self", ".", "get_inputfile_data", "(", "self", ".", "thumb", ",", "var_name", ",", "suffix", "=", "'_thumb'", ")", "if", "data", "[", "'thumb'", "]", "is", "None", ":", "del", "data", "[", "'thumb'", "]", "# having `'thumb': null` in the json produces errors.", "# end if", "if", "file_to_add", ":", "file", ".", "update", "(", "file_to_add", ")", "# end if", "return", "data", ",", "(", "file", "or", "None", ")" ]
avg_line_len: 51.428571
score: 30.928571
def logo(symbol, token='', version=''):
    '''This is a helper function, but the google APIs url is standardized.

    https://iexcloud.io/docs/api/#logo
    8am UTC daily

    Args:
        symbol (string); Ticker to request
        token (string); Access token
        version (string); API version

    Returns:
        dict: result
    '''
    _raiseIfNotStr(symbol)
    return _getJson('stock/' + symbol + '/logo', token, version)
[ "def", "logo", "(", "symbol", ",", "token", "=", "''", ",", "version", "=", "''", ")", ":", "_raiseIfNotStr", "(", "symbol", ")", "return", "_getJson", "(", "'stock/'", "+", "symbol", "+", "'/logo'", ",", "token", ",", "version", ")" ]
avg_line_len: 26.3125
score: 21.1875
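A hypothetical call, assuming a valid IEX Cloud API token; the token value is a placeholder:

info = logo('AAPL', token='pk_...')   # returns the parsed JSON, e.g. a dict with the logo's 'url'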
def _to_kraus(rep, data, input_dim, output_dim):
    """Transform a QuantumChannel to the Kraus representation."""
    if rep == 'Kraus':
        return data
    if rep == 'Stinespring':
        return _stinespring_to_kraus(data, input_dim, output_dim)
    if rep == 'Operator':
        return _from_operator('Kraus', data, input_dim, output_dim)
    # Convert via Choi and Kraus
    if rep != 'Choi':
        data = _to_choi(rep, data, input_dim, output_dim)
    return _choi_to_kraus(data, input_dim, output_dim)
[ "def", "_to_kraus", "(", "rep", ",", "data", ",", "input_dim", ",", "output_dim", ")", ":", "if", "rep", "==", "'Kraus'", ":", "return", "data", "if", "rep", "==", "'Stinespring'", ":", "return", "_stinespring_to_kraus", "(", "data", ",", "input_dim", ",", "output_dim", ")", "if", "rep", "==", "'Operator'", ":", "return", "_from_operator", "(", "'Kraus'", ",", "data", ",", "input_dim", ",", "output_dim", ")", "# Convert via Choi and Kraus", "if", "rep", "!=", "'Choi'", ":", "data", "=", "_to_choi", "(", "rep", ",", "data", ",", "input_dim", ",", "output_dim", ")", "return", "_choi_to_kraus", "(", "data", ",", "input_dim", ",", "output_dim", ")" ]
avg_line_len: 41.916667
score: 15.333333
def logit_px(self) -> LogitTensorImage:
    "Get logit(image.px)."
    if self._logit_px is None:
        self._logit_px = logit_(self.px)
    return self._logit_px
[ "def", "logit_px", "(", "self", ")", "->", "LogitTensorImage", ":", "if", "self", ".", "_logit_px", "is", "None", ":", "self", ".", "_logit_px", "=", "logit_", "(", "self", ".", "px", ")", "return", "self", ".", "_logit_px" ]
avg_line_len: 40.75
score: 12.75
def _parse(args):
    """Parse passed arguments from shell."""
    ordered = []
    opt_full = dict()
    opt_abbrev = dict()

    args = args + ['']  # Avoid out of range
    i = 0
    while i < len(args) - 1:
        arg = args[i]
        arg_next = args[i + 1]
        if arg.startswith('--'):
            if arg_next.startswith('-'):
                raise ValueError('{} lacks value'.format(arg))
            else:
                opt_full[arg[2:]] = arg_next
                i += 2
        elif arg.startswith('-'):
            if arg_next.startswith('-'):
                raise ValueError('{} lacks value'.format(arg))
            else:
                opt_abbrev[arg[1:]] = arg_next
                i += 2
        else:
            ordered.append(arg)
            i += 1
    return ordered, opt_full, opt_abbrev
[ "def", "_parse", "(", "args", ")", ":", "ordered", "=", "[", "]", "opt_full", "=", "dict", "(", ")", "opt_abbrev", "=", "dict", "(", ")", "args", "=", "args", "+", "[", "''", "]", "# Avoid out of range", "i", "=", "0", "while", "i", "<", "len", "(", "args", ")", "-", "1", ":", "arg", "=", "args", "[", "i", "]", "arg_next", "=", "args", "[", "i", "+", "1", "]", "if", "arg", ".", "startswith", "(", "'--'", ")", ":", "if", "arg_next", ".", "startswith", "(", "'-'", ")", ":", "raise", "ValueError", "(", "'{} lacks value'", ".", "format", "(", "arg", ")", ")", "else", ":", "opt_full", "[", "arg", "[", "2", ":", "]", "]", "=", "arg_next", "i", "+=", "2", "elif", "arg", ".", "startswith", "(", "'-'", ")", ":", "if", "arg_next", ".", "startswith", "(", "'-'", ")", ":", "raise", "ValueError", "(", "'{} lacks value'", ".", "format", "(", "arg", ")", ")", "else", ":", "opt_abbrev", "[", "arg", "[", "1", ":", "]", "]", "=", "arg_next", "i", "+=", "2", "else", ":", "ordered", ".", "append", "(", "arg", ")", "i", "+=", "1", "return", "ordered", ",", "opt_full", ",", "opt_abbrev" ]
avg_line_len: 26.466667
score: 17.533333
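A quick check of the parsing contract of _parse:

ordered, opt_full, opt_abbrev = _parse(['build', '--out', 'dist', '-v', '1'])
# ordered    == ['build']
# opt_full   == {'out': 'dist'}
# opt_abbrev == {'v': '1'}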
def artist(self):
    """
    :class:`Artist` object of album's artist
    """
    if not self._artist:
        self._artist = Artist(self._artist_id, self._artist_name,
                              self._connection)
    return self._artist
[ "def", "artist", "(", "self", ")", ":", "if", "not", "self", ".", "_artist", ":", "self", ".", "_artist", "=", "Artist", "(", "self", ".", "_artist_id", ",", "self", ".", "_artist_name", ",", "self", ".", "_connection", ")", "return", "self", ".", "_artist" ]
avg_line_len: 32.714286
score: 14.714286
def fmt_text(text):
    """
    convert characters that aren't printable to hex format
    """
    PRINTABLE_CHAR = set(
        list(range(ord(' '), ord('~') + 1)) + [ord('\r'), ord('\n')])
    newtext = ("\\x{:02X}".format(c) if c not in PRINTABLE_CHAR else chr(c)
               for c in text)
    textlines = "\r\n".join(l.strip('\r')
                            for l in "".join(newtext).split('\n'))
    return textlines
[ "def", "fmt_text", "(", "text", ")", ":", "PRINTABLE_CHAR", "=", "set", "(", "list", "(", "range", "(", "ord", "(", "' '", ")", ",", "ord", "(", "'~'", ")", "+", "1", ")", ")", "+", "[", "ord", "(", "'\\r'", ")", ",", "ord", "(", "'\\n'", ")", "]", ")", "newtext", "=", "(", "\"\\\\x{:02X}\"", ".", "format", "(", "c", ")", "if", "c", "not", "in", "PRINTABLE_CHAR", "else", "chr", "(", "c", ")", "for", "c", "in", "text", ")", "textlines", "=", "\"\\r\\n\"", ".", "join", "(", "l", ".", "strip", "(", "'\\r'", ")", "for", "l", "in", "\"\"", ".", "join", "(", "newtext", ")", ".", "split", "(", "'\\n'", ")", ")", "return", "textlines" ]
avg_line_len: 40.7
score: 14.2
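Since the membership test compares against integer code points, fmt_text iterates over byte values and so expects a bytes-like input; non-printable bytes are hex-escaped while printable ones pass through:

fmt_text(b'ab\x00cd')   # -> 'ab\\x00cd'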
def extract(args):
    """
    %prog extract idsfile sizesfile

    Extract the lines containing only the given IDs.
    """
    p = OptionParser(extract.__doc__)
    opts, args = p.parse_args(args)

    if len(args) != 2:
        sys.exit(not p.print_help())

    idsfile, sizesfile = args
    sizes = Sizes(sizesfile).mapping
    fp = open(idsfile)
    for row in fp:
        name = row.strip()
        size = sizes[name]
        print("\t".join(str(x) for x in (name, size)))
[ "def", "extract", "(", "args", ")", ":", "p", "=", "OptionParser", "(", "extract", ".", "__doc__", ")", "opts", ",", "args", "=", "p", ".", "parse_args", "(", "args", ")", "if", "len", "(", "args", ")", "!=", "2", ":", "sys", ".", "exit", "(", "not", "p", ".", "print_help", "(", ")", ")", "idsfile", ",", "sizesfile", "=", "args", "sizes", "=", "Sizes", "(", "sizesfile", ")", ".", "mapping", "fp", "=", "open", "(", "idsfile", ")", "for", "row", "in", "fp", ":", "name", "=", "row", ".", "strip", "(", ")", "size", "=", "sizes", "[", "name", "]", "print", "(", "\"\\t\"", ".", "join", "(", "str", "(", "x", ")", "for", "x", "in", "(", "name", ",", "size", ")", ")", ")" ]
avg_line_len: 24.210526
score: 15.052632
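Usage mirrors the %prog line in the docstring; the file names below are placeholders:

extract(['ids.txt', 'chr.sizes'])
# prints one 'name<TAB>size' line per ID listed in ids.txt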
def execute_notebook(self, name):
    """Loads and then runs a notebook file."""
    warnings.filterwarnings("ignore", category=DeprecationWarning)
    nb, f = self.load_notebook(name)
    self.run_notebook(nb, f)
    self.assertTrue(True)
[ "def", "execute_notebook", "(", "self", ",", "name", ")", ":", "warnings", ".", "filterwarnings", "(", "\"ignore\"", ",", "category", "=", "DeprecationWarning", ")", "nb", ",", "f", "=", "self", ".", "load_notebook", "(", "name", ")", "self", ".", "run_notebook", "(", "nb", ",", "f", ")", "self", ".", "assertTrue", "(", "True", ")" ]
avg_line_len: 36
score: 14
def async_command(self, command, raw=False, timeout_ms=None):
    """See shell_service.ShellService.async_command()."""
    return self.shell_service.async_command(
        str(command), raw=raw, timeout_ms=timeout_ms)
[ "def", "async_command", "(", "self", ",", "command", ",", "raw", "=", "False", ",", "timeout_ms", "=", "None", ")", ":", "return", "self", ".", "shell_service", ".", "async_command", "(", "str", "(", "command", ")", ",", "raw", "=", "raw", ",", "timeout_ms", "=", "timeout_ms", ")" ]
avg_line_len: 53.75
score: 9.5
def update_dashboards(modules, horizon_config, installed_apps):
    """Imports dashboard and panel configuration from modules and applies it.

    The submodules from specified modules are imported, and the configuration
    for the specific dashboards is merged, with the later modules overriding
    settings from the former. Then the configuration is applied to
    horizon_config and installed_apps, in alphabetical order of files from
    which the configurations were imported.

    For example, given this setup:

        | foo/__init__.py
        | foo/_10_baz.py
        | foo/_20_qux.py

        | bar/__init__.py
        | bar/_30_baz_.py

    and being called with ``modules=[foo, bar]``, we will first have the
    configuration from ``_10_baz`` and ``_30_baz`` merged, then the
    configurations will be applied in order ``qux``, ``baz`` (``baz`` is
    second, because the most recent file which contributed to it, ``_30_baz``,
    comes after ``_20_qux``).

    Panel specific configurations are stored in horizon_config. Dashboards
    from both plugin-based and openstack_dashboard must be registered before
    the panel configuration can be applied. Making changes to the panel is
    deferred until the horizon autodiscover is completed; configurations are
    applied in alphabetical order of the files from which they were imported.
    """
    config_dashboards = horizon_config.get('dashboards', [])
    if config_dashboards or horizon_config.get('default_dashboard'):
        logging.warning(
            '"dashboards" and "default_dashboard" in (local_)settings is '
            'DEPRECATED now and may be unsupported in some future release. '
            'The preferred way to specify the order of dashboards and the '
            'default dashboard is the pluggable dashboard mechanism (in %s).',
            ', '.join([os.path.abspath(module.__path__[0])
                       for module in modules])
        )

    enabled_dashboards = []
    disabled_dashboards = []
    exceptions = horizon_config.get('exceptions', {})
    apps = []
    angular_modules = []
    js_files = []
    js_spec_files = []
    scss_files = []
    xstatic_modules = []
    panel_customization = []
    header_sections = []
    extra_tabs = collections.defaultdict(tuple)
    extra_steps = collections.defaultdict(tuple)
    update_horizon_config = {}
    for key, config in import_dashboard_config(modules):
        if config.get('DISABLED', False):
            if config.get('DASHBOARD'):
                disabled_dashboards.append(config.get('DASHBOARD'))
            continue

        _apps = config.get('ADD_INSTALLED_APPS', [])
        apps.extend(_apps)

        _header_sections = config.get('ADD_HEADER_SECTIONS', [])
        header_sections.extend(_header_sections)

        if config.get('AUTO_DISCOVER_STATIC_FILES', False):
            for _app in _apps:
                module = import_module(_app)
                base_path = os.path.join(module.__path__[0], 'static/')
                file_discovery.populate_horizon_config(horizon_config,
                                                       base_path)

        add_exceptions = config.get('ADD_EXCEPTIONS', {}).items()
        for category, exc_list in add_exceptions:
            exceptions[category] = tuple(set(exceptions.get(category, ())
                                             + exc_list))

        angular_modules.extend(config.get('ADD_ANGULAR_MODULES', []))
        # avoid pulling in dashboard javascript dependencies multiple times
        existing = set(js_files)
        js_files.extend([f for f in config.get('ADD_JS_FILES', [])
                         if f not in existing])
        js_spec_files.extend(config.get('ADD_JS_SPEC_FILES', []))
        scss_files.extend(config.get('ADD_SCSS_FILES', []))
        xstatic_modules.extend(config.get('ADD_XSTATIC_MODULES', []))
        update_horizon_config.update(
            config.get('UPDATE_HORIZON_CONFIG', {}))

        if config.get('DASHBOARD'):
            dashboard = key
            enabled_dashboards.append(dashboard)
            if config.get('DEFAULT', False):
                horizon_config['default_dashboard'] = dashboard
        elif config.get('PANEL') or config.get('PANEL_GROUP'):
            config.pop("__builtins__", None)
            panel_customization.append(config)

        _extra_tabs = config.get('EXTRA_TABS', {})
        for tab_key, tab_defs in _extra_tabs.items():
            extra_tabs[tab_key] += tuple(tab_defs)
        _extra_steps = config.get('EXTRA_STEPS', {})
        for step_key, step_defs in _extra_steps.items():
            extra_steps[step_key] += tuple(step_defs)

    # Preserve the dashboard order specified in settings
    dashboards = ([d for d in config_dashboards
                   if d not in disabled_dashboards] +
                  [d for d in enabled_dashboards
                   if d not in config_dashboards])

    horizon_config['panel_customization'] = panel_customization
    horizon_config['header_sections'] = header_sections
    horizon_config['dashboards'] = tuple(dashboards)
    horizon_config.setdefault('exceptions', {}).update(exceptions)
    horizon_config.update(update_horizon_config)
    horizon_config.setdefault('angular_modules', []).extend(angular_modules)
    horizon_config.setdefault('js_files', []).extend(js_files)
    horizon_config.setdefault('js_spec_files', []).extend(js_spec_files)
    horizon_config.setdefault('scss_files', []).extend(scss_files)
    horizon_config.setdefault('xstatic_modules', []).extend(xstatic_modules)
    horizon_config['extra_tabs'] = extra_tabs
    horizon_config['extra_steps'] = extra_steps

    # apps contains reference to applications declared in the enabled folder
    # basically a list of applications that are internal and external plugins
    # installed_apps contains reference to applications declared in settings
    # such as django.contrib.*, django_pyscss, compressor, horizon, etc...
    # for translation, we are only interested in the list of external plugins
    # so we save the reference to it before we append to installed_apps
    horizon_config.setdefault('plugins', []).extend(apps)
    installed_apps[0:0] = apps
[ "def", "update_dashboards", "(", "modules", ",", "horizon_config", ",", "installed_apps", ")", ":", "config_dashboards", "=", "horizon_config", ".", "get", "(", "'dashboards'", ",", "[", "]", ")", "if", "config_dashboards", "or", "horizon_config", ".", "get", "(", "'default_dashboard'", ")", ":", "logging", ".", "warning", "(", "'\"dashboards\" and \"default_dashboard\" in (local_)settings is '", "'DEPRECATED now and may be unsupported in some future release. '", "'The preferred way to specify the order of dashboards and the '", "'default dashboard is the pluggable dashboard mechanism (in %s).'", ",", "', '", ".", "join", "(", "[", "os", ".", "path", ".", "abspath", "(", "module", ".", "__path__", "[", "0", "]", ")", "for", "module", "in", "modules", "]", ")", ")", "enabled_dashboards", "=", "[", "]", "disabled_dashboards", "=", "[", "]", "exceptions", "=", "horizon_config", ".", "get", "(", "'exceptions'", ",", "{", "}", ")", "apps", "=", "[", "]", "angular_modules", "=", "[", "]", "js_files", "=", "[", "]", "js_spec_files", "=", "[", "]", "scss_files", "=", "[", "]", "xstatic_modules", "=", "[", "]", "panel_customization", "=", "[", "]", "header_sections", "=", "[", "]", "extra_tabs", "=", "collections", ".", "defaultdict", "(", "tuple", ")", "extra_steps", "=", "collections", ".", "defaultdict", "(", "tuple", ")", "update_horizon_config", "=", "{", "}", "for", "key", ",", "config", "in", "import_dashboard_config", "(", "modules", ")", ":", "if", "config", ".", "get", "(", "'DISABLED'", ",", "False", ")", ":", "if", "config", ".", "get", "(", "'DASHBOARD'", ")", ":", "disabled_dashboards", ".", "append", "(", "config", ".", "get", "(", "'DASHBOARD'", ")", ")", "continue", "_apps", "=", "config", ".", "get", "(", "'ADD_INSTALLED_APPS'", ",", "[", "]", ")", "apps", ".", "extend", "(", "_apps", ")", "_header_sections", "=", "config", ".", "get", "(", "'ADD_HEADER_SECTIONS'", ",", "[", "]", ")", "header_sections", ".", "extend", "(", "_header_sections", ")", "if", "config", ".", "get", "(", "'AUTO_DISCOVER_STATIC_FILES'", ",", "False", ")", ":", "for", "_app", "in", "_apps", ":", "module", "=", "import_module", "(", "_app", ")", "base_path", "=", "os", ".", "path", ".", "join", "(", "module", ".", "__path__", "[", "0", "]", ",", "'static/'", ")", "file_discovery", ".", "populate_horizon_config", "(", "horizon_config", ",", "base_path", ")", "add_exceptions", "=", "config", ".", "get", "(", "'ADD_EXCEPTIONS'", ",", "{", "}", ")", ".", "items", "(", ")", "for", "category", ",", "exc_list", "in", "add_exceptions", ":", "exceptions", "[", "category", "]", "=", "tuple", "(", "set", "(", "exceptions", ".", "get", "(", "category", ",", "(", ")", ")", "+", "exc_list", ")", ")", "angular_modules", ".", "extend", "(", "config", ".", "get", "(", "'ADD_ANGULAR_MODULES'", ",", "[", "]", ")", ")", "# avoid pulling in dashboard javascript dependencies multiple times", "existing", "=", "set", "(", "js_files", ")", "js_files", ".", "extend", "(", "[", "f", "for", "f", "in", "config", ".", "get", "(", "'ADD_JS_FILES'", ",", "[", "]", ")", "if", "f", "not", "in", "existing", "]", ")", "js_spec_files", ".", "extend", "(", "config", ".", "get", "(", "'ADD_JS_SPEC_FILES'", ",", "[", "]", ")", ")", "scss_files", ".", "extend", "(", "config", ".", "get", "(", "'ADD_SCSS_FILES'", ",", "[", "]", ")", ")", "xstatic_modules", ".", "extend", "(", "config", ".", "get", "(", "'ADD_XSTATIC_MODULES'", ",", "[", "]", ")", ")", "update_horizon_config", ".", "update", "(", "config", ".", "get", "(", 
"'UPDATE_HORIZON_CONFIG'", ",", "{", "}", ")", ")", "if", "config", ".", "get", "(", "'DASHBOARD'", ")", ":", "dashboard", "=", "key", "enabled_dashboards", ".", "append", "(", "dashboard", ")", "if", "config", ".", "get", "(", "'DEFAULT'", ",", "False", ")", ":", "horizon_config", "[", "'default_dashboard'", "]", "=", "dashboard", "elif", "config", ".", "get", "(", "'PANEL'", ")", "or", "config", ".", "get", "(", "'PANEL_GROUP'", ")", ":", "config", ".", "pop", "(", "\"__builtins__\"", ",", "None", ")", "panel_customization", ".", "append", "(", "config", ")", "_extra_tabs", "=", "config", ".", "get", "(", "'EXTRA_TABS'", ",", "{", "}", ")", "for", "tab_key", ",", "tab_defs", "in", "_extra_tabs", ".", "items", "(", ")", ":", "extra_tabs", "[", "tab_key", "]", "+=", "tuple", "(", "tab_defs", ")", "_extra_steps", "=", "config", ".", "get", "(", "'EXTRA_STEPS'", ",", "{", "}", ")", "for", "step_key", ",", "step_defs", "in", "_extra_steps", ".", "items", "(", ")", ":", "extra_steps", "[", "step_key", "]", "+=", "tuple", "(", "step_defs", ")", "# Preserve the dashboard order specified in settings", "dashboards", "=", "(", "[", "d", "for", "d", "in", "config_dashboards", "if", "d", "not", "in", "disabled_dashboards", "]", "+", "[", "d", "for", "d", "in", "enabled_dashboards", "if", "d", "not", "in", "config_dashboards", "]", ")", "horizon_config", "[", "'panel_customization'", "]", "=", "panel_customization", "horizon_config", "[", "'header_sections'", "]", "=", "header_sections", "horizon_config", "[", "'dashboards'", "]", "=", "tuple", "(", "dashboards", ")", "horizon_config", ".", "setdefault", "(", "'exceptions'", ",", "{", "}", ")", ".", "update", "(", "exceptions", ")", "horizon_config", ".", "update", "(", "update_horizon_config", ")", "horizon_config", ".", "setdefault", "(", "'angular_modules'", ",", "[", "]", ")", ".", "extend", "(", "angular_modules", ")", "horizon_config", ".", "setdefault", "(", "'js_files'", ",", "[", "]", ")", ".", "extend", "(", "js_files", ")", "horizon_config", ".", "setdefault", "(", "'js_spec_files'", ",", "[", "]", ")", ".", "extend", "(", "js_spec_files", ")", "horizon_config", ".", "setdefault", "(", "'scss_files'", ",", "[", "]", ")", ".", "extend", "(", "scss_files", ")", "horizon_config", ".", "setdefault", "(", "'xstatic_modules'", ",", "[", "]", ")", ".", "extend", "(", "xstatic_modules", ")", "horizon_config", "[", "'extra_tabs'", "]", "=", "extra_tabs", "horizon_config", "[", "'extra_steps'", "]", "=", "extra_steps", "# apps contains reference to applications declared in the enabled folder", "# basically a list of applications that are internal and external plugins", "# installed_apps contains reference to applications declared in settings", "# such as django.contribe.*, django_pyscss, compressor, horizon, etc...", "# for translation, we are only interested in the list of external plugins", "# so we save the reference to it before we append to installed_apps", "horizon_config", ".", "setdefault", "(", "'plugins'", ",", "[", "]", ")", ".", "extend", "(", "apps", ")", "installed_apps", "[", "0", ":", "0", "]", "=", "apps" ]
avg_line_len: 46.661538
score: 21.538462
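For context, a hypothetical 'enabled' file of the kind import_dashboard_config feeds into this function; the file name is a placeholder, but the keys shown are the ones the loop above reads:

# _50_mydash.py (hypothetical plugin enabled file)
DASHBOARD = 'mydash'
DEFAULT = True
ADD_INSTALLED_APPS = ['mydash']
ADD_ANGULAR_MODULES = ['horizon.dashboard.mydash']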
def anonymous_login(services):
    """Initialize services without authenticating to Globus Auth.

    Note:
        Clients may have reduced functionality without authentication.

    Arguments:
        services (str or list of str): The services to initialize clients for.

    Returns:
        dict: The clients requested, indexed by service name.
    """
    if isinstance(services, str):
        services = [services]

    clients = {}
    # Initialize valid services
    for serv in services:
        try:
            clients[serv] = KNOWN_CLIENTS[serv](http_timeout=STD_TIMEOUT)
        except KeyError:  # No known client
            print("Error: No known client for '{}' service.".format(serv))
        except Exception:  # Other issue, probably auth
            print("Error: Unable to create client for '{}' service.\n"
                  "Anonymous access may not be allowed.".format(serv))
    return clients
[ "def", "anonymous_login", "(", "services", ")", ":", "if", "isinstance", "(", "services", ",", "str", ")", ":", "services", "=", "[", "services", "]", "clients", "=", "{", "}", "# Initialize valid services", "for", "serv", "in", "services", ":", "try", ":", "clients", "[", "serv", "]", "=", "KNOWN_CLIENTS", "[", "serv", "]", "(", "http_timeout", "=", "STD_TIMEOUT", ")", "except", "KeyError", ":", "# No known client", "print", "(", "\"Error: No known client for '{}' service.\"", ".", "format", "(", "serv", ")", ")", "except", "Exception", ":", "# Other issue, probably auth", "print", "(", "\"Error: Unable to create client for '{}' service.\\n\"", "\"Anonymous access may not be allowed.\"", ".", "format", "(", "serv", ")", ")", "return", "clients" ]
avg_line_len: 33.148148
score: 23.888889
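A hedged usage sketch; valid service names depend on the keys of KNOWN_CLIENTS, so 'search' here is an assumption:

clients = anonymous_login(['search'])
search_client = clients.get('search')   # None if the name was unknown or creation failed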
def Message(msg, id=260, ok=None):
    """Original doc: Display a MESSAGE string.

    Return when the user clicks the OK button or presses Return.

    The MESSAGE string can be at most 255 characters long.
    """
    return psidialogs.message(message=msg, ok=ok)
[ "def", "Message", "(", "msg", ",", "id", "=", "260", ",", "ok", "=", "None", ")", ":", "return", "psidialogs", ".", "message", "(", "message", "=", "msg", ",", "ok", "=", "ok", ")" ]
avg_line_len: 32.25
score: 17.125
def do_state_tomography(preparation_program, nsamples, cxn, qubits=None, use_run=False):
    """
    Method to perform both a QPU and QVM state tomography, and use the latter as a
    reference to calculate the fidelity of the former.

    :param Program preparation_program: Program to execute.
    :param int nsamples: Number of samples to take for the program.
    :param QVMConnection|QPUConnection cxn: Connection on which to run the program.
    :param list qubits: List of qubits for the program to use in the tomography
        analysis.
    :param bool use_run: If ``True``, append measurements on all qubits and use
        ``cxn.run`` instead of ``cxn.run_and_measure``.
    :return: The state tomogram.
    :rtype: StateTomography
    """
    return tomography._do_tomography(preparation_program, nsamples, cxn, qubits,
                                     tomography.MAX_QUBITS_STATE_TOMO,
                                     StateTomography, state_tomography_programs,
                                     DEFAULT_STATE_TOMO_SETTINGS, use_run=use_run)
[ "def", "do_state_tomography", "(", "preparation_program", ",", "nsamples", ",", "cxn", ",", "qubits", "=", "None", ",", "use_run", "=", "False", ")", ":", "return", "tomography", ".", "_do_tomography", "(", "preparation_program", ",", "nsamples", ",", "cxn", ",", "qubits", ",", "tomography", ".", "MAX_QUBITS_STATE_TOMO", ",", "StateTomography", ",", "state_tomography_programs", ",", "DEFAULT_STATE_TOMO_SETTINGS", ",", "use_run", "=", "use_run", ")" ]
avg_line_len: 55.263158
score: 25.368421
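A hedged sketch of a call, using pyQuil-style objects; `cxn` (a QVM or QPU connection) is assumed to already exist:

from pyquil.quil import Program
from pyquil.gates import H

tomo = do_state_tomography(Program(H(0)), nsamples=1000, cxn=cxn, qubits=[0])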
def finalize(self) -> None:
    ''' split statement and task by last directive '''
    self.wrap_script()
    if not self.statements:
        self.task = ''
        return
    # handle tasks
    input_directive = [
        idx for idx, statement in enumerate(self.statements)
        if statement[0] == ':' and statement[1] == 'input'
    ]
    task_directive = [
        idx for idx, statement in enumerate(self.statements)
        if statement[0] == ':' and statement[1] == 'task'
    ]
    if len(task_directive) > 1:
        raise ValueError('Only one task statement is allowed in a step')
    # handle parameter
    for idx, statement in enumerate(self.statements):
        if statement[0] == ':' and statement[1] == 'parameter':
            if task_directive and task_directive[0] < idx:
                raise ValueError(
                    'Parameter statement is not allowed in tasks.')
            if '=' not in statement[2]:
                if ':' in statement[2]:
                    if not is_type_hint(statement[2]):
                        raise ValueError(
                            f'Invalid type trait in parameter specification {statement[2]}'
                        )
                    name, value = statement[2].split(':')
                else:
                    name = statement[2]
                    value = 'str'
            else:
                name, value = statement[2].split('=', 1)
                # ignore type trait if a default value is specified
                name = name.split(':')[0]
            name = name.strip()
            if name.startswith('_'):
                raise ValueError(
                    f'Invalid parameter name {name}: names with leading underscore is not allowed.'
                )
            if not value.strip():
                raise ValueError(
                    f'{self.step_name()}: Invalid parameter definition: {statement[2]}'
                )
            # there is a possibility that value contains # so sos_handle_parameter(name, val # aaa) will fail
            self.statements[idx] = [
                '!',
                f'#begin_parameter {name}\n{name} = sos_handle_parameter_({name.strip()!r}, {value}\n) #end_parameter {name}\n',
                statement[2].strip()
            ]
            self.parameters[name] = (value, statement[3])
            if input_directive and input_directive[0] < idx:
                self.substep_parameters.add(name)
    # handle tasks
    if not task_directive:
        self.task = ''
    else:
        start_task = task_directive[0] + 1
        # convert statement to task
        self.task = ''
        for statement in self.statements[start_task:]:
            if statement[0] == ':':
                if statement[1] in ('input', 'output', 'depends'):
                    raise ValueError(
                        f'{self.step_name()}: Step task should be defined as the last item in a SoS step'
                    )
                elif statement[1] == 'task':
                    raise ValueError(
                        f'{self.step_name()}: Only one task is allowed for a step'
                    )
                elif statement[1] == 'parameter':
                    raise ValueError(
                        f'{self.step_name()}: Parameters should be defined before step task'
                    )
                # ignore ...
                self.task += '\n'
            else:
                self.task += statement[1]
        self.task_params = self.statements[task_directive[0]][2]
        self.statements = self.statements[:task_directive[0]]
    # merge multiple statements at the end
    if len(self.statements) > 1 and self.statements[-1][0] == '!':
        starting = len(self.statements) - 1
        for idx in range(starting - 1, -1, -1):
            if self.statements[idx][0] == '!':
                starting = idx
            else:
                break
        # merge
        for idx in range(starting + 1, len(self.statements)):
            self.statements[starting][1] += self.statements[idx][1]
        # remove the rest of the statements
        self.statements = self.statements[:starting + 1]
    #
    # auto provides #859
    if not any(opt in self.options for opt in ('provides', 'shared')) and \
            len([x for x in self.statements
                 if x[0] == ':' and x[1] == 'output']) == 1:
        output_stmt = [x for x in self.statements
                       if x[0] == ':' and x[1] == 'output'][0][2]
        output_names = get_names_of_kwargs(output_stmt)
        self.options['namedprovides'] = repr(output_names)
[ "def", "finalize", "(", "self", ")", "->", "None", ":", "self", ".", "wrap_script", "(", ")", "if", "not", "self", ".", "statements", ":", "self", ".", "task", "=", "''", "return", "# handle tasks", "input_directive", "=", "[", "idx", "for", "idx", ",", "statement", "in", "enumerate", "(", "self", ".", "statements", ")", "if", "statement", "[", "0", "]", "==", "':'", "and", "statement", "[", "1", "]", "==", "'input'", "]", "task_directive", "=", "[", "idx", "for", "idx", ",", "statement", "in", "enumerate", "(", "self", ".", "statements", ")", "if", "statement", "[", "0", "]", "==", "':'", "and", "statement", "[", "1", "]", "==", "'task'", "]", "if", "len", "(", "task_directive", ")", ">", "1", ":", "raise", "ValueError", "(", "'Only one task statement is allowed in a step'", ")", "# handle parameter", "for", "idx", ",", "statement", "in", "enumerate", "(", "self", ".", "statements", ")", ":", "if", "statement", "[", "0", "]", "==", "':'", "and", "statement", "[", "1", "]", "==", "'parameter'", ":", "if", "task_directive", "and", "task_directive", "[", "0", "]", "<", "idx", ":", "raise", "ValueError", "(", "'Parameter statement is not allowed in tasks.'", ")", "if", "'='", "not", "in", "statement", "[", "2", "]", ":", "if", "':'", "in", "statement", "[", "2", "]", ":", "if", "not", "is_type_hint", "(", "statement", "[", "2", "]", ")", ":", "raise", "ValueError", "(", "f'Invalid type trait in parameter specification {statement[2]}'", ")", "name", ",", "value", "=", "statement", "[", "2", "]", ".", "split", "(", "':'", ")", "else", ":", "name", "=", "statement", "[", "2", "]", "value", "=", "'str'", "else", ":", "name", ",", "value", "=", "statement", "[", "2", "]", ".", "split", "(", "'='", ",", "1", ")", "# ignore type trait if a default value is specified", "name", "=", "name", ".", "split", "(", "':'", ")", "[", "0", "]", "name", "=", "name", ".", "strip", "(", ")", "if", "name", ".", "startswith", "(", "'_'", ")", ":", "raise", "ValueError", "(", "f'Invalid parameter name {name}: names with leading underscore is not allowed.'", ")", "if", "not", "value", ".", "strip", "(", ")", ":", "raise", "ValueError", "(", "f'{self.step_name()}: Invalid parameter definition: {statement[2]}'", ")", "# there is a possibility that value contains # so sos_handle_parameter(name, val # aaa) will fail", "self", ".", "statements", "[", "idx", "]", "=", "[", "'!'", ",", "f'#begin_parameter {name}\\n{name} = sos_handle_parameter_({name.strip()!r}, {value}\\n) #end_parameter {name}\\n'", ",", "statement", "[", "2", "]", ".", "strip", "(", ")", "]", "self", ".", "parameters", "[", "name", "]", "=", "(", "value", ",", "statement", "[", "3", "]", ")", "if", "input_directive", "and", "input_directive", "[", "0", "]", "<", "idx", ":", "self", ".", "substep_parameters", ".", "add", "(", "name", ")", "# handle tasks", "if", "not", "task_directive", ":", "self", ".", "task", "=", "''", "else", ":", "start_task", "=", "task_directive", "[", "0", "]", "+", "1", "# convert statement to task", "self", ".", "task", "=", "''", "for", "statement", "in", "self", ".", "statements", "[", "start_task", ":", "]", ":", "if", "statement", "[", "0", "]", "==", "':'", ":", "if", "statement", "[", "1", "]", "in", "(", "'input'", ",", "'output'", ",", "'depends'", ")", ":", "raise", "ValueError", "(", "f'{self.step_name()}: Step task should be defined as the last item in a SoS step'", ")", "elif", "statement", "[", "1", "]", "==", "'task'", ":", "raise", "ValueError", "(", "f'{self.step_name()}: Only one task is allowed for a step'", 
")", "elif", "statement", "[", "1", "]", "==", "'parameter'", ":", "raise", "ValueError", "(", "f'{self.step_name()}: Parameters should be defined before step task'", ")", "# ignore ...", "self", ".", "task", "+=", "'\\n'", "else", ":", "self", ".", "task", "+=", "statement", "[", "1", "]", "self", ".", "task_params", "=", "self", ".", "statements", "[", "task_directive", "[", "0", "]", "]", "[", "2", "]", "self", ".", "statements", "=", "self", ".", "statements", "[", ":", "task_directive", "[", "0", "]", "]", "# merge multiple statments at the end", "if", "len", "(", "self", ".", "statements", ")", ">", "1", "and", "self", ".", "statements", "[", "-", "1", "]", "[", "0", "]", "==", "'!'", ":", "starting", "=", "len", "(", "self", ".", "statements", ")", "-", "1", "for", "idx", "in", "range", "(", "starting", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "if", "self", ".", "statements", "[", "idx", "]", "[", "0", "]", "==", "'!'", ":", "starting", "=", "idx", "else", ":", "break", "# merge", "for", "idx", "in", "range", "(", "starting", "+", "1", ",", "len", "(", "self", ".", "statements", ")", ")", ":", "self", ".", "statements", "[", "starting", "]", "[", "1", "]", "+=", "self", ".", "statements", "[", "idx", "]", "[", "1", "]", "# remove the rest of the statements", "self", ".", "statements", "=", "self", ".", "statements", "[", ":", "starting", "+", "1", "]", "#", "# auto provides #859", "if", "not", "any", "(", "opt", "in", "self", ".", "options", "for", "opt", "in", "(", "'provides'", ",", "'shared'", ")", ")", "and", "len", "(", "[", "x", "for", "x", "in", "self", ".", "statements", "if", "x", "[", "0", "]", "==", "':'", "and", "x", "[", "1", "]", "==", "'output'", "]", ")", "==", "1", ":", "output_stmt", "=", "[", "x", "for", "x", "in", "self", ".", "statements", "if", "x", "[", "0", "]", "==", "':'", "and", "x", "[", "1", "]", "==", "'output'", "]", "[", "0", "]", "[", "2", "]", "output_names", "=", "get_names_of_kwargs", "(", "output_stmt", ")", "self", ".", "options", "[", "'namedprovides'", "]", "=", "repr", "(", "output_names", ")" ]
47
19.269231
def get_witness(self, work, siglum, text_class=WitnessText): """Returns a `WitnessText` representing the file associated with `work` and `siglum`. Combined, `work` and `siglum` form the basis of a filename for retrieving the text. :param work: name of work :type work: `str` :param siglum: siglum of witness :type siglum: `str` :rtype: `WitnessText` """ filename = os.path.join(work, siglum + '.txt') self._logger.debug('Creating WitnessText object from {}'.format( filename)) with open(os.path.join(self._path, filename), encoding='utf-8') \ as fh: content = fh.read() return text_class(work, siglum, content, self._tokenizer)
[ "def", "get_witness", "(", "self", ",", "work", ",", "siglum", ",", "text_class", "=", "WitnessText", ")", ":", "filename", "=", "os", ".", "path", ".", "join", "(", "work", ",", "siglum", "+", "'.txt'", ")", "self", ".", "_logger", ".", "debug", "(", "'Creating WitnessText object from {}'", ".", "format", "(", "filename", ")", ")", "with", "open", "(", "os", ".", "path", ".", "join", "(", "self", ".", "_path", ",", "filename", ")", ",", "encoding", "=", "'utf-8'", ")", "as", "fh", ":", "content", "=", "fh", ".", "read", "(", ")", "return", "text_class", "(", "work", ",", "siglum", ",", "content", ",", "self", ".", "_tokenizer", ")" ]
36.285714
18.52381
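A minimal usage sketch for get_witness above; the corpus class name and its constructor arguments are assumptions for illustration, not a confirmed API:

    # Hypothetical corpus rooted at a directory of work/siglum text files.
    corpus = Corpus('/data/texts', tokenizer)  # 'Corpus' and its signature are assumed
    # Reads /data/texts/Iliad/base.txt and wraps it in a WitnessText.
    witness = corpus.get_witness('Iliad', 'base')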
def get_resource_ids(self):
        """
        Get resource ids as a list.

        :return: List of resource ids or "unknown"
        """
        resids = []
        if self.dutinformations:
            for info in self.dutinformations:
                resids.append(info.resource_id)
            return resids
        return "unknown"
[ "def", "get_resource_ids", "(", "self", ")", ":", "resids", "=", "[", "]", "if", "self", ".", "dutinformations", ":", "for", "info", "in", "self", ".", "dutinformations", ":", "resids", ".", "append", "(", "info", ".", "resource_id", ")", "return", "resids", "return", "\"unknown\"" ]
27.25
11.75
def plot_covariance(
        mean, cov=None, variance=1.0, std=None, interval=None,
        ellipse=None, title=None, axis_equal=True,
        show_semiaxis=False, show_center=True,
        facecolor=None, edgecolor=None,
        fc='none', ec='#004080',
        alpha=1.0, xlim=None, ylim=None,
        ls='solid'):
    """
    Plots the covariance ellipse for the 2D normal defined by (mean, cov)

    `variance` is the normal sigma^2 that we want to plot. If list-like,
    an ellipse is plotted for each value. E.g. [1,2] will plot the
    sigma^2 = 1 and sigma^2 = 2 ellipses. Alternatively, use std for the
    standard deviation, in which case `variance` will be ignored.

    ellipse is a (angle,width,height) tuple containing the angle in radians,
    and width and height radii.

    You may provide either cov or ellipse, but not both.

    Parameters
    ----------
    mean : row vector like (2x1)
        The mean of the normal

    cov : ndarray-like
        2x2 covariance matrix

    variance : float, default 1, or iterable float, optional
        Variance of the plotted ellipse. May specify std or interval instead.
        If iterable, such as (1, 2**2, 3**2), then ellipses will be drawn
        for all in the list.

    std : float, or iterable float, optional
        Standard deviation of the plotted ellipse. If specified, variance
        is ignored, and interval must be `None`.
        If iterable, such as (1, 2, 3), then ellipses will be drawn
        for all in the list.

    interval : float range [0,1), or iterable float, optional
        Confidence interval for the plotted ellipse. For example, .68 (for
        68%) gives roughly one standard deviation. If specified, variance
        is ignored and `std` must be `None`.
        If iterable, such as (.68, .95), then ellipses will be drawn
        for all in the list.

    ellipse: (float, float, float)
        Instead of a covariance, plots an ellipse described by (angle, width,
        height), where angle is in radians, and the width and height are the
        minor and major sub-axis radii. `cov` must be `None`.

    title: str, optional
        title for the plot

    axis_equal: bool, default=True
        Use the same scale for the x-axis and y-axis to ensure the aspect
        ratio is correct.

    show_semiaxis: bool, default=False
        Draw the semiaxis of the ellipse

    show_center: bool, default=True
        Mark the center of the ellipse with a cross

    facecolor, fc: color, default=None
        If specified, fills the ellipse with the specified color. `fc` is an
        allowed abbreviation

    edgecolor, ec: color, default=None
        If specified, overrides the default color sequence for the edge color
        of the ellipse. `ec` is an allowed abbreviation

    alpha: float range [0,1], default=1.
        alpha value for the ellipse

    xlim: float or (float,float), default=None
        specifies the limits for the x-axis

    ylim: float or (float,float), default=None
        specifies the limits for the y-axis

    ls: str, default='solid'
        line style for the edge of the ellipse
    """
    from matplotlib.patches import Ellipse
    import matplotlib.pyplot as plt

    if cov is not None and ellipse is not None:
        raise ValueError('You cannot specify both cov and ellipse')

    if cov is None and ellipse is None:
        raise ValueError('Specify one of cov or ellipse')

    if facecolor is None:
        facecolor = fc

    if edgecolor is None:
        edgecolor = ec

    if cov is not None:
        ellipse = covariance_ellipse(cov)

    if axis_equal:
        plt.axis('equal')

    if title is not None:
        plt.title(title)

    ax = plt.gca()

    angle = np.degrees(ellipse[0])
    width = ellipse[1] * 2.
    height = ellipse[2] * 2.
std = _std_tuple_of(variance, std, interval) for sd in std: e = Ellipse(xy=mean, width=sd*width, height=sd*height, angle=angle, facecolor=facecolor, edgecolor=edgecolor, alpha=alpha, lw=2, ls=ls) ax.add_patch(e) x, y = mean if show_center: plt.scatter(x, y, marker='+', color=edgecolor) if xlim is not None: ax.set_xlim(xlim) if ylim is not None: ax.set_ylim(ylim) if show_semiaxis: a = ellipse[0] h, w = height/4, width/4 plt.plot([x, x+ h*cos(a+np.pi/2)], [y, y + h*sin(a+np.pi/2)]) plt.plot([x, x+ w*cos(a)], [y, y + w*sin(a)])
[ "def", "plot_covariance", "(", "mean", ",", "cov", "=", "None", ",", "variance", "=", "1.0", ",", "std", "=", "None", ",", "interval", "=", "None", ",", "ellipse", "=", "None", ",", "title", "=", "None", ",", "axis_equal", "=", "True", ",", "show_semiaxis", "=", "False", ",", "show_center", "=", "True", ",", "facecolor", "=", "None", ",", "edgecolor", "=", "None", ",", "fc", "=", "'none'", ",", "ec", "=", "'#004080'", ",", "alpha", "=", "1.0", ",", "xlim", "=", "None", ",", "ylim", "=", "None", ",", "ls", "=", "'solid'", ")", ":", "from", "matplotlib", ".", "patches", "import", "Ellipse", "import", "matplotlib", ".", "pyplot", "as", "plt", "if", "cov", "is", "not", "None", "and", "ellipse", "is", "not", "None", ":", "raise", "ValueError", "(", "'You cannot specify both cov and ellipse'", ")", "if", "cov", "is", "None", "and", "ellipse", "is", "None", ":", "raise", "ValueError", "(", "'Specify one of cov or ellipse'", ")", "if", "facecolor", "is", "None", ":", "facecolor", "=", "fc", "if", "edgecolor", "is", "None", ":", "edgecolor", "=", "ec", "if", "cov", "is", "not", "None", ":", "ellipse", "=", "covariance_ellipse", "(", "cov", ")", "if", "axis_equal", ":", "plt", ".", "axis", "(", "'equal'", ")", "if", "title", "is", "not", "None", ":", "plt", ".", "title", "(", "title", ")", "ax", "=", "plt", ".", "gca", "(", ")", "angle", "=", "np", ".", "degrees", "(", "ellipse", "[", "0", "]", ")", "width", "=", "ellipse", "[", "1", "]", "*", "2.", "height", "=", "ellipse", "[", "2", "]", "*", "2.", "std", "=", "_std_tuple_of", "(", "variance", ",", "std", ",", "interval", ")", "for", "sd", "in", "std", ":", "e", "=", "Ellipse", "(", "xy", "=", "mean", ",", "width", "=", "sd", "*", "width", ",", "height", "=", "sd", "*", "height", ",", "angle", "=", "angle", ",", "facecolor", "=", "facecolor", ",", "edgecolor", "=", "edgecolor", ",", "alpha", "=", "alpha", ",", "lw", "=", "2", ",", "ls", "=", "ls", ")", "ax", ".", "add_patch", "(", "e", ")", "x", ",", "y", "=", "mean", "if", "show_center", ":", "plt", ".", "scatter", "(", "x", ",", "y", ",", "marker", "=", "'+'", ",", "color", "=", "edgecolor", ")", "if", "xlim", "is", "not", "None", ":", "ax", ".", "set_xlim", "(", "xlim", ")", "if", "ylim", "is", "not", "None", ":", "ax", ".", "set_ylim", "(", "ylim", ")", "if", "show_semiaxis", ":", "a", "=", "ellipse", "[", "0", "]", "h", ",", "w", "=", "height", "/", "4", ",", "width", "/", "4", "plt", ".", "plot", "(", "[", "x", ",", "x", "+", "h", "*", "cos", "(", "a", "+", "np", ".", "pi", "/", "2", ")", "]", ",", "[", "y", ",", "y", "+", "h", "*", "sin", "(", "a", "+", "np", ".", "pi", "/", "2", ")", "]", ")", "plt", ".", "plot", "(", "[", "x", ",", "x", "+", "w", "*", "cos", "(", "a", ")", "]", ",", "[", "y", ",", "y", "+", "w", "*", "sin", "(", "a", ")", "]", ")" ]
30.479167
21.868056
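A short sketch of calling plot_covariance, assuming the function and its covariance_ellipse helper are importable from the surrounding module (that import is elided here):

    import numpy as np
    import matplotlib.pyplot as plt

    # 2D mean and covariance; one ellipse is drawn per requested standard deviation.
    mean = (2.0, 5.0)
    cov = np.array([[2.0, 0.5],
                    [0.5, 1.0]])
    plot_covariance(mean, cov=cov, std=[1, 2, 3], title='1-3 sigma ellipses')
    plt.show()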
def abspath(cur_file, parent=0) -> str: """ Absolute path Args: cur_file: __file__ or file or path str parent: level of parent to look for Returns: str """ file_path = os.path.abspath(cur_file).replace('\\', '/') if os.path.isdir(file_path) and parent == 0: return file_path adj = 1 - os.path.isdir(file_path) return '/'.join(file_path.split('/')[:-(parent + adj)])
[ "def", "abspath", "(", "cur_file", ",", "parent", "=", "0", ")", "->", "str", ":", "file_path", "=", "os", ".", "path", ".", "abspath", "(", "cur_file", ")", ".", "replace", "(", "'\\\\'", ",", "'/'", ")", "if", "os", ".", "path", ".", "isdir", "(", "file_path", ")", "and", "parent", "==", "0", ":", "return", "file_path", "adj", "=", "1", "-", "os", ".", "path", ".", "isdir", "(", "file_path", ")", "return", "'/'", ".", "join", "(", "file_path", ".", "split", "(", "'/'", ")", "[", ":", "-", "(", "parent", "+", "adj", ")", "]", ")" ]
27.533333
17.8
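Worked examples of the path arithmetic in abspath, assuming '/tmp/a/b/c.txt' exists as a file (so the isdir adjustment applies):

    abspath('/tmp/a/b/c.txt')            # -> '/tmp/a/b' (containing directory)
    abspath('/tmp/a/b/c.txt', parent=1)  # -> '/tmp/a'   (one level further up)
    # For an existing directory, parent=0 returns the directory itself.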
def inverted(values, input_min=0, input_max=1): """ Returns the inversion of the supplied values (*input_min* becomes *input_max*, *input_max* becomes *input_min*, `input_min + 0.1` becomes `input_max - 0.1`, etc.). All items in *values* are assumed to be between *input_min* and *input_max* (which default to 0 and 1 respectively), and the output will be in the same range. For example:: from gpiozero import MCP3008, PWMLED from gpiozero.tools import inverted from signal import pause led = PWMLED(4) pot = MCP3008(channel=0) led.source = inverted(pot) pause() """ values = _normalize(values) if input_min >= input_max: raise ValueError('input_min must be smaller than input_max') for v in values: yield input_min + input_max - v
[ "def", "inverted", "(", "values", ",", "input_min", "=", "0", ",", "input_max", "=", "1", ")", ":", "values", "=", "_normalize", "(", "values", ")", "if", "input_min", ">=", "input_max", ":", "raise", "ValueError", "(", "'input_min must be smaller than input_max'", ")", "for", "v", "in", "values", ":", "yield", "input_min", "+", "input_max", "-", "v" ]
34.333333
19.083333
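A plain numeric demonstration of inverted, assuming _normalize passes ordinary iterables through unchanged (an assumption about that internal helper):

    list(inverted([0.0, 0.25, 1.0]))                   # [1.0, 0.75, 0.0]
    list(inverted([2, 5], input_min=0, input_max=10))  # [8, 5]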
def export_avg_losses(ekey, dstore): """ :param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object """ dskey = ekey[0] oq = dstore['oqparam'] dt = oq.loss_dt() name, value, tags = _get_data(dstore, dskey, oq.hazard_stats().items()) writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) assets = get_assets(dstore) for tag, values in zip(tags, value.transpose(1, 0, 2)): dest = dstore.build_fname(name, tag, 'csv') array = numpy.zeros(len(values), dt) for l, lt in enumerate(dt.names): array[lt] = values[:, l] writer.save(compose_arrays(assets, array), dest) return writer.getsaved()
[ "def", "export_avg_losses", "(", "ekey", ",", "dstore", ")", ":", "dskey", "=", "ekey", "[", "0", "]", "oq", "=", "dstore", "[", "'oqparam'", "]", "dt", "=", "oq", ".", "loss_dt", "(", ")", "name", ",", "value", ",", "tags", "=", "_get_data", "(", "dstore", ",", "dskey", ",", "oq", ".", "hazard_stats", "(", ")", ".", "items", "(", ")", ")", "writer", "=", "writers", ".", "CsvWriter", "(", "fmt", "=", "writers", ".", "FIVEDIGITS", ")", "assets", "=", "get_assets", "(", "dstore", ")", "for", "tag", ",", "values", "in", "zip", "(", "tags", ",", "value", ".", "transpose", "(", "1", ",", "0", ",", "2", ")", ")", ":", "dest", "=", "dstore", ".", "build_fname", "(", "name", ",", "tag", ",", "'csv'", ")", "array", "=", "numpy", ".", "zeros", "(", "len", "(", "values", ")", ",", "dt", ")", "for", "l", ",", "lt", "in", "enumerate", "(", "dt", ".", "names", ")", ":", "array", "[", "lt", "]", "=", "values", "[", ":", ",", "l", "]", "writer", ".", "save", "(", "compose_arrays", "(", "assets", ",", "array", ")", ",", "dest", ")", "return", "writer", ".", "getsaved", "(", ")" ]
38.166667
11.611111
def str(password, opslimit=OPSLIMIT_INTERACTIVE, memlimit=MEMLIMIT_INTERACTIVE):
    """
    Hashes a password with a random salt, using the memory-hard argon2i
    construct and returning an ASCII string that has all the needed info
    to check against a future password.

    The default settings for opslimit and memlimit are those deemed
    correct for the interactive user login case.

    :param bytes password:
    :param int opslimit:
    :param int memlimit:
    :rtype: bytes

    .. versionadded:: 1.2
    """
    return nacl.bindings.crypto_pwhash_str_alg(password, opslimit, memlimit, ALG)
[ "def", "str", "(", "password", ",", "opslimit", "=", "OPSLIMIT_INTERACTIVE", ",", "memlimit", "=", "MEMLIMIT_INTERACTIVE", ")", ":", "return", "nacl", ".", "bindings", ".", "crypto_pwhash_str_alg", "(", "password", ",", "opslimit", ",", "memlimit", ",", "ALG", ")" ]
32.130435
18.478261
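A hedged usage sketch, assuming this helper is exposed as nacl.pwhash.str (the body's use of nacl.bindings suggests it lives in PyNaCl's pwhash module) and that the result can be checked with nacl.pwhash.verify:

    import nacl.pwhash

    hashed = nacl.pwhash.str(b'correct horse battery staple')
    # The returned bytes embed the salt and limits, so no extra state is needed.
    assert nacl.pwhash.verify(hashed, b'correct horse battery staple')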
def _inputcooker_store(self, char): """Put the cooked data in the correct queue""" if self.sb: self.sbdataq = self.sbdataq + char else: self.inputcooker_store_queue(char)
[ "def", "_inputcooker_store", "(", "self", ",", "char", ")", ":", "if", "self", ".", "sb", ":", "self", ".", "sbdataq", "=", "self", ".", "sbdataq", "+", "char", "else", ":", "self", ".", "inputcooker_store_queue", "(", "char", ")" ]
35.5
10.833333
def gemini_writer(self, f_handle):
        """
        Write out a GEMINI formatted OT ephemeris.

        This is just a hack of SSD Horizons output.
        """
        f_handle.write(GEMINI_HEADER)
        # Date__(UT)__HR:MN Date_________JDUT R.A.___(ICRF/J2000.0)___DEC dRA*cosD d(DEC)/dt
        #          1         2         3         4         5         6         7         8         9
        # 123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890
        # '  2019-Jan-30 00:00 01 46 56.46 +10 28 54.9 01 47 56.17 +10 34 27.6 3.520
        for coordinate in self.coordinates:
            date = coordinate.obstime.datetime.strftime('%Y-%b-%d %H:%M')[:17]
            f_handle.write(" {:16} {:17.9f} {:27} {:+8.5f} {:+8.5f}\n".format(date,
                           coordinate.obstime.jd,
                           coordinate.to_string('hmsdms', sep=' ', precision=4, pad=True)[:27],
                           float(coordinate.dra),
                           float(coordinate.ddec)), )
        f_handle.write(GEMINI_FOOTER)
        return
[ "def", "gemini_writer", "(", "self", ",", "f_handle", ")", ":", "f_handle", ".", "write", "(", "GEMINI_HEADER", ")", "# Date__(UT)__HR:MN Date_________JDUT R.A.___(ICRF/J2000.0)___DEC dRA*cosD d(DEC)/dt", "# 1 2 3 4 5 6 7 8 9", "# 123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890", "# ' 2019-Jan-30 00:00 01 46 56.46 +10 28 54.9 01 47 56.17 +10 34 27.6 3.520", "for", "coordinate", "in", "self", ".", "coordinates", ":", "date", "=", "coordinate", ".", "obstime", ".", "datetime", ".", "strftime", "(", "'%Y-%b-%d %H:%M'", ")", "[", ":", "17", "]", "f_handle", ".", "write", "(", "\" {:16} {:17.9f} {:27} {:+8.5f} {:+8.5f}\\n\"", ".", "format", "(", "date", ",", "coordinate", ".", "obstime", ".", "jd", ",", "coordinate", ".", "to_string", "(", "'hmsdms'", ",", "sep", "=", "' '", ",", "precision", "=", "4", ",", "pad", "=", "True", ")", "[", ":", "27", "]", ",", "float", "(", "coordinate", ".", "dra", ")", ",", "float", "(", "coordinate", ".", "ddec", ")", ")", ",", ")", "f_handle", ".", "write", "(", "GEMINI_FOOTER", ")", "return" ]
70.521739
40.869565
def _get_object_as_soft(self): """Return object as SOFT formatted string.""" soft = [] if self.database is not None: soft.append(self.database._get_object_as_soft()) soft += ["^%s = %s" % (self.geotype, self.name), self._get_metadata_as_string()] for subset in self.subsets.values(): soft.append(subset._get_object_as_soft()) soft += ["^%s = %s" % (self.geotype, self.name), self._get_columns_as_string(), self._get_table_as_string()] return "\n".join(soft)
[ "def", "_get_object_as_soft", "(", "self", ")", ":", "soft", "=", "[", "]", "if", "self", ".", "database", "is", "not", "None", ":", "soft", ".", "append", "(", "self", ".", "database", ".", "_get_object_as_soft", "(", ")", ")", "soft", "+=", "[", "\"^%s = %s\"", "%", "(", "self", ".", "geotype", ",", "self", ".", "name", ")", ",", "self", ".", "_get_metadata_as_string", "(", ")", "]", "for", "subset", "in", "self", ".", "subsets", ".", "values", "(", ")", ":", "soft", ".", "append", "(", "subset", ".", "_get_object_as_soft", "(", ")", ")", "soft", "+=", "[", "\"^%s = %s\"", "%", "(", "self", ".", "geotype", ",", "self", ".", "name", ")", ",", "self", ".", "_get_columns_as_string", "(", ")", ",", "self", ".", "_get_table_as_string", "(", ")", "]", "return", "\"\\n\"", ".", "join", "(", "soft", ")" ]
44.307692
10.384615
def __load_project(path):
    '''
    Load a docker-compose project from path

    :param path: path to the directory containing the docker-compose file
    :return: the loaded project, or a standardized error result
    '''
    file_path = __get_docker_file_path(path)
    if file_path is None:
        msg = 'Could not find docker-compose file at {0}'.format(path)
        return __standardize_result(False, msg, None, None)
    return __load_project_from_file_path(file_path)
[ "def", "__load_project", "(", "path", ")", ":", "file_path", "=", "__get_docker_file_path", "(", "path", ")", "if", "file_path", "is", "None", ":", "msg", "=", "'Could not find docker-compose file at {0}'", ".", "format", "(", "path", ")", "return", "__standardize_result", "(", "False", ",", "msg", ",", "None", ",", "None", ")", "return", "__load_project_from_file_path", "(", "file_path", ")" ]
30.642857
17.5
def get_unique_families(hkls): """ Returns unique families of Miller indices. Families must be permutations of each other. Args: hkls ([h, k, l]): List of Miller indices. Returns: {hkl: multiplicity}: A dict with unique hkl and multiplicity. """ # TODO: Definitely can be sped up. def is_perm(hkl1, hkl2): h1 = np.abs(hkl1) h2 = np.abs(hkl2) return all([i == j for i, j in zip(sorted(h1), sorted(h2))]) unique = collections.defaultdict(list) for hkl1 in hkls: found = False for hkl2 in unique.keys(): if is_perm(hkl1, hkl2): found = True unique[hkl2].append(hkl1) break if not found: unique[hkl1].append(hkl1) pretty_unique = {} for k, v in unique.items(): pretty_unique[sorted(v)[-1]] = len(v) return pretty_unique
[ "def", "get_unique_families", "(", "hkls", ")", ":", "# TODO: Definitely can be sped up.", "def", "is_perm", "(", "hkl1", ",", "hkl2", ")", ":", "h1", "=", "np", ".", "abs", "(", "hkl1", ")", "h2", "=", "np", ".", "abs", "(", "hkl2", ")", "return", "all", "(", "[", "i", "==", "j", "for", "i", ",", "j", "in", "zip", "(", "sorted", "(", "h1", ")", ",", "sorted", "(", "h2", ")", ")", "]", ")", "unique", "=", "collections", ".", "defaultdict", "(", "list", ")", "for", "hkl1", "in", "hkls", ":", "found", "=", "False", "for", "hkl2", "in", "unique", ".", "keys", "(", ")", ":", "if", "is_perm", "(", "hkl1", ",", "hkl2", ")", ":", "found", "=", "True", "unique", "[", "hkl2", "]", ".", "append", "(", "hkl1", ")", "break", "if", "not", "found", ":", "unique", "[", "hkl1", "]", ".", "append", "(", "hkl1", ")", "pretty_unique", "=", "{", "}", "for", "k", ",", "v", "in", "unique", ".", "items", "(", ")", ":", "pretty_unique", "[", "sorted", "(", "v", ")", "[", "-", "1", "]", "]", "=", "len", "(", "v", ")", "return", "pretty_unique" ]
26.787879
17.878788
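A small worked example of the family grouping in get_unique_families:

    hkls = [(1, 0, 0), (0, 1, 0), (0, 0, 1), (1, 1, 0)]
    get_unique_families(hkls)
    # {(1, 0, 0): 3, (1, 1, 0): 1} -- the three axis directions collapse into one family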
def to_cartesian(r, theta, theta_units="radians"):
    """
    Converts polar r, theta to cartesian x, y.
    """
    assert theta_units in ['radians', 'degrees'],\
        "kwarg theta_units must be specified in radians or degrees"

    # Convert to radians
    if theta_units == "degrees":
        theta = to_radians(theta)

    theta = to_proper_radians(theta)
    x = r * cos(theta)
    y = r * sin(theta)

    return x, y
[ "def", "to_cartesian", "(", "r", ",", "theta", ",", "theta_units", "=", "\"radians\"", ")", ":", "assert", "theta_units", "in", "[", "'radians'", ",", "'degrees'", "]", ",", "\"kwarg theta_units must specified in radians or degrees\"", "# Convert to radians", "if", "theta_units", "==", "\"degrees\"", ":", "theta", "=", "to_radians", "(", "theta", ")", "theta", "=", "to_proper_radians", "(", "theta", ")", "x", "=", "r", "*", "cos", "(", "theta", ")", "y", "=", "r", "*", "sin", "(", "theta", ")", "return", "x", ",", "y" ]
25.5
16.625
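A quick check of to_cartesian (90 degrees should land on the positive y-axis):

    x, y = to_cartesian(2, 90, theta_units='degrees')
    round(x, 10), round(y, 10)   # (0.0, 2.0)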
def vms(message, level=1): """Writes the specified message *only* if verbose output is enabled.""" if verbose is not None and verbose != False: if isinstance(verbose, bool) or (isinstance(verbose, int) and level <= verbose): std(message)
[ "def", "vms", "(", "message", ",", "level", "=", "1", ")", ":", "if", "verbose", "is", "not", "None", "and", "verbose", "!=", "False", ":", "if", "isinstance", "(", "verbose", ",", "bool", ")", "or", "(", "isinstance", "(", "verbose", ",", "int", ")", "and", "level", "<=", "verbose", ")", ":", "std", "(", "message", ")" ]
52.2
17.2
def search(geo_coords, mode=2, verbose=True):
    """
    Query the reverse geocoder for a single (lat, lon) tuple or a list of such tuples
    """
    if not isinstance(geo_coords, tuple) and not isinstance(geo_coords, list):
        raise TypeError('Expecting a tuple or a tuple/list of tuples')
    elif not isinstance(geo_coords[0], tuple):
        geo_coords = [geo_coords]

    _rg = RGeocoder(mode=mode, verbose=verbose)
    return _rg.query(geo_coords)
[ "def", "search", "(", "geo_coords", ",", "mode", "=", "2", ",", "verbose", "=", "True", ")", ":", "if", "not", "isinstance", "(", "geo_coords", ",", "tuple", ")", "and", "not", "isinstance", "(", "geo_coords", ",", "list", ")", ":", "raise", "TypeError", "(", "'Expecting a tuple or a tuple/list of tuples'", ")", "elif", "not", "isinstance", "(", "geo_coords", "[", "0", "]", ",", "tuple", ")", ":", "geo_coords", "=", "[", "geo_coords", "]", "_rg", "=", "RGeocoder", "(", "mode", "=", "mode", ",", "verbose", "=", "verbose", ")", "return", "_rg", ".", "query", "(", "geo_coords", ")" ]
37.454545
13.454545
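A usage sketch for search; the record schema shown in the comment ('name', 'admin1', 'cc' keys) follows the reverse-geocoder convention and is an assumption here:

    # A bare (lat, lon) tuple is wrapped into a single-element list internally.
    results = search((51.5074, -0.1278))
    results[0]   # e.g. {'name': 'London', 'admin1': 'England', 'cc': 'GB', ...}

    # Batch query with a list of tuples; mode switches between the library's
    # two query modes (assumed: 1 = single-process, 2 = multi-process).
    results = search([(51.5074, -0.1278), (40.7128, -74.0060)], mode=1)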
def handle_error(self, failure): """ Provides basic error information for bad requests. If the error was an HttpError or DNSLookupError, it prints more specific information. """ self.logger.error(repr(failure)) if failure.check(HttpError): response = failure.value.response self.logger.error(u'HttpError on %s', response.url) self.logger.error(u'HttpError Code: %s', response.status) if response.status in (401, 403): # If the error is from invalid login, tell the user self.logger.error( "Credentials failed. Either add/update the current credentials " "or remove them to enable auto auth" ) elif failure.check(DNSLookupError): request = failure.request self.logger.error(u'DNSLookupError on %s', request.url)
[ "def", "handle_error", "(", "self", ",", "failure", ")", ":", "self", ".", "logger", ".", "error", "(", "repr", "(", "failure", ")", ")", "if", "failure", ".", "check", "(", "HttpError", ")", ":", "response", "=", "failure", ".", "value", ".", "response", "self", ".", "logger", ".", "error", "(", "u'HttpError on %s'", ",", "response", ".", "url", ")", "self", ".", "logger", ".", "error", "(", "u'HttpError Code: %s'", ",", "response", ".", "status", ")", "if", "response", ".", "status", "in", "(", "401", ",", "403", ")", ":", "# If the error is from invalid login, tell the user", "self", ".", "logger", ".", "error", "(", "\"Credentials failed. Either add/update the current credentials \"", "\"or remove them to enable auto auth\"", ")", "elif", "failure", ".", "check", "(", "DNSLookupError", ")", ":", "request", "=", "failure", ".", "request", "self", ".", "logger", ".", "error", "(", "u'DNSLookupError on %s'", ",", "request", ".", "url", ")" ]
43.571429
14.333333
def hlen(key, host=None, port=None, db=None, password=None): ''' Returns number of fields of a hash. .. versionadded:: 2017.7.0 CLI Example: .. code-block:: bash salt '*' redis.hlen foo_hash ''' server = _connect(host, port, db, password) return server.hlen(key)
[ "def", "hlen", "(", "key", ",", "host", "=", "None", ",", "port", "=", "None", ",", "db", "=", "None", ",", "password", "=", "None", ")", ":", "server", "=", "_connect", "(", "host", ",", "port", ",", "db", ",", "password", ")", "return", "server", ".", "hlen", "(", "key", ")" ]
20.928571
22.928571
def aboveAt(self, offset=0):
        """ Returns a point in the center of the region's top side, shifted
        vertically by ``offset`` (a negative ``offset`` places the point above
        the region) """
        return Location(self.getX() + (self.getW() / 2), self.getY() + offset)
[ "def", "aboveAt", "(", "self", ",", "offset", "=", "0", ")", ":", "return", "Location", "(", "self", ".", "getX", "(", ")", "+", "(", "self", ".", "getW", "(", ")", "/", "2", ")", ",", "self", ".", "getY", "(", ")", "+", "offset", ")" ]
56
12.5
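A tiny illustration, assuming a Sikuli-style Region with getX/getY/getW accessors (the concrete region object is hypothetical):

    # region at x=100, y=200, width=50: center of the top edge is (125, 200)
    region.aboveAt()      # Location(125, 200)
    region.aboveAt(-30)   # Location(125, 170) -- 30 px above the region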
def create_apply_graph(self, signature, input_tensors, name):
    """See `ModuleImpl.create_apply_graph`."""
    signature_def = self._meta_graph.signature_def.get(signature)
    meta_graph = meta_graph_pb2.MetaGraphDef()
    meta_graph.CopyFrom(self._meta_graph)
    apply_graph = tf_v1.get_default_graph()
    infeed_map = tensor_info.build_input_map(signature_def.inputs,
                                             input_tensors)

    # Build an input map to feed when importing the apply-graph by augmenting the
    # state_map with the input args. This allows an input to override a tensor
    # from the state-graph.
    feed_map = dict(self._state_map)
    # If we are applying the module in a function with a TPUReplicateContext, we
    # must capture the state tensors in generating our feedmap and prune out
    # assign ops. Function graph semantics are different in that all ops are
    # executed regardless of dependency.
    # TODO(b/112575006): The following adds functionality of function call
    # within a TPU context. Work to generalize this for all function calls is
    # ongoing.
    if self._is_tpu_graph_function():
      for k, v in self._state_map.items():
        feed_map[k] = apply_graph.capture(v)
      meta_graph_lib.prune_unused_nodes(meta_graph, signature_def)
      # After we prune the metagraph def, we might need to prune away
      # infeeds which no longer exist.
      meta_graph_lib.prune_feed_map(meta_graph, infeed_map)
    elif apply_graph.building_function:
      raise NotImplementedError(
          "Using TF-Hub module within a TensorFlow defined function "
          "is currently not supported.")

    # As state ops in the apply graph are unused, replace them with Placeholders
    # so that in a hierarchical instantiation, apply_graph state ops are
    # ignored.
    replace_apply_state(
        meta_graph,
        list_registered_stateful_ops_without_inputs(), feed_map)
    feed_map.update(infeed_map)

    # Make state tensors enter the current context. This way the Module can be
    # applied inside a control flow structure such as a while_loop.
    control_flow = apply_graph._get_control_flow_context()  # pylint: disable=protected-access
    if control_flow:
      for key, value in sorted(feed_map.items()):
        feed_map[key] = control_flow.AddValue(value)

    # Don't mark the name as used at this point - import_scoped_meta_graph will
    # start using it.
    absolute_scope_name = apply_graph.unique_name(name, mark_as_used=False)
    relative_scope_name = absolute_scope_name.split("/")[-1]

    import_collections = [
        # In most cases ASSET_FILEPATHS are only used for the TABLE_INITIALIZERS
        # ops, however one could create a graph that uses an asset at any other
        # time. So every time we bring in the tensor that has the asset
        # filename, we must annotate it as such, so later re-exports have that
        # semantic information and can handle it.
        tf_v1.GraphKeys.ASSET_FILEPATHS,
        tf_v1.GraphKeys.COND_CONTEXT,
        tf_v1.GraphKeys.WHILE_CONTEXT,
    ]
    if self._trainable:
      import_collections.extend([tf_v1.GraphKeys.UPDATE_OPS])

    meta_graph_lib.filter_collections(meta_graph, import_collections)
    meta_graph_lib.prefix_shared_name_attributes(meta_graph,
                                                 absolute_scope_name)
    if len(meta_graph.collection_def) and self._is_tpu_graph_function():
      raise NotImplementedError(
          "Applying modules with collections inside TPU functions is not "
          "supported.")

    tf_v1.train.import_meta_graph(
        meta_graph,
        input_map=feed_map,
        import_scope=relative_scope_name)
    fix_colocation_after_import(input_map=feed_map,
                                absolute_import_scope=absolute_scope_name)

    def get_tensor(name):
      # When trying to output an input tensor there are no nodes created within
      # the apply scope. So one must look into the input map.
try: return feed_map[name] except KeyError: return apply_graph.get_tensor_by_name( meta_graph_lib.prepend_name_scope( name, import_scope=absolute_scope_name)) return tensor_info.build_output_map(signature_def.outputs, get_tensor)
[ "def", "create_apply_graph", "(", "self", ",", "signature", ",", "input_tensors", ",", "name", ")", ":", "signature_def", "=", "self", ".", "_meta_graph", ".", "signature_def", ".", "get", "(", "signature", ")", "meta_graph", "=", "meta_graph_pb2", ".", "MetaGraphDef", "(", ")", "meta_graph", ".", "CopyFrom", "(", "self", ".", "_meta_graph", ")", "apply_graph", "=", "tf_v1", ".", "get_default_graph", "(", ")", "infeed_map", "=", "tensor_info", ".", "build_input_map", "(", "signature_def", ".", "inputs", ",", "input_tensors", ")", "# Build a input map to feed when importing the apply-graph by augmenting the", "# state_map with the input args. This allows an input to override a tensor", "# from the state-graph.", "feed_map", "=", "dict", "(", "self", ".", "_state_map", ")", "# If we are applying the module in a function with a TPUReplicateContext, we", "# must capture the state tensors in generating our feedmap and prune out", "# assign ops. Function graph semantics are different in that all ops are", "# executed regardless of dependency.", "# TODO(b/112575006): The following adds functionality of function call", "# within a TPU context. Work to generalize this for all function calls is", "# ongoing.", "if", "self", ".", "_is_tpu_graph_function", "(", ")", ":", "for", "k", ",", "v", "in", "self", ".", "_state_map", ".", "items", "(", ")", ":", "feed_map", "[", "k", "]", "=", "apply_graph", ".", "capture", "(", "v", ")", "meta_graph_lib", ".", "prune_unused_nodes", "(", "meta_graph", ",", "signature_def", ")", "# After we prune the metagraph def, we might need to prune away", "# infeeds which no longer exist.", "meta_graph_lib", ".", "prune_feed_map", "(", "meta_graph", ",", "infeed_map", ")", "elif", "apply_graph", ".", "building_function", ":", "raise", "NotImplementedError", "(", "\"Using TF-Hub module within a TensorFlow defined function \"", "\"is currently not supported.\"", ")", "# As state ops in the apply graph are unused, replace them with Placeholders", "# so that in a heirarchical instantiation, apply_graph state ops are", "# ignored.", "replace_apply_state", "(", "meta_graph", ",", "list_registered_stateful_ops_without_inputs", "(", ")", ",", "feed_map", ")", "feed_map", ".", "update", "(", "infeed_map", ")", "# Make state tensors enter the current context. This way the Module can be", "# applied inside a control flow structure such as a while_loop.", "control_flow", "=", "apply_graph", ".", "_get_control_flow_context", "(", ")", "# pylint: disable=protected-access", "if", "control_flow", ":", "for", "key", ",", "value", "in", "sorted", "(", "feed_map", ".", "items", "(", ")", ")", ":", "feed_map", "[", "key", "]", "=", "control_flow", ".", "AddValue", "(", "value", ")", "# Don't mark the name as used at this point - import_scoped_meta_graph will", "# start using it.", "absolute_scope_name", "=", "apply_graph", ".", "unique_name", "(", "name", ",", "mark_as_used", "=", "False", ")", "relative_scope_name", "=", "absolute_scope_name", ".", "split", "(", "\"/\"", ")", "[", "-", "1", "]", "import_collections", "=", "[", "# In most cases ASSET_FILEPATHS are only used for the TABLE_INITIALIZERS", "# ops, however one could create a graph that uses an asset at any other", "# time. 
As so everytime we bring the tensor with that has the asset", "# filename we must annotate it as so, so later re-exports have that", "# semantic information and can handle it.", "tf_v1", ".", "GraphKeys", ".", "ASSET_FILEPATHS", ",", "tf_v1", ".", "GraphKeys", ".", "COND_CONTEXT", ",", "tf_v1", ".", "GraphKeys", ".", "WHILE_CONTEXT", ",", "]", "if", "self", ".", "_trainable", ":", "import_collections", ".", "extend", "(", "[", "tf_v1", ".", "GraphKeys", ".", "UPDATE_OPS", "]", ")", "meta_graph_lib", ".", "filter_collections", "(", "meta_graph", ",", "import_collections", ")", "meta_graph_lib", ".", "prefix_shared_name_attributes", "(", "meta_graph", ",", "absolute_scope_name", ")", "if", "len", "(", "meta_graph", ".", "collection_def", ")", "and", "self", ".", "_is_tpu_graph_function", "(", ")", ":", "raise", "NotImplementedError", "(", "\"Applying modules with collections inside TPU functions is not \"", "\"supported.\"", ")", "tf_v1", ".", "train", ".", "import_meta_graph", "(", "meta_graph", ",", "input_map", "=", "feed_map", ",", "import_scope", "=", "relative_scope_name", ")", "fix_colocation_after_import", "(", "input_map", "=", "feed_map", ",", "absolute_import_scope", "=", "absolute_scope_name", ")", "def", "get_tensor", "(", "name", ")", ":", "# When trying to output an input tensor there are no nodes created within", "# the apply scope. So one must look into the input map.", "try", ":", "return", "feed_map", "[", "name", "]", "except", "KeyError", ":", "return", "apply_graph", ".", "get_tensor_by_name", "(", "meta_graph_lib", ".", "prepend_name_scope", "(", "name", ",", "import_scope", "=", "absolute_scope_name", ")", ")", "return", "tensor_info", ".", "build_output_map", "(", "signature_def", ".", "outputs", ",", "get_tensor", ")" ]
46.755556
22.377778
def expose_event(self, widget, event): """When an area of the window is exposed, we just copy out of the server-side, off-screen surface to that area. """ x, y, width, height = event.area self.logger.debug("surface is %s" % self.surface) if self.surface is not None: win = widget.get_window() cr = win.cairo_create() # set clip area for exposed region cr.rectangle(x, y, width, height) cr.clip() # Paint from off-screen surface cr.set_source_surface(self.surface, 0, 0) cr.set_operator(cairo.OPERATOR_SOURCE) cr.paint() return False
[ "def", "expose_event", "(", "self", ",", "widget", ",", "event", ")", ":", "x", ",", "y", ",", "width", ",", "height", "=", "event", ".", "area", "self", ".", "logger", ".", "debug", "(", "\"surface is %s\"", "%", "self", ".", "surface", ")", "if", "self", ".", "surface", "is", "not", "None", ":", "win", "=", "widget", ".", "get_window", "(", ")", "cr", "=", "win", ".", "cairo_create", "(", ")", "# set clip area for exposed region", "cr", ".", "rectangle", "(", "x", ",", "y", ",", "width", ",", "height", ")", "cr", ".", "clip", "(", ")", "# Paint from off-screen surface", "cr", ".", "set_source_surface", "(", "self", ".", "surface", ",", "0", ",", "0", ")", "cr", ".", "set_operator", "(", "cairo", ".", "OPERATOR_SOURCE", ")", "cr", ".", "paint", "(", ")", "return", "False" ]
34
12.9
def _GetVSSStoreIdentifiers(self, scan_node):
    """Determines the VSS store identifiers.

    Args:
      scan_node (SourceScanNode): scan node.

    Returns:
      list[str]: VSS store identifiers.

    Raises:
      ScannerError: if the format of the scan node is invalid or no mediator
          is provided and VSS store identifiers are found.
      UserAbort: if the user requested to abort.
    """
    if not scan_node or not scan_node.path_spec:
      raise errors.ScannerError('Invalid scan node.')

    volume_system = vshadow_volume_system.VShadowVolumeSystem()
    volume_system.Open(scan_node.path_spec)

    volume_identifiers = self._source_scanner.GetVolumeIdentifiers(
        volume_system)
    if not volume_identifiers:
      return []

    if not self._mediator:
      raise errors.ScannerError(
          'Unable to proceed. VSS stores found but no mediator to determine '
          'how they should be used.')

    try:
      volume_identifiers = self._mediator.GetVSSStoreIdentifiers(
          volume_system, volume_identifiers)
    except KeyboardInterrupt:
      raise errors.UserAbort('File system scan aborted.')

    return self._NormalizedVolumeIdentifiers(
        volume_system, volume_identifiers, prefix='vss')
[ "def", "_GetVSSStoreIdentifiers", "(", "self", ",", "scan_node", ")", ":", "if", "not", "scan_node", "or", "not", "scan_node", ".", "path_spec", ":", "raise", "errors", ".", "ScannerError", "(", "'Invalid scan node.'", ")", "volume_system", "=", "vshadow_volume_system", ".", "VShadowVolumeSystem", "(", ")", "volume_system", ".", "Open", "(", "scan_node", ".", "path_spec", ")", "volume_identifiers", "=", "self", ".", "_source_scanner", ".", "GetVolumeIdentifiers", "(", "volume_system", ")", "if", "not", "volume_identifiers", ":", "return", "[", "]", "if", "not", "self", ".", "_mediator", ":", "raise", "errors", ".", "ScannerError", "(", "'Unable to proceed. VSS stores found but no mediator to determine '", "'how they should be used.'", ")", "try", ":", "volume_identifiers", "=", "self", ".", "_mediator", ".", "GetVSSStoreIdentifiers", "(", "volume_system", ",", "volume_identifiers", ")", "except", "KeyboardInterrupt", ":", "raise", "errors", ".", "UserAbort", "(", "'File system scan aborted.'", ")", "return", "self", ".", "_NormalizedVolumeIdentifiers", "(", "volume_system", ",", "volume_identifiers", ",", "prefix", "=", "'vss'", ")" ]
30.948718
20.923077
def repeat_nd(a, repeats): """Return read-only view into input array with elements repeated. Zoom nD image by integer factors using nearest neighbor interpolation (box filter). Parameters ---------- a : array_like Input array. repeats : sequence of int The number of repetitions to apply along each dimension of input array. Examples -------- >>> repeat_nd([[1, 2], [3, 4]], (2, 2)) array([[1, 1, 2, 2], [1, 1, 2, 2], [3, 3, 4, 4], [3, 3, 4, 4]]) """ a = numpy.asarray(a) reshape = [] shape = [] strides = [] for i, j, k in zip(a.strides, a.shape, repeats): shape.extend((j, k)) strides.extend((i, 0)) reshape.append(j * k) return numpy.lib.stride_tricks.as_strided( a, shape, strides, writeable=False).reshape(reshape)
[ "def", "repeat_nd", "(", "a", ",", "repeats", ")", ":", "a", "=", "numpy", ".", "asarray", "(", "a", ")", "reshape", "=", "[", "]", "shape", "=", "[", "]", "strides", "=", "[", "]", "for", "i", ",", "j", ",", "k", "in", "zip", "(", "a", ".", "strides", ",", "a", ".", "shape", ",", "repeats", ")", ":", "shape", ".", "extend", "(", "(", "j", ",", "k", ")", ")", "strides", ".", "extend", "(", "(", "i", ",", "0", ")", ")", "reshape", ".", "append", "(", "j", "*", "k", ")", "return", "numpy", ".", "lib", ".", "stride_tricks", ".", "as_strided", "(", "a", ",", "shape", ",", "strides", ",", "writeable", "=", "False", ")", ".", "reshape", "(", "reshape", ")" ]
26.40625
20.53125
def oauth2(self): """ returns the oauth2 class """ if self._url.endswith("/oauth2"): url = self._url else: url = self._url + "/oauth2" return _oauth2.oauth2(oauth_url=url, securityHandler=self._securityHandler, proxy_url=self._proxy_url, proxy_port=self._proxy_port)
[ "def", "oauth2", "(", "self", ")", ":", "if", "self", ".", "_url", ".", "endswith", "(", "\"/oauth2\"", ")", ":", "url", "=", "self", ".", "_url", "else", ":", "url", "=", "self", ".", "_url", "+", "\"/oauth2\"", "return", "_oauth2", ".", "oauth2", "(", "oauth_url", "=", "url", ",", "securityHandler", "=", "self", ".", "_securityHandler", ",", "proxy_url", "=", "self", ".", "_proxy_url", ",", "proxy_port", "=", "self", ".", "_proxy_port", ")" ]
34.75
11.583333
def _format_list(self, extracted_list): """Format a list of traceback entry tuples for printing. Given a list of tuples as returned by extract_tb() or extract_stack(), return a list of strings ready for printing. Each string in the resulting list corresponds to the item with the same index in the argument list. Each string ends in a newline; the strings may contain internal newlines as well, for those items whose source text line is not None. Lifted almost verbatim from traceback.py """ Colors = self.Colors list = [] for filename, lineno, name, line in extracted_list[:-1]: item = ' File %s"%s"%s, line %s%d%s, in %s%s%s\n' % \ (Colors.filename, filename, Colors.Normal, Colors.lineno, lineno, Colors.Normal, Colors.name, name, Colors.Normal) if line: item += ' %s\n' % line.strip() list.append(item) # Emphasize the last entry filename, lineno, name, line = extracted_list[-1] item = '%s File %s"%s"%s, line %s%d%s, in %s%s%s%s\n' % \ (Colors.normalEm, Colors.filenameEm, filename, Colors.normalEm, Colors.linenoEm, lineno, Colors.normalEm, Colors.nameEm, name, Colors.normalEm, Colors.Normal) if line: item += '%s %s%s\n' % (Colors.line, line.strip(), Colors.Normal) list.append(item) #from pprint import pformat; print 'LISTTB', pformat(list) # dbg return list
[ "def", "_format_list", "(", "self", ",", "extracted_list", ")", ":", "Colors", "=", "self", ".", "Colors", "list", "=", "[", "]", "for", "filename", ",", "lineno", ",", "name", ",", "line", "in", "extracted_list", "[", ":", "-", "1", "]", ":", "item", "=", "' File %s\"%s\"%s, line %s%d%s, in %s%s%s\\n'", "%", "(", "Colors", ".", "filename", ",", "filename", ",", "Colors", ".", "Normal", ",", "Colors", ".", "lineno", ",", "lineno", ",", "Colors", ".", "Normal", ",", "Colors", ".", "name", ",", "name", ",", "Colors", ".", "Normal", ")", "if", "line", ":", "item", "+=", "' %s\\n'", "%", "line", ".", "strip", "(", ")", "list", ".", "append", "(", "item", ")", "# Emphasize the last entry", "filename", ",", "lineno", ",", "name", ",", "line", "=", "extracted_list", "[", "-", "1", "]", "item", "=", "'%s File %s\"%s\"%s, line %s%d%s, in %s%s%s%s\\n'", "%", "(", "Colors", ".", "normalEm", ",", "Colors", ".", "filenameEm", ",", "filename", ",", "Colors", ".", "normalEm", ",", "Colors", ".", "linenoEm", ",", "lineno", ",", "Colors", ".", "normalEm", ",", "Colors", ".", "nameEm", ",", "name", ",", "Colors", ".", "normalEm", ",", "Colors", ".", "Normal", ")", "if", "line", ":", "item", "+=", "'%s %s%s\\n'", "%", "(", "Colors", ".", "line", ",", "line", ".", "strip", "(", ")", ",", "Colors", ".", "Normal", ")", "list", ".", "append", "(", "item", ")", "#from pprint import pformat; print 'LISTTB', pformat(list) # dbg", "return", "list" ]
44.621622
19.297297
def delete_db_instance(self, dbid): ''' Delete DB ''' if not self.connect_to_aws_rds(): return False try: database = self.rdsc.delete_dbinstance(dbid, skip_final_snapshot=True) print database except: return False else: return True
[ "def", "delete_db_instance", "(", "self", ",", "dbid", ")", ":", "if", "not", "self", ".", "connect_to_aws_rds", "(", ")", ":", "return", "False", "try", ":", "database", "=", "self", ".", "rdsc", ".", "delete_dbinstance", "(", "dbid", ",", "skip_final_snapshot", "=", "True", ")", "print", "database", "except", ":", "return", "False", "else", ":", "return", "True" ]
30.833333
18
def is_bridge(self): """bool: Is this zone a bridge?""" # Since this does not change over time (?) check whether we already # know the answer. If so, there is no need to go further if self._is_bridge is not None: return self._is_bridge # if not, we have to get it from the zone topology. This will set # self._is_bridge for us for next time, so we won't have to do this # again self._parse_zone_group_state() return self._is_bridge
[ "def", "is_bridge", "(", "self", ")", ":", "# Since this does not change over time (?) check whether we already", "# know the answer. If so, there is no need to go further", "if", "self", ".", "_is_bridge", "is", "not", "None", ":", "return", "self", ".", "_is_bridge", "# if not, we have to get it from the zone topology. This will set", "# self._is_bridge for us for next time, so we won't have to do this", "# again", "self", ".", "_parse_zone_group_state", "(", ")", "return", "self", ".", "_is_bridge" ]
45.909091
17.363636
def register_column(self, column, expr, deltas=None, checkpoints=None, odo_kwargs=None): """Explicitly map a single bound column to a collection of blaze expressions. The expressions need to have ``timestamp`` and ``as_of`` columns. Parameters ---------- column : BoundColumn The pipeline dataset to map to the given expressions. expr : Expr The baseline values. deltas : Expr, optional The deltas for the data. checkpoints : Expr, optional The forward fill checkpoints for the data. odo_kwargs : dict, optional The keyword arguments to forward to the odo calls internally. See Also -------- :func:`zipline.pipeline.loaders.blaze.from_blaze` """ self._table_expressions[column] = ExprData( expr, deltas, checkpoints, odo_kwargs, )
[ "def", "register_column", "(", "self", ",", "column", ",", "expr", ",", "deltas", "=", "None", ",", "checkpoints", "=", "None", ",", "odo_kwargs", "=", "None", ")", ":", "self", ".", "_table_expressions", "[", "column", "]", "=", "ExprData", "(", "expr", ",", "deltas", ",", "checkpoints", ",", "odo_kwargs", ",", ")" ]
32
16.515152
async def _redirect(self, response_obj): ''' Calls the _check_redirect method of the supplied response object in order to determine if the http status code indicates a redirect. Returns: Response: May or may not be the result of recursive calls due to redirects! Notes: If it does redirect, it calls the appropriate method with the redirect location, returning the response object. Furthermore, if there is a redirect, this function is recursive in a roundabout way, storing the previous response object in `.history_objects`. ''' redirect, force_get, location = False, None, None if 300 <= response_obj.status_code < 400: if response_obj.status_code == 303: self.data, self.json, self.files = None, None, None if response_obj.status_code in [301, 305]: # redirect / force GET / location redirect = True force_get = False else: redirect = True force_get = True location = response_obj.headers['Location'] if redirect: allow_redirect = True redirect_uri = urlparse(location.strip()) # relative redirect if not redirect_uri.netloc: self.uri = urlunparse( (self.scheme, self.host, *redirect_uri[2:])) # absolute-redirect else: location = location.strip() if self.auth is not None: if not self.auth_off_domain: allow_redirect = self._location_auth_protect(location) self.uri = location l_scheme, l_netloc, *_ = urlparse(location) if l_scheme != self.scheme or l_netloc != self.host: await self._get_new_sock() # follow redirect with correct http method type if force_get: self.history_objects.append(response_obj) self.method = 'GET' else: self.history_objects.append(response_obj) self.max_redirects -= 1 try: if response_obj.headers['connection'].lower() == 'close': await self._get_new_sock() except KeyError: pass if allow_redirect: _, response_obj = await self.make_request() return response_obj
[ "async", "def", "_redirect", "(", "self", ",", "response_obj", ")", ":", "redirect", ",", "force_get", ",", "location", "=", "False", ",", "None", ",", "None", "if", "300", "<=", "response_obj", ".", "status_code", "<", "400", ":", "if", "response_obj", ".", "status_code", "==", "303", ":", "self", ".", "data", ",", "self", ".", "json", ",", "self", ".", "files", "=", "None", ",", "None", ",", "None", "if", "response_obj", ".", "status_code", "in", "[", "301", ",", "305", "]", ":", "# redirect / force GET / location", "redirect", "=", "True", "force_get", "=", "False", "else", ":", "redirect", "=", "True", "force_get", "=", "True", "location", "=", "response_obj", ".", "headers", "[", "'Location'", "]", "if", "redirect", ":", "allow_redirect", "=", "True", "redirect_uri", "=", "urlparse", "(", "location", ".", "strip", "(", ")", ")", "# relative redirect", "if", "not", "redirect_uri", ".", "netloc", ":", "self", ".", "uri", "=", "urlunparse", "(", "(", "self", ".", "scheme", ",", "self", ".", "host", ",", "*", "redirect_uri", "[", "2", ":", "]", ")", ")", "# absolute-redirect", "else", ":", "location", "=", "location", ".", "strip", "(", ")", "if", "self", ".", "auth", "is", "not", "None", ":", "if", "not", "self", ".", "auth_off_domain", ":", "allow_redirect", "=", "self", ".", "_location_auth_protect", "(", "location", ")", "self", ".", "uri", "=", "location", "l_scheme", ",", "l_netloc", ",", "", "*", "_", "=", "urlparse", "(", "location", ")", "if", "l_scheme", "!=", "self", ".", "scheme", "or", "l_netloc", "!=", "self", ".", "host", ":", "await", "self", ".", "_get_new_sock", "(", ")", "# follow redirect with correct http method type", "if", "force_get", ":", "self", ".", "history_objects", ".", "append", "(", "response_obj", ")", "self", ".", "method", "=", "'GET'", "else", ":", "self", ".", "history_objects", ".", "append", "(", "response_obj", ")", "self", ".", "max_redirects", "-=", "1", "try", ":", "if", "response_obj", ".", "headers", "[", "'connection'", "]", ".", "lower", "(", ")", "==", "'close'", ":", "await", "self", ".", "_get_new_sock", "(", ")", "except", "KeyError", ":", "pass", "if", "allow_redirect", ":", "_", ",", "response_obj", "=", "await", "self", ".", "make_request", "(", ")", "return", "response_obj" ]
39.492063
19.238095
def reread(self): """ Read and parse credentials file. If something goes wrong, log exception and continue. """ logger.debug("Loading credentials from %s", os.path.abspath(self.creds_filename)) creds = {} try: with self.open_creds() as fp: creds = yaml.safe_load(fp) except IOError: logger.info("No credentials file found at %s", os.path.abspath(self.creds_filename)) except: logger.exception("Error loading credentials file") if creds != self.creds: self.creds = creds return True return False
[ "def", "reread", "(", "self", ")", ":", "logger", ".", "debug", "(", "\"Loading credentials from %s\"", ",", "os", ".", "path", ".", "abspath", "(", "self", ".", "creds_filename", ")", ")", "creds", "=", "{", "}", "try", ":", "with", "self", ".", "open_creds", "(", ")", "as", "fp", ":", "creds", "=", "yaml", ".", "safe_load", "(", "fp", ")", "except", "IOError", ":", "logger", ".", "info", "(", "\"No credentials file found at %s\"", ",", "os", ".", "path", ".", "abspath", "(", "self", ".", "creds_filename", ")", ")", "except", ":", "logger", ".", "exception", "(", "\"Error loading credentials file\"", ")", "if", "creds", "!=", "self", ".", "creds", ":", "self", ".", "creds", "=", "creds", "return", "True", "return", "False" ]
34.2
14.2
def fullversion(): ''' Return all server information from catalina.sh version CLI Example: .. code-block:: bash salt '*' tomcat.fullversion ''' cmd = __catalina_home() + '/bin/catalina.sh version' ret = {} out = __salt__['cmd.run'](cmd).splitlines() for line in out: if not line: continue if ': ' in line: comps = line.split(': ') ret[comps[0]] = comps[1].lstrip() return ret
[ "def", "fullversion", "(", ")", ":", "cmd", "=", "__catalina_home", "(", ")", "+", "'/bin/catalina.sh version'", "ret", "=", "{", "}", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ")", ".", "splitlines", "(", ")", "for", "line", "in", "out", ":", "if", "not", "line", ":", "continue", "if", "': '", "in", "line", ":", "comps", "=", "line", ".", "split", "(", "': '", ")", "ret", "[", "comps", "[", "0", "]", "]", "=", "comps", "[", "1", "]", ".", "lstrip", "(", ")", "return", "ret" ]
22.95
21.65
def serialize(obj): """JSON serializer that accepts datetime & date""" from datetime import datetime, date, time if isinstance(obj, date) and not isinstance(obj, datetime): obj = datetime.combine(obj, time.min) if isinstance(obj, datetime): return obj.isoformat()
[ "def", "serialize", "(", "obj", ")", ":", "from", "datetime", "import", "datetime", ",", "date", ",", "time", "if", "isinstance", "(", "obj", ",", "date", ")", "and", "not", "isinstance", "(", "obj", ",", "datetime", ")", ":", "obj", "=", "datetime", ".", "combine", "(", "obj", ",", "time", ".", "min", ")", "if", "isinstance", "(", "obj", ",", "datetime", ")", ":", "return", "obj", ".", "isoformat", "(", ")" ]
41.285714
10.142857
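The serialize helper above is meant to be handed to json.dumps as its default hook; a minimal sketch:

    import json
    from datetime import date, datetime

    payload = {'day': date(2020, 1, 2), 'stamp': datetime(2020, 1, 2, 3, 4, 5)}
    json.dumps(payload, default=serialize)
    # '{"day": "2020-01-02T00:00:00", "stamp": "2020-01-02T03:04:05"}'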
def formatter_help(cls): """ Return a list of format specifiers and their documentation. """ result = [("raw", "Switch off the default field formatter.")] for name, method in globals().items(): if name.startswith("fmt_"): result.append((name[4:], method.__doc__.strip())) return result
[ "def", "formatter_help", "(", "cls", ")", ":", "result", "=", "[", "(", "\"raw\"", ",", "\"Switch off the default field formatter.\"", ")", "]", "for", "name", ",", "method", "in", "globals", "(", ")", ".", "items", "(", ")", ":", "if", "name", ".", "startswith", "(", "\"fmt_\"", ")", ":", "result", ".", "append", "(", "(", "name", "[", "4", ":", "]", ",", "method", ".", "__doc__", ".", "strip", "(", ")", ")", ")", "return", "result" ]
34.6
17.6
def illumg(method, target, ilusrc, et, fixref, abcorr, obsrvr, spoint):
    """
    Find the illumination angles (phase, incidence, and
    emission) at a specified surface point of a target body.

    The surface of the target body may be represented by a triaxial
    ellipsoid or by topographic data provided by DSK files.

    The illumination source is a specified ephemeris object.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/illumg_c.html

    :param method: Computation method.
    :type method: str
    :param target: Name of target body.
    :type target: str
    :param ilusrc: Name of illumination source.
    :type ilusrc: str
    :param et: Epoch in ephemeris seconds past J2000.
    :type et: float
    :param fixref: Body-fixed, body-centered target body frame.
    :type fixref: str
    :param abcorr: Desired aberration correction.
    :type abcorr: str
    :param obsrvr: Name of observing body.
    :type obsrvr: str
    :param spoint: Body-fixed coordinates of a target surface point.
    :type spoint: 3-Element Array of floats
    :return:
            Target surface point epoch,
            Vector from observer to target surface point,
            Phase angle at the surface point,
            Source incidence angle at the surface point,
            Emission angle at the surface point.
    :rtype: tuple
    """
    method = stypes.stringToCharP(method)
    target = stypes.stringToCharP(target)
    ilusrc = stypes.stringToCharP(ilusrc)
    et = ctypes.c_double(et)
    fixref = stypes.stringToCharP(fixref)
    abcorr = stypes.stringToCharP(abcorr)
    obsrvr = stypes.stringToCharP(obsrvr)
    spoint = stypes.toDoubleVector(spoint)
    trgepc = ctypes.c_double(0)
    srfvec = stypes.emptyDoubleVector(3)
    phase = ctypes.c_double(0)
    incdnc = ctypes.c_double(0)
    emissn = ctypes.c_double(0)
    libspice.illumg_c(method, target, ilusrc, et, fixref, abcorr, obsrvr,
                      spoint, ctypes.byref(trgepc), srfvec,
                      ctypes.byref(phase), ctypes.byref(incdnc),
                      ctypes.byref(emissn))
    return trgepc.value, stypes.cVectorToPython(srfvec), \
           phase.value, incdnc.value, emissn.value
[ "def", "illumg", "(", "method", ",", "target", ",", "ilusrc", ",", "et", ",", "fixref", ",", "abcorr", ",", "obsrvr", ",", "spoint", ")", ":", "method", "=", "stypes", ".", "stringToCharP", "(", "method", ")", "target", "=", "stypes", ".", "stringToCharP", "(", "target", ")", "ilusrc", "=", "stypes", ".", "stringToCharP", "(", "ilusrc", ")", "et", "=", "ctypes", ".", "c_double", "(", "et", ")", "fixref", "=", "stypes", ".", "stringToCharP", "(", "fixref", ")", "abcorr", "=", "stypes", ".", "stringToCharP", "(", "abcorr", ")", "obsrvr", "=", "stypes", ".", "stringToCharP", "(", "obsrvr", ")", "spoint", "=", "stypes", ".", "toDoubleVector", "(", "spoint", ")", "trgepc", "=", "ctypes", ".", "c_double", "(", "0", ")", "srfvec", "=", "stypes", ".", "emptyDoubleVector", "(", "3", ")", "phase", "=", "ctypes", ".", "c_double", "(", "0", ")", "incdnc", "=", "ctypes", ".", "c_double", "(", "0", ")", "emissn", "=", "ctypes", ".", "c_double", "(", "0", ")", "libspice", ".", "illumg_c", "(", "method", ",", "target", ",", "ilusrc", ",", "et", ",", "fixref", ",", "abcorr", ",", "obsrvr", ",", "spoint", ",", "ctypes", ".", "byref", "(", "trgepc", ")", ",", "srfvec", ",", "ctypes", ".", "byref", "(", "phase", ")", ",", "ctypes", ".", "byref", "(", "incdnc", ")", ",", "ctypes", ".", "byref", "(", "emissn", ")", ")", "return", "trgepc", ".", "value", ",", "stypes", ".", "cVectorToPython", "(", "srfvec", ")", ",", "phase", ".", "value", ",", "incdnc", ".", "value", ",", "emissn", ".", "value" ]
41.176471
16.392157
def _get_model_parameters_estimations(self, error_model): """ Infer model estimation method from the 'error_model'. Return an object of type ModelParametersEstimation. """ if error_model.dependance == NIDM_INDEPEDENT_ERROR: if error_model.variance_homo: estimation_method = STATO_OLS else: estimation_method = STATO_WLS else: estimation_method = STATO_GLS mpe = ModelParametersEstimation(estimation_method, self.software.id) return mpe
[ "def", "_get_model_parameters_estimations", "(", "self", ",", "error_model", ")", ":", "if", "error_model", ".", "dependance", "==", "NIDM_INDEPEDENT_ERROR", ":", "if", "error_model", ".", "variance_homo", ":", "estimation_method", "=", "STATO_OLS", "else", ":", "estimation_method", "=", "STATO_WLS", "else", ":", "estimation_method", "=", "STATO_GLS", "mpe", "=", "ModelParametersEstimation", "(", "estimation_method", ",", "self", ".", "software", ".", "id", ")", "return", "mpe" ]
34.625
17.25
def _process_flux_param(self, pval, wave): """Process individual model parameter representing flux.""" if isinstance(pval, u.Quantity): self._validate_flux_unit(pval.unit) outval = units.convert_flux(self._redshift_model(wave), pval, self._internal_flux_unit).value else: # Assume already in internal unit outval = pval return outval
[ "def", "_process_flux_param", "(", "self", ",", "pval", ",", "wave", ")", ":", "if", "isinstance", "(", "pval", ",", "u", ".", "Quantity", ")", ":", "self", ".", "_validate_flux_unit", "(", "pval", ".", "unit", ")", "outval", "=", "units", ".", "convert_flux", "(", "self", ".", "_redshift_model", "(", "wave", ")", ",", "pval", ",", "self", ".", "_internal_flux_unit", ")", ".", "value", "else", ":", "# Assume already in internal unit", "outval", "=", "pval", "return", "outval" ]
48.222222
12.777778
def str_append_hash(*args):
    """ Convert each argument to a lowercase string, concatenate them, then hash the result """

    ret_hash = ""
    for i in args:
        ret_hash += str(i).lower()

    return hash(ret_hash)
[ "def", "str_append_hash", "(", "*", "args", ")", ":", "ret_hash", "=", "\"\"", "for", "i", "in", "args", ":", "ret_hash", "+=", "str", "(", "i", ")", ".", "lower", "(", ")", "return", "hash", "(", "ret_hash", ")" ]
28.285714
17
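Usage sketch; note that Python salts str hashes per process (PYTHONHASHSEED), so the value is stable within one run but not across runs:

key = str_append_hash('User', 42, 'ACTIVE')
key == str_append_hash('user', 42, 'active')  # True: inputs are lowercased first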
def new_method_call(celf, destination, path, iface, method) :
    "creates a new DBUS.MESSAGE_TYPE_METHOD_CALL message."
    result = dbus.dbus_message_new_method_call \
      (
        destination.encode() if destination is not None else None,
        path.encode(),
        iface.encode() if iface is not None else None,
        method.encode(),
      )
    if result is None :
        raise CallFailed("dbus_message_new_method_call")
    #end if
    return \
        celf(result)
[ "def", "new_method_call", "(", "celf", ",", "destination", ",", "path", ",", "iface", ",", "method", ")", ":", "result", "=", "dbus", ".", "dbus_message_new_method_call", "(", "(", "lambda", ":", "None", ",", "lambda", ":", "destination", ".", "encode", "(", ")", ")", "[", "destination", "!=", "None", "]", "(", ")", ",", "path", ".", "encode", "(", ")", ",", "(", "lambda", ":", "None", ",", "lambda", ":", "iface", ".", "encode", "(", ")", ")", "[", "iface", "!=", "None", "]", "(", ")", ",", "method", ".", "encode", "(", ")", ",", ")", "if", "result", "==", "None", ":", "raise", "CallFailed", "(", "\"dbus_message_new_method_call\"", ")", "#end if", "return", "celf", "(", "result", ")" ]
38.928571
22.071429
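A usage sketch (assumption: the enclosing class is called Message here; destination and iface may be None, e.g. for peer-to-peer connections):

msg = Message.new_method_call(
    destination='org.freedesktop.DBus',
    path='/org/freedesktop/DBus',
    iface='org.freedesktop.DBus',
    method='ListNames',
)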
def _path_to_objs(path, include=['*', '.*'], exclude=['.*', '_*']):
    """Turn a path with optional globbing into a valid list of files,
    respecting include and exclude patterns.

    Parameters
    ----------
    path : str
        Path to process. Can be location of a file, folder or glob.
        Can be in uri-notation, can be relative or absolute or start with ~.
    include : list, optional
        Globbing patterns to require in result, defaults to ['*', '.*'].
    exclude : list, optional
        Globbing patterns to exclude from result, defaults to ['.*', '_*'].

    Returns
    -------
    objs : list
        List of valid files

    Notes
    -----
    - Doesn't show hidden files starting with '.' by default. To enable hidden
      files, make sure '.*' is in `include` and '.*' is not in `exclude`.
    - Doesn't show files starting with '_' by default. To enable these files,
      make sure '_*' is not in `exclude`.
    """
    if '://' in path:
        # don't modify when path is in uri-notation, except for local files
        if path.startswith('file://'):
            path = path[7:]
        else:
            return [path]

    path = os.path.abspath(os.path.expanduser(path))

    if os.path.isfile(path):
        if not path.lower().endswith(('.xlsx', '.xls')) and zipfile.is_zipfile(path):
            # zipfile misidentifies xlsx as archive of xml files
            with zipfile.ZipFile(path) as myzip:
                zipped = []
                for z in myzip.namelist():
                    z_fn = os.path.basename(z)
                    if z_fn != '' and any([fnmatch(z_fn, i) for i in include]) and \
                            not any([fnmatch(z_fn, e) for e in exclude]):
                        zipped.append(z)
                return [myzip.open(z) for z in zipped]
        else:
            return [path]
    elif os.path.isdir(path):
        cands = [os.path.abspath(os.path.join(path, p)) for p in os.listdir(path)]
        dirname = path
    else:
        cands = []
        dirname = os.path.dirname(path)

    include = list(chain.from_iterable(glob(os.path.join(dirname, i)) for i in include))
    exclude = list(chain.from_iterable(glob(os.path.join(dirname, e)) for e in exclude))
    objs = []

    if cands == []:
        cands = glob(path)

    for p in cands:
        if os.path.isfile(p) and p in include and p not in exclude:
            objs.append(p)

    zipped = [zipfile.is_zipfile(o) and not o.lower().endswith(('.xlsx', '.xls')) \
              for o in objs]
    toappend = []
    todelete = []
    for ix, o in enumerate(objs):
        # if zipfile in objs replace zipfile with its contents
        if zipped[ix]:
            for new_o in _path_to_objs(o):
                toappend.append(new_o)
            todelete.append(ix)
    shiftindex = 0
    for d in todelete:
        del objs[d - shiftindex]
        shiftindex += 1
    for new_o in toappend:
        objs.append(new_o)

    return objs
[ "def", "_path_to_objs", "(", "path", ",", "include", "=", "[", "'*'", ",", "'.*'", "]", ",", "exclude", "=", "[", "'.*'", ",", "'_*'", "]", ")", ":", "if", "'://'", "in", "path", ":", "# don't modify when path is in uri-notation, except for local files", "if", "path", ".", "startswith", "(", "'file://'", ")", ":", "path", "=", "path", "[", "7", ":", "]", "else", ":", "return", "[", "path", "]", "path", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "expanduser", "(", "path", ")", ")", "if", "os", ".", "path", ".", "isfile", "(", "path", ")", ":", "if", "not", "path", ".", "lower", "(", ")", ".", "endswith", "(", "(", "'.xlsx'", ",", "'.xls'", ")", ")", "and", "zipfile", ".", "is_zipfile", "(", "path", ")", ":", "# zipfile misidentifies xlsx as archive of xml files", "with", "zipfile", ".", "ZipFile", "(", "path", ")", "as", "myzip", ":", "zipped", "=", "[", "]", "for", "z", "in", "myzip", ".", "namelist", "(", ")", ":", "z_fn", "=", "os", ".", "path", ".", "basename", "(", "z", ")", "if", "z_fn", "!=", "''", "and", "any", "(", "[", "fnmatch", "(", "z_fn", ",", "i", ")", "for", "i", "in", "include", "]", ")", "and", "not", "any", "(", "[", "fnmatch", "(", "z_fn", ",", "e", ")", "for", "e", "in", "exclude", "]", ")", ":", "zipped", ".", "append", "(", "z", ")", "return", "[", "myzip", ".", "open", "(", "z", ")", "for", "z", "in", "zipped", "]", "else", ":", "return", "[", "path", "]", "elif", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "cands", "=", "[", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "path", ",", "p", ")", ")", "for", "p", "in", "os", ".", "listdir", "(", "path", ")", "]", "dirname", "=", "path", "else", ":", "cands", "=", "[", "]", "dirname", "=", "os", ".", "path", ".", "dirname", "(", "path", ")", "include", "=", "list", "(", "chain", ".", "from_iterable", "(", "glob", "(", "os", ".", "path", ".", "join", "(", "dirname", ",", "i", ")", ")", "for", "i", "in", "include", ")", ")", "exclude", "=", "list", "(", "chain", ".", "from_iterable", "(", "glob", "(", "os", ".", "path", ".", "join", "(", "dirname", ",", "e", ")", ")", "for", "e", "in", "exclude", ")", ")", "objs", "=", "[", "]", "if", "cands", "==", "[", "]", ":", "cands", "=", "glob", "(", "path", ")", "for", "p", "in", "cands", ":", "if", "os", ".", "path", ".", "isfile", "(", "p", ")", "and", "p", "in", "include", "and", "not", "p", "in", "exclude", ":", "objs", ".", "append", "(", "p", ")", "zipped", "=", "[", "zipfile", ".", "is_zipfile", "(", "o", ")", "and", "not", "o", ".", "lower", "(", ")", ".", "endswith", "(", "(", "'.xlsx'", ",", "'.xls'", ")", ")", "for", "o", "in", "objs", "]", "toappend", "=", "[", "]", "todelete", "=", "[", "]", "for", "ix", ",", "o", "in", "enumerate", "(", "objs", ")", ":", "# if zipfile in objs replace zipfile with its contents", "if", "zipped", "[", "ix", "]", ":", "for", "new_o", "in", "_path_to_objs", "(", "o", ")", ":", "toappend", ".", "append", "(", "new_o", ")", "todelete", ".", "append", "(", "ix", ")", "shiftindex", "=", "0", "for", "d", "in", "todelete", ":", "del", "objs", "[", "d", "-", "shiftindex", "]", "shiftindex", "+=", "1", "for", "new_o", "in", "toappend", ":", "objs", ".", "append", "(", "new_o", ")", "return", "objs" ]
34.223529
24.270588
def create(self, model):
    """ Given a model object instance, create it """

    signals.pre_create.send(model.__class__, model=model)
    signals.pre_save.send(model.__class__, model=model)

    param = self.to_pg(model)
    query = """
        INSERT INTO {table} ({dirty_cols})
        VALUES ({dirty_vals})
        RETURNING {cols};
        """

    query = query.format(
        cols=self.field_cols(model),
        dirty_cols=self.dirty_cols(model),
        dirty_vals=self.dirty_vals(model),
        table=model.rtype,
    )

    result = self.query(query, param=param)

    signals.post_create.send(model.__class__, model=model)
    signals.post_save.send(model.__class__, model=model)

    return model.merge(result[0], clean=True)
[ "def", "create", "(", "self", ",", "model", ")", ":", "signals", ".", "pre_create", ".", "send", "(", "model", ".", "__class__", ",", "model", "=", "model", ")", "signals", ".", "pre_save", ".", "send", "(", "model", ".", "__class__", ",", "model", "=", "model", ")", "param", "=", "self", ".", "to_pg", "(", "model", ")", "query", "=", "\"\"\"\n INSERT INTO {table} ({dirty_cols})\n VALUES ({dirty_vals})\n RETURNING {cols};\n \"\"\"", "query", "=", "query", ".", "format", "(", "cols", "=", "self", ".", "field_cols", "(", "model", ")", ",", "dirty_cols", "=", "self", ".", "dirty_cols", "(", "model", ")", ",", "dirty_vals", "=", "self", ".", "dirty_vals", "(", "model", ")", ",", "table", "=", "model", ".", "rtype", ",", ")", "result", "=", "self", ".", "query", "(", "query", ",", "param", "=", "param", ")", "signals", ".", "post_create", ".", "send", "(", "model", ".", "__class__", ",", "model", "=", "model", ")", "signals", ".", "post_save", ".", "send", "(", "model", ".", "__class__", ",", "model", "=", "model", ")", "return", "model", ".", "merge", "(", "result", "[", "0", "]", ",", "clean", "=", "True", ")" ]
31.076923
17.115385
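For a hypothetical model with rtype 'users' and dirty fields name and email, the formatted statement comes out roughly like this (identifiers illustrative; the %(...)s placeholders assume a psycopg2-style paramstyle filled in by dirty_vals):

# INSERT INTO users (name, email)
# VALUES (%(name)s, %(email)s)
# RETURNING id, name, email;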
def _pretrained_initializer(varname, weight_file, embedding_weight_file=None): """ We'll stub out all the initializers in the pretrained LM with a function that loads the weights from the file """ weight_name_map = {} for i in range(2): for j in range(8): # if we decide to add more layers root = 'RNN_{}/RNN/MultiRNNCell/Cell{}'.format(i, j) weight_name_map[root + '/rnn/lstm_cell/kernel'] = \ root + '/LSTMCell/W_0' weight_name_map[root + '/rnn/lstm_cell/bias'] = \ root + '/LSTMCell/B' weight_name_map[root + '/rnn/lstm_cell/projection/kernel'] = \ root + '/LSTMCell/W_P_0' # convert the graph name to that in the checkpoint varname_in_file = varname[5:] if varname_in_file.startswith('RNN'): varname_in_file = weight_name_map[varname_in_file] if varname_in_file == 'embedding': with h5py.File(embedding_weight_file, 'r') as fin: # Have added a special 0 index for padding not present # in the original model. embed_weights = fin[varname_in_file][...] weights = np.zeros( (embed_weights.shape[0] + 1, embed_weights.shape[1]), dtype=DTYPE ) weights[1:, :] = embed_weights else: with h5py.File(weight_file, 'r') as fin: if varname_in_file == 'char_embed': # Have added a special 0 index for padding not present # in the original model. char_embed_weights = fin[varname_in_file][...] weights = np.zeros( (char_embed_weights.shape[0] + 1, char_embed_weights.shape[1]), dtype=DTYPE ) weights[1:, :] = char_embed_weights else: weights = fin[varname_in_file][...] # Tensorflow initializers are callables that accept a shape parameter # and some optional kwargs def ret(shape, **kwargs): if list(shape) != list(weights.shape): raise ValueError( "Invalid shape initializing {0}, got {1}, expected {2}".format( varname_in_file, shape, weights.shape) ) return weights return ret
[ "def", "_pretrained_initializer", "(", "varname", ",", "weight_file", ",", "embedding_weight_file", "=", "None", ")", ":", "weight_name_map", "=", "{", "}", "for", "i", "in", "range", "(", "2", ")", ":", "for", "j", "in", "range", "(", "8", ")", ":", "# if we decide to add more layers", "root", "=", "'RNN_{}/RNN/MultiRNNCell/Cell{}'", ".", "format", "(", "i", ",", "j", ")", "weight_name_map", "[", "root", "+", "'/rnn/lstm_cell/kernel'", "]", "=", "root", "+", "'/LSTMCell/W_0'", "weight_name_map", "[", "root", "+", "'/rnn/lstm_cell/bias'", "]", "=", "root", "+", "'/LSTMCell/B'", "weight_name_map", "[", "root", "+", "'/rnn/lstm_cell/projection/kernel'", "]", "=", "root", "+", "'/LSTMCell/W_P_0'", "# convert the graph name to that in the checkpoint", "varname_in_file", "=", "varname", "[", "5", ":", "]", "if", "varname_in_file", ".", "startswith", "(", "'RNN'", ")", ":", "varname_in_file", "=", "weight_name_map", "[", "varname_in_file", "]", "if", "varname_in_file", "==", "'embedding'", ":", "with", "h5py", ".", "File", "(", "embedding_weight_file", ",", "'r'", ")", "as", "fin", ":", "# Have added a special 0 index for padding not present", "# in the original model.", "embed_weights", "=", "fin", "[", "varname_in_file", "]", "[", "...", "]", "weights", "=", "np", ".", "zeros", "(", "(", "embed_weights", ".", "shape", "[", "0", "]", "+", "1", ",", "embed_weights", ".", "shape", "[", "1", "]", ")", ",", "dtype", "=", "DTYPE", ")", "weights", "[", "1", ":", ",", ":", "]", "=", "embed_weights", "else", ":", "with", "h5py", ".", "File", "(", "weight_file", ",", "'r'", ")", "as", "fin", ":", "if", "varname_in_file", "==", "'char_embed'", ":", "# Have added a special 0 index for padding not present", "# in the original model.", "char_embed_weights", "=", "fin", "[", "varname_in_file", "]", "[", "...", "]", "weights", "=", "np", ".", "zeros", "(", "(", "char_embed_weights", ".", "shape", "[", "0", "]", "+", "1", ",", "char_embed_weights", ".", "shape", "[", "1", "]", ")", ",", "dtype", "=", "DTYPE", ")", "weights", "[", "1", ":", ",", ":", "]", "=", "char_embed_weights", "else", ":", "weights", "=", "fin", "[", "varname_in_file", "]", "[", "...", "]", "# Tensorflow initializers are callables that accept a shape parameter", "# and some optional kwargs", "def", "ret", "(", "shape", ",", "*", "*", "kwargs", ")", ":", "if", "list", "(", "shape", ")", "!=", "list", "(", "weights", ".", "shape", ")", ":", "raise", "ValueError", "(", "\"Invalid shape initializing {0}, got {1}, expected {2}\"", ".", "format", "(", "varname_in_file", ",", "shape", ",", "weights", ".", "shape", ")", ")", "return", "weights", "return", "ret" ]
40.070175
17.22807
def post(self, request, *args, **kwargs):
    """
    Returns a token identifying the user in Centrifugo.
    """
    current_timestamp = "%.0f" % time.time()
    user_id_str = u"{0}".format(request.user.id)

    token = generate_token(settings.CENTRIFUGE_SECRET, user_id_str,
                           "{0}".format(current_timestamp), info="")

    # we get all the channels to which the user can subscribe
    participant = Participant.objects.get(id=request.user.id)
    # we use the threads as channels ids
    channels = []
    for thread in Thread.managers.get_threads_where_participant_is_active(participant_id=participant.id):
        channels.append(
            build_channel(settings.CENTRIFUGO_MESSAGE_NAMESPACE, thread.id, thread.participants.all())
        )

    # we also have a channel to alert us about new threads
    threads_channel = build_channel(settings.CENTRIFUGO_THREAD_NAMESPACE, request.user.id, [request.user.id])
    # the user is the only one with access to this channel
    channels.append(threads_channel)

    # we return the information
    to_return = {
        'user': user_id_str,
        'timestamp': current_timestamp,
        'token': token,
        'connection_url': "{0}connection/".format(settings.CENTRIFUGE_ADDRESS),
        'channels': channels,
        'debug': settings.DEBUG,
    }
    return HttpResponse(json.dumps(to_return), content_type='application/json; charset=utf-8')
[ "def", "post", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "current_timestamp", "=", "\"%.0f\"", "%", "time", ".", "time", "(", ")", "user_id_str", "=", "u\"{0}\"", ".", "format", "(", "request", ".", "user", ".", "id", ")", "token", "=", "generate_token", "(", "settings", ".", "CENTRIFUGE_SECRET", ",", "user_id_str", ",", "\"{0}\"", ".", "format", "(", "current_timestamp", ")", ",", "info", "=", "\"\"", ")", "# we get all the channels to which the user can subscribe", "participant", "=", "Participant", ".", "objects", ".", "get", "(", "id", "=", "request", ".", "user", ".", "id", ")", "# we use the threads as channels ids", "channels", "=", "[", "]", "for", "thread", "in", "Thread", ".", "managers", ".", "get_threads_where_participant_is_active", "(", "participant_id", "=", "participant", ".", "id", ")", ":", "channels", ".", "append", "(", "build_channel", "(", "settings", ".", "CENTRIFUGO_MESSAGE_NAMESPACE", ",", "thread", ".", "id", ",", "thread", ".", "participants", ".", "all", "(", ")", ")", ")", "# we also have a channel to alert us about new threads", "threads_channel", "=", "build_channel", "(", "settings", ".", "CENTRIFUGO_THREAD_NAMESPACE", ",", "request", ".", "user", ".", "id", ",", "[", "request", ".", "user", ".", "id", "]", ")", "# he is the only one to have access to the channel", "channels", ".", "append", "(", "threads_channel", ")", "# we return the information", "to_return", "=", "{", "'user'", ":", "user_id_str", ",", "'timestamp'", ":", "current_timestamp", ",", "'token'", ":", "token", ",", "'connection_url'", ":", "\"{0}connection/\"", ".", "format", "(", "settings", ".", "CENTRIFUGE_ADDRESS", ")", ",", "'channels'", ":", "channels", ",", "'debug'", ":", "settings", ".", "DEBUG", ",", "}", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "to_return", ")", ",", "content_type", "=", "'application/json; charset=utf-8'", ")" ]
43.235294
27.588235
def execute(args):
    """
    Executes the *index* subprogram with parsed commandline *args*.
    """
    index_file = Config.instance().get_expanded("core", "index_file")

    # just print the file location?
    if args.location:
        print(index_file)
        return

    # just remove the index file?
    if args.remove:
        if os.path.exists(index_file):
            os.remove(index_file)
            print("removed index file {}".format(index_file))
        return

    # get modules to lookup
    lookup = [m.strip() for m in Config.instance().keys("modules")]
    if args.modules:
        lookup += args.modules

    print("loading tasks from {} module(s)".format(len(lookup)))

    # loop through modules, import everything to load tasks
    for modid in lookup:
        if not modid:
            continue

        if args.verbose:
            sys.stdout.write("loading module '{}'".format(modid))

        try:
            import_module(modid)
        except Exception as e:
            if not args.verbose:
                print("Error in module '{}': {}".format(colored(modid, "red"), str(e)))
            else:
                print("\n\nError in module '{}':".format(colored(modid, "red")))
                traceback.print_exc()
            continue

        if args.verbose:
            print(", {}".format(colored("done", style="bright")))

    # determine tasks to write into the index file
    seen_families = []
    task_classes = []
    lookup = [Task]
    while lookup:
        cls = lookup.pop(0)
        lookup.extend(cls.__subclasses__())

        # skip already seen task families
        if cls.task_family in seen_families:
            continue
        seen_families.append(cls.task_family)

        # skip when explicitly excluded
        if cls.exclude_index:
            continue

        # skip external tasks
        is_external_task = issubclass(cls, ExternalTask)
        if args.no_externals and is_external_task:
            continue

        # skip non-external tasks without run implementation
        run_is_callable = callable(getattr(cls, "run", None))
        run_is_abstract = getattr(cls.run, "__isabstractmethod__", False)
        if not is_external_task and (not run_is_callable or run_is_abstract):
            continue

        task_classes.append(cls)

    def get_task_params(cls):
        params = []
        for attr in dir(cls):
            member = getattr(cls, attr)
            if isinstance(member, luigi.Parameter):
                exclude = getattr(cls, "exclude_params_index", set())
                if not multi_match(attr, exclude, any):
                    params.append(attr.replace("_", "-"))
        return params

    def index_line(cls, params):
        # format: "module_id:task_family:param param ..."
        return "{}:{}:{}".format(cls.__module__, cls.task_family, " ".join(params))

    stats = OrderedDict()

    # write the index file
    if not os.path.exists(os.path.dirname(index_file)):
        os.makedirs(os.path.dirname(index_file))
    with open(index_file, "w") as f:
        for cls in task_classes:
            # get params
            params = get_task_params(cls)

            # fill stats
            if cls.__module__ not in stats:
                stats[cls.__module__] = []
            stats[cls.__module__].append((cls.task_family, params))

            f.write(index_line(cls, params) + "\n")

    # print stats
    if args.verbose:
        for mod, data in six.iteritems(stats):
            print("\nmodule '{}', {} task(s):".format(colored(mod, style="bright"), len(data)))
            for task_family, _ in data:
                print("  - {}".format(colored(task_family, "green")))
        print("")

    print("written {} task(s) to index file '{}'".format(len(task_classes), index_file))
[ "def", "execute", "(", "args", ")", ":", "index_file", "=", "Config", ".", "instance", "(", ")", ".", "get_expanded", "(", "\"core\"", ",", "\"index_file\"", ")", "# just print the file location?", "if", "args", ".", "location", ":", "print", "(", "index_file", ")", "return", "# just remove the index file?", "if", "args", ".", "remove", ":", "if", "os", ".", "path", ".", "exists", "(", "index_file", ")", ":", "os", ".", "remove", "(", "index_file", ")", "print", "(", "\"removed index file {}\"", ".", "format", "(", "index_file", ")", ")", "return", "# get modules to lookup", "lookup", "=", "[", "m", ".", "strip", "(", ")", "for", "m", "in", "Config", ".", "instance", "(", ")", ".", "keys", "(", "\"modules\"", ")", "]", "if", "args", ".", "modules", ":", "lookup", "+=", "args", ".", "modules", "print", "(", "\"loading tasks from {} module(s)\"", ".", "format", "(", "len", "(", "lookup", ")", ")", ")", "# loop through modules, import everything to load tasks", "for", "modid", "in", "lookup", ":", "if", "not", "modid", ":", "continue", "if", "args", ".", "verbose", ":", "sys", ".", "stdout", ".", "write", "(", "\"loading module '{}'\"", ".", "format", "(", "modid", ")", ")", "try", ":", "import_module", "(", "modid", ")", "except", "Exception", "as", "e", ":", "if", "not", "args", ".", "verbose", ":", "print", "(", "\"Error in module '{}': {}\"", ".", "format", "(", "colored", "(", "modid", ",", "\"red\"", ")", ",", "str", "(", "e", ")", ")", ")", "else", ":", "print", "(", "\"\\n\\nError in module '{}':\"", ".", "format", "(", "colored", "(", "modid", ",", "\"red\"", ")", ")", ")", "traceback", ".", "print_exc", "(", ")", "continue", "if", "args", ".", "verbose", ":", "print", "(", "\", {}\"", ".", "format", "(", "colored", "(", "\"done\"", ",", "style", "=", "\"bright\"", ")", ")", ")", "# determine tasks to write into the index file", "seen_families", "=", "[", "]", "task_classes", "=", "[", "]", "lookup", "=", "[", "Task", "]", "while", "lookup", ":", "cls", "=", "lookup", ".", "pop", "(", "0", ")", "lookup", ".", "extend", "(", "cls", ".", "__subclasses__", "(", ")", ")", "# skip already seen task families", "if", "cls", ".", "task_family", "in", "seen_families", ":", "continue", "seen_families", ".", "append", "(", "cls", ".", "task_family", ")", "# skip when explicitly excluded", "if", "cls", ".", "exclude_index", ":", "continue", "# skip external tasks", "is_external_task", "=", "issubclass", "(", "cls", ",", "ExternalTask", ")", "if", "args", ".", "no_externals", "and", "is_external_task", ":", "continue", "# skip non-external tasks without run implementation", "run_is_callable", "=", "callable", "(", "getattr", "(", "cls", ",", "\"run\"", ",", "None", ")", ")", "run_is_abstract", "=", "getattr", "(", "cls", ".", "run", ",", "\"__isabstractmethod__\"", ",", "False", ")", "if", "not", "is_external_task", "and", "(", "not", "run_is_callable", "or", "run_is_abstract", ")", ":", "continue", "task_classes", ".", "append", "(", "cls", ")", "def", "get_task_params", "(", "cls", ")", ":", "params", "=", "[", "]", "for", "attr", "in", "dir", "(", "cls", ")", ":", "member", "=", "getattr", "(", "cls", ",", "attr", ")", "if", "isinstance", "(", "member", ",", "luigi", ".", "Parameter", ")", ":", "exclude", "=", "getattr", "(", "cls", ",", "\"exclude_params_index\"", ",", "set", "(", ")", ")", "if", "not", "multi_match", "(", "attr", ",", "exclude", ",", "any", ")", ":", "params", ".", "append", "(", "attr", ".", "replace", "(", "\"_\"", ",", "\"-\"", ")", ")", "return", "params", "def", 
"index_line", "(", "cls", ",", "params", ")", ":", "# format: \"module_id:task_family:param param ...\"", "return", "\"{}:{}:{}\"", ".", "format", "(", "cls", ".", "__module__", ",", "cls", ".", "task_family", ",", "\" \"", ".", "join", "(", "params", ")", ")", "stats", "=", "OrderedDict", "(", ")", "# write the index file", "if", "not", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "dirname", "(", "index_file", ")", ")", ":", "os", ".", "makedirs", "(", "os", ".", "path", ".", "dirname", "(", "index_file", ")", ")", "with", "open", "(", "index_file", ",", "\"w\"", ")", "as", "f", ":", "for", "cls", "in", "task_classes", ":", "# get prams", "params", "=", "get_task_params", "(", "cls", ")", "# fill stats", "if", "cls", ".", "__module__", "not", "in", "stats", ":", "stats", "[", "cls", ".", "__module__", "]", "=", "[", "]", "stats", "[", "cls", ".", "__module__", "]", ".", "append", "(", "(", "cls", ".", "task_family", ",", "params", ")", ")", "f", ".", "write", "(", "index_line", "(", "cls", ",", "params", ")", "+", "\"\\n\"", ")", "# print stats", "if", "args", ".", "verbose", ":", "for", "mod", ",", "data", "in", "six", ".", "iteritems", "(", "stats", ")", ":", "print", "(", "\"\\nmodule '{}', {} task(s):\"", ".", "format", "(", "colored", "(", "mod", ",", "style", "=", "\"bright\"", ")", ",", "len", "(", "data", ")", ")", ")", "for", "task_family", ",", "_", "in", "data", ":", "print", "(", "\" - {}\"", ".", "format", "(", "colored", "(", "task_family", ",", "\"green\"", ")", ")", ")", "print", "(", "\"\"", ")", "print", "(", "\"written {} task(s) to index file '{}'\"", ".", "format", "(", "len", "(", "task_classes", ")", ",", "index_file", ")", ")" ]
31.452991
21.367521
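Given the index_line() helper above, each line of the generated index file has this shape (the module, task family, and parameter names below are made up):

# my_analysis.tasks:CreateHistograms:version n-events output-dir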
def compute_pointwise_distances(self, other, default=None):
    """
    Compute the minimal distance between each point on self and other.

    Parameters
    ----------
    other : tuple of number \
            or imgaug.augmentables.kps.Keypoint \
            or imgaug.augmentables.LineString
        Other object to which to compute the distances.

    default
        Value to return if `other` contains no points.

    Returns
    -------
    list of float
        Distances to `other` or `default` if no distance could be computed.

    """
    import shapely.geometry
    from .kps import Keypoint

    if isinstance(other, Keypoint):
        other = shapely.geometry.Point((other.x, other.y))
    elif isinstance(other, LineString):
        if len(other.coords) == 0:
            return default
        elif len(other.coords) == 1:
            other = shapely.geometry.Point(other.coords[0, :])
        else:
            other = shapely.geometry.LineString(other.coords)
    elif isinstance(other, tuple):
        assert len(other) == 2
        other = shapely.geometry.Point(other)
    else:
        raise ValueError(
            ("Expected Keypoint or LineString or tuple (x,y), "
             + "got type %s.") % (type(other),))

    return [shapely.geometry.Point(point).distance(other)
            for point in self.coords]
[ "def", "compute_pointwise_distances", "(", "self", ",", "other", ",", "default", "=", "None", ")", ":", "import", "shapely", ".", "geometry", "from", ".", "kps", "import", "Keypoint", "if", "isinstance", "(", "other", ",", "Keypoint", ")", ":", "other", "=", "shapely", ".", "geometry", ".", "Point", "(", "(", "other", ".", "x", ",", "other", ".", "y", ")", ")", "elif", "isinstance", "(", "other", ",", "LineString", ")", ":", "if", "len", "(", "other", ".", "coords", ")", "==", "0", ":", "return", "default", "elif", "len", "(", "other", ".", "coords", ")", "==", "1", ":", "other", "=", "shapely", ".", "geometry", ".", "Point", "(", "other", ".", "coords", "[", "0", ",", ":", "]", ")", "else", ":", "other", "=", "shapely", ".", "geometry", ".", "LineString", "(", "other", ".", "coords", ")", "elif", "isinstance", "(", "other", ",", "tuple", ")", ":", "assert", "len", "(", "other", ")", "==", "2", "other", "=", "shapely", ".", "geometry", ".", "Point", "(", "other", ")", "else", ":", "raise", "ValueError", "(", "(", "\"Expected Keypoint or LineString or tuple (x,y), \"", "+", "\"got type %s.\"", ")", "%", "(", "type", "(", "other", ")", ",", ")", ")", "return", "[", "shapely", ".", "geometry", ".", "Point", "(", "point", ")", ".", "distance", "(", "other", ")", "for", "point", "in", "self", ".", "coords", "]" ]
34.214286
18.595238
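Usage sketch (assuming this method lives on imgaug's LineString; distances are plain Euclidean distances computed via shapely):

from imgaug.augmentables.kps import Keypoint
from imgaug.augmentables.lines import LineString

ls = LineString([(0, 0), (10, 0)])
dists = ls.compute_pointwise_distances(Keypoint(x=0, y=5))
# one distance per line-string coordinate: [5.0, 11.18...]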
def get_alerts_summary(self, **kwargs): # noqa: E501 """Count alerts of various statuses for a customer # noqa: E501 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_alerts_summary(async_req=True) >>> result = thread.get() :param async_req bool :return: ResponseContainerMapStringInteger If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_alerts_summary_with_http_info(**kwargs) # noqa: E501 else: (data) = self.get_alerts_summary_with_http_info(**kwargs) # noqa: E501 return data
[ "def", "get_alerts_summary", "(", "self", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async_req'", ")", ":", "return", "self", ".", "get_alerts_summary_with_http_info", "(", "*", "*", "kwargs", ")", "# noqa: E501", "else", ":", "(", "data", ")", "=", "self", ".", "get_alerts_summary_with_http_info", "(", "*", "*", "kwargs", ")", "# noqa: E501", "return", "data" ]
42.35
18.7
def _events(self, using_url, filters=None, limit=None):
    """ A long-polling method that queries Syncthing for events.

        Args:
            using_url (str): REST HTTP endpoint
            filters (List[str]): Creates an "event group" in Syncthing to
                only receive events that have been subscribed to.
            limit (int): The number of events to query in the history
                to catch up to the current state.

        Returns:
            generator[dict]
    """
    # coerce
    if not isinstance(limit, (int, NoneType)):
        limit = None

    # coerce
    if filters is None:
        filters = []

    # format our list into the correct expectation of string with commas
    if isinstance(filters, string_types):
        filters = filters.split(',')

    # reset the state if the loop was broken with `stop`
    if not self.blocking:
        self.blocking = True

    # block/long-poll for updates to the events api
    while self.blocking:
        params = {
            'since': self._last_seen_id,
            'limit': limit,
        }

        if filters:
            params['events'] = ','.join(map(str, filters))

        try:
            data = self.get(using_url, params=params, raw_exceptions=True)
        except (ConnectTimeout, ConnectionError) as e:
            # swallow timeout errors for long polling
            data = None
        except Exception as e:
            reraise('', e)

        if data:
            # update our last_seen_id to move our event counter forward
            self._last_seen_id = data[-1]['id']
            for event in data:
                # handle potentially multiple events returned in a list
                self._count += 1
                yield event
[ "def", "_events", "(", "self", ",", "using_url", ",", "filters", "=", "None", ",", "limit", "=", "None", ")", ":", "# coerce", "if", "not", "isinstance", "(", "limit", ",", "(", "int", ",", "NoneType", ")", ")", ":", "limit", "=", "None", "# coerce", "if", "filters", "is", "None", ":", "filters", "=", "[", "]", "# format our list into the correct expectation of string with commas", "if", "isinstance", "(", "filters", ",", "string_types", ")", ":", "filters", "=", "filters", ".", "split", "(", "','", ")", "# reset the state if the loop was broken with `stop`", "if", "not", "self", ".", "blocking", ":", "self", ".", "blocking", "=", "True", "# block/long-poll for updates to the events api", "while", "self", ".", "blocking", ":", "params", "=", "{", "'since'", ":", "self", ".", "_last_seen_id", ",", "'limit'", ":", "limit", ",", "}", "if", "filters", ":", "params", "[", "'events'", "]", "=", "','", ".", "join", "(", "map", "(", "str", ",", "filters", ")", ")", "try", ":", "data", "=", "self", ".", "get", "(", "using_url", ",", "params", "=", "params", ",", "raw_exceptions", "=", "True", ")", "except", "(", "ConnectTimeout", ",", "ConnectionError", ")", "as", "e", ":", "# swallow timeout errors for long polling", "data", "=", "None", "except", "Exception", "as", "e", ":", "reraise", "(", "''", ",", "e", ")", "if", "data", ":", "# update our last_seen_id to move our event counter forward", "self", ".", "_last_seen_id", "=", "data", "[", "-", "1", "]", "[", "'id'", "]", "for", "event", "in", "data", ":", "# handle potentially multiple events returned in a list", "self", ".", "_count", "+=", "1", "yield", "event" ]
33.909091
20.781818
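A consumption sketch (assumptions: client is an object exposing this generator and 'events' is the REST endpoint string; setting blocking to False is what ends the long-poll loop):

for event in client._events('events', filters=['ItemFinished'], limit=10):
    print(event['id'], event['type'])
    if client._count >= 100:
        client.blocking = False  # lets the generator's while-loop exit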
def item(self, infohash, prefetch=None, cache=False): """ Fetch a single item by its info hash. """ return next(self.items(infohash, prefetch, cache))
[ "def", "item", "(", "self", ",", "infohash", ",", "prefetch", "=", "None", ",", "cache", "=", "False", ")", ":", "return", "next", "(", "self", ".", "items", "(", "infohash", ",", "prefetch", ",", "cache", ")", ")" ]
42.75
7.75
def end(self) -> "GameNode": """Follows the main variation to the end and returns the last node.""" node = self while node.variations: node = node.variations[0] return node
[ "def", "end", "(", "self", ")", "->", "\"GameNode\"", ":", "node", "=", "self", "while", "node", ".", "variations", ":", "node", "=", "node", ".", "variations", "[", "0", "]", "return", "node" ]
26.375
18.375
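This is python-chess's mainline traversal; a short sketch (the PGN path is hypothetical):

import chess.pgn

with open('game.pgn') as pgn:      # hypothetical file
    game = chess.pgn.read_game(pgn)
print(game.end().board().fen())    # position after the last mainline move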
def json2value(json_string, params=Null, flexible=False, leaves=False): """ :param json_string: THE JSON :param params: STANDARD JSON PARAMS :param flexible: REMOVE COMMENTS :param leaves: ASSUME JSON KEYS ARE DOT-DELIMITED :return: Python value """ if not is_text(json_string): Log.error("only unicode json accepted") try: if flexible: # REMOVE """COMMENTS""", # COMMENTS, //COMMENTS, AND \n \r # DERIVED FROM https://github.com/jeads/datasource/blob/master/datasource/bases/BaseHub.py# L58 json_string = re.sub(r"\"\"\".*?\"\"\"", r"\n", json_string, flags=re.MULTILINE) json_string = "\n".join(remove_line_comment(l) for l in json_string.split("\n")) # ALLOW DICTIONARY'S NAME:VALUE LIST TO END WITH COMMA json_string = re.sub(r",\s*\}", r"}", json_string) # ALLOW LISTS TO END WITH COMMA json_string = re.sub(r",\s*\]", r"]", json_string) if params: # LOOKUP REFERENCES json_string = expand_template(json_string, params) try: value = wrap(json_decoder(text_type(json_string))) except Exception as e: Log.error("can not decode\n{{content}}", content=json_string, cause=e) if leaves: value = wrap_leaves(value) return value except Exception as e: e = Except.wrap(e) if not json_string.strip(): Log.error("JSON string is only whitespace") c = e while "Expecting '" in c.cause and "' delimiter: line" in c.cause: c = c.cause if "Expecting '" in c and "' delimiter: line" in c: line_index = int(strings.between(c.message, " line ", " column ")) - 1 column = int(strings.between(c.message, " column ", " ")) - 1 line = json_string.split("\n")[line_index].replace("\t", " ") if column > 20: sample = "..." + line[column - 20:] pointer = " " + (" " * 20) + "^" else: sample = line pointer = (" " * column) + "^" if len(sample) > 43: sample = sample[:43] + "..." Log.error(CAN_NOT_DECODE_JSON + " at:\n\t{{sample}}\n\t{{pointer}}\n", sample=sample, pointer=pointer) base_str = strings.limit(json_string, 1000).encode('utf8') hexx_str = bytes2hex(base_str, " ") try: char_str = " " + " ".join((c.decode("latin1") if ord(c) >= 32 else ".") for c in base_str) except Exception: char_str = " " Log.error(CAN_NOT_DECODE_JSON + ":\n{{char_str}}\n{{hexx_str}}\n", char_str=char_str, hexx_str=hexx_str, cause=e)
[ "def", "json2value", "(", "json_string", ",", "params", "=", "Null", ",", "flexible", "=", "False", ",", "leaves", "=", "False", ")", ":", "if", "not", "is_text", "(", "json_string", ")", ":", "Log", ".", "error", "(", "\"only unicode json accepted\"", ")", "try", ":", "if", "flexible", ":", "# REMOVE \"\"\"COMMENTS\"\"\", # COMMENTS, //COMMENTS, AND \\n \\r", "# DERIVED FROM https://github.com/jeads/datasource/blob/master/datasource/bases/BaseHub.py# L58", "json_string", "=", "re", ".", "sub", "(", "r\"\\\"\\\"\\\".*?\\\"\\\"\\\"\"", ",", "r\"\\n\"", ",", "json_string", ",", "flags", "=", "re", ".", "MULTILINE", ")", "json_string", "=", "\"\\n\"", ".", "join", "(", "remove_line_comment", "(", "l", ")", "for", "l", "in", "json_string", ".", "split", "(", "\"\\n\"", ")", ")", "# ALLOW DICTIONARY'S NAME:VALUE LIST TO END WITH COMMA", "json_string", "=", "re", ".", "sub", "(", "r\",\\s*\\}\"", ",", "r\"}\"", ",", "json_string", ")", "# ALLOW LISTS TO END WITH COMMA", "json_string", "=", "re", ".", "sub", "(", "r\",\\s*\\]\"", ",", "r\"]\"", ",", "json_string", ")", "if", "params", ":", "# LOOKUP REFERENCES", "json_string", "=", "expand_template", "(", "json_string", ",", "params", ")", "try", ":", "value", "=", "wrap", "(", "json_decoder", "(", "text_type", "(", "json_string", ")", ")", ")", "except", "Exception", "as", "e", ":", "Log", ".", "error", "(", "\"can not decode\\n{{content}}\"", ",", "content", "=", "json_string", ",", "cause", "=", "e", ")", "if", "leaves", ":", "value", "=", "wrap_leaves", "(", "value", ")", "return", "value", "except", "Exception", "as", "e", ":", "e", "=", "Except", ".", "wrap", "(", "e", ")", "if", "not", "json_string", ".", "strip", "(", ")", ":", "Log", ".", "error", "(", "\"JSON string is only whitespace\"", ")", "c", "=", "e", "while", "\"Expecting '\"", "in", "c", ".", "cause", "and", "\"' delimiter: line\"", "in", "c", ".", "cause", ":", "c", "=", "c", ".", "cause", "if", "\"Expecting '\"", "in", "c", "and", "\"' delimiter: line\"", "in", "c", ":", "line_index", "=", "int", "(", "strings", ".", "between", "(", "c", ".", "message", ",", "\" line \"", ",", "\" column \"", ")", ")", "-", "1", "column", "=", "int", "(", "strings", ".", "between", "(", "c", ".", "message", ",", "\" column \"", ",", "\" \"", ")", ")", "-", "1", "line", "=", "json_string", ".", "split", "(", "\"\\n\"", ")", "[", "line_index", "]", ".", "replace", "(", "\"\\t\"", ",", "\" \"", ")", "if", "column", ">", "20", ":", "sample", "=", "\"...\"", "+", "line", "[", "column", "-", "20", ":", "]", "pointer", "=", "\" \"", "+", "(", "\" \"", "*", "20", ")", "+", "\"^\"", "else", ":", "sample", "=", "line", "pointer", "=", "(", "\" \"", "*", "column", ")", "+", "\"^\"", "if", "len", "(", "sample", ")", ">", "43", ":", "sample", "=", "sample", "[", ":", "43", "]", "+", "\"...\"", "Log", ".", "error", "(", "CAN_NOT_DECODE_JSON", "+", "\" at:\\n\\t{{sample}}\\n\\t{{pointer}}\\n\"", ",", "sample", "=", "sample", ",", "pointer", "=", "pointer", ")", "base_str", "=", "strings", ".", "limit", "(", "json_string", ",", "1000", ")", ".", "encode", "(", "'utf8'", ")", "hexx_str", "=", "bytes2hex", "(", "base_str", ",", "\" \"", ")", "try", ":", "char_str", "=", "\" \"", "+", "\" \"", ".", "join", "(", "(", "c", ".", "decode", "(", "\"latin1\"", ")", "if", "ord", "(", "c", ")", ">=", "32", "else", "\".\"", ")", "for", "c", "in", "base_str", ")", "except", "Exception", ":", "char_str", "=", "\" \"", "Log", ".", "error", "(", "CAN_NOT_DECODE_JSON", "+", "\":\\n{{char_str}}\\n{{hexx_str}}\\n\"", ",", 
"char_str", "=", "char_str", ",", "hexx_str", "=", "hexx_str", ",", "cause", "=", "e", ")" ]
38.971014
24.710145
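A sketch of the flexible mode (the mo-dots wrap() gives attribute access to the decoded value):

data = json2value('{"a": 1, // comment\n "b": [2, 3,],}', flexible=True)
# line comments and trailing commas are stripped before decoding; data.a == 1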
def global_closeness_centrality(g, node=None, normalize=True):
    """
    Calculates global closeness centrality for one or all nodes in the network.

    See :func:`.node_global_closeness_centrality` for more information.

    Parameters
    ----------
    g : networkx.Graph
    node : optional
        If provided, compute centrality for this node only; otherwise a
        dictionary for all nodes is returned.
    normalize : boolean
        If True, normalizes centrality based on the average shortest path
        length. Default is True.

    Returns
    -------
    C : dict
        Dictionary of results, with node identifiers as keys and gcc as
        values.
    """
    if node is None:    # `not node` would misfire on falsy node ids such as 0
        C = {}
        for node in g.nodes():
            C[node] = global_closeness_centrality(g, node, normalize=normalize)
        return C

    values = nx.shortest_path_length(g, node).values()
    c = sum([1./pl for pl in values if pl != 0.]) / len(g)

    if normalize:
        ac = 0
        for sg in nx.connected_component_subgraphs(g):
            if len(sg.nodes()) > 1:
                aspl = nx.average_shortest_path_length(sg)
                ac += (1./aspl) * (float(len(sg)) / float(len(g))**2)
        c = c/ac

    return c
[ "def", "global_closeness_centrality", "(", "g", ",", "node", "=", "None", ",", "normalize", "=", "True", ")", ":", "if", "not", "node", ":", "C", "=", "{", "}", "for", "node", "in", "g", ".", "nodes", "(", ")", ":", "C", "[", "node", "]", "=", "global_closeness_centrality", "(", "g", ",", "node", ",", "normalize", "=", "normalize", ")", "return", "C", "values", "=", "nx", ".", "shortest_path_length", "(", "g", ",", "node", ")", ".", "values", "(", ")", "c", "=", "sum", "(", "[", "1.", "/", "pl", "for", "pl", "in", "values", "if", "pl", "!=", "0.", "]", ")", "/", "len", "(", "g", ")", "if", "normalize", ":", "ac", "=", "0", "for", "sg", "in", "nx", ".", "connected_component_subgraphs", "(", "g", ")", ":", "if", "len", "(", "sg", ".", "nodes", "(", ")", ")", ">", "1", ":", "aspl", "=", "nx", ".", "average_shortest_path_length", "(", "sg", ")", "ac", "+=", "(", "1.", "/", "aspl", ")", "*", "(", "float", "(", "len", "(", "sg", ")", ")", "/", "float", "(", "len", "(", "g", ")", ")", "**", "2", ")", "c", "=", "c", "/", "ac", "return", "c" ]
28.648649
25.621622
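Usage sketch; note the normalization path calls nx.connected_component_subgraphs, which was removed in networkx 2.4, so this assumes an older networkx:

import networkx as nx

g = nx.path_graph(4)
print(global_closeness_centrality(g))          # dict: node -> gcc
print(global_closeness_centrality(g, node=0))  # single value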
def terminate(self, include_watchman=True): """Terminates pantsd and watchman. N.B. This should always be called under care of the `lifecycle_lock`. """ super(PantsDaemon, self).terminate() if include_watchman: self.watchman_launcher.terminate()
[ "def", "terminate", "(", "self", ",", "include_watchman", "=", "True", ")", ":", "super", "(", "PantsDaemon", ",", "self", ")", ".", "terminate", "(", ")", "if", "include_watchman", ":", "self", ".", "watchman_launcher", ".", "terminate", "(", ")" ]
33.125
11.5
def get_queryset(self): """ Optionally restricts the queryset by filtering against query parameters in the URL. """ query_params = self.request.query_params url_params = self.kwargs # get queryset_filters from FilterMixin queryset_filters = self.get_db_filters(url_params, query_params) # This dict will hold filter kwargs to pass in to Django ORM calls. db_filters = queryset_filters['db_filters'] # This dict will hold exclude kwargs to pass in to Django ORM calls. db_excludes = queryset_filters['db_excludes'] queryset = Team.objects.prefetch_related( 'players' ).all() return queryset.filter(**db_filters).exclude(**db_excludes)
[ "def", "get_queryset", "(", "self", ")", ":", "query_params", "=", "self", ".", "request", ".", "query_params", "url_params", "=", "self", ".", "kwargs", "# get queryset_filters from FilterMixin", "queryset_filters", "=", "self", ".", "get_db_filters", "(", "url_params", ",", "query_params", ")", "# This dict will hold filter kwargs to pass in to Django ORM calls.", "db_filters", "=", "queryset_filters", "[", "'db_filters'", "]", "# This dict will hold exclude kwargs to pass in to Django ORM calls.", "db_excludes", "=", "queryset_filters", "[", "'db_excludes'", "]", "queryset", "=", "Team", ".", "objects", ".", "prefetch_related", "(", "'players'", ")", ".", "all", "(", ")", "return", "queryset", ".", "filter", "(", "*", "*", "db_filters", ")", ".", "exclude", "(", "*", "*", "db_excludes", ")" ]
32.565217
22.304348
def start_api_and_rpc_workers(self):
    """Initializes eventlet and waits for workers to exit.

    Spawns the workers returned from serve_rpc
    """
    pool = eventlet.GreenPool()

    quark_rpc = self.serve_rpc()
    pool.spawn(quark_rpc.wait)

    pool.waitall()
[ "def", "start_api_and_rpc_workers", "(", "self", ")", ":", "pool", "=", "eventlet", ".", "GreenPool", "(", ")", "quark_rpc", "=", "self", ".", "serve_rpc", "(", ")", "pool", ".", "spawn", "(", "quark_rpc", ".", "wait", ")", "pool", ".", "waitall", "(", ")" ]
26.545455
15.181818
def error(msg, exit_code): """ Print `msg` error and exit with status `exit_code` """ sys.stderr.write("%s\ntry 'mongotail --help' for more information\n" % msg) sys.stderr.flush() exit(exit_code)
[ "def", "error", "(", "msg", ",", "exit_code", ")", ":", "sys", ".", "stderr", ".", "write", "(", "\"%s\\ntry 'mongotail --help' for more information\\n\"", "%", "msg", ")", "sys", ".", "stderr", ".", "flush", "(", ")", "exit", "(", "exit_code", ")" ]
30.571429
15.142857
def predict(self, x):
    """
    This function makes a forward pass through the MLP (no update).

    **Args:**

    * `x` : input vector (1-dimensional array)

    **Returns:**

    * `y` : output of MLP (float or 1-dimensional array).
        Size depends on number of MLP outputs.
    """
    # forward pass to hidden layers
    for l in self.layers:
        x = l.predict(x)
    self.x[1:] = x
    # forward pass to output layer
    if self.outputs == 1:
        self.y = np.dot(self.w, self.x)
    else:
        self.y = np.sum(self.w*self.x, axis=1)
    return self.y
[ "def", "predict", "(", "self", ",", "x", ")", ":", "# forward pass to hidden layers", "for", "l", "in", "self", ".", "layers", ":", "x", "=", "l", ".", "predict", "(", "x", ")", "self", ".", "x", "[", "1", ":", "]", "=", "x", "# forward pass to output layer", "if", "self", ".", "outputs", "==", "1", ":", "self", ".", "y", "=", "np", ".", "dot", "(", "self", ".", "w", ",", "self", ".", "x", ")", "else", ":", "self", ".", "y", "=", "np", ".", "sum", "(", "self", ".", "w", "*", "self", ".", "x", ",", "axis", "=", "1", ")", "return", "self", ".", "y" ]
26.583333
17.5
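A forward-pass sketch (assumption: mlp is an already-constructed and trained instance of this MLP class, e.g. from padasip's ann module; only prediction is exercised here):

import numpy as np

x = np.random.rand(5)   # one input vector, sized to the network's inputs
y = mlp.predict(x)      # forward pass only; weights are left unchanged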
def fetch_weighted_complexity(self, recalculate_metrics=False):
    """
    Calculates indicator value according to metrics weights
    Uses metrics in database
    args:
        recalculate_metrics: If true metrics values are updated before
        using weights
    """
    # TODO: implement metrics recalculation
    max_total = sum(
        [self.metrics_weights[metric_name] for metric_name in self.metrics_weights]
    )
    total = 0
    if recalculate_metrics:
        self.calculate_indicator_metrics()
    for metric in self.metrics.all():
        if metric.name in self.metrics_weights and metric.is_outlier:
            total += self.metrics_weights[metric.name]
    value = total / max_total
    final_value = "{:.1f}".format(value * 10)
    if final_value[-1] == "0":
        final_value = "{:.0f}".format(value * 10)
        final_value = int(final_value)
    else:
        final_value = float(final_value)
    self.value = float(final_value)
    self.is_valid = True
    self.updated_at = datetime.datetime.now()
    self.save()
    return final_value
[ "def", "fetch_weighted_complexity", "(", "self", ",", "recalculate_metrics", "=", "False", ")", ":", "# TODO: implment metrics recalculation", "max_total", "=", "sum", "(", "[", "self", ".", "metrics_weights", "[", "metric_name", "]", "for", "metric_name", "in", "self", ".", "metrics_weights", "]", ")", "total", "=", "0", "if", "recalculate_metrics", ":", "self", ".", "calculate_indicator_metrics", "(", ")", "for", "metric", "in", "self", ".", "metrics", ".", "all", "(", ")", ":", "if", "metric", ".", "name", "in", "self", ".", "metrics_weights", "and", "metric", ".", "is_outlier", ":", "total", "+=", "self", ".", "metrics_weights", "[", "metric", ".", "name", "]", "value", "=", "total", "/", "max_total", "final_value", "=", "\"{:.1f}\"", ".", "format", "(", "value", "*", "10", ")", "if", "final_value", "[", "-", "1", "]", "==", "\"0\"", ":", "final_value", "=", "\"{:.0f}\"", ".", "format", "(", "value", "*", "10", ")", "final_value", "=", "int", "(", "final_value", ")", "else", ":", "final_value", "=", "float", "(", "final_value", ")", "self", ".", "value", "=", "float", "(", "final_value", ")", "self", ".", "is_valid", "=", "True", "self", ".", "updated_at", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "self", ".", "save", "(", ")", "return", "final_value" ]
35.575758
16.848485
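A worked example of the weighting arithmetic (hypothetical weights and outlier flags):

# metrics_weights = {'loc': 3, 'churn': 1}      -> max_total = 4
# only 'loc' is an outlier: value = 3/4 = 0.75  -> "7.5"  -> final_value = 7.5 (float)
# both are outliers:        value = 4/4 = 1.0   -> "10.0" -> final_value = 10  (int)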
def encode_arg(arg):
    """
    Encode argument to be sent in a valid GuacamoleInstruction.

    example:
        >>> arg = encode_arg('size')
        >>> arg == '4.size'
        True

    :param arg: arg string.

    :return: str
    """
    arg_utf8 = utf8(arg)

    return ELEM_SEP.join([str(len(str(arg_utf8))), str(arg_utf8)])
[ "def", "encode_arg", "(", "arg", ")", ":", "arg_utf8", "=", "utf8", "(", "arg", ")", "return", "ELEM_SEP", ".", "join", "(", "[", "str", "(", "len", "(", "str", "(", "arg_utf8", ")", ")", ")", ",", "str", "(", "arg_utf8", ")", "]", ")" ]
21.875
21.625
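The encoding simply prefixes the value with its length, ELEM_SEP being '.' in the Guacamole protocol:

encode_arg('size')     # -> '4.size'
encode_arg('connect')  # -> '7.connect'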
def iteration(self, node_status=True): """ Execute a single model iteration :return: Iteration_id, Incremental node status (dictionary node->status) """ self.clean_initial_status(self.available_statuses.values()) actual_status = {node: nstatus for node, nstatus in future.utils.iteritems(self.status)} if self.actual_iteration == 0: if min(actual_status.values()) == 0: number_node_blocked = int(float(self.graph.number_of_nodes()) * float(self.params['model']['percentage_blocked'])) i = 0 while i < number_node_blocked: # select a random node node = list(self.graph.nodes())[np.random.randint(0, self.graph.number_of_nodes())] # node not infected if actual_status[node] == 0: # node blocked actual_status[node] = -1 self.status[node] = -1 i += 1 self.actual_iteration += 1 delta, node_count, status_delta = self.status_delta(actual_status) if node_status: return {"iteration": 0, "status": actual_status.copy(), "node_count": node_count.copy(), "status_delta": status_delta.copy()} else: return {"iteration": 0, "status": {}, "node_count": node_count.copy(), "status_delta": status_delta.copy()} for node in self.graph.nodes(): if self.status[node] == 0: if self.params['model']['adopter_rate'] > 0: xk = (0, 1) pk = (1-self.params['model']['adopter_rate'], self.params['model']['adopter_rate']) probability = stats.rv_discrete(name='probability', values=(xk, pk)) number_probability = probability.rvs() if number_probability == 1: actual_status[node] = 1 continue neighbors = list(self.graph.neighbors(node)) if len(neighbors) == 0: continue if isinstance(self.graph, nx.DiGraph): neighbors = self.graph.predecessors(node) infected = 0 for v in neighbors: if self.status[v] != -1: infected += self.status[v] infected_ratio = float(infected)/len(neighbors) if infected_ratio >= self.params['nodes']['threshold'][node]: actual_status[node] = 1 delta, node_count, status_delta = self.status_delta(actual_status) self.status = actual_status self.actual_iteration += 1 if node_status: return {"iteration": self.actual_iteration - 1, "status": delta.copy(), "node_count": node_count.copy(), "status_delta": status_delta.copy()} else: return {"iteration": self.actual_iteration - 1, "status": {}, "node_count": node_count.copy(), "status_delta": status_delta.copy()}
[ "def", "iteration", "(", "self", ",", "node_status", "=", "True", ")", ":", "self", ".", "clean_initial_status", "(", "self", ".", "available_statuses", ".", "values", "(", ")", ")", "actual_status", "=", "{", "node", ":", "nstatus", "for", "node", ",", "nstatus", "in", "future", ".", "utils", ".", "iteritems", "(", "self", ".", "status", ")", "}", "if", "self", ".", "actual_iteration", "==", "0", ":", "if", "min", "(", "actual_status", ".", "values", "(", ")", ")", "==", "0", ":", "number_node_blocked", "=", "int", "(", "float", "(", "self", ".", "graph", ".", "number_of_nodes", "(", ")", ")", "*", "float", "(", "self", ".", "params", "[", "'model'", "]", "[", "'percentage_blocked'", "]", ")", ")", "i", "=", "0", "while", "i", "<", "number_node_blocked", ":", "# select a random node", "node", "=", "list", "(", "self", ".", "graph", ".", "nodes", "(", ")", ")", "[", "np", ".", "random", ".", "randint", "(", "0", ",", "self", ".", "graph", ".", "number_of_nodes", "(", ")", ")", "]", "# node not infected", "if", "actual_status", "[", "node", "]", "==", "0", ":", "# node blocked", "actual_status", "[", "node", "]", "=", "-", "1", "self", ".", "status", "[", "node", "]", "=", "-", "1", "i", "+=", "1", "self", ".", "actual_iteration", "+=", "1", "delta", ",", "node_count", ",", "status_delta", "=", "self", ".", "status_delta", "(", "actual_status", ")", "if", "node_status", ":", "return", "{", "\"iteration\"", ":", "0", ",", "\"status\"", ":", "actual_status", ".", "copy", "(", ")", ",", "\"node_count\"", ":", "node_count", ".", "copy", "(", ")", ",", "\"status_delta\"", ":", "status_delta", ".", "copy", "(", ")", "}", "else", ":", "return", "{", "\"iteration\"", ":", "0", ",", "\"status\"", ":", "{", "}", ",", "\"node_count\"", ":", "node_count", ".", "copy", "(", ")", ",", "\"status_delta\"", ":", "status_delta", ".", "copy", "(", ")", "}", "for", "node", "in", "self", ".", "graph", ".", "nodes", "(", ")", ":", "if", "self", ".", "status", "[", "node", "]", "==", "0", ":", "if", "self", ".", "params", "[", "'model'", "]", "[", "'adopter_rate'", "]", ">", "0", ":", "xk", "=", "(", "0", ",", "1", ")", "pk", "=", "(", "1", "-", "self", ".", "params", "[", "'model'", "]", "[", "'adopter_rate'", "]", ",", "self", ".", "params", "[", "'model'", "]", "[", "'adopter_rate'", "]", ")", "probability", "=", "stats", ".", "rv_discrete", "(", "name", "=", "'probability'", ",", "values", "=", "(", "xk", ",", "pk", ")", ")", "number_probability", "=", "probability", ".", "rvs", "(", ")", "if", "number_probability", "==", "1", ":", "actual_status", "[", "node", "]", "=", "1", "continue", "neighbors", "=", "list", "(", "self", ".", "graph", ".", "neighbors", "(", "node", ")", ")", "if", "len", "(", "neighbors", ")", "==", "0", ":", "continue", "if", "isinstance", "(", "self", ".", "graph", ",", "nx", ".", "DiGraph", ")", ":", "neighbors", "=", "self", ".", "graph", ".", "predecessors", "(", "node", ")", "infected", "=", "0", "for", "v", "in", "neighbors", ":", "if", "self", ".", "status", "[", "v", "]", "!=", "-", "1", ":", "infected", "+=", "self", ".", "status", "[", "v", "]", "infected_ratio", "=", "float", "(", "infected", ")", "/", "len", "(", "neighbors", ")", "if", "infected_ratio", ">=", "self", ".", "params", "[", "'nodes'", "]", "[", "'threshold'", "]", "[", "node", "]", ":", "actual_status", "[", "node", "]", "=", "1", "delta", ",", "node_count", ",", "status_delta", "=", "self", ".", "status_delta", "(", "actual_status", ")", "self", ".", "status", "=", "actual_status", "self", ".", 
"actual_iteration", "+=", "1", "if", "node_status", ":", "return", "{", "\"iteration\"", ":", "self", ".", "actual_iteration", "-", "1", ",", "\"status\"", ":", "delta", ".", "copy", "(", ")", ",", "\"node_count\"", ":", "node_count", ".", "copy", "(", ")", ",", "\"status_delta\"", ":", "status_delta", ".", "copy", "(", ")", "}", "else", ":", "return", "{", "\"iteration\"", ":", "self", ".", "actual_iteration", "-", "1", ",", "\"status\"", ":", "{", "}", ",", "\"node_count\"", ":", "node_count", ".", "copy", "(", ")", ",", "\"status_delta\"", ":", "status_delta", ".", "copy", "(", ")", "}" ]
42.253333
23.8
def saveAs(self, filename=''):
    """
    Saves the current document to the given filename.  If no filename \
    is supplied, then the user will be prompted to supply a filename.

    :param      filename | <str>

    :return     <bool> | success
    """
    if not (filename and isinstance(filename, basestring)):
        langTypes = XLanguage.pluginFileTypes()
        filename = QFileDialog.getSaveFileName(None,
                                               'Save File As...',
                                               QDir.currentPath(),
                                               langTypes)
        if type(filename) == tuple:
            filename = nativestring(filename[0])

    if not filename:
        return False

    docfile = QFile(filename)
    if not docfile.open(QFile.WriteOnly):
        logger.warning('Could not open %s for writing.' % filename)
        return False

    success = self.write(docfile)
    docfile.close()

    if success:
        filename = nativestring(filename)
        self._filename = filename
        self.setModified(False)

        # set the language
        lang = XLanguage.byFileType(os.path.splitext(filename)[1])
        if lang != self.language():
            self.setLanguage(lang)

    return success
[ "def", "saveAs", "(", "self", ",", "filename", "=", "''", ")", ":", "if", "(", "not", "(", "filename", "and", "isinstance", "(", "filename", ",", "basestring", ")", ")", ")", ":", "langTypes", "=", "XLanguage", ".", "pluginFileTypes", "(", ")", "filename", "=", "QFileDialog", ".", "getSaveFileName", "(", "None", ",", "'Save File As...'", ",", "QDir", ".", "currentPath", "(", ")", ",", "langTypes", ")", "if", "type", "(", "filename", ")", "==", "tuple", ":", "filename", "=", "nativestring", "(", "filename", "[", "0", "]", ")", "if", "(", "not", "filename", ")", ":", "return", "False", "docfile", "=", "QFile", "(", "filename", ")", "if", "(", "not", "docfile", ".", "open", "(", "QFile", ".", "WriteOnly", ")", ")", ":", "logger", ".", "warning", "(", "'Could not open %s for writing.'", "%", "filename", ")", "return", "False", "success", "=", "self", ".", "write", "(", "docfile", ")", "docfile", ".", "close", "(", ")", "if", "success", ":", "filename", "=", "nativestring", "(", "filename", ")", "self", ".", "_filename", "=", "filename", "self", ".", "setModified", "(", "False", ")", "# set the language\r", "lang", "=", "XLanguage", ".", "byFileType", "(", "os", ".", "path", ".", "splitext", "(", "filename", ")", "[", "1", "]", ")", "if", "(", "lang", "!=", "self", ".", "language", "(", ")", ")", ":", "self", ".", "setLanguage", "(", "lang", ")", "return", "success" ]
35.095238
17.761905
def do_pot(self):
    """
    Sync the template with the python code.
    """
    files_to_translate = []
    log.debug("Collecting python sources for pot ...")
    for source_path in self._source_paths:
        for source_path in self._iter_suffix(path=source_path, suffix=".py"):
            log.debug("... add to pot: {source}".format(source=str(source_path)))
            files_to_translate.append(str(source_path))
    for system_file in self.SYSTEM_SOURCE_FILES:
        files_to_translate.append(str(self._system_path / system_file))
    # FIXME: use separate domain for system source translations? Merge them when generating mo's?
    log.debug("Finished collection sources.")
    pot_path = (self._po_path / self._basename).with_suffix(".pot")
    command = ["xgettext",
               "--keyword=_",
               "--keyword=_translate",
               "--output={output}".format(output=str(pot_path))]
    command.extend(files_to_translate)
    check_call(command)
    log.debug("pot file \"{pot}\" created!".format(pot=str(pot_path)))
    pot_copy_path = self._mo_path / pot_path.name
    log.debug("Copying pot file to mo path: {pot_copy_path}".format(pot_copy_path=str(pot_copy_path)))
    shutil.copy(str(pot_path), str(pot_copy_path))
[ "def", "do_pot", "(", "self", ")", ":", "files_to_translate", "=", "[", "]", "log", ".", "debug", "(", "\"Collecting python sources for pot ...\"", ")", "for", "source_path", "in", "self", ".", "_source_paths", ":", "for", "source_path", "in", "self", ".", "_iter_suffix", "(", "path", "=", "source_path", ",", "suffix", "=", "\".py\"", ")", ":", "log", ".", "debug", "(", "\"... add to pot: {source}\"", ".", "format", "(", "source", "=", "str", "(", "source_path", ")", ")", ")", "files_to_translate", ".", "append", "(", "str", "(", "source_path", ")", ")", "for", "system_file", "in", "self", ".", "SYSTEM_SOURCE_FILES", ":", "files_to_translate", ".", "append", "(", "str", "(", "self", ".", "_system_path", "/", "system_file", ")", ")", "# FIXME: use separate domain for system source translations? Nerge them when generating mo's?", "log", ".", "debug", "(", "\"Finished collection sources.\"", ")", "pot_path", "=", "(", "self", ".", "_po_path", "/", "self", ".", "_basename", ")", ".", "with_suffix", "(", "\".pot\"", ")", "command", "=", "[", "\"xgettext\"", ",", "\"--keyword=_\"", ",", "\"--keyword=_translate\"", ",", "\"--output={output}\"", ".", "format", "(", "output", "=", "str", "(", "pot_path", ")", ")", "]", "command", ".", "extend", "(", "files_to_translate", ")", "check_call", "(", "command", ")", "log", ".", "debug", "(", "\"pot file \\\"{pot}\\\" created!\"", ".", "format", "(", "pot", "=", "str", "(", "pot_path", ")", ")", ")", "pot_copy_path", "=", "self", ".", "_mo_path", "/", "pot_path", ".", "name", "log", ".", "debug", "(", "\"Copying pot file to mo path: {pot_copy_path}\"", ".", "format", "(", "pot_copy_path", "=", "str", "(", "pot_copy_path", ")", ")", ")", "shutil", ".", "copy", "(", "str", "(", "pot_path", ")", ",", "str", "(", "pot_copy_path", ")", ")" ]
53.791667
23.291667
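A minimal sketch of the xgettext command do_pot assembles; the pot path and source files below are hypothetical stand-ins for self._po_path / self._basename and the collected sources:

pot_path = "po/myapp.pot"                           # hypothetical
files_to_translate = ["src/main.py", "src/gui.py"]  # hypothetical
command = ["xgettext", "--keyword=_", "--keyword=_translate",
           "--output={output}".format(output=pot_path)]
command.extend(files_to_translate)
print(" ".join(command))
# xgettext --keyword=_ --keyword=_translate --output=po/myapp.pot src/main.py src/gui.py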
def get_upstream_paths(self, port): """Retrieve a dictionary containing the full URLs of the upstream apps :param int port: The port used by the replay and cdx servers :return: A dictionary containing the upstream paths (replay, cdx-server, record [if enabled]) :rtype: dict[str, str] """ base_paths = { 'replay': self.REPLAY_API % port, 'cdx-server': self.CDX_API % port, } if self.recorder_path: base_paths['record'] = self.recorder_path return base_paths
[ "def", "get_upstream_paths", "(", "self", ",", "port", ")", ":", "base_paths", "=", "{", "'replay'", ":", "self", ".", "REPLAY_API", "%", "port", ",", "'cdx-server'", ":", "self", ".", "CDX_API", "%", "port", ",", "}", "if", "self", ".", "recorder_path", ":", "base_paths", "[", "'record'", "]", "=", "self", ".", "recorder_path", "return", "base_paths" ]
35.5
20.1875
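A self-contained sketch of the resulting mapping; the URL templates are hypothetical stand-ins for the real REPLAY_API / CDX_API class constants:

class _DemoFrontend:
    REPLAY_API = 'http://localhost:%s/replay'      # hypothetical template
    CDX_API = 'http://localhost:%s/cdx-server'     # hypothetical template
    recorder_path = None

    def get_upstream_paths(self, port):
        base_paths = {
            'replay': self.REPLAY_API % port,
            'cdx-server': self.CDX_API % port,
        }
        if self.recorder_path:
            base_paths['record'] = self.recorder_path
        return base_paths

print(_DemoFrontend().get_upstream_paths(8080))
# -> {'replay': 'http://localhost:8080/replay',
#     'cdx-server': 'http://localhost:8080/cdx-server'}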
def top_directory(self):
        """Return the name of the archive's topmost directory."""
        if self.handle:
            return os.path.commonprefix(self.handle.getnames()).rstrip('/')
[ "def", "top_directory", "(", "self", ")", ":", "if", "self", ".", "handle", ":", "return", "os", ".", "path", ".", "commonprefix", "(", "self", ".", "handle", ".", "getnames", "(", ")", ")", ".", "rstrip", "(", "'/'", ")" ]
46.25
17
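Note that os.path.commonprefix compares character by character rather than path component by path component (sibling names sharing a leading character can over-extend the prefix), which is why the trailing '/' needs stripping. A quick illustration with hypothetical member names:

import os

names = ['pkg-1.0/setup.py', 'pkg-1.0/docs/index.rst']  # hypothetical archive members
print(os.path.commonprefix(names).rstrip('/'))          # -> pkg-1.0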
def _new_conn(self): """ Return a fresh :class:`HTTPConnection`. """ self.num_connections += 1 log.debug("Starting new HTTP connection (%d): %s", self.num_connections, self.host) conn = self.ConnectionCls(host=self.host, port=self.port, timeout=self.timeout.connect_timeout, strict=self.strict, **self.conn_kw) return conn
[ "def", "_new_conn", "(", "self", ")", ":", "self", ".", "num_connections", "+=", "1", "log", ".", "debug", "(", "\"Starting new HTTP connection (%d): %s\"", ",", "self", ".", "num_connections", ",", "self", ".", "host", ")", "conn", "=", "self", ".", "ConnectionCls", "(", "host", "=", "self", ".", "host", ",", "port", "=", "self", ".", "port", ",", "timeout", "=", "self", ".", "timeout", ".", "connect_timeout", ",", "strict", "=", "self", ".", "strict", ",", "*", "*", "self", ".", "conn_kw", ")", "return", "conn" ]
37.833333
17.333333
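A minimal sketch of the call path that ends in _new_conn, assuming urllib3 is installed; the host is hypothetical and the request itself is left commented out:

import urllib3

pool = urllib3.HTTPConnectionPool('example.com', maxsize=2)
# The first request on the empty pool triggers _new_conn internally,
# bumping num_connections and emitting the debug line above:
# resp = pool.request('GET', '/')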
def schedule_contact_downtime(self, contact, start_time, end_time, author, comment): """Schedule contact downtime Format of the line that triggers function call:: SCHEDULE_CONTACT_DOWNTIME;<contact_name>;<start_time>;<end_time>;<author>;<comment> :param contact: contact to put in downtime :type contact: alignak.objects.contact.Contact :param start_time: downtime start time :type start_time: int :param end_time: downtime end time :type end_time: int :param author: downtime author :type author: str :param comment: text comment :type comment: str :return: None """ data = {'ref': contact.uuid, 'start_time': start_time, 'end_time': end_time, 'author': author, 'comment': comment} cdt = ContactDowntime(data) contact.add_downtime(cdt) self.send_an_element(contact.get_update_status_brok())
[ "def", "schedule_contact_downtime", "(", "self", ",", "contact", ",", "start_time", ",", "end_time", ",", "author", ",", "comment", ")", ":", "data", "=", "{", "'ref'", ":", "contact", ".", "uuid", ",", "'start_time'", ":", "start_time", ",", "'end_time'", ":", "end_time", ",", "'author'", ":", "author", ",", "'comment'", ":", "comment", "}", "cdt", "=", "ContactDowntime", "(", "data", ")", "contact", ".", "add_downtime", "(", "cdt", ")", "self", ".", "send_an_element", "(", "contact", ".", "get_update_status_brok", "(", ")", ")" ]
40.826087
17.043478
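For reference, the external command that reaches this handler follows the format quoted in the docstring; with hypothetical values it reads:

SCHEDULE_CONTACT_DOWNTIME;jdoe;1700000000;1700003600;admin;Planned maintenance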
def common(self, other): """ Return two objects with the same dimensions if they lie in the same orthogonal plane. :: >>> l = Location(pop=1, snap=2) >>> m = Location(crackle=1, snap=3) >>> l.common(m) (<Location snap:2 >, <Location snap:3 >) """ selfDim = set(self.keys()) otherDim = set(other.keys()) dims = selfDim | otherDim newSelf = None newOther = None for dim in dims: sd = self.get(dim, None) od = other.get(dim, None) if sd is None or od is None: # axis is missing in one or the other continue if -_EPSILON < sd < _EPSILON and -_EPSILON < od < _EPSILON: # values are both zero continue if newSelf is None: newSelf = self.__class__() if newOther is None: newOther = self.__class__() newSelf[dim] = self[dim] newOther[dim] = other[dim] return newSelf, newOther
[ "def", "common", "(", "self", ",", "other", ")", ":", "selfDim", "=", "set", "(", "self", ".", "keys", "(", ")", ")", "otherDim", "=", "set", "(", "other", ".", "keys", "(", ")", ")", "dims", "=", "selfDim", "|", "otherDim", "newSelf", "=", "None", "newOther", "=", "None", "for", "dim", "in", "dims", ":", "sd", "=", "self", ".", "get", "(", "dim", ",", "None", ")", "od", "=", "other", ".", "get", "(", "dim", ",", "None", ")", "if", "sd", "is", "None", "or", "od", "is", "None", ":", "# axis is missing in one or the other", "continue", "if", "-", "_EPSILON", "<", "sd", "<", "_EPSILON", "and", "-", "_EPSILON", "<", "od", "<", "_EPSILON", ":", "# values are both zero", "continue", "if", "newSelf", "is", "None", ":", "newSelf", "=", "self", ".", "__class__", "(", ")", "if", "newOther", "is", "None", ":", "newOther", "=", "self", ".", "__class__", "(", ")", "newSelf", "[", "dim", "]", "=", "self", "[", "dim", "]", "newOther", "[", "dim", "]", "=", "other", "[", "dim", "]", "return", "newSelf", ",", "newOther" ]
34.419355
11.709677
def extend(self, tasks): '''Add tasks to this particular shovel''' self._tasks.extend(tasks) for task in tasks: # We'll now go through all of our tasks and group them into # sub-shovels current = self.map modules = task.fullname.split('.') for module in modules[:-1]: if not isinstance(current[module], Shovel): logger.warn('Overriding task %s with a module' % current[module].file) shovel = Shovel() shovel.overrides = current[module] current[module] = shovel current = current[module].map # Now we'll put the task in this particular sub-shovel name = modules[-1] if name in current: logger.warn('Overriding %s with %s' % ( '.'.join(modules), task.file)) task.overrides = current[name] current[name] = task
[ "def", "extend", "(", "self", ",", "tasks", ")", ":", "self", ".", "_tasks", ".", "extend", "(", "tasks", ")", "for", "task", "in", "tasks", ":", "# We'll now go through all of our tasks and group them into", "# sub-shovels", "current", "=", "self", ".", "map", "modules", "=", "task", ".", "fullname", ".", "split", "(", "'.'", ")", "for", "module", "in", "modules", "[", ":", "-", "1", "]", ":", "if", "not", "isinstance", "(", "current", "[", "module", "]", ",", "Shovel", ")", ":", "logger", ".", "warn", "(", "'Overriding task %s with a module'", "%", "current", "[", "module", "]", ".", "file", ")", "shovel", "=", "Shovel", "(", ")", "shovel", ".", "overrides", "=", "current", "[", "module", "]", "current", "[", "module", "]", "=", "shovel", "current", "=", "current", "[", "module", "]", ".", "map", "# Now we'll put the task in this particular sub-shovel", "name", "=", "modules", "[", "-", "1", "]", "if", "name", "in", "current", ":", "logger", ".", "warn", "(", "'Overriding %s with %s'", "%", "(", "'.'", ".", "join", "(", "modules", ")", ",", "task", ".", "file", ")", ")", "task", ".", "overrides", "=", "current", "[", "name", "]", "current", "[", "name", "]", "=", "task" ]
41.875
12.958333
def set(self, client_id, code, request, *args, **kwargs):
        """Creates Grant object with the given params

        :param client_id: ID of the client
        :param code: dict holding the authorization code under the 'code' key
        :param request: OAuthlib request object
        """
        expires = datetime.utcnow() + timedelta(seconds=100)
        grant = self.model(
            client_id=request.client.client_id,
            code=code['code'],
            redirect_uri=request.redirect_uri,
            scope=' '.join(request.scopes),
            user=self.current_user(),
            expires=expires
        )
        self.session.add(grant)
        self.session.commit()
[ "def", "set", "(", "self", ",", "client_id", ",", "code", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "expires", "=", "datetime", ".", "utcnow", "(", ")", "+", "timedelta", "(", "seconds", "=", "100", ")", "grant", "=", "self", ".", "model", "(", "client_id", "=", "request", ".", "client", ".", "client_id", ",", "code", "=", "code", "[", "'code'", "]", ",", "redirect_uri", "=", "request", ".", "redirect_uri", ",", "scope", "=", "' '", ".", "join", "(", "request", ".", "scopes", ")", ",", "user", "=", "self", ".", "current_user", "(", ")", ",", "expires", "=", "expires", ")", "self", ".", "session", ".", "add", "(", "grant", ")", "self", ".", "session", ".", "commit", "(", ")" ]
32.421053
13.263158
def runGenomeSGE(bfile, freqFile, nbJob, outPrefix, options):
    """Runs the genome command from plink, on SGE.

    :param bfile: the prefix of the input file.
    :param freqFile: the name of the frequency file (from Plink).
    :param nbJob: the number of jobs to launch.
    :param outPrefix: the prefix of all the output files.
    :param options: the options.

    :type bfile: str
    :type freqFile: str
    :type nbJob: int
    :type outPrefix: str
    :type options: argparse.Namespace

    Runs Plink with the ``genome`` options on the cluster (using SGE).

    """
    # Check that the environment variable for the DRMAA package is set
    if "DRMAA_LIBRARY_PATH" not in os.environ:
        msg = "could not load drmaa: set DRMAA_LIBRARY_PATH"
        raise ProgramError(msg)

    # Import the python drmaa library
    try:
        import drmaa
    except ImportError:
        raise ProgramError("drmaa is not installed, install drmaa")

    # Initializing a session
    s = drmaa.Session()
    s.initialize()

    # Run for each sub task...
    jobIDs = []
    jobTemplates = []
    for i in xrange(1, nbJob + 1):
        for j in xrange(i, nbJob + 1):
            # The command to run
            plinkCommand = ["plink", "--noweb", "--bfile", bfile,
                            "--read-freq", freqFile, "--genome",
                            "--genome-full", "--genome-lists",
                            "{}_tmp.list{}".format(outPrefix, i),
                            "{}_tmp.list{}".format(outPrefix, j),
                            "--out",
                            "{}_output.sub.{}.{}".format(outPrefix, i, j)]

            # Creating the job template
            jt = s.createJobTemplate()
            jt.remoteCommand = plinkCommand[0]
            jt.workingDirectory = os.getcwd()
            jt.jobEnvironment = os.environ
            jt.args = plinkCommand[1:]
            jt.jobName = "_plink_genome_{}_{}".format(i, j)

            # Cluster specifics
            if options.sge_walltime is not None:
                jt.hardWallclockTimeLimit = options.sge_walltime
            if options.sge_nodes is not None:
                native_spec = "-l nodes={}:ppn={}".format(options.sge_nodes[0],
                                                          options.sge_nodes[1])
                jt.nativeSpecification = native_spec

            jobIDs.append(s.runJob(jt))
            jobTemplates.append(jt)

    # Waiting for the jobs to finish
    hadProblems = []
    for jobID in jobIDs:
        retVal = s.wait(jobID, drmaa.Session.TIMEOUT_WAIT_FOREVER)
        hadProblems.append(retVal.exitStatus != 0)

    # Deleting the jobs
    for jt in jobTemplates:
        s.deleteJobTemplate(jt)

    # Closing the session
    s.exit()

    # Checking for problems
    for hadProblem in hadProblems:
        if hadProblem:
            msg = "Some SGE jobs had errors..."
            raise ProgramError(msg)
[ "def", "runGenomeSGE", "(", "bfile", ",", "freqFile", ",", "nbJob", ",", "outPrefix", ",", "options", ")", ":", "# Add the environment variable for DRMAA package", "if", "\"DRMAA_LIBRARY_PATH\"", "not", "in", "os", ".", "environ", ":", "msg", "=", "\"could not load drmaa: set DRMAA_LIBRARY_PATH\"", "raise", "ProgramError", "(", "msg", ")", "# Import the python drmaa library", "try", ":", "import", "drmaa", "except", "ImportError", ":", "raise", "ProgramError", "(", "\"drmaa is not install, install drmaa\"", ")", "# Initializing a session", "s", "=", "drmaa", ".", "Session", "(", ")", "s", ".", "initialize", "(", ")", "# Run for each sub task...", "jobIDs", "=", "[", "]", "jobTemplates", "=", "[", "]", "for", "i", "in", "xrange", "(", "1", ",", "nbJob", "+", "1", ")", ":", "for", "j", "in", "xrange", "(", "i", ",", "nbJob", "+", "1", ")", ":", "# The command to run", "plinkCommand", "=", "[", "\"plink\"", ",", "\"--noweb\"", ",", "\"--bfile\"", ",", "bfile", ",", "\"--read-freq\"", ",", "freqFile", ",", "\"--genome\"", ",", "\"--genome-full\"", ",", "\"--genome-lists\"", ",", "\"{}_tmp.list{}\"", ".", "format", "(", "outPrefix", ",", "i", ")", ",", "\"{}_tmp.list{}\"", ".", "format", "(", "outPrefix", ",", "j", ")", ",", "\"--out\"", ",", "\"{}_output.sub.{}.{}\"", ".", "format", "(", "outPrefix", ",", "i", ",", "j", ")", "]", "# Creating the job template", "jt", "=", "s", ".", "createJobTemplate", "(", ")", "jt", ".", "remoteCommand", "=", "plinkCommand", "[", "0", "]", "jt", ".", "workingDirectory", "=", "os", ".", "getcwd", "(", ")", "jt", ".", "jobEnvironment", "=", "os", ".", "environ", "jt", ".", "args", "=", "plinkCommand", "[", "1", ":", "]", "jt", ".", "jobName", "=", "\"_plink_genome_{}_{}\"", ".", "format", "(", "i", ",", "j", ")", "# Cluster specifics", "if", "options", ".", "sge_walltime", "is", "not", "None", ":", "jt", ".", "hardWallclockTimeLimit", "=", "options", ".", "sge_walltime", "if", "options", ".", "sge_nodes", "is", "not", "None", ":", "native_spec", "=", "\"-l nodes={}:ppn={}\"", ".", "format", "(", "options", ".", "sge_nodes", "[", "0", "]", ",", "options", ".", "sge_nodes", "[", "1", "]", ")", "jt", ".", "nativeSpecification", "=", "native_spec", "jobIDs", ".", "append", "(", "s", ".", "runJob", "(", "jt", ")", ")", "jobTemplates", ".", "append", "(", "jt", ")", "# Waiting for the jobs to finish", "hadProblems", "=", "[", "]", "for", "jobID", "in", "jobIDs", ":", "retVal", "=", "s", ".", "wait", "(", "jobID", ",", "drmaa", ".", "Session", ".", "TIMEOUT_WAIT_FOREVER", ")", "hadProblems", ".", "append", "(", "retVal", ".", "exitStatus", "==", "0", ")", "# Deleting the jobs", "for", "jt", "in", "jobTemplates", ":", "s", ".", "deleteJobTemplate", "(", "jt", ")", "# Closing the session", "s", ".", "exit", "(", ")", "# Checking for problems", "for", "hadProblem", "in", "hadProblems", ":", "if", "not", "hadProblem", ":", "msg", "=", "\"Some SGE jobs had errors...\"", "raise", "ProgramError", "(", "msg", ")" ]
33.819277
18.819277
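As a concrete reference, the plink invocation assembled for sub-task (i=1, j=2), using hypothetical names 'data' (bfile), 'data.frq' (freqFile) and 'out' (outPrefix):

plink --noweb --bfile data --read-freq data.frq --genome --genome-full --genome-lists out_tmp.list1 out_tmp.list2 --out out_output.sub.1.2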
def get_all_context(self):
        """
        Retrieves Plan and Pricing for current order creation
        """
        self.plan_pricing = get_object_or_404(PlanPricing.objects.all().select_related('plan', 'pricing'),
                                              Q(pk=self.kwargs['pk']) & Q(plan__available=True) & (
                                                  Q(plan__customized=self.request.user) | Q(
                                                      plan__customized__isnull=True)))

        # A user is not allowed to create a new order for a Plan when he has a different Plan
        # He should use the Plan Change View for this kind of action

        if not self.request.user.userplan.is_expired() and self.request.user.userplan.plan != self.plan_pricing.plan:
            raise Http404

        self.plan = self.plan_pricing.plan
        self.pricing = self.plan_pricing.pricing
[ "def", "get_all_context", "(", "self", ")", ":", "self", ".", "plan_pricing", "=", "get_object_or_404", "(", "PlanPricing", ".", "objects", ".", "all", "(", ")", ".", "select_related", "(", "'plan'", ",", "'pricing'", ")", ",", "Q", "(", "pk", "=", "self", ".", "kwargs", "[", "'pk'", "]", ")", "&", "Q", "(", "plan__available", "=", "True", ")", "&", "(", "Q", "(", "plan__customized", "=", "self", ".", "request", ".", "user", ")", "|", "Q", "(", "plan__customized__isnull", "=", "True", ")", ")", ")", "# User is not allowed to create new order for Plan when he has different Plan", "# He should use Plan Change View for this kind of action", "if", "not", "self", ".", "request", ".", "user", ".", "userplan", ".", "is_expired", "(", ")", "and", "self", ".", "request", ".", "user", ".", "userplan", ".", "plan", "!=", "self", ".", "plan_pricing", ".", "plan", ":", "raise", "Http404", "self", ".", "plan", "=", "self", ".", "plan_pricing", ".", "plan", "self", ".", "pricing", "=", "self", ".", "plan_pricing", ".", "pricing" ]
51.352941
32.294118
def is_downloadable(self, response):
        '''
        Checks whether the response object is an HTML page
        or a likely downloadable file.
        Intended to detect error pages or prompts
        such as kaggle's competition rules acceptance prompt.

        Returns False if the response is an HTML page that is not marked
        as an attachment (a likely prompt). True otherwise.
        '''

        content_type = response.headers.get('Content-Type', '')
        content_disp = response.headers.get('Content-Disposition', '')

        if 'text/html' in content_type and 'attachment' not in content_disp:
            # This response is an HTML file
            # which is not marked as an attachment,
            # so we likely hit a rules acceptance prompt
            return False
        return True
[ "def", "is_downloadable", "(", "self", ",", "response", ")", ":", "content_type", "=", "response", ".", "headers", ".", "get", "(", "'Content-Type'", ",", "''", ")", "content_disp", "=", "response", ".", "headers", ".", "get", "(", "'Content-Disposition'", ",", "''", ")", "if", "'text/html'", "in", "content_type", "and", "'attachment'", "not", "in", "content_disp", ":", "# This response is a html file", "# which is not marked as an attachment,", "# so we likely hit a rules acceptance prompt", "return", "False", "return", "True" ]
38.578947
21.842105
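The header check itself needs no network; a standalone sketch with hypothetical response headers:

headers = {'Content-Type': 'text/html; charset=utf-8'}  # hypothetical response headers
content_type = headers.get('Content-Type', '')
content_disp = headers.get('Content-Disposition', '')
downloadable = not ('text/html' in content_type and 'attachment' not in content_disp)
print(downloadable)  # False: an HTML body without an attachment disposition is a prompt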
def fourier_ratios(phase_shifted_coeffs):
    r"""
    Returns the :math:`R_{j1}` and :math:`\phi_{j1}` values for the given
    phase-shifted coefficients.

    .. math::
        R_{j1} = A_j / A_1
    .. math::
        \phi_{j1} = \phi_j - j \phi_1

    **Parameters**

    phase_shifted_coeffs : array-like, shape = [:math:`2n+1`]
        Fourier sine or cosine series coefficients.
        :math:`[ A_0, A_1, \Phi_1, \ldots, A_n, \Phi_n ]`.

    **Returns**

    out : array-like, shape = [:math:`2(n-1)`]
        Fourier ratios
        :math:`[ R_{21}, \phi_{21}, \ldots, R_{n1}, \phi_{n1} ]`.
    """
    n_coeff = phase_shifted_coeffs.size
    # n_coeff = 2*degree + 1 => degree = (n_coeff-1)/2
    degree = (n_coeff - 1) // 2

    amplitudes = phase_shifted_coeffs[1::2]
    phases = phase_shifted_coeffs[2::2]

    # there are degree-1 amplitude ratios, and degree-1 phase deltas,
    # so altogether there are 2*(degree-1) values
    ratios = numpy.empty(2*(degree-1), dtype=float)
    amplitude_ratios = ratios[::2]
    phase_deltas = ratios[1::2]

    # amplitudes may be zero, so suppress division by zero warnings
    with numpy.errstate(divide="ignore"):
        amplitude_ratios[:] = amplitudes[1:]
        amplitude_ratios /= amplitudes[0]

    # indices for phase deltas
    i = numpy.arange(2, degree+1)
    phase_deltas[:] = phases[1:]
    phase_deltas -= i*phases[0]
    # constrain phase_deltas between 0 and 2*pi
    phase_deltas %= 2*pi

    return ratios
[ "def", "fourier_ratios", "(", "phase_shifted_coeffs", ")", ":", "n_coeff", "=", "phase_shifted_coeffs", ".", "size", "# n_coeff = 2*degree + 1 => degree = (n_coeff-1)/2", "degree", "=", "(", "n_coeff", "-", "1", ")", "/", "2", "amplitudes", "=", "phase_shifted_coeffs", "[", "1", ":", ":", "2", "]", "phases", "=", "phase_shifted_coeffs", "[", "2", ":", ":", "2", "]", "# there are degree-1 amplitude ratios, and degree-1 phase deltas,", "# so altogether there are 2*(degree-1) values", "ratios", "=", "numpy", ".", "empty", "(", "2", "*", "(", "degree", "-", "1", ")", ",", "dtype", "=", "float", ")", "amplitude_ratios", "=", "ratios", "[", ":", ":", "2", "]", "phase_deltas", "=", "ratios", "[", "1", ":", ":", "2", "]", "# amplitudes may be zero, so suppress division by zero warnings", "with", "numpy", ".", "errstate", "(", "divide", "=", "\"ignore\"", ")", ":", "amplitude_ratios", "[", ":", "]", "=", "amplitudes", "[", "1", ":", "]", "amplitude_ratios", "/=", "amplitudes", "[", "0", "]", "# indices for phase deltas", "i", "=", "numpy", ".", "arange", "(", "2", ",", "degree", "+", "1", ")", "phase_deltas", "[", ":", "]", "=", "phases", "[", "1", ":", "]", "phase_deltas", "-=", "i", "*", "phases", "[", "0", "]", "# constrain phase_deltas between 0 and 2*pi", "phase_deltas", "%=", "2", "*", "pi", "return", "ratios" ]
29.867925
20.075472
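A worked example for degree 2, where the coefficient vector is [A0, A1, Φ1, A2, Φ2] (values hypothetical):

import numpy

coeffs = numpy.array([0.0, 2.0, 0.5, 1.0, 1.5])
# R21 = A2 / A1 and phi21 = Φ2 - 2*Φ1, taken modulo 2π:
print(coeffs[3] / coeffs[1])                         # 0.5
print((coeffs[4] - 2 * coeffs[2]) % (2 * numpy.pi))  # 0.5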
def extend_back(self, dag, edge_map=None): """Add `dag` at the end of `self`, using `edge_map`. """ edge_map = edge_map or {} for qreg in dag.qregs.values(): if qreg.name not in self.qregs: self.add_qreg(QuantumRegister(qreg.size, qreg.name)) edge_map.update([(qbit, qbit) for qbit in qreg if qbit not in edge_map]) for creg in dag.cregs.values(): if creg.name not in self.cregs: self.add_creg(ClassicalRegister(creg.size, creg.name)) edge_map.update([(cbit, cbit) for cbit in creg if cbit not in edge_map]) self.compose_back(dag, edge_map)
[ "def", "extend_back", "(", "self", ",", "dag", ",", "edge_map", "=", "None", ")", ":", "edge_map", "=", "edge_map", "or", "{", "}", "for", "qreg", "in", "dag", ".", "qregs", ".", "values", "(", ")", ":", "if", "qreg", ".", "name", "not", "in", "self", ".", "qregs", ":", "self", ".", "add_qreg", "(", "QuantumRegister", "(", "qreg", ".", "size", ",", "qreg", ".", "name", ")", ")", "edge_map", ".", "update", "(", "[", "(", "qbit", ",", "qbit", ")", "for", "qbit", "in", "qreg", "if", "qbit", "not", "in", "edge_map", "]", ")", "for", "creg", "in", "dag", ".", "cregs", ".", "values", "(", ")", ":", "if", "creg", ".", "name", "not", "in", "self", ".", "cregs", ":", "self", ".", "add_creg", "(", "ClassicalRegister", "(", "creg", ".", "size", ",", "creg", ".", "name", ")", ")", "edge_map", ".", "update", "(", "[", "(", "cbit", ",", "cbit", ")", "for", "cbit", "in", "creg", "if", "cbit", "not", "in", "edge_map", "]", ")", "self", ".", "compose_back", "(", "dag", ",", "edge_map", ")" ]
43.733333
16.2
def from_ordered_sequence(cls, iseq): """ Return the root of a balanced binary search tree populated with the values in iterable *iseq*. """ seq = list(iseq) # optimize for usually all fits by making longest first bst = cls(seq.pop()) bst._insert_from_ordered_sequence(seq) return bst
[ "def", "from_ordered_sequence", "(", "cls", ",", "iseq", ")", ":", "seq", "=", "list", "(", "iseq", ")", "# optimize for usually all fits by making longest first", "bst", "=", "cls", "(", "seq", ".", "pop", "(", ")", ")", "bst", ".", "_insert_from_ordered_sequence", "(", "seq", ")", "return", "bst" ]
34.7
12.3
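The "longest first" comment refers to pop() taking the last element of the ordered sequence as the root; a quick illustration with hypothetical values:

seq = ['a', 'ab', 'abc']  # ordered, so the longest item comes last
root_value = seq.pop()    # 'abc' roots the tree before the rest is inserted
print(root_value, seq)    # -> abc ['a', 'ab']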
def set_unicode(self, quoted_text=False): """Converts the context's ``value`` to a sequence of unicode code points for holding text tokens, indicating whether the text is quoted. """ if isinstance(self.value, CodePointArray): assert self.quoted_text == quoted_text return self self.value = CodePointArray(self.value) self.quoted_text = quoted_text self.line_comment = False return self
[ "def", "set_unicode", "(", "self", ",", "quoted_text", "=", "False", ")", ":", "if", "isinstance", "(", "self", ".", "value", ",", "CodePointArray", ")", ":", "assert", "self", ".", "quoted_text", "==", "quoted_text", "return", "self", "self", ".", "value", "=", "CodePointArray", "(", "self", ".", "value", ")", "self", ".", "quoted_text", "=", "quoted_text", "self", ".", "line_comment", "=", "False", "return", "self" ]
42.090909
7.272727
def ppj(json_data):
    """Pretty-print a dictionary as a sorted, indented JSON string

    :param json_data: dictionary to print
    """
    return str(json.dumps(
        json_data,
        sort_keys=True,
        indent=4,
        separators=(',', ': ')))
[ "def", "ppj", "(", "json_data", ")", ":", "return", "str", "(", "json", ".", "dumps", "(", "json_data", ",", "sort_keys", "=", "True", ",", "indent", "=", "4", ",", "separators", "=", "(", "','", ",", "': '", ")", ")", ")" ]
22.5
11.4
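Usage, doctest-style (assumes json is imported at module level):

>>> print(ppj({"b": 2, "a": 1}))
{
    "a": 1,
    "b": 2
}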