query
stringlengths
1
46.9k
pos
stringlengths
75
104k
neg
listlengths
12
12
scores
listlengths
12
12
Set specified value to yaml path. Example: patch('application/components/child/configuration/__locator.application-id','777') Will change child app ID to 777
def patch_env(env, path, value):
    """Set *value* at the slash-separated *path* inside the nested dict *env*.

    Example:
        patch_env(env, 'application/components/child/configuration/__locator.application-id', '777')
        Will change child app ID to 777

    :param env: nested dictionary (parsed yaml environment)
    :param path: slash-separated key path; every segment but the last must exist
    :param value: value to store at the final key
    :return: True on success
    """
    segments = path.split("/")
    prefix = "/".join(segments[:-1])
    # Walk down to the parent node of the final key.
    node = env
    for part in prefix.split("/"):
        node = node[part]
    node[segments[-1]] = value
    return True
[ "def patch(self, path, value=None):\n \"\"\" Set specified value to yaml path.\n Example:\n patch('application/components/child/configuration/__locator.application-id','777')\n Will change child app ID to 777\n \"\"\"\n # noinspection PyShadowingNames\n def pathGet(d...
[ 0.863051176071167, 0.725434422492981, 0.7185708284378052, 0.7039631009101868, 0.68790602684021, 0.6823700666427612, 0.6745213866233826, 0.6723222732543945, 0.6701123714447021, 0.667013943195343, 0.6660921573638916, 0.6651179194450378 ]
Extract metadata link for starter kit from platform configs. Starter kit available on add component - starter kit menu. Beware, config could be changed by deploy scripts during deploy. :param name: Name of starter kit :return: Link to metadata
def get_starter_kit_meta(name):
    """
    Extract metadata link for starter kit from platform configs.
    Starter kit available on add component - starter kit menu.
    Beware, config could be changed by deploy scripts during deploy.

    :param name: Name of starter kit
    :return: Link to metadata
    """
    # starter_kits_url is a module-level constant — assumed to point at the
    # platform's kit config (yaml).
    kits = yaml.safe_load(requests.get(url=starter_kits_url).content)['kits']
    matching = [kit['metaUrl'] for kit in kits if kit['name'] == name]
    assert len(matching) == 1, "No component %s found in meta:\n %s" % (name, kits)
    return yaml.safe_load(requests.get(url=matching[0]).content)['download_url']
[ "def get_metadata(session, name):\n \"\"\"\n Gets meta data from launchpad for the given package.\n :param session: requests Session instance\n :param name: str, package\n :return: dict, meta data\n \"\"\"\n resp = session.get(\n \"https://api.launchpad.net/1.0/{}/releases\".format(name)...
[ 0.6562999486923218, 0.655454158782959, 0.643340528011322, 0.636264443397522, 0.6352569460868835, 0.6327133178710938, 0.6320620775222778, 0.6312291026115417, 0.6309686899185181, 0.6300292611122131, 0.6295232772827148, 0.6292681694030762 ]
Extract manifest url from metadata url :param metaurl: Url to metadata :param name: Name of application to extract :return:
def get_manifest_from_meta(metaurl, name):
    """
    Extract manifest url from metadata url

    :param metaurl: Url (or local file path) to metadata
    :param name: Name of application to extract
    :return: manifest url of the named application
    """
    if 'http' in metaurl:
        kit = yaml.safe_load(requests.get(url=metaurl).content)['kit']['applications']
    else:
        # BUG FIX: the original used open(metaurl).read() without closing the
        # file handle; use a context manager so it is always released.
        with open(metaurl) as meta_file:
            kit = yaml.safe_load(meta_file.read())['kit']['applications']
    app_urls = [x['manifest'] for x in kit if x['name'] == name]
    assert len(app_urls) == 1
    return app_urls[0]
[ "def set_applications_from_meta(self, metadata, exclude=None):\n \"\"\"\n Parses meta and update or create each application\n :param str metadata: path or url to meta.yml\n :param list[str] exclude: List of application names, to exclude from meta.\n This ...
[ 0.674472451210022, 0.648524284362793, 0.6440187096595764, 0.637789785861969, 0.6317768692970276, 0.6311434507369995, 0.6276582479476929, 0.6270686984062195, 0.6214728355407715, 0.6208724975585938, 0.6203470826148987, 0.6190667748451233 ]
Function getPayloadStruct Get the payload structure to do a creation or a modification @param key: The key to modify @param attribute: The data @param objType: NOT USED in this class @return RETURN: The API result
def getPayloadStruct(self, attributes, objType=None):
    """ Function getPayloadStruct
    Build the payload structure for a creation or a modification.

    @param attributes: The data to wrap
    @param objType: NOT USED in this class
    @return RETURN: attributes wrapped under self.payloadObj, and additionally
        under self.parentPayloadObject when self.setInParentPayload is set
    """
    payload = {self.payloadObj: attributes}
    if self.setInParentPayload:
        payload = {self.parentPayloadObject: payload}
    return payload
[ "def getPayloadStruct(self, attributes, objType):\n \"\"\" Function getPayloadStruct\n Get the payload structure to do a creation or a modification\n\n @param attribute: The data\n @param objType: SubItem type (e.g: hostgroup for hostgroup_class)\n @return RETURN: the payload\n ...
[ 0.8354079723358154, 0.7295766472816467, 0.6407061815261841, 0.6358272433280945, 0.6252712607383728, 0.6211220622062683, 0.6209088563919067, 0.6204599142074585, 0.6190513968467712, 0.6182016134262085, 0.6174253821372986, 0.6140912771224976 ]
Function log Decorator to log lasts request before sending a new one @return RETURN: None
def log(function):
    """ Function log
    Decorator that records the previous request's bookkeeping attributes
    into self.history before clearing them for the next request.

    @return RETURN: the wrapped function's return value
    """
    def _log(self, *args, **kwargs):
        result = function(self, *args, **kwargs)
        # Bound the history: drop the oldest entry when over the limit.
        if len(self.history) > self.maxHistory:
            self.history = self.history[1:self.maxHistory]
        snapshot = {'errorMsg': self.errorMsg,
                    'payload': self.payload,
                    'url': self.url,
                    'resp': self.resp,
                    'res': self.res,
                    'printErrors': self.printErrors,
                    'method': self.method}
        self.history.append(snapshot)
        self.clearReqVars()
        return result
    return _log
[ "def log_request(self, handler: RequestHandler) -> None:\n \"\"\"Writes a completed HTTP request to the logs.\n\n By default writes to the python root logger. To change\n this behavior either subclass Application and override this method,\n or pass a function in the application settings...
[ 0.722433865070343, 0.7216631770133972, 0.7147947549819946, 0.7131589651107788, 0.7096238136291504, 0.7083010673522949, 0.7022097110748291, 0.7021543979644775, 0.7012953162193298, 0.699263334274292, 0.6988188028335571, 0.6917266249656677 ]
Function clearHistVars Clear the variables used to get history of all vars @return RETURN: None
def clearReqVars(self):
    """ Function clearReqVars
    Reset the per-request bookkeeping attributes to None so the next
    request starts from a clean slate.

    @return RETURN: None
    """
    for attr in ('errorMsg', 'payload', 'url', 'resp',
                 'res', 'method', 'printErrors'):
        setattr(self, attr, None)
[ "public function clear()\n {\n $shmid = shm_attach($this->sysvKey, $this->shmSize, $this->perm);\n shm_remove_var($shmid, self::VAR_KEY);\n }", "public function clear()\n\t{\n\t\t$cvars = get_class_vars(__CLASS__);\n\n\t\t$this->_updatedkeys = array();\n\n\t\tforeach ($cvars as $key => $value)...
[ 0.6837462782859802, 0.6633564233779907, 0.6587758660316467, 0.6558899879455566, 0.6457203030586243, 0.6425361633300781, 0.6407960057258606, 0.6398618817329407, 0.639014482498169, 0.6376631259918213, 0.6365300416946411, 0.6364492774009705 ]
Function list Get the list of an object @param obj: object name ('hosts', 'puppetclasses'...) @param filter: filter for objects @param only_id: boolean to only return dict with name/id @return RETURN: the list of the object
def list(self, obj, filter=False, only_id=False, limit=20):
    """ Function list
    Get the list of an object

    @param obj: object name ('hosts', 'puppetclasses'...)
    @param filter: filter for objects
    @param only_id: boolean to only return dict with name/id
    @param limit: page size for the API request
    @return RETURN: the list of the object
    """
    self.method = 'GET'
    self.url = '{}{}/?per_page={}'.format(self.base_url, obj, limit)
    if filter:
        self.url += '&search={}'.format(filter)
    self.resp = requests.get(url=self.url, auth=self.auth,
                             headers=self.headers, cert=self.ca_cert)
    if not only_id:
        return self.__process_resp__(obj)
    if self.__process_resp__(obj) is False:
        return False
    results = self.res['results']
    # Results may come back as a flat list or grouped into a dict of lists.
    if type(results) is list:
        return {entry['name']: entry['id'] for entry in results}
    if type(results) is dict:
        mapping = {}
        for group in results.values():
            for entry in group:
                mapping[entry['name']] = entry['id']
        return mapping
    return False
[ "def apply_filter_list(func, obj):\n \"\"\"Apply `func` to list or tuple `obj` element-wise and directly otherwise.\"\"\"\n if isinstance(obj, (list, tuple)):\n return [func(item) for item in obj]\n return func(obj)", "def _where(self, filter_fn):\n ''' use this to filter VLists, simply pro...
[ 0.6781755685806274, 0.6688603162765503, 0.6688332557678223, 0.6551244854927063, 0.6471869349479675, 0.6464616060256958, 0.6460150480270386, 0.6420566439628601, 0.6405892968177795, 0.6329345703125, 0.6318852305412292, 0.6314055323600769 ]
Function get Get an object by id @param obj: object name ('hosts', 'puppetclasses'...) @param id: the id of the object (name or id) @return RETURN: the targeted object
def get(self, obj, id, sub_object=None):
    """ Function get
    Get an object by id

    @param obj: object name ('hosts', 'puppetclasses'...)
    @param id: the id of the object (name or id)
    @param sub_object: optional sub-resource appended to the url
    @return RETURN: the targeted object, or False on failure
    """
    self.method = 'GET'
    url = '{}{}/{}'.format(self.base_url, obj, id)
    if sub_object:
        url = url + '/' + sub_object
    self.url = url
    self.resp = requests.get(url=self.url, auth=self.auth,
                             headers=self.headers, cert=self.ca_cert)
    return self.res if self.__process_resp__(obj) else False
[ "def get_id_by_name(self, obj, name):\n \"\"\" Function get_id_by_name\n Get the id of an object\n\n @param obj: object name ('hosts', 'puppetclasses'...)\n @param id: the id of the object (name or id)\n @return RETURN: the targeted object\n \"\"\"\n list = self.list...
[ 0.785792887210846, 0.7371777892112732, 0.7038533091545105, 0.6925148367881775, 0.691858172416687, 0.6908140182495117, 0.6882432699203491, 0.6878669857978821, 0.686981201171875, 0.6845094561576843, 0.6832347512245178, 0.6818809509277344 ]
Function get_id_by_name Get the id of an object @param obj: object name ('hosts', 'puppetclasses'...) @param id: the id of the object (name or id) @return RETURN: the targeted object
def get_id_by_name(self, obj, name):
    """ Function get_id_by_name
    Get the id of an object

    @param obj: object name ('hosts', 'puppetclasses'...)
    @param name: the name of the object to look up
    @return RETURN: the object's id, or False when not found / on API failure
    """
    # Renamed the local from `list` (shadowed the builtin) to `ids`.
    ids = self.list(obj, filter='name = "{}"'.format(name),
                    only_id=True, limit=1)
    # BUG FIX: self.list() returns False when the request fails; the original
    # then crashed with AttributeError on False.keys(). Propagate the failure.
    if not ids:
        return False
    return ids[name] if name in ids else False
[ "def get(self, obj, id, sub_object=None):\n \"\"\" Function get\n Get an object by id\n\n @param obj: object name ('hosts', 'puppetclasses'...)\n @param id: the id of the object (name or id)\n @return RETURN: the targeted object\n \"\"\"\n self.url = '{}{}/{}'.format...
[ 0.7663769125938416, 0.7547003626823425, 0.7183605432510376, 0.7151867151260376, 0.708323061466217, 0.7081875205039978, 0.6977672576904297, 0.6951950788497925, 0.6887167096138, 0.68255215883255, 0.6808472871780396, 0.678314745426178 ]
Function set Set an object by id @param obj: object name ('hosts', 'puppetclasses'...) @param id: the id of the object (name or id) @param action: specific action of an object ('power'...) @param payload: the dict of the payload @param async: should this request be async, if true use return.result() to get the response @return RETURN: the server response
def set(self, obj, id, payload, action='', async_mode=False):
    """ Function set
    Set an object by id

    @param obj: object name ('hosts', 'puppetclasses'...)
    @param id: the id of the object (name or id)
    @param payload: the dict of the payload
    @param action: specific action of an object ('power'...)
    @param async_mode: should this request be async, if true use
        return.result() to get the response.
        NOTE: renamed from ``async``, which became a reserved keyword in
        Python 3.7 and made this method a SyntaxError.
    @return RETURN: the server response
    """
    self.url = '{}{}/{}'.format(self.base_url, obj, id)
    self.method = 'PUT'
    if action:
        self.url += '/{}'.format(action)
    self.payload = json.dumps(payload)
    if async_mode:
        session = FuturesSession()
        return session.put(url=self.url, auth=self.auth,
                           headers=self.headers, data=self.payload,
                           cert=self.ca_cert)
    self.resp = requests.put(url=self.url, auth=self.auth,
                             headers=self.headers, data=self.payload,
                             cert=self.ca_cert)
    if self.__process_resp__(obj):
        return self.res
    return False
[ "def create(self, obj, payload, async=False):\n \"\"\" Function create\n Create an new object\n\n @param obj: object name ('hosts', 'puppetclasses'...)\n @param payload: the dict of the payload\n @param async: should this request be async, if true use\n retu...
[ 0.7629753947257996, 0.6833419799804688, 0.6769798994064331, 0.6621506810188293, 0.6462209820747375, 0.6453836560249329, 0.6416064500808716, 0.6394880414009094, 0.6394200921058655, 0.6385706663131714, 0.638204038143158, 0.6359037160873413 ]
Function create Create an new object @param obj: object name ('hosts', 'puppetclasses'...) @param payload: the dict of the payload @param async: should this request be async, if true use return.result() to get the response @return RETURN: the server response
def create(self, obj, payload, async_mode=False):
    """ Function create
    Create a new object

    @param obj: object name ('hosts', 'puppetclasses'...)
    @param payload: the dict of the payload
    @param async_mode: should this request be async, if true use
        return.result() to get the response.
        NOTE: renamed from ``async``, which became a reserved keyword in
        Python 3.7 and made this method a SyntaxError.
    @return RETURN: the server response
    """
    self.url = self.base_url + obj
    self.method = 'POST'
    self.payload = json.dumps(payload)
    if async_mode:
        self.method = 'POST(Async)'
        session = FuturesSession()
        self.resp = session.post(url=self.url, auth=self.auth,
                                 headers=self.headers, data=self.payload,
                                 cert=self.ca_cert)
        return self.resp
    self.resp = requests.post(url=self.url, auth=self.auth,
                              headers=self.headers, data=self.payload,
                              cert=self.ca_cert)
    return self.__process_resp__(obj)
[ "def set(self, obj, id, payload, action='', async=False):\n \"\"\" Function set\n Set an object by id\n\n @param obj: object name ('hosts', 'puppetclasses'...)\n @param id: the id of the object (name or id)\n @param action: specific action of an object ('power'...)\n @param...
[ 0.6929964423179626, 0.681441605091095, 0.6703740954399109, 0.6687217950820923, 0.6633094549179077, 0.6537376046180725, 0.65224289894104, 0.6456822156906128, 0.643081545829773, 0.6398965120315552, 0.6397796273231506, 0.6394233703613281 ]
Function delete Delete an object by id @param obj: object name ('hosts', 'puppetclasses'...) @param id: the id of the object (name or id) @return RETURN: the server response
def delete(self, obj, id):
    """ Function delete
    Delete an object by id

    @param obj: object name ('hosts', 'puppetclasses'...)
    @param id: the id of the object (name or id)
    @return RETURN: the server response
    """
    self.method = 'DELETE'
    self.url = '{}{}/{}'.format(self.base_url, obj, id)
    self.resp = requests.delete(url=self.url,
                                auth=self.auth,
                                headers=self.headers,
                                cert=self.ca_cert)
    return self.__process_resp__(obj)
[ "def del_object(self, obj):\n \"\"\"Debug deletes obj of obj[_type] with id of obj['_id']\"\"\"\n if obj['_index'] is None or obj['_index'] == \"\":\n raise Exception(\"Invalid Object\")\n if obj['_id'] is None or obj['_id'] == \"\":\n raise Exception(\"Invalid Object\")\n...
[ 0.7532449960708618, 0.7522357702255249, 0.7273005247116089, 0.7185198664665222, 0.7165303826332092, 0.7154962420463562, 0.7148327231407166, 0.7069894671440125, 0.7058194875717163, 0.704591691493988, 0.7043777108192444, 0.7033277750015259 ]
Modified ``run`` that captures return value and exceptions from ``target``
def run(self):
    """Modified ``run`` that captures return value and exceptions from ``target``"""
    try:
        target = self._target
        if target:
            result = target(*self._args, **self._kwargs)
            if result is not None:
                # A thread target should not return anything; record the
                # stray value as an exception-like marker.
                self._exception = OrphanedReturn(self, result)
    except BaseException as err:
        self._exception = err
    finally:
        # Break reference cycles between the thread and its target/arguments
        # (mirrors threading.Thread.run).
        del self._target, self._args, self._kwargs
[ "def capture(self, *args, **kwargs):\n '''Run a task and return a dictionary with stderr, stdout and the\n return value. Also, the traceback from the exception if there was\n one'''\n import traceback\n try:\n from StringIO import StringIO\n except ImportError:\n...
[ 0.7184444069862366, 0.7156774401664734, 0.7035624384880066, 0.6966459155082703, 0.6876786351203918, 0.6849420070648193, 0.6831562519073486, 0.6829209327697754, 0.6812566518783569, 0.6808006167411804, 0.6792249083518982, 0.67826247215271 ]
Start all queued payloads
def _start_payloads(self):
    """Start all queued payloads"""
    with self._lock:
        queued = self._payloads.copy()
        self._payloads.clear()
        for routine in queued:
            worker = CapturingThread(target=routine)
            worker.start()
            self._threads.add(worker)
            self._logger.debug('booted thread %s', worker)
    # Yield control so freshly started threads get a chance to run.
    time.sleep(0)
[ "async def _start_payloads(self):\n \"\"\"Start all queued payloads\"\"\"\n with self._lock:\n for coroutine in self._payloads:\n task = self.event_loop.create_task(coroutine())\n self._tasks.add(task)\n self._payloads.clear()\n await asyncio....
[ 0.894452691078186, 0.88545161485672, 0.7891626954078674, 0.7770432233810425, 0.7561456561088562, 0.7468627095222473, 0.7409204840660095, 0.7305784225463867, 0.7299909591674805, 0.7234981060028076, 0.7174237966537476, 0.70259028673172 ]
Clean up all finished payloads
def _reap_payloads(self): """Clean up all finished payloads""" for thread in self._threads.copy(): # CapturingThread.join will throw if thread.join(timeout=0): self._threads.remove(thread) self._logger.debug('reaped thread %s', thread)
[ "async def _reap_payloads(self):\n \"\"\"Clean up all finished payloads\"\"\"\n for task in self._tasks.copy():\n if task.done():\n self._tasks.remove(task)\n if task.exception() is not None:\n raise task.exception()\n await asyncio.sl...
[ 0.792407214641571, 0.7866622805595398, 0.7357620596885681, 0.7221269011497498, 0.7220130562782288, 0.7094706892967224, 0.7041050791740417, 0.7006880640983582, 0.6978424191474915, 0.6969825029373169, 0.6950753331184387, 0.6941037774085999 ]
Decorate functions that modify the internally stored usernotes JSON. Ensures that updates are mirrored onto reddit. Arguments: func: the function being decorated
def update_cache(func):
    """Decorate functions that modify the internally stored usernotes JSON.

    Ensures that updates are mirrored onto reddit.

    Arguments:
        func: the function being decorated
    """
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        """The wrapper function."""
        lazy = kwargs.pop('lazy', False)
        if not lazy:
            self.get_json()
        result = func(self, *args, **kwargs)
        # A string return value is assumed to be an update message and is
        # pushed out via set_json instead of being returned to the caller.
        if isinstance(result, str) and not lazy:
            self.set_json(result)
        else:
            return result
    return wrapper
[ "def alias_function(function, class_name):\n \"\"\"Create a RedditContentObject function mapped to a BaseReddit function.\n\n The BaseReddit classes define the majority of the API's functions. The\n first argument for many of these functions is the RedditContentObject that\n they operate on. This factor...
[ 0.7108466625213623, 0.699897289276123, 0.6968865990638733, 0.6755101680755615, 0.6744885444641113, 0.6734359264373779, 0.6732186079025269, 0.6724695563316345, 0.6722174286842346, 0.6711249351501465, 0.6701214909553528, 0.6627087593078613 ]
Create an unbound prototype of this class, partially applying arguments .. code:: python controller = Controller.s(interval=20) pipeline = controller(rate=10) >> pool
def s(cls: Type[C], *args, **kwargs) -> Partial[C]:
    """
    Create an unbound prototype of this class, partially applying arguments

    .. code:: python

        controller = Controller.s(interval=20)
        pipeline = controller(rate=10) >> pool
    """
    prototype = Partial(cls, *args, **kwargs)
    return prototype
[ "def s(self, *args, **kwargs) -> Partial[Stepwise]:\n \"\"\"\n Create an unbound prototype of this class, partially applying arguments\n\n .. code:: python\n\n @stepwise\n def control(pool: Pool, interval):\n return 10\n\n pipeline = control.s(int...
[ 0.8144322037696838, 0.6688690781593323, 0.6525091528892517, 0.6471115350723267, 0.6442935466766357, 0.6442053914070129, 0.6432787179946899, 0.6421696543693542, 0.6420442461967468, 0.6401344537734985, 0.6352636218070984, 0.6345563530921936 ]
Collect all bases and organize into parent/child mappings.
def _build_mappings( self, classes: Sequence[type] ) -> Tuple[Mapping[type, Sequence[type]], Mapping[type, Sequence[type]]]: """ Collect all bases and organize into parent/child mappings. """ parents_to_children: MutableMapping[type, Set[type]] = {} children_to_parents: MutableMapping[type, Set[type]] = {} visited_classes: Set[type] = set() class_stack = list(classes) while class_stack: class_ = class_stack.pop() if class_ in visited_classes: continue visited_classes.add(class_) for base in class_.__bases__: if base not in visited_classes: class_stack.append(base) parents_to_children.setdefault(base, set()).add(class_) children_to_parents.setdefault(class_, set()).add(base) sorted_parents_to_children: MutableMapping[ type, List[type] ] = collections.OrderedDict() for parent, children in sorted( parents_to_children.items(), key=lambda x: (x[0].__module__, x[0].__name__) ): sorted_parents_to_children[parent] = sorted( children, key=lambda x: (x.__module__, x.__name__) ) sorted_children_to_parents: MutableMapping[ type, List[type] ] = collections.OrderedDict() for child, parents in sorted( children_to_parents.items(), key=lambda x: (x[0].__module__, x[0].__name__) ): sorted_children_to_parents[child] = sorted( parents, key=lambda x: (x.__module__, x.__name__) ) return sorted_parents_to_children, sorted_children_to_parents
[ "def recursive_bases(self):\n \"\"\"list of all :class:`base classes <hierarchy_info_t>`\"\"\"\n if self._recursive_bases is None:\n to_go = self.bases[:]\n all_bases = []\n while to_go:\n base = to_go.pop()\n if base not in all_bases:\n ...
[ 0.7023827433586121, 0.6952263116836548, 0.6884115934371948, 0.6778467893600464, 0.6761168241500854, 0.6727396845817566, 0.6723271012306213, 0.6719276309013367, 0.6717371940612793, 0.6709029674530029, 0.6694896221160889, 0.6686638593673706 ]
Collect all classes defined in/under ``package_paths``.
def _collect_classes(
    self, package_paths: Sequence[str], recurse_subpackages: bool = True
) -> Sequence[type]:
    """
    Collect all classes defined in/under ``package_paths``.

    Each entry is either an importable package/module path, or a dotted
    path naming a single class directly.
    """
    import uqbar.apis

    collected = []
    root_source_paths: Set[str] = set()
    # Resolve each requested path to source file(s), or directly to a class.
    for dotted in package_paths:
        try:
            module = importlib.import_module(dotted)
            if hasattr(module, "__path__"):
                root_source_paths.update(getattr(module, "__path__"))
            else:
                root_source_paths.add(module.__file__)
        except ModuleNotFoundError:
            # Not importable as a module: treat the last segment as a class.
            dotted, _, class_name = dotted.rpartition(".")
            module = importlib.import_module(dotted)
            collected.append(getattr(module, class_name))
    # Walk every discovered source file and harvest its public classes.
    for source_path in uqbar.apis.collect_source_paths(
        root_source_paths, recurse_subpackages=recurse_subpackages
    ):
        package_path = uqbar.apis.source_path_to_package_path(source_path)
        module = importlib.import_module(package_path)
        for name in dir(module):
            if name.startswith("_"):
                continue
            candidate = getattr(module, name)
            # Only keep classes actually defined in this module.
            if isinstance(candidate, type) and candidate.__module__ == module.__name__:
                collected.append(candidate)
    return sorted(collected, key=lambda cls: (cls.__module__, cls.__name__))
[ "def collect_dependency_paths(package_name):\n \"\"\"\n TODO docstrings\n \"\"\"\n deps = []\n try:\n dist = pkg_resources.get_distribution(package_name)\n except (pkg_resources.DistributionNotFound, ValueError):\n message = \"Distribution '{}' not found.\".format(package_name)\n ...
[ 0.7355327606201172, 0.7090335488319397, 0.7000157833099365, 0.700005829334259, 0.6999524831771851, 0.6933577656745911, 0.689599871635437, 0.6882212162017822, 0.6865902543067932, 0.6856491565704346, 0.6845882534980774, 0.6808279752731323 ]
Return a tuple for authenticating a user If not successful raise ``AgileError``.
def get_auth():
    """Return a tuple for authenticating a user

    Tries the environment first, then the ``[user]`` section of
    ``~/.gitconfig``. Raises ``GithubException`` when neither yields
    a username/token pair.
    """
    auth = get_auth_from_env()
    if auth[0] and auth[1]:
        return auth
    config = os.path.join(os.path.expanduser("~"), '.gitconfig')
    if not os.path.isfile(config):
        raise GithubException('No .gitconfig available')
    parser = configparser.ConfigParser()
    parser.read(config)
    if 'user' not in parser:
        raise GithubException('No user section in %s' % config)
    user = parser['user']
    if 'username' not in user:
        raise GithubException('Specify username in %s user section' % config)
    if 'token' not in user:
        raise GithubException('Specify token in %s user section' % config)
    return user['username'], user['token']
[ "def authenticate(self, request):\n \"\"\"\n Returns a `User` if a correct access token has been supplied\n in the Authorization header. Otherwise returns `None`.\n \"\"\"\n auth = get_authorization_header(request).split()\n\n if not auth or auth[0].lower() != b'bearer':\n...
[ 0.7139577269554138, 0.7108141779899597, 0.7068831920623779, 0.7052825093269348, 0.7042210698127747, 0.7039636373519897, 0.7035170793533325, 0.7032013535499573, 0.6986998319625854, 0.6977265477180481, 0.6957534551620483, 0.6925418972969055 ]
Function checkAndCreate Check if an architecture exists and create it if not @param key: The targeted architectures @param payload: The targeted architectures description @param osIds: The list of os ids linked with this architecture @return RETURN: The id of the object
def checkAndCreate(self, key, payload, osIds):
    """ Function checkAndCreate
    Check if an architecture exists and create it if not

    @param key: The targeted architecture name
    @param payload: The targeted architecture description
    @param osIds: set of OS ids linked with this architecture
    @return RETURN: The id of the object, or False on failure
    """
    if key not in self:
        self[key] = payload
    oid = self[key]['id']
    if not oid:
        return False
    # Make sure every OS already attached server-side is in the requested
    # set, then push the merged list back.
    # (Loop variable renamed from `os`, which shadowed the os module.)
    for operatingsystem in self[key]['operatingsystems']:
        osIds.add(operatingsystem['id'])
    self[key]["operatingsystem_ids"] = list(osIds)
    # BUG FIX: the original compared the lengths with `is not` (object
    # identity), which is unreliable for integers; use != for value equality.
    if len(self[key]['operatingsystems']) != len(osIds):
        return False
    return oid
[ "def checkAndCreate(self, key, payload):\n \"\"\" Function checkAndCreate\n Check if an object exists and create it if not\n\n @param key: The targeted object\n @param payload: The targeted object description\n @return RETURN: The id of the object\n \"\"\"\n if key n...
[ 0.751899242401123, 0.7515250444412231, 0.7219224572181702, 0.6788215637207031, 0.6209136843681335, 0.6128567457199097, 0.6048723459243774, 0.60396808385849, 0.599178671836853, 0.59865802526474, 0.5976240634918213, 0.5968005657196045 ]
Get output (as a string) from pip command :param pip_args: list of pip switches to pass :return: string with results
def pip_command_output(pip_args):
    """
    Get output (as a string) from pip command

    :param pip_args: list of pip switches to pass
    :return: string with results
    """
    import subprocess
    import sys
    # BUG FIX: the original called pip.main() while swapping sys.stdout.
    # pip.main() was removed from pip's public API in pip 10, and the stdout
    # redirect was never restored if pip raised. Running pip in a subprocess
    # is the officially recommended approach and needs no stdout hacks.
    result = subprocess.run(
        [sys.executable, "-m", "pip"] + list(pip_args),
        stdout=subprocess.PIPE,
        universal_newlines=True,
    )
    return result.stdout
[ "def run_pip_command(command_args, pip_version=None, python_version=None):\n \"\"\"Run a pip command.\n\n Args:\n command_args (list of str): Args to pip.\n\n Returns:\n `subprocess.Popen`: Pip process.\n \"\"\"\n pip_exe, context = find_pip(pip_version, python_version)\n command = [...
[ 0.7169312238693237, 0.7122620940208435, 0.7016721963882446, 0.6971988677978516, 0.6969940066337585, 0.6961489319801331, 0.6926828622817993, 0.6923568248748779, 0.678249180316925, 0.672666072845459, 0.6705126166343689, 0.6704131960868835 ]
Generate (temporarily) versioneer.py file in project root directory :return:
def setup_versioneer():
    """
    Generate (temporarily) versioneer.py file in project root directory

    :return:
    """
    try:
        # If "versioneer install" was already run, versioneer.py is importable.
        import versioneer
        versioneer.get_version()
    except ImportError:
        # versioneer.py is missing; assume the versioneer *package* is
        # installed and try its command-line entry point.
        import subprocess
        try:
            subprocess.check_output(["versioneer", "install"])
        except OSError:
            # The binary is not on $PATH (e.g. a user-site install).
            # Ask pip where the package's files live and call the script
            # directly ("pip show -f versioneer").
            pip_info = pip_command_output(["show", "-f", "versioneer"])
            import os
            lines = pip_info.splitlines()
            # Absolute directory the package was installed into.
            install_dir = [x[len("Location: "):] for x in lines
                           if x.startswith("Location")][0]
            # Path of the "versioneer" script relative to that directory.
            rel_script = [x[len(" "):] for x in lines
                          if x.endswith(os.path.sep + "versioneer")][0]
            script_path = os.path.join(install_dir, rel_script)
            # Equivalent to running "python <script_path> install" in a shell.
            subprocess.check_output(["python", script_path, "install"])
[ "def get_root():\n \"\"\"Get the project root directory.\n\n We require that all commands are run from the project root, i.e. the\n directory that contains setup.py, setup.cfg, and versioneer.py .\n \"\"\"\n root = os.path.realpath(os.path.abspath(os.getcwd()))\n setup_py = os.path.join(root, \"se...
[ 0.7725598216056824, 0.7593209147453308, 0.7382193803787231, 0.7354882955551147, 0.7267719507217407, 0.721881091594696, 0.7006163597106934, 0.6965499520301819, 0.6929922103881836, 0.6898463368415833, 0.6882026791572571, 0.6879287362098694 ]
Python won't realise that new module has appeared in the runtime We need to clean the cache of module finders. Hacking again :return:
def clean_cache():
    """
    Python won't realise that new module has appeared in the runtime
    We need to clean the cache of module finders. Hacking again
    :return:
    """
    import importlib
    try:
        # Older interpreters (< 3.3): re-import and publish the module
        # into this module's globals by hand.
        module = importlib.import_module("versioneer")
        globals()["versioneer"] = module
    except ImportError:
        # Modern path: flush the import-system finder caches.
        importlib.invalidate_caches()
[ "def cache_py2_modules():\n \"\"\"\n Currently this function is unneeded, as we are not attempting to provide import hooks\n for modules with ambiguous names: email, urllib, pickle.\n \"\"\"\n if len(sys.py2_modules) != 0:\n return\n assert not detect_hooks()\n import urllib\n sys.py2...
[ 0.7373819351196289, 0.7117829918861389, 0.7047575116157532, 0.7046589255332947, 0.6884981989860535, 0.6834573745727539, 0.6827386617660522, 0.6806471943855286, 0.6788356900215149, 0.6784219741821289, 0.6779079437255859, 0.677069902420044 ]
Get project version (using versioneer) :return: string containing version
def get_version():
    """
    Get project version (using versioneer)
    :return: string containing version
    """
    setup_versioneer()
    clean_cache()
    import versioneer
    version = versioneer.get_version()
    # '*@' in the parsed form marks a local/dev build — presumably; append a
    # timestamp so every dev build gets a unique, increasing version string.
    if '*@' in str(parse_version(version)):
        import time
        version += str(int(time.time()))
    return version
[ "def _get_version():\n \"\"\"Return the project version from VERSION file.\"\"\"\n\n with open(join(dirname(__file__), '{{project.package}}/VERSION'), 'rb') as f:\n version = f.read().decode('ascii').strip()\n return version", "def _get_version():\n \"\"\"Return the project version from VERSION...
[ 0.7875058650970459, 0.7697684168815613, 0.7648671865463257, 0.759833574295044, 0.7570896744728088, 0.7523835301399231, 0.7461139559745789, 0.7398679256439209, 0.7363691926002502, 0.7361968159675598, 0.7345678210258484, 0.7333527207374573 ]
Find the common prefix of two or more paths. :: >>> import pathlib >>> one = pathlib.Path('foo/bar/baz') >>> two = pathlib.Path('foo/quux/biz') >>> three = pathlib.Path('foo/quux/wuux') :: >>> import uqbar.io >>> str(uqbar.io.find_common_prefix([one, two, three])) 'foo' :param paths: paths to inspect
def find_common_prefix( paths: Sequence[Union[str, pathlib.Path]] ) -> Optional[pathlib.Path]: """ Find the common prefix of two or more paths. :: >>> import pathlib >>> one = pathlib.Path('foo/bar/baz') >>> two = pathlib.Path('foo/quux/biz') >>> three = pathlib.Path('foo/quux/wuux') :: >>> import uqbar.io >>> str(uqbar.io.find_common_prefix([one, two, three])) 'foo' :param paths: paths to inspect """ counter: collections.Counter = collections.Counter() for path in paths: path = pathlib.Path(path) counter.update([path]) counter.update(path.parents) valid_paths = sorted( [path for path, count in counter.items() if count >= len(paths)], key=lambda x: len(x.parts), ) if valid_paths: return valid_paths[-1] return None
[ "def _StripCommonPathPrefix(paths):\n \"\"\"Removes path common prefix from a list of path strings.\"\"\"\n # Find the longest common prefix in terms of characters.\n common_prefix = os.path.commonprefix(paths)\n # Truncate at last segment boundary. E.g. '/aa/bb1/x.py' and '/a/bb2/x.py'\n # have '/aa/bb' as th...
[ 0.7656171917915344, 0.7499231696128845, 0.7466578483581543, 0.7462409138679504, 0.729282021522522, 0.7263355851173401, 0.7221022844314575, 0.7215659022331238, 0.7173000574111938, 0.7153367400169373, 0.7125357985496521, 0.7106934785842896 ]
r"""Finds executable `name`. Similar to Unix ``which`` command. Returns list of zero or more full paths to `name`.
def find_executable(name: str, flags=os.X_OK) -> List[str]: r"""Finds executable `name`. Similar to Unix ``which`` command. Returns list of zero or more full paths to `name`. """ result = [] extensions = [x for x in os.environ.get("PATHEXT", "").split(os.pathsep) if x] path = os.environ.get("PATH", None) if path is None: return [] for path in os.environ.get("PATH", "").split(os.pathsep): path = os.path.join(path, name) if os.access(path, flags): result.append(path) for extension in extensions: path_extension = path + extension if os.access(path_extension, flags): result.append(path_extension) return result
[ "def which(cmd):\n \"\"\"\n Returns full path to a executable.\n\n Args:\n cmd (str): Executable command to search for.\n\n Returns:\n (str) Full path to command. None if it is not found.\n\n Example::\n\n full_path_to_python = which(\"python\")\n \"\"\"\n def is_exe(fp):\n...
[ 0.8366226553916931, 0.8197463750839233, 0.8171188235282898, 0.7880702018737793, 0.7684198021888733, 0.767952561378479, 0.7670852541923523, 0.7655863761901855, 0.7653024792671204, 0.7651069760322571, 0.7628904581069946, 0.7624965906143188 ]
Generates relative path from ``source_path`` to ``target_path``. Handles the case of paths without a common prefix. :: >>> import pathlib >>> source = pathlib.Path('foo/bar/baz') >>> target = pathlib.Path('foo/quux/biz') :: >>> target.relative_to(source) Traceback (most recent call last): ... ValueError: 'foo/quux/biz' does not start with 'foo/bar/baz' :: >>> import uqbar.io >>> str(uqbar.io.relative_to(source, target)) '../../quux/biz' :param source_path: the source path :param target_path: the target path
def relative_to( source_path: Union[str, pathlib.Path], target_path: Union[str, pathlib.Path] ) -> pathlib.Path: """ Generates relative path from ``source_path`` to ``target_path``. Handles the case of paths without a common prefix. :: >>> import pathlib >>> source = pathlib.Path('foo/bar/baz') >>> target = pathlib.Path('foo/quux/biz') :: >>> target.relative_to(source) Traceback (most recent call last): ... ValueError: 'foo/quux/biz' does not start with 'foo/bar/baz' :: >>> import uqbar.io >>> str(uqbar.io.relative_to(source, target)) '../../quux/biz' :param source_path: the source path :param target_path: the target path """ source_path = pathlib.Path(source_path).absolute() if source_path.is_file(): source_path = source_path.parent target_path = pathlib.Path(target_path).absolute() common_prefix = find_common_prefix([source_path, target_path]) if not common_prefix: raise ValueError("No common prefix") source_path = source_path.relative_to(common_prefix) target_path = target_path.relative_to(common_prefix) result = pathlib.Path(*[".."] * len(source_path.parts)) return result / target_path
[ "def rellink_to (self, target, force=False):\n \"\"\"Make this path a symlink pointing to the given *target*, generating the\n\tproper relative path using :meth:`make_relative`. This gives different\n\tbehavior than :meth:`symlink_to`. For instance, ``Path\n\t('a/b').symlink_to ('c')`` results in ``a/b`` poi...
[ 0.7140219211578369, 0.7119200825691223, 0.6999659538269043, 0.6958523392677307, 0.6902969479560852, 0.6895066499710083, 0.6857941150665283, 0.6817877292633057, 0.681742787361145, 0.6782861351966858, 0.6780699491500854, 0.6768007874488831 ]
Walks a directory tree. Like :py:func:`os.walk` but yielding instances of :py:class:`pathlib.Path` instead of strings. :param root_path: foo :param top_down: bar
def walk( root_path: Union[str, pathlib.Path], top_down: bool = True ) -> Generator[ Tuple[pathlib.Path, Sequence[pathlib.Path], Sequence[pathlib.Path]], None, None ]: """ Walks a directory tree. Like :py:func:`os.walk` but yielding instances of :py:class:`pathlib.Path` instead of strings. :param root_path: foo :param top_down: bar """ root_path = pathlib.Path(root_path) directory_paths, file_paths = [], [] for path in sorted(root_path.iterdir()): if path.is_dir(): directory_paths.append(path) else: file_paths.append(path) if top_down: yield root_path, directory_paths, file_paths for directory_path in directory_paths: yield from walk(directory_path, top_down=top_down) if not top_down: yield root_path, directory_paths, file_paths
[ "def _walk(top, topdown=True, onerror=None, followlinks=False):\n \"\"\"Like Python 3.5's implementation of os.walk() -- faster than\n the pre-Python 3.5 version as it uses scandir() internally.\n \"\"\"\n dirs = []\n nondirs = []\n\n # We may not have read permission for top, in which case we can...
[ 0.7666237354278564, 0.760486900806427, 0.7558243870735168, 0.7528534531593323, 0.7476502060890198, 0.7472764849662781, 0.7448787689208984, 0.7446994781494141, 0.7380245923995972, 0.7364243268966675, 0.734965443611145, 0.733047604560852 ]
Writes ``contents`` to ``path``. Checks if ``path`` already exists and only write out new contents if the old contents do not match. Creates any intermediate missing directories. :param contents: the file contents to write :param path: the path to write to :param verbose: whether to print output
def write( contents: str, path: Union[str, pathlib.Path], verbose: bool = False, logger_func=None, ) -> bool: """ Writes ``contents`` to ``path``. Checks if ``path`` already exists and only write out new contents if the old contents do not match. Creates any intermediate missing directories. :param contents: the file contents to write :param path: the path to write to :param verbose: whether to print output """ print_func = logger_func or print path = pathlib.Path(path) if path.exists(): with path.open("r") as file_pointer: old_contents = file_pointer.read() if old_contents == contents: if verbose: print_func("preserved {}".format(path)) return False else: with path.open("w") as file_pointer: file_pointer.write(contents) if verbose: print_func("rewrote {}".format(path)) return True elif not path.exists(): if not path.parent.exists(): path.parent.mkdir(parents=True) with path.open("w") as file_pointer: file_pointer.write(contents) if verbose: print_func("wrote {}".format(path)) return True
[ "def write_file(self, path, contents):\n\t\t\"\"\"\n\t\tWrite a file of any type to the destination path. Useful for files like\n\t\trobots.txt, manifest.json, and so on.\n\n\t\tArgs:\n\t\t path (str): The name of the file to write to.\n\t\t contents (str or bytes): The contents to write.\n\t\t\"\"\"\n\t\tpath = ...
[ 0.7112444639205933, 0.6992592811584473, 0.6982851624488831, 0.6905460953712463, 0.6884069442749023, 0.6825169324874878, 0.6800003051757812, 0.6771378517150879, 0.671228289604187, 0.6689144968986511, 0.6681013107299805, 0.6621887683868408 ]
Pretty object reference using ``module.path:qual.name`` format
def pretty_ref(obj: Any) -> str: """Pretty object reference using ``module.path:qual.name`` format""" try: return obj.__module__ + ':' + obj.__qualname__ except AttributeError: return pretty_ref(type(obj)) + '(...)'
[ "def qualified_name(self):\n '''return the fully qualified name (`<module>.<name>`)'''\n if self.module == self:\n return self.module.name\n else:\n if \".\" not in self.name:\n return '{0}.{1}'.format(self.module.name, self.name)\n else:\n ...
[ 0.7537909150123596, 0.7496721148490906, 0.7459216117858887, 0.7329827547073364, 0.72868412733078, 0.7268847227096558, 0.7227492332458496, 0.719032883644104, 0.7084441184997559, 0.7068862318992615, 0.7022971510887146, 0.7020387649536133 ]
Display repo github path
def remote(ctx): """Display repo github path """ with command(): m = RepoManager(ctx.obj['agile']) click.echo(m.github_repo().repo_path)
[ "def repository(name):\n \"\"\"Display selected repository.\"\"\"\n user_id = current_user.id\n github = GitHubAPI(user_id=user_id)\n token = github.session_token\n\n if token:\n repos = github.account.extra_data.get('repos', [])\n repo = next((repo for repo_id, repo in repos.items()\n ...
[ 0.7642771601676941, 0.7277076244354248, 0.7231431603431702, 0.7152695655822754, 0.7143361568450928, 0.7138631939888, 0.7125160694122314, 0.7093775868415833, 0.7087344527244568, 0.7085931301116943, 0.7023402452468872, 0.701813280582428 ]
Get graph-order tuple for node. :: >>> from uqbar.containers import UniqueTreeContainer, UniqueTreeNode >>> root_container = UniqueTreeContainer(name="root") >>> outer_container = UniqueTreeContainer(name="outer") >>> inner_container = UniqueTreeContainer(name="inner") >>> node_a = UniqueTreeNode(name="a") >>> node_b = UniqueTreeNode(name="b") >>> node_c = UniqueTreeNode(name="c") >>> node_d = UniqueTreeNode(name="d") >>> root_container.extend([node_a, outer_container]) >>> outer_container.extend([inner_container, node_d]) >>> inner_container.extend([node_b, node_c]) :: >>> for node in root_container.depth_first(): ... print(node.name, node.graph_order) ... a (0,) outer (1,) inner (1, 0) b (1, 0, 0) c (1, 0, 1) d (1, 1)
def graph_order(self): """ Get graph-order tuple for node. :: >>> from uqbar.containers import UniqueTreeContainer, UniqueTreeNode >>> root_container = UniqueTreeContainer(name="root") >>> outer_container = UniqueTreeContainer(name="outer") >>> inner_container = UniqueTreeContainer(name="inner") >>> node_a = UniqueTreeNode(name="a") >>> node_b = UniqueTreeNode(name="b") >>> node_c = UniqueTreeNode(name="c") >>> node_d = UniqueTreeNode(name="d") >>> root_container.extend([node_a, outer_container]) >>> outer_container.extend([inner_container, node_d]) >>> inner_container.extend([node_b, node_c]) :: >>> for node in root_container.depth_first(): ... print(node.name, node.graph_order) ... a (0,) outer (1,) inner (1, 0) b (1, 0, 0) c (1, 0, 1) d (1, 1) """ parentage = tuple(reversed(self.parentage)) graph_order = [] for i in range(len(parentage) - 1): parent, child = parentage[i : i + 2] graph_order.append(parent.index(child)) return tuple(graph_order)
[ "def postorder(self):\n \"\"\"\n Returns a valid postorder of the **node objects** of the ``DictGraph`` \n *if* the topology is a directed acyclic graph. This postorder is \n semi-random, because the order of elements in a dictionary is \n semi-random and so are the starting nodes...
[ 0.6900789737701416, 0.6887165307998657, 0.6883143782615662, 0.6868157982826233, 0.6808519959449768, 0.6807454228401184, 0.6807418465614319, 0.6763340830802917, 0.665793776512146, 0.6647040247917175, 0.6604621410369873, 0.6588199734687805 ]
Send command, wait for response (single or multi lines), test for errors and return the returned code. :param cmd: command to send :param multilines: True - multiline response, False - single line response. :return: command return value.
def sendQuery(self, cmd, multilines=False): """ Send command, wait for response (single or multi lines), test for errors and return the returned code. :param cmd: command to send :param multilines: True - multiline response, False - single line response. :return: command return value. """ self.logger.debug("sendQuery(%s)", cmd) if not self.is_connected(): raise socket.error("sendQuery on a disconnected socket") if multilines: replies = self.__sendQueryReplies(cmd) for reply in replies: if reply.startswith(XenaSocket.reply_errors): raise XenaCommandException('sendQuery({}) reply({})'.format(cmd, replies)) self.logger.debug("sendQuery(%s) -- Begin", cmd) for l in replies: self.logger.debug("%s", l.strip()) self.logger.debug("sendQuery(%s) -- End", cmd) return replies else: reply = self.__sendQueryReply(cmd) if reply.startswith(XenaSocket.reply_errors): raise XenaCommandException('sendQuery({}) reply({})'.format(cmd, reply)) self.logger.debug('sendQuery(%s) reply(%s)', cmd, reply) return reply
[ "def send_command_return_multilines(self, command, *arguments):\n \"\"\" Send command and wait for multiple lines output. \"\"\"\n return self.api.send_command_return_multilines(self, command, *arguments)", "def sendcmd(self, cmd='AT', timeout=1.0):\n \"\"\"send command, wait for response. re...
[ 0.8435419797897339, 0.744498074054718, 0.7368252277374268, 0.7289223670959473, 0.7211261987686157, 0.7043935656547546, 0.6924325227737427, 0.6888087391853333, 0.6862530708312988, 0.6852949857711792, 0.6839452385902405, 0.676463782787323 ]
Send command without return value, wait for completion, verify success. :param cmd: command to send
def sendQueryVerify(self, cmd): """ Send command without return value, wait for completion, verify success. :param cmd: command to send """ cmd = cmd.strip() self.logger.debug("sendQueryVerify(%s)", cmd) if not self.is_connected(): raise socket.error("sendQueryVerify on a disconnected socket") resp = self.__sendQueryReply(cmd) if resp != self.reply_ok: raise XenaCommandException('Command {} Fail Expected {} Actual {}'.format(cmd, self.reply_ok, resp)) self.logger.debug("SendQueryVerify(%s) Succeed", cmd)
[ "def send(self, cmd):\n \"\"\" Send a command to the bridge.\n\n :param cmd: List of command bytes.\n \"\"\"\n self._bridge.send(cmd, wait=self.wait, reps=self.reps)", "def send_command(self, cmd, sudo=False, stderr=None, stdout=None):\n '''send command is a non interactive version ...
[ 0.7671437859535217, 0.7568156719207764, 0.7517185211181641, 0.7498824000358582, 0.7450752258300781, 0.7445734739303589, 0.7442312240600586, 0.7423124313354492, 0.7350210547447205, 0.7292740941047668, 0.7287125587463379, 0.7251755595207214 ]
Scan all SHIELDHIT12A config files to find external files used and return them. Also change paths in config files to match convention that all resources are symlinked in job_xxxx/symlink
def find_external_files(self, run_input_dir): """ Scan all SHIELDHIT12A config files to find external files used and return them. Also change paths in config files to match convention that all resources are symlinked in job_xxxx/symlink """ beam_file, geo_file, mat_file, _ = self.input_files # check for external files in BEAM input file external_beam_files = self._parse_beam_file(beam_file, run_input_dir) if external_beam_files: logger.info("External files from BEAM file: {0}".format(external_beam_files)) else: logger.debug("No external files from BEAM file") # check for external files in MAT input file icru_numbers = self._parse_mat_file(mat_file) if icru_numbers: logger.info("External files from MAT file: {0}".format(icru_numbers)) else: logger.debug("No external files from MAT file") # if ICRU+LOADEX pairs were found - get file names for external material files icru_files = [] if icru_numbers: icru_files = self._decrypt_icru_files(icru_numbers) # check for external files in GEO input file geo_files = self._parse_geo_file(geo_file, run_input_dir) if geo_files: logger.info("External files from GEO file: {0}".format(geo_files)) else: logger.debug("No external files from GEO file") external_files = external_beam_files + icru_files + geo_files return [os.path.join(self.input_path, e) for e in external_files]
[ "def _parse_beam_file(self, file_path, run_input_dir):\n \"\"\"Scan SH12A BEAM file for references to external files and return them\"\"\"\n external_files = []\n paths_to_replace = []\n with open(file_path, 'r') as beam_f:\n for line in beam_f.readlines():\n sp...
[ 0.7037990093231201, 0.6976031064987183, 0.691632091999054, 0.6895419359207153, 0.68953937292099, 0.689534068107605, 0.6879248023033142, 0.6852285265922546, 0.6822295784950256, 0.679693341255188, 0.67869633436203, 0.6767182946205139 ]
Scan SH12A BEAM file for references to external files and return them
def _parse_beam_file(self, file_path, run_input_dir): """Scan SH12A BEAM file for references to external files and return them""" external_files = [] paths_to_replace = [] with open(file_path, 'r') as beam_f: for line in beam_f.readlines(): split_line = line.split() # line length checking to prevent IndexError if len(split_line) > 2 and split_line[0] == "USEBMOD": logger.debug("Found reference to external file in BEAM file: {0} {1}".format( split_line[0], split_line[2])) external_files.append(split_line[2]) paths_to_replace.append(split_line[2]) elif len(split_line) > 1 and split_line[0] == "USECBEAM": logger.debug("Found reference to external file in BEAM file: {0} {1}".format( split_line[0], split_line[1])) external_files.append(split_line[1]) paths_to_replace.append(split_line[1]) if paths_to_replace: run_dir_config_file = os.path.join(run_input_dir, os.path.split(file_path)[-1]) logger.debug("Calling rewrite_paths method on file: {0}".format(run_dir_config_file)) self._rewrite_paths_in_file(run_dir_config_file, paths_to_replace) return external_files
[ "def find_external_files(self, run_input_dir):\n \"\"\"\n Scan all SHIELDHIT12A config files to find external files used and return them.\n Also change paths in config files to match convention that all resources are\n symlinked in job_xxxx/symlink\n \"\"\"\n beam_file, geo...
[ 0.7982855439186096, 0.7568964958190918, 0.6761327385902405, 0.6656328439712524, 0.6623234748840332, 0.6597821116447449, 0.6423473954200745, 0.6383647918701172, 0.6373353600502014, 0.636387825012207, 0.6359410285949707, 0.6359152793884277 ]
Scan SH12A GEO file for references to external files (like voxelised geometry) and return them
def _parse_geo_file(self, file_path, run_input_dir): """Scan SH12A GEO file for references to external files (like voxelised geometry) and return them""" external_files = [] paths_to_replace = [] with open(file_path, 'r') as geo_f: for line in geo_f.readlines(): split_line = line.split() if len(split_line) > 0 and not line.startswith("*"): base_path = os.path.join(self.input_path, split_line[0]) if os.path.isfile(base_path + '.hed'): logger.debug("Found ctx + hed files: {0}".format(base_path)) external_files.append(base_path + '.hed') # try to find ctx file if os.path.isfile(base_path + '.ctx'): external_files.append(base_path + '.ctx') elif os.path.isfile(base_path + '.ctx.gz'): external_files.append(base_path + '.ctx.gz') # replace path to match symlink location paths_to_replace.append(split_line[0]) if paths_to_replace: run_dir_config_file = os.path.join(run_input_dir, os.path.split(file_path)[-1]) logger.debug("Calling rewrite_paths method on file: {0}".format(run_dir_config_file)) self._rewrite_paths_in_file(run_dir_config_file, paths_to_replace) return external_files
[ "def find_external_files(self, run_input_dir):\n \"\"\"\n Scan all SHIELDHIT12A config files to find external files used and return them.\n Also change paths in config files to match convention that all resources are\n symlinked in job_xxxx/symlink\n \"\"\"\n beam_file, geo...
[ 0.7469627261161804, 0.7438340187072754, 0.7095581293106079, 0.6927589774131775, 0.6735621690750122, 0.6709260940551758, 0.665416955947876, 0.6626731157302856, 0.6550756096839905, 0.6497797966003418, 0.6456049680709839, 0.6410051584243774 ]
Scan SH12A MAT file for ICRU+LOADEX pairs and return found ICRU numbers
def _parse_mat_file(self, file_path): """Scan SH12A MAT file for ICRU+LOADEX pairs and return found ICRU numbers""" mat_file_sections = self._extract_mat_sections(file_path) return self._analyse_mat_sections(mat_file_sections)
[ "def _analyse_mat_sections(sections):\n \"\"\"\n Cases:\n - ICRU flag present, LOADDEDX flag missing -> data loaded from some data hardcoded in SH12A binary,\n no need to load external files\n - ICRU flag present, LOADDEDX flag present -> data loaded from external files. ICRU numb...
[ 0.8263450264930725, 0.7812528610229492, 0.7005556225776672, 0.6722555160522461, 0.6598689556121826, 0.6553063988685608, 0.6534486413002014, 0.6525524854660034, 0.6503617763519287, 0.6435152888298035, 0.6423606872558594, 0.6421538591384888 ]
Cases: - ICRU flag present, LOADDEDX flag missing -> data loaded from some data hardcoded in SH12A binary, no need to load external files - ICRU flag present, LOADDEDX flag present -> data loaded from external files. ICRU number read from ICRU flag, any number following LOADDEDX flag is ignored. - ICRU flag missing, LOADDEDX flag present -> data loaded from external files. ICRU number read from LOADDEDX - ICRU flag missing, LOADDEDX flag missing -> nothing happens
def _analyse_mat_sections(sections): """ Cases: - ICRU flag present, LOADDEDX flag missing -> data loaded from some data hardcoded in SH12A binary, no need to load external files - ICRU flag present, LOADDEDX flag present -> data loaded from external files. ICRU number read from ICRU flag, any number following LOADDEDX flag is ignored. - ICRU flag missing, LOADDEDX flag present -> data loaded from external files. ICRU number read from LOADDEDX - ICRU flag missing, LOADDEDX flag missing -> nothing happens """ icru_numbers = [] for section in sections: load_present = False load_value = False icru_value = False for e in section: split_line = e.split() if "LOADDEDX" in e: load_present = True if len(split_line) > 1: load_value = split_line[1] if "!" not in split_line[1] else False # ignore ! comments elif "ICRU" in e and len(split_line) > 1: icru_value = split_line[1] if "!" not in split_line[1] else False # ignore ! comments if load_present: # LOADDEDX is present, so external file is required if icru_value: # if ICRU value was given icru_numbers.append(icru_value) elif load_value: # if only LOADDEDX with values was present in section icru_numbers.append(load_value) return icru_numbers
[ "public static int cuModuleLoadDataEx (CUmodule phMod, Pointer p, int numOptions, int options[], Pointer optionValues)\r\n {\r\n // Although it should be possible to pass 'null' for these parameters\r\n // when numOptions==0, the driver crashes when they are 'null', so\r\n // they are replac...
[ 0.7070738673210144, 0.6922526359558105, 0.6904281973838806, 0.6704273223876953, 0.6688159108161926, 0.66507488489151, 0.6602060794830322, 0.6574947834014893, 0.655653715133667, 0.6555174589157104, 0.6555153727531433, 0.6554554104804993 ]
Find matching file names for given ICRU numbers
def _decrypt_icru_files(numbers): """Find matching file names for given ICRU numbers""" import json icru_file = resource_string(__name__, os.path.join('data', 'SH12A_ICRU_table.json')) ref_dict = json.loads(icru_file.decode('ascii')) try: return [ref_dict[e] for e in numbers] except KeyError as er: logger.error("There is no ICRU file for id: {0}".format(er)) raise
[ "def _find_umi(files):\n \"\"\"Find UMI file using different naming schemes.\n\n R1/R2/R3 => R1/R3 with R2 UMI\n R1/R2/I1 => R1/R2 with I1 UMI\n \"\"\"\n base = os.path.basename(_commonprefix(files))\n\n def _file_ext(f):\n exts = utils.splitext_plus(os.path.basename(f).replace(base, \"\"))...
[ 0.7105668187141418, 0.6876538395881653, 0.6679221391677856, 0.6674149036407471, 0.6648903489112854, 0.664292573928833, 0.6628639101982117, 0.6617603898048401, 0.6605300307273865, 0.660112202167511, 0.6600364446640015, 0.6591923236846924 ]
Rewrite paths in config files to match convention job_xxxx/symlink Requires path to run_xxxx/input/config_file and a list of paths_to_replace
def _rewrite_paths_in_file(config_file, paths_to_replace): """ Rewrite paths in config files to match convention job_xxxx/symlink Requires path to run_xxxx/input/config_file and a list of paths_to_replace """ lines = [] # make a copy of config import shutil shutil.copyfile(config_file, str(config_file + '_original')) with open(config_file) as infile: for line in infile: for old_path in paths_to_replace: if old_path in line: new_path = os.path.split(old_path)[-1] line = line.replace(old_path, new_path) logger.debug("Changed path {0} ---> {1} in file {2}".format(old_path, new_path, config_file)) lines.append(line) with open(config_file, 'w') as outfile: for line in lines: outfile.write(line)
[ "def rewrite_paths(self, local_path, remote_path):\n \"\"\"\n Rewrite references to `local_path` with `remote_path` in job inputs.\n \"\"\"\n self.__rewrite_command_line(local_path, remote_path)\n self.__rewrite_config_files(local_path, remote_path)", "def rewrite_input_paths(s...
[ 0.7911860346794128, 0.781268298625946, 0.6906705498695374, 0.690446138381958, 0.6899089813232422, 0.6825046539306641, 0.6709731817245483, 0.6641524434089661, 0.6566023826599121, 0.656368613243103, 0.655708372592926, 0.653624951839447 ]
Check if a given LDAP object exists.
def _check_exists(database: Database, table: LdapObjectClass, key: str, value: str): """ Check if a given LDAP object exists. """ try: get_one(table, Q(**{key: value}), database=database) return True except ObjectDoesNotExist: return False
[ "public boolean exists(Dn dn) {\n try {\n return connection.exists(dn);\n } catch (LdapException e) {\n throw new LdapDaoException(e);\n }\n }", "private void existsCheck(Dn dn) throws NoSuchNodeException, MissingParentException {\n try {\n if (!conn...
[ 0.7273830771446228, 0.7265703678131104, 0.707835853099823, 0.7018440365791321, 0.6881299018859863, 0.6857613921165466, 0.6838850378990173, 0.683398425579071, 0.6821202039718628, 0.6818259954452515, 0.6811388731002808, 0.6797795295715332 ]
Modify a changes to add an automatically generated uidNumber.
def save_account(changes: Changeset, table: LdapObjectClass, database: Database) -> Changeset: """ Modify a changes to add an automatically generated uidNumber. """ d = {} settings = database.settings uid_number = changes.get_value_as_single('uidNumber') if uid_number is None: scheme = settings['NUMBER_SCHEME'] first = settings.get('UID_FIRST', 10000) d['uidNumber'] = Counters.get_and_increment( scheme, "uidNumber", first, lambda n: not _check_exists(database, table, 'uidNumber', n) ) changes = changes.merge(d) return changes
[ "def _add_uid(self, uid, skip_handle=False):\n \"\"\"Add unique identifier in correct field.\n\n The ``skip_handle`` flag is used when adding a uid through the add_url function\n since urls can be easily confused with handle elements.\n \"\"\"\n # We might add None values from whe...
[ 0.6978135704994202, 0.6813786625862122, 0.681290328502655, 0.6805565357208252, 0.6763060688972473, 0.6754458546638489, 0.6754096746444702, 0.6735838055610657, 0.6723365783691406, 0.6688442826271057, 0.6684103012084961, 0.6675044298171997 ]
Replaces instances of switch expression: by for __case in _Switch(n): and replaces case expression: by if __case(expression): and default: by if __case():
def transform_source(text): '''Replaces instances of switch expression: by for __case in _Switch(n): and replaces case expression: by if __case(expression): and default: by if __case(): ''' toks = tokenize.generate_tokens(StringIO(text).readline) result = [] replacing_keyword = False for toktype, tokvalue, _, _, _ in toks: if toktype == tokenize.NAME and tokvalue == 'switch': result.extend([ (tokenize.NAME, 'for'), (tokenize.NAME, '__case'), (tokenize.NAME, 'in'), (tokenize.NAME, '_Switch'), (tokenize.OP, '(') ]) replacing_keyword = True elif toktype == tokenize.NAME and (tokvalue == 'case' or tokvalue == 'default'): result.extend([ (tokenize.NAME, 'if'), (tokenize.NAME, '__case'), (tokenize.OP, '(') ]) replacing_keyword = True elif replacing_keyword and tokvalue == ':': result.extend([ (tokenize.OP, ')'), (tokenize.OP, ':') ]) replacing_keyword = False else: result.append((toktype, tokvalue)) return tokenize.untokenize(result)
[ "def p_switch_statement(self, p):\n \"\"\"switch_statement : SWITCH LPAREN expr RPAREN case_block\"\"\"\n cases = []\n default = None\n # iterate over return values from case_block\n for item in p[5]:\n if isinstance(item, ast.Default):\n default = item\n...
[ 0.7839750051498413, 0.7434612512588501, 0.7269330024719238, 0.7261450290679932, 0.7179446220397949, 0.710665225982666, 0.7068049311637878, 0.6975150108337402, 0.6962053775787354, 0.6949174404144287, 0.6891921162605286, 0.6840291619300842 ]
Search for entries in LDAP database.
def search(self, base, scope, filterstr='(objectClass=*)', attrlist=None, limit=None) -> Generator[Tuple[str, dict], None, None]: """ Search for entries in LDAP database. """ _debug("search", base, scope, filterstr, attrlist, limit) # first results if attrlist is None: attrlist = ldap3.ALL_ATTRIBUTES elif isinstance(attrlist, set): attrlist = list(attrlist) def first_results(obj): _debug("---> searching ldap", limit) obj.search( base, filterstr, scope, attributes=attrlist, paged_size=limit) return obj.response # get the 1st result result_list = self._do_with_retry(first_results) # Loop over list of search results for result_item in result_list: # skip searchResRef for now if result_item['type'] != "searchResEntry": continue dn = result_item['dn'] attributes = result_item['raw_attributes'] # did we already retrieve this from cache? _debug("---> got ldap result", dn) _debug("---> yielding", result_item) yield (dn, attributes) # we are finished - return results, eat cake _debug("---> done") return
[ "def _search(self, base, fltr, attrs=None, scope=ldap.SCOPE_SUBTREE):\n \"\"\"Perform LDAP search\"\"\"\n try:\n results = self._conn.search_s(base, scope, fltr, attrs)\n except Exception as e:\n log.exception(self._get_ldap_msg(e))\n results = False\n re...
[ 0.8410250544548035, 0.8161965608596802, 0.7980911135673523, 0.7957127690315247, 0.7936100363731384, 0.7926977276802063, 0.7824440002441406, 0.7757538557052612, 0.763999879360199, 0.7509103417396545, 0.7479243874549866, 0.7462848424911499 ]
rename a dn in the ldap database; see ldap module. doesn't return a result if transactions enabled.
def rename(self, dn: str, new_rdn: str, new_base_dn: Optional[str] = None) -> None: """ rename a dn in the ldap database; see ldap module. doesn't return a result if transactions enabled. """ raise NotImplementedError()
[ "def rename(self, dn: str, new_rdn: str, new_base_dn: Optional[str] = None) -> None:\n \"\"\"\n rename a dn in the ldap database; see ldap module. doesn't return a\n result if transactions enabled.\n \"\"\"\n\n return self._do_with_retry(\n lambda obj: obj.rename_s(dn, ...
[ 0.8929044604301453, 0.8716602325439453, 0.8117704391479492, 0.790888786315918, 0.7474083304405212, 0.7388917207717896, 0.7381210923194885, 0.7348619699478149, 0.7344306111335754, 0.7342362999916077, 0.7287878394126892, 0.7264547348022461 ]
Example shows how to configure environment from scratch
def prepare_env(org): """ Example shows how to configure environment from scratch """ # Add services key_service = org.service(type='builtin:cobalt_secure_store', name='Keystore') wf_service = org.service(type='builtin:workflow_service', name='Workflow', parameters='{}') # Add services to environment env = org.environment(name='default') env.clean() env.add_service(key_service) env.add_service(wf_service) env.add_policy( {"action": "provisionVms", "parameter": "publicKeyId", "value": key_service.regenerate()['id']}) # Add cloud provider account access = { "provider": "aws-ec2", "usedEnvironments": [], "ec2SecurityGroup": "default", "providerCopy": "aws-ec2", "name": "test-provider", "jcloudsIdentity": KEY, "jcloudsCredential": SECRET_KEY, "jcloudsRegions": "us-east-1" } prov = org.provider(access) env.add_provider(prov) return org.organizationId
[ "def configure(self, remotes=None,\n client_id=None,\n start_timeout=None, docker_image=None,\n ignore_clock_skew=False, disable_action_probes=False,\n vnc_driver=None, vnc_kwargs=None,\n rewarder_driver=None,\n re...
[ 0.753597617149353, 0.747139573097229, 0.7193689942359924, 0.7176499366760254, 0.7167496085166931, 0.7145485877990723, 0.7098968029022217, 0.7098963260650635, 0.7090998888015747, 0.7090483903884888, 0.7075063586235046, 0.7068800330162048 ]
Commands for devops operations
def start(ctx, debug, version, config): """Commands for devops operations""" ctx.obj = {} ctx.DEBUG = debug if os.path.isfile(config): with open(config) as fp: agile = json.load(fp) else: agile = {} ctx.obj['agile'] = agile if version: click.echo(__version__) ctx.exit(0) if not ctx.invoked_subcommand: click.echo(ctx.get_help())
[ "def cmd_devop(self, args):\n '''device operations'''\n usage = \"Usage: devop <read|write> <spi|i2c> name bus address\"\n if len(args) < 5:\n print(usage)\n return\n\n if args[1] == 'spi':\n bustype = mavutil.mavlink.DEVICE_OP_BUSTYPE_SPI\n elif a...
[ 0.7714719176292419, 0.7381599545478821, 0.7163476943969727, 0.687071681022644, 0.6861153841018677, 0.6814738512039185, 0.6775156259536743, 0.6766917705535889, 0.6749687790870667, 0.6739022135734558, 0.6731467247009277, 0.6728531122207642 ]
Duplicate all related objects of obj setting field to value. If one of the duplicate objects has an FK to another duplicate object update that as well. Return the duplicate copy of obj. duplicate_order is a list of models which specify how the duplicate objects are saved. For complex objects this can matter. Check to save if objects are being saved correctly and if not just pass in related objects in the order that they should be saved.
def duplicate(obj, value=None, field=None, duplicate_order=None): """ Duplicate all related objects of obj setting field to value. If one of the duplicate objects has an FK to another duplicate object update that as well. Return the duplicate copy of obj. duplicate_order is a list of models which specify how the duplicate objects are saved. For complex objects this can matter. Check to save if objects are being saved correctly and if not just pass in related objects in the order that they should be saved. """ using = router.db_for_write(obj._meta.model) collector = CloneCollector(using=using) collector.collect([obj]) collector.sort() related_models = list(collector.data.keys()) data_snapshot = {} for key in collector.data.keys(): data_snapshot.update({ key: dict(zip( [item.pk for item in collector.data[key]], [item for item in collector.data[key]])) }) root_obj = None # Sometimes it's good enough just to save in reverse deletion order. if duplicate_order is None: duplicate_order = reversed(related_models) for model in duplicate_order: # Find all FKs on model that point to a related_model. fks = [] for f in model._meta.fields: if isinstance(f, ForeignKey) and f.rel.to in related_models: fks.append(f) # Replace each `sub_obj` with a duplicate. if model not in collector.data: continue sub_objects = collector.data[model] for obj in sub_objects: for fk in fks: fk_value = getattr(obj, "%s_id" % fk.name) # If this FK has been duplicated then point to the duplicate. fk_rel_to = data_snapshot[fk.rel.to] if fk_value in fk_rel_to: dupe_obj = fk_rel_to[fk_value] setattr(obj, fk.name, dupe_obj) # Duplicate the object and save it. obj.id = None if field is not None: setattr(obj, field, value) obj.save() if root_obj is None: root_obj = obj return root_obj
[ "def _update_related(self, obj, related, subfield_dict):\n \"\"\"\n update DB objects related to a base object\n obj: a base object to create related\n related: dict mapping field names to lists of related objects\n subfield_list: where to get the ne...
[ 0.6876682043075562, 0.6828989386558533, 0.6736040711402893, 0.6582846641540527, 0.6567643284797668, 0.6564415693283081, 0.656254768371582, 0.6561061143875122, 0.6523545980453491, 0.650313138961792, 0.6493257880210876, 0.6484439373016357 ]
Function getPayloadStruct Get the payload structure to do a creation or a modification @param attribute: The data @param objType: SubItem type (e.g: hostgroup for hostgroup_class) @return RETURN: the payload
def getPayloadStruct(self, attributes, objType): """ Function getPayloadStruct Get the payload structure to do a creation or a modification @param attribute: The data @param objType: SubItem type (e.g: hostgroup for hostgroup_class) @return RETURN: the payload """ payload = {self.payloadObj: attributes, objType + "_class": {self.payloadObj: attributes}} return payload
[ "def getPayloadStruct(self, attributes, objType=None):\n \"\"\" Function getPayloadStruct\n Get the payload structure to do a creation or a modification\n\n @param key: The key to modify\n @param attribute: The data\n @param objType: NOT USED in this class\n @return RETURN:...
[ 0.8475438952445984, 0.7575864791870117, 0.6440965533256531, 0.6344138979911804, 0.6316420435905457, 0.6299972534179688, 0.6281882524490356, 0.6253945231437683, 0.6251264214515686, 0.6238875985145569, 0.6235304474830627, 0.6234186887741089 ]
Validate url.
def validate_url(value): """ Validate url. """ if not re.match(VIMEO_URL_RE, value) and not re.match(YOUTUBE_URL_RE, value): raise ValidationError('Invalid URL - only Youtube, Vimeo can be used.')
[ "def _validateurl(self, url):\n \"\"\"assembles the server url\"\"\"\n parsed = urlparse(url)\n path = parsed.path.strip(\"/\")\n if path:\n parts = path.split(\"/\")\n url_types = (\"admin\", \"manager\", \"rest\")\n if any(i in parts for i in url_types)...
[ 0.7814081311225891, 0.772642970085144, 0.7723298668861389, 0.760263204574585, 0.7552741169929504, 0.7532185912132263, 0.7522064447402954, 0.7444924712181091, 0.7414736151695251, 0.7404168248176575, 0.7383787035942078, 0.7377429604530334 ]
Enters transaction management for a running thread. It must be balanced with the appropriate leave_transaction_management call, since the actual state is managed as a stack. The state and dirty flag are carried over from the surrounding block or from the settings, if there is no surrounding block (dirty is always false when no current block is running).
def enter_transaction_management(using=None): """ Enters transaction management for a running thread. It must be balanced with the appropriate leave_transaction_management call, since the actual state is managed as a stack. The state and dirty flag are carried over from the surrounding block or from the settings, if there is no surrounding block (dirty is always false when no current block is running). """ if using is None: for using in tldap.backend.connections: connection = tldap.backend.connections[using] connection.enter_transaction_management() return connection = tldap.backend.connections[using] connection.enter_transaction_management()
[ "def leave_transaction_management(using=None):\n \"\"\"\n Leaves transaction management for a running thread. A dirty flag is carried\n over to the surrounding block, as a commit will commit all changes, even\n those from outside. (Commits are on connection level.)\n \"\"\"\n if using is None:\n ...
[ 0.7948814630508423, 0.7469999194145203, 0.6925532817840576, 0.690844714641571, 0.6850407719612122, 0.6764214038848877, 0.6729089021682739, 0.6670367121696472, 0.6654178500175476, 0.6579058170318604, 0.6569712162017822, 0.6567016839981079 ]
Leaves transaction management for a running thread. A dirty flag is carried over to the surrounding block, as a commit will commit all changes, even those from outside. (Commits are on connection level.)
def leave_transaction_management(using=None): """ Leaves transaction management for a running thread. A dirty flag is carried over to the surrounding block, as a commit will commit all changes, even those from outside. (Commits are on connection level.) """ if using is None: for using in tldap.backend.connections: connection = tldap.backend.connections[using] connection.leave_transaction_management() return connection = tldap.backend.connections[using] connection.leave_transaction_management()
[ "def leave_transaction_management(self) -> None:\n \"\"\"\n End a transaction. Must not be dirty when doing so. ie. commit() or\n rollback() must be called if changes made. If dirty, changes will be\n discarded.\n \"\"\"\n if len(self._transactions) == 0:\n raise...
[ 0.7795088887214661, 0.7766814827919006, 0.7272088527679443, 0.6986686587333679, 0.6957940459251404, 0.6925381422042847, 0.6876813769340515, 0.686309278011322, 0.6844186186790466, 0.6785255670547485, 0.6785023212432861, 0.6779400110244751 ]
Returns True if the current transaction requires a commit for changes to happen.
def is_dirty(using=None): """ Returns True if the current transaction requires a commit for changes to happen. """ if using is None: dirty = False for using in tldap.backend.connections: connection = tldap.backend.connections[using] if connection.is_dirty(): dirty = True return dirty connection = tldap.backend.connections[using] return connection.is_dirty()
[ "def has_commit(self):\n \"\"\"\n :return:\n :rtype: boolean\n \"\"\"\n current_revision = self.history.current_revision\n revision_id = self.state.revision_id\n\n return current_revision.revision_id != revision_id", "def has_commit(self, client_key=None):\n ...
[ 0.7338279485702515, 0.7293597459793091, 0.725247859954834, 0.7228243350982666, 0.715399980545044, 0.7153799533843994, 0.7116058468818665, 0.7112911343574524, 0.6996734738349915, 0.6891674995422363, 0.6886998414993286, 0.6882462501525879 ]
Checks whether the transaction manager is in manual or in auto state.
def is_managed(using=None): """ Checks whether the transaction manager is in manual or in auto state. """ if using is None: managed = False for using in tldap.backend.connections: connection = tldap.backend.connections[using] if connection.is_managed(): managed = True return managed connection = tldap.backend.connections[using] return connection.is_managed()
[ "public void setTransactionManualMode(boolean manualMode) {\n this.transactionHandler.setManualMode(manualMode);\n overrider.override(MjdbcConstants.OVERRIDE_INT_IS_MANUAL_MODE, manualMode);\n }", "public boolean isManualQuery()\n {\n boolean bIsManual = false;\n if (this.getTabl...
[ 0.7647520303726196, 0.7216840982437134, 0.7114665508270264, 0.70999675989151, 0.7045300006866455, 0.6824000477790833, 0.6787397861480713, 0.678667426109314, 0.6777503490447998, 0.6752684116363525, 0.6746699213981628, 0.6737667322158813 ]
Does the commit itself and resets the dirty flag.
def commit(using=None): """ Does the commit itself and resets the dirty flag. """ if using is None: for using in tldap.backend.connections: connection = tldap.backend.connections[using] connection.commit() return connection = tldap.backend.connections[using] connection.commit()
[ "public function commit()\n\t{\n\t\tif ( $this->_initialized && $this->_dirty )\n\t\t{\n\t\t\t$this->_saveCache();\n\t\t\t$this->_dirty = false;\n\t\t}\n\t}", "def commit(self):\n \"\"\"Commit dirty records to the server. This method is automatically\n called when the `auto_commit` option is set to ...
[ 0.7501399517059326, 0.743144690990448, 0.7354114055633545, 0.7335783243179321, 0.7292166352272034, 0.7251945734024048, 0.7217972874641418, 0.7213919162750244, 0.721125066280365, 0.7208200693130493, 0.71666419506073, 0.7162477374076843 ]
This function does the rollback itself and resets the dirty flag.
def rollback(using=None): """ This function does the rollback itself and resets the dirty flag. """ if using is None: for using in tldap.backend.connections: connection = tldap.backend.connections[using] connection.rollback() return connection = tldap.backend.connections[using] connection.rollback()
[ "def rollback(self) -> None:\n \"\"\"\n Roll back to previous database state. However stay inside transaction\n management.\n \"\"\"\n if len(self._transactions) == 0:\n raise RuntimeError(\"rollback called outside transaction\")\n\n _debug(\"rollback:\", self._t...
[ 0.7328417301177979, 0.731033980846405, 0.7217168807983398, 0.7200501561164856, 0.7126035094261169, 0.7117496132850647, 0.7112213373184204, 0.7069525718688965, 0.7067874670028687, 0.7057897448539734, 0.7046284079551697, 0.7040011286735535 ]
Takes 3 things, an entering function (what to do to start this block of transaction management), an exiting function (what to do to end it, on both success and failure, and using which can be: None, indiciating transaction should occur on all defined servers, or a callable, indicating that using is None and to return the function already wrapped. Returns either a Transaction objects, which is both a decorator and a context manager, or a wrapped function, if using is a callable.
def _transaction_func(entering, exiting, using): """ Takes 3 things, an entering function (what to do to start this block of transaction management), an exiting function (what to do to end it, on both success and failure, and using which can be: None, indiciating transaction should occur on all defined servers, or a callable, indicating that using is None and to return the function already wrapped. Returns either a Transaction objects, which is both a decorator and a context manager, or a wrapped function, if using is a callable. """ # Note that although the first argument is *called* `using`, it # may actually be a function; @autocommit and @autocommit('foo') # are both allowed forms. if callable(using): return Transaction(entering, exiting, None)(using) return Transaction(entering, exiting, using)
[ "def enter_transaction_management(using=None):\n \"\"\"\n Enters transaction management for a running thread. It must be balanced\n with the appropriate leave_transaction_management call, since the actual\n state is managed as a stack.\n\n The state and dirty flag are carried over from the surroundin...
[ 0.7809832692146301, 0.7770252227783203, 0.7619637846946716, 0.7583186626434326, 0.7478051781654358, 0.7470187544822693, 0.7456947565078735, 0.7369341850280762, 0.7086473107337952, 0.7082058787345886, 0.7039930820465088, 0.7035502791404724 ]
This decorator activates commit on response. This way, if the view function runs successfully, a commit is made; if the viewfunc produces an exception, a rollback is made. This is one of the most common ways to do transaction control in Web apps.
def commit_on_success(using=None): """ This decorator activates commit on response. This way, if the view function runs successfully, a commit is made; if the viewfunc produces an exception, a rollback is made. This is one of the most common ways to do transaction control in Web apps. """ def entering(using): enter_transaction_management(using=using) def exiting(exc_value, using): try: if exc_value is not None: if is_dirty(using=using): rollback(using=using) else: commit(using=using) finally: leave_transaction_management(using=using) return _transaction_func(entering, exiting, using)
[ "def autocommit(f):\n \"A decorator to commit to the storage if autocommit is set to True.\"\n @wraps(f)\n def wrapper(self, *args, **kwargs):\n result = f(self, *args, **kwargs)\n if self._meta.commit_ready():\n self.commit()\n return result\n return wrapper", "def com...
[ 0.7192099094390869, 0.7120048403739929, 0.705969512462616, 0.7059608697891235, 0.7056255340576172, 0.7001574635505676, 0.6987271904945374, 0.6951708793640137, 0.6897431015968323, 0.6896659135818481, 0.6870453953742981, 0.6861870288848877 ]
Decorator that activates manual transaction control. It just disables automatic transaction control and doesn't do any commit/rollback of its own -- it's up to the user to call the commit and rollback functions themselves.
def commit_manually(using=None): """ Decorator that activates manual transaction control. It just disables automatic transaction control and doesn't do any commit/rollback of its own -- it's up to the user to call the commit and rollback functions themselves. """ def entering(using): enter_transaction_management(using=using) def exiting(exc_value, using): leave_transaction_management(using=using) return _transaction_func(entering, exiting, using)
[ "def commit_on_success(using=None):\n \"\"\"\n This decorator activates commit on response. This way, if the view function\n runs successfully, a commit is made; if the viewfunc produces an exception,\n a rollback is made. This is one of the most common ways to do transaction\n control in Web apps.\n...
[ 0.7432112097740173, 0.7334818243980408, 0.7209842205047607, 0.7146124839782715, 0.7028743028640747, 0.6992279887199402, 0.6944511532783508, 0.6935604810714722, 0.6925960779190063, 0.6875629425048828, 0.6853241920471191, 0.6841294765472412 ]
Yields: tuple (line_number: int, offset: int, text: str, check: type)
def run(self) -> Generator[Tuple[int, int, str, type], None, None]: """ Yields: tuple (line_number: int, offset: int, text: str, check: type) """ if is_test_file(self.filename): self.load() for func in self.all_funcs(): try: for error in func.check_all(): yield (error.line_number, error.offset, error.text, Checker) except ValidationError as error: yield error.to_flake8(Checker)
[ "def generator_telling_position(self) -> Iterator[Tuple[str, int]]:\n \"\"\"\n Create a generate that iterates the whole content of the file or string, and also tells which offset is now.\n\n :return: An iterator iterating tuples, containing lines of the text stream,...\n separated b...
[ 0.7129460573196411, 0.6868925094604492, 0.6831988096237183, 0.6774337291717529, 0.6772196292877197, 0.6769496202468872, 0.675317645072937, 0.672489583492279, 0.6683998107910156, 0.6676743030548096, 0.6662952303886414, 0.6629166603088379 ]
Reloads glitter URL patterns if page URLs change. Avoids having to restart the server to recreate the glitter URLs being used by Django.
def process_request(self, request): """ Reloads glitter URL patterns if page URLs change. Avoids having to restart the server to recreate the glitter URLs being used by Django. """ global _urlconf_pages page_list = list( Page.objects.exclude(glitter_app_name='').values_list('id', 'url').order_by('id') ) with _urlconf_lock: if page_list != _urlconf_pages: glitter_urls = 'glitter.urls' if glitter_urls in sys.modules: importlib.reload(sys.modules[glitter_urls]) _urlconf_pages = page_list
[ "def urlpatterns(self):\n '''load and decorate urls from all modules\n then store it as cached property for less loading\n '''\n if not hasattr(self, '_urlspatterns'):\n urlpatterns = []\n # load all urls\n # support .urls file and urls_conf = 'elephantbl...
[ 0.7137013673782349, 0.689215898513794, 0.6736341714859009, 0.661320686340332, 0.6490828394889832, 0.6484881639480591, 0.6462690234184265, 0.6458756327629089, 0.6435902118682861, 0.6427618265151978, 0.6419044733047485, 0.6411451101303101 ]
Execute all current and future payloads Blocks and executes payloads until :py:meth:`stop` is called. It is an error for any orphaned payload to return or raise.
def run(self): """ Execute all current and future payloads Blocks and executes payloads until :py:meth:`stop` is called. It is an error for any orphaned payload to return or raise. """ self._logger.info('runner started: %s', self) try: with self._lock: assert not self.running.is_set() and self._stopped.is_set(), 'cannot re-run: %s' % self self.running.set() self._stopped.clear() self._run() except Exception: self._logger.exception('runner aborted: %s', self) raise else: self._logger.info('runner stopped: %s', self) finally: with self._lock: self.running.clear() self._stopped.set()
[ "async def _await_all(self):\n \"\"\"Async component of _run\"\"\"\n delay = 0.0\n # we run a top-level nursery that automatically reaps/cancels for us\n async with trio.open_nursery() as nursery:\n while self.running.is_set():\n await self._start_payloads(nurse...
[ 0.7552907466888428, 0.7415286302566528, 0.7319077849388123, 0.7161659598350525, 0.7152004837989807, 0.714405357837677, 0.7080373167991638, 0.7059256434440613, 0.7043203115463257, 0.6888946294784546, 0.6872673630714417, 0.6786566376686096 ]
Stop execution of all current and future payloads
def stop(self): """Stop execution of all current and future payloads""" if not self.running.wait(0.2): return self._logger.debug('runner disabled: %s', self) with self._lock: self.running.clear() self._stopped.wait()
[ "async def _cancel_payloads(self):\n \"\"\"Cancel all remaining payloads\"\"\"\n for task in self._tasks:\n task.cancel()\n await asyncio.sleep(0)\n for task in self._tasks:\n while not task.done():\n await asyncio.sleep(0.1)\n task...
[ 0.7745028138160706, 0.7574176788330078, 0.7350283861160278, 0.7333797216415405, 0.7153338193893433, 0.7145117521286011, 0.7136678099632263, 0.7098059058189392, 0.7093703746795654, 0.7081308364868164, 0.7078524231910706, 0.7044951319694519 ]
Delimit a string at word boundaries. :: >>> import uqbar.strings >>> list(uqbar.strings.delimit_words("i want to believe")) ['i', 'want', 'to', 'believe'] :: >>> list(uqbar.strings.delimit_words("S3Bucket")) ['S3', 'Bucket'] :: >>> list(uqbar.strings.delimit_words("Route53")) ['Route', '53']
def delimit_words(string: str) -> Generator[str, None, None]: """ Delimit a string at word boundaries. :: >>> import uqbar.strings >>> list(uqbar.strings.delimit_words("i want to believe")) ['i', 'want', 'to', 'believe'] :: >>> list(uqbar.strings.delimit_words("S3Bucket")) ['S3', 'Bucket'] :: >>> list(uqbar.strings.delimit_words("Route53")) ['Route', '53'] """ # TODO: Reimplement this wordlike_characters = ("<", ">", "!") current_word = "" for i, character in enumerate(string): if ( not character.isalpha() and not character.isdigit() and character not in wordlike_characters ): if current_word: yield current_word current_word = "" elif not current_word: current_word += character elif character.isupper(): if current_word[-1].isupper(): current_word += character else: yield current_word current_word = character elif character.islower(): if current_word[-1].isalpha(): current_word += character else: yield current_word current_word = character elif character.isdigit(): if current_word[-1].isdigit() or current_word[-1].isupper(): current_word += character else: yield current_word current_word = character elif character in wordlike_characters: if current_word[-1] in wordlike_characters: current_word += character else: yield current_word current_word = character if current_word: yield current_word
[ "def delimit_words(self):\n \"\"\"This method takes the existing encoded binary string\n and returns a binary string that will pad it such that\n the encoded string contains only full bytes.\n \"\"\"\n bits_short = 8 - (len(self.buffer.getvalue()) % 8)\n \n #The stri...
[ 0.6909090280532837, 0.6606740355491638, 0.6514214873313904, 0.6489845514297485, 0.6448297500610352, 0.6416976451873779, 0.6416348814964294, 0.6394431591033936, 0.6336376070976257, 0.6314685940742493, 0.6312682032585144, 0.6307927370071411 ]
Normalizes whitespace. Strips leading and trailing blank lines, dedents, and removes trailing whitespace from the result.
def normalize(string: str) -> str: """ Normalizes whitespace. Strips leading and trailing blank lines, dedents, and removes trailing whitespace from the result. """ string = string.replace("\t", " ") lines = string.split("\n") while lines and (not lines[0] or lines[0].isspace()): lines.pop(0) while lines and (not lines[-1] or lines[-1].isspace()): lines.pop() for i, line in enumerate(lines): lines[i] = line.rstrip() string = "\n".join(lines) string = textwrap.dedent(string) return string
[ "def normalize(text_block)\n return text_block if @options[:preserve_whitespace]\n\n # Strip out any preceding [whitespace]* that occur on every line. Not\n # the smartest, but I wonder if I care.\n text_block = text_block.gsub(/^(\\s*\\*+)/, '')\n\n # Strip consistent indenting by measurin...
[ 0.7407689094543457, 0.7230909466743469, 0.7103708386421204, 0.7006025910377502, 0.6976730823516846, 0.6974694728851318, 0.6950390934944153, 0.6949636936187744, 0.6920515298843384, 0.6885442733764648, 0.6881459355354309, 0.6814990043640137 ]
Convert a string to dash-delimited words. :: >>> import uqbar.strings >>> string = 'Tô Đặc Biệt Xe Lửa' >>> print(uqbar.strings.to_dash_case(string)) to-dac-biet-xe-lua :: >>> string = 'alpha.beta.gamma' >>> print(uqbar.strings.to_dash_case(string)) alpha-beta-gamma
def to_dash_case(string: str) -> str: """ Convert a string to dash-delimited words. :: >>> import uqbar.strings >>> string = 'Tô Đặc Biệt Xe Lửa' >>> print(uqbar.strings.to_dash_case(string)) to-dac-biet-xe-lua :: >>> string = 'alpha.beta.gamma' >>> print(uqbar.strings.to_dash_case(string)) alpha-beta-gamma """ string = unidecode.unidecode(string) words = (_.lower() for _ in delimit_words(string)) string = "-".join(words) return string
[ "def parse_dash(string, width):\n\t\"parse dash pattern specified with string\"\n\t\n\t# DashConvert from {tk-sources}/generic/tkCanvUtil.c\n\tw = max(1, int(width + 0.5))\n\n\tn = len(string)\n\tresult = []\n\tfor i, c in enumerate(string):\n\t\tif c == \" \" and len(result):\n\t\t\tresult[-1] += w + 1\n\t\telif c...
[ 0.7056986689567566, 0.7022408246994019, 0.6875450015068054, 0.6869301795959473, 0.6775718927383423, 0.6666778922080994, 0.6664897799491882, 0.6663722395896912, 0.6655914783477783, 0.662871778011322, 0.661432147026062, 0.6612073183059692 ]
returns a list of all fixers found in the lib2to3 library
def get_lib2to3_fixers(): '''returns a list of all fixers found in the lib2to3 library''' fixers = [] fixer_dirname = fixer_dir.__path__[0] for name in sorted(os.listdir(fixer_dirname)): if name.startswith("fix_") and name.endswith(".py"): fixers.append("lib2to3.fixes." + name[:-3]) return fixers
[ "def get_single_fixer(fixname):\n '''return a single fixer found in the lib2to3 library'''\n fixer_dirname = fixer_dir.__path__[0]\n for name in sorted(os.listdir(fixer_dirname)):\n if (name.startswith(\"fix_\") and name.endswith(\".py\") \n and fixname == name[4:-3]):\n return...
[ 0.8299906253814697, 0.7078806161880493, 0.7032569050788879, 0.698115885257721, 0.6977643966674805, 0.6867755055427551, 0.6652924418449402, 0.6635935306549072, 0.6580876708030701, 0.6555743217468262, 0.6539329290390015, 0.6534920930862427 ]
return a single fixer found in the lib2to3 library
def get_single_fixer(fixname): '''return a single fixer found in the lib2to3 library''' fixer_dirname = fixer_dir.__path__[0] for name in sorted(os.listdir(fixer_dirname)): if (name.startswith("fix_") and name.endswith(".py") and fixname == name[4:-3]): return "lib2to3.fixes." + name[:-3]
[ "def get_lib2to3_fixers():\n '''returns a list of all fixers found in the lib2to3 library'''\n fixers = []\n fixer_dirname = fixer_dir.__path__[0]\n for name in sorted(os.listdir(fixer_dirname)):\n if name.startswith(\"fix_\") and name.endswith(\".py\"):\n fixers.append(\"lib2to3.fixes...
[ 0.8598690032958984, 0.7472712397575378, 0.7428495287895203, 0.7360685467720032, 0.7156214118003845, 0.6804410219192505, 0.6788302063941956, 0.6769159436225891, 0.6761971116065979, 0.6723169088363647, 0.671269953250885, 0.6692692637443542 ]
Returns field's single value prepared for saving into a database.
def to_db(self, value): """ Returns field's single value prepared for saving into a database. """ # ensure value is valid self.validate(value) assert isinstance(value, list) value = list(value) for i, v in enumerate(value): value[i] = self.value_to_db(v) # return result assert isinstance(value, list) return value
[ "def get_prep_value(self, value):\n \"\"\"Returns field's value prepared for saving into a database.\"\"\"\n\n if isinstance(value, LocalizedValue):\n prep_value = LocalizedValue()\n for k, v in value.__dict__.items():\n if v is None:\n prep_valu...
[ 0.7930915951728821, 0.7762113213539124, 0.7404974699020386, 0.7379361987113953, 0.7321757078170776, 0.7313757538795471, 0.7301207184791565, 0.7283756136894226, 0.7269089221954346, 0.7263267040252686, 0.7263159155845642, 0.7262715101242065 ]
Converts the input value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this.
def to_python(self, value): """ Converts the input value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ assert isinstance(value, list) # convert every value in list value = list(value) for i, v in enumerate(value): value[i] = self.value_to_python(v) # return result return value
[ "def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n ...
[ 0.8508092164993286, 0.8455279469490051, 0.8399019241333008, 0.8279650807380676, 0.8272891640663147, 0.8231707215309143, 0.814752995967865, 0.806265115737915, 0.8001563549041748, 0.7947636246681213, 0.7799067497253418, 0.7714439630508423 ]
Validates value and throws ValidationError. Subclasses should override this to provide validation logic.
def validate(self, value): """ Validates value and throws ValidationError. Subclasses should override this to provide validation logic. """ # check object type if not isinstance(value, list): raise tldap.exceptions.ValidationError( "is not a list and max_instances is %s" % self._max_instances) # check maximum instances if (self._max_instances is not None and len(value) > self._max_instances): raise tldap.exceptions.ValidationError( "exceeds max_instances of %d" % self._max_instances) # check this required value is given if self._required: if len(value) == 0: raise tldap.exceptions.ValidationError( "is required") # validate the value for i, v in enumerate(value): self.value_validate(v)
[ "def validate(self, value, model_instance):\n \"\"\"\n Validates value and throws ValidationError. Subclasses should override\n this to provide validation logic.\n \"\"\"\n return super(self.__class__, self).validate(value.value, model_instance)", "def value_validate(self, value...
[ 0.8405061364173889, 0.8293669819831848, 0.7976804971694946, 0.7885225415229797, 0.7805082201957703, 0.7797256708145142, 0.779566764831543, 0.7763097882270813, 0.7717840671539307, 0.7703309655189514, 0.7695854902267456, 0.7688673138618469 ]
Convert the value's type and run validation. Validation errors from to_python and validate are propagated. The correct value is returned if no error is raised.
def clean(self, value): """ Convert the value's type and run validation. Validation errors from to_python and validate are propagated. The correct value is returned if no error is raised. """ value = self.to_python(value) self.validate(value) return value
[ "def clean(self, value, model_instance):\n \"\"\"\n Convert the value's type and run validation. Validation errors\n from to_python and validate are propagated. The correct value is\n returned if no error is raised.\n \"\"\"\n #: return constant's name instead of constant i...
[ 0.7912411093711853, 0.7856140732765198, 0.7601820230484009, 0.7515721321105957, 0.7506422996520996, 0.7505135536193848, 0.7475513815879822, 0.7470663785934448, 0.746669590473175, 0.7434817552566528, 0.737034261226654, 0.7361854314804077 ]
Returns field's single value prepared for saving into a database.
def value_to_db(self, value): """ Returns field's single value prepared for saving into a database. """ if isinstance(value, six.string_types): value = value.encode("utf_8") return value
[ "def get_prep_value(self, value):\n \"\"\"Returns field's value prepared for saving into a database.\"\"\"\n\n if isinstance(value, LocalizedValue):\n prep_value = LocalizedValue()\n for k, v in value.__dict__.items():\n if v is None:\n prep_valu...
[ 0.7930915951728821, 0.7762113213539124, 0.7404974699020386, 0.7379361987113953, 0.7321757078170776, 0.7313757538795471, 0.7301207184791565, 0.7283756136894226, 0.7269089221954346, 0.7263267040252686, 0.7263159155845642, 0.7262715101242065 ]
Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this.
def value_to_python(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, bytes): raise tldap.exceptions.ValidationError("should be a bytes") value = value.decode("utf_8") return value
[ "def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n ...
[ 0.8518143892288208, 0.8449154496192932, 0.8376913070678711, 0.8368150591850281, 0.8314429521560669, 0.8277285695075989, 0.8108909726142883, 0.8054237365722656, 0.7992805242538452, 0.7976009845733643, 0.782760739326477, 0.76715487241745 ]
Validates value and throws ValidationError. Subclasses should override this to provide validation logic.
def value_validate(self, value): """ Validates value and throws ValidationError. Subclasses should override this to provide validation logic. """ if not isinstance(value, six.string_types): raise tldap.exceptions.ValidationError("should be a string")
[ "def validate(self, value, model_instance):\n \"\"\"\n Validates value and throws ValidationError. Subclasses should override\n this to provide validation logic.\n \"\"\"\n return super(self.__class__, self).validate(value.value, model_instance)", "def validate(self, value):\n ...
[ 0.8405061364173889, 0.8216529488563538, 0.7976804971694946, 0.7885225415229797, 0.7805082201957703, 0.7797256708145142, 0.779566764831543, 0.7763097882270813, 0.7717840671539307, 0.7703309655189514, 0.7695854902267456, 0.7688673138618469 ]
Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this.
def value_to_python(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, bytes): raise tldap.exceptions.ValidationError("should be bytes") if value is None: return value try: return int(value) except (TypeError, ValueError): raise tldap.exceptions.ValidationError("is invalid integer")
[ "def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n ...
[ 0.8518143892288208, 0.838757336139679, 0.8376913070678711, 0.8368150591850281, 0.8314429521560669, 0.8277285695075989, 0.8108909726142883, 0.8054237365722656, 0.7992805242538452, 0.7976009845733643, 0.782760739326477, 0.76715487241745 ]
Returns field's single value prepared for saving into a database.
def value_to_db(self, value): """ Returns field's single value prepared for saving into a database. """ assert isinstance(value, six.integer_types) return str(value).encode("utf_8")
[ "def get_prep_value(self, value):\n \"\"\"Returns field's value prepared for saving into a database.\"\"\"\n\n if isinstance(value, LocalizedValue):\n prep_value = LocalizedValue()\n for k, v in value.__dict__.items():\n if v is None:\n prep_valu...
[ 0.7930915951728821, 0.7762113213539124, 0.7404974699020386, 0.7379361987113953, 0.7321757078170776, 0.7313757538795471, 0.7301207184791565, 0.7283756136894226, 0.7269089221954346, 0.7263267040252686, 0.7263159155845642, 0.7262715101242065 ]
Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this.
def value_validate(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, six.integer_types): raise tldap.exceptions.ValidationError("should be a integer") try: return str(value) except (TypeError, ValueError): raise tldap.exceptions.ValidationError("is invalid integer")
[ "def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n ...
[ 0.8518143892288208, 0.8449154496192932, 0.838757336139679, 0.8376913070678711, 0.8368150591850281, 0.8314429521560669, 0.8108909726142883, 0.8054237365722656, 0.7992805242538452, 0.7976009845733643, 0.782760739326477, 0.76715487241745 ]
Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this.
def value_to_python(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, bytes): raise tldap.exceptions.ValidationError("should be a bytes") try: value = int(value) except (TypeError, ValueError): raise tldap.exceptions.ValidationError("is invalid integer") try: value = datetime.date.fromtimestamp(value * 24 * 60 * 60) except OverflowError: raise tldap.exceptions.ValidationError("is too big a date") return value
[ "def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n ...
[ 0.8449154496192932, 0.838757336139679, 0.8376913070678711, 0.8368150591850281, 0.8314429521560669, 0.8277285695075989, 0.8108909726142883, 0.8054237365722656, 0.7992805242538452, 0.7976009845733643, 0.782760739326477, 0.76715487241745 ]
Returns field's single value prepared for saving into a database.
def value_to_db(self, value): """ Returns field's single value prepared for saving into a database. """ assert isinstance(value, datetime.date) assert not isinstance(value, datetime.datetime) try: value = value - datetime.date(year=1970, month=1, day=1) except OverflowError: raise tldap.exceptions.ValidationError("is too big a date") return str(value.days).encode("utf_8")
[ "def get_prep_value(self, value):\n \"\"\"Returns field's value prepared for saving into a database.\"\"\"\n\n if isinstance(value, LocalizedValue):\n prep_value = LocalizedValue()\n for k, v in value.__dict__.items():\n if v is None:\n prep_valu...
[ 0.7930915951728821, 0.7762113213539124, 0.7404974699020386, 0.7379361987113953, 0.7321757078170776, 0.7313757538795471, 0.7301207184791565, 0.7283756136894226, 0.7269089221954346, 0.7263267040252686, 0.7263159155845642, 0.7262715101242065 ]
Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this.
def value_validate(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, datetime.date): raise tldap.exceptions.ValidationError("is invalid date") # a datetime is also a date but they are not compatable if isinstance(value, datetime.datetime): raise tldap.exceptions.ValidationError("should be a date, not a datetime")
[ "def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n ...
[ 0.8518143892288208, 0.8449154496192932, 0.838757336139679, 0.8368150591850281, 0.8314429521560669, 0.8277285695075989, 0.8108909726142883, 0.8054237365722656, 0.7992805242538452, 0.7976009845733643, 0.782760739326477, 0.76715487241745 ]
Returns field's single value prepared for saving into a database.
def value_to_db(self, value): """ Returns field's single value prepared for saving into a database. """ assert isinstance(value, datetime.datetime) try: value = value - datetime.datetime(1970, 1, 1) except OverflowError: raise tldap.exceptions.ValidationError("is too big a date") value = value.seconds + value.days * 24 * 3600 value = str(value).encode("utf_8") return value
[ "def get_prep_value(self, value):\n \"\"\"Returns field's value prepared for saving into a database.\"\"\"\n\n if isinstance(value, LocalizedValue):\n prep_value = LocalizedValue()\n for k, v in value.__dict__.items():\n if v is None:\n prep_valu...
[ 0.7930915951728821, 0.7762113213539124, 0.7404974699020386, 0.7379361987113953, 0.7321757078170776, 0.7313757538795471, 0.7301207184791565, 0.7283756136894226, 0.7269089221954346, 0.7263267040252686, 0.7262715101242065, 0.7243165969848633 ]
Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this.
def value_validate(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, datetime.datetime): raise tldap.exceptions.ValidationError("is invalid date time")
[ "def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n ...
[ 0.8518143892288208, 0.8449154496192932, 0.838757336139679, 0.8376913070678711, 0.8314429521560669, 0.8277285695075989, 0.8108909726142883, 0.8054237365722656, 0.7992805242538452, 0.7976009845733643, 0.782760739326477, 0.76715487241745 ]
Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this.
def value_to_python(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, bytes): raise tldap.exceptions.ValidationError("should be a bytes") length = len(value) - 8 if length % 4 != 0: raise tldap.exceptions.ValidationError("Invalid sid") length = length // 4 array = struct.unpack('<bbbbbbbb' + 'I' * length, value) if array[1] != length: raise tldap.exceptions.ValidationError("Invalid sid") if array[2:7] != (0, 0, 0, 0, 0): raise tldap.exceptions.ValidationError("Invalid sid") array = ("S", ) + array[0:1] + array[7:] return "-".join([str(i) for i in array])
[ "def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n ...
[ 0.8518143892288208, 0.8449154496192932, 0.838757336139679, 0.8376913070678711, 0.8368150591850281, 0.8314429521560669, 0.8277285695075989, 0.8054237365722656, 0.7992805242538452, 0.7976009845733643, 0.782760739326477, 0.76715487241745 ]
Returns field's single value prepared for saving into a database.
def value_to_db(self, value): """ Returns field's single value prepared for saving into a database. """ assert isinstance(value, str) array = value.split("-") length = len(array) - 3 assert length >= 0 assert array[0] == 'S' array = array[1:2] + [length, 0, 0, 0, 0, 0] + array[2:] array = [int(i) for i in array] return struct.pack('<bbbbbbbb' + 'I' * length, *array)
[ "def get_prep_value(self, value):\n \"\"\"Returns field's value prepared for saving into a database.\"\"\"\n\n if isinstance(value, LocalizedValue):\n prep_value = LocalizedValue()\n for k, v in value.__dict__.items():\n if v is None:\n prep_valu...
[ 0.7930915951728821, 0.7762113213539124, 0.7404974699020386, 0.7379361987113953, 0.7321757078170776, 0.7313757538795471, 0.7301207184791565, 0.7283756136894226, 0.7269089221954346, 0.7263267040252686, 0.7263159155845642, 0.7262715101242065 ]
Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this.
def value_validate(self, value): """ Converts the input single value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ if not isinstance(value, str): raise tldap.exceptions.ValidationError("Invalid sid") array = value.split("-") length = len(array) - 3 if length < 1: raise tldap.exceptions.ValidationError("Invalid sid") if array.pop(0) != "S": raise tldap.exceptions.ValidationError("Invalid sid") try: [int(i) for i in array] except TypeError: raise tldap.exceptions.ValidationError("Invalid sid")
[ "def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n ...
[ 0.8518143892288208, 0.8449154496192932, 0.838757336139679, 0.8376913070678711, 0.8368150591850281, 0.8314429521560669, 0.8277285695075989, 0.8108909726142883, 0.8054237365722656, 0.7992805242538452, 0.782760739326477, 0.76715487241745 ]
Get data for this component
def get(self, id): """Get data for this component """ id = self.as_id(id) url = '%s/%s' % (self, id) response = self.http.get(url, auth=self.auth) response.raise_for_status() return response.json()
[ "public Object doGetData()\n {\n String data = (String)super.doGetData();\n FileListener listener = this.getRecord().getListener(PropertiesStringFileListener.class);\n if (this.getComponent(0) == null) // Don't convert if this is linked to a screen\n if (enableConversion)\n ...
[ 0.7479872703552246, 0.7313993573188782, 0.715919017791748, 0.7088156342506409, 0.7082095742225647, 0.7074160575866699, 0.7063506245613098, 0.7022608518600464, 0.7014139890670776, 0.7011657953262329, 0.6999324560165405, 0.6983368992805481 ]
Create a new component
def create(self, data): """Create a new component """ response = self.http.post(str(self), json=data, auth=self.auth) response.raise_for_status() return response.json()
[ "def create(self, name, status, description=\"\", link=\"\", order=0,\n group_id=0, enabled=True):\n \"\"\"Create a new component\n\n :param str name: Name of the component\n :param int status: Status of the component; 1-4\n :param str description: Description of the compon...
[ 0.8072904944419861, 0.7723421454429626, 0.7717704772949219, 0.7657792568206787, 0.7609652876853943, 0.7576033473014832, 0.7565774917602539, 0.7495549321174622, 0.7473636269569397, 0.7469908595085144, 0.7466772794723511, 0.7457039952278137 ]
Update a component
def update(self, id, data): """Update a component """ id = self.as_id(id) response = self.http.patch( '%s/%s' % (self, id), json=data, auth=self.auth ) response.raise_for_status() return response.json()
[ "def update(self, component_id, name=None, status=None, description=None,\n link=None, order=None, group_id=None, enabled=True):\n \"\"\"Update a component\n\n :param int component_id: Component ID\n :param str name: Name of the component (optional)\n :param int status: Sta...
[ 0.8214623332023621, 0.8128786683082581, 0.7688286304473877, 0.7536182403564453, 0.7502194046974182, 0.7400845885276794, 0.7399318218231201, 0.7325282096862793, 0.7288486957550049, 0.7256565690040588, 0.7254197001457214, 0.7252134680747986 ]
Delete a component by id
def delete(self, id): """Delete a component by id """ id = self.as_id(id) response = self.http.delete( '%s/%s' % (self.api_url, id), auth=self.auth) response.raise_for_status()
[ "def delete_component(self, id):\n \"\"\"Delete component by id.\n\n :param id: ID of the component to use\n :type id: str\n :rtype: Response\n \"\"\"\n url = self._get_url('component/' + str(id))\n return self._session.delete(url)", "def delete(context, id):\n ...
[ 0.8823086619377136, 0.8571924567222595, 0.7978848814964294, 0.789355456829071, 0.7869218587875366, 0.7781111001968384, 0.766715407371521, 0.7617802023887634, 0.7290953993797302, 0.722851037979126, 0.7205148935317993, 0.7175469398498535 ]
Get a list of this github component :param url: full url :param Comp: a :class:`.Component` class :param callback: Optional callback :param limit: Optional number of items to retrieve :param data: additional query data :return: a list of ``Comp`` objects with data
def get_list(self, url=None, callback=None, limit=100, **data): """Get a list of this github component :param url: full url :param Comp: a :class:`.Component` class :param callback: Optional callback :param limit: Optional number of items to retrieve :param data: additional query data :return: a list of ``Comp`` objects with data """ url = url or str(self) data = dict(((k, v) for k, v in data.items() if v)) all_data = [] if limit: data['per_page'] = min(limit, 100) while url: response = self.http.get(url, params=data, auth=self.auth) response.raise_for_status() result = response.json() n = m = len(result) if callback: result = callback(result) m = len(result) all_data.extend(result) if limit and len(all_data) > limit: all_data = all_data[:limit] break elif m == n: data = None next = response.links.get('next', {}) url = next.get('url') else: break return all_data
[ "def comp(request, slug, directory_slug=None):\n \"\"\"\n View the requested comp\n \"\"\"\n context = {}\n path = settings.COMPS_DIR\n comp_dir = os.path.split(path)[1]\n template = \"{0}/{1}\".format(comp_dir, slug)\n if directory_slug:\n template = \"{0}/{1}/{2}\".format(comp_dir, ...
[ 0.6603466868400574, 0.639123260974884, 0.6311928629875183, 0.6283335089683533, 0.6283116340637207, 0.6262850165367126, 0.6243754625320435, 0.6223114728927612, 0.6206058263778687, 0.6196853518486023, 0.6196420788764954, 0.6195151805877686 ]
Return all comments for this issue/pull request
def comments(self, issue): """Return all comments for this issue/pull request """ commit = self.as_id(issue) return self.get_list(url='%s/%s/comments' % (self, commit))
[ "def getPullRequestComments(self, repo_user, repo_name, pull_number):\n \"\"\"\n GET /repos/:owner/:repo/pulls/:number/comments\n\n :param pull_number: The pull request's number.\n \"\"\"\n return self.api.makeRequestAllPages(\n ['repos', repo_user, repo_name,\n ...
[ 0.8164628148078918, 0.79473477602005, 0.7838341593742371, 0.7744812369346619, 0.7670895457267761, 0.7670817971229553, 0.7664884924888611, 0.765682578086853, 0.7602823376655579, 0.7534890174865723, 0.7518331408500671, 0.7515272498130798 ]
Returns a boolean if the user in the request has edit permission for the object. Can also be passed a version object to check if the user has permission to edit a version of the object (if they own it).
def has_edit_permission(self, request, obj=None, version=None): """ Returns a boolean if the user in the request has edit permission for the object. Can also be passed a version object to check if the user has permission to edit a version of the object (if they own it). """ # Has the edit permission for this object type permission_name = '{}.edit_{}'.format(self.opts.app_label, self.opts.model_name) has_permission = request.user.has_perm(permission_name) if obj is not None and has_permission is False: has_permission = request.user.has_perm(permission_name, obj=obj) if has_permission and version is not None: # Version must not be saved, and must belong to this user if version.version_number or version.owner != request.user: has_permission = False return has_permission
[ "def get_can_edit(self, obj):\n \"\"\" returns true if user has permission to edit, false otherwise \"\"\"\n view = self.context.get('view')\n request = copy(self.context.get('request'))\n request._method = 'PUT'\n try:\n view.check_object_permissions(request, obj)\n ...
[ 0.8267927169799805, 0.8250415325164795, 0.8135248422622681, 0.7944478392601013, 0.79191654920578, 0.7849165201187134, 0.7844603061676025, 0.773772120475769, 0.7692142128944397, 0.7653836607933044, 0.7638435959815979, 0.7629140019416809 ]
Returns a boolean if the user in the request has publish permission for the object.
def has_publish_permission(self, request, obj=None): """ Returns a boolean if the user in the request has publish permission for the object. """ permission_name = '{}.publish_{}'.format(self.opts.app_label, self.opts.model_name) has_permission = request.user.has_perm(permission_name) if obj is not None and has_permission is False: has_permission = request.user.has_perm(permission_name, obj=obj) return has_permission
[ "def has_publish_permission(self, request, obj=None):\n \"\"\"\n Determines if the user has permissions to publish.\n\n :param request: Django request object.\n :param obj: The object to determine if the user has\n permissions to publish.\n :return: Boolean.\n \"\"\"...
[ 0.898836612701416, 0.8384897112846375, 0.7753359079360962, 0.7683749198913574, 0.7435063123703003, 0.741520881652832, 0.7396457195281982, 0.7391578555107117, 0.7368825674057007, 0.7341288924217224, 0.7313709855079651, 0.7284159064292908 ]
Get a valid semantic version for tag
def semantic_version(tag): """Get a valid semantic version for tag """ try: version = list(map(int, tag.split('.'))) assert len(version) == 3 return tuple(version) except Exception as exc: raise CommandError( 'Could not parse "%s", please use ' 'MAJOR.MINOR.PATCH' % tag ) from exc
[ "def validate_version(self, prefix='v'):\n \"\"\"Validate version by checking if it is a valid semantic version\n and its value is higher than latest github tag\n \"\"\"\n version = self.software_version()\n repo = self.github_repo()\n repo.releases.validate_tag(version, pr...
[ 0.7959873676300049, 0.7770457863807678, 0.7640970945358276, 0.7486236095428467, 0.741369903087616, 0.7395448684692383, 0.7371225357055664, 0.7266147136688232, 0.7261949777603149, 0.7255418300628662, 0.7233297824859619, 0.7209794521331787 ]
Function load Store the object data
def load(self, data): """ Function load Store the object data """ self.clear() self.update(data) self.enhance()
[ "function loadObject(data, db, callback) {\n callback = callback || function() {};\n var iterator = function(modelName, next){\n insertCollection(modelName, data[modelName], db, next);\n };\n async.forEachSeries(Object.keys(data), iterator, callback);\n}", "function(data, depth) {\n dept...
[ 0.7398872375488281, 0.7252195477485657, 0.7247742414474487, 0.7204188108444214, 0.7134860754013062, 0.712976336479187, 0.7112202048301697, 0.7068856358528137, 0.704858124256134, 0.703584611415863, 0.7005787491798401, 0.6986722946166992 ]
Function enhance Enhance the object with new item or enhanced items
def enhance(self): """ Function enhance Enhance the object with new item or enhanced items """ if self.objName in ['hosts', 'hostgroups', 'puppet_classes']: from foreman.itemSmartClassParameter\ import ItemSmartClassParameter self.update({'smart_class_parameters': SubDict(self.api, self.objName, self.payloadObj, self.key, ItemSmartClassParameter)})
[ "def enhance(self):\n \"\"\" Function enhance\n Enhance the object with new item or enhanced items\n \"\"\"\n self.update({'os_default_templates':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n ...
[ 0.8752948045730591, 0.8729672431945801, 0.8696154356002808, 0.8695272207260132, 0.8679497838020325, 0.8630505800247192, 0.7447202205657959, 0.7079142928123474, 0.707258939743042, 0.6839107275009155, 0.6810131072998047, 0.6799886226654053 ]
Function reload Sync the full object
def reload(self): """ Function reload Sync the full object """ self.load(self.api.get(self.objName, self.key))
[ "def reload(self):\n \"\"\" Function reload\n Reload the full object to ensure sync\n \"\"\"\n realData = self.load()\n self.clear()\n self.update(realData)", "function reload(args) {\n if (args !== undefined) {\n if (args.l !== undefined) {\n fs.clos...
[ 0.8672438859939575, 0.7427269220352173, 0.7301700711250305, 0.7258812189102173, 0.725823163986206, 0.7242302298545837, 0.7187184691429138, 0.7158632874488831, 0.7093992233276367, 0.7003313302993774, 0.7002490758895874, 0.6987038254737854 ]