| Column | Type |
|---|---|
| repository_name | string (7–55 chars) |
| func_path_in_repository | string (4–223 chars) |
| func_name | string (1–134 chars) |
| whole_func_string | string (75–104k chars) |
| language | string (1 class) |
| func_code_string | string (75–104k chars) |
| func_code_tokens | list (19–28.4k items) |
| func_documentation_string | string (1–46.9k chars) |
| func_documentation_tokens | list (1–1.97k items) |
| split_name | string (1 class) |
| func_code_url | string (87–315 chars) |
ministryofjustice/money-to-prisoners-common
|
mtp_common/build_tasks/executor.py
|
Context.management_command
|
def management_command(self, command, *args, **kwargs):
    """
    Runs a Django management command
    """
    self.setup_django()
    if 'verbosity' not in kwargs:
        kwargs['verbosity'] = self.verbosity
    if not self.use_colour:
        kwargs['no_color'] = False
    self.debug(self.yellow_style('$ manage.py %s' % command))
    return call_command(command, *args, **kwargs)
|
python
|
Runs a Django management command
|
train
|
https://github.com/ministryofjustice/money-to-prisoners-common/blob/33c43a2912cb990d9148da7c8718f480f07d90a1/mtp_common/build_tasks/executor.py#L451-L461
|
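The wrapper above defers to Django's call_command once setup_django() has run. A hypothetical usage sketch, assuming ctx is a configured Context instance; the command names and options are standard Django ones, not taken from this repository:

ctx.management_command('migrate')
# extra kwargs pass straight through to django.core.management.call_command:
ctx.management_command('collectstatic', interactive=False)
# verbosity falls back to ctx.verbosity unless given explicitly:
ctx.management_command('check', verbosity=2)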
ministryofjustice/money-to-prisoners-common
|
mtp_common/build_tasks/executor.py
|
Executor.help
|
def help(self, context):
    """
    Prints this help (use --verbosity 2 for more details)
    """
    context.info('%s\n%s [global options] [task] [task options]...\n' % (self.name, sys.argv[0]))

    def print_parameter(prn, p):
        if p.description:
            suffix = ' - {0.description}'.format(p)
        else:
            suffix = ''
        if p.constraint is bool:
            prn(' {0.arg_name}'.format(p) + suffix)
        else:
            prn(' {0.arg_name} [{0.value}]'.format(p) + suffix)

    context.info('Global options:')
    for parameter in self.context_parameters.values():
        print_parameter(context.info, parameter)
    context.info()
    context.info('Commands:')
    name_template = ' {0.name:<%d}' % min(max(map(len, self.available_tasks)), 20)
    for task in self.available_tasks.values():
        printer = context.debug if task.hidden else context.info
        if task.description:
            printer((name_template + ' - {0.description}').format(task))
        else:
            printer(name_template.format(task))
        for parameter in task.parameters.values():
            print_parameter(printer, parameter)
|
python
|
Prints this help (use --verbosity 2 for more details)
|
train
|
https://github.com/ministryofjustice/money-to-prisoners-common/blob/33c43a2912cb990d9148da7c8718f480f07d90a1/mtp_common/build_tasks/executor.py#L527-L557
|
ministryofjustice/money-to-prisoners-common
|
mtp_common/auth/__init__.py
|
login
|
def login(request, user):
    """
    Persist a user id and a backend in the request. This way a user doesn't
    have to reauthenticate on every request. Note that data set during
    the anonymous session is retained when the user logs in.
    """
    session_auth_hash = ''
    if user is None:
        user = request.user
    if hasattr(user, 'get_session_auth_hash'):
        session_auth_hash = user.get_session_auth_hash()

    if SESSION_KEY in request.session:
        session_key = request.session[SESSION_KEY]
        if session_key != user.pk or (
                session_auth_hash and
                request.session.get(HASH_SESSION_KEY) != session_auth_hash):
            # To avoid reusing another user's session, create a new, empty
            # session if the existing session corresponds to a different
            # authenticated user.
            request.session.flush()
    else:
        request.session.cycle_key()

    request.session[SESSION_KEY] = user.pk
    request.session[BACKEND_SESSION_KEY] = user.backend
    request.session[USER_DATA_SESSION_KEY] = user.user_data
    request.session[HASH_SESSION_KEY] = session_auth_hash
    update_token_in_session(request.session, user.token)
    if hasattr(request, 'user'):
        request.user = user
    rotate_token(request)
    user_logged_in.send(sender=user.__class__, request=request, user=user)
|
python
|
Persist a user id and a backend in the request. This way a user doesn't
have to reauthenticate on every request. Note that data set during
the anonymous session is retained when the user logs in.
|
train
|
https://github.com/ministryofjustice/money-to-prisoners-common/blob/33c43a2912cb990d9148da7c8718f480f07d90a1/mtp_common/auth/__init__.py#L22-L55
|
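The branching above is the session-fixation guard. A condensed sketch of the decision, using SESSION_KEY and HASH_SESSION_KEY as imported in mtp_common.auth; the helper name and return values are illustrative only:

def _session_strategy(session, user_pk, new_hash):
    # mirrors the checks in login() above
    if SESSION_KEY in session:
        if session[SESSION_KEY] != user_pk or (
                new_hash and session.get(HASH_SESSION_KEY) != new_hash):
            return 'flush'      # another user's session: drop all data
        return 'keep'           # same user re-authenticating: data kept
    return 'cycle_key'          # anonymous session: new key, data retained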
ministryofjustice/money-to-prisoners-common
|
mtp_common/auth/__init__.py
|
get_user
|
def get_user(request):
    """
    Returns the user model instance associated with the given request session.
    If no user is retrieved an instance of `MojAnonymousUser` is returned.
    """
    user = None
    try:
        user_id = request.session[SESSION_KEY]
        token = request.session[AUTH_TOKEN_SESSION_KEY]
        user_data = request.session[USER_DATA_SESSION_KEY]
        backend_path = request.session[BACKEND_SESSION_KEY]
    except KeyError:
        pass
    else:
        if backend_path in settings.AUTHENTICATION_BACKENDS:
            backend = load_backend(backend_path)
            user = backend.get_user(user_id, token, user_data)
            # Verify the session
            if hasattr(user, 'get_session_auth_hash'):
                session_hash = request.session.get(HASH_SESSION_KEY)
                session_hash_verified = session_hash and constant_time_compare(
                    session_hash,
                    user.get_session_auth_hash()
                )
                if not session_hash_verified:
                    request.session.flush()
                    user = None
    return user or MojAnonymousUser()
|
python
|
Returns the user model instance associated with the given request session.
If no user is retrieved an instance of `MojAnonymousUser` is returned.
|
train
|
https://github.com/ministryofjustice/money-to-prisoners-common/blob/33c43a2912cb990d9148da7c8718f480f07d90a1/mtp_common/auth/__init__.py#L58-L86
|
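get_user() mirrors Django's auth middleware lookup. A hedged usage sketch, assuming request is a Django request whose session middleware has already run, and that MojAnonymousUser exposes is_authenticated like Django's AnonymousUser:

user = get_user(request)
if user.is_authenticated:
    print('backend user with a verified session hash:', user.pk)
else:
    # missing session keys, unknown backend, or a failed hash check
    print('fell back to MojAnonymousUser')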
ministryofjustice/money-to-prisoners-common
|
mtp_common/auth/__init__.py
|
logout
|
def logout(request):
    """
    Removes the authenticated user's ID from the request and flushes their
    session data.
    """
    # Dispatch the signal before the user is logged out so the receivers have a
    # chance to find out *who* logged out.
    user = getattr(request, 'user', None)
    if hasattr(user, 'is_authenticated') and not user.is_authenticated:
        user = None
    user_logged_out.send(sender=user.__class__, request=request, user=user)

    # remember language choice saved to session
    language = request.session.get(LANGUAGE_SESSION_KEY)

    request.session.flush()

    if language is not None:
        request.session[LANGUAGE_SESSION_KEY] = language

    if hasattr(request, 'user'):
        request.user = MojAnonymousUser()
|
python
|
Removes the authenticated user's ID from the request and flushes their
session data.
|
train
|
https://github.com/ministryofjustice/money-to-prisoners-common/blob/33c43a2912cb990d9148da7c8718f480f07d90a1/mtp_common/auth/__init__.py#L96-L117
|
koreyou/word_embedding_loader
|
word_embedding_loader/saver/word2vec_text.py
|
save
|
def save(f, arr, vocab):
    """
    Save word embedding file.
    Check :func:`word_embedding_loader.saver.glove.save` for the API.
    """
    f.write(('%d %d' % (arr.shape[0], arr.shape[1])).encode('utf-8'))
    for word, idx in vocab:
        _write_line(f, arr[idx], word)
|
python
|
Save word embedding file.
Check :func:`word_embedding_loader.saver.glove.save` for the API.
|
train
|
https://github.com/koreyou/word_embedding_loader/blob/1bc123f1a8bea12646576dcd768dae3ecea39c06/word_embedding_loader/saver/word2vec_text.py#L20-L27
|
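A minimal end-to-end sketch of calling save(); passing words as bytes is an assumption, since encoding is delegated to the module-internal _write_line:

import numpy as np
from word_embedding_loader.saver import word2vec_text

arr = np.asarray([[0.1, 0.2, 0.3],
                  [0.4, 0.5, 0.6]], dtype=np.float32)
vocab = [(b'the', 0), (b'of', 1)]        # (word, row index) pairs
with open('embeddings.txt', 'wb') as f:
    word2vec_text.save(f, arr, vocab)    # header '2 3', then one word per line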
ministryofjustice/money-to-prisoners-common
|
mtp_common/spooling.py
|
spoolable
|
def spoolable(*, pre_condition=True, body_params=()):
    """
    Decorates a function to make it spoolable using uWSGI, but if no spooling mechanism is available,
    the function is called synchronously. All decorated function arguments must be picklable and
    the first annotated with `Context` will receive an object that defines the current execution state.
    Return values are always ignored and all exceptions are caught in spooled mode.

    :param pre_condition: additional condition needed to use spooler
    :param body_params: parameter names that can have large values and should use spooler body
    """
    def decorator(func):
        context_name = None
        keyword_kinds = {inspect.Parameter.POSITIONAL_OR_KEYWORD, inspect.Parameter.KEYWORD_ONLY}
        invalid_body_params = set(body_params)
        for name, parameter in inspect.signature(func).parameters.items():
            if parameter.kind not in keyword_kinds:
                continue
            if not context_name and parameter.annotation is Context:
                context_name = name
            elif name in invalid_body_params:
                invalid_body_params.remove(name)
        if invalid_body_params:
            raise TypeError('Spoolable task body_params must be keyword arguments')

        task = Task(func, context_name=context_name, pre_condition=pre_condition, body_params=body_params)
        spooler.register(task)
        return task

    return decorator
|
python
|
Decorates a function to make it spoolable using uWSGI, but if no spooling mechanism is available,
the function is called synchronously. All decorated function arguments must be picklable and
the first annotated with `Context` will receive an object that defines the current execution state.
Return values are always ignored and all exceptions are caught in spooled mode.
:param pre_condition: additional condition needed to use spooler
:param body_params: parameter names that can have large values and should use spooler body
|
train
|
https://github.com/ministryofjustice/money-to-prisoners-common/blob/33c43a2912cb990d9148da7c8718f480f07d90a1/mtp_common/spooling.py#L144-L172
|
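A hypothetical task definition that satisfies the decorator's checks: the body parameter is keyword-only and the Context-annotated argument is detected automatically. The names are illustrative, and it assumes the returned Task is callable like the wrapped function:

from mtp_common.spooling import Context, spoolable

@spoolable(body_params=('body',))
def send_report(recipient, *, body=b'', context: Context = None):
    # runs via the uWSGI spooler when available, synchronously otherwise;
    # the return value is ignored in spooled mode
    print('sending %d bytes to %s' % (len(body), recipient))

send_report('ops@example.com', body=b'...')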
Jaymon/prom
|
prom/model.py
|
Orm.fields
|
def fields(self):
    """
    return all the fields and their raw values for this Orm instance. This
    property returns a dict with the field names and their current values

    if you want to control the values for outputting to an api, use .jsonable()
    """
    return {k: getattr(self, k, None) for k in self.schema.fields}
|
python
|
return all the fields and their raw values for this Orm instance. This
property returns a dict with the field names and their current values
if you want to control the values for outputting to an api, use .jsonable()
|
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/model.py#L144-L151
|
Jaymon/prom
|
prom/model.py
|
Orm.create
|
def create(cls, fields=None, **fields_kwargs):
    """
    create an instance of cls with the passed in fields and set it into the db

    fields -- dict -- field_name keys, with their respective values
    **fields_kwargs -- dict -- if you would rather pass in fields as name=val, that works also
    """
    # NOTE -- you cannot use hydrate/populate here because populate alters modified fields
    instance = cls(fields, **fields_kwargs)
    instance.save()
    return instance
|
python
|
create an instance of cls with the passed in fields and set it into the db
fields -- dict -- field_name keys, with their respective values
**fields_kwargs -- dict -- if you would rather pass in fields as name=val, that works also
|
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/model.py#L182-L192
|
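Per the docstring, both calling conventions below are equivalent; Pet is a hypothetical Orm subclass whose field declarations are not shown in this row:

p1 = Pet.create({'name': 'rex', 'kind': 'dog'})   # fields as a dict
p2 = Pet.create(name='fido', kind='dog')          # fields as name=val kwargs
assert p2.pk is not None                          # save() ran inside create()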
Jaymon/prom
|
prom/model.py
|
Orm.populate
|
def populate(self, fields=None, **fields_kwargs):
    """take the passed in fields, combine them with missing fields that should
    be there and then run all those through appropriate methods to hydrate this
    orm.

    The method replaces cls.hydrate() since it was becoming hard to understand
    what was going on with all these methods that did things just a little bit
    different.

    This is used to completely set all the fields of self. If you just want
    to set certain fields, you can use the submethod _populate

    :param fields: dict, the fields in a dict
    :param **fields_kwargs: dict, if you would like to pass the fields as key=val
        this picks those up and combines them with fields
    """
    # this will run all the fields of the Orm, not just the fields in fields
    # dict, another name would be hydrate
    pop_fields = {}
    fields = self.make_dict(fields, fields_kwargs)
    for k in self.schema.fields.keys():
        pop_fields[k] = fields.get(k, None)
    self._populate(pop_fields)
|
python
|
take the passed in fields, combine them with missing fields that should
be there and then run all those through appropriate methods to hydrate this
orm.
The method replaces cls.hydrate() since it was becoming hard to understand
what was going on with all these methods that did things just a little bit
different.
This is used to completely set all the fields of self. If you just want
to set certain fields, you can use the submethod _populate
:param fields: dict, the fields in a dict
:param **fields_kwargs: dict, if you would like to pass the fields as key=val
this picks those up and combines them with fields
|
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/model.py#L225-L249
|
Jaymon/prom
|
prom/model.py
|
Orm._populate
|
def _populate(self, fields):
    """this runs all the fields through their iget methods to mimic them
    freshly coming out of the db, then resets modified

    :param fields: dict, the fields that were passed in
    """
    schema = self.schema
    for k, v in fields.items():
        fields[k] = schema.fields[k].iget(self, v)
    self.modify(fields)
    self.reset_modified()
|
python
|
this runs all the fields through their iget methods to mimic them
freshly coming out of the db, then resets modified
:param fields: dict, the fields that were passed in
|
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/model.py#L251-L262
|
Jaymon/prom
|
prom/model.py
|
Orm.depopulate
|
def depopulate(self, is_update):
    """Get all the fields that need to be saved

    :param is_update: bool, True if update query, False if insert
    :returns: dict, key is field_name and val is the field value to be saved
    """
    fields = {}
    schema = self.schema
    for k, field in schema.fields.items():
        is_modified = k in self.modified_fields
        orig_v = getattr(self, k)
        v = field.iset(
            self,
            orig_v,
            is_update=is_update,
            is_modified=is_modified
        )
        if is_modified or v is not None:
            if is_update and field.is_pk() and v == orig_v:
                continue
            else:
                fields[k] = v

    if not is_update:
        for field_name in schema.required_fields.keys():
            if field_name not in fields:
                raise KeyError("Missing required field {}".format(field_name))

    return fields
|
python
|
Get all the fields that need to be saved
:param is_update: bool, True if update query, False if insert
:returns: dict, key is field_name and val is the field value to be saved
|
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/model.py#L264-L294
|
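The required-field check only fires on inserts. A sketch with a hypothetical Pet Orm whose schema marks name as required:

p = Pet()                  # nothing set yet
try:
    p.depopulate(False)    # is_update=False: insert-style depopulate
except KeyError as exc:
    print(exc)             # 'Missing required field name'
p.depopulate(True)         # update-style: required fields not enforced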
Jaymon/prom
|
prom/model.py
|
Orm.insert
|
def insert(self):
    """persist the field values of this orm"""
    ret = True

    schema = self.schema
    fields = self.depopulate(False)

    q = self.query
    q.set_fields(fields)
    pk = q.insert()
    if pk:
        fields = q.fields
        fields[schema.pk.name] = pk
        self._populate(fields)
    else:
        ret = False

    return ret
|
python
|
persist the field values of this orm
|
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/model.py#L296-L314
|
Jaymon/prom
|
prom/model.py
|
Orm.update
|
def update(self):
    """re-persist the updated field values of this orm that has a primary key"""
    ret = True
    fields = self.depopulate(True)

    q = self.query
    q.set_fields(fields)

    pk = self.pk
    if pk:
        q.is_field(self.schema.pk.name, pk)
    else:
        raise ValueError("You cannot update without a primary key")

    if q.update():
        fields = q.fields
        self._populate(fields)
    else:
        ret = False

    return ret
|
python
|
re-persist the updated field values of this orm that has a primary key
|
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/model.py#L316-L337
|
Jaymon/prom
|
prom/model.py
|
Orm.save
|
def save(self):
    """
    persist the fields in this object into the db, this will update if _id is set, otherwise
    it will insert

    see also -- .insert(), .update()
    """
    ret = False

    # we will only use the primary key if it hasn't been modified
    pk = None
    if self.schema.pk.name not in self.modified_fields:
        pk = self.pk

    if pk:
        ret = self.update()
    else:
        ret = self.insert()

    return ret
|
python
|
persist the fields in this object into the db, this will update if _id is set, otherwise
it will insert
see also -- .insert(), .update()
|
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/model.py#L340-L359
|
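save() dispatches on whether an unmodified primary key is present. A hypothetical walkthrough, again with a Pet Orm subclass:

p = Pet(name='rex')
p.save()            # no pk yet -> insert(); the new pk is populated
p.name = 'fido'
p.save()            # pk set and not in modified_fields -> update()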
Jaymon/prom
|
prom/model.py
|
Orm.delete
|
def delete(self):
    """delete the object from the db if pk is set"""
    ret = False
    q = self.query
    pk = self.pk
    if pk:
        pk_name = self.schema.pk.name
        self.query.is_field(pk_name, pk).delete()
        setattr(self, pk_name, None)

        # mark all the fields that still exist as modified
        self.reset_modified()
        for field_name in self.schema.fields:
            if getattr(self, field_name, None) != None:
                self.modified_fields.add(field_name)

        ret = True

    return ret
|
python
|
delete the object from the db if pk is set
|
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/model.py#L361-L379
|
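Because delete() clears the pk and re-marks surviving values as modified, the instance can be re-saved as a fresh row. A hypothetical sketch:

p = Pet.create(name='rex')
p.delete()               # DELETE ... WHERE pk = <old pk>
assert p.pk is None
p.save()                 # no pk -> insert(): writes a new row with a new pk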
Jaymon/prom
|
prom/model.py
|
Orm.reset_modified
|
def reset_modified(self):
    """
    reset field modification tracking

    this is handy for when you are loading a new Orm with the results from a query and
    you don't want set() to do anything, you can Orm(**fields) and then orm.reset_modified() to
    clear all the passed in fields from the modified list
    """
    self.modified_fields = set()

    # compensate for us not having knowledge of certain fields changing
    for field_name, field in self.schema.normal_fields.items():
        if isinstance(field, ObjectField):
            self.modified_fields.add(field_name)
|
python
|
reset field modification tracking
this is handy for when you are loading a new Orm with the results from a query and
you don't want set() to do anything, you can Orm(**fields) and then orm.reset_modified() to
clear all the passed in fields from the modified list
|
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/model.py#L385-L398
|
Jaymon/prom
|
prom/model.py
|
Orm.modify
|
def modify(self, fields=None, **fields_kwargs):
    """update the fields of this instance with the values in dict fields

    this should rarely be messed with, if you would like to manipulate the
    fields you should override _modify()

    :param fields: dict, the fields in a dict
    :param **fields_kwargs: dict, if you would like to pass the fields as key=val
        this picks those up and combines them with fields
    :returns: set, all the names of the fields that were modified
    """
    modified_fields = set()
    fields = self.make_dict(fields, fields_kwargs)
    fields = self._modify(fields)
    for field_name, field_val in fields.items():
        in_schema = field_name in self.schema.fields
        if in_schema:
            setattr(self, field_name, field_val)
            modified_fields.add(field_name)

    return modified_fields
|
python
|
update the fields of this instance with the values in dict fields
this should rarely be messed with, if you would like to manipulate the
fields you should override _modify()
:param fields: dict, the fields in a dict
:param **fields_kwargs: dict, if you would like to pass the fields as key=val
this picks those up and combines them with fields
:returns: set, all the names of the fields that were modified
|
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/model.py#L400-L420
|
Jaymon/prom
|
prom/model.py
|
Orm.jsonable
|
def jsonable(self, *args, **options):
    """
    return a public version of this instance that can be jsonified

    Note that this does not return _id, _created, _updated, the reason why is
    because lots of times you have a different name for _id (like if it is a
    user object, then you might want to call it user_id instead of _id) and I
    didn't want to make assumptions

    note 2, I'm not crazy about the name, but I didn't like to_dict() and pretty
    much any time I need to convert the object to a dict is for json, I kind of
    like dictify() though, but I've already used this method in so many places
    """
    d = {}
    for field_name, field in self.schema.normal_fields.items():
        field_val = getattr(self, field_name, None)
        field_val = field.jsonable(self, field_val)
        if field_val is not None:
            d[field_name] = field_val

    return d
|
python
|
return a public version of this instance that can be jsonified
Note that this does not return _id, _created, _updated, the reason why is
because lots of times you have a different name for _id (like if it is a
user object, then you might want to call it user_id instead of _id) and I
didn't want to make assumptions
note 2, I'm not crazy about the name, but I didn't like to_dict() and pretty
much any time I need to convert the object to a dict is for json, I kind of
like dictify() though, but I've already used this method in so many places
|
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/model.py#L466-L486
|
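A usage sketch; per the docstring the magic fields (_id, _created, _updated) are excluded and None values are dropped. Pet remains the hypothetical Orm subclass from the earlier examples:

import json

p = Pet.create(name='rex', kind='dog')
payload = p.jsonable()         # only non-None normal fields
print(json.dumps(payload))     # e.g. {"name": "rex", "kind": "dog"}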
guaix-ucm/pyemir
|
emirdrp/tools/merge2images.py
|
merge2images
|
def merge2images(hdu1, hdu2, debugplot):
    """Merge 2 EMIR images, averaging the common region.

    Parameters
    ----------
    hdu1 : HDUList object
        Input image #1.
    hdu2 : HDUList object
        Input image #2.
    debugplot : int
        Determines whether intermediate computations and/or plots
        are displayed. The valid codes are defined in
        numina.array.display.pause_debugplot.

    Returns
    -------
    image_merged : HDUList object
        Output image.
    """
    # check image dimensions
    image_header = hdu1[0].header
    image_header_ = hdu2[0].header
    #
    naxis = image_header['naxis']
    naxis_ = image_header_['naxis']
    if naxis != naxis_:
        raise ValueError('Incompatible NAXIS values: {}, {}'.format(
            naxis, naxis_
        ))
    naxis1 = image_header['naxis1']
    naxis1_ = image_header_['naxis1']
    if naxis1 != naxis1_:
        raise ValueError('Incompatible NAXIS1 values: {}, {}'.format(
            naxis1, naxis1_
        ))
    if naxis == 1:
        naxis2 = 1
    elif naxis == 2:
        naxis2 = image_header['naxis2']
        naxis2_ = image_header_['naxis2']
        if naxis2 != naxis2_:
            raise ValueError('Incompatible NAXIS2 values: {}, {}'.format(
                naxis2, naxis2_
            ))
    else:
        raise ValueError('Unexpected NAXIS value: {}'.format(naxis))

    # initialize output array
    image2d_merged = np.zeros((naxis2, naxis1))

    # main loop
    data1 = hdu1[0].data
    data2 = hdu2[0].data
    for i in range(naxis2):
        sp1 = data1[i, :]
        sp2 = data2[i, :]
        jmin1, jmax1 = find_pix_borders(sp1, sought_value=0)
        jmin2, jmax2 = find_pix_borders(sp2, sought_value=0)
        image2d_merged[i, :] = sp1 + sp2
        useful1 = (jmin1 != -1) and (jmax1 != naxis1)
        useful2 = (jmin2 != -1) and (jmax2 != naxis1)
        if useful1 and useful2:
            jmineff = max(jmin1, jmin2)
            jmaxeff = min(jmax1, jmax2)
            image2d_merged[i, jmineff:(jmaxeff+1)] /= 2

    # return result
    image_merged = fits.PrimaryHDU(data=image2d_merged.astype(np.float32),
                                   header=image_header)
    return image_merged
|
python
|
Merge 2 EMIR images, averaging the common region.
Parameters
----------
hdu1 : HDUList object
Input image #1.
hdu2 : HDUList object
Input image #2.
debugplot : int
Determines whether intermediate computations and/or plots
are displayed. The valid codes are defined in
numina.array.display.pause_debugplot.
Returns
-------
image_merged : HDUList object
Output image.
|
train
|
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/merge2images.py#L34-L106
|
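A minimal sketch merging two synthetic single-extension FITS images whose exposed regions overlap. It assumes find_pix_borders returns the first and last non-zero pixel indices of a row, with sentinel values outside that range when a row is empty:

import numpy as np
from astropy.io import fits
from emirdrp.tools.merge2images import merge2images

left = np.zeros((2, 6))
left[:, :4] = 1.0           # exposed in columns 0-3
right = np.zeros((2, 6))
right[:, 2:] = 1.0          # exposed in columns 2-5
hdu1 = fits.HDUList([fits.PrimaryHDU(data=left)])
hdu2 = fits.HDUList([fits.PrimaryHDU(data=right)])
merged = merge2images(hdu1, hdu2, debugplot=0)
print(merged.data[0])       # all ones: summed, overlap (columns 2-3) halved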
Jaymon/prom
|
prom/interface/__init__.py
|
configure_environ
|
def configure_environ(dsn_env_name='PROM_DSN', connection_class=DsnConnection):
    """
    configure interfaces based on environment variables

    by default, when prom is imported, it will look for PROM_DSN, and PROM_DSN_N (where
    N is 1 through infinity) in the environment, if it finds them, it will assume they
    are dsn urls that prom understands and will configure db connections with them. If you
    don't want this behavior (ie, you want to configure prom manually) then just make sure
    you don't have any environment variables with matching names

    The num checks (eg PROM_DSN_1, PROM_DSN_2) go in order, so you can't do PROM_DSN_1, PROM_DSN_3,
    because it will fail on _2 and move on, so make sure your num dsns are in order (eg, 1, 2, 3, ...)

    example --
        export PROM_DSN_1=some.Interface://host:port/dbname#i1
        export PROM_DSN_2=some.Interface://host2:port/dbname2#i2
        $ python
        >>> import prom
        >>> print prom.interfaces # prints a dict with interfaces i1 and i2 keys

    :param dsn_env_name: string, the name of the environment variables
    """
    inters = []
    cs = dsnparse.parse_environs(dsn_env_name, parse_class=connection_class)
    for c in cs:
        inter = c.interface
        set_interface(inter, c.name)
        inters.append(inter)
    return inters
|
python
|
def configure_environ(dsn_env_name='PROM_DSN', connection_class=DsnConnection):
"""
configure interfaces based on environment variables
by default, when prom is imported, it will look for PROM_DSN, and PROM_DSN_N (where
N is 1 through infinity) in the environment, if it finds them, it will assume they
are dsn urls that prom understands and will configure db connections with them. If you
don't want this behavior (ie, you want to configure prom manually) then just make sure
you don't have any environment variables with matching names
The num checks (eg PROM_DSN_1, PROM_DSN_2) go in order, so you can't do PROM_DSN_1, PROM_DSN_3,
because it will fail on _2 and move on, so make sure your num dsns are in order (eg, 1, 2, 3, ...)
example --
export PROM_DSN_1=some.Interface://host:port/dbname#i1
export PROM_DSN_2=some.Interface://host2:port/dbname2#i2
$ python
>>> import prom
>>> print prom.interfaces # prints a dict with interfaces i1 and i2 keys
:param dsn_env_name: string, the name of the environment variables
"""
inters = []
cs = dsnparse.parse_environs(dsn_env_name, parse_class=connection_class)
for c in cs:
inter = c.interface
set_interface(inter, c.name)
inters.append(inter)
return inters
|
[
"def",
"configure_environ",
"(",
"dsn_env_name",
"=",
"'PROM_DSN'",
",",
"connection_class",
"=",
"DsnConnection",
")",
":",
"inters",
"=",
"[",
"]",
"cs",
"=",
"dsnparse",
".",
"parse_environs",
"(",
"dsn_env_name",
",",
"parse_class",
"=",
"connection_class",
")",
"for",
"c",
"in",
"cs",
":",
"inter",
"=",
"c",
".",
"interface",
"set_interface",
"(",
"inter",
",",
"c",
".",
"name",
")",
"inters",
".",
"append",
"(",
"inter",
")",
"return",
"inters"
] |
configure interfaces based on environment variables
by default, when prom is imported, it will look for PROM_DSN, and PROM_DSN_N (where
N is 1 through infinity) in the environment, if it finds them, it will assume they
are dsn urls that prom understands and will configure db connections with them. If you
don't want this behavior (ie, you want to configure prom manually) then just make sure
you don't have any environment variables with matching names
The num checks (eg PROM_DSN_1, PROM_DSN_2) go in order, so you can't do PROM_DSN_1, PROM_DSN_3,
because it will fail on _2 and move on, so make sure your num dsns are in order (eg, 1, 2, 3, ...)
example --
export PROM_DSN_1=some.Interface://host:port/dbname#i1
export PROM_DSN_2=some.Interface://host2:port/dbname2#i2
$ python
>>> import prom
>>> print prom.interfaces # prints a dict with interfaces i1 and i2 keys
:param dsn_env_name: string, the name of the environment variables
|
[
"configure",
"interfaces",
"based",
"on",
"environment",
"variables"
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/__init__.py#L13-L42
|
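A schematic end-to-end use of configure_environ: export numbered DSNs before prom is imported and the matching interfaces appear under their fragment names. The interface classpath and hosts below are placeholders, so this sketch needs a real, importable interface class to actually run:

import os

os.environ["PROM_DSN_1"] = "some.module.Interface://host:port/dbname#i1"
os.environ["PROM_DSN_2"] = "some.module.Interface://host2:port/dbname2#i2"

import prom  # importing prom triggers configure_environ() per the docstring
print(sorted(prom.interface.interfaces))  # expect ['i1', 'i2']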
Jaymon/prom
|
prom/interface/__init__.py
|
configure
|
def configure(dsn, connection_class=DsnConnection):
"""
configure an interface to be used to query a backend
you use this function to configure an Interface using a dsn, then you can get
that interface using the get_interface() method
dsn -- string -- a properly formatted prom dsn, see DsnConnection for how to format the dsn
"""
c = dsnparse.parse(dsn, parse_class=connection_class)
inter = c.interface
set_interface(inter, c.name)
return inter
|
python
|
def configure(dsn, connection_class=DsnConnection):
"""
configure an interface to be used to query a backend
you use this function to configure an Interface using a dsn, then you can get
that interface using the get_interface() method
dsn -- string -- a properly formatted prom dsn, see DsnConnection for how to format the dsn
"""
c = dsnparse.parse(dsn, parse_class=connection_class)
inter = c.interface
set_interface(inter, c.name)
return inter
|
[
"def",
"configure",
"(",
"dsn",
",",
"connection_class",
"=",
"DsnConnection",
")",
":",
"c",
"=",
"dsnparse",
".",
"parse",
"(",
"dsn",
",",
"parse_class",
"=",
"connection_class",
")",
"inter",
"=",
"c",
".",
"interface",
"set_interface",
"(",
"inter",
",",
"c",
".",
"name",
")",
"return",
"inter"
] |
configure an interface to be used to query a backend
you use this function to configure an Interface using a dsn, then you can get
that interface using the get_interface() method
dsn -- string -- a properly formatted prom dsn, see DsnConnection for how to format the dsn
|
[
"configure",
"an",
"interface",
"to",
"be",
"used",
"to",
"query",
"a",
"backend"
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/__init__.py#L45-L57
|
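The docstring pairs configure with get_interface() for retrieval; a schematic pairing, again with a placeholder DSN and assuming get_interface is importable alongside configure in prom/interface/__init__.py:

from prom.interface import configure, get_interface

configure("some.module.Interface://host:port/dbname#conn1")
inter = get_interface("conn1")  # the fragment (#conn1) becomes the connection name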
Jaymon/prom
|
prom/interface/__init__.py
|
set_interface
|
def set_interface(interface, name=''):
"""
don't want to bother with a dsn? Use this method to make an interface available
"""
global interfaces
if not interface: raise ValueError('interface is empty')
# close down the interface before we discard it
if name in interfaces:
interfaces[name].close()
interfaces[name] = interface
|
python
|
def set_interface(interface, name=''):
"""
don't want to bother with a dsn? Use this method to make an interface available
"""
global interfaces
if not interface: raise ValueError('interface is empty')
# close down the interface before we discard it
if name in interfaces:
interfaces[name].close()
interfaces[name] = interface
|
[
"def",
"set_interface",
"(",
"interface",
",",
"name",
"=",
"''",
")",
":",
"global",
"interfaces",
"if",
"not",
"interface",
":",
"raise",
"ValueError",
"(",
"'interface is empty'",
")",
"# close down the interface before we discard it",
"if",
"name",
"in",
"interfaces",
":",
"interfaces",
"[",
"name",
"]",
".",
"close",
"(",
")",
"interfaces",
"[",
"name",
"]",
"=",
"interface"
] |
don't want to bother with a dsn? Use this method to make an interface available
|
[
"don",
"t",
"want",
"to",
"bother",
"with",
"a",
"dsn?",
"Use",
"this",
"method",
"to",
"make",
"an",
"interface",
"available"
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/__init__.py#L69-L81
|
Fuyukai/asyncwebsockets
|
asyncwebsockets/server.py
|
open_websocket_server
|
async def open_websocket_server(sock, filter=None): # pylint: disable=W0622
"""
A context manager which serves this websocket.
:param filter: an async callback which accepts the connection request
and returns a bool, or an explicit Accept/Reject message.
"""
ws = await create_websocket_server(sock, filter=filter)
try:
yield ws
finally:
await ws.close()
|
python
|
async def open_websocket_server(sock, filter=None): # pylint: disable=W0622
"""
A context manager which serves this websocket.
:param filter: an async callback which accepts the connection request
and returns a bool, or an explicit Accept/Reject message.
"""
ws = await create_websocket_server(sock, filter=filter)
try:
yield ws
finally:
await ws.close()
|
[
"async",
"def",
"open_websocket_server",
"(",
"sock",
",",
"filter",
"=",
"None",
")",
":",
"# pylint: disable=W0622",
"ws",
"=",
"await",
"create_websocket_server",
"(",
"sock",
",",
"filter",
"=",
"filter",
")",
"try",
":",
"yield",
"ws",
"finally",
":",
"await",
"ws",
".",
"close",
"(",
")"
] |
A context manager which serves this websocket.
:param filter: an async callback which accepts the connection request
and returns a bool, or an explicit Accept/Reject message.
|
[
"A",
"context",
"manager",
"which",
"serves",
"this",
"websocket",
"."
] |
train
|
https://github.com/Fuyukai/asyncwebsockets/blob/e33e75fd51ce5ae0feac244e8407d2672c5b4745/asyncwebsockets/server.py#L14-L25
|
Fuyukai/asyncwebsockets
|
asyncwebsockets/server.py
|
create_websocket_server
|
async def create_websocket_server(sock, filter=None): # pylint: disable=W0622
"""
A more low-level form of open_websocket_server.
You are responsible for closing this websocket.
"""
ws = Websocket()
await ws.start_server(sock, filter=filter)
return ws
|
python
|
async def create_websocket_server(sock, filter=None): # pylint: disable=W0622
"""
A more low-level form of open_websocket_server.
You are responsible for closing this websocket.
"""
ws = Websocket()
await ws.start_server(sock, filter=filter)
return ws
|
[
"async",
"def",
"create_websocket_server",
"(",
"sock",
",",
"filter",
"=",
"None",
")",
":",
"# pylint: disable=W0622",
"ws",
"=",
"Websocket",
"(",
")",
"await",
"ws",
".",
"start_server",
"(",
"sock",
",",
"filter",
"=",
"filter",
")",
"return",
"ws"
] |
A more low-level form of open_websocket_server.
You are responsible for closing this websocket.
|
[
"A",
"more",
"low",
"-",
"level",
"form",
"of",
"open_websocket_server",
".",
"You",
"are",
"responsible",
"for",
"closing",
"this",
"websocket",
"."
] |
train
|
https://github.com/Fuyukai/asyncwebsockets/blob/e33e75fd51ce5ae0feac244e8407d2672c5b4745/asyncwebsockets/server.py#L28-L35
|
Jaymon/prom
|
prom/config.py
|
Schema.normal_fields
|
def normal_fields(self):
"""fields that aren't magic (eg, aren't _id, _created, _updated)"""
return {f:v for f, v in self.fields.items() if not f.startswith('_')}
|
python
|
def normal_fields(self):
"""fields that aren't magic (eg, aren't _id, _created, _updated)"""
return {f:v for f, v in self.fields.items() if not f.startswith('_')}
|
[
"def",
"normal_fields",
"(",
"self",
")",
":",
"return",
"{",
"f",
":",
"v",
"for",
"f",
",",
"v",
"in",
"self",
".",
"fields",
".",
"items",
"(",
")",
"if",
"not",
"f",
".",
"startswith",
"(",
"'_'",
")",
"}"
] |
fields that aren't magic (eg, aren't _id, _created, _updated)
|
[
"fields",
"that",
"aren",
"t",
"magic",
"(",
"eg",
"aren",
"t",
"_id",
"_created",
"_updated",
")"
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/config.py#L170-L172
|
Jaymon/prom
|
prom/config.py
|
Schema.required_fields
|
def required_fields(self):
"""The normal required fields (eg, no magic fields like _id are included)"""
return {f:v for f, v in self.normal_fields.items() if v.required}
|
python
|
def required_fields(self):
"""The normal required fields (eg, no magic fields like _id are included)"""
return {f:v for f, v in self.normal_fields.items() if v.required}
|
[
"def",
"required_fields",
"(",
"self",
")",
":",
"return",
"{",
"f",
":",
"v",
"for",
"f",
",",
"v",
"in",
"self",
".",
"normal_fields",
".",
"items",
"(",
")",
"if",
"v",
".",
"required",
"}"
] |
The normal required fields (eg, no magic fields like _id are included)
|
[
"The",
"normal",
"required",
"fields",
"(",
"eg",
"no",
"magic",
"fields",
"like",
"_id",
"are",
"included",
")"
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/config.py#L175-L177
|
Jaymon/prom
|
prom/config.py
|
Schema.magic_fields
|
def magic_fields(self):
"""the magic fields for the schema"""
return {f:v for f, v in self.fields.items() if f.startswith('_')}
|
python
|
def magic_fields(self):
"""the magic fields for the schema"""
return {f:v for f, v in self.fields.items() if f.startswith('_')}
|
[
"def",
"magic_fields",
"(",
"self",
")",
":",
"return",
"{",
"f",
":",
"v",
"for",
"f",
",",
"v",
"in",
"self",
".",
"fields",
".",
"items",
"(",
")",
"if",
"f",
".",
"startswith",
"(",
"'_'",
")",
"}"
] |
the magic fields for the schema
|
[
"the",
"magic",
"fields",
"for",
"the",
"schema"
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/config.py#L180-L182
|
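The three properties above (normal_fields, required_fields, magic_fields) all hinge on one naming convention: magic fields start with an underscore. Stripped of the Schema class, the same partition on a plain dict:

fields = {"_id": 1, "_created": 2, "foo": 3, "bar": 4}
normal = {f: v for f, v in fields.items() if not f.startswith("_")}
magic = {f: v for f, v in fields.items() if f.startswith("_")}
print(normal)  # {'foo': 3, 'bar': 4}
print(magic)   # {'_id': 1, '_created': 2}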
Jaymon/prom
|
prom/config.py
|
Schema.set_index
|
def set_index(self, index_name, index):
"""
add an index to the schema
for the most part, you will use the __getattr__ method of adding indexes for a more fluid interface,
but you can use this if you want to get closer to the bare metal
index_name -- string -- the name of the index
index -- Index() -- an Index instance
"""
if not index_name:
raise ValueError("index_name must have a value")
if index_name in self.indexes:
raise ValueError("index_name {} has already been defined on {}".format(
index_name, str(self.indexes[index_name].fields)
))
if not isinstance(index, Index): raise ValueError("{} is not an Index instance".format(type(index)))
index.name = index_name
self.indexes[index_name] = index
return self
|
python
|
def set_index(self, index_name, index):
"""
add an index to the schema
for the most part, you will use the __getattr__ method of adding indexes for a more fluid interface,
but you can use this if you want to get closer to the bare metal
index_name -- string -- the name of the index
index -- Index() -- an Index instance
"""
if not index_name:
raise ValueError("index_name must have a value")
if index_name in self.indexes:
raise ValueError("index_name {} has already been defined on {}".format(
index_name, str(self.indexes[index_name].fields)
))
if not isinstance(index, Index): raise ValueError("{} is not an Index instance".format(type(index)))
index.name = index_name
self.indexes[index_name] = index
return self
|
[
"def",
"set_index",
"(",
"self",
",",
"index_name",
",",
"index",
")",
":",
"if",
"not",
"index_name",
":",
"raise",
"ValueError",
"(",
"\"index_name must have a value\"",
")",
"if",
"index_name",
"in",
"self",
".",
"indexes",
":",
"raise",
"ValueError",
"(",
"\"index_name {} has already been defined on {}\"",
".",
"format",
"(",
"index_name",
",",
"str",
"(",
"self",
".",
"indexes",
"[",
"index_name",
"]",
".",
"fields",
")",
")",
")",
"if",
"not",
"isinstance",
"(",
"index",
",",
"Index",
")",
":",
"raise",
"ValueError",
"(",
"\"{} is not an Index instance\"",
".",
"format",
"(",
"type",
"(",
"index",
")",
")",
")",
"index",
".",
"name",
"=",
"index_name",
"self",
".",
"indexes",
"[",
"index_name",
"]",
"=",
"index",
"return",
"self"
] |
add an index to the schema
for the most part, you will use the __getattr__ method of adding indexes for a more fluid interface,
but you can use this if you want to get closer to the bare metal
index_name -- string -- the name of the index
index -- Index() -- an Index instance
|
[
"add",
"an",
"index",
"to",
"the",
"schema"
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/config.py#L281-L301
|
Jaymon/prom
|
prom/config.py
|
Field.schema
|
def schema(self):
"""return the schema instance if this is reference to another table"""
if not hasattr(self, "_schema"):
ret = None
o = self._type
if isinstance(o, type):
ret = getattr(o, "schema", None)
elif isinstance(o, Schema):
ret = o
else:
module, klass = utils.get_objects(o)
ret = klass.schema
self._schema = ret
return self._schema
|
python
|
def schema(self):
"""return the schema instance if this is reference to another table"""
if not hasattr(self, "_schema"):
ret = None
o = self._type
if isinstance(o, type):
ret = getattr(o, "schema", None)
elif isinstance(o, Schema):
ret = o
else:
module, klass = utils.get_objects(o)
ret = klass.schema
self._schema = ret
return self._schema
|
[
"def",
"schema",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"\"_schema\"",
")",
":",
"ret",
"=",
"None",
"o",
"=",
"self",
".",
"_type",
"if",
"isinstance",
"(",
"o",
",",
"type",
")",
":",
"ret",
"=",
"getattr",
"(",
"o",
",",
"\"schema\"",
",",
"None",
")",
"elif",
"isinstance",
"(",
"o",
",",
"Schema",
")",
":",
"ret",
"=",
"o",
"else",
":",
"module",
",",
"klass",
"=",
"utils",
".",
"get_objects",
"(",
"o",
")",
"ret",
"=",
"klass",
".",
"schema",
"self",
".",
"_schema",
"=",
"ret",
"return",
"self",
".",
"_schema"
] |
return the schema instance if this is a reference to another table
|
[
"return",
"the",
"schema",
"instance",
"if",
"this",
"is",
"reference",
"to",
"another",
"table"
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/config.py#L390-L407
|
Jaymon/prom
|
prom/config.py
|
Field.fval
|
def fval(self, instance):
"""return the raw value that this property is holding internally for instance"""
try:
val = instance.__dict__[self.instance_field_name]
except KeyError as e:
#raise AttributeError(str(e))
val = None
return val
|
python
|
def fval(self, instance):
"""return the raw value that this property is holding internally for instance"""
try:
val = instance.__dict__[self.instance_field_name]
except KeyError as e:
#raise AttributeError(str(e))
val = None
return val
|
[
"def",
"fval",
"(",
"self",
",",
"instance",
")",
":",
"try",
":",
"val",
"=",
"instance",
".",
"__dict__",
"[",
"self",
".",
"instance_field_name",
"]",
"except",
"KeyError",
"as",
"e",
":",
"#raise AttributeError(str(e))",
"val",
"=",
"None",
"return",
"val"
] |
return the raw value that this property is holding internally for instance
|
[
"return",
"the",
"raw",
"value",
"that",
"this",
"property",
"is",
"holding",
"internally",
"for",
"instance"
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/config.py#L561-L569
|
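fval() is the read half of a descriptor that parks its value in the instance __dict__ under a per-field key and treats a missing key as "never set". A minimal self-contained sketch of that storage pattern (DemoField/DemoOrm are invented for the demo, not prom classes):

class DemoField:
    def __init__(self, name):
        self.instance_field_name = "_" + name

    def fval(self, instance):
        # a missing key means the field was never set
        return instance.__dict__.get(self.instance_field_name)

class DemoOrm:
    pass

o = DemoOrm()
f = DemoField("foo")
print(f.fval(o))  # None
o.__dict__["_foo"] = 42
print(f.fval(o))  # 42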
openmicroanalysis/pyxray
|
pyxray/base.py
|
_Database.set_default_reference
|
def set_default_reference(self, method, reference):
"""
Set the default reference for a method.
:arg method: name of a method
:type method: :class:`str`
{reference}
"""
if method not in self._available_methods:
raise ValueError('Unknown method: {0}'.format(method))
self._default_references[method] = reference
|
python
|
def set_default_reference(self, method, reference):
"""
Set the default reference for a method.
:arg method: name of a method
:type method: :class:`str`
{reference}
"""
if method not in self._available_methods:
raise ValueError('Unknown method: {0}'.format(method))
self._default_references[method] = reference
|
[
"def",
"set_default_reference",
"(",
"self",
",",
"method",
",",
"reference",
")",
":",
"if",
"method",
"not",
"in",
"self",
".",
"_available_methods",
":",
"raise",
"ValueError",
"(",
"'Unknown method: {0}'",
".",
"format",
"(",
"method",
")",
")",
"self",
".",
"_default_references",
"[",
"method",
"]",
"=",
"reference"
] |
Set the default reference for a method.
:arg method: name of a method
:type method: :class:`str`
{reference}
|
[
"Set",
"the",
"default",
"reference",
"for",
"a",
"method",
"."
] |
train
|
https://github.com/openmicroanalysis/pyxray/blob/cae89677f00ebcc0952f94d1ab70e6b35e1a51e9/pyxray/base.py#L74-L85
|
openmicroanalysis/pyxray
|
pyxray/base.py
|
_Database.get_default_reference
|
def get_default_reference(self, method):
"""
Returns the default reference for a method.
:arg method: name of a method
:type method: :class:`str`
:return: reference
:rtype: :class:`Reference <pyxray.descriptor.Reference>` or :class:`str`
"""
if method not in self._available_methods:
raise ValueError('Unknown method: {0}'.format(method))
return self._default_references.get(method)
|
python
|
def get_default_reference(self, method):
"""
Returns the default reference for a method.
:arg method: name of a method
:type method: :class:`str`
:return: reference
:rtype: :class:`Reference <pyxray.descriptor.Reference>` or :class:`str`
"""
if method not in self._available_methods:
raise ValueError('Unknown method: {0}'.format(method))
return self._default_references.get(method)
|
[
"def",
"get_default_reference",
"(",
"self",
",",
"method",
")",
":",
"if",
"method",
"not",
"in",
"self",
".",
"_available_methods",
":",
"raise",
"ValueError",
"(",
"'Unknown method: {0}'",
".",
"format",
"(",
"method",
")",
")",
"return",
"self",
".",
"_default_references",
".",
"get",
"(",
"method",
")"
] |
Returns the default reference for a method.
:arg method: name of a method
:type method: :class:`str`
:return: reference
:rtype: :class:`Reference <pyxray.descriptor.Reference>` or :class:`str`
|
[
"Returns",
"the",
"default",
"reference",
"for",
"a",
"method",
"."
] |
train
|
https://github.com/openmicroanalysis/pyxray/blob/cae89677f00ebcc0952f94d1ab70e6b35e1a51e9/pyxray/base.py#L87-L99
|
openmicroanalysis/pyxray
|
pyxray/base.py
|
_Database.print_element_xray_transitions
|
def print_element_xray_transitions(self, element, file=sys.stdout, tabulate_kwargs=None):
"""
Prints all x-ray transitions for an element, with their different
notations and energy.
{element}
:arg file: file for output, default to standard out
"""
header = ['IUPAC', 'Siegbahn', 'Energy (eV)', 'Probability']
rows = []
for xraytransition in self.element_xray_transitions(element):
try:
iupac = self.xray_transition_notation(xraytransition, 'iupac')
except:
iupac = ''
try:
siegbahn = self.xray_transition_notation(xraytransition, 'siegbahn')
except:
siegbahn = ''
try:
energy_eV = self.xray_transition_energy_eV(element, xraytransition)
except:
energy_eV = ''
try:
probability = self.xray_transition_probability(element, xraytransition)
except:
probability = ''
rows.append([iupac, siegbahn, energy_eV, probability])
rows.sort(key=operator.itemgetter(2))
if tabulate_kwargs is None:
tabulate_kwargs = {}
file.write(tabulate.tabulate(rows, header, **tabulate_kwargs))
|
python
|
def print_element_xray_transitions(self, element, file=sys.stdout, tabulate_kwargs=None):
"""
Prints all x-ray transitions for an element, with their different
notations and energy.
{element}
:arg file: file for output, default to standard out
"""
header = ['IUPAC', 'Siegbahn', 'Energy (eV)', 'Probability']
rows = []
for xraytransition in self.element_xray_transitions(element):
try:
iupac = self.xray_transition_notation(xraytransition, 'iupac')
except:
iupac = ''
try:
siegbahn = self.xray_transition_notation(xraytransition, 'siegbahn')
except:
siegbahn = ''
try:
energy_eV = self.xray_transition_energy_eV(element, xraytransition)
except:
energy_eV = ''
try:
probability = self.xray_transition_probability(element, xraytransition)
except:
probability = ''
rows.append([iupac, siegbahn, energy_eV, probability])
rows.sort(key=operator.itemgetter(2))
if tabulate_kwargs is None:
tabulate_kwargs = {}
file.write(tabulate.tabulate(rows, header, **tabulate_kwargs))
|
[
"def",
"print_element_xray_transitions",
"(",
"self",
",",
"element",
",",
"file",
"=",
"sys",
".",
"stdout",
",",
"tabulate_kwargs",
"=",
"None",
")",
":",
"header",
"=",
"[",
"'IUPAC'",
",",
"'Siegbahn'",
",",
"'Energy (eV)'",
",",
"'Probability'",
"]",
"rows",
"=",
"[",
"]",
"for",
"xraytransition",
"in",
"self",
".",
"element_xray_transitions",
"(",
"element",
")",
":",
"try",
":",
"iupac",
"=",
"self",
".",
"xray_transition_notation",
"(",
"xraytransition",
",",
"'iupac'",
")",
"except",
":",
"iupac",
"=",
"''",
"try",
":",
"siegbahn",
"=",
"self",
".",
"xray_transition_notation",
"(",
"xraytransition",
",",
"'siegbahn'",
")",
"except",
":",
"siegbahn",
"=",
"''",
"try",
":",
"energy_eV",
"=",
"self",
".",
"xray_transition_energy_eV",
"(",
"element",
",",
"xraytransition",
")",
"except",
":",
"energy_eV",
"=",
"''",
"try",
":",
"probability",
"=",
"self",
".",
"xray_transition_probability",
"(",
"element",
",",
"xraytransition",
")",
"except",
":",
"probability",
"=",
"''",
"rows",
".",
"append",
"(",
"[",
"iupac",
",",
"siegbahn",
",",
"energy_eV",
",",
"probability",
"]",
")",
"rows",
".",
"sort",
"(",
"key",
"=",
"operator",
".",
"itemgetter",
"(",
"2",
")",
")",
"if",
"tabulate_kwargs",
"is",
"None",
":",
"tabulate_kwargs",
"=",
"{",
"}",
"file",
".",
"write",
"(",
"tabulate",
".",
"tabulate",
"(",
"rows",
",",
"header",
",",
"*",
"*",
"tabulate_kwargs",
")",
")"
] |
Prints all x-ray transitions for an element, with their different
notations and energy.
{element}
:arg file: file for output, default to standard out
|
[
"Prints",
"all",
"x",
"-",
"ray",
"transitions",
"for",
"an",
"element",
"with",
"their",
"different",
"notations",
"and",
"energy",
"."
] |
train
|
https://github.com/openmicroanalysis/pyxray/blob/cae89677f00ebcc0952f94d1ab70e6b35e1a51e9/pyxray/base.py#L243-L282
|
Jaymon/prom
|
prom/cli/dump.py
|
get_modules
|
def get_modules(modulepath):
"""return all found modules at modulepath (eg, foo.bar) including modulepath module"""
m = importlib.import_module(modulepath)
mpath = m.__file__
ret = set([m])
if "__init__." in mpath.lower():
mpath = os.path.dirname(mpath)
# https://docs.python.org/2/library/pkgutil.html#pkgutil.iter_modules
for module_info in pkgutil.iter_modules([mpath]):
submodulepath = ".".join([modulepath, module_info[1]])
if module_info[2]:
# module is a package
submodules = get_modules(submodulepath)
ret.update(submodules)
else:
ret.add(importlib.import_module(submodulepath))
return ret
|
python
|
def get_modules(modulepath):
"""return all found modules at modulepath (eg, foo.bar) including modulepath module"""
m = importlib.import_module(modulepath)
mpath = m.__file__
ret = set([m])
if "__init__." in mpath.lower():
mpath = os.path.dirname(mpath)
# https://docs.python.org/2/library/pkgutil.html#pkgutil.iter_modules
for module_info in pkgutil.iter_modules([mpath]):
submodulepath = ".".join([modulepath, module_info[1]])
if module_info[2]:
# module is a package
submodules = get_modules(submodulepath)
ret.update(submodules)
else:
ret.add(importlib.import_module(submodulepath))
return ret
|
[
"def",
"get_modules",
"(",
"modulepath",
")",
":",
"m",
"=",
"importlib",
".",
"import_module",
"(",
"modulepath",
")",
"mpath",
"=",
"m",
".",
"__file__",
"ret",
"=",
"set",
"(",
"[",
"m",
"]",
")",
"if",
"\"__init__.\"",
"in",
"mpath",
".",
"lower",
"(",
")",
":",
"mpath",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"mpath",
")",
"# https://docs.python.org/2/library/pkgutil.html#pkgutil.iter_modules",
"for",
"module_info",
"in",
"pkgutil",
".",
"iter_modules",
"(",
"[",
"mpath",
"]",
")",
":",
"submodulepath",
"=",
"\".\"",
".",
"join",
"(",
"[",
"modulepath",
",",
"module_info",
"[",
"1",
"]",
"]",
")",
"if",
"module_info",
"[",
"2",
"]",
":",
"# module is a package",
"submodules",
"=",
"get_modules",
"(",
"submodulepath",
")",
"ret",
".",
"update",
"(",
"submodules",
")",
"else",
":",
"ret",
".",
"add",
"(",
"importlib",
".",
"import_module",
"(",
"submodulepath",
")",
")",
"return",
"ret"
] |
return all found modules at modulepath (eg, foo.bar) including modulepath module
|
[
"return",
"all",
"found",
"modules",
"at",
"modulepath",
"(",
"eg",
"foo",
".",
"bar",
")",
"including",
"modulepath",
"module"
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/cli/dump.py#L22-L41
|
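A self-contained variant of the recursive walk above, pointed at a stdlib package so it runs anywhere. Unlike get_modules it collects dotted names instead of imported module objects, and it uses the modern ModuleInfo attributes (.name/.ispkg) rather than tuple indexing:

import importlib
import os
import pkgutil

def walk_module_names(modulepath):
    m = importlib.import_module(modulepath)
    found = {m.__name__}
    if "__init__." in os.path.basename(m.__file__).lower():
        for info in pkgutil.iter_modules([os.path.dirname(m.__file__)]):
            sub = modulepath + "." + info.name
            found |= walk_module_names(sub) if info.ispkg else {sub}
    return found

print(sorted(walk_module_names("json")))
# ['json', 'json.decoder', 'json.encoder', 'json.scanner', 'json.tool']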
Jaymon/prom
|
prom/cli/dump.py
|
get_subclasses
|
def get_subclasses(modulepath, parent_class):
"""given a module return all the parent_class subclasses that are found in
that module and any submodules.
:param modulepath: string, a path like foo.bar.che
:param parent_class: object, the class whose children you are looking for
:returns: set, all the found child classes in modulepath of parent_class
"""
if isinstance(modulepath, ModuleType):
modules = get_modules(modulepath.__name__)
else:
modules = get_modules(modulepath)
ret = set()
for m in modules:
cs = inspect.getmembers(m, lambda v: inspect.isclass(v) and issubclass(v, parent_class))
for class_name, klass in cs:
ret.add(klass)
return ret
|
python
|
def get_subclasses(modulepath, parent_class):
"""given a module return all the parent_class subclasses that are found in
that module and any submodules.
:param modulepath: string, a path like foo.bar.che
:param parent_class: object, the class whose children you are looking for
:returns: set, all the found child classes in modulepath of parent_class
"""
if isinstance(modulepath, ModuleType):
modules = get_modules(modulepath.__name__)
else:
modules = get_modules(modulepath)
ret = set()
for m in modules:
cs = inspect.getmembers(m, lambda v: inspect.isclass(v) and issubclass(v, parent_class))
for class_name, klass in cs:
ret.add(klass)
return ret
|
[
"def",
"get_subclasses",
"(",
"modulepath",
",",
"parent_class",
")",
":",
"if",
"isinstance",
"(",
"modulepath",
",",
"ModuleType",
")",
":",
"modules",
"=",
"get_modules",
"(",
"modulepath",
".",
"__name__",
")",
"else",
":",
"modules",
"=",
"get_modules",
"(",
"modulepath",
")",
"ret",
"=",
"set",
"(",
")",
"for",
"m",
"in",
"modules",
":",
"cs",
"=",
"inspect",
".",
"getmembers",
"(",
"m",
",",
"lambda",
"v",
":",
"inspect",
".",
"isclass",
"(",
"v",
")",
"and",
"issubclass",
"(",
"v",
",",
"parent_class",
")",
")",
"for",
"class_name",
",",
"klass",
"in",
"cs",
":",
"ret",
".",
"add",
"(",
"klass",
")",
"return",
"ret"
] |
given a module return all the parent_class subclasses that are found in
that module and any submodules.
:param modulepath: string, a path like foo.bar.che
:param parent_class: object, the class whose children you are looking for
:returns: set, all the found child classes in modulepath of parent_class
|
[
"given",
"a",
"module",
"return",
"all",
"the",
"parent_class",
"subclasses",
"that",
"are",
"found",
"in",
"that",
"module",
"and",
"any",
"submodules",
"."
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/cli/dump.py#L44-L63
|
Jaymon/prom
|
prom/cli/dump.py
|
build_dump_order
|
def build_dump_order(orm_class, orm_classes):
"""pass in an array, when you encounter a ref, call this method again with the array
when something has no more refs, then it gets appended to the array and returns, each
time something gets through the list they are added, but before they are added to the
list it is checked to see if it is already in the list"""
if orm_class in orm_classes: return
for field_name, field_val in orm_class.schema.fields.items():
if field_val.is_ref():
build_dump_order(field_val.schema.orm_class, orm_classes)
if orm_class not in orm_classes:
orm_classes.append(orm_class)
|
python
|
def build_dump_order(orm_class, orm_classes):
"""pass in an array, when you encounter a ref, call this method again with the array
when something has no more refs, then it gets appended to the array and returns, each
time something gets through the list they are added, but before they are added to the
list it is checked to see if it is already in the list"""
if orm_class in orm_classes: return
for field_name, field_val in orm_class.schema.fields.items():
if field_val.is_ref():
build_dump_order(field_val.schema.orm_class, orm_classes)
if orm_class not in orm_classes:
orm_classes.append(orm_class)
|
[
"def",
"build_dump_order",
"(",
"orm_class",
",",
"orm_classes",
")",
":",
"if",
"orm_class",
"in",
"orm_classes",
":",
"return",
"for",
"field_name",
",",
"field_val",
"in",
"orm_class",
".",
"schema",
".",
"fields",
".",
"items",
"(",
")",
":",
"if",
"field_val",
".",
"is_ref",
"(",
")",
":",
"build_dump_order",
"(",
"field_val",
".",
"schema",
".",
"orm_class",
",",
"orm_classes",
")",
"if",
"orm_class",
"not",
"in",
"orm_classes",
":",
"orm_classes",
".",
"append",
"(",
"orm_class",
")"
] |
pass in an array, when you encounter a ref, call this method again with the array
when something has no more refs, then it gets appended to the array and returns, each
time something gets through the list they are added, but before they are added to the
list it is checked to see if it is already in the list
|
[
"pass",
"in",
"an",
"array",
"when",
"you",
"encounter",
"a",
"ref",
"call",
"this",
"method",
"again",
"with",
"the",
"array",
"when",
"something",
"has",
"no",
"more",
"refs",
"then",
"it",
"gets",
"appended",
"to",
"the",
"array",
"and",
"returns",
"each",
"time",
"something",
"gets",
"through",
"the",
"list",
"they",
"are",
"added",
"but",
"before",
"they",
"are",
"added",
"to",
"the",
"list",
"it",
"is",
"checked",
"to",
"see",
"if",
"it",
"is",
"already",
"in",
"the",
"listt"
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/cli/dump.py#L66-L78
|
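build_dump_order is a depth-first post-order walk over foreign-key references, so a referenced table always lands in the list before any table that points at it. The same idea on a plain dependency mapping (table names invented for the demo):

deps = {"user": [], "account": ["user"], "invoice": ["account", "user"]}

def dump_order(name, ordered):
    if name in ordered:
        return
    for ref in deps[name]:
        dump_order(ref, ordered)
    if name not in ordered:
        ordered.append(name)

ordered = []
for name in deps:
    dump_order(name, ordered)
print(ordered)  # ['user', 'account', 'invoice']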
Jaymon/prom
|
prom/cli/dump.py
|
get_orm_classes
|
def get_orm_classes(path):
"""this will return prom.Orm classes found in the given path (classpath or modulepath)"""
ret = set()
try:
m = importlib.import_module(path)
except ImportError:
# we have a classpath
m, klass = get_objects(path)
if issubclass(klass, Orm):
ret.add(klass)
else:
ret.update(get_subclasses(m, Orm))
return ret
|
python
|
def get_orm_classes(path):
"""this will return prom.Orm classes found in the given path (classpath or modulepath)"""
ret = set()
try:
m = importlib.import_module(path)
except ImportError:
# we have a classpath
m, klass = get_objects(path)
if issubclass(klass, Orm):
ret.add(klass)
else:
ret.update(get_subclasses(m, Orm))
return ret
|
[
"def",
"get_orm_classes",
"(",
"path",
")",
":",
"ret",
"=",
"set",
"(",
")",
"try",
":",
"m",
"=",
"importlib",
".",
"import_module",
"(",
"path",
")",
"except",
"ImportError",
":",
"# we have a classpath",
"m",
",",
"klass",
"=",
"get_objects",
"(",
"path",
")",
"if",
"issubclass",
"(",
"klass",
",",
"Orm",
")",
":",
"ret",
".",
"add",
"(",
"klass",
")",
"else",
":",
"ret",
".",
"update",
"(",
"get_subclasses",
"(",
"m",
",",
"Orm",
")",
")",
"return",
"ret"
] |
this will return prom.Orm classes found in the given path (classpath or modulepath)
|
[
"this",
"will",
"return",
"prom",
".",
"Orm",
"classes",
"found",
"in",
"the",
"given",
"path",
"(",
"classpath",
"or",
"modulepath",
")"
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/cli/dump.py#L81-L96
|
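The try/except around import_module is a module-or-classpath fallback: treat the string as a module path first, and only split off a trailing class name when the import fails. The same pattern standalone, against stdlib targets:

import importlib

def resolve(path):
    try:
        return importlib.import_module(path)
    except ImportError:
        modulepath, _, classname = path.rpartition(".")
        return getattr(importlib.import_module(modulepath), classname)

print(resolve("json"))              # <module 'json' ...>
print(resolve("json.JSONDecoder"))  # <class 'json.decoder.JSONDecoder'>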
Jaymon/prom
|
prom/cli/dump.py
|
main_dump
|
def main_dump(paths, directory, dry_run):
"""dump all or part of the prom data, currently only works on Postgres databases
basically just a wrapper around `dump backup` https://github.com/Jaymon/dump
"""
table_map = get_table_map(paths)
for conn_name, conn_info in table_map.items():
inter = conn_info["interface"]
conn = inter.connection_config
table_names = conn_info["table_names"]
cmd = get_base_cmd("backup", inter, directory)
cmd.extend(table_names)
if dry_run:
echo.out(" ".join(cmd))
else:
run_cmd(cmd)
|
python
|
def main_dump(paths, directory, dry_run):
"""dump all or part of the prom data, currently only works on Postgres databases
basically just a wrapper around `dump backup` https://github.com/Jaymon/dump
"""
table_map = get_table_map(paths)
for conn_name, conn_info in table_map.items():
inter = conn_info["interface"]
conn = inter.connection_config
table_names = conn_info["table_names"]
cmd = get_base_cmd("backup", inter, directory)
cmd.extend(table_names)
if dry_run:
echo.out(" ".join(cmd))
else:
run_cmd(cmd)
|
[
"def",
"main_dump",
"(",
"paths",
",",
"directory",
",",
"dry_run",
")",
":",
"table_map",
"=",
"get_table_map",
"(",
"paths",
")",
"for",
"conn_name",
",",
"conn_info",
"in",
"table_map",
".",
"items",
"(",
")",
":",
"inter",
"=",
"conn_info",
"[",
"\"interface\"",
"]",
"conn",
"=",
"inter",
".",
"connection_config",
"table_names",
"=",
"conn_info",
"[",
"\"table_names\"",
"]",
"cmd",
"=",
"get_base_cmd",
"(",
"\"backup\"",
",",
"inter",
",",
"directory",
")",
"cmd",
".",
"extend",
"(",
"table_names",
")",
"if",
"dry_run",
":",
"echo",
".",
"out",
"(",
"\" \"",
".",
"join",
"(",
"cmd",
")",
")",
"else",
":",
"run_cmd",
"(",
"cmd",
")"
] |
dump all or part of the prom data, currently only works on Postgres databases
basically just a wrapper around `dump backup` https://github.com/Jaymon/dump
|
[
"dump",
"all",
"or",
"part",
"of",
"the",
"prom",
"data",
"currently",
"only",
"works",
"on",
"Postgres",
"databases"
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/cli/dump.py#L182-L201
|
Jaymon/prom
|
prom/cli/dump.py
|
main_restore
|
def main_restore(directory, conn_name):
"""Restore your database dumped with the dump command
just a wrapper around `dump restore` https://github.com/Jaymon/dump
"""
inter = get_interface(conn_name)
conn = inter.connection_config
cmd = get_base_cmd("restore", inter, directory)
run_cmd(cmd)
|
python
|
def main_restore(directory, conn_name):
"""Restore your database dumped with the dump command
just a wrapper around `dump restore` https://github.com/Jaymon/dump
"""
inter = get_interface(conn_name)
conn = inter.connection_config
cmd = get_base_cmd("restore", inter, directory)
run_cmd(cmd)
|
[
"def",
"main_restore",
"(",
"directory",
",",
"conn_name",
")",
":",
"inter",
"=",
"get_interface",
"(",
"conn_name",
")",
"conn",
"=",
"inter",
".",
"connection_config",
"cmd",
"=",
"get_base_cmd",
"(",
"\"restore\"",
",",
"inter",
",",
"directory",
")",
"run_cmd",
"(",
"cmd",
")"
] |
Restore your database dumped with the dump command
just a wrapper around `dump restore` https://github.com/Jaymon/dump
|
[
"Restore",
"your",
"database",
"dumped",
"with",
"the",
"dump",
"command"
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/cli/dump.py#L211-L219
|
Jaymon/prom
|
prom/interface/postgres.py
|
PostgreSQL._get_fields
|
def _get_fields(self, table_name, **kwargs):
"""return all the fields for the given schema"""
ret = {}
query_str = []
query_args = ['f', table_name]
# I had to brush up on my join knowledge while writing this query
# https://en.wikipedia.org/wiki/Join_(SQL)
#
# other helpful links
# https://wiki.postgresql.org/wiki/Retrieve_primary_key_columns
# https://www.postgresql.org/docs/9.4/static/catalog-pg-attribute.html
# https://www.postgresql.org/docs/9.3/static/catalog-pg-type.html
#
# another approach
# http://dba.stackexchange.com/questions/22362/how-do-i-list-all-columns-for-a-specified-table
# http://gis.stackexchange.com/questions/94049/how-to-get-the-data-type-of-each-column-from-a-postgis-table
query_str.append('SELECT')
query_str.append(', '.join([
'a.attnum',
'a.attname',
'a.attnotnull',
't.typname',
'i.indisprimary',
#'s.conname',
#'pg_get_constraintdef(s.oid, true) as condef',
'c.relname AS confrelname',
]))
query_str.append('FROM')
query_str.append(' pg_attribute a')
query_str.append('JOIN pg_type t ON a.atttypid = t.oid')
query_str.append('LEFT JOIN pg_index i ON a.attrelid = i.indrelid')
query_str.append(' AND a.attnum = any(i.indkey)')
query_str.append('LEFT JOIN pg_constraint s ON a.attrelid = s.conrelid')
query_str.append(' AND s.contype = {} AND a.attnum = any(s.conkey)'.format(self.val_placeholder))
query_str.append('LEFT JOIN pg_class c ON s.confrelid = c.oid')
query_str.append('WHERE')
query_str.append(' a.attrelid = {}::regclass'.format(self.val_placeholder))
query_str.append(' AND a.attisdropped = False')
query_str.append(' AND a.attnum > 0')
query_str.append('ORDER BY a.attnum ASC')
query_str = os.linesep.join(query_str)
fields = self.query(query_str, *query_args, **kwargs)
pg_types = {
"float8": float,
"timestamp": datetime.datetime,
"int2": int,
"int4": int,
"int8": long,
"numeric": decimal.Decimal,
"text": str,
"bpchar": str,
"varchar": str,
"bool": bool,
"date": datetime.date,
"blob": bytearray,
}
# the rows we can set: field_type, name, field_required, min_size, max_size,
# size, unique, pk, <foreign key info>
# These keys will roughly correspond with schema.Field
for row in fields:
field = {
"name": row["attname"],
"field_type": pg_types[row["typname"]],
"field_required": row["attnotnull"],
"pk": bool(row["indisprimary"]),
}
if row["confrelname"]:
# TODO -- I can't decide which name I like
field["schema_table_name"] = row["confrelname"]
field["ref_table_name"] = row["confrelname"]
ret[field["name"]] = field
return ret
|
python
|
def _get_fields(self, table_name, **kwargs):
"""return all the fields for the given schema"""
ret = {}
query_str = []
query_args = ['f', table_name]
# I had to brush up on my join knowledge while writing this query
# https://en.wikipedia.org/wiki/Join_(SQL)
#
# other helpful links
# https://wiki.postgresql.org/wiki/Retrieve_primary_key_columns
# https://www.postgresql.org/docs/9.4/static/catalog-pg-attribute.html
# https://www.postgresql.org/docs/9.3/static/catalog-pg-type.html
#
# another approach
# http://dba.stackexchange.com/questions/22362/how-do-i-list-all-columns-for-a-specified-table
# http://gis.stackexchange.com/questions/94049/how-to-get-the-data-type-of-each-column-from-a-postgis-table
query_str.append('SELECT')
query_str.append(', '.join([
'a.attnum',
'a.attname',
'a.attnotnull',
't.typname',
'i.indisprimary',
#'s.conname',
#'pg_get_constraintdef(s.oid, true) as condef',
'c.relname AS confrelname',
]))
query_str.append('FROM')
query_str.append(' pg_attribute a')
query_str.append('JOIN pg_type t ON a.atttypid = t.oid')
query_str.append('LEFT JOIN pg_index i ON a.attrelid = i.indrelid')
query_str.append(' AND a.attnum = any(i.indkey)')
query_str.append('LEFT JOIN pg_constraint s ON a.attrelid = s.conrelid')
query_str.append(' AND s.contype = {} AND a.attnum = any(s.conkey)'.format(self.val_placeholder))
query_str.append('LEFT JOIN pg_class c ON s.confrelid = c.oid')
query_str.append('WHERE')
query_str.append(' a.attrelid = {}::regclass'.format(self.val_placeholder))
query_str.append(' AND a.attisdropped = False')
query_str.append(' AND a.attnum > 0')
query_str.append('ORDER BY a.attnum ASC')
query_str = os.linesep.join(query_str)
fields = self.query(query_str, *query_args, **kwargs)
pg_types = {
"float8": float,
"timestamp": datetime.datetime,
"int2": int,
"int4": int,
"int8": long,
"numeric": decimal.Decimal,
"text": str,
"bpchar": str,
"varchar": str,
"bool": bool,
"date": datetime.date,
"blob": bytearray,
}
# the rows we can set: field_type, name, field_required, min_size, max_size,
# size, unique, pk, <foreign key info>
# These keys will roughly correspond with schema.Field
for row in fields:
field = {
"name": row["attname"],
"field_type": pg_types[row["typname"]],
"field_required": row["attnotnull"],
"pk": bool(row["indisprimary"]),
}
if row["confrelname"]:
# TODO -- I can't decide which name I like
field["schema_table_name"] = row["confrelname"]
field["ref_table_name"] = row["confrelname"]
ret[field["name"]] = field
return ret
|
[
"def",
"_get_fields",
"(",
"self",
",",
"table_name",
",",
"*",
"*",
"kwargs",
")",
":",
"ret",
"=",
"{",
"}",
"query_str",
"=",
"[",
"]",
"query_args",
"=",
"[",
"'f'",
",",
"table_name",
"]",
"# I had to brush up on my join knowledge while writing this query",
"# https://en.wikipedia.org/wiki/Join_(SQL)",
"#",
"# other helpful links",
"# https://wiki.postgresql.org/wiki/Retrieve_primary_key_columns",
"# https://www.postgresql.org/docs/9.4/static/catalog-pg-attribute.html",
"# https://www.postgresql.org/docs/9.3/static/catalog-pg-type.html",
"# ",
"# another approach",
"# http://dba.stackexchange.com/questions/22362/how-do-i-list-all-columns-for-a-specified-table",
"# http://gis.stackexchange.com/questions/94049/how-to-get-the-data-type-of-each-column-from-a-postgis-table",
"query_str",
".",
"append",
"(",
"'SELECT'",
")",
"query_str",
".",
"append",
"(",
"', '",
".",
"join",
"(",
"[",
"'a.attnum'",
",",
"'a.attname'",
",",
"'a.attnotnull'",
",",
"'t.typname'",
",",
"'i.indisprimary'",
",",
"#'s.conname',",
"#'pg_get_constraintdef(s.oid, true) as condef',",
"'c.relname AS confrelname'",
",",
"]",
")",
")",
"query_str",
".",
"append",
"(",
"'FROM'",
")",
"query_str",
".",
"append",
"(",
"' pg_attribute a'",
")",
"query_str",
".",
"append",
"(",
"'JOIN pg_type t ON a.atttypid = t.oid'",
")",
"query_str",
".",
"append",
"(",
"'LEFT JOIN pg_index i ON a.attrelid = i.indrelid'",
")",
"query_str",
".",
"append",
"(",
"' AND a.attnum = any(i.indkey)'",
")",
"query_str",
".",
"append",
"(",
"'LEFT JOIN pg_constraint s ON a.attrelid = s.conrelid'",
")",
"query_str",
".",
"append",
"(",
"' AND s.contype = {} AND a.attnum = any(s.conkey)'",
".",
"format",
"(",
"self",
".",
"val_placeholder",
")",
")",
"query_str",
".",
"append",
"(",
"'LEFT JOIN pg_class c ON s.confrelid = c.oid'",
")",
"query_str",
".",
"append",
"(",
"'WHERE'",
")",
"query_str",
".",
"append",
"(",
"' a.attrelid = {}::regclass'",
".",
"format",
"(",
"self",
".",
"val_placeholder",
")",
")",
"query_str",
".",
"append",
"(",
"' AND a.attisdropped = False'",
")",
"query_str",
".",
"append",
"(",
"' AND a.attnum > 0'",
")",
"query_str",
".",
"append",
"(",
"'ORDER BY a.attnum ASC'",
")",
"query_str",
"=",
"os",
".",
"linesep",
".",
"join",
"(",
"query_str",
")",
"fields",
"=",
"self",
".",
"query",
"(",
"query_str",
",",
"*",
"query_args",
",",
"*",
"*",
"kwargs",
")",
"pg_types",
"=",
"{",
"\"float8\"",
":",
"float",
",",
"\"timestamp\"",
":",
"datetime",
".",
"datetime",
",",
"\"int2\"",
":",
"int",
",",
"\"int4\"",
":",
"int",
",",
"\"int8\"",
":",
"long",
",",
"\"numeric\"",
":",
"decimal",
".",
"Decimal",
",",
"\"text\"",
":",
"str",
",",
"\"bpchar\"",
":",
"str",
",",
"\"varchar\"",
":",
"str",
",",
"\"bool\"",
":",
"bool",
",",
"\"date\"",
":",
"datetime",
".",
"date",
",",
"\"blob\"",
":",
"bytearray",
",",
"}",
"# the rows we can set: field_type, name, field_required, min_size, max_size,",
"# size, unique, pk, <foreign key info>",
"# These keys will roughly correspond with schema.Field",
"for",
"row",
"in",
"fields",
":",
"field",
"=",
"{",
"\"name\"",
":",
"row",
"[",
"\"attname\"",
"]",
",",
"\"field_type\"",
":",
"pg_types",
"[",
"row",
"[",
"\"typname\"",
"]",
"]",
",",
"\"field_required\"",
":",
"row",
"[",
"\"attnotnull\"",
"]",
",",
"\"pk\"",
":",
"bool",
"(",
"row",
"[",
"\"indisprimary\"",
"]",
")",
",",
"}",
"if",
"row",
"[",
"\"confrelname\"",
"]",
":",
"# TODO -- I can't decide which name I like",
"field",
"[",
"\"schema_table_name\"",
"]",
"=",
"row",
"[",
"\"confrelname\"",
"]",
"field",
"[",
"\"ref_table_name\"",
"]",
"=",
"row",
"[",
"\"confrelname\"",
"]",
"ret",
"[",
"field",
"[",
"\"name\"",
"]",
"]",
"=",
"field",
"return",
"ret"
] |
return all the fields for the given schema
|
[
"return",
"all",
"the",
"fields",
"for",
"the",
"given",
"schema"
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/postgres.py#L184-L262
|
Jaymon/prom
|
prom/interface/postgres.py
|
PostgreSQL._get_indexes
|
def _get_indexes(self, schema, **kwargs):
"""return all the indexes for the given schema"""
ret = {}
query_str = []
query_str.append('SELECT')
query_str.append(' tbl.relname AS table_name, i.relname AS index_name, a.attname AS field_name,')
query_str.append(' ix.indkey AS index_order, a.attnum AS field_num')
query_str.append('FROM')
query_str.append(' pg_class tbl, pg_class i, pg_index ix, pg_attribute a')
query_str.append('WHERE')
query_str.append(' tbl.oid = ix.indrelid AND i.oid = ix.indexrelid AND a.attrelid = tbl.oid')
query_str.append(' AND a.attnum = ANY(ix.indkey) AND tbl.relkind = %s AND tbl.relname = %s')
query_str.append('ORDER BY')
query_str.append(' tbl.relname, i.relname')
query_str = os.linesep.join(query_str)
indexes = self.query(query_str, 'r', str(schema), **kwargs)
# massage the data into more readable {index_name: fields} format
for idict in indexes:
if idict['index_name'] not in ret:
ret[idict['index_name']] = list(map(int, idict['index_order'].split(' ')))
i = ret[idict['index_name']].index(idict['field_num'])
ret[idict['index_name']][i] = idict['field_name']
return ret
|
python
|
def _get_indexes(self, schema, **kwargs):
"""return all the indexes for the given schema"""
ret = {}
query_str = []
query_str.append('SELECT')
query_str.append(' tbl.relname AS table_name, i.relname AS index_name, a.attname AS field_name,')
query_str.append(' ix.indkey AS index_order, a.attnum AS field_num')
query_str.append('FROM')
query_str.append(' pg_class tbl, pg_class i, pg_index ix, pg_attribute a')
query_str.append('WHERE')
query_str.append(' tbl.oid = ix.indrelid AND i.oid = ix.indexrelid AND a.attrelid = tbl.oid')
query_str.append(' AND a.attnum = ANY(ix.indkey) AND tbl.relkind = %s AND tbl.relname = %s')
query_str.append('ORDER BY')
query_str.append(' tbl.relname, i.relname')
query_str = os.linesep.join(query_str)
indexes = self.query(query_str, 'r', str(schema), **kwargs)
# massage the data into more readable {index_name: fields} format
for idict in indexes:
if idict['index_name'] not in ret:
ret[idict['index_name']] = list(map(int, idict['index_order'].split(' ')))
i = ret[idict['index_name']].index(idict['field_num'])
ret[idict['index_name']][i] = idict['field_name']
return ret
|
[
"def",
"_get_indexes",
"(",
"self",
",",
"schema",
",",
"*",
"*",
"kwargs",
")",
":",
"ret",
"=",
"{",
"}",
"query_str",
"=",
"[",
"]",
"query_str",
".",
"append",
"(",
"'SELECT'",
")",
"query_str",
".",
"append",
"(",
"' tbl.relname AS table_name, i.relname AS index_name, a.attname AS field_name,'",
")",
"query_str",
".",
"append",
"(",
"' ix.indkey AS index_order, a.attnum AS field_num'",
")",
"query_str",
".",
"append",
"(",
"'FROM'",
")",
"query_str",
".",
"append",
"(",
"' pg_class tbl, pg_class i, pg_index ix, pg_attribute a'",
")",
"query_str",
".",
"append",
"(",
"'WHERE'",
")",
"query_str",
".",
"append",
"(",
"' tbl.oid = ix.indrelid AND i.oid = ix.indexrelid AND a.attrelid = tbl.oid'",
")",
"query_str",
".",
"append",
"(",
"' AND a.attnum = ANY(ix.indkey) AND tbl.relkind = %s AND tbl.relname = %s'",
")",
"query_str",
".",
"append",
"(",
"'ORDER BY'",
")",
"query_str",
".",
"append",
"(",
"' tbl.relname, i.relname'",
")",
"query_str",
"=",
"os",
".",
"linesep",
".",
"join",
"(",
"query_str",
")",
"indexes",
"=",
"self",
".",
"query",
"(",
"query_str",
",",
"'r'",
",",
"str",
"(",
"schema",
")",
",",
"*",
"*",
"kwargs",
")",
"# massage the data into more readable {index_name: fields} format",
"for",
"idict",
"in",
"indexes",
":",
"if",
"idict",
"[",
"'index_name'",
"]",
"not",
"in",
"ret",
":",
"ret",
"[",
"idict",
"[",
"'index_name'",
"]",
"]",
"=",
"list",
"(",
"map",
"(",
"int",
",",
"idict",
"[",
"'index_order'",
"]",
".",
"split",
"(",
"' '",
")",
")",
")",
"i",
"=",
"ret",
"[",
"idict",
"[",
"'index_name'",
"]",
"]",
".",
"index",
"(",
"idict",
"[",
"'field_num'",
"]",
")",
"ret",
"[",
"idict",
"[",
"'index_name'",
"]",
"]",
"[",
"i",
"]",
"=",
"idict",
"[",
"'field_name'",
"]",
"return",
"ret"
] |
return all the indexes for the given schema
|
[
"return",
"all",
"the",
"indexes",
"for",
"the",
"given",
"schema"
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/postgres.py#L264-L290
|
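The "massage" step at the end exploits the fact that pg_index.indkey is a space-separated list of column numbers in index order: each row looks up its own column number in that list and overwrites it with the column name. Standalone, with rows invented for the illustration:

rows = [
    {"index_name": "foo_idx", "index_order": "2 1", "field_num": 1, "field_name": "bar"},
    {"index_name": "foo_idx", "index_order": "2 1", "field_num": 2, "field_name": "che"},
]
indexes = {}
for idict in rows:
    if idict["index_name"] not in indexes:
        indexes[idict["index_name"]] = list(map(int, idict["index_order"].split(" ")))
    i = indexes[idict["index_name"]].index(idict["field_num"])
    indexes[idict["index_name"]][i] = idict["field_name"]
print(indexes)  # {'foo_idx': ['che', 'bar']}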
Jaymon/prom
|
prom/interface/postgres.py
|
PostgreSQL._set_index
|
def _set_index(self, schema, name, fields, **index_options):
"""
NOTE -- we set the index name using <table_name>_<name> format since indexes have to have
a globally unique name in postgres
http://www.postgresql.org/docs/9.1/static/sql-createindex.html
"""
index_fields = []
for field_name in fields:
field = schema.fields[field_name]
if issubclass(field.type, basestring):
if field.options.get('ignore_case', False):
field_name = 'UPPER({})'.format(self._normalize_name(field_name))
index_fields.append(field_name)
query_str = 'CREATE {}INDEX {} ON {} USING BTREE ({})'.format(
'UNIQUE ' if index_options.get('unique', False) else '',
self._normalize_name("{}_{}".format(schema, name)),
self._normalize_table_name(schema),
', '.join(index_fields)
)
return self.query(query_str, ignore_result=True, **index_options)
|
python
|
def _set_index(self, schema, name, fields, **index_options):
"""
NOTE -- we set the index name using <table_name>_<name> format since indexes have to have
a globally unique name in postgres
http://www.postgresql.org/docs/9.1/static/sql-createindex.html
"""
index_fields = []
for field_name in fields:
field = schema.fields[field_name]
if issubclass(field.type, basestring):
if field.options.get('ignore_case', False):
field_name = 'UPPER({})'.format(self._normalize_name(field_name))
index_fields.append(field_name)
query_str = 'CREATE {}INDEX {} ON {} USING BTREE ({})'.format(
'UNIQUE ' if index_options.get('unique', False) else '',
self._normalize_name("{}_{}".format(schema, name)),
self._normalize_table_name(schema),
', '.join(index_fields)
)
return self.query(query_str, ignore_result=True, **index_options)
|
[
"def",
"_set_index",
"(",
"self",
",",
"schema",
",",
"name",
",",
"fields",
",",
"*",
"*",
"index_options",
")",
":",
"index_fields",
"=",
"[",
"]",
"for",
"field_name",
"in",
"fields",
":",
"field",
"=",
"schema",
".",
"fields",
"[",
"field_name",
"]",
"if",
"issubclass",
"(",
"field",
".",
"type",
",",
"basestring",
")",
":",
"if",
"field",
".",
"options",
".",
"get",
"(",
"'ignore_case'",
",",
"False",
")",
":",
"field_name",
"=",
"'UPPER({})'",
".",
"format",
"(",
"self",
".",
"_normalize_name",
"(",
"field_name",
")",
")",
"index_fields",
".",
"append",
"(",
"field_name",
")",
"query_str",
"=",
"'CREATE {}INDEX {} ON {} USING BTREE ({})'",
".",
"format",
"(",
"'UNIQUE '",
"if",
"index_options",
".",
"get",
"(",
"'unique'",
",",
"False",
")",
"else",
"''",
",",
"self",
".",
"_normalize_name",
"(",
"\"{}_{}\"",
".",
"format",
"(",
"schema",
",",
"name",
")",
")",
",",
"self",
".",
"_normalize_table_name",
"(",
"schema",
")",
",",
"', '",
".",
"join",
"(",
"index_fields",
")",
")",
"return",
"self",
".",
"query",
"(",
"query_str",
",",
"ignore_result",
"=",
"True",
",",
"*",
"*",
"index_options",
")"
] |
NOTE -- we set the index name using <table_name>_<name> format since indexes have to have
a globally unique name in postgres
http://www.postgresql.org/docs/9.1/static/sql-createindex.html
|
[
"NOTE",
"--",
"we",
"set",
"the",
"index",
"name",
"using",
"<table_name",
">",
"_<name",
">",
"format",
"since",
"indexes",
"have",
"to",
"have",
"a",
"globally",
"unique",
"name",
"in",
"postgres"
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/postgres.py#L292-L314
|
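What the assembled statement looks like for a unique index on a case-insensitive string field, assuming _normalize_name double-quotes identifiers (an assumption; only the string formatting from the method above is reproduced, with made-up table/field names):

query_str = 'CREATE {}INDEX {} ON {} USING BTREE ({})'.format(
    'UNIQUE ',          # index_options.get('unique') was True
    '"foo_bar_index"',  # <table_name>_<name>, globally unique in postgres
    '"foo"',
    'UPPER("bar")',     # the field had ignore_case set
)
print(query_str)
# CREATE UNIQUE INDEX "foo_bar_index" ON "foo" USING BTREE (UPPER("bar"))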
IdentityPython/oidcendpoint
|
src/oidcendpoint/oidc/token.py
|
AccessToken.client_authentication
|
def client_authentication(self, request, auth=None, **kwargs):
"""
Deal with client authentication
:param request: The refresh access token request
:param auth: Client authentication information
:param kwargs: Extra keyword arguments
:return: dictionary containing client id, client authentication method
and possibly access token.
"""
try:
auth_info = verify_client(self.endpoint_context, request, auth)
except Exception as err:
msg = "Failed to verify client due to: {}".format(err)
logger.error(msg)
return self.error_cls(error="unauthorized_client",
error_description=msg)
else:
if 'client_id' not in auth_info:
logger.error('No client_id, authentication failed')
return self.error_cls(error="unauthorized_client",
error_description='unknown client')
return auth_info
|
python
|
def client_authentication(self, request, auth=None, **kwargs):
"""
Deal with client authentication
:param request: The refresh access token request
:param auth: Client authentication information
:param kwargs: Extra keyword arguments
:return: dictionary containing client id, client authentication method
and possibly access token.
"""
try:
auth_info = verify_client(self.endpoint_context, request, auth)
except Exception as err:
msg = "Failed to verify client due to: {}".format(err)
logger.error(msg)
return self.error_cls(error="unauthorized_client",
error_description=msg)
else:
if 'client_id' not in auth_info:
logger.error('No client_id, authentication failed')
return self.error_cls(error="unauthorized_client",
error_description='unknown client')
return auth_info
|
[
"def",
"client_authentication",
"(",
"self",
",",
"request",
",",
"auth",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"auth_info",
"=",
"verify_client",
"(",
"self",
".",
"endpoint_context",
",",
"request",
",",
"auth",
")",
"except",
"Exception",
"as",
"err",
":",
"msg",
"=",
"\"Failed to verify client due to: {}\"",
".",
"format",
"(",
"err",
")",
"logger",
".",
"error",
"(",
"msg",
")",
"return",
"self",
".",
"error_cls",
"(",
"error",
"=",
"\"unauthorized_client\"",
",",
"error_description",
"=",
"msg",
")",
"else",
":",
"if",
"'client_id'",
"not",
"in",
"auth_info",
":",
"logger",
".",
"error",
"(",
"'No client_id, authentication failed'",
")",
"return",
"self",
".",
"error_cls",
"(",
"error",
"=",
"\"unauthorized_client\"",
",",
"error_description",
"=",
"'unknown client'",
")",
"return",
"auth_info"
] |
Deal with client authentication
:param request: The refresh access token request
:param auth: Client authentication information
:param kwargs: Extra keyword arguments
:return: dictionary containing client id, client authentication method
and possibly access token.
|
[
"Deal",
"with",
"client",
"authentication"
] |
train
|
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/oidc/token.py#L111-L135
|
IdentityPython/oidcendpoint
|
src/oidcendpoint/oidc/token.py
|
AccessToken._post_parse_request
|
def _post_parse_request(self, request, client_id='', **kwargs):
"""
This is where clients come to get their access tokens
:param request: The request
:param authn: Authentication info, comes from HTTP header
:returns:
"""
if 'state' in request:
try:
sinfo = self.endpoint_context.sdb[request['code']]
except KeyError:
logger.error('Code not present in SessionDB')
return self.error_cls(error="unauthorized_client")
else:
state = sinfo['authn_req']['state']
if state != request['state']:
logger.error('State value mismatch')
return self.error_cls(error="unauthorized_client")
if "client_id" not in request: # Optional for access token request
request["client_id"] = client_id
logger.debug("%s: %s" % (request.__class__.__name__, sanitize(request)))
return request
|
python
|
def _post_parse_request(self, request, client_id='', **kwargs):
"""
This is where clients come to get their access tokens
:param request: The request
:param authn: Authentication info, comes from HTTP header
:returns:
"""
if 'state' in request:
try:
sinfo = self.endpoint_context.sdb[request['code']]
except KeyError:
logger.error('Code not present in SessionDB')
return self.error_cls(error="unauthorized_client")
else:
state = sinfo['authn_req']['state']
if state != request['state']:
logger.error('State value mismatch')
return self.error_cls(error="unauthorized_client")
if "client_id" not in request: # Optional for access token request
request["client_id"] = client_id
logger.debug("%s: %s" % (request.__class__.__name__, sanitize(request)))
return request
|
[
"def",
"_post_parse_request",
"(",
"self",
",",
"request",
",",
"client_id",
"=",
"''",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"'state'",
"in",
"request",
":",
"try",
":",
"sinfo",
"=",
"self",
".",
"endpoint_context",
".",
"sdb",
"[",
"request",
"[",
"'code'",
"]",
"]",
"except",
"KeyError",
":",
"logger",
".",
"error",
"(",
"'Code not present in SessionDB'",
")",
"return",
"self",
".",
"error_cls",
"(",
"error",
"=",
"\"unauthorized_client\"",
")",
"else",
":",
"state",
"=",
"sinfo",
"[",
"'authn_req'",
"]",
"[",
"'state'",
"]",
"if",
"state",
"!=",
"request",
"[",
"'state'",
"]",
":",
"logger",
".",
"error",
"(",
"'State value mismatch'",
")",
"return",
"self",
".",
"error_cls",
"(",
"error",
"=",
"\"unauthorized_client\"",
")",
"if",
"\"client_id\"",
"not",
"in",
"request",
":",
"# Optional for access token request",
"request",
"[",
"\"client_id\"",
"]",
"=",
"client_id",
"logger",
".",
"debug",
"(",
"\"%s: %s\"",
"%",
"(",
"request",
".",
"__class__",
".",
"__name__",
",",
"sanitize",
"(",
"request",
")",
")",
")",
"return",
"request"
] |
This is where clients come to get their access tokens
:param request: The request
:param authn: Authentication info, comes from HTTP header
:returns:
|
[
"This",
"is",
"where",
"clients",
"come",
"to",
"get",
"their",
"access",
"tokens"
] |
train
|
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/oidc/token.py#L137-L164
|
BreakingBytes/simkit
|
simkit/core/outputs.py
|
OutputRegistry.register
|
def register(self, new_outputs, *args, **kwargs):
"""
Register outputs and metadata.
* ``initial_value`` - used in dynamic calculations
* ``size`` - number of elements per timestep
* ``uncertainty`` - in percent of nominal value
* ``variance`` - dictionary of covariances, diagonal is square of
uncertainties, no units
* ``jacobian`` - dictionary of sensitivities dxi/dfj
* ``isconstant`` - ``True`` if constant, ``False`` if periodic
* ``isproperty`` - ``True`` if output stays at last value during
thresholds, ``False`` if reverts to initial value
* ``timeseries`` - name of corresponding time series output, ``None`` if
no time series
* ``output_source`` - name
:param new_outputs: new outputs to register.
"""
kwargs.update(zip(self.meta_names, args))
# call super method
super(OutputRegistry, self).register(new_outputs, **kwargs)
|
python
|
def register(self, new_outputs, *args, **kwargs):
"""
Register outputs and metadata.
* ``initial_value`` - used in dynamic calculations
* ``size`` - number of elements per timestep
* ``uncertainty`` - in percent of nominal value
* ``variance`` - dictionary of covariances, diagonal is square of
uncertainties, no units
* ``jacobian`` - dictionary of sensitivities dxi/dfj
* ``isconstant`` - ``True`` if constant, ``False`` if periodic
* ``isproperty`` - ``True`` if output stays at last value during
thresholds, ``False`` if reverts to initial value
* ``timeseries`` - name of corresponding time series output, ``None`` if
no time series
* ``output_source`` - name
:param new_outputs: new outputs to register.
"""
kwargs.update(zip(self.meta_names, args))
# call super method
super(OutputRegistry, self).register(new_outputs, **kwargs)
|
[
"def",
"register",
"(",
"self",
",",
"new_outputs",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
".",
"update",
"(",
"zip",
"(",
"self",
".",
"meta_names",
",",
"args",
")",
")",
"# call super method",
"super",
"(",
"OutputRegistry",
",",
"self",
")",
".",
"register",
"(",
"new_outputs",
",",
"*",
"*",
"kwargs",
")"
] |
Register outputs and metadata.
* ``initial_value`` - used in dynamic calculations
* ``size`` - number of elements per timestep
* ``uncertainty`` - in percent of nominal value
* ``variance`` - dictionary of covariances, diagonal is square of
uncertainties, no units
* ``jacobian`` - dictionary of sensitivities dxi/dfj
* ``isconstant`` - ``True`` if constant, ``False`` if periodic
* ``isproperty`` - ``True`` if output stays at last value during
thresholds, ``False`` if reverts to initial value
* ``timeseries`` - name of corresponding time series output, ``None`` if
no time series
* ``output_source`` - name
:param new_outputs: new outputs to register.
|
[
"Register",
"outputs",
"and",
"metadata",
"."
] |
train
|
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/outputs.py#L31-L52
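A standalone sketch of the zip-based mapping used in register above; the meta_names tuple and the values are made up for illustration:

meta_names = ('initial_value', 'size', 'uncertainty')  # hypothetical registry meta names
args = ([0.0], 1)                                      # positional metadata
kwargs = {'uncertainty': {'x': 0.01}}                  # keyword metadata
# kwargs.update(zip(meta_names, args)) maps each positional argument to the
# metadata name at the same index, so positional and keyword forms are equivalent:
kwargs.update(zip(meta_names, args))
print(kwargs)  # {'uncertainty': {'x': 0.01}, 'initial_value': [0.0], 'size': 1}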
|
guaix-ucm/pyemir
|
emirdrp/recipes/image/dither.py
|
FullDitheredImagesRecipe.initial_classification
|
def initial_classification(self, obresult, target_is_sky=False):
"""Classify input frames, """
# lists of targets and sky frames
with obresult.frames[0].open() as baseimg:
# Initial checks
has_bpm_ext = 'BPM' in baseimg
self.logger.debug('images have BPM extension: %s', has_bpm_ext)
images_info = []
for f in obresult.frames:
with f.open() as img:
# Getting some metadata from FITS header
hdr = img[0].header
iinfo = ImageInfo(f)
finfo = {}
iinfo.metadata = finfo
finfo['uuid'] = hdr['UUID']
finfo['exposure'] = hdr['EXPTIME']
# frame.baseshape = get_image_shape(hdr)
finfo['airmass'] = hdr['airmass']
finfo['mjd'] = hdr['tstamp']
iinfo.label = 'result_image_{}'.format(finfo['uuid'])
iinfo.mask = nfcom.Extension("BPM")
# Insert pixel offsets between frames
iinfo.objmask_data = None
iinfo.valid_target = False
iinfo.valid_sky = False
# FIXME: hardcode itype for the moment
iinfo.itype = 'TARGET'
if iinfo.itype == 'TARGET':
iinfo.valid_target = True
#targetframes.append(iinfo)
if target_is_sky:
iinfo.valid_sky = True
#skyframes.append(iinfo)
if iinfo.itype == 'SKY':
iinfo.valid_sky = True
#skyframes.append(iinfo)
images_info.append(iinfo)
return images_info
|
python
|
def initial_classification(self, obresult, target_is_sky=False):
"""Classify input frames, """
# lists of targets and sky frames
with obresult.frames[0].open() as baseimg:
# Initial checks
has_bpm_ext = 'BPM' in baseimg
self.logger.debug('images have BPM extension: %s', has_bpm_ext)
images_info = []
for f in obresult.frames:
with f.open() as img:
# Getting some metadata from FITS header
hdr = img[0].header
iinfo = ImageInfo(f)
finfo = {}
iinfo.metadata = finfo
finfo['uuid'] = hdr['UUID']
finfo['exposure'] = hdr['EXPTIME']
# frame.baseshape = get_image_shape(hdr)
finfo['airmass'] = hdr['airmass']
finfo['mjd'] = hdr['tstamp']
iinfo.label = 'result_image_{}'.format(finfo['uuid'])
iinfo.mask = nfcom.Extension("BPM")
# Insert pixel offsets between frames
iinfo.objmask_data = None
iinfo.valid_target = False
iinfo.valid_sky = False
# FIXME: hardcode itype for the moment
iinfo.itype = 'TARGET'
if iinfo.itype == 'TARGET':
iinfo.valid_target = True
#targetframes.append(iinfo)
if target_is_sky:
iinfo.valid_sky = True
#skyframes.append(iinfo)
if iinfo.itype == 'SKY':
iinfo.valid_sky = True
#skyframes.append(iinfo)
images_info.append(iinfo)
return images_info
|
[
"def",
"initial_classification",
"(",
"self",
",",
"obresult",
",",
"target_is_sky",
"=",
"False",
")",
":",
"# lists of targets and sky frames",
"with",
"obresult",
".",
"frames",
"[",
"0",
"]",
".",
"open",
"(",
")",
"as",
"baseimg",
":",
"# Initial checks",
"has_bpm_ext",
"=",
"'BPM'",
"in",
"baseimg",
"self",
".",
"logger",
".",
"debug",
"(",
"'images have BPM extension: %s'",
",",
"has_bpm_ext",
")",
"images_info",
"=",
"[",
"]",
"for",
"f",
"in",
"obresult",
".",
"frames",
":",
"with",
"f",
".",
"open",
"(",
")",
"as",
"img",
":",
"# Getting some metadata from FITS header",
"hdr",
"=",
"img",
"[",
"0",
"]",
".",
"header",
"iinfo",
"=",
"ImageInfo",
"(",
"f",
")",
"finfo",
"=",
"{",
"}",
"iinfo",
".",
"metadata",
"=",
"finfo",
"finfo",
"[",
"'uuid'",
"]",
"=",
"hdr",
"[",
"'UUID'",
"]",
"finfo",
"[",
"'exposure'",
"]",
"=",
"hdr",
"[",
"'EXPTIME'",
"]",
"# frame.baseshape = get_image_shape(hdr)",
"finfo",
"[",
"'airmass'",
"]",
"=",
"hdr",
"[",
"'airmass'",
"]",
"finfo",
"[",
"'mjd'",
"]",
"=",
"hdr",
"[",
"'tstamp'",
"]",
"iinfo",
".",
"label",
"=",
"'result_image_{}'",
".",
"format",
"(",
"finfo",
"[",
"'uuid'",
"]",
")",
"iinfo",
".",
"mask",
"=",
"nfcom",
".",
"Extension",
"(",
"\"BPM\"",
")",
"# Insert pixel offsets between frames",
"iinfo",
".",
"objmask_data",
"=",
"None",
"iinfo",
".",
"valid_target",
"=",
"False",
"iinfo",
".",
"valid_sky",
"=",
"False",
"# FIXME: hardcode itype for the moment",
"iinfo",
".",
"itype",
"=",
"'TARGET'",
"if",
"iinfo",
".",
"itype",
"==",
"'TARGET'",
":",
"iinfo",
".",
"valid_target",
"=",
"True",
"#targetframes.append(iinfo)",
"if",
"target_is_sky",
":",
"iinfo",
".",
"valid_sky",
"=",
"True",
"#skyframes.append(iinfo)",
"if",
"iinfo",
".",
"itype",
"==",
"'SKY'",
":",
"iinfo",
".",
"valid_sky",
"=",
"True",
"#skyframes.append(iinfo)",
"images_info",
".",
"append",
"(",
"iinfo",
")",
"return",
"images_info"
] |
Classify input frames.
|
[
"Classify",
"input",
"frames"
] |
train
|
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/recipes/image/dither.py#L427-L473
|
guaix-ucm/pyemir
|
emirdrp/instrument/csuconf.py
|
read_csu_2
|
def read_csu_2(barmodel, hdr):
"""Read CSU information and slits from header"""
conf = CSUConf(barmodel)
conf.conf_f = hdr.get('CSUCONFF', 'UNKNOWN')
# Read CSUPOS and set position in model
# The bars
for idx in conf.bars:
key = "CSUP{}".format(idx)
# UNIT of CSUPOS?
# who knows
# set CSUPOS for BAR
# Using the model, this sets the X,Y coordinate
conf.bars[idx].csupos = hdr[key]
# print('read_csu {}, position is {}'.format(idx, conf.bars[idx].current_pos))
# Slits. We don't have keywords to fuse slitlets into larger logical slits
# so there is one slitlet for each pair of bars
for idx in range(1, EMIR_NBARS + 1):
l_ids = [idx]
r_ids = [idx + EMIR_NBARS]
lbars = {lid: conf.bars[lid] for lid in l_ids}
rbars = {rid: conf.bars[rid] for rid in r_ids}
this = LogicalSlit(idx, lbars=lbars, rbars=rbars)
# References from header
try:
slit_t = hdr["SLIFL%d" % idx]
this.target_type = TargetType(slit_t)
except KeyError as err:
# print('warning', err)
this.target_type = TargetType.UNKNOWN
xref = hdr.get("XRSLI%d" % this.idx, -100) - 1
yref = hdr.get("YRSLI%d" % this.idx, -100) - 1
this.target_coordinates = (xref, yref)
xref = hdr.get("XVSLI%d" % this.idx, -100) - 1
yref = hdr.get("YVSLI%d" % this.idx, -100) - 1
this.target_coordinates_v = (xref, yref)
conf.slits[idx] = this
return conf
|
python
|
def read_csu_2(barmodel, hdr):
"""Read CSU information and slits from header"""
conf = CSUConf(barmodel)
conf.conf_f = hdr.get('CSUCONFF', 'UNKNOWN')
# Read CSUPOS and set position in model
# The bars
for idx in conf.bars:
key = "CSUP{}".format(idx)
# UNIT of CSUPOS?
# who knows
# set CSUPOS for BAR
# Using the model, this sets the X,Y coordinate
conf.bars[idx].csupos = hdr[key]
# print('read_csu {}, position is {}'.format(idx, conf.bars[idx].current_pos))
# Slits. We don't have keywords to fuse slitlets into larger logical slits
# so there is one slitlet for each pair of bars
for idx in range(1, EMIR_NBARS + 1):
l_ids = [idx]
r_ids = [idx + EMIR_NBARS]
lbars = {lid: conf.bars[lid] for lid in l_ids}
rbars = {rid: conf.bars[rid] for rid in r_ids}
this = LogicalSlit(idx, lbars=lbars, rbars=rbars)
# References from header
try:
slit_t = hdr["SLIFL%d" % idx]
this.target_type = TargetType(slit_t)
except KeyError as err:
# print('warning', err)
this.target_type = TargetType.UNKNOWN
xref = hdr.get("XRSLI%d" % this.idx, -100) - 1
yref = hdr.get("YRSLI%d" % this.idx, -100) - 1
this.target_coordinates = (xref, yref)
xref = hdr.get("XVSLI%d" % this.idx, -100) - 1
yref = hdr.get("YVSLI%d" % this.idx, -100) - 1
this.target_coordinates_v = (xref, yref)
conf.slits[idx] = this
return conf
|
[
"def",
"read_csu_2",
"(",
"barmodel",
",",
"hdr",
")",
":",
"conf",
"=",
"CSUConf",
"(",
"barmodel",
")",
"conf",
".",
"conf_f",
"=",
"hdr",
".",
"get",
"(",
"'CSUCONFF'",
",",
"'UNKNOWN'",
")",
"# Read CSUPOS and set position in model",
"# The bars",
"for",
"idx",
"in",
"conf",
".",
"bars",
":",
"key",
"=",
"\"CSUP{}\"",
".",
"format",
"(",
"idx",
")",
"# UNIT of CSUPOS?",
"# who knows",
"# set CSUPOS for BAR",
"# Using the model, this sets the X,Y coordinate",
"conf",
".",
"bars",
"[",
"idx",
"]",
".",
"csupos",
"=",
"hdr",
"[",
"key",
"]",
"# print('read_csu {}, position is {}'.format(idx, conf.bars[idx].current_pos))",
"# Slits. We dont have keywords to fuse slitlets into larger logical slits",
"# so there is one slitslet for each pair of bars",
"for",
"idx",
"in",
"range",
"(",
"1",
",",
"EMIR_NBARS",
"+",
"1",
")",
":",
"l_ids",
"=",
"[",
"idx",
"]",
"r_ids",
"=",
"[",
"idx",
"+",
"EMIR_NBARS",
"]",
"lbars",
"=",
"{",
"lid",
":",
"conf",
".",
"bars",
"[",
"lid",
"]",
"for",
"lid",
"in",
"l_ids",
"}",
"rbars",
"=",
"{",
"rid",
":",
"conf",
".",
"bars",
"[",
"rid",
"]",
"for",
"rid",
"in",
"r_ids",
"}",
"this",
"=",
"LogicalSlit",
"(",
"idx",
",",
"lbars",
"=",
"lbars",
",",
"rbars",
"=",
"rbars",
")",
"# References from header",
"try",
":",
"slit_t",
"=",
"hdr",
"[",
"\"SLIFL%d\"",
"%",
"idx",
"]",
"this",
".",
"target_type",
"=",
"TargetType",
"(",
"slit_t",
")",
"except",
"KeyError",
"as",
"err",
":",
"# print('warning', err)",
"this",
".",
"target_type",
"=",
"TargetType",
".",
"UNKNOWN",
"xref",
"=",
"hdr",
".",
"get",
"(",
"\"XRSLI%d\"",
"%",
"this",
".",
"idx",
",",
"-",
"100",
")",
"-",
"1",
"yref",
"=",
"hdr",
".",
"get",
"(",
"\"YRSLI%d\"",
"%",
"this",
".",
"idx",
",",
"-",
"100",
")",
"-",
"1",
"this",
".",
"target_coordinates",
"=",
"(",
"xref",
",",
"yref",
")",
"xref",
"=",
"hdr",
".",
"get",
"(",
"\"XVSLI%d\"",
"%",
"this",
".",
"idx",
",",
"-",
"100",
")",
"-",
"1",
"yref",
"=",
"hdr",
".",
"get",
"(",
"\"YVSLI%d\"",
"%",
"this",
".",
"idx",
",",
"-",
"100",
")",
"-",
"1",
"this",
".",
"target_coordinates_v",
"=",
"(",
"xref",
",",
"yref",
")",
"conf",
".",
"slits",
"[",
"idx",
"]",
"=",
"this",
"return",
"conf"
] |
Read CSU information and slits from header
|
[
"Read",
"CSU",
"information",
"and",
"slits",
"from",
"header"
] |
train
|
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/csuconf.py#L262-L306
|
guaix-ucm/pyemir
|
emirdrp/instrument/csuconf.py
|
read_csu_3
|
def read_csu_3(barmodel, hdr):
"""Read CSU information and slits from header"""
conf = CSUConf(barmodel)
conf.set_state(hdr)
return conf
|
python
|
def read_csu_3(barmodel, hdr):
"""Read CSU information and slits from header"""
conf = CSUConf(barmodel)
conf.set_state(hdr)
return conf
|
[
"def",
"read_csu_3",
"(",
"barmodel",
",",
"hdr",
")",
":",
"conf",
"=",
"CSUConf",
"(",
"barmodel",
")",
"conf",
".",
"set_state",
"(",
"hdr",
")",
"return",
"conf"
] |
Read CSU information and slits from header
|
[
"Read",
"CSU",
"information",
"and",
"slits",
"from",
"header"
] |
train
|
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/csuconf.py#L309-L313
|
guaix-ucm/pyemir
|
emirdrp/instrument/csuconf.py
|
CSUConf.set_state
|
def set_state(self, hdr):
"""Read CSU information and slits from header"""
self.conf_f = hdr.get('CSUCONFF', 'UNKNOWN')
# Read CSUPOS and set position in model
# The bars
for idx in self.bars:
key = "CSUP{}".format(idx)
# UNIT is mm
# set CSUPOS for BAR
# Using the model, this sets the X,Y coordinate
self.bars[idx].csupos = hdr[key]
self._update_slits(hdr)
|
python
|
def set_state(self, hdr):
"""Read CSU information and slits from header"""
self.conf_f = hdr.get('CSUCONFF', 'UNKNOWN')
# Read CSUPOS and set position in model
# The bars
for idx in self.bars:
key = "CSUP{}".format(idx)
# UNIT is mm
# set CSUPOS for BAR
# Using the model, this sets the X,Y coordinate
self.bars[idx].csupos = hdr[key]
self._update_slits(hdr)
|
[
"def",
"set_state",
"(",
"self",
",",
"hdr",
")",
":",
"self",
".",
"conf_f",
"=",
"hdr",
".",
"get",
"(",
"'CSUCONFF'",
",",
"'UNKNOWN'",
")",
"# Read CSUPOS and set position in model",
"# The bars",
"for",
"idx",
"in",
"self",
".",
"bars",
":",
"key",
"=",
"\"CSUP{}\"",
".",
"format",
"(",
"idx",
")",
"# UNIT is mm",
"# set CSUPOS for BAR",
"# Using the model, this sets the X,Y coordinate",
"self",
".",
"bars",
"[",
"idx",
"]",
".",
"csupos",
"=",
"hdr",
"[",
"key",
"]",
"self",
".",
"_update_slits",
"(",
"hdr",
")"
] |
Read CSU information and slits from header
|
[
"Read",
"CSU",
"information",
"and",
"slits",
"from",
"header"
] |
train
|
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/csuconf.py#L46-L60
|
guaix-ucm/pyemir
|
emirdrp/instrument/csuconf.py
|
CSUConf._update_slits
|
def _update_slits(self, hdr):
"""Recreate slits"""
# Slits.
# For the moment we will fuse only reference slits
# clean existing slits
self.slits = {}
mm = []
for idx in self.LBARS:
# References from header
try:
slit_t = hdr["SLIFL%d" % idx]
target_type = TargetType(slit_t)
except KeyError:
target_type = TargetType.UNKNOWN
xref = hdr.get("XRSLI%d" % idx, -100) - 1
yref = hdr.get("YRSLI%d" % idx, -100) - 1
target_coordinates = (xref, yref)
xref = hdr.get("XVSLI%d" % idx, -100) - 1
yref = hdr.get("YVSLI%d" % idx, -100) - 1
target_coordinates_v = (xref, yref)
mm.append((idx, target_type, target_coordinates, target_coordinates_v))
bag = merge_slits(mm, max_slits=3, tol=1e-2)
for idx, l_ids in bag.items():
r_ids = [lid + EMIR_NBARS for lid in l_ids]
lbars = {lid: self.bars[lid] for lid in l_ids}
rbars = {rid: self.bars[rid] for rid in r_ids}
this = LogicalSlit(idx, lbars=lbars, rbars=rbars)
# References from header
ref = mm[l_ids[0] - 1]
this.target_type = ref[1]
this.target_coordinates = ref[2]
this.target_coordinates_v = ref[3]
self.slits[idx] = this
|
python
|
def _update_slits(self, hdr):
"""Recreate slits"""
# Slits.
# For the moment we will fuse only reference slits
# clean existing slits
self.slits = {}
mm = []
for idx in self.LBARS:
# References from header
try:
slit_t = hdr["SLIFL%d" % idx]
target_type = TargetType(slit_t)
except KeyError:
target_type = TargetType.UNKNOWN
xref = hdr.get("XRSLI%d" % idx, -100) - 1
yref = hdr.get("YRSLI%d" % idx, -100) - 1
target_coordinates = (xref, yref)
xref = hdr.get("XVSLI%d" % idx, -100) - 1
yref = hdr.get("YVSLI%d" % idx, -100) - 1
target_coordinates_v = (xref, yref)
mm.append((idx, target_type, target_coordinates, target_coordinates_v))
bag = merge_slits(mm, max_slits=3, tol=1e-2)
for idx, l_ids in bag.items():
r_ids = [lid + EMIR_NBARS for lid in l_ids]
lbars = {lid: self.bars[lid] for lid in l_ids}
rbars = {rid: self.bars[rid] for rid in r_ids}
this = LogicalSlit(idx, lbars=lbars, rbars=rbars)
# References from header
ref = mm[l_ids[0] - 1]
this.target_type = ref[1]
this.target_coordinates = ref[2]
this.target_coordinates_v = ref[3]
self.slits[idx] = this
|
[
"def",
"_update_slits",
"(",
"self",
",",
"hdr",
")",
":",
"# Slits.",
"# For the moment we will fuse only reference slits",
"# clean existing slits",
"self",
".",
"slits",
"=",
"{",
"}",
"mm",
"=",
"[",
"]",
"for",
"idx",
"in",
"self",
".",
"LBARS",
":",
"# References from header",
"try",
":",
"slit_t",
"=",
"hdr",
"[",
"\"SLIFL%d\"",
"%",
"idx",
"]",
"target_type",
"=",
"TargetType",
"(",
"slit_t",
")",
"except",
"KeyError",
":",
"target_type",
"=",
"TargetType",
".",
"UNKNOWN",
"xref",
"=",
"hdr",
".",
"get",
"(",
"\"XRSLI%d\"",
"%",
"idx",
",",
"-",
"100",
")",
"-",
"1",
"yref",
"=",
"hdr",
".",
"get",
"(",
"\"YRSLI%d\"",
"%",
"idx",
",",
"-",
"100",
")",
"-",
"1",
"target_coordinates",
"=",
"(",
"xref",
",",
"yref",
")",
"xref",
"=",
"hdr",
".",
"get",
"(",
"\"XVSLI%d\"",
"%",
"idx",
",",
"-",
"100",
")",
"-",
"1",
"yref",
"=",
"hdr",
".",
"get",
"(",
"\"YVSLI%d\"",
"%",
"idx",
",",
"-",
"100",
")",
"-",
"1",
"target_coordinates_v",
"=",
"(",
"xref",
",",
"yref",
")",
"mm",
".",
"append",
"(",
"(",
"idx",
",",
"target_type",
",",
"target_coordinates",
",",
"target_coordinates_v",
")",
")",
"bag",
"=",
"merge_slits",
"(",
"mm",
",",
"max_slits",
"=",
"3",
",",
"tol",
"=",
"1e-2",
")",
"for",
"idx",
",",
"l_ids",
"in",
"bag",
".",
"items",
"(",
")",
":",
"r_ids",
"=",
"[",
"lid",
"+",
"EMIR_NBARS",
"for",
"lid",
"in",
"l_ids",
"]",
"lbars",
"=",
"{",
"lid",
":",
"self",
".",
"bars",
"[",
"lid",
"]",
"for",
"lid",
"in",
"l_ids",
"}",
"rbars",
"=",
"{",
"rid",
":",
"self",
".",
"bars",
"[",
"rid",
"]",
"for",
"rid",
"in",
"r_ids",
"}",
"this",
"=",
"LogicalSlit",
"(",
"idx",
",",
"lbars",
"=",
"lbars",
",",
"rbars",
"=",
"rbars",
")",
"# References from header",
"ref",
"=",
"mm",
"[",
"l_ids",
"[",
"0",
"]",
"-",
"1",
"]",
"this",
".",
"target_type",
"=",
"ref",
"[",
"1",
"]",
"this",
".",
"target_coordinates",
"=",
"ref",
"[",
"2",
"]",
"this",
".",
"target_coordinates_v",
"=",
"ref",
"[",
"3",
"]",
"self",
".",
"slits",
"[",
"idx",
"]",
"=",
"this"
] |
Recreate slits
|
[
"Recreate",
"slits"
] |
train
|
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/csuconf.py#L62-L104
|
thorgate/tg-utils
|
tg_utils/managers.py
|
NotifyPostChangeQuerySet.update_or_create
|
def update_or_create(self, *args, **kwargs):
""" Only sent when not created, since default implementation will
call `self.create` when creating which triggers our signal
already.
"""
obj, created = super().update_or_create(*args, **kwargs)
if not created:
return self.with_signal(result=(obj, created))
return obj, created
|
python
|
def update_or_create(self, *args, **kwargs):
""" Only sent when not created, since default implementation will
call `self.create` when creating which triggers our signal
already.
"""
obj, created = super().update_or_create(*args, **kwargs)
if not created:
return self.with_signal(result=(obj, created))
return obj, created
|
[
"def",
"update_or_create",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"obj",
",",
"created",
"=",
"super",
"(",
")",
".",
"update_or_create",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"if",
"not",
"created",
":",
"return",
"self",
".",
"with_signal",
"(",
"result",
"=",
"(",
"obj",
",",
"created",
")",
")",
"return",
"obj",
",",
"created"
] |
Only sent when not created, since default implementation will
call `self.create` when creating which triggers our signal
already.
|
[
"Only",
"sent",
"when",
"not",
"created",
"since",
"default",
"implementation",
"will",
"call",
"self",
".",
"create",
"when",
"creating",
"which",
"triggers",
"our",
"signal",
"already",
"."
] |
train
|
https://github.com/thorgate/tg-utils/blob/81e404e837334b241686d9159cc3eb44de509a88/tg_utils/managers.py#L41-L51
|
Yelp/uwsgi_metrics
|
uwsgi_metrics/timer.py
|
Timer.update
|
def update(self, duration):
"""Add a recorded duration."""
if duration >= 0:
self.histogram.update(duration)
self.meter.mark()
|
python
|
def update(self, duration):
"""Add a recorded duration."""
if duration >= 0:
self.histogram.update(duration)
self.meter.mark()
|
[
"def",
"update",
"(",
"self",
",",
"duration",
")",
":",
"if",
"duration",
">=",
"0",
":",
"self",
".",
"histogram",
".",
"update",
"(",
"duration",
")",
"self",
".",
"meter",
".",
"mark",
"(",
")"
] |
Add a recorded duration.
|
[
"Add",
"a",
"recorded",
"duration",
"."
] |
train
|
https://github.com/Yelp/uwsgi_metrics/blob/534966fd461ff711aecd1e3d4caaafdc23ac33f0/uwsgi_metrics/timer.py#L44-L48
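A minimal usage sketch for Timer.update; the no-argument constructor and the workload are assumptions, not from the record:

import time

def do_work():
    time.sleep(0.01)  # stand-in workload

timer = Timer()                      # assumed constructor; not shown in this record
start = time.time()
do_work()
timer.update(time.time() - start)    # negative durations are silently dropped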
|
BeyondTheClouds/enoslib
|
enoslib/infra/enos_vmong5k/provider.py
|
_build_g5k_conf
|
def _build_g5k_conf(vmong5k_conf):
"""Build the conf of the g5k provider from the vmong5k conf."""
clusters = [m.cluster for m in vmong5k_conf.machines]
sites = g5k_api_utils.get_clusters_sites(clusters)
site_names = set(sites.values())
if len(site_names) > 1:
raise Exception("Multisite deployment not supported yet")
site = site_names.pop()
return _do_build_g5k_conf(vmong5k_conf, site)
|
python
|
def _build_g5k_conf(vmong5k_conf):
"""Build the conf of the g5k provider from the vmong5k conf."""
clusters = [m.cluster for m in vmong5k_conf.machines]
sites = g5k_api_utils.get_clusters_sites(clusters)
site_names = set(sites.values())
if len(site_names) > 1:
raise Exception("Multisite deployment not supported yet")
site = site_names.pop()
return _do_build_g5k_conf(vmong5k_conf, site)
|
[
"def",
"_build_g5k_conf",
"(",
"vmong5k_conf",
")",
":",
"clusters",
"=",
"[",
"m",
".",
"cluster",
"for",
"m",
"in",
"vmong5k_conf",
".",
"machines",
"]",
"sites",
"=",
"g5k_api_utils",
".",
"get_clusters_sites",
"(",
"clusters",
")",
"site_names",
"=",
"set",
"(",
"sites",
".",
"values",
"(",
")",
")",
"if",
"len",
"(",
"site_names",
")",
">",
"1",
":",
"raise",
"Exception",
"(",
"\"Multisite deployment not supported yet\"",
")",
"site",
"=",
"site_names",
".",
"pop",
"(",
")",
"return",
"_do_build_g5k_conf",
"(",
"vmong5k_conf",
",",
"site",
")"
] |
Build the conf of the g5k provider from the vmong5k conf.
|
[
"Build",
"the",
"conf",
"of",
"the",
"g5k",
"provider",
"from",
"the",
"vmong5k",
"conf",
"."
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_vmong5k/provider.py#L79-L87
|
BreakingBytes/simkit
|
simkit/contrib/readers.py
|
copy_model_instance
|
def copy_model_instance(obj):
"""
Copy Django model instance as a dictionary excluding automatically created
fields like an auto-generated sequence as a primary key or an auto-created
many-to-one reverse relation.
:param obj: Django model object
:return: copy of model instance as dictionary
"""
meta = getattr(obj, '_meta') # make pycharm happy
# dictionary of model values excluding auto created and related fields
return {f.name: getattr(obj, f.name)
for f in meta.get_fields(include_parents=False)
if not f.auto_created}
|
python
|
def copy_model_instance(obj):
"""
Copy Django model instance as a dictionary excluding automatically created
fields like an auto-generated sequence as a primary key or an auto-created
many-to-one reverse relation.
:param obj: Django model object
:return: copy of model instance as dictionary
"""
meta = getattr(obj, '_meta') # make pycharm happy
# dictionary of model values excluding auto created and related fields
return {f.name: getattr(obj, f.name)
for f in meta.get_fields(include_parents=False)
if not f.auto_created}
|
[
"def",
"copy_model_instance",
"(",
"obj",
")",
":",
"meta",
"=",
"getattr",
"(",
"obj",
",",
"'_meta'",
")",
"# make pycharm happy",
"# dictionary of model values excluding auto created and related fields",
"return",
"{",
"f",
".",
"name",
":",
"getattr",
"(",
"obj",
",",
"f",
".",
"name",
")",
"for",
"f",
"in",
"meta",
".",
"get_fields",
"(",
"include_parents",
"=",
"False",
")",
"if",
"not",
"f",
".",
"auto_created",
"}"
] |
Copy Django model instance as a dictionary excluding automatically created
fields like an auto-generated sequence as a primary key or an auto-created
many-to-one reverse relation.
:param obj: Django model object
:return: copy of model instance as dictionary
|
[
"Copy",
"Django",
"model",
"instance",
"as",
"a",
"dictionary",
"excluding",
"automatically",
"created",
"fields",
"like",
"an",
"auto",
"-",
"generated",
"sequence",
"as",
"a",
"primary",
"key",
"or",
"an",
"auto",
"-",
"created",
"many",
"-",
"to",
"-",
"one",
"reverse",
"relation",
"."
] |
train
|
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/contrib/readers.py#L18-L31
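A hedged sketch of duplicating a row with copy_model_instance; the model, its fields, and the queried pk are hypothetical:

from django.db import models

class Panel(models.Model):           # hypothetical model
    name = models.CharField(max_length=50)
    tilt = models.FloatField()

original = Panel.objects.get(pk=1)
fields = copy_model_instance(original)      # e.g. {'name': 'south', 'tilt': 30.0}, no pk
duplicate = Panel.objects.create(**fields)  # new row with a fresh auto-generated pk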
|
BreakingBytes/simkit
|
simkit/contrib/readers.py
|
ArgumentReader.load_data
|
def load_data(self, *args, **kwargs):
"""
Collects positional and keyword arguments into `data` and applies units.
:return: data
"""
# get positional argument names from parameters and apply them to args
# update data with additional kwargs
argpos = {
v['extras']['argpos']: k for k, v in self.parameters.iteritems()
if 'argpos' in v['extras']
}
data = dict(
{argpos[n]: a for n, a in enumerate(args)}, **kwargs
)
return self.apply_units_to_cache(data)
|
python
|
def load_data(self, *args, **kwargs):
"""
Collects positional and keyword arguments into `data` and applies units.
:return: data
"""
# get positional argument names from parameters and apply them to args
# update data with additional kwargs
argpos = {
v['extras']['argpos']: k for k, v in self.parameters.iteritems()
if 'argpos' in v['extras']
}
data = dict(
{argpos[n]: a for n, a in enumerate(args)}, **kwargs
)
return self.apply_units_to_cache(data)
|
[
"def",
"load_data",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# get positional argument names from parameters and apply them to args",
"# update data with additional kwargs",
"argpos",
"=",
"{",
"v",
"[",
"'extras'",
"]",
"[",
"'argpos'",
"]",
":",
"k",
"for",
"k",
",",
"v",
"in",
"self",
".",
"parameters",
".",
"iteritems",
"(",
")",
"if",
"'argpos'",
"in",
"v",
"[",
"'extras'",
"]",
"}",
"data",
"=",
"dict",
"(",
"{",
"argpos",
"[",
"n",
"]",
":",
"a",
"for",
"n",
",",
"a",
"in",
"enumerate",
"(",
"args",
")",
"}",
",",
"*",
"*",
"kwargs",
")",
"return",
"self",
".",
"apply_units_to_cache",
"(",
"data",
")"
] |
Collects positional and keyword arguments into `data` and applies units.
:return: data
|
[
"Collects",
"positional",
"and",
"keyword",
"arguments",
"into",
"data",
"and",
"applies",
"units",
"."
] |
train
|
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/contrib/readers.py#L60-L75
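A standalone demonstration (Python 3 items() in place of iteritems()) of the argpos mapping built by load_data; the parameter names and positions are invented:

parameters = {
    'latitude':  {'extras': {'argpos': 0}},
    'longitude': {'extras': {'argpos': 1}},
    'elevation': {'extras': {}},             # no argpos: keyword-only
}
args, kwargs = (38.0, -122.0), {'elevation': 100.0}
argpos = {v['extras']['argpos']: k for k, v in parameters.items()
          if 'argpos' in v['extras']}
data = dict({argpos[n]: a for n, a in enumerate(args)}, **kwargs)
print(data)  # {'latitude': 38.0, 'longitude': -122.0, 'elevation': 100.0}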
|
BreakingBytes/simkit
|
simkit/contrib/readers.py
|
ArgumentReader.apply_units_to_cache
|
def apply_units_to_cache(self, data):
"""
Applies units to data when a proxy reader is used. For example if the
data is cached as JSON and retrieved using the
:class:`~simkit.core.data_readers.JSONReader`, then units can be
applied from the original parameter schema.
:param data: Data read by proxy reader.
:return: data with units applied
"""
# if units key exists then apply
for k, v in self.parameters.iteritems():
if v and v.get('units'):
data[k] = Q_(data[k], v.get('units'))
return data
|
python
|
def apply_units_to_cache(self, data):
"""
Applies units to data when a proxy reader is used. For example if the
data is cached as JSON and retrieved using the
:class:`~simkit.core.data_readers.JSONReader`, then units can be
applied from the original parameter schema.
:param data: Data read by proxy reader.
:return: data with units applied
"""
# if units key exists then apply
for k, v in self.parameters.iteritems():
if v and v.get('units'):
data[k] = Q_(data[k], v.get('units'))
return data
|
[
"def",
"apply_units_to_cache",
"(",
"self",
",",
"data",
")",
":",
"# if units key exists then apply",
"for",
"k",
",",
"v",
"in",
"self",
".",
"parameters",
".",
"iteritems",
"(",
")",
":",
"if",
"v",
"and",
"v",
".",
"get",
"(",
"'units'",
")",
":",
"data",
"[",
"k",
"]",
"=",
"Q_",
"(",
"data",
"[",
"k",
"]",
",",
"v",
".",
"get",
"(",
"'units'",
")",
")",
"return",
"data"
] |
Applies units to data when a proxy reader is used. For example if the
data is cached as JSON and retrieved using the
:class:`~simkit.core.data_readers.JSONReader`, then units can be
applied from the original parameter schema.
:param data: Data read by proxy reader.
:return: data with units applied
|
[
"Applies",
"units",
"to",
"data",
"when",
"a",
"proxy",
"reader",
"is",
"used",
".",
"For",
"example",
"if",
"the",
"data",
"is",
"cached",
"as",
"JSON",
"and",
"retrieved",
"using",
"the",
":",
"class",
":",
"~simkit",
".",
"core",
".",
"data_readers",
".",
"JSONReader",
"then",
"units",
"can",
"be",
"applied",
"from",
"the",
"original",
"parameter",
"schema",
"."
] |
train
|
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/contrib/readers.py#L77-L91
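A sketch of the units round-trip, assuming Q_ is a pint Quantity constructor (the parameter schema and values are invented):

import pint

Q_ = pint.UnitRegistry().Quantity
parameters = {'power': {'units': 'W'}, 'label': None}   # hypothetical schema
data = {'power': 5.0, 'label': 'inverter'}              # plain values from a cache
for k, v in parameters.items():
    if v and v.get('units'):
        data[k] = Q_(data[k], v.get('units'))
print(data['power'])  # 5.0 watt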
|
BreakingBytes/simkit
|
simkit/contrib/readers.py
|
DjangoModelReader.load_data
|
def load_data(self, model_instance, *args, **kwargs):
"""
Apply units to model.
:return: data
"""
model_dict = copy_model_instance(model_instance)
return super(DjangoModelReader, self).load_data(**model_dict)
|
python
|
def load_data(self, model_instance, *args, **kwargs):
"""
Apply units to model.
:return: data
"""
model_dict = copy_model_instance(model_instance)
return super(DjangoModelReader, self).load_data(**model_dict)
|
[
"def",
"load_data",
"(",
"self",
",",
"model_instance",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"model_dict",
"=",
"copy_model_instance",
"(",
"model_instance",
")",
"return",
"super",
"(",
"DjangoModelReader",
",",
"self",
")",
".",
"load_data",
"(",
"*",
"*",
"model_dict",
")"
] |
Apply units to model.
:return: data
|
[
"Apply",
"units",
"to",
"model",
".",
":",
"return",
":",
"data"
] |
train
|
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/contrib/readers.py#L129-L135
|
BeyondTheClouds/enoslib
|
enoslib/infra/enos_g5k/g5k_api_utils.py
|
cached
|
def cached(f):
"""Decorator for caching/retrieving api calls request.
Many calls to the API are getter on static parts (e.g site of a given
cluster name won't change). By caching some responses we can avoid
hammering the API server.
"""
@functools.wraps(f)
def wrapped(*args, **kwargs):
with _cache_lock:
identifier = (f.__name__, args, kwargs)
key = pickle.dumps(identifier)
value = cache.get(key)
if value is not None:
logger.debug("HIT for %s -> %s" % (str(identifier), value))
else:
logger.debug("MISS for %s" % str(identifier))
value = f(*args, **kwargs)
cache[key] = value
return value
return wrapped
|
python
|
def cached(f):
"""Decorator for caching/retrieving api calls request.
Many calls to the API are getter on static parts (e.g site of a given
cluster name won't change). By caching some responses we can avoid
hammering the API server.
"""
@functools.wraps(f)
def wrapped(*args, **kwargs):
with _cache_lock:
identifier = (f.__name__, args, kwargs)
key = pickle.dumps(identifier)
value = cache.get(key)
if value is not None:
logger.debug("HIT for %s -> %s" % (str(identifier), value))
else:
logger.debug("MISS for %s" % str(identifier))
value = f(*args, **kwargs)
cache[key] = value
return value
return wrapped
|
[
"def",
"cached",
"(",
"f",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"f",
")",
"def",
"wrapped",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"with",
"_cache_lock",
":",
"identifier",
"=",
"(",
"f",
".",
"__name__",
",",
"args",
",",
"kwargs",
")",
"key",
"=",
"pickle",
".",
"dumps",
"(",
"identifier",
")",
"value",
"=",
"cache",
".",
"get",
"(",
"key",
")",
"if",
"value",
"is",
"not",
"None",
":",
"logger",
".",
"debug",
"(",
"\"HIT for %s -> %s\"",
"%",
"(",
"str",
"(",
"identifier",
")",
",",
"value",
")",
")",
"else",
":",
"logger",
".",
"debug",
"(",
"\"MISS for %s\"",
"%",
"str",
"(",
"identifier",
")",
")",
"value",
"=",
"f",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"cache",
"[",
"key",
"]",
"=",
"value",
"return",
"value",
"return",
"wrapped"
] |
Decorator for caching/retrieving API call results.
Many calls to the API are getters on static data (e.g. the site of a given
cluster won't change). By caching some responses we can avoid
hammering the API server.
|
[
"Decorator",
"for",
"caching",
"/",
"retrieving",
"api",
"calls",
"request",
"."
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/g5k_api_utils.py#L35-L55
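A usage sketch for the cached decorator above; the decorated lookup and the module-level cache objects are stand-ins for definitions not shown in this record:

import logging
import threading

logger = logging.getLogger(__name__)
cache = {}                        # stand-in for the module-level cache
_cache_lock = threading.Lock()    # stand-in for the module-level lock

@cached
def cluster_site(cluster):        # hypothetical static lookup
    return {"paravance": "rennes"}.get(cluster)

cluster_site("paravance")   # MISS: computed, then stored under the pickled key
cluster_site("paravance")   # HIT: served from the cache without calling again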
|
BeyondTheClouds/enoslib
|
enoslib/infra/enos_g5k/g5k_api_utils.py
|
get_api_client
|
def get_api_client():
"""Gets the reference to the API cient (singleton)."""
with _api_lock:
global _api_client
if not _api_client:
conf_file = os.path.join(os.environ.get("HOME"),
".python-grid5000.yaml")
_api_client = Client.from_yaml(conf_file)
return _api_client
|
python
|
def get_api_client():
"""Gets the reference to the API cient (singleton)."""
with _api_lock:
global _api_client
if not _api_client:
conf_file = os.path.join(os.environ.get("HOME"),
".python-grid5000.yaml")
_api_client = Client.from_yaml(conf_file)
return _api_client
|
[
"def",
"get_api_client",
"(",
")",
":",
"with",
"_api_lock",
":",
"global",
"_api_client",
"if",
"not",
"_api_client",
":",
"conf_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"environ",
".",
"get",
"(",
"\"HOME\"",
")",
",",
"\".python-grid5000.yaml\"",
")",
"_api_client",
"=",
"Client",
".",
"from_yaml",
"(",
"conf_file",
")",
"return",
"_api_client"
] |
Gets the reference to the API client (singleton).
|
[
"Gets",
"the",
"reference",
"to",
"the",
"API",
"cient",
"(",
"singleton",
")",
"."
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/g5k_api_utils.py#L78-L87
|
BeyondTheClouds/enoslib
|
enoslib/infra/enos_g5k/g5k_api_utils.py
|
grid_reload_from_name
|
def grid_reload_from_name(job_name):
"""Reload all running or pending jobs of Grid'5000 with a given name.
By default all the sites will be searched for jobs with the name
``job_name``. Using EnOSlib there can be only one job per site with name
``job_name``.
Note that it honors the ``excluded_site`` attribute of the client so the
scan can be reduced.
Args:
job_name (str): the job name
Returns:
The list of the python-grid5000 jobs retrieved.
Raises:
EnosG5kDuplicateJobsError: if there are several jobs with the same name
on a site.
"""
gk = get_api_client()
sites = get_all_sites_obj()
jobs = []
for site in [s for s in sites if s.uid not in gk.excluded_site]:
logger.info("Reloading %s from %s" % (job_name, site.uid))
_jobs = site.jobs.list(name=job_name,
state="waiting,launching,running")
if len(_jobs) == 1:
logger.info("Reloading %s from %s" % (_jobs[0].uid, site.uid))
jobs.append(_jobs[0])
elif len(_jobs) > 1:
raise EnosG5kDuplicateJobsError(site, job_name)
return jobs
|
python
|
def grid_reload_from_name(job_name):
"""Reload all running or pending jobs of Grid'5000 with a given name.
By default all the sites will be searched for jobs with the name
``job_name``. Using EnOSlib there can be only one job per site with name
``job_name``.
Note that it honors the ``excluded_site`` attribute of the client so the
scan can be reduced.
Args:
job_name (str): the job name
Returns:
The list of the python-grid5000 jobs retrieved.
Raises:
EnosG5kDuplicateJobsError: if there are several jobs with the same name
on a site.
"""
gk = get_api_client()
sites = get_all_sites_obj()
jobs = []
for site in [s for s in sites if s.uid not in gk.excluded_site]:
logger.info("Reloading %s from %s" % (job_name, site.uid))
_jobs = site.jobs.list(name=job_name,
state="waiting,launching,running")
if len(_jobs) == 1:
logger.info("Reloading %s from %s" % (_jobs[0].uid, site.uid))
jobs.append(_jobs[0])
elif len(_jobs) > 1:
raise EnosG5kDuplicateJobsError(site, job_name)
return jobs
|
[
"def",
"grid_reload_from_name",
"(",
"job_name",
")",
":",
"gk",
"=",
"get_api_client",
"(",
")",
"sites",
"=",
"get_all_sites_obj",
"(",
")",
"jobs",
"=",
"[",
"]",
"for",
"site",
"in",
"[",
"s",
"for",
"s",
"in",
"sites",
"if",
"s",
".",
"uid",
"not",
"in",
"gk",
".",
"excluded_site",
"]",
":",
"logger",
".",
"info",
"(",
"\"Reloading %s from %s\"",
"%",
"(",
"job_name",
",",
"site",
".",
"uid",
")",
")",
"_jobs",
"=",
"site",
".",
"jobs",
".",
"list",
"(",
"name",
"=",
"job_name",
",",
"state",
"=",
"\"waiting,launching,running\"",
")",
"if",
"len",
"(",
"_jobs",
")",
"==",
"1",
":",
"logger",
".",
"info",
"(",
"\"Reloading %s from %s\"",
"%",
"(",
"_jobs",
"[",
"0",
"]",
".",
"uid",
",",
"site",
".",
"uid",
")",
")",
"jobs",
".",
"append",
"(",
"_jobs",
"[",
"0",
"]",
")",
"elif",
"len",
"(",
"_jobs",
")",
">",
"1",
":",
"raise",
"EnosG5kDuplicateJobsError",
"(",
"site",
",",
"job_name",
")",
"return",
"jobs"
] |
Reload all running or pending jobs of Grid'5000 with a given name.
By default all the sites will be searched for jobs with the name
``job_name``. Using EnOSlib there can be only one job per site with name
``job_name``.
Note that it honors the ``excluded_site`` attribute of the client so the
scan can be reduced.
Args:
job_name (str): the job name
Returns:
The list of the python-grid5000 jobs retrieved.
Raises:
EnosG5kDuplicateJobsError: if there are several jobs with the same name
on a site.
|
[
"Reload",
"all",
"running",
"or",
"pending",
"jobs",
"of",
"Grid",
"5000",
"with",
"a",
"given",
"name",
"."
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/g5k_api_utils.py#L96-L129
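A hypothetical reload of an experiment's jobs by name using the function above:

jobs = grid_reload_from_name("my_experiment")   # scans every non-excluded site
for job in jobs:
    print(job.site, job.uid, job.state)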
|
BeyondTheClouds/enoslib
|
enoslib/infra/enos_g5k/g5k_api_utils.py
|
grid_reload_from_ids
|
def grid_reload_from_ids(oargrid_jobids):
"""Reload all running or pending jobs of Grid'5000 from their ids
Args:
oargrid_jobids (list): list of ``(site, oar_jobid)`` identifying the
jobs on each site
Returns:
The list of python-grid5000 jobs retrieved
"""
gk = get_api_client()
jobs = []
for site, job_id in oargrid_jobids:
jobs.append(gk.sites[site].jobs[job_id])
return jobs
|
python
|
def grid_reload_from_ids(oargrid_jobids):
"""Reload all running or pending jobs of Grid'5000 from their ids
Args:
oargrid_jobids (list): list of ``(site, oar_jobid)`` identifying the
jobs on each site
Returns:
The list of python-grid5000 jobs retrieved
"""
gk = get_api_client()
jobs = []
for site, job_id in oargrid_jobids:
jobs.append(gk.sites[site].jobs[job_id])
return jobs
|
[
"def",
"grid_reload_from_ids",
"(",
"oargrid_jobids",
")",
":",
"gk",
"=",
"get_api_client",
"(",
")",
"jobs",
"=",
"[",
"]",
"for",
"site",
",",
"job_id",
"in",
"oargrid_jobids",
":",
"jobs",
".",
"append",
"(",
"gk",
".",
"sites",
"[",
"site",
"]",
".",
"jobs",
"[",
"job_id",
"]",
")",
"return",
"jobs"
] |
Reload all running or pending jobs of Grid'5000 from their ids
Args:
oargrid_jobids (list): list of ``(site, oar_jobid)`` identifying the
jobs on each site
Returns:
The list of python-grid5000 jobs retrieved
|
[
"Reload",
"all",
"running",
"or",
"pending",
"jobs",
"of",
"Grid",
"5000",
"from",
"their",
"ids"
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/g5k_api_utils.py#L132-L146
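The (site, oar_job_id) pairs expected by grid_reload_from_ids; the site names and ids are illustrative:

oargrid_jobids = [("rennes", 1234567), ("nancy", 7654321)]
jobs = grid_reload_from_ids(oargrid_jobids)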
|
BeyondTheClouds/enoslib
|
enoslib/infra/enos_g5k/g5k_api_utils.py
|
grid_destroy_from_name
|
def grid_destroy_from_name(job_name):
"""Destroy all the jobs with a given name.
Args:
job_name (str): the job name
"""
jobs = grid_reload_from_name(job_name)
for job in jobs:
job.delete()
logger.info("Killing the job (%s, %s)" % (job.site, job.uid))
|
python
|
def grid_destroy_from_name(job_name):
"""Destroy all the jobs with a given name.
Args:
job_name (str): the job name
"""
jobs = grid_reload_from_name(job_name)
for job in jobs:
job.delete()
logger.info("Killing the job (%s, %s)" % (job.site, job.uid))
|
[
"def",
"grid_destroy_from_name",
"(",
"job_name",
")",
":",
"jobs",
"=",
"grid_reload_from_name",
"(",
"job_name",
")",
"for",
"job",
"in",
"jobs",
":",
"job",
".",
"delete",
"(",
")",
"logger",
".",
"info",
"(",
"\"Killing the job (%s, %s)\"",
"%",
"(",
"job",
".",
"site",
",",
"job",
".",
"uid",
")",
")"
] |
Destroy all the jobs with a given name.
Args:
job_name (str): the job name
|
[
"Destroy",
"all",
"the",
"jobs",
"with",
"a",
"given",
"name",
"."
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/g5k_api_utils.py#L149-L158
|
BeyondTheClouds/enoslib
|
enoslib/infra/enos_g5k/g5k_api_utils.py
|
grid_destroy_from_ids
|
def grid_destroy_from_ids(oargrid_jobids):
"""Destroy all the jobs with corresponding ids
Args:
oargrid_jobids (list): the list of ``(site, oar_job_id)`` tuples
identifying the jobs for each site. """
jobs = grid_reload_from_ids(oargrid_jobids)
for job in jobs:
job.delete()
logger.info("Killing the jobs %s" % oargrid_jobids)
|
python
|
def grid_destroy_from_ids(oargrid_jobids):
"""Destroy all the jobs with corresponding ids
Args:
oargrid_jobids (list): the list of ``(site, oar_job_id)`` tuples
identifying the jobs for each site. """
jobs = grid_reload_from_ids(oargrid_jobids)
for job in jobs:
job.delete()
logger.info("Killing the jobs %s" % oargrid_jobids)
|
[
"def",
"grid_destroy_from_ids",
"(",
"oargrid_jobids",
")",
":",
"jobs",
"=",
"grid_reload_from_ids",
"(",
"oargrid_jobids",
")",
"for",
"job",
"in",
"jobs",
":",
"job",
".",
"delete",
"(",
")",
"logger",
".",
"info",
"(",
"\"Killing the jobs %s\"",
"%",
"oargrid_jobids",
")"
] |
Destroy all the jobs with corresponding ids
Args:
oargrid_jobids (list): the list of ``(site, oar_job_id)`` tuples
identifying the jobs for each site.
|
[
"Destroy",
"all",
"the",
"jobs",
"with",
"corresponding",
"ids"
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/g5k_api_utils.py#L161-L170
|
BeyondTheClouds/enoslib
|
enoslib/infra/enos_g5k/g5k_api_utils.py
|
submit_jobs
|
def submit_jobs(job_specs):
"""Submit a job
Args:
job_spec (dict): The job specification (see Grid'5000 API reference)
"""
gk = get_api_client()
jobs = []
try:
for site, job_spec in job_specs:
logger.info("Submitting %s on %s" % (job_spec, site))
jobs.append(gk.sites[site].jobs.create(job_spec))
except Exception as e:
logger.error("An error occured during the job submissions")
logger.error("Cleaning the jobs created")
for job in jobs:
job.delete()
raise(e)
return jobs
|
python
|
def submit_jobs(job_specs):
"""Submit a job
Args:
job_spec (dict): The job specification (see Grid'5000 API reference)
"""
gk = get_api_client()
jobs = []
try:
for site, job_spec in job_specs:
logger.info("Submitting %s on %s" % (job_spec, site))
jobs.append(gk.sites[site].jobs.create(job_spec))
except Exception as e:
logger.error("An error occured during the job submissions")
logger.error("Cleaning the jobs created")
for job in jobs:
job.delete()
raise(e)
return jobs
|
[
"def",
"submit_jobs",
"(",
"job_specs",
")",
":",
"gk",
"=",
"get_api_client",
"(",
")",
"jobs",
"=",
"[",
"]",
"try",
":",
"for",
"site",
",",
"job_spec",
"in",
"job_specs",
":",
"logger",
".",
"info",
"(",
"\"Submitting %s on %s\"",
"%",
"(",
"job_spec",
",",
"site",
")",
")",
"jobs",
".",
"append",
"(",
"gk",
".",
"sites",
"[",
"site",
"]",
".",
"jobs",
".",
"create",
"(",
"job_spec",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"\"An error occured during the job submissions\"",
")",
"logger",
".",
"error",
"(",
"\"Cleaning the jobs created\"",
")",
"for",
"job",
"in",
"jobs",
":",
"job",
".",
"delete",
"(",
")",
"raise",
"(",
"e",
")",
"return",
"jobs"
] |
Submit a job
Args:
job_spec (dict): The job specification (see Grid'5000 API reference)
|
[
"Submit",
"a",
"job"
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/g5k_api_utils.py#L173-L192
|
BeyondTheClouds/enoslib
|
enoslib/infra/enos_g5k/g5k_api_utils.py
|
wait_for_jobs
|
def wait_for_jobs(jobs):
"""Waits for all the jobs to be runnning.
Args:
jobs(list): list of the python-grid5000 jobs to wait for
Raises:
Exception: if one of the jobs gets into the error state.
"""
all_running = False
while not all_running:
all_running = True
time.sleep(5)
for job in jobs:
job.refresh()
scheduled = getattr(job, "scheduled_at", None)
if scheduled is not None:
logger.info("Waiting for %s on %s [%s]" % (job.uid,
job.site,
_date2h(scheduled)))
all_running = all_running and job.state == "running"
if job.state == "error":
raise Exception("The job %s is in error state" % job)
logger.info("All jobs are Running !")
|
python
|
def wait_for_jobs(jobs):
"""Waits for all the jobs to be runnning.
Args:
jobs(list): list of the python-grid5000 jobs to wait for
Raises:
Exception: if one of the jobs gets into the error state.
"""
all_running = False
while not all_running:
all_running = True
time.sleep(5)
for job in jobs:
job.refresh()
scheduled = getattr(job, "scheduled_at", None)
if scheduled is not None:
logger.info("Waiting for %s on %s [%s]" % (job.uid,
job.site,
_date2h(scheduled)))
all_running = all_running and job.state == "running"
if job.state == "error":
raise Exception("The job %s is in error state" % job)
logger.info("All jobs are Running !")
|
[
"def",
"wait_for_jobs",
"(",
"jobs",
")",
":",
"all_running",
"=",
"False",
"while",
"not",
"all_running",
":",
"all_running",
"=",
"True",
"time",
".",
"sleep",
"(",
"5",
")",
"for",
"job",
"in",
"jobs",
":",
"job",
".",
"refresh",
"(",
")",
"scheduled",
"=",
"getattr",
"(",
"job",
",",
"\"scheduled_at\"",
",",
"None",
")",
"if",
"scheduled",
"is",
"not",
"None",
":",
"logger",
".",
"info",
"(",
"\"Waiting for %s on %s [%s]\"",
"%",
"(",
"job",
".",
"uid",
",",
"job",
".",
"site",
",",
"_date2h",
"(",
"scheduled",
")",
")",
")",
"all_running",
"=",
"all_running",
"and",
"job",
".",
"state",
"==",
"\"running\"",
"if",
"job",
".",
"state",
"==",
"\"error\"",
":",
"raise",
"Exception",
"(",
"\"The job %s is in error state\"",
"%",
"job",
")",
"logger",
".",
"info",
"(",
"\"All jobs are Running !\"",
")"
] |
Waits for all the jobs to be running.
Args:
jobs(list): list of the python-grid5000 jobs to wait for
Raises:
Exception: if one of the jobs gets into the error state.
|
[
"Waits",
"for",
"all",
"the",
"jobs",
"to",
"be",
"runnning",
"."
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/g5k_api_utils.py#L195-L220
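A sketch of the submit-then-wait flow combining submit_jobs and wait_for_jobs; the job spec fields are assumptions (see the Grid'5000 API for the real schema):

specs = [("rennes", {"name": "my_experiment",
                     "resources": "nodes=2,walltime=01:00:00"})]
jobs = submit_jobs(specs)
wait_for_jobs(jobs)   # polls every 5 s until every job reports "running"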
|
BeyondTheClouds/enoslib
|
enoslib/infra/enos_g5k/g5k_api_utils.py
|
grid_deploy
|
def grid_deploy(site, nodes, options):
"""Deploy and wait for the deployment to be finished.
Args:
site(str): the site
nodes(list): list of nodes (str) to deploy
options(dict): option of the deployment (refer to the Grid'5000 API
Specifications)
Returns:
tuple of deployed(list), undeployed(list) nodes.
"""
gk = get_api_client()
environment = options.pop("env_name")
options.update(environment=environment)
options.update(nodes=nodes)
key_path = DEFAULT_SSH_KEYFILE
options.update(key=key_path.read_text())
logger.info("Deploying %s with options %s" % (nodes, options))
deployment = gk.sites[site].deployments.create(options)
while deployment.status not in ["terminated", "error"]:
deployment.refresh()
print("Waiting for the end of deployment [%s]" % deployment.uid)
time.sleep(10)
deploy = []
undeploy = []
if deployment.status == "terminated":
deploy = [node for node, v in deployment.result.items()
if v["state"] == "OK"]
undeploy = [node for node, v in deployment.result.items()
if v["state"] == "KO"]
elif deployment.status == "error":
undeploy = nodes
return deploy, undeploy
|
python
|
def grid_deploy(site, nodes, options):
"""Deploy and wait for the deployment to be finished.
Args:
site(str): the site
nodes(list): list of nodes (str) to deploy
options(dict): option of the deployment (refer to the Grid'5000 API
Specifications)
Returns:
tuple of deployed(list), undeployed(list) nodes.
"""
gk = get_api_client()
environment = options.pop("env_name")
options.update(environment=environment)
options.update(nodes=nodes)
key_path = DEFAULT_SSH_KEYFILE
options.update(key=key_path.read_text())
logger.info("Deploying %s with options %s" % (nodes, options))
deployment = gk.sites[site].deployments.create(options)
while deployment.status not in ["terminated", "error"]:
deployment.refresh()
print("Waiting for the end of deployment [%s]" % deployment.uid)
time.sleep(10)
deploy = []
undeploy = []
if deployment.status == "terminated":
deploy = [node for node, v in deployment.result.items()
if v["state"] == "OK"]
undeploy = [node for node, v in deployment.result.items()
if v["state"] == "KO"]
elif deployment.status == "error":
undeploy = nodes
return deploy, undeploy
|
[
"def",
"grid_deploy",
"(",
"site",
",",
"nodes",
",",
"options",
")",
":",
"gk",
"=",
"get_api_client",
"(",
")",
"environment",
"=",
"options",
".",
"pop",
"(",
"\"env_name\"",
")",
"options",
".",
"update",
"(",
"environment",
"=",
"environment",
")",
"options",
".",
"update",
"(",
"nodes",
"=",
"nodes",
")",
"key_path",
"=",
"DEFAULT_SSH_KEYFILE",
"options",
".",
"update",
"(",
"key",
"=",
"key_path",
".",
"read_text",
"(",
")",
")",
"logger",
".",
"info",
"(",
"\"Deploying %s with options %s\"",
"%",
"(",
"nodes",
",",
"options",
")",
")",
"deployment",
"=",
"gk",
".",
"sites",
"[",
"site",
"]",
".",
"deployments",
".",
"create",
"(",
"options",
")",
"while",
"deployment",
".",
"status",
"not",
"in",
"[",
"\"terminated\"",
",",
"\"error\"",
"]",
":",
"deployment",
".",
"refresh",
"(",
")",
"print",
"(",
"\"Waiting for the end of deployment [%s]\"",
"%",
"deployment",
".",
"uid",
")",
"time",
".",
"sleep",
"(",
"10",
")",
"deploy",
"=",
"[",
"]",
"undeploy",
"=",
"[",
"]",
"if",
"deployment",
".",
"status",
"==",
"\"terminated\"",
":",
"deploy",
"=",
"[",
"node",
"for",
"node",
",",
"v",
"in",
"deployment",
".",
"result",
".",
"items",
"(",
")",
"if",
"v",
"[",
"\"state\"",
"]",
"==",
"\"OK\"",
"]",
"undeploy",
"=",
"[",
"node",
"for",
"node",
",",
"v",
"in",
"deployment",
".",
"result",
".",
"items",
"(",
")",
"if",
"v",
"[",
"\"state\"",
"]",
"==",
"\"KO\"",
"]",
"elif",
"deployment",
".",
"status",
"==",
"\"error\"",
":",
"undeploy",
"=",
"nodes",
"return",
"deploy",
",",
"undeploy"
] |
Deploy and wait for the deployment to be finished.
Args:
site(str): the site
nodes(list): list of nodes (str) to deploy
options(dict): option of the deployment (refer to the Grid'5000 API
Specifications)
Returns:
tuple of deployed(list), undeployed(list) nodes.
|
[
"Deploy",
"and",
"wait",
"for",
"the",
"deployment",
"to",
"be",
"finished",
"."
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/g5k_api_utils.py#L223-L257
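A hypothetical grid_deploy call; the node name and the environment name are assumptions:

deployed, undeployed = grid_deploy(
    "rennes",
    ["paravance-1.rennes.grid5000.fr"],   # hypothetical node
    {"env_name": "debian10-x64-base"},    # popped and re-sent as "environment"
)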
|
BeyondTheClouds/enoslib
|
enoslib/infra/enos_g5k/g5k_api_utils.py
|
set_nodes_vlan
|
def set_nodes_vlan(site, nodes, interface, vlan_id):
"""Set the interface of the nodes in a specific vlan.
It is assumed that the same interface name is available on the node.
Args:
site(str): site to consider
nodes(list): nodes to consider
interface(str): the network interface to put in the vlan
vlan_id(str): the id of the vlan
"""
def _to_network_address(host):
"""Translate a host to a network address
e.g:
paranoia-20.rennes.grid5000.fr -> paranoia-20-eth2.rennes.grid5000.fr
"""
splitted = host.split('.')
splitted[0] = splitted[0] + "-" + interface
return ".".join(splitted)
gk = get_api_client()
network_addresses = [_to_network_address(n) for n in nodes]
gk.sites[site].vlans[str(vlan_id)].submit({"nodes": network_addresses})
|
python
|
def set_nodes_vlan(site, nodes, interface, vlan_id):
"""Set the interface of the nodes in a specific vlan.
It is assumed that the same interface name is available on the node.
Args:
site(str): site to consider
nodes(list): nodes to consider
interface(str): the network interface to put in the vlan
vlan_id(str): the id of the vlan
"""
def _to_network_address(host):
"""Translate a host to a network address
e.g:
paranoia-20.rennes.grid5000.fr -> paranoia-20-eth2.rennes.grid5000.fr
"""
splitted = host.split('.')
splitted[0] = splitted[0] + "-" + interface
return ".".join(splitted)
gk = get_api_client()
network_addresses = [_to_network_address(n) for n in nodes]
gk.sites[site].vlans[str(vlan_id)].submit({"nodes": network_addresses})
|
[
"def",
"set_nodes_vlan",
"(",
"site",
",",
"nodes",
",",
"interface",
",",
"vlan_id",
")",
":",
"def",
"_to_network_address",
"(",
"host",
")",
":",
"\"\"\"Translate a host to a network address\n e.g:\n paranoia-20.rennes.grid5000.fr -> paranoia-20-eth2.rennes.grid5000.fr\n \"\"\"",
"splitted",
"=",
"host",
".",
"split",
"(",
"'.'",
")",
"splitted",
"[",
"0",
"]",
"=",
"splitted",
"[",
"0",
"]",
"+",
"\"-\"",
"+",
"interface",
"return",
"\".\"",
".",
"join",
"(",
"splitted",
")",
"gk",
"=",
"get_api_client",
"(",
")",
"network_addresses",
"=",
"[",
"_to_network_address",
"(",
"n",
")",
"for",
"n",
"in",
"nodes",
"]",
"gk",
".",
"sites",
"[",
"site",
"]",
".",
"vlans",
"[",
"str",
"(",
"vlan_id",
")",
"]",
".",
"submit",
"(",
"{",
"\"nodes\"",
":",
"network_addresses",
"}",
")"
] |
Set the interface of the nodes in a specific vlan.
It is assumed that the same interface name is available on the node.
Args:
site(str): site to consider
nodes(list): nodes to consider
interface(str): the network interface to put in the vlan
vlan_id(str): the id of the vlan
|
[
"Set",
"the",
"interface",
"of",
"the",
"nodes",
"in",
"a",
"specific",
"vlan",
"."
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/g5k_api_utils.py#L260-L282
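The hostname rewriting performed by the inner _to_network_address, using the host from the docstring example (the vlan id is an assumption):

# "paranoia-20.rennes.grid5000.fr" with interface "eth2"
#     -> "paranoia-20-eth2.rennes.grid5000.fr"
set_nodes_vlan("rennes", ["paranoia-20.rennes.grid5000.fr"], "eth2", 4)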
|
BeyondTheClouds/enoslib
|
enoslib/infra/enos_g5k/g5k_api_utils.py
|
clusters_sites_obj
|
def clusters_sites_obj(clusters):
"""Get all the corresponding sites of the passed clusters.
Args:
clusters(list): list of cluster uid strings
Return:
dict corresponding to the mapping cluster uid to python-grid5000 site
"""
result = {}
all_clusters = get_all_clusters_sites()
clusters_sites = {c: s for (c, s) in all_clusters.items()
if c in clusters}
for cluster, site in clusters_sites.items():
# here we want the site python-grid5000 site object
result.update({cluster: get_site_obj(site)})
return result
|
python
|
def clusters_sites_obj(clusters):
"""Get all the corresponding sites of the passed clusters.
Args:
        clusters(list): list of string cluster uids (e.g 'paravance')
Return:
dict corresponding to the mapping cluster uid to python-grid5000 site
"""
result = {}
all_clusters = get_all_clusters_sites()
clusters_sites = {c: s for (c, s) in all_clusters.items()
if c in clusters}
for cluster, site in clusters_sites.items():
# here we want the site python-grid5000 site object
result.update({cluster: get_site_obj(site)})
return result
|
[
"def",
"clusters_sites_obj",
"(",
"clusters",
")",
":",
"result",
"=",
"{",
"}",
"all_clusters",
"=",
"get_all_clusters_sites",
"(",
")",
"clusters_sites",
"=",
"{",
"c",
":",
"s",
"for",
"(",
"c",
",",
"s",
")",
"in",
"all_clusters",
".",
"items",
"(",
")",
"if",
"c",
"in",
"clusters",
"}",
"for",
"cluster",
",",
"site",
"in",
"clusters_sites",
".",
"items",
"(",
")",
":",
"# here we want the site python-grid5000 site object",
"result",
".",
"update",
"(",
"{",
"cluster",
":",
"get_site_obj",
"(",
"site",
")",
"}",
")",
"return",
"result"
] |
Get all the corresponding sites of the passed clusters.
Args:
clusters(list): list of string cluster uids (e.g 'paravance')
Return:
dict corresponding to the mapping cluster uid to python-grid5000 site
|
[
"Get",
"all",
"the",
"corresponding",
"sites",
"of",
"the",
"passed",
"clusters",
"."
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/g5k_api_utils.py#L309-L326
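The filtering step of clusters_sites_obj can be exercised without touching the API; a minimal sketch with placeholder cluster/site names:

# Keep only the requested clusters from the full cluster -> site mapping.
all_clusters = {"paravance": "rennes", "dahu": "grenoble", "gros": "nancy"}
wanted = ["paravance", "gros"]
clusters_sites = {c: s for (c, s) in all_clusters.items() if c in wanted}
assert clusters_sites == {"paravance": "rennes", "gros": "nancy"}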
|
BeyondTheClouds/enoslib
|
enoslib/infra/enos_g5k/g5k_api_utils.py
|
get_all_clusters_sites
|
def get_all_clusters_sites():
"""Get all the cluster of all the sites.
Returns:
dict corresponding to the mapping cluster uid to python-grid5000 site
"""
result = {}
gk = get_api_client()
sites = gk.sites.list()
for site in sites:
clusters = site.clusters.list()
result.update({c.uid: site.uid for c in clusters})
return result
|
python
|
def get_all_clusters_sites():
"""Get all the cluster of all the sites.
Returns:
dict corresponding to the mapping cluster uid to python-grid5000 site
"""
result = {}
gk = get_api_client()
sites = gk.sites.list()
for site in sites:
clusters = site.clusters.list()
result.update({c.uid: site.uid for c in clusters})
return result
|
[
"def",
"get_all_clusters_sites",
"(",
")",
":",
"result",
"=",
"{",
"}",
"gk",
"=",
"get_api_client",
"(",
")",
"sites",
"=",
"gk",
".",
"sites",
".",
"list",
"(",
")",
"for",
"site",
"in",
"sites",
":",
"clusters",
"=",
"site",
".",
"clusters",
".",
"list",
"(",
")",
"result",
".",
"update",
"(",
"{",
"c",
".",
"uid",
":",
"site",
".",
"uid",
"for",
"c",
"in",
"clusters",
"}",
")",
"return",
"result"
] |
Get all the cluster of all the sites.
Returns:
dict corresponding to the mapping cluster uid to python-grid5000 site
|
[
"Get",
"all",
"the",
"cluster",
"of",
"all",
"the",
"sites",
"."
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/g5k_api_utils.py#L330-L342
|
BeyondTheClouds/enoslib
|
enoslib/infra/enos_g5k/g5k_api_utils.py
|
get_nodes
|
def get_nodes(cluster):
"""Get all the nodes of a given cluster.
Args:
        cluster(string): uid of the cluster (e.g 'paravance')
"""
gk = get_api_client()
site = get_cluster_site(cluster)
return gk.sites[site].clusters[cluster].nodes.list()
|
python
|
def get_nodes(cluster):
"""Get all the nodes of a given cluster.
Args:
        cluster(string): uid of the cluster (e.g 'paravance')
"""
gk = get_api_client()
site = get_cluster_site(cluster)
return gk.sites[site].clusters[cluster].nodes.list()
|
[
"def",
"get_nodes",
"(",
"cluster",
")",
":",
"gk",
"=",
"get_api_client",
"(",
")",
"site",
"=",
"get_cluster_site",
"(",
"cluster",
")",
"return",
"gk",
".",
"sites",
"[",
"site",
"]",
".",
"clusters",
"[",
"cluster",
"]",
".",
"nodes",
".",
"list",
"(",
")"
] |
Get all the nodes of a given cluster.
Args:
cluster(string): uid of the cluster (e.g 'paravance')
|
[
"Get",
"all",
"the",
"nodes",
"of",
"a",
"given",
"cluster",
"."
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/g5k_api_utils.py#L372-L380
|
BeyondTheClouds/enoslib
|
enoslib/infra/enos_g5k/g5k_api_utils.py
|
get_cluster_interfaces
|
def get_cluster_interfaces(cluster, extra_cond=lambda nic: True):
"""Get the network interfaces names corresponding to a criteria.
    Note that the cluster is passed (not the individual node names), thus it is
    assumed that all nodes in a cluster have the same interface names and
    configuration. In addition to ``extra_cond``, only the mountable and
    Ethernet interfaces are returned.
Args:
cluster(str): the cluster to consider
extra_cond(lambda): boolean lambda that takes the nic(dict) as
parameter
"""
nics = get_nics(cluster)
# NOTE(msimonin): Since 05/18 nics on g5k nodes have predictable names but
# the api description keep the legacy name (device key) and the new
# predictable name (key name). The legacy names is still used for api
# request to the vlan endpoint This should be fixed in
# https://intranet.grid5000.fr/bugzilla/show_bug.cgi?id=9272
# When its fixed we should be able to only use the new predictable name.
nics = [(nic['device'], nic['name']) for nic in nics
if nic['mountable']
and nic['interface'] == 'Ethernet'
and not nic['management']
and extra_cond(nic)]
nics = sorted(nics)
return nics
|
python
|
def get_cluster_interfaces(cluster, extra_cond=lambda nic: True):
"""Get the network interfaces names corresponding to a criteria.
    Note that the cluster is passed (not the individual node names), thus it is
    assumed that all nodes in a cluster have the same interface names and
    configuration. In addition to ``extra_cond``, only the mountable and
    Ethernet interfaces are returned.
Args:
cluster(str): the cluster to consider
extra_cond(lambda): boolean lambda that takes the nic(dict) as
parameter
"""
nics = get_nics(cluster)
# NOTE(msimonin): Since 05/18 nics on g5k nodes have predictable names but
# the api description keep the legacy name (device key) and the new
# predictable name (key name). The legacy names is still used for api
# request to the vlan endpoint This should be fixed in
# https://intranet.grid5000.fr/bugzilla/show_bug.cgi?id=9272
# When its fixed we should be able to only use the new predictable name.
nics = [(nic['device'], nic['name']) for nic in nics
if nic['mountable']
and nic['interface'] == 'Ethernet'
and not nic['management']
and extra_cond(nic)]
nics = sorted(nics)
return nics
|
[
"def",
"get_cluster_interfaces",
"(",
"cluster",
",",
"extra_cond",
"=",
"lambda",
"nic",
":",
"True",
")",
":",
"nics",
"=",
"get_nics",
"(",
"cluster",
")",
"# NOTE(msimonin): Since 05/18 nics on g5k nodes have predictable names but",
"# the api description keep the legacy name (device key) and the new",
"# predictable name (key name). The legacy names is still used for api",
"# request to the vlan endpoint This should be fixed in",
"# https://intranet.grid5000.fr/bugzilla/show_bug.cgi?id=9272",
"# When its fixed we should be able to only use the new predictable name.",
"nics",
"=",
"[",
"(",
"nic",
"[",
"'device'",
"]",
",",
"nic",
"[",
"'name'",
"]",
")",
"for",
"nic",
"in",
"nics",
"if",
"nic",
"[",
"'mountable'",
"]",
"and",
"nic",
"[",
"'interface'",
"]",
"==",
"'Ethernet'",
"and",
"not",
"nic",
"[",
"'management'",
"]",
"and",
"extra_cond",
"(",
"nic",
")",
"]",
"nics",
"=",
"sorted",
"(",
"nics",
")",
"return",
"nics"
] |
Get the network interfaces names corresponding to a criteria.
Note that the cluster is passed (not the individual node names), thus it is
assumed that all nodes in a cluster have the same interface names and
configuration. In addition to ``extra_cond``, only the mountable and
Ethernet interfaces are returned.
Args:
cluster(str): the cluster to consider
extra_cond(lambda): boolean lambda that takes the nic(dict) as
parameter
|
[
"Get",
"the",
"network",
"interfaces",
"names",
"corresponding",
"to",
"a",
"criteria",
"."
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/g5k_api_utils.py#L397-L423
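A hedged usage sketch of the extra_cond hook: the predicate composes with the built-in mountable/Ethernet/non-management filters. It assumes a configured API client, and the "rate" key (link speed in bits per second) is an assumption about the API payload:

# Hypothetical: keep only 10 Gbit/s NICs in addition to the default filters.
fast_nics = get_cluster_interfaces(
    "paravance",
    extra_cond=lambda nic: nic.get("rate") == 10_000_000_000)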
|
BeyondTheClouds/enoslib
|
enoslib/infra/enos_g5k/g5k_api_utils.py
|
get_clusters_interfaces
|
def get_clusters_interfaces(clusters, extra_cond=lambda nic: True):
""" Returns for each cluster the available cluster interfaces
Args:
clusters (str): list of the clusters
    extra_cond (lambda): extra predicate to filter network cards retrieved
from the API. E.g lambda nic: not nic['mounted'] will retrieve all the
usable network cards that are not mounted by default.
Returns:
dict of cluster with their associated nic names
Examples:
.. code-block:: python
# pseudo code
actual = get_clusters_interfaces(["paravance"])
expected = {"paravance": ["eth0", "eth1"]}
assertDictEquals(expected, actual)
"""
interfaces = {}
for cluster in clusters:
nics = get_cluster_interfaces(cluster, extra_cond=extra_cond)
interfaces.setdefault(cluster, nics)
return interfaces
|
python
|
def get_clusters_interfaces(clusters, extra_cond=lambda nic: True):
""" Returns for each cluster the available cluster interfaces
Args:
clusters (str): list of the clusters
    extra_cond (lambda): extra predicate to filter network cards retrieved
from the API. E.g lambda nic: not nic['mounted'] will retrieve all the
usable network cards that are not mounted by default.
Returns:
dict of cluster with their associated nic names
Examples:
.. code-block:: python
# pseudo code
actual = get_clusters_interfaces(["paravance"])
expected = {"paravance": ["eth0", "eth1"]}
assertDictEquals(expected, actual)
"""
interfaces = {}
for cluster in clusters:
nics = get_cluster_interfaces(cluster, extra_cond=extra_cond)
interfaces.setdefault(cluster, nics)
return interfaces
|
[
"def",
"get_clusters_interfaces",
"(",
"clusters",
",",
"extra_cond",
"=",
"lambda",
"nic",
":",
"True",
")",
":",
"interfaces",
"=",
"{",
"}",
"for",
"cluster",
"in",
"clusters",
":",
"nics",
"=",
"get_cluster_interfaces",
"(",
"cluster",
",",
"extra_cond",
"=",
"extra_cond",
")",
"interfaces",
".",
"setdefault",
"(",
"cluster",
",",
"nics",
")",
"return",
"interfaces"
] |
Returns for each cluster the available cluster interfaces
Args:
clusters (str): list of the clusters
extra_cond (lambda): extra predicate to filter network cards retrieved
from the API. E.g lambda nic: not nic['mounted'] will retrieve all the
usable network cards that are not mounted by default.
Returns:
dict of cluster with their associated nic names
Examples:
.. code-block:: python
# pseudo code
actual = get_clusters_interfaces(["paravance"])
expected = {"paravance": ["eth0", "eth1"]}
assertDictEquals(expected, actual)
|
[
"Returns",
"for",
"each",
"cluster",
"the",
"available",
"cluster",
"interfaces"
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/g5k_api_utils.py#L426-L452
|
BeyondTheClouds/enoslib
|
enoslib/infra/enos_g5k/g5k_api_utils.py
|
can_start_on_cluster
|
def can_start_on_cluster(nodes_status,
nodes,
start,
walltime):
"""Check if #nodes can be started on a given cluster.
This is intended to give a good enough approximation.
    This can be used to prefilter possible reservation dates before submitting
    them to oar.
"""
candidates = []
for node, status in nodes_status.items():
reservations = status.get("reservations", [])
# we search for the overlapping reservations
overlapping_reservations = []
for reservation in reservations:
queue = reservation.get("queue")
if queue == "besteffort":
# ignoring any besteffort reservation
continue
r_start = reservation.get("started_at",
reservation.get("scheduled_at"))
if r_start is None:
break
r_start = int(r_start)
r_end = r_start + int(reservation["walltime"])
# compute segment intersection
_intersect = min(r_end, start + walltime) - max(r_start, start)
if _intersect > 0:
overlapping_reservations.append(reservation)
if len(overlapping_reservations) == 0:
# this node can be accounted for a potential reservation
candidates.append(node)
if len(candidates) >= nodes:
return True
return False
|
python
|
def can_start_on_cluster(nodes_status,
nodes,
start,
walltime):
"""Check if #nodes can be started on a given cluster.
This is intended to give a good enough approximation.
    This can be used to prefilter possible reservation dates before submitting
    them to oar.
"""
candidates = []
for node, status in nodes_status.items():
reservations = status.get("reservations", [])
# we search for the overlapping reservations
overlapping_reservations = []
for reservation in reservations:
queue = reservation.get("queue")
if queue == "besteffort":
# ignoring any besteffort reservation
continue
r_start = reservation.get("started_at",
reservation.get("scheduled_at"))
if r_start is None:
break
r_start = int(r_start)
r_end = r_start + int(reservation["walltime"])
# compute segment intersection
_intersect = min(r_end, start + walltime) - max(r_start, start)
if _intersect > 0:
overlapping_reservations.append(reservation)
if len(overlapping_reservations) == 0:
# this node can be accounted for a potential reservation
candidates.append(node)
if len(candidates) >= nodes:
return True
return False
|
[
"def",
"can_start_on_cluster",
"(",
"nodes_status",
",",
"nodes",
",",
"start",
",",
"walltime",
")",
":",
"candidates",
"=",
"[",
"]",
"for",
"node",
",",
"status",
"in",
"nodes_status",
".",
"items",
"(",
")",
":",
"reservations",
"=",
"status",
".",
"get",
"(",
"\"reservations\"",
",",
"[",
"]",
")",
"# we search for the overlapping reservations",
"overlapping_reservations",
"=",
"[",
"]",
"for",
"reservation",
"in",
"reservations",
":",
"queue",
"=",
"reservation",
".",
"get",
"(",
"\"queue\"",
")",
"if",
"queue",
"==",
"\"besteffort\"",
":",
"# ignoring any besteffort reservation",
"continue",
"r_start",
"=",
"reservation",
".",
"get",
"(",
"\"started_at\"",
",",
"reservation",
".",
"get",
"(",
"\"scheduled_at\"",
")",
")",
"if",
"r_start",
"is",
"None",
":",
"break",
"r_start",
"=",
"int",
"(",
"r_start",
")",
"r_end",
"=",
"r_start",
"+",
"int",
"(",
"reservation",
"[",
"\"walltime\"",
"]",
")",
"# compute segment intersection",
"_intersect",
"=",
"min",
"(",
"r_end",
",",
"start",
"+",
"walltime",
")",
"-",
"max",
"(",
"r_start",
",",
"start",
")",
"if",
"_intersect",
">",
"0",
":",
"overlapping_reservations",
".",
"append",
"(",
"reservation",
")",
"if",
"len",
"(",
"overlapping_reservations",
")",
"==",
"0",
":",
"# this node can be accounted for a potential reservation",
"candidates",
".",
"append",
"(",
"node",
")",
"if",
"len",
"(",
"candidates",
")",
">=",
"nodes",
":",
"return",
"True",
"return",
"False"
] |
Check if #nodes can be started on a given cluster.
This is intended to give a good enough approximation.
This can be used to prefilter possible reservation dates before submitting
them to oar.
|
[
"Check",
"if",
"#nodes",
"can",
"be",
"started",
"on",
"a",
"given",
"cluster",
"."
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/g5k_api_utils.py#L455-L490
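The availability test boils down to a classic interval-overlap check: two reservations collide iff the minimum of their end times minus the maximum of their start times is positive. A self-contained sketch of that predicate:

def overlaps(r_start, r_walltime, start, walltime):
    # Positive intersection length means the reservations collide.
    r_end = r_start + r_walltime
    return min(r_end, start + walltime) - max(r_start, start) > 0

assert overlaps(0, 100, 50, 100)      # partial overlap
assert not overlaps(0, 100, 100, 50)  # back-to-back reservations do not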
|
BeyondTheClouds/enoslib
|
enoslib/infra/enos_g5k/g5k_api_utils.py
|
_do_synchronise_jobs
|
def _do_synchronise_jobs(walltime, machines):
""" This returns a common reservation date for all the jobs.
This reservation date is really only a hint and will be supplied to each
oar server. Without this *common* reservation_date, one oar server can
    decide to postpone the start of the job while the others are already
    running. But this doesn't prevent the start of a job on one site from
    drifting (e.g. because the machines need to be restarted), though this
    shouldn't exceed a few minutes.
"""
offset = SYNCHRONISATION_OFFSET
start = time.time() + offset
_t = time.strptime(walltime, "%H:%M:%S")
_walltime = _t.tm_hour * 3600 + _t.tm_min * 60 + _t.tm_sec
# Compute the demand for each cluster
demands = defaultdict(int)
for machine in machines:
cluster = machine["cluster"]
demands[cluster] += machine["nodes"]
# Early leave if only one cluster is there
if len(list(demands.keys())) <= 1:
logger.debug("Only one cluster detected: no synchronisation needed")
return None
clusters = clusters_sites_obj(list(demands.keys()))
# Early leave if only one site is concerned
sites = set(list(clusters.values()))
if len(sites) <= 1:
logger.debug("Only one site detected: no synchronisation needed")
return None
# Test the proposed reservation_date
ok = True
for cluster, nodes in demands.items():
cluster_status = clusters[cluster].status.list()
ok = ok and can_start_on_cluster(cluster_status.nodes,
nodes,
start,
_walltime)
if not ok:
break
if ok:
# The proposed reservation_date fits
logger.info("Reservation_date=%s (%s)" % (_date2h(start), sites))
return start
if start is None:
raise EnosG5kSynchronisationError(sites)
|
python
|
def _do_synchronise_jobs(walltime, machines):
""" This returns a common reservation date for all the jobs.
This reservation date is really only a hint and will be supplied to each
oar server. Without this *common* reservation_date, one oar server can
    decide to postpone the start of the job while the others are already
    running. But this doesn't prevent the start of a job on one site from
    drifting (e.g. because the machines need to be restarted), though this
    shouldn't exceed a few minutes.
"""
offset = SYNCHRONISATION_OFFSET
start = time.time() + offset
_t = time.strptime(walltime, "%H:%M:%S")
_walltime = _t.tm_hour * 3600 + _t.tm_min * 60 + _t.tm_sec
# Compute the demand for each cluster
demands = defaultdict(int)
for machine in machines:
cluster = machine["cluster"]
demands[cluster] += machine["nodes"]
# Early leave if only one cluster is there
if len(list(demands.keys())) <= 1:
logger.debug("Only one cluster detected: no synchronisation needed")
return None
clusters = clusters_sites_obj(list(demands.keys()))
# Early leave if only one site is concerned
sites = set(list(clusters.values()))
if len(sites) <= 1:
logger.debug("Only one site detected: no synchronisation needed")
return None
# Test the proposed reservation_date
ok = True
for cluster, nodes in demands.items():
cluster_status = clusters[cluster].status.list()
ok = ok and can_start_on_cluster(cluster_status.nodes,
nodes,
start,
_walltime)
if not ok:
break
if ok:
# The proposed reservation_date fits
logger.info("Reservation_date=%s (%s)" % (_date2h(start), sites))
return start
if start is None:
raise EnosG5kSynchronisationError(sites)
|
[
"def",
"_do_synchronise_jobs",
"(",
"walltime",
",",
"machines",
")",
":",
"offset",
"=",
"SYNCHRONISATION_OFFSET",
"start",
"=",
"time",
".",
"time",
"(",
")",
"+",
"offset",
"_t",
"=",
"time",
".",
"strptime",
"(",
"walltime",
",",
"\"%H:%M:%S\"",
")",
"_walltime",
"=",
"_t",
".",
"tm_hour",
"*",
"3600",
"+",
"_t",
".",
"tm_min",
"*",
"60",
"+",
"_t",
".",
"tm_sec",
"# Compute the demand for each cluster",
"demands",
"=",
"defaultdict",
"(",
"int",
")",
"for",
"machine",
"in",
"machines",
":",
"cluster",
"=",
"machine",
"[",
"\"cluster\"",
"]",
"demands",
"[",
"cluster",
"]",
"+=",
"machine",
"[",
"\"nodes\"",
"]",
"# Early leave if only one cluster is there",
"if",
"len",
"(",
"list",
"(",
"demands",
".",
"keys",
"(",
")",
")",
")",
"<=",
"1",
":",
"logger",
".",
"debug",
"(",
"\"Only one cluster detected: no synchronisation needed\"",
")",
"return",
"None",
"clusters",
"=",
"clusters_sites_obj",
"(",
"list",
"(",
"demands",
".",
"keys",
"(",
")",
")",
")",
"# Early leave if only one site is concerned",
"sites",
"=",
"set",
"(",
"list",
"(",
"clusters",
".",
"values",
"(",
")",
")",
")",
"if",
"len",
"(",
"sites",
")",
"<=",
"1",
":",
"logger",
".",
"debug",
"(",
"\"Only one site detected: no synchronisation needed\"",
")",
"return",
"None",
"# Test the proposed reservation_date",
"ok",
"=",
"True",
"for",
"cluster",
",",
"nodes",
"in",
"demands",
".",
"items",
"(",
")",
":",
"cluster_status",
"=",
"clusters",
"[",
"cluster",
"]",
".",
"status",
".",
"list",
"(",
")",
"ok",
"=",
"ok",
"and",
"can_start_on_cluster",
"(",
"cluster_status",
".",
"nodes",
",",
"nodes",
",",
"start",
",",
"_walltime",
")",
"if",
"not",
"ok",
":",
"break",
"if",
"ok",
":",
"# The proposed reservation_date fits",
"logger",
".",
"info",
"(",
"\"Reservation_date=%s (%s)\"",
"%",
"(",
"_date2h",
"(",
"start",
")",
",",
"sites",
")",
")",
"return",
"start",
"if",
"start",
"is",
"None",
":",
"raise",
"EnosG5kSynchronisationError",
"(",
"sites",
")"
] |
This returns a common reservation date for all the jobs.
This reservation date is really only a hint and will be supplied to each
oar server. Without this *common* reservation_date, one oar server can
decide to postpone the start of the job while the others are already
running. But this doesn't prevent the start of a job on one site from
drifting (e.g. because the machines need to be restarted), though this
shouldn't exceed a few minutes.
|
[
"This",
"returns",
"a",
"common",
"reservation",
"date",
"for",
"all",
"the",
"jobs",
"."
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/g5k_api_utils.py#L493-L543
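The walltime conversion used above is worth isolating: the 'HH:MM:SS' string is parsed with time.strptime and turned into seconds. A minimal sketch; note that %H only accepts 00-23, so walltimes of a day or more would raise ValueError with this approach:

import time

def walltime_to_seconds(walltime):
    # "02:30:00" -> 9000 seconds
    t = time.strptime(walltime, "%H:%M:%S")
    return t.tm_hour * 3600 + t.tm_min * 60 + t.tm_sec

assert walltime_to_seconds("02:30:00") == 9000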
|
openmicroanalysis/pyxray
|
pyxray/sql/command.py
|
chunked
|
def chunked(iterable, n):
"""Break an iterable into lists of a given length::
>>> list(chunked([1, 2, 3, 4, 5, 6, 7], 3))
[[1, 2, 3], [4, 5, 6], [7]]
If the length of ``iterable`` is not evenly divisible by ``n``, the last
returned list will be shorter.
This is useful for splitting up a computation on a large number of keys
into batches, to be pickled and sent off to worker processes. One example
is operations on rows in MySQL, which does not implement server-side
cursors properly and would otherwise load the entire dataset into RAM on
the client.
Taken from more_itertools
"""
return iter(functools.partial(take, n, iter(iterable)), [])
|
python
|
def chunked(iterable, n):
"""Break an iterable into lists of a given length::
>>> list(chunked([1, 2, 3, 4, 5, 6, 7], 3))
[[1, 2, 3], [4, 5, 6], [7]]
If the length of ``iterable`` is not evenly divisible by ``n``, the last
returned list will be shorter.
This is useful for splitting up a computation on a large number of keys
into batches, to be pickled and sent off to worker processes. One example
is operations on rows in MySQL, which does not implement server-side
cursors properly and would otherwise load the entire dataset into RAM on
the client.
Taken from more_itertools
"""
return iter(functools.partial(take, n, iter(iterable)), [])
|
[
"def",
"chunked",
"(",
"iterable",
",",
"n",
")",
":",
"return",
"iter",
"(",
"functools",
".",
"partial",
"(",
"take",
",",
"n",
",",
"iter",
"(",
"iterable",
")",
")",
",",
"[",
"]",
")"
] |
Break an iterable into lists of a given length::
>>> list(chunked([1, 2, 3, 4, 5, 6, 7], 3))
[[1, 2, 3], [4, 5, 6], [7]]
If the length of ``iterable`` is not evenly divisible by ``n``, the last
returned list will be shorter.
This is useful for splitting up a computation on a large number of keys
into batches, to be pickled and sent off to worker processes. One example
is operations on rows in MySQL, which does not implement server-side
cursors properly and would otherwise load the entire dataset into RAM on
the client.
Taken from more_itertools
|
[
"Break",
"an",
"iterable",
"into",
"lists",
"of",
"a",
"given",
"length",
"::"
] |
train
|
https://github.com/openmicroanalysis/pyxray/blob/cae89677f00ebcc0952f94d1ab70e6b35e1a51e9/pyxray/sql/command.py#L31-L49
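The recipe relies on the two-argument form of iter(callable, sentinel): the partial is called repeatedly until it returns the empty-list sentinel. A self-contained version including the take helper it assumes (the classic itertools recipe):

import functools
from itertools import islice

def take(n, iterable):
    # First n items of the iterable as a list.
    return list(islice(iterable, n))

def chunked(iterable, n):
    return iter(functools.partial(take, n, iter(iterable)), [])

assert list(chunked([1, 2, 3, 4, 5, 6, 7], 3)) == [[1, 2, 3], [4, 5, 6], [7]]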
|
volafiled/python-volapi
|
volapi/handler.py
|
Handler.add_data
|
def add_data(self, rawdata):
"""Add data to given room's state"""
for data in rawdata:
try:
item = data[0]
if item[0] == 2:
# Flush messages but we got nothing to flush
continue
if item[0] != 0:
warnings.warn(f"Unknown message type '{item[0]}'", Warning)
continue
item = item[1]
# convert target to string because error codes are ints
target = str(item[0])
try:
data = item[1]
except IndexError:
data = dict()
try:
method = getattr(self, self.__head + target)
method(data)
except AttributeError:
self._handle_unhandled(target, data)
except IndexError:
LOGGER.warning("Wrongly constructed message received: %r", data)
self.conn.process_queues()
|
python
|
def add_data(self, rawdata):
"""Add data to given room's state"""
for data in rawdata:
try:
item = data[0]
if item[0] == 2:
# Flush messages but we got nothing to flush
continue
if item[0] != 0:
warnings.warn(f"Unknown message type '{item[0]}'", Warning)
continue
item = item[1]
# convert target to string because error codes are ints
target = str(item[0])
try:
data = item[1]
except IndexError:
data = dict()
try:
method = getattr(self, self.__head + target)
method(data)
except AttributeError:
self._handle_unhandled(target, data)
except IndexError:
LOGGER.warning("Wrongly constructed message received: %r", data)
self.conn.process_queues()
|
[
"def",
"add_data",
"(",
"self",
",",
"rawdata",
")",
":",
"for",
"data",
"in",
"rawdata",
":",
"try",
":",
"item",
"=",
"data",
"[",
"0",
"]",
"if",
"item",
"[",
"0",
"]",
"==",
"2",
":",
"# Flush messages but we got nothing to flush",
"continue",
"if",
"item",
"[",
"0",
"]",
"!=",
"0",
":",
"warnings",
".",
"warn",
"(",
"f\"Unknown message type '{item[0]}'\"",
",",
"Warning",
")",
"continue",
"item",
"=",
"item",
"[",
"1",
"]",
"# convert target to string because error codes are ints",
"target",
"=",
"str",
"(",
"item",
"[",
"0",
"]",
")",
"try",
":",
"data",
"=",
"item",
"[",
"1",
"]",
"except",
"IndexError",
":",
"data",
"=",
"dict",
"(",
")",
"try",
":",
"method",
"=",
"getattr",
"(",
"self",
",",
"self",
".",
"__head",
"+",
"target",
")",
"method",
"(",
"data",
")",
"except",
"AttributeError",
":",
"self",
".",
"_handle_unhandled",
"(",
"target",
",",
"data",
")",
"except",
"IndexError",
":",
"LOGGER",
".",
"warning",
"(",
"\"Wrongly constructed message received: %r\"",
",",
"data",
")",
"self",
".",
"conn",
".",
"process_queues",
"(",
")"
] |
Add data to given room's state
|
[
"Add",
"data",
"to",
"given",
"room",
"s",
"state"
] |
train
|
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/handler.py#L34-L61
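add_data assumes frames of the shape [[msg_type, [target, payload]]], with msg_type 0 for data and 2 for a flush marker, and dispatches on getattr. A toy dispatcher built on the same convention (the frame contents are invented for illustration):

class ToyHandler:
    _head = "_handle_"

    def _handle_chat(self, payload):
        print("chat:", payload)

    def dispatch(self, rawdata):
        for frame in rawdata:
            msg_type, body = frame[0][0], frame[0][1]
            if msg_type != 0:
                continue  # flush markers and unknown types are skipped here
            target = str(body[0])
            payload = body[1] if len(body) > 1 else {}
            getattr(self, self._head + target)(payload)

ToyHandler().dispatch([[[0, ["chat", {"nick": "x", "message": "hi"}]]]])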
|
volafiled/python-volapi
|
volapi/handler.py
|
Handler.register_callback
|
def register_callback(self):
"""Register callback that we will have to wait for"""
cid = str(self.__cid)
self.__cid += 1
event = queue.Queue()
self.__callbacks[cid] = event
return cid, event
|
python
|
def register_callback(self):
"""Register callback that we will have to wait for"""
cid = str(self.__cid)
self.__cid += 1
event = queue.Queue()
self.__callbacks[cid] = event
return cid, event
|
[
"def",
"register_callback",
"(",
"self",
")",
":",
"cid",
"=",
"str",
"(",
"self",
".",
"__cid",
")",
"self",
".",
"__cid",
"+=",
"1",
"event",
"=",
"queue",
".",
"Queue",
"(",
")",
"self",
".",
"__callbacks",
"[",
"cid",
"]",
"=",
"event",
"return",
"cid",
",",
"event"
] |
Register callback that we will have to wait for
|
[
"Register",
"callback",
"that",
"we",
"will",
"have",
"to",
"wait",
"for"
] |
train
|
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/handler.py#L63-L70
|
volafiled/python-volapi
|
volapi/handler.py
|
Handler._handle_401
|
def _handle_401(self, data):
"""Handle Lain being helpful"""
ex = ConnectionError(
"Can't login to a protected room without a proper password", data
)
self.conn.reraise(ex)
|
python
|
def _handle_401(self, data):
"""Handle Lain being helpful"""
ex = ConnectionError(
"Can't login to a protected room without a proper password", data
)
self.conn.reraise(ex)
|
[
"def",
"_handle_401",
"(",
"self",
",",
"data",
")",
":",
"ex",
"=",
"ConnectionError",
"(",
"\"Can't login to a protected room without a proper password\"",
",",
"data",
")",
"self",
".",
"conn",
".",
"reraise",
"(",
"ex",
")"
] |
Handle Lain being helpful
|
[
"Handle",
"Lain",
"being",
"helpful"
] |
train
|
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/handler.py#L83-L89
|
volafiled/python-volapi
|
volapi/handler.py
|
Handler._handle_429
|
def _handle_429(self, data):
"""Handle Lain being helpful"""
ex = IOError("Too fast", data)
self.conn.reraise(ex)
|
python
|
def _handle_429(self, data):
"""Handle Lain being helpful"""
ex = IOError("Too fast", data)
self.conn.reraise(ex)
|
[
"def",
"_handle_429",
"(",
"self",
",",
"data",
")",
":",
"ex",
"=",
"IOError",
"(",
"\"Too fast\"",
",",
"data",
")",
"self",
".",
"conn",
".",
"reraise",
"(",
"ex",
")"
] |
Handle Lain being helpful
|
[
"Handle",
"Lain",
"being",
"helpful"
] |
train
|
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/handler.py#L91-L95
|
volafiled/python-volapi
|
volapi/handler.py
|
Handler._handle_callback
|
def _handle_callback(self, data):
"""Handle lain's callback. Only used with getFileinfo so far"""
cb_id = data.get("id")
args = data.get("args")
event = self.__callbacks.pop(cb_id, None)
if not event:
return
if not args:
event.put(args)
return
err, info = args
if err is None:
event.put(info)
else:
LOGGER.warning("Callback returned error of %s", str(err))
event.put(err)
|
python
|
def _handle_callback(self, data):
"""Handle lain's callback. Only used with getFileinfo so far"""
cb_id = data.get("id")
args = data.get("args")
event = self.__callbacks.pop(cb_id, None)
if not event:
return
if not args:
event.put(args)
return
err, info = args
if err is None:
event.put(info)
else:
LOGGER.warning("Callback returned error of %s", str(err))
event.put(err)
|
[
"def",
"_handle_callback",
"(",
"self",
",",
"data",
")",
":",
"cb_id",
"=",
"data",
".",
"get",
"(",
"\"id\"",
")",
"args",
"=",
"data",
".",
"get",
"(",
"\"args\"",
")",
"event",
"=",
"self",
".",
"__callbacks",
".",
"pop",
"(",
"cb_id",
",",
"None",
")",
"if",
"not",
"event",
":",
"return",
"if",
"not",
"args",
":",
"event",
".",
"put",
"(",
"args",
")",
"return",
"err",
",",
"info",
"=",
"args",
"if",
"err",
"is",
"None",
":",
"event",
".",
"put",
"(",
"info",
")",
"else",
":",
"LOGGER",
".",
"warning",
"(",
"\"Callback returned error of %s\"",
",",
"str",
"(",
"err",
")",
")",
"event",
".",
"put",
"(",
"err",
")"
] |
Handle lain's callback. Only used with getFileinfo so far
|
[
"Handle",
"lain",
"s",
"callback",
".",
"Only",
"used",
"with",
"getFileinfo",
"so",
"far"
] |
train
|
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/handler.py#L97-L113
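register_callback and _handle_callback form a small request/response rendezvous: the caller parks on a Queue keyed by callback id, and the handler pops that queue and delivers either the payload or the error. A stripped-down sketch of the pattern:

import queue

callbacks = {}

def register_callback(cid):
    ev = queue.Queue()
    callbacks[cid] = ev
    return ev

def deliver(cid, err, info):
    # Pop so each callback fires at most once.
    ev = callbacks.pop(cid, None)
    if ev is not None:
        ev.put(info if err is None else err)

ev = register_callback("1")
deliver("1", None, {"size": 42})
assert ev.get(timeout=1) == {"size": 42}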
|
volafiled/python-volapi
|
volapi/handler.py
|
Handler._handle_userCount
|
def _handle_userCount(self, data):
"""Handle user count changes"""
self.room.user_count = data
self.conn.enqueue_data("user_count", self.room.user_count)
|
python
|
def _handle_userCount(self, data):
"""Handle user count changes"""
self.room.user_count = data
self.conn.enqueue_data("user_count", self.room.user_count)
|
[
"def",
"_handle_userCount",
"(",
"self",
",",
"data",
")",
":",
"self",
".",
"room",
".",
"user_count",
"=",
"data",
"self",
".",
"conn",
".",
"enqueue_data",
"(",
"\"user_count\"",
",",
"self",
".",
"room",
".",
"user_count",
")"
] |
Handle user count changes
|
[
"Handle",
"user",
"count",
"changes"
] |
train
|
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/handler.py#L115-L119
|
volafiled/python-volapi
|
volapi/handler.py
|
Handler._handle_userInfo
|
def _handle_userInfo(self, data):
"""Handle user information"""
for k, v in data.items():
if k == "nick":
if v == "None":
v = "Volaphile"
setattr(self.room.user, k, v)
self.conn.enqueue_data(k, self.room.user.nick)
elif k != "profile":
if not hasattr(self.room, k):
warnings.warn(f"Skipping unset property {k}", ResourceWarning)
continue
setattr(self.room, k, v)
self.conn.enqueue_data(k, getattr(self.room, k))
self.room.user_info = k, v
self.conn.enqueue_data("user_info", self.room.user_info)
|
python
|
def _handle_userInfo(self, data):
"""Handle user information"""
for k, v in data.items():
if k == "nick":
if v == "None":
v = "Volaphile"
setattr(self.room.user, k, v)
self.conn.enqueue_data(k, self.room.user.nick)
elif k != "profile":
if not hasattr(self.room, k):
warnings.warn(f"Skipping unset property {k}", ResourceWarning)
continue
setattr(self.room, k, v)
self.conn.enqueue_data(k, getattr(self.room, k))
self.room.user_info = k, v
self.conn.enqueue_data("user_info", self.room.user_info)
|
[
"def",
"_handle_userInfo",
"(",
"self",
",",
"data",
")",
":",
"for",
"k",
",",
"v",
"in",
"data",
".",
"items",
"(",
")",
":",
"if",
"k",
"==",
"\"nick\"",
":",
"if",
"v",
"==",
"\"None\"",
":",
"v",
"=",
"\"Volaphile\"",
"setattr",
"(",
"self",
".",
"room",
".",
"user",
",",
"k",
",",
"v",
")",
"self",
".",
"conn",
".",
"enqueue_data",
"(",
"k",
",",
"self",
".",
"room",
".",
"user",
".",
"nick",
")",
"elif",
"k",
"!=",
"\"profile\"",
":",
"if",
"not",
"hasattr",
"(",
"self",
".",
"room",
",",
"k",
")",
":",
"warnings",
".",
"warn",
"(",
"f\"Skipping unset property {k}\"",
",",
"ResourceWarning",
")",
"continue",
"setattr",
"(",
"self",
".",
"room",
",",
"k",
",",
"v",
")",
"self",
".",
"conn",
".",
"enqueue_data",
"(",
"k",
",",
"getattr",
"(",
"self",
".",
"room",
",",
"k",
")",
")",
"self",
".",
"room",
".",
"user_info",
"=",
"k",
",",
"v",
"self",
".",
"conn",
".",
"enqueue_data",
"(",
"\"user_info\"",
",",
"self",
".",
"room",
".",
"user_info",
")"
] |
Handle user information
|
[
"Handle",
"user",
"information"
] |
train
|
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/handler.py#L121-L137
|
volafiled/python-volapi
|
volapi/handler.py
|
Handler._handle_key
|
def _handle_key(self, data):
"""Handle keys"""
self.room.key = data
self.conn.enqueue_data("key", self.room.key)
|
python
|
def _handle_key(self, data):
"""Handle keys"""
self.room.key = data
self.conn.enqueue_data("key", self.room.key)
|
[
"def",
"_handle_key",
"(",
"self",
",",
"data",
")",
":",
"self",
".",
"room",
".",
"key",
"=",
"data",
"self",
".",
"conn",
".",
"enqueue_data",
"(",
"\"key\"",
",",
"self",
".",
"room",
".",
"key",
")"
] |
Handle keys
|
[
"Handle",
"keys"
] |
train
|
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/handler.py#L139-L143
|
volafiled/python-volapi
|
volapi/handler.py
|
Handler._handle_config
|
def _handle_config(self, data):
"""Handle initial config push and config changes"""
self.room.config.update(data)
self.conn.enqueue_data("config", data)
|
python
|
def _handle_config(self, data):
"""Handle initial config push and config changes"""
self.room.config.update(data)
self.conn.enqueue_data("config", data)
|
[
"def",
"_handle_config",
"(",
"self",
",",
"data",
")",
":",
"self",
".",
"room",
".",
"config",
".",
"update",
"(",
"data",
")",
"self",
".",
"conn",
".",
"enqueue_data",
"(",
"\"config\"",
",",
"data",
")"
] |
Handle initial config push and config changes
|
[
"Handle",
"initial",
"config",
"push",
"and",
"config",
"changes"
] |
train
|
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/handler.py#L145-L149
|
volafiled/python-volapi
|
volapi/handler.py
|
Handler._handle_files
|
def _handle_files(self, data):
"""Handle new files being uploaded"""
initial = data.get("set", False)
files = data["files"]
for f in files:
try:
fobj = File(
self.room,
self.conn,
f[0],
f[1],
type=f[2],
size=f[3],
expire_time=int(f[4]) / 1000,
uploader=f[6].get("nick") or f[6].get("user"),
)
self.room.filedict = fobj.fid, fobj
if not initial:
self.conn.enqueue_data("file", fobj)
except Exception:
import pprint
LOGGER.exception("bad file")
pprint.pprint(f)
if initial:
self.conn.enqueue_data("initial_files", self.room.files)
|
python
|
def _handle_files(self, data):
"""Handle new files being uploaded"""
initial = data.get("set", False)
files = data["files"]
for f in files:
try:
fobj = File(
self.room,
self.conn,
f[0],
f[1],
type=f[2],
size=f[3],
expire_time=int(f[4]) / 1000,
uploader=f[6].get("nick") or f[6].get("user"),
)
self.room.filedict = fobj.fid, fobj
if not initial:
self.conn.enqueue_data("file", fobj)
except Exception:
import pprint
LOGGER.exception("bad file")
pprint.pprint(f)
if initial:
self.conn.enqueue_data("initial_files", self.room.files)
|
[
"def",
"_handle_files",
"(",
"self",
",",
"data",
")",
":",
"initial",
"=",
"data",
".",
"get",
"(",
"\"set\"",
",",
"False",
")",
"files",
"=",
"data",
"[",
"\"files\"",
"]",
"for",
"f",
"in",
"files",
":",
"try",
":",
"fobj",
"=",
"File",
"(",
"self",
".",
"room",
",",
"self",
".",
"conn",
",",
"f",
"[",
"0",
"]",
",",
"f",
"[",
"1",
"]",
",",
"type",
"=",
"f",
"[",
"2",
"]",
",",
"size",
"=",
"f",
"[",
"3",
"]",
",",
"expire_time",
"=",
"int",
"(",
"f",
"[",
"4",
"]",
")",
"/",
"1000",
",",
"uploader",
"=",
"f",
"[",
"6",
"]",
".",
"get",
"(",
"\"nick\"",
")",
"or",
"f",
"[",
"6",
"]",
".",
"get",
"(",
"\"user\"",
")",
",",
")",
"self",
".",
"room",
".",
"filedict",
"=",
"fobj",
".",
"fid",
",",
"fobj",
"if",
"not",
"initial",
":",
"self",
".",
"conn",
".",
"enqueue_data",
"(",
"\"file\"",
",",
"fobj",
")",
"except",
"Exception",
":",
"import",
"pprint",
"LOGGER",
".",
"exception",
"(",
"\"bad file\"",
")",
"pprint",
".",
"pprint",
"(",
"f",
")",
"if",
"initial",
":",
"self",
".",
"conn",
".",
"enqueue_data",
"(",
"\"initial_files\"",
",",
"self",
".",
"room",
".",
"files",
")"
] |
Handle new files being uploaded
|
[
"Handle",
"new",
"files",
"being",
"uploaded"
] |
train
|
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/handler.py#L151-L177
|
volafiled/python-volapi
|
volapi/handler.py
|
Handler._handle_delete_file
|
def _handle_delete_file(self, data):
"""Handle files being removed"""
file = self.room.filedict.get(data)
if file:
self.room.filedict = data, None
self.conn.enqueue_data("delete_file", file)
|
python
|
def _handle_delete_file(self, data):
"""Handle files being removed"""
file = self.room.filedict.get(data)
if file:
self.room.filedict = data, None
self.conn.enqueue_data("delete_file", file)
|
[
"def",
"_handle_delete_file",
"(",
"self",
",",
"data",
")",
":",
"file",
"=",
"self",
".",
"room",
".",
"filedict",
".",
"get",
"(",
"data",
")",
"if",
"file",
":",
"self",
".",
"room",
".",
"filedict",
"=",
"data",
",",
"None",
"self",
".",
"conn",
".",
"enqueue_data",
"(",
"\"delete_file\"",
",",
"file",
")"
] |
Handle files being removed
|
[
"Handle",
"files",
"being",
"removed"
] |
train
|
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/handler.py#L179-L185
|
volafiled/python-volapi
|
volapi/handler.py
|
Handler._handle_chat
|
def _handle_chat(self, data):
"""Handle chat messages"""
self.conn.enqueue_data(
"chat", ChatMessage.from_data(self.room, self.conn, data)
)
|
python
|
def _handle_chat(self, data):
"""Handle chat messages"""
self.conn.enqueue_data(
"chat", ChatMessage.from_data(self.room, self.conn, data)
)
|
[
"def",
"_handle_chat",
"(",
"self",
",",
"data",
")",
":",
"self",
".",
"conn",
".",
"enqueue_data",
"(",
"\"chat\"",
",",
"ChatMessage",
".",
"from_data",
"(",
"self",
".",
"room",
",",
"self",
".",
"conn",
",",
"data",
")",
")"
] |
Handle chat messages
|
[
"Handle",
"chat",
"messages"
] |
train
|
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/handler.py#L187-L192
|
volafiled/python-volapi
|
volapi/handler.py
|
Handler._handle_changed_config
|
def _handle_changed_config(self, change):
"""Handle configuration changes"""
key, value = change.get("key"), change.get("value")
self.room.config.update({key: value})
self.conn.enqueue_data("config", self.room.config)
|
python
|
def _handle_changed_config(self, change):
"""Handle configuration changes"""
key, value = change.get("key"), change.get("value")
self.room.config.update({key: value})
self.conn.enqueue_data("config", self.room.config)
|
[
"def",
"_handle_changed_config",
"(",
"self",
",",
"change",
")",
":",
"key",
",",
"value",
"=",
"change",
".",
"get",
"(",
"\"key\"",
")",
",",
"change",
".",
"get",
"(",
"\"value\"",
")",
"self",
".",
"room",
".",
"config",
".",
"update",
"(",
"{",
"key",
":",
"value",
"}",
")",
"self",
".",
"conn",
".",
"enqueue_data",
"(",
"\"config\"",
",",
"self",
".",
"room",
".",
"config",
")"
] |
Handle configuration changes
|
[
"Handle",
"configuration",
"changes"
] |
train
|
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/handler.py#L194-L199
|
volafiled/python-volapi
|
volapi/handler.py
|
Handler._handle_chat_name
|
def _handle_chat_name(self, data):
"""Handle user name changes"""
self.room.user.nick = data
self.conn.enqueue_data("user", self.room.user)
|
python
|
def _handle_chat_name(self, data):
"""Handle user name changes"""
self.room.user.nick = data
self.conn.enqueue_data("user", self.room.user)
|
[
"def",
"_handle_chat_name",
"(",
"self",
",",
"data",
")",
":",
"self",
".",
"room",
".",
"user",
".",
"nick",
"=",
"data",
"self",
".",
"conn",
".",
"enqueue_data",
"(",
"\"user\"",
",",
"self",
".",
"room",
".",
"user",
")"
] |
Handle user name changes
|
[
"Handle",
"user",
"name",
"changes"
] |
train
|
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/handler.py#L201-L205
|
guaix-ucm/pyemir
|
emirdrp/processing/wavecal/islitlet_progress.py
|
islitlet_progress
|
def islitlet_progress(islitlet, islitlet_max):
"""Auxiliary function to print out progress in loop of slitlets.
Parameters
----------
islitlet : int
Current slitlet number.
islitlet_max : int
Maximum slitlet number.
"""
if islitlet % 10 == 0:
cout = str(islitlet // 10)
else:
cout = '.'
sys.stdout.write(cout)
if islitlet == islitlet_max:
sys.stdout.write('\n')
sys.stdout.flush()
|
python
|
def islitlet_progress(islitlet, islitlet_max):
"""Auxiliary function to print out progress in loop of slitlets.
Parameters
----------
islitlet : int
Current slitlet number.
islitlet_max : int
Maximum slitlet number.
"""
if islitlet % 10 == 0:
cout = str(islitlet // 10)
else:
cout = '.'
sys.stdout.write(cout)
if islitlet == islitlet_max:
sys.stdout.write('\n')
sys.stdout.flush()
|
[
"def",
"islitlet_progress",
"(",
"islitlet",
",",
"islitlet_max",
")",
":",
"if",
"islitlet",
"%",
"10",
"==",
"0",
":",
"cout",
"=",
"str",
"(",
"islitlet",
"//",
"10",
")",
"else",
":",
"cout",
"=",
"'.'",
"sys",
".",
"stdout",
".",
"write",
"(",
"cout",
")",
"if",
"islitlet",
"==",
"islitlet_max",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"'\\n'",
")",
"sys",
".",
"stdout",
".",
"flush",
"(",
")"
] |
Auxiliary function to print out progress in loop of slitlets.
Parameters
----------
islitlet : int
Current slitlet number.
islitlet_max : int
Maximum slitlet number.
|
[
"Auxiliary",
"function",
"to",
"print",
"out",
"progress",
"in",
"loop",
"of",
"slitlets",
"."
] |
train
|
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/wavecal/islitlet_progress.py#L27-L45
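A typical call site wraps a slitlet loop (assuming the function above is in scope); every multiple of ten prints its tens digit and the other iterations print a dot:

for islitlet in range(1, 56):
    islitlet_progress(islitlet, 55)
# -> .........1.........2.........3.........4.........5.....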
|
guaix-ucm/pyemir
|
emirdrp/core/correctors.py
|
get_corrector_f
|
def get_corrector_f(rinput, meta, ins, datamodel):
"""Corrector for intensity flat"""
from emirdrp.processing.flatfield import FlatFieldCorrector
flat_info = meta['master_flat']
with rinput.master_flat.open() as hdul:
_logger.info('loading intensity flat')
_logger.debug('flat info: %s', flat_info)
mflat = hdul[0].data
    # Check NaN and zeros
mask1 = mflat < 0
mask2 = ~numpy.isfinite(mflat)
if numpy.any(mask1):
_logger.warning('flat has %d values below 0', mask1.sum())
if numpy.any(mask2):
_logger.warning('flat has %d NaN', mask2.sum())
flat_corrector = FlatFieldCorrector(mflat,
datamodel=datamodel,
calibid=datamodel.get_imgid(hdul))
return flat_corrector
|
python
|
def get_corrector_f(rinput, meta, ins, datamodel):
"""Corrector for intensity flat"""
from emirdrp.processing.flatfield import FlatFieldCorrector
flat_info = meta['master_flat']
with rinput.master_flat.open() as hdul:
_logger.info('loading intensity flat')
_logger.debug('flat info: %s', flat_info)
mflat = hdul[0].data
    # Check NaN and zeros
mask1 = mflat < 0
mask2 = ~numpy.isfinite(mflat)
if numpy.any(mask1):
_logger.warning('flat has %d values below 0', mask1.sum())
if numpy.any(mask2):
_logger.warning('flat has %d NaN', mask2.sum())
flat_corrector = FlatFieldCorrector(mflat,
datamodel=datamodel,
calibid=datamodel.get_imgid(hdul))
return flat_corrector
|
[
"def",
"get_corrector_f",
"(",
"rinput",
",",
"meta",
",",
"ins",
",",
"datamodel",
")",
":",
"from",
"emirdrp",
".",
"processing",
".",
"flatfield",
"import",
"FlatFieldCorrector",
"flat_info",
"=",
"meta",
"[",
"'master_flat'",
"]",
"with",
"rinput",
".",
"master_flat",
".",
"open",
"(",
")",
"as",
"hdul",
":",
"_logger",
".",
"info",
"(",
"'loading intensity flat'",
")",
"_logger",
".",
"debug",
"(",
"'flat info: %s'",
",",
"flat_info",
")",
"mflat",
"=",
"hdul",
"[",
"0",
"]",
".",
"data",
"# Check NaN and Ceros",
"mask1",
"=",
"mflat",
"<",
"0",
"mask2",
"=",
"~",
"numpy",
".",
"isfinite",
"(",
"mflat",
")",
"if",
"numpy",
".",
"any",
"(",
"mask1",
")",
":",
"_logger",
".",
"warning",
"(",
"'flat has %d values below 0'",
",",
"mask1",
".",
"sum",
"(",
")",
")",
"if",
"numpy",
".",
"any",
"(",
"mask2",
")",
":",
"_logger",
".",
"warning",
"(",
"'flat has %d NaN'",
",",
"mask2",
".",
"sum",
"(",
")",
")",
"flat_corrector",
"=",
"FlatFieldCorrector",
"(",
"mflat",
",",
"datamodel",
"=",
"datamodel",
",",
"calibid",
"=",
"datamodel",
".",
"get_imgid",
"(",
"hdul",
")",
")",
"return",
"flat_corrector"
] |
Corrector for intensity flat
|
[
"Corrector",
"for",
"intensity",
"flat"
] |
train
|
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/core/correctors.py#L94-L113
|
guaix-ucm/pyemir
|
emirdrp/decorators.py
|
loginfo
|
def loginfo(method):
"""Log the contents of Recipe Input"""
def loginfo_method(self, rinput):
klass = rinput.__class__
for key in klass.stored():
val = getattr(rinput, key)
if isinstance(val, DataFrame):
self.logger.debug("DataFrame %s", info.gather_info_dframe(val))
elif isinstance(val, ObservationResult):
for f in val.images:
self.logger.debug("OB DataFrame %s" , info.gather_info_dframe(f))
else:
pass
result = method(self, rinput)
return result
return loginfo_method
|
python
|
def loginfo(method):
"""Log the contents of Recipe Input"""
def loginfo_method(self, rinput):
klass = rinput.__class__
for key in klass.stored():
val = getattr(rinput, key)
if isinstance(val, DataFrame):
self.logger.debug("DataFrame %s", info.gather_info_dframe(val))
elif isinstance(val, ObservationResult):
for f in val.images:
self.logger.debug("OB DataFrame %s" , info.gather_info_dframe(f))
else:
pass
result = method(self, rinput)
return result
return loginfo_method
|
[
"def",
"loginfo",
"(",
"method",
")",
":",
"def",
"loginfo_method",
"(",
"self",
",",
"rinput",
")",
":",
"klass",
"=",
"rinput",
".",
"__class__",
"for",
"key",
"in",
"klass",
".",
"stored",
"(",
")",
":",
"val",
"=",
"getattr",
"(",
"rinput",
",",
"key",
")",
"if",
"isinstance",
"(",
"val",
",",
"DataFrame",
")",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"\"DataFrame %s\"",
",",
"info",
".",
"gather_info_dframe",
"(",
"val",
")",
")",
"elif",
"isinstance",
"(",
"val",
",",
"ObservationResult",
")",
":",
"for",
"f",
"in",
"val",
".",
"images",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"\"OB DataFrame %s\"",
",",
"info",
".",
"gather_info_dframe",
"(",
"f",
")",
")",
"else",
":",
"pass",
"result",
"=",
"method",
"(",
"self",
",",
"rinput",
")",
"return",
"result",
"return",
"loginfo_method"
] |
Log the contents of Recipe Input
|
[
"Log",
"the",
"contents",
"of",
"Recipe",
"Input"
] |
train
|
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/decorators.py#L28-L50
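Stripped of the numina-specific DataFrame/ObservationResult handling, the decorator shape is just a wrapper that inspects the argument before delegating; a generic sketch:

import functools
import logging

def log_input(method):
    """Log the recipe input before calling the wrapped method."""
    @functools.wraps(method)
    def wrapper(self, rinput):
        logging.getLogger(__name__).debug("recipe input: %r", rinput)
        return method(self, rinput)
    return wrapper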
|
BeyondTheClouds/enoslib
|
enoslib/host.py
|
Host.to_host
|
def to_host(self):
"""Copy or coerce to a Host."""
return Host(self.address,
alias=self.alias,
user=self.user,
keyfile=self.keyfile,
port=self.port,
extra=self.extra)
|
python
|
def to_host(self):
"""Copy or coerce to a Host."""
return Host(self.address,
alias=self.alias,
user=self.user,
keyfile=self.keyfile,
port=self.port,
extra=self.extra)
|
[
"def",
"to_host",
"(",
"self",
")",
":",
"return",
"Host",
"(",
"self",
".",
"address",
",",
"alias",
"=",
"self",
".",
"alias",
",",
"user",
"=",
"self",
".",
"user",
",",
"keyfile",
"=",
"self",
".",
"keyfile",
",",
"port",
"=",
"self",
".",
"port",
",",
"extra",
"=",
"self",
".",
"extra",
")"
] |
Copy or coerce to a Host.
|
[
"Copy",
"or",
"coerce",
"to",
"a",
"Host",
"."
] |
train
|
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/host.py#L27-L34
|
Jaymon/prom
|
prom/cli/generate.py
|
get_table_info
|
def get_table_info(*table_names):
"""Returns a dict with table_name keys mapped to the Interface that table exists in
:param *table_names: the tables you are searching for
"""
ret = {}
if table_names:
for table_name in table_names:
for name, inter in get_interfaces().items():
if inter.has_table(table_name):
yield table_name, inter, inter.get_fields(table_name)
else:
for name, inter in get_interfaces().items():
table_names = inter.get_tables()
for table_name in table_names:
yield table_name, inter, inter.get_fields(table_name)
|
python
|
def get_table_info(*table_names):
"""Returns a dict with table_name keys mapped to the Interface that table exists in
:param *table_names: the tables you are searching for
"""
ret = {}
if table_names:
for table_name in table_names:
for name, inter in get_interfaces().items():
if inter.has_table(table_name):
yield table_name, inter, inter.get_fields(table_name)
else:
for name, inter in get_interfaces().items():
table_names = inter.get_tables()
for table_name in table_names:
yield table_name, inter, inter.get_fields(table_name)
|
[
"def",
"get_table_info",
"(",
"*",
"table_names",
")",
":",
"ret",
"=",
"{",
"}",
"if",
"table_names",
":",
"for",
"table_name",
"in",
"table_names",
":",
"for",
"name",
",",
"inter",
"in",
"get_interfaces",
"(",
")",
".",
"items",
"(",
")",
":",
"if",
"inter",
".",
"has_table",
"(",
"table_name",
")",
":",
"yield",
"table_name",
",",
"inter",
",",
"inter",
".",
"get_fields",
"(",
"table_name",
")",
"else",
":",
"for",
"name",
",",
"inter",
"in",
"get_interfaces",
"(",
")",
".",
"items",
"(",
")",
":",
"table_names",
"=",
"inter",
".",
"get_tables",
"(",
")",
"for",
"table_name",
"in",
"table_names",
":",
"yield",
"table_name",
",",
"inter",
",",
"inter",
".",
"get_fields",
"(",
"table_name",
")"
] |
Returns a dict with table_name keys mapped to the Interface that table exists in
:param *table_names: the tables you are searching for
|
[
"Returns",
"a",
"dict",
"with",
"table_name",
"keys",
"mapped",
"to",
"the",
"Interface",
"that",
"table",
"exists",
"in"
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/cli/generate.py#L12-L28
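Note that despite the docstring's "returns a dict" wording, get_table_info is a generator yielding (table_name, interface, fields) tuples, so callers iterate it directly. A hedged usage sketch (assumes prom connections are configured; the table names are illustrative):

for table_name, inter, fields in get_table_info("user", "order"):
    print(table_name, "->", sorted(fields))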
|
Jaymon/prom
|
prom/cli/generate.py
|
main_generate
|
def main_generate(table_names, stream):
"""This will print out valid prom python code for given tables that already exist
in a database.
This is really handy when you want to bootstrap an existing database to work
with prom and don't want to manually create Orm objects for the tables you want
to use; let `generate` do it for you
"""
with stream.open() as fp:
fp.write_line("from datetime import datetime, date")
fp.write_line("from decimal import Decimal")
fp.write_line("from prom import Orm, Field")
fp.write_newlines()
for table_name, inter, fields in get_table_info(*table_names):
fp.write_line("class {}(Orm):".format(table_name.title().replace("_", "")))
fp.write_line(" table_name = '{}'".format(table_name))
if inter.connection_config.name:
fp.write_line(" connection_name = '{}'".format(inter.connection_config.name))
fp.write_newlines()
magic_field_names = set(["_id", "_created", "_updated"])
if "_id" in fields:
fp.write_line(get_field_def("_id", fields.pop("_id")))
magic_field_names.discard("_id")
for field_name, field_d in fields.items():
fp.write_line(get_field_def(field_name, field_d))
for magic_field_name in magic_field_names:
if magic_field_name not in fields:
fp.write_line(" {} = None".format(magic_field_name))
fp.write_newlines(2)
|
python
|
def main_generate(table_names, stream):
"""This will print out valid prom python code for given tables that already exist
in a database.
This is really handy when you want to bootstrap an existing database to work
with prom and don't want to manually create Orm objects for the tables you want
to use; let `generate` do it for you
"""
with stream.open() as fp:
fp.write_line("from datetime import datetime, date")
fp.write_line("from decimal import Decimal")
fp.write_line("from prom import Orm, Field")
fp.write_newlines()
for table_name, inter, fields in get_table_info(*table_names):
fp.write_line("class {}(Orm):".format(table_name.title().replace("_", "")))
fp.write_line(" table_name = '{}'".format(table_name))
if inter.connection_config.name:
fp.write_line(" connection_name = '{}'".format(inter.connection_config.name))
fp.write_newlines()
magic_field_names = set(["_id", "_created", "_updated"])
if "_id" in fields:
fp.write_line(get_field_def("_id", fields.pop("_id")))
magic_field_names.discard("_id")
for field_name, field_d in fields.items():
fp.write_line(get_field_def(field_name, field_d))
for magic_field_name in magic_field_names:
if magic_field_name not in fields:
fp.write_line(" {} = None".format(magic_field_name))
fp.write_newlines(2)
|
[
"def",
"main_generate",
"(",
"table_names",
",",
"stream",
")",
":",
"with",
"stream",
".",
"open",
"(",
")",
"as",
"fp",
":",
"fp",
".",
"write_line",
"(",
"\"from datetime import datetime, date\"",
")",
"fp",
".",
"write_line",
"(",
"\"from decimal import Decimal\"",
")",
"fp",
".",
"write_line",
"(",
"\"from prom import Orm, Field\"",
")",
"fp",
".",
"write_newlines",
"(",
")",
"for",
"table_name",
",",
"inter",
",",
"fields",
"in",
"get_table_info",
"(",
"*",
"table_names",
")",
":",
"fp",
".",
"write_line",
"(",
"\"class {}(Orm):\"",
".",
"format",
"(",
"table_name",
".",
"title",
"(",
")",
".",
"replace",
"(",
"\"_\"",
",",
"\"\"",
")",
")",
")",
"fp",
".",
"write_line",
"(",
"\" table_name = '{}'\"",
".",
"format",
"(",
"table_name",
")",
")",
"if",
"inter",
".",
"connection_config",
".",
"name",
":",
"fp",
".",
"write_line",
"(",
"\" connection_name = '{}'\"",
".",
"format",
"(",
"inter",
".",
"connection_config",
".",
"name",
")",
")",
"fp",
".",
"write_newlines",
"(",
")",
"magic_field_names",
"=",
"set",
"(",
"[",
"\"_id\"",
",",
"\"_created\"",
",",
"\"_updated\"",
"]",
")",
"if",
"\"_id\"",
"in",
"fields",
":",
"fp",
".",
"write_line",
"(",
"get_field_def",
"(",
"\"_id\"",
",",
"fields",
".",
"pop",
"(",
"\"_id\"",
")",
")",
")",
"magic_field_names",
".",
"discard",
"(",
"\"_id\"",
")",
"for",
"field_name",
",",
"field_d",
"in",
"fields",
".",
"items",
"(",
")",
":",
"fp",
".",
"write_line",
"(",
"get_field_def",
"(",
"field_name",
",",
"field_d",
")",
")",
"for",
"magic_field_name",
"in",
"magic_field_names",
":",
"if",
"magic_field_name",
"not",
"in",
"fields",
":",
"fp",
".",
"write_line",
"(",
"\" {} = None\"",
".",
"format",
"(",
"magic_field_name",
")",
")",
"fp",
".",
"write_newlines",
"(",
"2",
")"
] |
This will print out valid prom python code for given tables that already exist
in a database.
This is really handy when you want to bootstrap an existing database to work
with prom and don't want to manually create Orm objects for the tables you want
to use; let `generate` do it for you
|
[
"This",
"will",
"print",
"out",
"valid",
"prom",
"python",
"code",
"for",
"given",
"tables",
"that",
"already",
"exist",
"in",
"a",
"database",
"."
] |
train
|
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/cli/generate.py#L50-L84
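For a table exposing only the magic columns, the emitted source would look roughly like this; the class name comes from table_name.title().replace("_", ""), and the order of the trailing magic fields may vary since they come from a set (an illustrative reconstruction, not captured output):

from datetime import datetime, date
from decimal import Decimal
from prom import Orm, Field

class FooBar(Orm):
    table_name = 'foo_bar'

    _id = None
    _created = None
    _updated = None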
|
guaix-ucm/pyemir
|
emirdrp/instrument/distortions.py
|
exvp
|
def exvp(pos_x, pos_y):
"""Convert virtual pixel to real pixel"""
pos_x = numpy.asarray(pos_x)
pos_y = numpy.asarray(pos_y)
# convert virtual pixel to real pixel
# convert world coordinate to pixel
center = [1024.5, 1024.5]
cf = EMIR_PLATESCALE_RADS
pos_base_x = pos_x - center[0]
pos_base_y = pos_y - center[1]
ra = numpy.hypot(pos_base_x, pos_base_y)
thet = numpy.arctan2(pos_base_y, pos_base_x)
r = cf * ra
rr1 = 1 + 14606.7 * r**2 + 1739716115.1 * r**4
nx1 = rr1 * ra * numpy.cos(thet) + center[0]
ny1 = rr1 * ra * numpy.sin(thet) + center[1]
return nx1, ny1
|
python
|
def exvp(pos_x, pos_y):
"""Convert virtual pixel to real pixel"""
pos_x = numpy.asarray(pos_x)
pos_y = numpy.asarray(pos_y)
# convert virtual pixel to real pixel
# convert world coordinate to pixel
center = [1024.5, 1024.5]
cf = EMIR_PLATESCALE_RADS
pos_base_x = pos_x - center[0]
pos_base_y = pos_y - center[1]
ra = numpy.hypot(pos_base_x, pos_base_y)
thet = numpy.arctan2(pos_base_y, pos_base_x)
r = cf * ra
rr1 = 1 + 14606.7 * r**2 + 1739716115.1 * r**4
nx1 = rr1 * ra * numpy.cos(thet) + center[0]
ny1 = rr1 * ra * numpy.sin(thet) + center[1]
return nx1, ny1
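A quick numerical check of the radial polynomial above. The real EMIR_PLATESCALE_RADS is defined elsewhere in emirdrp, so the placeholder value here (roughly 0.1945 arcsec/pixel converted to radians) is an assumption made only so the snippet runs standalone.

import numpy

# Assumed placeholder; the real constant is provided by emirdrp.
EMIR_PLATESCALE_RADS = 0.1945 / 3600.0 * numpy.pi / 180.0

# At the optical center ra == 0, so the correction vanishes:
print(exvp(1024.5, 1024.5))   # -> (1024.5, 1024.5)
# 500 pixels off-center, rr1 > 1 pushes the point outward by a pixel or two
# under the assumed plate scale:
print(exvp(1524.5, 1024.5))   # x lands slightly beyond 1524.5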
|
[
"def",
"exvp",
"(",
"pos_x",
",",
"pos_y",
")",
":",
"pos_x",
"=",
"numpy",
".",
"asarray",
"(",
"pos_x",
")",
"pos_y",
"=",
"numpy",
".",
"asarray",
"(",
"pos_y",
")",
"# convert virtual pixel to real pixel",
"# convert world coordinate to pixel",
"center",
"=",
"[",
"1024.5",
",",
"1024.5",
"]",
"cf",
"=",
"EMIR_PLATESCALE_RADS",
"pos_base_x",
"=",
"pos_x",
"-",
"center",
"[",
"0",
"]",
"pos_base_y",
"=",
"pos_y",
"-",
"center",
"[",
"1",
"]",
"ra",
"=",
"numpy",
".",
"hypot",
"(",
"pos_base_x",
",",
"pos_base_y",
")",
"thet",
"=",
"numpy",
".",
"arctan2",
"(",
"pos_base_y",
",",
"pos_base_x",
")",
"r",
"=",
"cf",
"*",
"ra",
"rr1",
"=",
"1",
"+",
"14606.7",
"*",
"r",
"**",
"2",
"+",
"1739716115.1",
"*",
"r",
"**",
"4",
"nx1",
"=",
"rr1",
"*",
"ra",
"*",
"numpy",
".",
"cos",
"(",
"thet",
")",
"+",
"center",
"[",
"0",
"]",
"ny1",
"=",
"rr1",
"*",
"ra",
"*",
"numpy",
".",
"sin",
"(",
"thet",
")",
"+",
"center",
"[",
"1",
"]",
"return",
"nx1",
",",
"ny1"
] |
Convert virtual pixel to real pixel
|
[
"Convert",
"virtual",
"pixel",
"to",
"real",
"pixel"
] |
train
|
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/distortions.py#L26-L45
|
IdentityPython/oidcendpoint
|
src/oidcendpoint/oidc/registration.py
|
match_sp_sep
|
def match_sp_sep(first, second):
"""
Verify that all the values in 'first' appear in 'second'.
The values can either be in the form of lists or as space separated
items.
:param first:
:param second:
:return: True/False
"""
if isinstance(first, list):
one = [set(v.split(" ")) for v in first]
else:
one = [{v} for v in first.split(" ")]
if isinstance(second, list):
other = [set(v.split(" ")) for v in second]
else:
other = [{v} for v in second.split(" ")]
# all values in one must appear in other
if any(rt not in other for rt in one):
return False
return True
|
python
|
def match_sp_sep(first, second):
"""
Verify that all the values in 'first' appear in 'second'.
The values can either be in the form of lists or as space separated
items.
:param first:
:param second:
:return: True/False
"""
if isinstance(first, list):
one = [set(v.split(" ")) for v in first]
else:
one = [{v} for v in first.split(" ")]
if isinstance(second, list):
other = [set(v.split(" ")) for v in second]
else:
other = [{v} for v in second.split(" ")]
# all values in one must appear in other
if any(rt not in other for rt in one):
return False
return True
|
[
"def",
"match_sp_sep",
"(",
"first",
",",
"second",
")",
":",
"if",
"isinstance",
"(",
"first",
",",
"list",
")",
":",
"one",
"=",
"[",
"set",
"(",
"v",
".",
"split",
"(",
"\" \"",
")",
")",
"for",
"v",
"in",
"first",
"]",
"else",
":",
"one",
"=",
"[",
"{",
"v",
"}",
"for",
"v",
"in",
"first",
".",
"split",
"(",
"\" \"",
")",
"]",
"if",
"isinstance",
"(",
"second",
",",
"list",
")",
":",
"other",
"=",
"[",
"set",
"(",
"v",
".",
"split",
"(",
"\" \"",
")",
")",
"for",
"v",
"in",
"second",
"]",
"else",
":",
"other",
"=",
"[",
"{",
"v",
"}",
"for",
"v",
"in",
"second",
".",
"split",
"(",
"\" \"",
")",
"]",
"# all values in one must appear in other",
"if",
"any",
"(",
"rt",
"not",
"in",
"other",
"for",
"rt",
"in",
"one",
")",
":",
"return",
"False",
"return",
"True"
] |
Verify that all the values in 'first' appear in 'second'.
The values can either be in the form of lists or as space separated
items.
:param first:
:param second:
:return: True/False
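Given match_sp_sep above, a short usage sketch showing that the comparison is set-wise per item: a space separated string is split into single-token sets, while each list element becomes one (possibly multi-valued) set, so "code token" as a single list element is not the same as the two separate tokens.

print(match_sp_sep(["code", "token"], ["code", "token", "id_token"]))  # True
print(match_sp_sep("code token", ["code", "token"]))                   # True
print(match_sp_sep("code token", ["code token"]))                      # False: {"code"} is not among [{"code", "token"}]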
|
[
"Verify",
"that",
"all",
"the",
"values",
"in",
"first",
"appear",
"in",
"second",
".",
"The",
"values",
"can",
"either",
"be",
"in",
"the",
"form",
"of",
"lists",
"or",
"as",
"space",
"separated",
"items",
"."
] |
train
|
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/oidc/registration.py#L57-L80
|
IdentityPython/oidcendpoint
|
src/oidcendpoint/oidc/registration.py
|
Registration._verify_sector_identifier
|
def _verify_sector_identifier(self, request):
"""
Verify `sector_identifier_uri` is reachable and that it contains
`redirect_uri`s.
:param request: Provider registration request
:return: si_redirects, sector_id
:raises: InvalidSectorIdentifier
"""
si_url = request["sector_identifier_uri"]
try:
res = self.endpoint_context.httpc.get(si_url)
except Exception as err:
logger.error(err)
res = None
if not res:
raise InvalidSectorIdentifier("Couldn't read from sector_identifier_uri")
logger.debug("sector_identifier_uri => %s", sanitize(res.text))
try:
si_redirects = json.loads(res.text)
except ValueError:
raise InvalidSectorIdentifier(
"Error deserializing sector_identifier_uri content")
if "redirect_uris" in request:
logger.debug("redirect_uris: %s", request["redirect_uris"])
for uri in request["redirect_uris"]:
if uri not in si_redirects:
raise InvalidSectorIdentifier(
"redirect_uri missing from sector_identifiers")
return si_redirects, si_url
|
python
|
def _verify_sector_identifier(self, request):
"""
Verify `sector_identifier_uri` is reachable and that it contains
`redirect_uri`s.
:param request: Provider registration request
:return: si_redirects, sector_id
:raises: InvalidSectorIdentifier
"""
si_url = request["sector_identifier_uri"]
try:
res = self.endpoint_context.httpc.get(si_url)
except Exception as err:
logger.error(err)
res = None
if not res:
raise InvalidSectorIdentifier("Couldn't read from sector_identifier_uri")
logger.debug("sector_identifier_uri => %s", sanitize(res.text))
try:
si_redirects = json.loads(res.text)
except ValueError:
raise InvalidSectorIdentifier(
"Error deserializing sector_identifier_uri content")
if "redirect_uris" in request:
logger.debug("redirect_uris: %s", request["redirect_uris"])
for uri in request["redirect_uris"]:
if uri not in si_redirects:
raise InvalidSectorIdentifier(
"redirect_uri missing from sector_identifiers")
return si_redirects, si_url
|
[
"def",
"_verify_sector_identifier",
"(",
"self",
",",
"request",
")",
":",
"si_url",
"=",
"request",
"[",
"\"sector_identifier_uri\"",
"]",
"try",
":",
"res",
"=",
"self",
".",
"endpoint_context",
".",
"httpc",
".",
"get",
"(",
"si_url",
")",
"except",
"Exception",
"as",
"err",
":",
"logger",
".",
"error",
"(",
"err",
")",
"res",
"=",
"None",
"if",
"not",
"res",
":",
"raise",
"InvalidSectorIdentifier",
"(",
"\"Couldn't read from sector_identifier_uri\"",
")",
"logger",
".",
"debug",
"(",
"\"sector_identifier_uri => %s\"",
",",
"sanitize",
"(",
"res",
".",
"text",
")",
")",
"try",
":",
"si_redirects",
"=",
"json",
".",
"loads",
"(",
"res",
".",
"text",
")",
"except",
"ValueError",
":",
"raise",
"InvalidSectorIdentifier",
"(",
"\"Error deserializing sector_identifier_uri content\"",
")",
"if",
"\"redirect_uris\"",
"in",
"request",
":",
"logger",
".",
"debug",
"(",
"\"redirect_uris: %s\"",
",",
"request",
"[",
"\"redirect_uris\"",
"]",
")",
"for",
"uri",
"in",
"request",
"[",
"\"redirect_uris\"",
"]",
":",
"if",
"uri",
"not",
"in",
"si_redirects",
":",
"raise",
"InvalidSectorIdentifier",
"(",
"\"redirect_uri missing from sector_identifiers\"",
")",
"return",
"si_redirects",
",",
"si_url"
] |
Verify `sector_identifier_uri` is reachable and that it contains
`redirect_uri`s.
:param request: Provider registration request
:return: si_redirects, sector_id
:raises: InvalidSectorIdentifier
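A hedged test sketch for the method above. DummyHTTPClient, DummyResponse, and the URIs are stand-ins invented here, not oidcendpoint API; the method only touches self.endpoint_context.httpc, so a bare namespace is enough to exercise it, assuming the package is importable.

import json
from types import SimpleNamespace

from oidcendpoint.oidc.registration import Registration

class DummyResponse(object):
    # The sector document must be a JSON array containing every redirect_uri.
    text = json.dumps(["https://rp.example.org/cb", "https://rp.example.org/cb2"])

class DummyHTTPClient(object):
    def get(self, url):
        return DummyResponse()

fake_self = SimpleNamespace(endpoint_context=SimpleNamespace(httpc=DummyHTTPClient()))
request = {
    "sector_identifier_uri": "https://rp.example.org/sector.json",
    "redirect_uris": ["https://rp.example.org/cb"],
}
si_redirects, si_url = Registration._verify_sector_identifier(fake_self, request)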
|
[
"Verify",
"sector_identifier_uri",
"is",
"reachable",
"and",
"that",
"it",
"contains",
"redirect_uri",
"s",
"."
] |
train
|
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/oidc/registration.py#L288-L322
|
guaix-ucm/pyemir
|
emirdrp/util/sexcatalog.py
|
SExtractorfile.readline
|
def readline(self):
"""
Read and analyse the next line of the SExtractor catalog
and return a dictionary {'param1': value, 'param2': value, ...}.
"""
if not(self._firstline):
self._line = self._file.readline()
self._firstline = False
if not(self._line):
return None
__ll = (self._line).replace('\n', '')
__values = __ll.split()
self._outdict.update(dict(zip(self._keys, __values)))
for i in self._keys:
self._outdict[i] = (
SExtractorfile._SE_keys[i]["infunc"](self._outdict[i]))
return self._outdict.copy()
|
python
|
def readline(self):
"""
Read and analyse the next line of the SExtractor catalog
and return a dictionary {'param1': value, 'param2': value, ...}.
"""
if not(self._firstline):
self._line = self._file.readline()
self._firstline = False
if not(self._line):
return None
__ll = (self._line).replace('\n', '')
__values = __ll.split()
self._outdict.update(dict(zip(self._keys, __values)))
for i in self._keys:
self._outdict[i] = (
SExtractorfile._SE_keys[i]["infunc"](self._outdict[i]))
return self._outdict.copy()
|
[
"def",
"readline",
"(",
"self",
")",
":",
"if",
"not",
"(",
"self",
".",
"_firstline",
")",
":",
"self",
".",
"_line",
"=",
"self",
".",
"_file",
".",
"readline",
"(",
")",
"self",
".",
"_firstline",
"=",
"False",
"if",
"not",
"(",
"self",
".",
"_line",
")",
":",
"return",
"None",
"__ll",
"=",
"(",
"self",
".",
"_line",
")",
".",
"replace",
"(",
"'\\n'",
",",
"''",
")",
"__values",
"=",
"__ll",
".",
"split",
"(",
")",
"self",
".",
"_outdict",
".",
"update",
"(",
"dict",
"(",
"zip",
"(",
"self",
".",
"_keys",
",",
"__values",
")",
")",
")",
"for",
"i",
"in",
"self",
".",
"_keys",
":",
"self",
".",
"_outdict",
"[",
"i",
"]",
"=",
"(",
"SExtractorfile",
".",
"_SE_keys",
"[",
"i",
"]",
"[",
"\"infunc\"",
"]",
"(",
"self",
".",
"_outdict",
"[",
"i",
"]",
")",
")",
"return",
"self",
".",
"_outdict",
".",
"copy",
"(",
")"
] |
Read and analyse the next line of the SExtractor catalog
and return a dictionary {'param1': value, 'param2': value, ...}.
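A hypothetical read loop; the constructor argument (a path to an existing SExtractor catalog) is an assumption about SExtractorfile's interface, and the available keys depend on the catalog's parameter list.

cat = SExtractorfile("objects.cat")  # assumed constructor signature
row = cat.readline()
while row is not None:
    print(row)  # e.g. {'NUMBER': 1, 'MAG_BEST': 18.2, ...}
    row = cat.readline()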
|
[
"Read",
"and",
"analyse",
"the",
"next",
"line",
"of",
"the",
"SExtractor",
"catalog",
"and",
"return",
"a",
"dictionary",
"{",
"param1",
":",
"value",
"param2",
":",
"value",
"...",
"}",
"."
] |
train
|
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/util/sexcatalog.py#L740-L759
|