Dataset schema (column, type, observed size range):

    repository_name            string  length 7 to 55
    func_path_in_repository    string  length 4 to 223
    func_name                  string  length 1 to 134
    whole_func_string          string  length 75 to 104k
    language                   string  1 value (python)
    func_code_string           string  length 75 to 104k
    func_code_tokens           list    length 19 to 28.4k
    func_documentation_string  string  length 1 to 46.9k
    func_documentation_tokens  list    length 1 to 1.97k
    split_name                 string  1 value (train)
    func_code_url              string  length 87 to 315
guaix-ucm/pyemir
emirdrp/util/sexcatalog.py
SExtractorfile.read
def read(self):
    """
    Read the file until EOF and return a list of dictionaries.
    """
    __result = []
    __ll = self.readline()
    while __ll:
        __result.append(__ll)
        __ll = self.readline()
    return list(__result)
python
[ "def", "read", "(", "self", ")", ":", "__result", "=", "[", "]", "__ll", "=", "self", ".", "readline", "(", ")", "while", "__ll", ":", "__result", ".", "append", "(", "__ll", ")", "__ll", "=", "self", ".", "readline", "(", ")", "return", "list", "(", "__result", ")" ]
Read the file until EOF and return a list of dictionaries.
[ "Read", "the", "file", "until", "EOF", "and", "return", "a", "list", "of", "dictionaries", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/util/sexcatalog.py#L761-L772
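The read() method above is a plain accumulate-until-falsy loop over readline(). A minimal self-contained sketch of the same pattern, using a hypothetical stand-in class instead of the real SExtractorfile:

class ToyCatalog:
    """Hypothetical reader: readline() yields one dict per row, None at EOF."""

    def __init__(self, rows):
        self._rows = iter(rows)

    def readline(self):
        return next(self._rows, None)

    def read(self):
        # accumulate readline() results until a falsy value signals EOF
        result = []
        row = self.readline()
        while row:
            result.append(row)
            row = self.readline()
        return result

print(ToyCatalog([{"NUMBER": 1}, {"NUMBER": 2}]).read())
# [{'NUMBER': 1}, {'NUMBER': 2}]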
guaix-ucm/pyemir
emirdrp/util/sexcatalog.py
SExtractorfile.close
def close(self):
    """
    Close the SExtractor file.
    """
    if self._file:
        if not self._file.closed:
            self._file.close()
    self.closed = True
python
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "_file", ":", "if", "not", "(", "self", ".", "_file", ".", "closed", ")", ":", "self", ".", "_file", ".", "close", "(", ")", "self", ".", "closed", "=", "True" ]
Close the SExtractor file.
[ "Close", "the", "SExtractor", "file", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/util/sexcatalog.py#L777-L784
BeyondTheClouds/enoslib
enoslib/infra/enos_g5k/driver.py
get_driver
def get_driver(configuration):
    """Build an instance of the driver to interact with G5K
    """
    resources = configuration["resources"]
    machines = resources["machines"]
    networks = resources["networks"]
    oargrid_jobids = configuration.get("oargrid_jobids")

    if oargrid_jobids:
        logger.debug("Loading the OargridStaticDriver")
        return OargridStaticDriver(oargrid_jobids)
    else:
        job_name = configuration.get("job_name", DEFAULT_JOB_NAME)
        walltime = configuration.get("walltime", DEFAULT_WALLTIME)
        job_type = configuration.get("job_type", JOB_TYPE_DEPLOY)
        reservation_date = configuration.get("reservation", False)
        # NOTE(msimonin): some time ago asimonet proposed to auto-detect
        # the queues and it was quite convenient
        # see https://github.com/BeyondTheClouds/enos/pull/62
        queue = configuration.get("queue", None)
        logger.debug("Loading the OargridDynamicDriver")
        return OargridDynamicDriver(
            job_name,
            walltime,
            job_type,
            reservation_date,
            queue,
            machines,
            networks
        )
python
[ "def", "get_driver", "(", "configuration", ")", ":", "resources", "=", "configuration", "[", "\"resources\"", "]", "machines", "=", "resources", "[", "\"machines\"", "]", "networks", "=", "resources", "[", "\"networks\"", "]", "oargrid_jobids", "=", "configuration", ".", "get", "(", "\"oargrid_jobids\"", ")", "if", "oargrid_jobids", ":", "logger", ".", "debug", "(", "\"Loading the OargridStaticDriver\"", ")", "return", "OargridStaticDriver", "(", "oargrid_jobids", ")", "else", ":", "job_name", "=", "configuration", ".", "get", "(", "\"job_name\"", ",", "DEFAULT_JOB_NAME", ")", "walltime", "=", "configuration", ".", "get", "(", "\"walltime\"", ",", "DEFAULT_WALLTIME", ")", "job_type", "=", "configuration", ".", "get", "(", "\"job_type\"", ",", "JOB_TYPE_DEPLOY", ")", "reservation_date", "=", "configuration", ".", "get", "(", "\"reservation\"", ",", "False", ")", "# NOTE(msimonin): some time ago asimonet proposes to auto-detect", "# the queues and it was quiet convenient", "# see https://github.com/BeyondTheClouds/enos/pull/62", "queue", "=", "configuration", ".", "get", "(", "\"queue\"", ",", "None", ")", "logger", ".", "debug", "(", "\"Loading the OargridDynamicDriver\"", ")", "return", "OargridDynamicDriver", "(", "job_name", ",", "walltime", ",", "job_type", ",", "reservation_date", ",", "queue", ",", "machines", ",", "networks", ")" ]
Build an instance of the driver to interact with G5K
[ "Build", "an", "instance", "of", "the", "driver", "to", "interact", "with", "G5K" ]
train
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/driver.py#L18-L48
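A sketch of the configuration shape get_driver() expects; the top-level keys are the ones read in the function body, while the machine and network entries are hypothetical placeholders, not a validated enoslib schema:

# hypothetical configuration; only the top-level keys come from the code above
configuration = {
    "resources": {
        "machines": [{"roles": ["compute"], "cluster": "paravance", "nodes": 2}],
        "networks": [{"roles": ["net"], "type": "prod", "site": "rennes"}],
    },
    # no "oargrid_jobids" key, so the dynamic branch is taken and
    # job_name/job_type/queue fall back to their defaults
    "walltime": "02:00:00",
}
# driver = get_driver(configuration)  # would build an OargridDynamicDriver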
IdentityPython/oidcendpoint
src/oidcendpoint/cookie.py
ver_dec_content
def ver_dec_content(parts, sign_key=None, enc_key=None, sign_alg='SHA256'):
    """
    Verifies the value of a cookie

    :param parts: The parts of the payload
    :param sign_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance
    :param enc_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance
    :param sign_alg: Which signing algorithm was used
    :return: A tuple with basic information and a timestamp
    """
    if parts is None:
        return None
    elif len(parts) == 3:
        # verify the cookie signature
        timestamp, load, b64_mac = parts
        mac = base64.b64decode(b64_mac)
        verifier = HMACSigner(algorithm=sign_alg)
        if verifier.verify(load.encode('utf-8') + timestamp.encode('utf-8'),
                           mac, sign_key.key):
            return load, timestamp
        else:
            raise VerificationError()
    elif len(parts) == 4:
        b_timestamp = parts[0]
        iv = base64.b64decode(parts[1])
        ciphertext = base64.b64decode(parts[2])
        tag = base64.b64decode(parts[3])

        decrypter = AES_GCMEncrypter(key=enc_key.key)
        msg = decrypter.decrypt(ciphertext, iv, tag=tag)
        p = lv_unpack(msg.decode('utf-8'))
        load = p[0]
        timestamp = p[1]
        if len(p) == 3:
            verifier = HMACSigner(algorithm=sign_alg)
            if verifier.verify(load.encode('utf-8') + timestamp.encode('utf-8'),
                               base64.b64decode(p[2]), sign_key.key):
                return load, timestamp
        else:
            return load, timestamp
    return None
python
[ "def", "ver_dec_content", "(", "parts", ",", "sign_key", "=", "None", ",", "enc_key", "=", "None", ",", "sign_alg", "=", "'SHA256'", ")", ":", "if", "parts", "is", "None", ":", "return", "None", "elif", "len", "(", "parts", ")", "==", "3", ":", "# verify the cookie signature", "timestamp", ",", "load", ",", "b64_mac", "=", "parts", "mac", "=", "base64", ".", "b64decode", "(", "b64_mac", ")", "verifier", "=", "HMACSigner", "(", "algorithm", "=", "sign_alg", ")", "if", "verifier", ".", "verify", "(", "load", ".", "encode", "(", "'utf-8'", ")", "+", "timestamp", ".", "encode", "(", "'utf-8'", ")", ",", "mac", ",", "sign_key", ".", "key", ")", ":", "return", "load", ",", "timestamp", "else", ":", "raise", "VerificationError", "(", ")", "elif", "len", "(", "parts", ")", "==", "4", ":", "b_timestamp", "=", "parts", "[", "0", "]", "iv", "=", "base64", ".", "b64decode", "(", "parts", "[", "1", "]", ")", "ciphertext", "=", "base64", ".", "b64decode", "(", "parts", "[", "2", "]", ")", "tag", "=", "base64", ".", "b64decode", "(", "parts", "[", "3", "]", ")", "decrypter", "=", "AES_GCMEncrypter", "(", "key", "=", "enc_key", ".", "key", ")", "msg", "=", "decrypter", ".", "decrypt", "(", "ciphertext", ",", "iv", ",", "tag", "=", "tag", ")", "p", "=", "lv_unpack", "(", "msg", ".", "decode", "(", "'utf-8'", ")", ")", "load", "=", "p", "[", "0", "]", "timestamp", "=", "p", "[", "1", "]", "if", "len", "(", "p", ")", "==", "3", ":", "verifier", "=", "HMACSigner", "(", "algorithm", "=", "sign_alg", ")", "if", "verifier", ".", "verify", "(", "load", ".", "encode", "(", "'utf-8'", ")", "+", "timestamp", ".", "encode", "(", "'utf-8'", ")", ",", "base64", ".", "b64decode", "(", "p", "[", "2", "]", ")", ",", "sign_key", ".", "key", ")", ":", "return", "load", ",", "timestamp", "else", ":", "return", "load", ",", "timestamp", "return", "None" ]
Verifies the value of a cookie :param parts: The parts of the payload :param sign_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance :param enc_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance :param sign_alg: Which signing algorithm was used :return: A tuple with basic information and a timestamp
[ "Verifies", "the", "value", "of", "a", "cookie" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/cookie.py#L88-L129
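For illustration, the signed three-part branch can be reproduced with the standard library alone. This is a sketch of the MAC-over-(load + timestamp) check, not the cryptojwt HMACSigner API the real code uses:

import base64
import hashlib
import hmac
import time

sign_key = b"0123456789abcdef"  # hypothetical 16-byte signing key
load, timestamp = "user=alice", str(int(time.time()))

# creation side: MAC over load + timestamp, then base64 encode
mac = hmac.new(sign_key, (load + timestamp).encode("utf-8"), hashlib.sha256)
parts = [timestamp, load, base64.b64encode(mac.digest()).decode("ascii")]

# verification side, mirroring the len(parts) == 3 path above
ts, payload, b64_mac = parts
expected = hmac.new(sign_key, (payload + ts).encode("utf-8"), hashlib.sha256)
assert hmac.compare_digest(expected.digest(), base64.b64decode(b64_mac))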
IdentityPython/oidcendpoint
src/oidcendpoint/cookie.py
make_cookie_content
def make_cookie_content(name, load, sign_key, domain=None, path=None,
                        timestamp="", enc_key=None, max_age=0,
                        sign_alg='SHA256'):
    """
    Create and return a cookie's content

    If you only provide a `sign_key`, an HMAC gets added to the cookie's
    value and is checked when the cookie is parsed again.

    If you provide both `sign_key` and `enc_key`, the cookie gets protected
    by using AEAD encryption. This provides both a MAC over the whole cookie
    and encrypts the `load` in a single step.

    The `sign_key` and `enc_key` parameters should be byte strings of at
    least 16 bytes length each. Those are used as cryptographic keys.

    :param name: Cookie name
    :type name: text
    :param load: Cookie load
    :type load: text
    :param sign_key: A key for payload signing
    :type sign_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance
    :param domain: The domain of the cookie
    :param path: The path specification for the cookie
    :param timestamp: A time stamp
    :type timestamp: text
    :param enc_key: The key to use for payload encryption.
    :type enc_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance
    :param max_age: The time in seconds for when a cookie will be deleted
    :type max_age: int
    :return: A dictionary with the cookie content per name
    """
    if not timestamp:
        timestamp = str(int(time.time()))

    _cookie_value = sign_enc_payload(load, timestamp, sign_key=sign_key,
                                     enc_key=enc_key, sign_alg=sign_alg)

    content = {name: {"value": _cookie_value}}
    if path is not None:
        content[name]["path"] = path
    if domain is not None:
        content[name]["domain"] = domain
    content[name]['httponly'] = True

    if max_age:
        content[name]["expires"] = in_a_while(seconds=max_age)

    return content
python
[ "def", "make_cookie_content", "(", "name", ",", "load", ",", "sign_key", ",", "domain", "=", "None", ",", "path", "=", "None", ",", "timestamp", "=", "\"\"", ",", "enc_key", "=", "None", ",", "max_age", "=", "0", ",", "sign_alg", "=", "'SHA256'", ")", ":", "if", "not", "timestamp", ":", "timestamp", "=", "str", "(", "int", "(", "time", ".", "time", "(", ")", ")", ")", "_cookie_value", "=", "sign_enc_payload", "(", "load", ",", "timestamp", ",", "sign_key", "=", "sign_key", ",", "enc_key", "=", "enc_key", ",", "sign_alg", "=", "sign_alg", ")", "content", "=", "{", "name", ":", "{", "\"value\"", ":", "_cookie_value", "}", "}", "if", "path", "is", "not", "None", ":", "content", "[", "name", "]", "[", "\"path\"", "]", "=", "path", "if", "domain", "is", "not", "None", ":", "content", "[", "name", "]", "[", "\"domain\"", "]", "=", "domain", "content", "[", "name", "]", "[", "'httponly'", "]", "=", "True", "if", "max_age", ":", "content", "[", "name", "]", "[", "\"expires\"", "]", "=", "in_a_while", "(", "seconds", "=", "max_age", ")", "return", "content" ]
Create and return a cookie's content If you only provide a `sign_key`, an HMAC gets added to the cookie's value and is checked when the cookie is parsed again. If you provide both `sign_key` and `enc_key`, the cookie gets protected by using AEAD encryption. This provides both a MAC over the whole cookie and encrypts the `load` in a single step. The `sign_key` and `enc_key` parameters should be byte strings of at least 16 bytes length each. Those are used as cryptographic keys. :param name: Cookie name :type name: text :param load: Cookie load :type load: text :param sign_key: A key for payload signing :type sign_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance :param domain: The domain of the cookie :param path: The path specification for the cookie :param timestamp: A time stamp :type timestamp: text :param enc_key: The key to use for payload encryption. :type enc_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance :param max_age: The time in seconds for when a cookie will be deleted :type max_age: int :return: A dictionary with the cookie content per name
[ "Create", "and", "return", "a", "cookies", "content" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/cookie.py#L132-L181
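The returned dictionary is not a cookie yet; something like the append_cookie record further below copies it into an http.cookies.SimpleCookie. A minimal sketch of that mapping with a hypothetical content dict:

from http.cookies import SimpleCookie

content = {"session": {"value": "opaque-signed-blob", "path": "/",
                       "httponly": True}}
cookie = SimpleCookie()
for name, args in content.items():
    cookie[name] = args["value"]
    for key, value in args.items():
        if key != "value":
            cookie[name][key] = value
print(cookie.output())
# e.g. Set-Cookie: session=opaque-signed-blob; HttpOnly; Path=/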
IdentityPython/oidcendpoint
src/oidcendpoint/cookie.py
cookie_parts
def cookie_parts(name, kaka):
    """
    Give me the parts of the cookie payload

    :param name: A name of a cookie object
    :param kaka: The cookie
    :return: A list of parts or None if there is no cookie object with the
        given name
    """
    cookie_obj = SimpleCookie(as_unicode(kaka))
    morsel = cookie_obj.get(name)
    if morsel:
        return morsel.value.split("|")
    else:
        return None
python
[ "def", "cookie_parts", "(", "name", ",", "kaka", ")", ":", "cookie_obj", "=", "SimpleCookie", "(", "as_unicode", "(", "kaka", ")", ")", "morsel", "=", "cookie_obj", ".", "get", "(", "name", ")", "if", "morsel", ":", "return", "morsel", ".", "value", ".", "split", "(", "\"|\"", ")", "else", ":", "return", "None" ]
Give me the parts of the cookie payload :param name: A name of a cookie object :param kaka: The cookie :return: A list of parts or None if there is no cookie object with the given name
[ "Give", "me", "the", "parts", "of", "the", "cookie", "payload" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/cookie.py#L201-L215
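A short example of the "|"-separated packing this function undoes, using a literal cookie header value; the field contents are hypothetical:

from http.cookies import SimpleCookie

raw = 'oidc="1588776000|user=alice|c2lnbmF0dXJl"'
morsel = SimpleCookie(raw).get("oidc")
print(morsel.value.split("|"))
# ['1588776000', 'user=alice', 'c2lnbmF0dXJl']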
IdentityPython/oidcendpoint
src/oidcendpoint/cookie.py
parse_cookie
def parse_cookie(name, sign_key, kaka, enc_key=None, sign_alg='SHA256'):
    """Parses and verifies a cookie value

    Parses a cookie created by `make_cookie` and verifies it has not been
    tampered with.

    You need to provide the same `sign_key` and `enc_key` used when creating
    the cookie, otherwise the verification fails. See `make_cookie` for
    details about the verification.

    :param sign_key: A signing key used to create the signature
    :type sign_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance
    :param kaka: The cookie
    :param enc_key: The encryption key used.
    :type enc_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance or None
    :raises InvalidCookieSign: When verification fails.
    :return: A tuple consisting of (payload, timestamp) or None if parsing
        fails
    """
    if not kaka:
        return None

    parts = cookie_parts(name, kaka)
    return ver_dec_content(parts, sign_key, enc_key, sign_alg)
python
[ "def", "parse_cookie", "(", "name", ",", "sign_key", ",", "kaka", ",", "enc_key", "=", "None", ",", "sign_alg", "=", "'SHA256'", ")", ":", "if", "not", "kaka", ":", "return", "None", "parts", "=", "cookie_parts", "(", "name", ",", "kaka", ")", "return", "ver_dec_content", "(", "parts", ",", "sign_key", ",", "enc_key", ",", "sign_alg", ")" ]
Parses and verifies a cookie value Parses a cookie created by `make_cookie` and verifies it has not been tampered with. You need to provide the same `sign_key` and `enc_key` used when creating the cookie, otherwise the verification fails. See `make_cookie` for details about the verification. :param sign_key: A signing key used to create the signature :type sign_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance :param kaka: The cookie :param enc_key: The encryption key used. :type enc_key: A :py:class:`cryptojwt.jwk.hmac.SYMKey` instance or None :raises InvalidCookieSign: When verification fails. :return: A tuple consisting of (payload, timestamp) or None if parsing fails
[ "Parses", "and", "verifies", "a", "cookie", "value" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/cookie.py#L218-L241
IdentityPython/oidcendpoint
src/oidcendpoint/cookie.py
CookieDealer.delete_cookie
def delete_cookie(self, cookie_name=None):
    """
    Create a cookie that will immediately expire when it hits the other
    side.

    :param cookie_name: Name of the cookie
    :return: A tuple to be added to headers
    """
    if cookie_name is None:
        cookie_name = self.default_value['name']
    return self.create_cookie("", "", cookie_name=cookie_name, kill=True)
python
[ "def", "delete_cookie", "(", "self", ",", "cookie_name", "=", "None", ")", ":", "if", "cookie_name", "is", "None", ":", "cookie_name", "=", "self", ".", "default_value", "[", "'name'", "]", "return", "self", ".", "create_cookie", "(", "\"\"", ",", "\"\"", ",", "cookie_name", "=", "cookie_name", ",", "kill", "=", "True", ")" ]
Create a cookie that will immediately expire when it hits the other side. :param cookie_name: Name of the cookie :return: A tuple to be added to headers
[ "Create", "a", "cookie", "that", "will", "immediately", "expire", "when", "it", "hits", "the", "other", "side", "." ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/cookie.py#L286-L297
IdentityPython/oidcendpoint
src/oidcendpoint/cookie.py
CookieDealer.get_cookie_value
def get_cookie_value(self, cookie=None, cookie_name=None):
    """
    Return information stored in a Cookie

    :param cookie: A cookie instance
    :param cookie_name: The name of the cookie I'm looking for
    :return: tuple (value, timestamp, type)
    """
    if cookie_name is None:
        cookie_name = self.default_value['name']

    if cookie is None or cookie_name is None:
        return None
    else:
        try:
            info, timestamp = parse_cookie(cookie_name, self.sign_key, cookie,
                                           self.enc_key, self.sign_alg)
        except (TypeError, AssertionError):
            return None
        else:
            value, _ts, typ = info.split("::")
            if timestamp == _ts:
                return value, _ts, typ
    return None
python
[ "def", "get_cookie_value", "(", "self", ",", "cookie", "=", "None", ",", "cookie_name", "=", "None", ")", ":", "if", "cookie_name", "is", "None", ":", "cookie_name", "=", "self", ".", "default_value", "[", "'name'", "]", "if", "cookie", "is", "None", "or", "cookie_name", "is", "None", ":", "return", "None", "else", ":", "try", ":", "info", ",", "timestamp", "=", "parse_cookie", "(", "cookie_name", ",", "self", ".", "sign_key", ",", "cookie", ",", "self", ".", "enc_key", ",", "self", ".", "sign_alg", ")", "except", "(", "TypeError", ",", "AssertionError", ")", ":", "return", "None", "else", ":", "value", ",", "_ts", ",", "typ", "=", "info", ".", "split", "(", "\"::\"", ")", "if", "timestamp", "==", "_ts", ":", "return", "value", ",", "_ts", ",", "typ", "return", "None" ]
Return information stored in a Cookie :param cookie: A cookie instance :param cookie_name: The name of the cookie I'm looking for :return: tuple (value, timestamp, type)
[ "Return", "information", "stored", "in", "a", "Cookie" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/cookie.py#L344-L367
IdentityPython/oidcendpoint
src/oidcendpoint/cookie.py
CookieDealer.append_cookie
def append_cookie(self, cookie, name, payload, typ, domain=None, path=None,
                  timestamp="", max_age=0):
    """
    Adds a cookie to a SimpleCookie instance

    :param cookie:
    :param name:
    :param payload:
    :param typ:
    :param domain:
    :param path:
    :param timestamp:
    :param max_age:
    :return:
    """
    timestamp = str(int(time.time()))

    # create cookie payload
    try:
        _payload = "::".join([payload, timestamp, typ])
    except TypeError:
        _payload = "::".join([payload[0], timestamp, typ])

    content = make_cookie_content(name, _payload, self.sign_key,
                                  domain=domain, path=path,
                                  timestamp=timestamp, enc_key=self.enc_key,
                                  max_age=max_age, sign_alg=self.sign_alg)

    for name, args in content.items():
        cookie[name] = args['value']
        for key, value in args.items():
            if key == 'value':
                continue
            cookie[name][key] = value

    return cookie
python
[ "def", "append_cookie", "(", "self", ",", "cookie", ",", "name", ",", "payload", ",", "typ", ",", "domain", "=", "None", ",", "path", "=", "None", ",", "timestamp", "=", "\"\"", ",", "max_age", "=", "0", ")", ":", "timestamp", "=", "str", "(", "int", "(", "time", ".", "time", "(", ")", ")", ")", "# create cookie payload", "try", ":", "_payload", "=", "\"::\"", ".", "join", "(", "[", "payload", ",", "timestamp", ",", "typ", "]", ")", "except", "TypeError", ":", "_payload", "=", "\"::\"", ".", "join", "(", "[", "payload", "[", "0", "]", ",", "timestamp", ",", "typ", "]", ")", "content", "=", "make_cookie_content", "(", "name", ",", "_payload", ",", "self", ".", "sign_key", ",", "domain", "=", "domain", ",", "path", "=", "path", ",", "timestamp", "=", "timestamp", ",", "enc_key", "=", "self", ".", "enc_key", ",", "max_age", "=", "max_age", ",", "sign_alg", "=", "self", ".", "sign_alg", ")", "for", "name", ",", "args", "in", "content", ".", "items", "(", ")", ":", "cookie", "[", "name", "]", "=", "args", "[", "'value'", "]", "for", "key", ",", "value", "in", "args", ".", "items", "(", ")", ":", "if", "key", "==", "'value'", ":", "continue", "cookie", "[", "name", "]", "[", "key", "]", "=", "value", "return", "cookie" ]
Adds a cookie to a SimpleCookie instance :param cookie: :param name: :param payload: :param typ: :param domain: :param path: :param timestamp: :param max_age: :return:
[ "Adds", "a", "cookie", "to", "a", "SimpleCookie", "instance" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/cookie.py#L369-L404
BreakingBytes/simkit
examples/PVPower/pvpower/formulas/performance.py
f_ac_power
def f_ac_power(inverter, v_mp, p_mp):
    """
    Calculate AC power

    :param inverter:
    :param v_mp:
    :param p_mp:
    :return: AC power [W]
    """
    return pvlib.pvsystem.snlinverter(v_mp, p_mp, inverter).flatten()
python
[ "def", "f_ac_power", "(", "inverter", ",", "v_mp", ",", "p_mp", ")", ":", "return", "pvlib", ".", "pvsystem", ".", "snlinverter", "(", "v_mp", ",", "p_mp", ",", "inverter", ")", ".", "flatten", "(", ")" ]
Calculate AC power :param inverter: :param v_mp: :param p_mp: :return: AC power [W]
[ "Calculate", "AC", "power" ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/examples/PVPower/pvpower/formulas/performance.py#L10-L19
BreakingBytes/simkit
examples/PVPower/pvpower/formulas/performance.py
f_dc_power
def f_dc_power(effective_irradiance, cell_temp, module):
    """
    Calculate DC power using Sandia Performance model

    :param effective_irradiance: effective irradiance [suns]
    :param cell_temp: PV cell temperature [degC]
    :param module: PV module dictionary or pandas data frame
    :returns: i_sc, i_mp, v_oc, v_mp, p_mp
    """
    dc = pvlib.pvsystem.sapm(effective_irradiance, cell_temp, module)
    fields = ('i_sc', 'i_mp', 'v_oc', 'v_mp', 'p_mp')
    return tuple(dc[field] for field in fields)
python
[ "def", "f_dc_power", "(", "effective_irradiance", ",", "cell_temp", ",", "module", ")", ":", "dc", "=", "pvlib", ".", "pvsystem", ".", "sapm", "(", "effective_irradiance", ",", "cell_temp", ",", "module", ")", "fields", "=", "(", "'i_sc'", ",", "'i_mp'", ",", "'v_oc'", ",", "'v_mp'", ",", "'p_mp'", ")", "return", "tuple", "(", "dc", "[", "field", "]", "for", "field", "in", "fields", ")" ]
Calculate DC power using Sandia Performance model :param effective_irradiance: effective irradiance [suns] :param cell_temp: PV cell temperature [degC] :param module: PV module dictionary or pandas data frame :returns: i_sc, i_mp, v_oc, v_mp, p_mp
[ "Calculate", "DC", "power", "using", "Sandia", "Performance", "model" ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/examples/PVPower/pvpower/formulas/performance.py#L22-L33
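The module argument carries the Sandia model coefficients. A hedged sketch of where such a record typically comes from; the database key below is one entry from pvlib's bundled Sandia module file (as used in pvlib's own documentation), and sapm signatures and irradiance units have shifted across pvlib versions, so the model call is left commented:

import pvlib

modules = pvlib.pvsystem.retrieve_sam("SandiaMod")
module = modules["Canadian_Solar_CS5P_220M___2009_"]
# dc = pvlib.pvsystem.sapm(effective_irradiance, cell_temp, module)
# the returned mapping is then unpacked field by field:
fields = ("i_sc", "i_mp", "v_oc", "v_mp", "p_mp")
# i_sc, i_mp, v_oc, v_mp, p_mp = tuple(dc[field] for field in fields)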
BreakingBytes/simkit
examples/PVPower/pvpower/formulas/performance.py
f_effective_irradiance
def f_effective_irradiance(poa_direct, poa_diffuse, am_abs, aoi, module):
    """
    Calculate effective irradiance for Sandia Performance model

    :param poa_direct: plane of array direct irradiance [W/m**2]
    :param poa_diffuse: plane of array diffuse irradiance [W/m**2]
    :param am_abs: absolute air mass [dimensionless]
    :param aoi: angle of incidence [degrees]
    :param module: PV module dictionary or pandas data frame
    :return: effective irradiance (Ee) [suns]
    """
    Ee = pvlib.pvsystem.sapm_effective_irradiance(poa_direct, poa_diffuse,
                                                  am_abs, aoi, module)
    return Ee.reshape(1, -1)
python
[ "def", "f_effective_irradiance", "(", "poa_direct", ",", "poa_diffuse", ",", "am_abs", ",", "aoi", ",", "module", ")", ":", "Ee", "=", "pvlib", ".", "pvsystem", ".", "sapm_effective_irradiance", "(", "poa_direct", ",", "poa_diffuse", ",", "am_abs", ",", "aoi", ",", "module", ")", "return", "Ee", ".", "reshape", "(", "1", ",", "-", "1", ")" ]
Calculate effective irradiance for Sandia Performance model :param poa_direct: plane of array direct irradiance [W/m**2] :param poa_diffuse: plane of array diffuse irradiance [W/m**2] :param am_abs: absolute air mass [dimensionless] :param aoi: angle of incidence [degrees] :param module: PV module dictionary or pandas data frame :return: effective irradiance (Ee) [suns]
[ "Calculate", "effective", "irradiance", "for", "Sandia", "Performance", "model" ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/examples/PVPower/pvpower/formulas/performance.py#L36-L49
BreakingBytes/simkit
examples/PVPower/pvpower/formulas/performance.py
f_cell_temp
def f_cell_temp(poa_global, wind_speed, air_temp):
    """
    Calculate cell temperature.

    :param poa_global: plane of array global irradiance [W/m**2]
    :param wind_speed: wind speed [m/s]
    :param air_temp: ambient dry bulb air temperature [degC]
    :return: cell temperature [degC], module temperature [degC]
    """
    temps = pvlib.pvsystem.sapm_celltemp(poa_global, wind_speed, air_temp)
    return temps['temp_cell'].values, temps['temp_module'].values
python
[ "def", "f_cell_temp", "(", "poa_global", ",", "wind_speed", ",", "air_temp", ")", ":", "temps", "=", "pvlib", ".", "pvsystem", ".", "sapm_celltemp", "(", "poa_global", ",", "wind_speed", ",", "air_temp", ")", "return", "temps", "[", "'temp_cell'", "]", ".", "values", ",", "temps", "[", "'temp_module'", "]", ".", "values" ]
Calculate cell temperature. :param poa_global: plane of array global irradiance [W/m**2] :param wind_speed: wind speed [m/s] :param air_temp: ambient dry bulb air temperature [degC] :return: cell temperature [degC], module temperature [degC]
[ "Calculate", "cell", "temperature", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/examples/PVPower/pvpower/formulas/performance.py#L52-L62
BreakingBytes/simkit
examples/PVPower/pvpower/formulas/performance.py
f_aoi
def f_aoi(surface_tilt, surface_azimuth, solar_zenith, solar_azimuth):
    """
    Calculate angle of incidence

    :param surface_tilt:
    :param surface_azimuth:
    :param solar_zenith:
    :param solar_azimuth:
    :return: angle of incidence [deg]
    """
    return pvlib.irradiance.aoi(surface_tilt, surface_azimuth,
                                solar_zenith, solar_azimuth)
python
[ "def", "f_aoi", "(", "surface_tilt", ",", "surface_azimuth", ",", "solar_zenith", ",", "solar_azimuth", ")", ":", "return", "pvlib", ".", "irradiance", ".", "aoi", "(", "surface_tilt", ",", "surface_azimuth", ",", "solar_zenith", ",", "solar_azimuth", ")" ]
Calculate angle of incidence :param surface_tilt: :param surface_azimuth: :param solar_zenith: :param solar_azimuth: :return: angle of incidence [deg]
[ "Calculate", "angle", "of", "incidence" ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/examples/PVPower/pvpower/formulas/performance.py#L65-L76
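A quick sanity check with hypothetical angles: a panel tilted 30 degrees toward the south, with the sun due south at 45 degrees zenith, sees the sun 15 degrees off the panel normal:

import pvlib

aoi = pvlib.irradiance.aoi(surface_tilt=30, surface_azimuth=180,
                           solar_zenith=45, solar_azimuth=180)
print(aoi)  # ~15.0 degrees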
volafiled/python-volapi
setup.py
find_version
def find_version(filename):
    """
    Search for assignment of __version__ string in given file and
    return what it is assigned to.
    """
    with open(filename, "r") as filep:
        version_file = filep.read()
    version_match = re.search(
        r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M
    )
    if version_match:
        return version_match.group(1)
    raise RuntimeError("Unable to find version string.")
python
[ "def", "find_version", "(", "filename", ")", ":", "with", "open", "(", "filename", ",", "\"r\"", ")", "as", "filep", ":", "version_file", "=", "filep", ".", "read", "(", ")", "version_match", "=", "re", ".", "search", "(", "r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\"", ",", "version_file", ",", "re", ".", "M", ")", "if", "version_match", ":", "return", "version_match", ".", "group", "(", "1", ")", "raise", "RuntimeError", "(", "\"Unable to find version string.\"", ")" ]
Search for assignment of __version__ string in given file and return what it is assigned to.
[ "Search", "for", "assignment", "of", "__version__", "string", "in", "given", "file", "and", "return", "what", "it", "is", "assigned", "to", "." ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/setup.py#L22-L34
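The re.M flag is what lets the pattern match an assignment on any line, since ^ then anchors at every line start; a self-contained check of the regex:

import re

text = '"""demo module"""\n__version__ = "1.2.3"\n'
match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", text, re.M)
print(match.group(1))  # 1.2.3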
guaix-ucm/pyemir
emirdrp/tools/save_ndarray_to_fits.py
save_ndarray_to_fits
def save_ndarray_to_fits(array=None, file_name=None, main_header=None,
                         cast_to_float=True, crpix1=None, crval1=None,
                         cdelt1=None, overwrite=True):
    """Save numpy array(s) into a FITS file with the provided filename.

    Parameters
    ----------
    array : numpy array or list of numpy arrays
        Array(s) to be exported as the FITS file. If the input is a list,
        a multi-extension FITS file is generated assuming that the list
        contains a list of arrays.
    file_name : string
        File name for the output FITS file.
    main_header : astropy FITS header
        Header to be introduced in the primary HDU.
    cast_to_float : bool or list of booleans
        If True, the array(s) data are saved as float. If a list of arrays
        has been provided, this parameter must be either a list (with the
        same length) of booleans or None.
    crpix1 : float, list of floats or None
        If not None, this value is used for the keyword CRPIX1. If a list
        of arrays has been provided, this parameter must be either a list
        (with the same length) of floats or None.
    crval1 : float, list of floats or None
        If not None, this value is used for the keyword CRVAL1. If a list
        of arrays has been provided, this parameter must be either a list
        (with the same length) of floats or None.
    cdelt1 : float, list of floats or None
        If not None, this value is used for the keyword CDELT1. If a list
        of arrays has been provided, this parameter must be either a list
        (with the same length) of floats or None.
    overwrite : bool
        If True, the file is overwritten (in the case it already exists).

    """
    # protections
    if file_name is None:
        raise ValueError("File_name is not defined in save_ndarray_to_fits")

    if type(array) is list:
        list_of_arrays = array
        narrays = len(list_of_arrays)
        # cast_to_float must be a list of bools
        if type(cast_to_float) is not list:
            raise ValueError("Expected list of cast_to_float not found!")
        else:
            if len(cast_to_float) != narrays:
                raise ValueError("Unexpected length of cast_to_float")
        list_cast_to_float = cast_to_float
        # check that the additional associated lists have been provided
        # and that they have the expected length (or they are None)
        for ldum, cdum in zip([crpix1, crval1, cdelt1],
                              ['crpix1', 'crval1', 'cdelt1']):
            if ldum is not None:
                if type(ldum) is not list:
                    raise ValueError("Expected list of " + cdum +
                                     " not found!")
                else:
                    if len(ldum) != narrays:
                        raise ValueError("Unexpected length of " + cdum)
        if crpix1 is None:
            list_crpix1 = [None] * narrays
        else:
            list_crpix1 = crpix1
        if crval1 is None:
            list_crval1 = [None] * narrays
        else:
            list_crval1 = crval1
        if cdelt1 is None:
            list_cdelt1 = [None] * narrays
        else:
            list_cdelt1 = cdelt1
    else:
        list_of_arrays = [array]
        list_cast_to_float = [cast_to_float]
        list_crpix1 = [crpix1]
        list_crval1 = [crval1]
        list_cdelt1 = [cdelt1]

    hdulist = fits.HDUList()

    for ihdu, tmp_array in enumerate(list_of_arrays):
        if type(tmp_array) is not np.ndarray:
            raise ValueError("Array#" + str(ihdu) + "=" + str(tmp_array) +
                             " must be a numpy.ndarray")
        if ihdu == 0:
            if list_cast_to_float[ihdu]:
                hdu = fits.PrimaryHDU(data=tmp_array.astype(np.float),
                                      header=main_header)
            else:
                hdu = fits.PrimaryHDU(data=tmp_array, header=main_header)
        else:
            if list_cast_to_float[ihdu]:
                hdu = fits.ImageHDU(data=tmp_array.astype(np.float))
            else:
                hdu = fits.ImageHDU(data=tmp_array)

        # set additional FITS keywords if requested
        tmp_crpix1 = list_crpix1[ihdu]
        if tmp_crpix1 is not None:
            hdu.header.set('CRPIX1', tmp_crpix1, 'Reference pixel')
        tmp_crval1 = list_crval1[ihdu]
        if tmp_crval1 is not None:
            hdu.header.set('CRVAL1', tmp_crval1,
                           'Reference wavelength corresponding to CRPIX1')
        tmp_cdelt1 = list_cdelt1[ihdu]
        if tmp_cdelt1 is not None:
            hdu.header.set('CDELT1', tmp_cdelt1,
                           'Linear dispersion (angstrom/pixel)')

        # add HDU to HDUList
        hdulist.append(hdu)

    # write output file
    hdulist.writeto(file_name, overwrite=overwrite)
python
[ "def", "save_ndarray_to_fits", "(", "array", "=", "None", ",", "file_name", "=", "None", ",", "main_header", "=", "None", ",", "cast_to_float", "=", "True", ",", "crpix1", "=", "None", ",", "crval1", "=", "None", ",", "cdelt1", "=", "None", ",", "overwrite", "=", "True", ")", ":", "# protections", "if", "file_name", "is", "None", ":", "raise", "ValueError", "(", "\"File_name is not defined in save_ndarray_to_fits\"", ")", "if", "type", "(", "array", ")", "is", "list", ":", "list_of_arrays", "=", "array", "narrays", "=", "len", "(", "list_of_arrays", ")", "# cast_to_float must be a list of bools", "if", "type", "(", "cast_to_float", ")", "is", "not", "list", ":", "raise", "ValueError", "(", "\"Expected list of cast_to_float not found!\"", ")", "else", ":", "if", "len", "(", "cast_to_float", ")", "!=", "narrays", ":", "raise", "ValueError", "(", "\"Unexpected length of cast_to_float\"", ")", "list_cast_to_float", "=", "cast_to_float", "# check that the additional associated lists have been provided", "# and that they have the expected length (or they are None)", "for", "ldum", ",", "cdum", "in", "zip", "(", "[", "crpix1", ",", "crval1", ",", "cdelt1", "]", ",", "[", "'crpix1'", ",", "'crval1'", ",", "'cdelt1'", "]", ")", ":", "if", "ldum", "is", "not", "None", ":", "if", "type", "(", "ldum", ")", "is", "not", "list", ":", "raise", "ValueError", "(", "\"Expected list of \"", "+", "cdum", "+", "\" not found!\"", ")", "else", ":", "if", "len", "(", "ldum", ")", "!=", "narrays", ":", "raise", "ValueError", "(", "\"Unexpected length of \"", "+", "cdum", ")", "if", "crpix1", "is", "None", ":", "list_crpix1", "=", "[", "None", "]", "*", "narrays", "else", ":", "list_crpix1", "=", "crpix1", "if", "crval1", "is", "None", ":", "list_crval1", "=", "[", "None", "]", "*", "narrays", "else", ":", "list_crval1", "=", "crval1", "if", "cdelt1", "is", "None", ":", "list_cdelt1", "=", "[", "None", "]", "*", "narrays", "else", ":", "list_cdelt1", "=", "cdelt1", "else", ":", "list_of_arrays", "=", "[", "array", "]", "list_cast_to_float", "=", "[", "cast_to_float", "]", "list_crpix1", "=", "[", "crpix1", "]", "list_crval1", "=", "[", "crval1", "]", "list_cdelt1", "=", "[", "cdelt1", "]", "hdulist", "=", "fits", ".", "HDUList", "(", ")", "for", "ihdu", ",", "tmp_array", "in", "enumerate", "(", "list_of_arrays", ")", ":", "if", "type", "(", "tmp_array", ")", "is", "not", "np", ".", "ndarray", ":", "raise", "ValueError", "(", "\"Array#\"", "+", "str", "(", "ihdu", ")", "+", "\"=\"", "+", "str", "(", "tmp_array", ")", "+", "\" must be a numpy.ndarray\"", ")", "if", "ihdu", "==", "0", ":", "if", "list_cast_to_float", "[", "ihdu", "]", ":", "hdu", "=", "fits", ".", "PrimaryHDU", "(", "data", "=", "tmp_array", ".", "astype", "(", "np", ".", "float", ")", ",", "header", "=", "main_header", ")", "else", ":", "hdu", "=", "fits", ".", "PrimaryHDU", "(", "data", "=", "tmp_array", ",", "header", "=", "main_header", ")", "else", ":", "if", "list_cast_to_float", "[", "ihdu", "]", ":", "hdu", "=", "fits", ".", "ImageHDU", "(", "data", "=", "tmp_array", ".", "astype", "(", "np", ".", "float", ")", ")", "else", ":", "hdu", "=", "fits", ".", "ImageHDU", "(", "data", "=", "tmp_array", ")", "# set additional FITS keywords if requested", "tmp_crpix1", "=", "list_crpix1", "[", "ihdu", "]", "if", "tmp_crpix1", "is", "not", "None", ":", "hdu", ".", "header", ".", "set", "(", "'CRPIX1'", ",", "tmp_crpix1", ",", "'Reference pixel'", ")", "tmp_crval1", "=", "list_crval1", "[", "ihdu", "]", "if", "tmp_crval1", "is", "not", "None", ":", 
"hdu", ".", "header", ".", "set", "(", "'CRVAL1'", ",", "tmp_crval1", ",", "'Reference wavelength corresponding to CRPIX1'", ")", "tmp_cdelt1", "=", "list_cdelt1", "[", "ihdu", "]", "if", "tmp_cdelt1", "is", "not", "None", ":", "hdu", ".", "header", ".", "set", "(", "'CDELT1'", ",", "tmp_cdelt1", ",", "'Linear dispersion (angstrom/pixel)'", ")", "# add HDU to HDUList", "hdulist", ".", "append", "(", "hdu", ")", "# write output file", "hdulist", ".", "writeto", "(", "file_name", ",", "overwrite", "=", "overwrite", ")" ]
Save numpy array(s) into a FITS file with the provided filename. Parameters ---------- array : numpy array or list of numpy arrays Array(s) to be exported as the FITS file. If the input is a list, a multi-extension FITS file is generated assuming that the list contains a list of arrays. file_name : string File name for the output FITS file. main_header : astropy FITS header Header to be introduced in the primary HDU. cast_to_float : bool or list of booleans If True, the array(s) data are saved as float. If a list of arrays has been provided, this parameter must be either a list (with the same length) of booleans or None. crpix1 : float, list of floats or None If not None, this value is used for the keyword CRPIX1. If a list of arrays has been provided, this parameter must be either a list (with the same length) of floats or None. crval1 : float, list of floats or None If not None, this value is used for the keyword CRVAL1. If a list of arrays has been provided, this parameter must be either a list (with the same length) of floats or None. cdelt1 : float, list of floats or None If not None, this value is used for the keyword CDELT1. If a list of arrays has been provided, this parameter must be either a list (with the same length) of floats or None. overwrite : bool If True, the file is overwritten (in the case it already exists).
[ "Save", "numpy", "array", "(", "s", ")", "into", "a", "FITS", "file", "with", "the", "provided", "filename", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/save_ndarray_to_fits.py#L27-L145
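A minimal two-extension call, assuming astropy and numpy are installed; the file name, array contents and WCS keyword values are placeholders:

import numpy as np
from astropy.io import fits

primary = np.zeros((10, 10))   # primary image HDU
spectrum = np.ones(100)        # 1D spectrum in an image extension
save_ndarray_to_fits(array=[primary, spectrum],
                     file_name="demo.fits",
                     cast_to_float=[False, False],
                     crpix1=[None, 1.0],
                     crval1=[None, 3500.0],
                     cdelt1=[None, 0.77],
                     overwrite=True)
print(len(fits.open("demo.fits")))  # 2 HDUs: primary + extension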
guaix-ucm/pyemir
emirdrp/processing/bardetect.py
find_position
def find_position(edges, prow, bstart, bend, total=5):
    """Find an EMIR CSU bar position in an edge image.

    Parameters
    ==========
    edges : ndarray, a 2d image with 1 where there is a border, 0 otherwise
    prow : int, reference 'row' of the bars
    bstart : int, minimum 'x' position of a bar (0-based)
    bend : int, maximum 'x' position of a bar (0-based)
    total : int, number of rows to check near `prow`

    Return
    ======
    list of (x, y) centroids

    """
    nt = total // 2

    # This bar is too near the border
    if prow - nt < 0 or prow + nt >= edges.shape[0]:
        return []

    s2edges = edges[prow - nt:prow + nt + 1, bstart:bend]

    structure = morph.generate_binary_structure(2, 2)  # 8-way connection
    har, num_f = mes.label(s2edges, structure=structure)

    cen_of_mass = mes.center_of_mass(s2edges, labels=har,
                                     index=range(1, num_f + 1))

    # center_of_mass returns y, x coordinates
    cen_of_mass_off = [(x + bstart, prow - nt + y) for y, x in cen_of_mass]

    return cen_of_mass_off
python
[ "def", "find_position", "(", "edges", ",", "prow", ",", "bstart", ",", "bend", ",", "total", "=", "5", ")", ":", "nt", "=", "total", "//", "2", "# This bar is too near the border", "if", "prow", "-", "nt", "<", "0", "or", "prow", "+", "nt", ">=", "edges", ".", "shape", "[", "0", "]", ":", "return", "[", "]", "s2edges", "=", "edges", "[", "prow", "-", "nt", ":", "prow", "+", "nt", "+", "1", ",", "bstart", ":", "bend", "]", "structure", "=", "morph", ".", "generate_binary_structure", "(", "2", ",", "2", ")", "# 8 way conection", "har", ",", "num_f", "=", "mes", ".", "label", "(", "s2edges", ",", "structure", "=", "structure", ")", "cen_of_mass", "=", "mes", ".", "center_of_mass", "(", "s2edges", ",", "labels", "=", "har", ",", "index", "=", "range", "(", "1", ",", "num_f", "+", "1", ")", ")", "# center_of_mass returns y, x coordinates", "cen_of_mass_off", "=", "[", "(", "x", "+", "bstart", ",", "prow", "-", "nt", "+", "y", ")", "for", "y", ",", "x", "in", "cen_of_mass", "]", "return", "cen_of_mass_off" ]
Find an EMIR CSU bar position in an edge image. Parameters ========== edges : ndarray, a 2d image with 1 where there is a border, 0 otherwise prow : int, reference 'row' of the bars bstart : int, minimum 'x' position of a bar (0-based) bend : int, maximum 'x' position of a bar (0-based) total : int, number of rows to check near `prow` Return ====== list of (x, y) centroids
[ "Find", "a", "EMIR", "CSU", "bar", "position", "in", "a", "edge", "image", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/bardetect.py#L38-L77
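The labeling step is standard scipy.ndimage; a self-contained sketch on a toy edge mask showing the label/center_of_mass pair the function relies on (the bar geometry here is invented):

import numpy as np
from scipy import ndimage

edges = np.zeros((5, 12), dtype=int)
edges[1:4, 3] = 1   # one vertical edge segment
edges[1:4, 8] = 1   # a second one
structure = ndimage.generate_binary_structure(2, 2)  # 8-way connectivity
labels, num = ndimage.label(edges, structure=structure)
coms = ndimage.center_of_mass(edges, labels=labels, index=range(1, num + 1))
print([(x, y) for y, x in coms])  # [(3.0, 2.0), (8.0, 2.0)]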
guaix-ucm/pyemir
emirdrp/processing/bardetect.py
calc_fwhm
def calc_fwhm(img, region, fexpand=3, axis=0):
    """Compute the FWHM in the direction given by axis"""
    # We now compute the FWHM of the slit
    # Given the computed position of the slit
    # Expand 'fexpand' pixels around
    # and cut a slice in the median filtered image
    xpregion = expand_region(region, fexpand, fexpand)
    cslit = img[xpregion]

    # Collapse it
    pslit = cslit.mean(axis=axis)

    # Estimate the background as a flat line
    # starting in pslit[0] and ending in pslit[-1]
    x2 = len(pslit)
    y1, y2 = pslit[0], pslit[-1]
    mslope = (y2 - y1) / x2
    # background estimation
    backstim = mslope * numpy.arange(x2) + y1

    # We subtract background
    qslit = pslit - backstim
    # and find the pixel of the maximum
    pidx = numpy.argmax(qslit)
    peak, fwhm = fmod.compute_fwhm_1d_simple(qslit, pidx)
    return fwhm
python
[ "def", "calc_fwhm", "(", "img", ",", "region", ",", "fexpand", "=", "3", ",", "axis", "=", "0", ")", ":", "# We compute know the FWHM of the slit", "# Given the computed position of the slit", "# Expand 'fexpand' pixels around", "# and cut an slice in the median filtered image", "xpregion", "=", "expand_region", "(", "region", ",", "fexpand", ",", "fexpand", ")", "cslit", "=", "img", "[", "xpregion", "]", "# Collapse it", "pslit", "=", "cslit", ".", "mean", "(", "axis", "=", "axis", ")", "# Estimate the background as a flat line", "# starting in pslit[0] and ending in pslit[-1]", "x2", "=", "len", "(", "pslit", ")", "y1", ",", "y2", "=", "pslit", "[", "0", "]", ",", "pslit", "[", "-", "1", "]", "mslope", "=", "(", "y2", "-", "y1", ")", "/", "x2", "# background estimation", "backstim", "=", "mslope", "*", "numpy", ".", "arange", "(", "x2", ")", "+", "y1", "# We subtract background", "qslit", "=", "pslit", "-", "backstim", "# and find the pixel of the maximum", "pidx", "=", "numpy", ".", "argmax", "(", "qslit", ")", "peak", ",", "fwhm", "=", "fmod", ".", "compute_fwhm_1d_simple", "(", "qslit", ",", "pidx", ")", "return", "fwhm" ]
Compute the FWHM in the direction given by axis
[ "Compute", "the", "FWHM", "in", "the", "direction", "given", "by", "axis" ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/bardetect.py#L80-L107
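The background model in calc_fwhm is just the straight line through the profile's endpoints; a self-contained sketch of that step with a made-up collapsed profile:

import numpy

pslit = numpy.array([10., 11., 14., 30., 15., 12., 11.])  # toy collapsed slit profile
x2 = len(pslit)
y1, y2 = pslit[0], pslit[-1]
backstim = (y2 - y1) / x2 * numpy.arange(x2) + y1  # flat-line background estimate
qslit = pslit - backstim                           # background-subtracted profile
pidx = numpy.argmax(qslit)                         # pixel of the maximum
print(pidx, qslit[pidx])                           # -> 3 and the peak height above background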
guaix-ucm/pyemir
emirdrp/processing/bardetect.py
simple_prot
def simple_prot(x, start): """Find the first peak to the right of start""" # start must be >= 1 for i in range(start, len(x)-1): a, b, c = x[i-1], x[i], x[i+1] if b - a > 0 and b - c >= 0: return i else: return None
python
def simple_prot(x, start): """Find the first peak to the right of start""" # start must be >= 1 for i in range(start, len(x)-1): a, b, c = x[i-1], x[i], x[i+1] if b - a > 0 and b - c >= 0: return i else: return None
[ "def", "simple_prot", "(", "x", ",", "start", ")", ":", "# start must b >= 1", "for", "i", "in", "range", "(", "start", ",", "len", "(", "x", ")", "-", "1", ")", ":", "a", ",", "b", ",", "c", "=", "x", "[", "i", "-", "1", "]", ",", "x", "[", "i", "]", ",", "x", "[", "i", "+", "1", "]", "if", "b", "-", "a", ">", "0", "and", "b", "-", "c", ">=", "0", ":", "return", "i", "else", ":", "return", "None" ]
Find the first peak to the right of start
[ "Find", "the", "first", "peak", "to", "the", "right", "of", "start" ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/bardetect.py#L110-L120
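A quick usage check (assuming simple_prot as defined above is in scope); the sample data are made up:

x = [0, 1, 3, 2, 5, 4]
# index 1 is not a peak (x[2] is larger), so the scan continues to index 2
print(simple_prot(x, 1))   # -> 2, the first local maximum at or right of start
print(simple_prot(x, 3))   # -> 4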
guaix-ucm/pyemir
emirdrp/processing/bardetect.py
position_half_h
def position_half_h(pslit, cpix, backw=4): """Find the position where the value is half of the peak""" # Find the first peak to the right of cpix next_peak = simple_prot(pslit, cpix) if next_peak is None: raise ValueError dis_peak = next_peak - cpix wpos2 = cpix - dis_peak wpos1 = wpos2 - backw # Compute background in a window of width backw # in a position symmetrical to the peak # around cpix left_background = pslit[wpos1:wpos2].min() # height of the peak height = pslit[next_peak] - left_background half_height = left_background + 0.5 * height # Position at half peak, linear interpolation vv = pslit[wpos1:next_peak+1] - half_height res1, = numpy.nonzero(numpy.diff(vv > 0)) i1 = res1[0] xint = wpos1 + i1 + (0 - vv[i1]) / (vv[i1+1] - vv[i1]) return xint, next_peak, wpos1, wpos2, left_background, half_height
python
def position_half_h(pslit, cpix, backw=4): """Find the position where the value is half of the peak""" # Find the first peak to the right of cpix next_peak = simple_prot(pslit, cpix) if next_peak is None: raise ValueError dis_peak = next_peak - cpix wpos2 = cpix - dis_peak wpos1 = wpos2 - backw # Compute background in a window of width backw # in a position symmetrical to the peak # around cpix left_background = pslit[wpos1:wpos2].min() # height of the peak height = pslit[next_peak] - left_background half_height = left_background + 0.5 * height # Position at half peak, linear interpolation vv = pslit[wpos1:next_peak+1] - half_height res1, = numpy.nonzero(numpy.diff(vv > 0)) i1 = res1[0] xint = wpos1 + i1 + (0 - vv[i1]) / (vv[i1+1] - vv[i1]) return xint, next_peak, wpos1, wpos2, left_background, half_height
[ "def", "position_half_h", "(", "pslit", ",", "cpix", ",", "backw", "=", "4", ")", ":", "# Find the first peak to the right of cpix", "next_peak", "=", "simple_prot", "(", "pslit", ",", "cpix", ")", "if", "next_peak", "is", "None", ":", "raise", "ValueError", "dis_peak", "=", "next_peak", "-", "cpix", "wpos2", "=", "cpix", "-", "dis_peak", "wpos1", "=", "wpos2", "-", "backw", "# Compute background in a window of width backw", "# in a position simetrical to the peak", "# around cpix", "left_background", "=", "pslit", "[", "wpos1", ":", "wpos2", "]", ".", "min", "(", ")", "# height of the peak", "height", "=", "pslit", "[", "next_peak", "]", "-", "left_background", "half_height", "=", "left_background", "+", "0.5", "*", "height", "# Position at halg peak, linear interpolation", "vv", "=", "pslit", "[", "wpos1", ":", "next_peak", "+", "1", "]", "-", "half_height", "res1", ",", "=", "numpy", ".", "nonzero", "(", "numpy", ".", "diff", "(", "vv", ">", "0", ")", ")", "i1", "=", "res1", "[", "0", "]", "xint", "=", "wpos1", "+", "i1", "+", "(", "0", "-", "vv", "[", "i1", "]", ")", "/", "(", "vv", "[", "i1", "+", "1", "]", "-", "vv", "[", "i1", "]", ")", "return", "xint", ",", "next_peak", ",", "wpos1", ",", "wpos2", ",", "left_background", ",", "half_height" ]
Find the position where the value is half of the peak
[ "Find", "the", "position", "where", "the", "value", "is", "half", "of", "the", "peak" ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/bardetect.py#L123-L155
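The half-height crossing is located by linear interpolation between the two samples that bracket the sign change; a self-contained sketch of just that step (toy numbers):

import numpy

vv = numpy.array([-3.0, -1.0, 2.0, 5.0])           # profile minus half_height
res1, = numpy.nonzero(numpy.diff(vv > 0))          # indices where the sign flips
i1 = res1[0]                                       # first crossing: between samples 1 and 2
xint = i1 + (0 - vv[i1]) / (vv[i1 + 1] - vv[i1])   # -> 1 + 1/3
print(xint)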
guaix-ucm/pyemir
emirdrp/processing/bardetect.py
locate_bar_l
def locate_bar_l(icut, epos): """Fine position of the left CSU bar""" def swap_coor(x): return x def swap_line(tab): return tab return _locate_bar_gen(icut, epos, transform1=swap_coor, transform2=swap_line )
python
def locate_bar_l(icut, epos): """Fine position of the left CSU bar""" def swap_coor(x): return x def swap_line(tab): return tab return _locate_bar_gen(icut, epos, transform1=swap_coor, transform2=swap_line )
[ "def", "locate_bar_l", "(", "icut", ",", "epos", ")", ":", "def", "swap_coor", "(", "x", ")", ":", "return", "x", "def", "swap_line", "(", "tab", ")", ":", "return", "tab", "return", "_locate_bar_gen", "(", "icut", ",", "epos", ",", "transform1", "=", "swap_coor", ",", "transform2", "=", "swap_line", ")" ]
Fine position of the left CSU bar
[ "Fine", "position", "of", "the", "left", "CSU", "bar" ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/bardetect.py#L158-L169
guaix-ucm/pyemir
emirdrp/processing/bardetect.py
locate_bar_r
def locate_bar_r(icut, epos): """Fine position of the right CSU bar""" sm = len(icut) def swap_coor(x): return sm - 1 - x def swap_line(tab): return tab[::-1] return _locate_bar_gen(icut, epos, transform1=swap_coor, transform2=swap_line)
python
def locate_bar_r(icut, epos): """Fine position of the right CSU bar""" sm = len(icut) def swap_coor(x): return sm - 1 - x def swap_line(tab): return tab[::-1] return _locate_bar_gen(icut, epos, transform1=swap_coor, transform2=swap_line)
[ "def", "locate_bar_r", "(", "icut", ",", "epos", ")", ":", "sm", "=", "len", "(", "icut", ")", "def", "swap_coor", "(", "x", ")", ":", "return", "sm", "-", "1", "-", "x", "def", "swap_line", "(", "tab", ")", ":", "return", "tab", "[", ":", ":", "-", "1", "]", "return", "_locate_bar_gen", "(", "icut", ",", "epos", ",", "transform1", "=", "swap_coor", ",", "transform2", "=", "swap_line", ")" ]
Fine position of the right CSU bar
[ "Fine", "position", "of", "the", "right", "CSU", "bar" ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/bardetect.py#L172-L183
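locate_bar_l and locate_bar_r share one implementation: the right-bar case just mirrors the cut and its coordinates before and after the generic search. A small sketch of the two transforms on toy data:

icut = [0, 1, 2, 3, 4]
sm = len(icut)

def swap_coor(x):          # map a coordinate into the mirrored frame (and back)
    return sm - 1 - x

def swap_line(tab):        # mirror the 1d cut itself
    return tab[::-1]

print(swap_coor(0), swap_line(icut))   # -> 4 [4, 3, 2, 1, 0]
print(swap_coor(swap_coor(3)))         # -> 3, the transform is its own inverse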
guaix-ucm/pyemir
emirdrp/processing/bardetect.py
_locate_bar_gen
def _locate_bar_gen(icut, epos, transform1, transform2): """Generic function for the fine position of the CSU""" epos_pix = coor_to_pix_1d(epos) # transform -> epos_pix_s = transform1(epos_pix) icut2 = transform2(icut) # try: res = position_half_h(icut2, epos_pix_s) xint_s, next_peak_s, wpos1_s, wpos2_s, background_level, half_height = res # xint = transform1(xint_s) # epos_f = xint error = 0 except ValueError: error = 2 epos_f = epos return epos_pix, epos_f, error
python
def _locate_bar_gen(icut, epos, transform1, transform2): """Generic function for the fine position of the CSU""" epos_pix = coor_to_pix_1d(epos) # transform -> epos_pix_s = transform1(epos_pix) icut2 = transform2(icut) # try: res = position_half_h(icut2, epos_pix_s) xint_s, next_peak_s, wpos1_s, wpos2_s, background_level, half_height = res # xint = transform1(xint_s) # epos_f = xint error = 0 except ValueError: error = 2 epos_f = epos return epos_pix, epos_f, error
[ "def", "_locate_bar_gen", "(", "icut", ",", "epos", ",", "transform1", ",", "transform2", ")", ":", "epos_pix", "=", "coor_to_pix_1d", "(", "epos", ")", "# transform ->", "epos_pix_s", "=", "transform1", "(", "epos_pix", ")", "icut2", "=", "transform2", "(", "icut", ")", "#", "try", ":", "res", "=", "position_half_h", "(", "icut2", ",", "epos_pix_s", ")", "xint_s", ",", "next_peak_s", ",", "wpos1_s", ",", "wpos2_s", ",", "background_level", ",", "half_height", "=", "res", "#", "xint", "=", "transform1", "(", "xint_s", ")", "#", "epos_f", "=", "xint", "error", "=", "0", "except", "ValueError", ":", "error", "=", "2", "epos_f", "=", "epos", "return", "epos_pix", ",", "epos_f", ",", "error" ]
Generic function for the fine position of the CSU
[ "Generic", "function", "for", "the", "fine", "position", "of", "the", "CSU" ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/bardetect.py#L186-L211
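A sketch of the fallback behaviour in _locate_bar_gen: when the half-height search fails, the coarse position is kept and error code 2 is returned. The helper name is hypothetical, and the integer rounding merely stands in for coor_to_pix_1d, whose exact convention is not shown here:

def fine_or_fallback(icut, epos):
    epos_pix = int(round(epos))        # stand-in for coor_to_pix_1d
    try:
        xint = position_half_h(icut, epos_pix)[0]
        return epos_pix, xint, 0       # success: refined position, no error
    except ValueError:
        return epos_pix, epos, 2       # failure: keep coarse position, flag error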
guaix-ucm/pyemir
emirdrp/processing/bardetect.py
overlap
def overlap(intv1, intv2): """Overlapping of two intervals""" return max(0, min(intv1[1], intv2[1]) - max(intv1[0], intv2[0]))
python
def overlap(intv1, intv2): """Overlapping of two intervals""" return max(0, min(intv1[1], intv2[1]) - max(intv1[0], intv2[0]))
[ "def", "overlap", "(", "intv1", ",", "intv2", ")", ":", "return", "max", "(", "0", ",", "min", "(", "intv1", "[", "1", "]", ",", "intv2", "[", "1", "]", ")", "-", "max", "(", "intv1", "[", "0", "]", ",", "intv2", "[", "0", "]", ")", ")" ]
Overlapping of two intervals
[ "Overlaping", "of", "two", "intervals" ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/bardetect.py#L430-L432
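Usage check for overlap (assuming it is in scope); intervals are (start, end) pairs, and disjoint intervals clamp to zero:

print(overlap((0, 10), (5, 20)))    # -> 5
print(overlap((0, 10), (15, 20)))   # -> 0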
guaix-ucm/pyemir
emirdrp/tools/fit_boundaries.py
integrity_check
def integrity_check(bounddict, max_dtu_offset): """Integrity check of 'bounddict' content. Parameters ---------- bounddict : JSON structure Structure employed to store bounddict information. max_dtu_offset : float Maximum allowed difference in DTU location (mm) for each parameter """ if 'meta_info' not in bounddict.keys(): raise ValueError('"meta_info" not found in JSON file') if 'description' not in bounddict['meta_info'].keys(): raise ValueError('"description" not found in JSON file') if bounddict['meta_info']['description'] != \ 'slitlet boundaries from fits to continuum-lamp exposures': raise ValueError('Unexpected "description" in JSON file') grism = bounddict['tags']['grism'] print('>>> grism...:', grism) spfilter = bounddict['tags']['filter'] print('>>> filter..:', spfilter) valid_slitlets = ["slitlet" + str(i).zfill(2) for i in range(1, EMIR_NBARS + 1)] read_slitlets = list(bounddict['contents'].keys()) read_slitlets.sort() first_dtu = True first_dtu_configuration = None # avoid PyCharm warning list_dtu_configurations = [] for tmp_slitlet in read_slitlets: if tmp_slitlet not in valid_slitlets: raise ValueError("Unexpected slitlet key: " + tmp_slitlet) # for each slitlet, check valid DATE-OBS (ISO 8601) read_dateobs = list(bounddict['contents'][tmp_slitlet].keys()) read_dateobs.sort() for tmp_dateobs in read_dateobs: try: datetime.strptime(tmp_dateobs, "%Y-%m-%dT%H:%M:%S.%f") except ValueError: print("Unexpected date_obs key: " + tmp_dateobs) raise # for each DATE-OBS, check expected fields tmp_dict = bounddict['contents'][tmp_slitlet][tmp_dateobs] valid_keys = ["boundary_coef_lower", "boundary_coef_upper", "boundary_xmax_lower", "boundary_xmax_upper", "boundary_xmin_lower", "boundary_xmin_upper", "csu_bar_left", "csu_bar_right", "csu_bar_slit_center", "csu_bar_slit_width", "rotang", "xdtu", "xdtu_0", "ydtu", "ydtu_0", "zdtu", "zdtu_0", "zzz_info1", "zzz_info2"] read_keys = tmp_dict.keys() for tmp_key in read_keys: if tmp_key not in valid_keys: print("ERROR:") print("grism...:", grism) print("slitlet.:", tmp_slitlet) print("date_obs:", tmp_dateobs) raise ValueError("Unexpected key " + tmp_key) for tmp_key in valid_keys: if tmp_key not in read_keys: print("ERROR:") print("grism...:", grism) print("slitlet.:", tmp_slitlet) print("date_obs:", tmp_dateobs) raise ValueError("Expected key " + tmp_key + " not found") if tmp_dict['boundary_xmax_lower'] <= \ tmp_dict['boundary_xmin_lower']: print("ERROR:") print("grism...:", grism) print("slitlet.:", tmp_slitlet) print("date_obs:", tmp_dateobs) print("boundary_xmin_lower", tmp_dict['boundary_xmin_lower']) print("boundary_xmax_lower", tmp_dict['boundary_xmax_lower']) raise ValueError("Unexpected boundary_xmax_lower <= " "boundary_xmin_lower") if tmp_dict['boundary_xmax_upper'] <= \ tmp_dict['boundary_xmin_upper']: print("ERROR:") print("grism...:", grism) print("slitlet.:", tmp_slitlet) print("date_obs:", tmp_dateobs) print("boundary_xmin_upper", tmp_dict['boundary_xmin_upper']) print("boundary_xmax_upper", tmp_dict['boundary_xmax_upper']) raise ValueError("Unexpected boundary_xmax_upper <= " "boundary_xmin_upper") if first_dtu: first_dtu_configuration = \ DtuConfiguration.define_from_dictionary(tmp_dict) first_dtu = False list_dtu_configurations.append(first_dtu_configuration) else: last_dtu_configuration = \ DtuConfiguration.define_from_dictionary(tmp_dict) if not first_dtu_configuration.closeto( last_dtu_configuration, abserror=max_dtu_offset ): print("ERROR:") print("grism...:", grism) print("slitlet.:", tmp_slitlet) print("date_obs:", 
tmp_dateobs) print("First DTU configuration..:\n\t", first_dtu_configuration) print("Last DTU configuration...:\n\t", last_dtu_configuration) raise ValueError("Unexpected DTU configuration") list_dtu_configurations.append(last_dtu_configuration) print("* Integrity check OK!") averaged_dtu_configuration = average_dtu_configurations( list_dtu_configurations) maxdiff_dtu_configuration = maxdiff_dtu_configurations( list_dtu_configurations ) return averaged_dtu_configuration, maxdiff_dtu_configuration
python
def integrity_check(bounddict, max_dtu_offset): """Integrity check of 'bounddict' content. Parameters ---------- bounddict : JSON structure Structure employed to store bounddict information. max_dtu_offset : float Maximum allowed difference in DTU location (mm) for each parameter """ if 'meta_info' not in bounddict.keys(): raise ValueError('"meta_info" not found in JSON file') if 'description' not in bounddict['meta_info'].keys(): raise ValueError('"description" not found in JSON file') if bounddict['meta_info']['description'] != \ 'slitlet boundaries from fits to continuum-lamp exposures': raise ValueError('Unexpected "description" in JSON file') grism = bounddict['tags']['grism'] print('>>> grism...:', grism) spfilter = bounddict['tags']['filter'] print('>>> filter..:', spfilter) valid_slitlets = ["slitlet" + str(i).zfill(2) for i in range(1, EMIR_NBARS + 1)] read_slitlets = list(bounddict['contents'].keys()) read_slitlets.sort() first_dtu = True first_dtu_configuration = None # avoid PyCharm warning list_dtu_configurations = [] for tmp_slitlet in read_slitlets: if tmp_slitlet not in valid_slitlets: raise ValueError("Unexpected slitlet key: " + tmp_slitlet) # for each slitlet, check valid DATE-OBS (ISO 8601) read_dateobs = list(bounddict['contents'][tmp_slitlet].keys()) read_dateobs.sort() for tmp_dateobs in read_dateobs: try: datetime.strptime(tmp_dateobs, "%Y-%m-%dT%H:%M:%S.%f") except ValueError: print("Unexpected date_obs key: " + tmp_dateobs) raise # for each DATE-OBS, check expected fields tmp_dict = bounddict['contents'][tmp_slitlet][tmp_dateobs] valid_keys = ["boundary_coef_lower", "boundary_coef_upper", "boundary_xmax_lower", "boundary_xmax_upper", "boundary_xmin_lower", "boundary_xmin_upper", "csu_bar_left", "csu_bar_right", "csu_bar_slit_center", "csu_bar_slit_width", "rotang", "xdtu", "xdtu_0", "ydtu", "ydtu_0", "zdtu", "zdtu_0", "zzz_info1", "zzz_info2"] read_keys = tmp_dict.keys() for tmp_key in read_keys: if tmp_key not in valid_keys: print("ERROR:") print("grism...:", grism) print("slitlet.:", tmp_slitlet) print("date_obs:", tmp_dateobs) raise ValueError("Unexpected key " + tmp_key) for tmp_key in valid_keys: if tmp_key not in read_keys: print("ERROR:") print("grism...:", grism) print("slitlet.:", tmp_slitlet) print("date_obs:", tmp_dateobs) raise ValueError("Expected key " + tmp_key + " not found") if tmp_dict['boundary_xmax_lower'] <= \ tmp_dict['boundary_xmin_lower']: print("ERROR:") print("grism...:", grism) print("slitlet.:", tmp_slitlet) print("date_obs:", tmp_dateobs) print("boundary_xmin_lower", tmp_dict['boundary_xmin_lower']) print("boundary_xmax_lower", tmp_dict['boundary_xmax_lower']) raise ValueError("Unexpected boundary_xmax_lower <= " "boundary_xmin_lower") if tmp_dict['boundary_xmax_upper'] <= \ tmp_dict['boundary_xmin_upper']: print("ERROR:") print("grism...:", grism) print("slitlet.:", tmp_slitlet) print("date_obs:", tmp_dateobs) print("boundary_xmin_upper", tmp_dict['boundary_xmin_upper']) print("boundary_xmax_upper", tmp_dict['boundary_xmax_upper']) raise ValueError("Unexpected boundary_xmax_upper <= " "boundary_xmin_upper") if first_dtu: first_dtu_configuration = \ DtuConfiguration.define_from_dictionary(tmp_dict) first_dtu = False list_dtu_configurations.append(first_dtu_configuration) else: last_dtu_configuration = \ DtuConfiguration.define_from_dictionary(tmp_dict) if not first_dtu_configuration.closeto( last_dtu_configuration, abserror=max_dtu_offset ): print("ERROR:") print("grism...:", grism) print("slitlet.:", tmp_slitlet) print("date_obs:", 
tmp_dateobs) print("First DTU configuration..:\n\t", first_dtu_configuration) print("Last DTU configuration...:\n\t", last_dtu_configuration) raise ValueError("Unexpected DTU configuration") list_dtu_configurations.append(last_dtu_configuration) print("* Integrity check OK!") averaged_dtu_configuration = average_dtu_configurations( list_dtu_configurations) maxdiff_dtu_configuration = maxdiff_dtu_configurations( list_dtu_configurations ) return averaged_dtu_configuration, maxdiff_dtu_configuration
[ "def", "integrity_check", "(", "bounddict", ",", "max_dtu_offset", ")", ":", "if", "'meta_info'", "not", "in", "bounddict", ".", "keys", "(", ")", ":", "raise", "ValueError", "(", "'\"meta_info\" not found in JSON file'", ")", "if", "'description'", "not", "in", "bounddict", "[", "'meta_info'", "]", ".", "keys", "(", ")", ":", "raise", "ValueError", "(", "'\"description\" not found in JSON file'", ")", "if", "bounddict", "[", "'meta_info'", "]", "[", "'description'", "]", "!=", "'slitlet boundaries from fits to continuum-lamp exposures'", ":", "raise", "ValueError", "(", "'Unexpected \"description\" in JSON file'", ")", "grism", "=", "bounddict", "[", "'tags'", "]", "[", "'grism'", "]", "print", "(", "'>>> grism...:'", ",", "grism", ")", "spfilter", "=", "bounddict", "[", "'tags'", "]", "[", "'filter'", "]", "print", "(", "'>>> filter..:'", ",", "spfilter", ")", "valid_slitlets", "=", "[", "\"slitlet\"", "+", "str", "(", "i", ")", ".", "zfill", "(", "2", ")", "for", "i", "in", "range", "(", "1", ",", "EMIR_NBARS", "+", "1", ")", "]", "read_slitlets", "=", "list", "(", "bounddict", "[", "'contents'", "]", ".", "keys", "(", ")", ")", "read_slitlets", ".", "sort", "(", ")", "first_dtu", "=", "True", "first_dtu_configuration", "=", "None", "# avoid PyCharm warning", "list_dtu_configurations", "=", "[", "]", "for", "tmp_slitlet", "in", "read_slitlets", ":", "if", "tmp_slitlet", "not", "in", "valid_slitlets", ":", "raise", "ValueError", "(", "\"Unexpected slitlet key: \"", "+", "tmp_slitlet", ")", "# for each slitlet, check valid DATE-OBS (ISO 8601)", "read_dateobs", "=", "list", "(", "bounddict", "[", "'contents'", "]", "[", "tmp_slitlet", "]", ".", "keys", "(", ")", ")", "read_dateobs", ".", "sort", "(", ")", "for", "tmp_dateobs", "in", "read_dateobs", ":", "try", ":", "datetime", ".", "strptime", "(", "tmp_dateobs", ",", "\"%Y-%m-%dT%H:%M:%S.%f\"", ")", "except", "ValueError", ":", "print", "(", "\"Unexpected date_obs key: \"", "+", "tmp_dateobs", ")", "raise", "# for each DATE-OBS, check expected fields", "tmp_dict", "=", "bounddict", "[", "'contents'", "]", "[", "tmp_slitlet", "]", "[", "tmp_dateobs", "]", "valid_keys", "=", "[", "\"boundary_coef_lower\"", ",", "\"boundary_coef_upper\"", ",", "\"boundary_xmax_lower\"", ",", "\"boundary_xmax_upper\"", ",", "\"boundary_xmin_lower\"", ",", "\"boundary_xmin_upper\"", ",", "\"csu_bar_left\"", ",", "\"csu_bar_right\"", ",", "\"csu_bar_slit_center\"", ",", "\"csu_bar_slit_width\"", ",", "\"rotang\"", ",", "\"xdtu\"", ",", "\"xdtu_0\"", ",", "\"ydtu\"", ",", "\"ydtu_0\"", ",", "\"zdtu\"", ",", "\"zdtu_0\"", ",", "\"zzz_info1\"", ",", "\"zzz_info2\"", "]", "read_keys", "=", "tmp_dict", ".", "keys", "(", ")", "for", "tmp_key", "in", "read_keys", ":", "if", "tmp_key", "not", "in", "valid_keys", ":", "print", "(", "\"ERROR:\"", ")", "print", "(", "\"grism...:\"", ",", "grism", ")", "print", "(", "\"slitlet.:\"", ",", "tmp_slitlet", ")", "print", "(", "\"date_obs:\"", ",", "tmp_dateobs", ")", "raise", "ValueError", "(", "\"Unexpected key \"", "+", "tmp_key", ")", "for", "tmp_key", "in", "valid_keys", ":", "if", "tmp_key", "not", "in", "read_keys", ":", "print", "(", "\"ERROR:\"", ")", "print", "(", "\"grism...:\"", ",", "grism", ")", "print", "(", "\"slitlet.:\"", ",", "tmp_slitlet", ")", "print", "(", "\"date_obs:\"", ",", "tmp_dateobs", ")", "raise", "ValueError", "(", "\"Expected key \"", "+", "tmp_key", "+", "\" not found\"", ")", "if", "tmp_dict", "[", "'boundary_xmax_lower'", "]", "<=", "tmp_dict", "[", "'boundary_xmin_lower'", "]", ":", 
"print", "(", "\"ERROR:\"", ")", "print", "(", "\"grism...:\"", ",", "grism", ")", "print", "(", "\"slitlet.:\"", ",", "tmp_slitlet", ")", "print", "(", "\"date_obs:\"", ",", "tmp_dateobs", ")", "print", "(", "\"boundary_xmin_lower\"", ",", "tmp_dict", "[", "'boundary_xmin_lower'", "]", ")", "print", "(", "\"boundary_xmax_lower\"", ",", "tmp_dict", "[", "'boundary_xmax_lower'", "]", ")", "raise", "ValueError", "(", "\"Unexpected boundary_xmax_lower <= \"", "\"boundary_xmin_lower\"", ")", "if", "tmp_dict", "[", "'boundary_xmax_upper'", "]", "<=", "tmp_dict", "[", "'boundary_xmin_upper'", "]", ":", "print", "(", "\"ERROR:\"", ")", "print", "(", "\"grism...:\"", ",", "grism", ")", "print", "(", "\"slitlet.:\"", ",", "tmp_slitlet", ")", "print", "(", "\"date_obs:\"", ",", "tmp_dateobs", ")", "print", "(", "\"boundary_xmin_upper\"", ",", "tmp_dict", "[", "'boundary_xmin_upper'", "]", ")", "print", "(", "\"boundary_xmax_upper\"", ",", "tmp_dict", "[", "'boundary_xmax_upper'", "]", ")", "raise", "ValueError", "(", "\"Unexpected boundary_xmax_upper <= \"", "\"boundary_xmin_upper\"", ")", "if", "first_dtu", ":", "first_dtu_configuration", "=", "DtuConfiguration", ".", "define_from_dictionary", "(", "tmp_dict", ")", "first_dtu", "=", "False", "list_dtu_configurations", ".", "append", "(", "first_dtu_configuration", ")", "else", ":", "last_dtu_configuration", "=", "DtuConfiguration", ".", "define_from_dictionary", "(", "tmp_dict", ")", "if", "not", "first_dtu_configuration", ".", "closeto", "(", "last_dtu_configuration", ",", "abserror", "=", "max_dtu_offset", ")", ":", "print", "(", "\"ERROR:\"", ")", "print", "(", "\"grism...:\"", ",", "grism", ")", "print", "(", "\"slitlet.:\"", ",", "tmp_slitlet", ")", "print", "(", "\"date_obs:\"", ",", "tmp_dateobs", ")", "print", "(", "\"First DTU configuration..:\\n\\t\"", ",", "first_dtu_configuration", ")", "print", "(", "\"Last DTU configuration...:\\n\\t\"", ",", "last_dtu_configuration", ")", "raise", "ValueError", "(", "\"Unexpected DTU configuration\"", ")", "list_dtu_configurations", ".", "append", "(", "last_dtu_configuration", ")", "print", "(", "\"* Integrity check OK!\"", ")", "averaged_dtu_configuration", "=", "average_dtu_configurations", "(", "list_dtu_configurations", ")", "maxdiff_dtu_configuration", "=", "maxdiff_dtu_configurations", "(", "list_dtu_configurations", ")", "return", "averaged_dtu_configuration", ",", "maxdiff_dtu_configuration" ]
Integrity check of 'bounddict' content. Parameters ---------- bounddict : JSON structure Structure employed to store bounddict information. max_dtu_offset : float Maximum allowed difference in DTU location (mm) for each parameter
[ "Integrity", "check", "of", "bounddict", "content", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/fit_boundaries.py#L63-L197
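integrity_check validates each DATE-OBS key with datetime.strptime against an ISO 8601 format with fractional seconds; a self-contained sketch of that check (the timestamps are made up):

from datetime import datetime

fmt = "%Y-%m-%dT%H:%M:%S.%f"
datetime.strptime("2018-05-14T03:21:07.123", fmt)   # parses cleanly
try:
    datetime.strptime("2018-05-14", fmt)            # missing time part
except ValueError as err:
    print("rejected:", err)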
guaix-ucm/pyemir
emirdrp/tools/fit_boundaries.py
exvp_scalar
def exvp_scalar(x, y, x0, y0, c2, c4, theta0, ff): """Convert virtual pixel to real pixel. Parameters ---------- x : array-like of floats X coordinate (pixel). y : array-like of floats Y coordinate (pixel). x0 : float X coordinate of reference pixel, in units of 1E3. y0 : float Y coordinate of reference pixel, in units of 1E3. c2 : float Coefficient corresponding to the term r**2 in distortion equation, in units of 1E4. c4 : float Coefficient corresponding to the term r**4 in distortion equation, in units of 1E9. theta0 : float Additional rotation angle (radians). ff : float Scaling factor to be applied to the Y axis. Returns ------- xdist, ydist : tuple of floats Distorted coordinates. """ # plate scale: 0.1944 arcsec/pixel # conversion factor (in radian/pixel) factor = 0.1944 * np.pi/(180.0*3600) # distance from image center (pixels) r_pix = np.sqrt((x - x0*1000)**2 + (y - y0*1000)**2) # distance from image center (radians) r_rad = factor * r_pix # radial distortion: this number is 1.0 for r=0 and increases # slightly (reaching values around 1.033) for r~sqrt(2)*1024 # (the distance to the corner of the detector measured from the # center) rdist = (1 + c2 * 1.0E4 * r_rad**2 + c4 * 1.0E9 * r_rad**4) # angle measured from the Y axis towards the X axis theta = np.arctan((x - x0*1000)/(y - y0*1000)) if y < y0*1000: theta = theta - np.pi # distorted coordinates xdist = (rdist * r_pix * np.sin(theta+theta0)) + x0*1000 ydist = (ff * rdist * r_pix * np.cos(theta+theta0)) + y0*1000 return xdist, ydist
python
def exvp_scalar(x, y, x0, y0, c2, c4, theta0, ff): """Convert virtual pixel to real pixel. Parameters ---------- x : array-like of floats X coordinate (pixel). y : array-like of floats Y coordinate (pixel). x0 : float X coordinate of reference pixel, in units of 1E3. y0 : float Y coordinate of reference pixel, in units of 1E3. c2 : float Coefficient corresponding to the term r**2 in distortion equation, in units of 1E4. c4 : float Coefficient corresponding to the term r**4 in distortion equation, in units of 1E9. theta0 : float Additional rotation angle (radians). ff : float Scaling factor to be applied to the Y axis. Returns ------- xdist, ydist : tuple of floats Distorted coordinates. """ # plate scale: 0.1944 arcsec/pixel # conversion factor (in radian/pixel) factor = 0.1944 * np.pi/(180.0*3600) # distance from image center (pixels) r_pix = np.sqrt((x - x0*1000)**2 + (y - y0*1000)**2) # distance from image center (radians) r_rad = factor * r_pix # radial distortion: this number is 1.0 for r=0 and increases # slightly (reaching values around 1.033) for r~sqrt(2)*1024 # (the distance to the corner of the detector measured from the # center) rdist = (1 + c2 * 1.0E4 * r_rad**2 + c4 * 1.0E9 * r_rad**4) # angle measured from the Y axis towards the X axis theta = np.arctan((x - x0*1000)/(y - y0*1000)) if y < y0*1000: theta = theta - np.pi # distorted coordinates xdist = (rdist * r_pix * np.sin(theta+theta0)) + x0*1000 ydist = (ff * rdist * r_pix * np.cos(theta+theta0)) + y0*1000 return xdist, ydist
[ "def", "exvp_scalar", "(", "x", ",", "y", ",", "x0", ",", "y0", ",", "c2", ",", "c4", ",", "theta0", ",", "ff", ")", ":", "# plate scale: 0.1944 arcsec/pixel", "# conversion factor (in radian/pixel)", "factor", "=", "0.1944", "*", "np", ".", "pi", "/", "(", "180.0", "*", "3600", ")", "# distance from image center (pixels)", "r_pix", "=", "np", ".", "sqrt", "(", "(", "x", "-", "x0", "*", "1000", ")", "**", "2", "+", "(", "y", "-", "y0", "*", "1000", ")", "**", "2", ")", "# distance from imagen center (radians)", "r_rad", "=", "factor", "*", "r_pix", "# radial distortion: this number is 1.0 for r=0 and increases", "# slightly (reaching values around 1.033) for r~sqrt(2)*1024", "# (the distance to the corner of the detector measured from the", "# center)", "rdist", "=", "(", "1", "+", "c2", "*", "1.0E4", "*", "r_rad", "**", "2", "+", "c4", "*", "1.0E9", "*", "r_rad", "**", "4", ")", "# angle measured from the Y axis towards the X axis", "theta", "=", "np", ".", "arctan", "(", "(", "x", "-", "x0", "*", "1000", ")", "/", "(", "y", "-", "y0", "*", "1000", ")", ")", "if", "y", "<", "y0", "*", "1000", ":", "theta", "=", "theta", "-", "np", ".", "pi", "# distorted coordinates", "xdist", "=", "(", "rdist", "*", "r_pix", "*", "np", ".", "sin", "(", "theta", "+", "theta0", ")", ")", "+", "x0", "*", "1000", "ydist", "=", "(", "ff", "*", "rdist", "*", "r_pix", "*", "np", ".", "cos", "(", "theta", "+", "theta0", ")", ")", "+", "y0", "*", "1000", "return", "xdist", ",", "ydist" ]
Convert virtual pixel to real pixel. Parameters ---------- x : array-like of floats X coordinate (pixel). y : array-like of floats Y coordinate (pixel). x0 : float X coordinate of reference pixel, in units of 1E3. y0 : float Y coordinate of reference pixel, in units of 1E3. c2 : float Coefficient corresponding to the term r**2 in distortion equation, in units of 1E4. c4 : float Coefficient corresponding to the term r**4 in distortion equation, in units of 1E9. theta0 : float Additional rotation angle (radians). ff : float Scaling factor to be applied to the Y axis. Returns ------- xdist, ydist : tuple of floats Distorted coordinates.
[ "Convert", "virtual", "pixel", "to", "real", "pixel", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/fit_boundaries.py#L200-L253
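A worked call to exvp_scalar (assuming it is in scope); the parameter values below are invented purely to exercise the formula, not calibrated EMIR values:

x0, y0 = 1.024, 1.024            # reference pixel, in units of 1E3 -> (1024, 1024)
c2, c4, theta0, ff = 1.0, 0.0, 0.0, 1.0
xdist, ydist = exvp_scalar(1500.0, 1500.0, x0, y0, c2, c4, theta0, ff)
print(xdist, ydist)   # ~ (1501.9, 1501.9): pushed slightly outward since rdist > 1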
guaix-ucm/pyemir
emirdrp/tools/fit_boundaries.py
exvp
def exvp(x, y, x0, y0, c2, c4, theta0, ff): """Convert virtual pixel(s) to real pixel(s). This function makes use of exvp_scalar(), which performs the conversion for a single point (x, y), over an array of X and Y values. Parameters ---------- x : array-like X coordinate (pixel). y : array-like Y coordinate (pixel). x0 : float X coordinate of reference pixel. y0 : float Y coordinate of reference pixel. c2 : float Coefficient corresponding to the term r**2 in distortion equation. c4 : float Coefficient corresponding to the term r**4 in distortion equation. theta0 : float Additional rotation angle (radians). ff : float Scaling factor to be applied to the Y axis. Returns ------- xdist, ydist : tuple of floats (or two arrays of floats) Distorted coordinates. """ if all([np.isscalar(x), np.isscalar(y)]): xdist, ydist = exvp_scalar(x, y, x0=x0, y0=y0, c2=c2, c4=c4, theta0=theta0, ff=ff) return xdist, ydist elif any([np.isscalar(x), np.isscalar(y)]): raise ValueError("invalid mixture of scalars and arrays") else: xdist = [] ydist = [] for x_, y_ in zip(x, y): xdist_, ydist_ = exvp_scalar(x_, y_, x0=x0, y0=y0, c2=c2, c4=c4, theta0=theta0, ff=ff) xdist.append(xdist_) ydist.append(ydist_) return np.array(xdist), np.array(ydist)
python
def exvp(x, y, x0, y0, c2, c4, theta0, ff): """Convert virtual pixel(s) to real pixel(s). This function makes use of exvp_scalar(), which performs the conversion for a single point (x, y), over an array of X and Y values. Parameters ---------- x : array-like X coordinate (pixel). y : array-like Y coordinate (pixel). x0 : float X coordinate of reference pixel. y0 : float Y coordinate of reference pixel. c2 : float Coefficient corresponding to the term r**2 in distortion equation. c4 : float Coefficient corresponding to the term r**4 in distortion equation. theta0 : float Additional rotation angle (radians). ff : float Scaling factor to be applied to the Y axis. Returns ------- xdist, ydist : tuple of floats (or two arrays of floats) Distorted coordinates. """ if all([np.isscalar(x), np.isscalar(y)]): xdist, ydist = exvp_scalar(x, y, x0=x0, y0=y0, c2=c2, c4=c4, theta0=theta0, ff=ff) return xdist, ydist elif any([np.isscalar(x), np.isscalar(y)]): raise ValueError("invalid mixture of scalars and arrays") else: xdist = [] ydist = [] for x_, y_ in zip(x, y): xdist_, ydist_ = exvp_scalar(x_, y_, x0=x0, y0=y0, c2=c2, c4=c4, theta0=theta0, ff=ff) xdist.append(xdist_) ydist.append(ydist_) return np.array(xdist), np.array(ydist)
[ "def", "exvp", "(", "x", ",", "y", ",", "x0", ",", "y0", ",", "c2", ",", "c4", ",", "theta0", ",", "ff", ")", ":", "if", "all", "(", "[", "np", ".", "isscalar", "(", "x", ")", ",", "np", ".", "isscalar", "(", "y", ")", "]", ")", ":", "xdist", ",", "ydist", "=", "exvp_scalar", "(", "x", ",", "y", ",", "x0", "=", "x0", ",", "y0", "=", "y0", ",", "c2", "=", "c2", ",", "c4", "=", "c4", ",", "theta0", "=", "theta0", ",", "ff", "=", "ff", ")", "return", "xdist", ",", "ydist", "elif", "any", "(", "[", "np", ".", "isscalar", "(", "x", ")", ",", "np", ".", "isscalar", "(", "y", ")", "]", ")", ":", "raise", "ValueError", "(", "\"invalid mixture of scalars and arrays\"", ")", "else", ":", "xdist", "=", "[", "]", "ydist", "=", "[", "]", "for", "x_", ",", "y_", "in", "zip", "(", "x", ",", "y", ")", ":", "xdist_", ",", "ydist_", "=", "exvp_scalar", "(", "x_", ",", "y_", ",", "x0", "=", "x0", ",", "y0", "=", "y0", ",", "c2", "=", "c2", ",", "c4", "=", "c4", ",", "theta0", "=", "theta0", ",", "ff", "=", "ff", ")", "xdist", ".", "append", "(", "xdist_", ")", "ydist", ".", "append", "(", "ydist_", ")", "return", "np", ".", "array", "(", "xdist", ")", ",", "np", ".", "array", "(", "ydist", ")" ]
Convert virtual pixel(s) to real pixel(s). This function makes use of exvp_scalar(), which performs the conversion for a single point (x, y), over an array of X and Y values. Parameters ---------- x : array-like X coordinate (pixel). y : array-like Y coordinate (pixel). x0 : float X coordinate of reference pixel. y0 : float Y coordinate of reference pixel. c2 : float Coefficient corresponding to the term r**2 in distortion equation. c4 : float Coefficient corresponding to the term r**4 in distortion equation. theta0 : float Additional rotation angle (radians). ff : float Scaling factor to be applied to the Y axis. Returns ------- xdist, ydist : tuple of floats (or two arrays of floats) Distorted coordinates.
[ "Convert", "virtual", "pixel", "(", "s", ")", "to", "real", "pixel", "(", "s", ")", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/fit_boundaries.py#L256-L305
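exvp accepts matched scalars or matched sequences; mixing the two is rejected. A short usage sketch with the same invented parameters as above:

import numpy as np

xs = np.array([100.0, 2000.0])
ys = np.array([100.0, 2000.0])
xd, yd = exvp(xs, ys, x0=1.024, y0=1.024, c2=1.0, c4=0.0, theta0=0.0, ff=1.0)
print(xd.shape, yd.shape)   # -> (2,) (2,)
# exvp(100.0, ys, ...) would raise ValueError: scalars and arrays cannot be mixed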
guaix-ucm/pyemir
emirdrp/tools/fit_boundaries.py
return_params
def return_params(islitlet, csu_bar_slit_center, params, parmodel): """Return individual model parameters from object of type Parameters. Parameters ---------- islitlet : int Number of slitlet. csu_bar_slit_center : float CSU bar slit center, in mm. params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. Returns ------- c2 : float Coefficient corresponding to the term r**2 in distortion equation. c4 : float Coefficient corresponding to the term r**4 in distortion equation. ff : float Scaling factor to be applied to the Y axis. slit_gap : float Slit gap. slit_height : float Slit height. theta0 : float Additional rotation angle (radians). x0 : float X coordinate of reference pixel. y0 : float Y coordinate of reference pixel. y_baseline : float Y coordinate employed as baseline. """ if parmodel == "longslit": # set each variable in EXPECTED_PARAMETER_LIST to the value # transferred through 'params' c2 = params['c2'].value c4 = params['c4'].value ff = params['ff'].value slit_gap = params['slit_gap'].value slit_height = params['slit_height'].value theta0_origin = params['theta0_origin'].value theta0_slope = params['theta0_slope'].value x0 = params['x0'].value y0 = params['y0'].value y_baseline = params['y_baseline'].value else: # set each variable in EXPECTED_PARAMETER_LIST_EXTENDED to the value # transferred through 'params' c2_a0s = params['c2_a0s'].value c2_a1s = params['c2_a1s'].value / 1E3 c2_a2s = params['c2_a2s'].value / 1E6 c2 = c2_a0s + \ c2_a1s * csu_bar_slit_center + \ c2_a2s * csu_bar_slit_center ** 2 # --- c4_a0s = params['c4_a0s'].value c4_a1s = params['c4_a1s'].value / 1E3 c4_a2s = params['c4_a2s'].value / 1E6 c4 = c4_a0s + \ c4_a1s * csu_bar_slit_center + \ c4_a2s * csu_bar_slit_center ** 2 # --- ff_a0s = params['ff_a0s'].value ff_a1s = params['ff_a1s'].value / 1E3 ff_a2s = params['ff_a2s'].value / 1E6 ff = ff_a0s + \ ff_a1s * csu_bar_slit_center + \ ff_a2s * csu_bar_slit_center ** 2 # --- slit_gap_a0s = params['slit_gap_a0s'].value slit_gap_a1s = params['slit_gap_a1s'].value / 1E3 slit_gap_a2s = params['slit_gap_a2s'].value / 1E6 slit_gap = slit_gap_a0s + \ slit_gap_a1s * csu_bar_slit_center + \ slit_gap_a2s * csu_bar_slit_center ** 2 # --- slit_height_a0s = params['slit_height_a0s'].value slit_height_a1s = params['slit_height_a1s'].value / 1E3 slit_height_a2s = params['slit_height_a2s'].value / 1E6 slit_height = slit_height_a0s + \ slit_height_a1s * csu_bar_slit_center + \ slit_height_a2s * csu_bar_slit_center ** 2 # --- theta0_origin_a0s = params['theta0_origin_a0s'].value theta0_origin_a1s = params['theta0_origin_a1s'].value / 1E3 theta0_origin_a2s = params['theta0_origin_a2s'].value / 1E6 theta0_origin = theta0_origin_a0s + \ theta0_origin_a1s * csu_bar_slit_center + \ theta0_origin_a2s * csu_bar_slit_center ** 2 # --- theta0_slope_a0s = params['theta0_slope_a0s'].value theta0_slope_a1s = params['theta0_slope_a1s'].value / 1E3 theta0_slope_a2s = params['theta0_slope_a2s'].value / 1E6 theta0_slope = theta0_slope_a0s + \ theta0_slope_a1s * csu_bar_slit_center + \ theta0_slope_a2s * csu_bar_slit_center ** 2 # --- x0_a0s = params['x0_a0s'].value x0_a1s = params['x0_a1s'].value / 1E3 x0_a2s = params['x0_a2s'].value / 1E6 x0 = x0_a0s + \ x0_a1s * csu_bar_slit_center + \ x0_a2s * csu_bar_slit_center ** 2 # --- y0_a0s = params['y0_a0s'].value y0_a1s = params['y0_a1s'].value / 1E3 y0_a2s = params['y0_a2s'].value / 1E6 y0 = y0_a0s + \ 
y0_a1s * csu_bar_slit_center + \ y0_a2s * csu_bar_slit_center ** 2 # --- y_baseline_a0s = params['y_baseline_a0s'].value y_baseline_a1s = params['y_baseline_a1s'].value / 1E3 y_baseline_a2s = params['y_baseline_a2s'].value / 1E6 y_baseline = y_baseline_a0s + \ y_baseline_a1s * csu_bar_slit_center + \ y_baseline_a2s * csu_bar_slit_center ** 2 theta0 = theta0_origin / 1E3 + theta0_slope / 1E4 * islitlet return c2, c4, ff, slit_gap, slit_height, theta0, x0, y0, y_baseline
python
def return_params(islitlet, csu_bar_slit_center, params, parmodel): """Return individual model parameters from object of type Parameters. Parameters ---------- islitlet : int Number of slitlet. csu_bar_slit_center : float CSU bar slit center, in mm. params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. Returns ------- c2 : float Coefficient corresponding to the term r**2 in distortion equation. c4 : float Coefficient corresponding to the term r**4 in distortion equation. ff : float Scaling factor to be applied to the Y axis. slit_gap : float Slit gap. slit_height : float Slit height. theta0 : float Additional rotation angle (radians). x0 : float X coordinate of reference pixel. y0 : float Y coordinate of reference pixel. y_baseline : float Y coordinate employed as baseline. """ if parmodel == "longslit": # set each variable in EXPECTED_PARAMETER_LIST to the value # transferred through 'params' c2 = params['c2'].value c4 = params['c4'].value ff = params['ff'].value slit_gap = params['slit_gap'].value slit_height = params['slit_height'].value theta0_origin = params['theta0_origin'].value theta0_slope = params['theta0_slope'].value x0 = params['x0'].value y0 = params['y0'].value y_baseline = params['y_baseline'].value else: # set each variable in EXPECTED_PARAMETER_LIST_EXTENDED to the value # transferred through 'params' c2_a0s = params['c2_a0s'].value c2_a1s = params['c2_a1s'].value / 1E3 c2_a2s = params['c2_a2s'].value / 1E6 c2 = c2_a0s + \ c2_a1s * csu_bar_slit_center + \ c2_a2s * csu_bar_slit_center ** 2 # --- c4_a0s = params['c4_a0s'].value c4_a1s = params['c4_a1s'].value / 1E3 c4_a2s = params['c4_a2s'].value / 1E6 c4 = c4_a0s + \ c4_a1s * csu_bar_slit_center + \ c4_a2s * csu_bar_slit_center ** 2 # --- ff_a0s = params['ff_a0s'].value ff_a1s = params['ff_a1s'].value / 1E3 ff_a2s = params['ff_a2s'].value / 1E6 ff = ff_a0s + \ ff_a1s * csu_bar_slit_center + \ ff_a2s * csu_bar_slit_center ** 2 # --- slit_gap_a0s = params['slit_gap_a0s'].value slit_gap_a1s = params['slit_gap_a1s'].value / 1E3 slit_gap_a2s = params['slit_gap_a2s'].value / 1E6 slit_gap = slit_gap_a0s + \ slit_gap_a1s * csu_bar_slit_center + \ slit_gap_a2s * csu_bar_slit_center ** 2 # --- slit_height_a0s = params['slit_height_a0s'].value slit_height_a1s = params['slit_height_a1s'].value / 1E3 slit_height_a2s = params['slit_height_a2s'].value / 1E6 slit_height = slit_height_a0s + \ slit_height_a1s * csu_bar_slit_center + \ slit_height_a2s * csu_bar_slit_center ** 2 # --- theta0_origin_a0s = params['theta0_origin_a0s'].value theta0_origin_a1s = params['theta0_origin_a1s'].value / 1E3 theta0_origin_a2s = params['theta0_origin_a2s'].value / 1E6 theta0_origin = theta0_origin_a0s + \ theta0_origin_a1s * csu_bar_slit_center + \ theta0_origin_a2s * csu_bar_slit_center ** 2 # --- theta0_slope_a0s = params['theta0_slope_a0s'].value theta0_slope_a1s = params['theta0_slope_a1s'].value / 1E3 theta0_slope_a2s = params['theta0_slope_a2s'].value / 1E6 theta0_slope = theta0_slope_a0s + \ theta0_slope_a1s * csu_bar_slit_center + \ theta0_slope_a2s * csu_bar_slit_center ** 2 # --- x0_a0s = params['x0_a0s'].value x0_a1s = params['x0_a1s'].value / 1E3 x0_a2s = params['x0_a2s'].value / 1E6 x0 = x0_a0s + \ x0_a1s * csu_bar_slit_center + \ x0_a2s * csu_bar_slit_center ** 2 # --- y0_a0s = params['y0_a0s'].value y0_a1s = params['y0_a1s'].value / 1E3 y0_a2s = params['y0_a2s'].value / 1E6 y0 = y0_a0s + \ 
y0_a1s * csu_bar_slit_center + \ y0_a2s * csu_bar_slit_center ** 2 # --- y_baseline_a0s = params['y_baseline_a0s'].value y_baseline_a1s = params['y_baseline_a1s'].value / 1E3 y_baseline_a2s = params['y_baseline_a2s'].value / 1E6 y_baseline = y_baseline_a0s + \ y_baseline_a1s * csu_bar_slit_center + \ y_baseline_a2s * csu_bar_slit_center ** 2 theta0 = theta0_origin / 1E3 + theta0_slope / 1E4 * islitlet return c2, c4, ff, slit_gap, slit_height, theta0, x0, y0, y_baseline
[ "def", "return_params", "(", "islitlet", ",", "csu_bar_slit_center", ",", "params", ",", "parmodel", ")", ":", "if", "parmodel", "==", "\"longslit\"", ":", "# set each variable in EXPECTED_PARAMETER_LIST to the value", "# transferred through 'params'", "c2", "=", "params", "[", "'c2'", "]", ".", "value", "c4", "=", "params", "[", "'c4'", "]", ".", "value", "ff", "=", "params", "[", "'ff'", "]", ".", "value", "slit_gap", "=", "params", "[", "'slit_gap'", "]", ".", "value", "slit_height", "=", "params", "[", "'slit_height'", "]", ".", "value", "theta0_origin", "=", "params", "[", "'theta0_origin'", "]", ".", "value", "theta0_slope", "=", "params", "[", "'theta0_slope'", "]", ".", "value", "x0", "=", "params", "[", "'x0'", "]", ".", "value", "y0", "=", "params", "[", "'y0'", "]", ".", "value", "y_baseline", "=", "params", "[", "'y_baseline'", "]", ".", "value", "else", ":", "# set each variable in EXPECTED_PARAMETER_LIST_EXTENDED to the value", "# transferred through 'params'", "c2_a0s", "=", "params", "[", "'c2_a0s'", "]", ".", "value", "c2_a1s", "=", "params", "[", "'c2_a1s'", "]", ".", "value", "/", "1E3", "c2_a2s", "=", "params", "[", "'c2_a2s'", "]", ".", "value", "/", "1E6", "c2", "=", "c2_a0s", "+", "c2_a1s", "*", "csu_bar_slit_center", "+", "c2_a2s", "*", "csu_bar_slit_center", "**", "2", "# ---", "c4_a0s", "=", "params", "[", "'c4_a0s'", "]", ".", "value", "c4_a1s", "=", "params", "[", "'c4_a1s'", "]", ".", "value", "/", "1E3", "c4_a2s", "=", "params", "[", "'c4_a2s'", "]", ".", "value", "/", "1E6", "c4", "=", "c4_a0s", "+", "c4_a1s", "*", "csu_bar_slit_center", "+", "c4_a2s", "*", "csu_bar_slit_center", "**", "2", "# ---", "ff_a0s", "=", "params", "[", "'ff_a0s'", "]", ".", "value", "ff_a1s", "=", "params", "[", "'ff_a1s'", "]", ".", "value", "/", "1E3", "ff_a2s", "=", "params", "[", "'ff_a2s'", "]", ".", "value", "/", "1E6", "ff", "=", "ff_a0s", "+", "ff_a1s", "*", "csu_bar_slit_center", "+", "ff_a2s", "*", "csu_bar_slit_center", "**", "2", "# ---", "slit_gap_a0s", "=", "params", "[", "'slit_gap_a0s'", "]", ".", "value", "slit_gap_a1s", "=", "params", "[", "'slit_gap_a1s'", "]", ".", "value", "/", "1E3", "slit_gap_a2s", "=", "params", "[", "'slit_gap_a2s'", "]", ".", "value", "/", "1E6", "slit_gap", "=", "slit_gap_a0s", "+", "slit_gap_a1s", "*", "csu_bar_slit_center", "+", "slit_gap_a2s", "*", "csu_bar_slit_center", "**", "2", "# ---", "slit_height_a0s", "=", "params", "[", "'slit_height_a0s'", "]", ".", "value", "slit_height_a1s", "=", "params", "[", "'slit_height_a1s'", "]", ".", "value", "/", "1E3", "slit_height_a2s", "=", "params", "[", "'slit_height_a2s'", "]", ".", "value", "/", "1E6", "slit_height", "=", "slit_height_a0s", "+", "slit_height_a1s", "*", "csu_bar_slit_center", "+", "slit_height_a2s", "*", "csu_bar_slit_center", "**", "2", "# ---", "theta0_origin_a0s", "=", "params", "[", "'theta0_origin_a0s'", "]", ".", "value", "theta0_origin_a1s", "=", "params", "[", "'theta0_origin_a1s'", "]", ".", "value", "/", "1E3", "theta0_origin_a2s", "=", "params", "[", "'theta0_origin_a2s'", "]", ".", "value", "/", "1E6", "theta0_origin", "=", "theta0_origin_a0s", "+", "theta0_origin_a1s", "*", "csu_bar_slit_center", "+", "theta0_origin_a2s", "*", "csu_bar_slit_center", "**", "2", "# ---", "theta0_slope_a0s", "=", "params", "[", "'theta0_slope_a0s'", "]", ".", "value", "theta0_slope_a1s", "=", "params", "[", "'theta0_slope_a1s'", "]", ".", "value", "/", "1E3", "theta0_slope_a2s", "=", "params", "[", "'theta0_slope_a2s'", "]", ".", "value", "/", "1E6", "theta0_slope", "=", 
"theta0_slope_a0s", "+", "theta0_slope_a1s", "*", "csu_bar_slit_center", "+", "theta0_slope_a2s", "*", "csu_bar_slit_center", "**", "2", "# ---", "x0_a0s", "=", "params", "[", "'x0_a0s'", "]", ".", "value", "x0_a1s", "=", "params", "[", "'x0_a1s'", "]", ".", "value", "/", "1E3", "x0_a2s", "=", "params", "[", "'x0_a2s'", "]", ".", "value", "/", "1E6", "x0", "=", "x0_a0s", "+", "x0_a1s", "*", "csu_bar_slit_center", "+", "x0_a2s", "*", "csu_bar_slit_center", "**", "2", "# ---", "y0_a0s", "=", "params", "[", "'y0_a0s'", "]", ".", "value", "y0_a1s", "=", "params", "[", "'y0_a1s'", "]", ".", "value", "/", "1E3", "y0_a2s", "=", "params", "[", "'y0_a2s'", "]", ".", "value", "/", "1E6", "y0", "=", "y0_a0s", "+", "y0_a1s", "*", "csu_bar_slit_center", "+", "y0_a2s", "*", "csu_bar_slit_center", "**", "2", "# ---", "y_baseline_a0s", "=", "params", "[", "'y_baseline_a0s'", "]", ".", "value", "y_baseline_a1s", "=", "params", "[", "'y_baseline_a1s'", "]", ".", "value", "/", "1E3", "y_baseline_a2s", "=", "params", "[", "'y_baseline_a2s'", "]", ".", "value", "/", "1E6", "y_baseline", "=", "y_baseline_a0s", "+", "y_baseline_a1s", "*", "csu_bar_slit_center", "+", "y_baseline_a2s", "*", "csu_bar_slit_center", "**", "2", "theta0", "=", "theta0_origin", "/", "1E3", "+", "theta0_slope", "/", "1E4", "*", "islitlet", "return", "c2", ",", "c4", ",", "ff", ",", "slit_gap", ",", "slit_height", ",", "theta0", ",", "x0", ",", "y0", ",", "y_baseline" ]
Return individual model parameters from object of type Parameters. Parameters ---------- islitlet : int Number of slitlet. csu_bar_slit_center : float CSU bar slit center, in mm. params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. Returns ------- c2 : float Coefficient corresponding to the term r**2 in distortion equation. c4 : float Coefficient corresponding to the term r**4 in distortion equation. ff : float Scaling factor to be applied to the Y axis. slit_gap : float Slit gap. slit_height : float Slit height. theta0 : float Additional rotation angle (radians). x0 : float X coordinate of reference pixel. y0 : float Y coordinate of reference pixel. y_baseline : float Y coordinate employed as baseline.
[ "Return", "individual", "model", "parameters", "from", "object", "of", "type", "Parameters", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/fit_boundaries.py#L308-L438
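In 'multislit' mode every coefficient in return_params follows the same pattern: a quadratic in csu_bar_slit_center whose linear and quadratic terms are stored premultiplied by 1E3 and 1E6. A sketch with a hypothetical helper (coefficient values made up):

def quadratic_param(a0s, a1s, a2s, csu_bar_slit_center):
    # a1s and a2s are stored scaled by 1E3 and 1E6 respectively
    return (a0s
            + a1s / 1E3 * csu_bar_slit_center
            + a2s / 1E6 * csu_bar_slit_center ** 2)

print(quadratic_param(1.0, 2.0, 3.0, 100.0))   # -> 1.0 + 0.2 + 0.03 = 1.23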
guaix-ucm/pyemir
emirdrp/tools/fit_boundaries.py
expected_distorted_boundaries
def expected_distorted_boundaries(islitlet, csu_bar_slit_center, borderlist, params, parmodel, numpts, deg, debugplot=0): """Return expected SpectrumTrail instances associated to a given slitlet. Several SpectrumTrail objects can be computed for the considered slitlet. The parameter borderlist is a list of floats, ranging from 0 to 1, indicating the spatial location of the spectrum trail within the slitlet: 0 means the lower boundary and 1 corresponds to the upper boundary. Any value in (0,1) will provide the spectrum trail located in between accordingly. Parameters ---------- islitlet : int Number of slitlet. csu_bar_slit_center : float CSU bar slit center, in mm. borderlist : list of floats Each float provides the fractional vertical location of the spectrum trail relative to the lower boundary. In other words, 0.0 corresponds to the lower boundary, 1.0 to the upper boundary, and any number in the interval (0,1) will be a spectral trail in between. params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. numpts : int Number of points in which the X-range interval is subdivided before fitting the returned polynomial(s). deg : int Degree of the fitted polynomial. debugplot : int Debugging level for messages and plots. For details see 'numina.array.display.pause_debugplot.py'. Returns ------- list_spectrails : list of SpectrumTrail objects List containing the fitted spectrum trails. """ c2, c4, ff, slit_gap, slit_height, theta0, x0, y0, y_baseline = \ return_params(islitlet, csu_bar_slit_center, params, parmodel) xp = np.linspace(1, EMIR_NAXIS1, numpts) slit_dist = (slit_height * 10) + slit_gap # undistorted (constant) y-coordinate of the lower and upper boundaries ybottom = y_baseline * 100 + (islitlet - 1) * slit_dist ytop = ybottom + (slit_height * 10) list_spectrails = [] for borderval in borderlist: yvalue = ybottom + borderval * (ytop - ybottom) # undistorted boundary yp_value = np.ones(numpts) * yvalue # distorted boundary xdist, ydist = exvp(xp, yp_value, x0=x0, y0=y0, c2=c2, c4=c4, theta0=theta0, ff=ff) spectrail = SpectrumTrail() # declare SpectrumTrail instance spectrail.fit(x=xdist, y=ydist, deg=deg, debugplot=debugplot) list_spectrails.append(spectrail) return list_spectrails
python
def expected_distorted_boundaries(islitlet, csu_bar_slit_center, borderlist, params, parmodel, numpts, deg, debugplot=0): """Return expected SpectrumTrail instances associated to a given slitlet. Several SpectrumTrail objects can be computed for the considered slitlet. The parameter borderlist is a list of floats, ranging from 0 to 1, indicating the spatial location of the spectrum trail within the slitlet: 0 means the lower boundary and 1 corresponds to the upper boundary. Any value in (0,1) will provide the spectrum trail located in between accordingly. Parameters ---------- islitlet : int Number of slitlet. csu_bar_slit_center : float CSU bar slit center, in mm. borderlist : list of floats Each float provides the fractional vertical location of the spectrum trail relative to the lower boundary. In other words, 0.0 corresponds to the lower boundary, 1.0 to the upper boundary, and any number in the interval (0,1) will be a spectral trail in between. params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. numpts : int Number of points in which the X-range interval is subdivided before fitting the returned polynomial(s). deg : int Degree of the fitted polynomial. debugplot : int Debugging level for messages and plots. For details see 'numina.array.display.pause_debugplot.py'. Returns ------- list_spectrails : list of SpectrumTrail objects List containing the fitted spectrum trails. """ c2, c4, ff, slit_gap, slit_height, theta0, x0, y0, y_baseline = \ return_params(islitlet, csu_bar_slit_center, params, parmodel) xp = np.linspace(1, EMIR_NAXIS1, numpts) slit_dist = (slit_height * 10) + slit_gap # undistorted (constant) y-coordinate of the lower and upper boundaries ybottom = y_baseline * 100 + (islitlet - 1) * slit_dist ytop = ybottom + (slit_height * 10) list_spectrails = [] for borderval in borderlist: yvalue = ybottom + borderval * (ytop - ybottom) # undistorted boundary yp_value = np.ones(numpts) * yvalue # distorted boundary xdist, ydist = exvp(xp, yp_value, x0=x0, y0=y0, c2=c2, c4=c4, theta0=theta0, ff=ff) spectrail = SpectrumTrail() # declare SpectrumTrail instance spectrail.fit(x=xdist, y=ydist, deg=deg, debugplot=debugplot) list_spectrails.append(spectrail) return list_spectrails
[ "def", "expected_distorted_boundaries", "(", "islitlet", ",", "csu_bar_slit_center", ",", "borderlist", ",", "params", ",", "parmodel", ",", "numpts", ",", "deg", ",", "debugplot", "=", "0", ")", ":", "c2", ",", "c4", ",", "ff", ",", "slit_gap", ",", "slit_height", ",", "theta0", ",", "x0", ",", "y0", ",", "y_baseline", "=", "return_params", "(", "islitlet", ",", "csu_bar_slit_center", ",", "params", ",", "parmodel", ")", "xp", "=", "np", ".", "linspace", "(", "1", ",", "EMIR_NAXIS1", ",", "numpts", ")", "slit_dist", "=", "(", "slit_height", "*", "10", ")", "+", "slit_gap", "# undistorted (constant) y-coordinate of the lower and upper boundaries", "ybottom", "=", "y_baseline", "*", "100", "+", "(", "islitlet", "-", "1", ")", "*", "slit_dist", "ytop", "=", "ybottom", "+", "(", "slit_height", "*", "10", ")", "list_spectrails", "=", "[", "]", "for", "borderval", "in", "borderlist", ":", "yvalue", "=", "ybottom", "+", "borderval", "*", "(", "ytop", "-", "ybottom", ")", "# undistorted boundary", "yp_value", "=", "np", ".", "ones", "(", "numpts", ")", "*", "yvalue", "# distorted boundary", "xdist", ",", "ydist", "=", "exvp", "(", "xp", ",", "yp_value", ",", "x0", "=", "x0", ",", "y0", "=", "y0", ",", "c2", "=", "c2", ",", "c4", "=", "c4", ",", "theta0", "=", "theta0", ",", "ff", "=", "ff", ")", "spectrail", "=", "SpectrumTrail", "(", ")", "# declare SpectrumTrail instance", "spectrail", ".", "fit", "(", "x", "=", "xdist", ",", "y", "=", "ydist", ",", "deg", "=", "deg", ",", "debugplot", "=", "debugplot", ")", "list_spectrails", ".", "append", "(", "spectrail", ")", "return", "list_spectrails" ]
Return expected SpectrumTrail instances associated to a given slitlet. Several SpectrumTrail objects can be computed for the considered slitlet. The parameter borderlist is a list of floats, ranging from 0 to 1, indicating the spatial location of the spectrum trail within the slitlet: 0 means the lower boundary and 1 corresponds to the upper boundary. Any value in (0,1) will provide the spectrum trail located in between accordingly. Parameters ---------- islitlet : int Number of slitlet. csu_bar_slit_center : float CSU bar slit center, in mm. borderlist : list of floats Each float provides the fractional vertical location of the spectrum trail relative to the lower boundary. In other words, 0.0 corresponds to the lower boundary, 1.0 to the upper boundary, and any number in the interval (0,1) will be a spectral trail in between. params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. numpts : int Number of points in which the X-range interval is subdivided before fitting the returned polynomial(s). deg : int Degree of the fitted polynomial. debugplot : int Debugging level for messages and plots. For details see 'numina.array.display.pause_debugplot.py'. Returns ------- list_spectrails : list of SpectrumTrail objects List containing the fitted spectrum trails.
[ "Return", "expected", "SpectrumTrail", "instances", "associated", "to", "a", "given", "slitlet", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/fit_boundaries.py#L441-L509
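A hedged usage sketch: building a 'longslit' Parameters object by hand and asking for the lower and upper trails. Every numeric value below is invented for illustration, and the call assumes expected_distorted_boundaries and its module-level dependencies (EMIR_NAXIS1, SpectrumTrail) are importable from pyemir:

from lmfit import Parameters

params = Parameters()
for name, value in [('c2', 1.0), ('c4', 0.0), ('ff', 1.0),
                    ('slit_gap', 1.0), ('slit_height', 3.4),
                    ('theta0_origin', 0.0), ('theta0_slope', 0.0),
                    ('x0', 1.024), ('y0', 1.024), ('y_baseline', 0.1)]:
    params.add(name, value=value)     # made-up values, not a calibration

trails = expected_distorted_boundaries(
    islitlet=10, csu_bar_slit_center=150.0, borderlist=[0.0, 1.0],
    params=params, parmodel='longslit', numpts=101, deg=5)
poly_lower, poly_upper = (t.poly_funct for t in trails)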
guaix-ucm/pyemir
emirdrp/tools/fit_boundaries.py
fun_residuals
def fun_residuals(params, parmodel, bounddict, shrinking_factor, numresolution, islitmin, islitmax, debugplot): """Function to be minimised. Parameters ---------- params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. bounddict : JSON structure Structure employed to store bounddict information. shrinking_factor : float Fraction of the detected X range (spectral) to be employed in the fit. This must be a number verifying 0 < shrinking_factor <= 1. The resulting interval will be centered within the original one. numresolution : int Number of points in which the X-range interval is subdivided before computing the residuals. islitmin : int Minimum slitlet number. islitmax : int Maximum slitlet number. debugplot : int Debugging level for messages and plots. For details see 'numina.array.display.pause_debugplot.py'. Returns ------- global_residual : float Square root of the averaged sum of squared residuals. """ global FUNCTION_EVALUATIONS global_residual = 0.0 nsummed = 0 read_slitlets = list(bounddict['contents'].keys()) # read_slitlets.sort() # this is not really necessary for tmp_slitlet in read_slitlets: islitlet = int(tmp_slitlet[7:]) if islitmin <= islitlet <= islitmax: read_dateobs = list(bounddict['contents'][tmp_slitlet].keys()) # read_dateobs.sort() # this is not really necessary for tmp_dateobs in read_dateobs: tmp_dict = bounddict['contents'][tmp_slitlet][tmp_dateobs] csu_bar_slit_center = tmp_dict['csu_bar_slit_center'] # expected boundaries using provided parameters list_spectrails = expected_distorted_boundaries( islitlet, csu_bar_slit_center, [0, 1], params, parmodel, numpts=numresolution, deg=5, debugplot=0 ) poly_lower_expected = list_spectrails[0].poly_funct poly_upper_expected = list_spectrails[1].poly_funct # measured lower boundary poly_lower_measured = np.polynomial.Polynomial( tmp_dict['boundary_coef_lower'] ) xmin_lower_bound = tmp_dict['boundary_xmin_lower'] xmax_lower_bound = tmp_dict['boundary_xmax_lower'] dx = (xmax_lower_bound - xmin_lower_bound) * \ (1 - shrinking_factor) / 2 xdum_lower = np.linspace(xmin_lower_bound + dx, xmax_lower_bound - dx, num=numresolution) # distance between expected and measured polynomials poly_diff = poly_lower_expected - poly_lower_measured global_residual += np.sum(poly_diff(xdum_lower)**2) nsummed += numresolution # measured upper boundary poly_upper_measured = np.polynomial.Polynomial( tmp_dict['boundary_coef_upper'] ) xmin_upper_bound = tmp_dict['boundary_xmin_upper'] xmax_upper_bound = tmp_dict['boundary_xmax_upper'] dx = (xmax_lower_bound - xmin_lower_bound) * \ (1 - shrinking_factor) / 2 xdum_upper = np.linspace(xmin_upper_bound + dx, xmax_upper_bound - dx, num=numresolution) # distance between expected and measured polynomials poly_diff = poly_upper_expected - poly_upper_measured global_residual += np.sum(poly_diff(xdum_upper)**2) nsummed += numresolution if nsummed > 0: global_residual = np.sqrt(global_residual/nsummed) if debugplot >= 10: FUNCTION_EVALUATIONS += 1 print('-' * 79) print('>>> Number of function evaluations:', FUNCTION_EVALUATIONS) print('>>> global residual...............:', global_residual) params.pretty_print() return global_residual
python
def fun_residuals(params, parmodel, bounddict, shrinking_factor, numresolution, islitmin, islitmax, debugplot): """Function to be minimised. Parameters ---------- params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. bounddict : JSON structure Structure employed to store bounddict information. shrinking_factor : float Fraction of the detected X range (specrtral) to be employed in the fit. This must be a number verifying 0 < shrinking_factor <= 1. The resulting interval will be centered within the original one. numresolution : int Number of points in which the X-range interval is subdivided before computing the residuals. islitmin : int Minimum slitlet number. islitmax : int Maximum slitlet number. debugplot : int Debugging level for messages and plots. For details see 'numina.array.display.pause_debugplot.py'. Returns ------- global_residual : float Squared root of the averaged sum of squared residuals. """ global FUNCTION_EVALUATIONS global_residual = 0.0 nsummed = 0 read_slitlets = list(bounddict['contents'].keys()) # read_slitlets.sort() # this is not really necessary for tmp_slitlet in read_slitlets: islitlet = int(tmp_slitlet[7:]) if islitmin <= islitlet <= islitmax: read_dateobs = list(bounddict['contents'][tmp_slitlet].keys()) # read_dateobs.sort() # this is not really necessary for tmp_dateobs in read_dateobs: tmp_dict = bounddict['contents'][tmp_slitlet][tmp_dateobs] csu_bar_slit_center = tmp_dict['csu_bar_slit_center'] # expected boundaries using provided parameters list_spectrails = expected_distorted_boundaries( islitlet, csu_bar_slit_center, [0, 1], params, parmodel, numpts=numresolution, deg=5, debugplot=0 ) poly_lower_expected = list_spectrails[0].poly_funct poly_upper_expected = list_spectrails[1].poly_funct # measured lower boundary poly_lower_measured = np.polynomial.Polynomial( tmp_dict['boundary_coef_lower'] ) xmin_lower_bound = tmp_dict['boundary_xmin_lower'] xmax_lower_bound = tmp_dict['boundary_xmax_lower'] dx = (xmax_lower_bound - xmin_lower_bound) * \ (1 - shrinking_factor) / 2 xdum_lower = np.linspace(xmin_lower_bound + dx, xmax_lower_bound - dx, num=numresolution) # distance between expected and measured polynomials poly_diff = poly_lower_expected - poly_lower_measured global_residual += np.sum(poly_diff(xdum_lower)**2) nsummed += numresolution # measured upper boundary poly_upper_measured = np.polynomial.Polynomial( tmp_dict['boundary_coef_upper'] ) xmin_upper_bound = tmp_dict['boundary_xmin_upper'] xmax_upper_bound = tmp_dict['boundary_xmax_upper'] dx = (xmax_lower_bound - xmin_lower_bound) * \ (1 - shrinking_factor) / 2 xdum_upper = np.linspace(xmin_upper_bound + dx, xmax_upper_bound - dx, num=numresolution) # distance between expected and measured polynomials poly_diff = poly_upper_expected - poly_upper_measured global_residual += np.sum(poly_diff(xdum_upper)**2) nsummed += numresolution if nsummed > 0: global_residual = np.sqrt(global_residual/nsummed) if debugplot >= 10: FUNCTION_EVALUATIONS += 1 print('-' * 79) print('>>> Number of function evaluations:', FUNCTION_EVALUATIONS) print('>>> global residual...............:', global_residual) params.pretty_print() return global_residual
[ "def", "fun_residuals", "(", "params", ",", "parmodel", ",", "bounddict", ",", "shrinking_factor", ",", "numresolution", ",", "islitmin", ",", "islitmax", ",", "debugplot", ")", ":", "global", "FUNCTION_EVALUATIONS", "global_residual", "=", "0.0", "nsummed", "=", "0", "read_slitlets", "=", "list", "(", "bounddict", "[", "'contents'", "]", ".", "keys", "(", ")", ")", "# read_slitlets.sort() # this is not really necessary", "for", "tmp_slitlet", "in", "read_slitlets", ":", "islitlet", "=", "int", "(", "tmp_slitlet", "[", "7", ":", "]", ")", "if", "islitmin", "<=", "islitlet", "<=", "islitmax", ":", "read_dateobs", "=", "list", "(", "bounddict", "[", "'contents'", "]", "[", "tmp_slitlet", "]", ".", "keys", "(", ")", ")", "# read_dateobs.sort() # this is not really necessary", "for", "tmp_dateobs", "in", "read_dateobs", ":", "tmp_dict", "=", "bounddict", "[", "'contents'", "]", "[", "tmp_slitlet", "]", "[", "tmp_dateobs", "]", "csu_bar_slit_center", "=", "tmp_dict", "[", "'csu_bar_slit_center'", "]", "# expected boundaries using provided parameters", "list_spectrails", "=", "expected_distorted_boundaries", "(", "islitlet", ",", "csu_bar_slit_center", ",", "[", "0", ",", "1", "]", ",", "params", ",", "parmodel", ",", "numpts", "=", "numresolution", ",", "deg", "=", "5", ",", "debugplot", "=", "0", ")", "poly_lower_expected", "=", "list_spectrails", "[", "0", "]", ".", "poly_funct", "poly_upper_expected", "=", "list_spectrails", "[", "1", "]", ".", "poly_funct", "# measured lower boundary", "poly_lower_measured", "=", "np", ".", "polynomial", ".", "Polynomial", "(", "tmp_dict", "[", "'boundary_coef_lower'", "]", ")", "xmin_lower_bound", "=", "tmp_dict", "[", "'boundary_xmin_lower'", "]", "xmax_lower_bound", "=", "tmp_dict", "[", "'boundary_xmax_lower'", "]", "dx", "=", "(", "xmax_lower_bound", "-", "xmin_lower_bound", ")", "*", "(", "1", "-", "shrinking_factor", ")", "/", "2", "xdum_lower", "=", "np", ".", "linspace", "(", "xmin_lower_bound", "+", "dx", ",", "xmax_lower_bound", "-", "dx", ",", "num", "=", "numresolution", ")", "# distance between expected and measured polynomials", "poly_diff", "=", "poly_lower_expected", "-", "poly_lower_measured", "global_residual", "+=", "np", ".", "sum", "(", "poly_diff", "(", "xdum_lower", ")", "**", "2", ")", "nsummed", "+=", "numresolution", "# measured upper boundary", "poly_upper_measured", "=", "np", ".", "polynomial", ".", "Polynomial", "(", "tmp_dict", "[", "'boundary_coef_upper'", "]", ")", "xmin_upper_bound", "=", "tmp_dict", "[", "'boundary_xmin_upper'", "]", "xmax_upper_bound", "=", "tmp_dict", "[", "'boundary_xmax_upper'", "]", "dx", "=", "(", "xmax_lower_bound", "-", "xmin_lower_bound", ")", "*", "(", "1", "-", "shrinking_factor", ")", "/", "2", "xdum_upper", "=", "np", ".", "linspace", "(", "xmin_upper_bound", "+", "dx", ",", "xmax_upper_bound", "-", "dx", ",", "num", "=", "numresolution", ")", "# distance between expected and measured polynomials", "poly_diff", "=", "poly_upper_expected", "-", "poly_upper_measured", "global_residual", "+=", "np", ".", "sum", "(", "poly_diff", "(", "xdum_upper", ")", "**", "2", ")", "nsummed", "+=", "numresolution", "if", "nsummed", ">", "0", ":", "global_residual", "=", "np", ".", "sqrt", "(", "global_residual", "/", "nsummed", ")", "if", "debugplot", ">=", "10", ":", "FUNCTION_EVALUATIONS", "+=", "1", "print", "(", "'-'", "*", "79", ")", "print", "(", "'>>> Number of function evaluations:'", ",", "FUNCTION_EVALUATIONS", ")", "print", "(", "'>>> global residual...............:'", ",", "global_residual", 
")", "params", ".", "pretty_print", "(", ")", "return", "global_residual" ]
Function to be minimised.

    Parameters
    ----------
    params : :class:`~lmfit.parameter.Parameters`
        Parameters to be employed in the prediction of the distorted
        boundaries.
    parmodel : str
        Model to be assumed. Allowed values are 'longslit' and
        'multislit'.
    bounddict : JSON structure
        Structure employed to store bounddict information.
    shrinking_factor : float
        Fraction of the detected X range (spectral) to be employed in
        the fit. This must be a number verifying
        0 < shrinking_factor <= 1. The resulting interval will be
        centered within the original one.
    numresolution : int
        Number of points in which the X-range interval is subdivided
        before computing the residuals.
    islitmin : int
        Minimum slitlet number.
    islitmax : int
        Maximum slitlet number.
    debugplot : int
        Debugging level for messages and plots. For details see
        'numina.array.display.pause_debugplot.py'.

    Returns
    -------
    global_residual : float
        Square root of the averaged sum of squared residuals.
[ "Function", "to", "be", "minimised", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/fit_boundaries.py#L578-L677
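A fitting sketch (not from the repository): fun_residuals returns a scalar, so it can be handed straight to lmfit's minimize with a simplex-type method, with all fixed arguments travelling in the args tuple. The file name, the slitlet range and the assumption that params comes from bound_params_from_dict (defined later in this file) are hypothetical.

import json
from lmfit import minimize

with open('bounddict_J_J.json') as f:  # hypothetical bounddict file
    bounddict = json.load(f)

# args = (parmodel, bounddict, shrinking_factor, numresolution,
#         islitmin, islitmax, debugplot); islitmax=55 assumes 55 CSU bars
result = minimize(fun_residuals, params,
                  args=('longslit', bounddict, 1.0, 101, 1, 55, 0),
                  method='nelder')
result.params.pretty_print()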
guaix-ucm/pyemir
emirdrp/tools/fit_boundaries.py
overplot_boundaries_from_bounddict
def overplot_boundaries_from_bounddict(ax, bounddict, micolors, linetype='-'):
    """Overplot boundaries on current plot.

    Parameters
    ----------
    ax : matplotlib axes
        Current plot axes.
    bounddict : JSON structure
        Structure employed to store bounddict information.
    micolors : list of char
        List with two characters corresponding to alternating colors
        for odd and even slitlets.
    linetype : str
        Line type.

    """

    for islitlet in range(1, EMIR_NBARS + 1):
        tmpcolor = micolors[islitlet % 2]
        tmp_slitlet = 'slitlet' + str(islitlet).zfill(2)
        if tmp_slitlet in bounddict['contents'].keys():
            read_dateobs = list(bounddict['contents'][tmp_slitlet].keys())
            read_dateobs.sort()
            for tmp_dateobs in read_dateobs:
                tmp_dict = bounddict['contents'][tmp_slitlet][tmp_dateobs]
                # lower boundary
                pol_lower_measured = np.polynomial.Polynomial(
                    tmp_dict['boundary_coef_lower']
                )
                xdum = np.linspace(1, EMIR_NAXIS1, num=EMIR_NAXIS1)
                ydum = pol_lower_measured(xdum)
                ax.plot(xdum, ydum, tmpcolor + linetype)
                pol_upper_measured = np.polynomial.Polynomial(
                    tmp_dict['boundary_coef_upper']
                )
                ydum = pol_upper_measured(xdum)
                ax.plot(xdum, ydum, tmpcolor + linetype)
python
def overplot_boundaries_from_bounddict(ax, bounddict, micolors, linetype='-'):
    """Overplot boundaries on current plot.

    Parameters
    ----------
    ax : matplotlib axes
        Current plot axes.
    bounddict : JSON structure
        Structure employed to store bounddict information.
    micolors : list of char
        List with two characters corresponding to alternating colors
        for odd and even slitlets.
    linetype : str
        Line type.

    """

    for islitlet in range(1, EMIR_NBARS + 1):
        tmpcolor = micolors[islitlet % 2]
        tmp_slitlet = 'slitlet' + str(islitlet).zfill(2)
        if tmp_slitlet in bounddict['contents'].keys():
            read_dateobs = list(bounddict['contents'][tmp_slitlet].keys())
            read_dateobs.sort()
            for tmp_dateobs in read_dateobs:
                tmp_dict = bounddict['contents'][tmp_slitlet][tmp_dateobs]
                # lower boundary
                pol_lower_measured = np.polynomial.Polynomial(
                    tmp_dict['boundary_coef_lower']
                )
                xdum = np.linspace(1, EMIR_NAXIS1, num=EMIR_NAXIS1)
                ydum = pol_lower_measured(xdum)
                ax.plot(xdum, ydum, tmpcolor + linetype)
                pol_upper_measured = np.polynomial.Polynomial(
                    tmp_dict['boundary_coef_upper']
                )
                ydum = pol_upper_measured(xdum)
                ax.plot(xdum, ydum, tmpcolor + linetype)
[ "def", "overplot_boundaries_from_bounddict", "(", "ax", ",", "bounddict", ",", "micolors", ",", "linetype", "=", "'-'", ")", ":", "for", "islitlet", "in", "range", "(", "1", ",", "EMIR_NBARS", "+", "1", ")", ":", "tmpcolor", "=", "micolors", "[", "islitlet", "%", "2", "]", "tmp_slitlet", "=", "'slitlet'", "+", "str", "(", "islitlet", ")", ".", "zfill", "(", "2", ")", "if", "tmp_slitlet", "in", "bounddict", "[", "'contents'", "]", ".", "keys", "(", ")", ":", "read_dateobs", "=", "list", "(", "bounddict", "[", "'contents'", "]", "[", "tmp_slitlet", "]", ".", "keys", "(", ")", ")", "read_dateobs", ".", "sort", "(", ")", "for", "tmp_dateobs", "in", "read_dateobs", ":", "tmp_dict", "=", "bounddict", "[", "'contents'", "]", "[", "tmp_slitlet", "]", "[", "tmp_dateobs", "]", "# lower boundary", "pol_lower_measured", "=", "np", ".", "polynomial", ".", "Polynomial", "(", "tmp_dict", "[", "'boundary_coef_lower'", "]", ")", "xdum", "=", "np", ".", "linspace", "(", "1", ",", "EMIR_NAXIS1", ",", "num", "=", "EMIR_NAXIS1", ")", "ydum", "=", "pol_lower_measured", "(", "xdum", ")", "ax", ".", "plot", "(", "xdum", ",", "ydum", ",", "tmpcolor", "+", "linetype", ")", "pol_upper_measured", "=", "np", ".", "polynomial", ".", "Polynomial", "(", "tmp_dict", "[", "'boundary_coef_upper'", "]", ")", "ydum", "=", "pol_upper_measured", "(", "xdum", ")", "ax", ".", "plot", "(", "xdum", ",", "ydum", ",", "tmpcolor", "+", "linetype", ")" ]
Overplot boundaries on current plot.

    Parameters
    ----------
    ax : matplotlib axes
        Current plot axes.
    bounddict : JSON structure
        Structure employed to store bounddict information.
    micolors : list of char
        List with two characters corresponding to alternating colors
        for odd and even slitlets.
    linetype : str
        Line type.
[ "Overplot", "boundaries", "on", "current", "plot", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/fit_boundaries.py#L680-L716
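Plotting sketch (not from the repository; file names are hypothetical): the measured boundaries stored in a bounddict can be drawn on top of a raw frame as follows.

import json
import matplotlib.pyplot as plt
from astropy.io import fits
from emirdrp.tools.fit_boundaries import overplot_boundaries_from_bounddict

image2d = fits.getdata('emir_frame.fits')  # hypothetical raw frame
with open('bounddict_J_J.json') as f:      # hypothetical bounddict file
    bounddict = json.load(f)

fig, ax = plt.subplots()
ax.imshow(image2d, origin='lower', aspect='auto', cmap='gray')
overplot_boundaries_from_bounddict(ax, bounddict, micolors=['m', 'c'])
plt.show()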
guaix-ucm/pyemir
emirdrp/tools/fit_boundaries.py
overplot_boundaries_from_params
def overplot_boundaries_from_params(ax, params, parmodel, list_islitlet, list_csu_bar_slit_center, micolors=('m', 'c'), linetype='--', labels=True, alpha_fill=None, global_offset_x_pix=0, global_offset_y_pix=0): """Overplot boundaries computed from fitted parameters. Parameters ---------- ax : matplotlib axes Current plot axes. params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. list_islitlet : list of integers Slitlet numbers to be considered. longslits. list_csu_bar_slit_center : list of floats CSU bar slit centers of the considered slitlets. micolors : Python list List with two characters corresponding to alternating colors for odd and even slitlets. linetype : str Line type. labels : bool If True, display slilet label alpha_fill : float or None Alpha factor to be employed to fill slitlet region. global_integer_offset_x_pix : int or float Global offset in the X direction to be applied after computing the expected location. global_offset_y_pix : int or float Global offset in the Y direction to be applied after computing the expected location. Returns ------- list_pol_lower_boundaries : python list List of numpy.polynomial.Polynomial instances with the lower polynomial boundaries computed for the requested slitlets. list_pol_upper_boundaries : python list List of numpy.polynomial.Polynomial instances with the upper polynomial boundaries computed for the requested slitlets. """ # duplicate to shorten the variable names xoff = float(global_offset_x_pix) yoff = float(global_offset_y_pix) list_pol_lower_boundaries = [] list_pol_upper_boundaries = [] for islitlet, csu_bar_slit_center in \ zip(list_islitlet, list_csu_bar_slit_center): tmpcolor = micolors[islitlet % 2] pol_lower_expected = expected_distorted_boundaries( islitlet, csu_bar_slit_center, [0], params, parmodel, numpts=101, deg=5, debugplot=0 )[0].poly_funct list_pol_lower_boundaries.append(pol_lower_expected) pol_upper_expected = expected_distorted_boundaries( islitlet, csu_bar_slit_center, [1], params, parmodel, numpts=101, deg=5, debugplot=0 )[0].poly_funct list_pol_upper_boundaries.append(pol_upper_expected) xdum = np.linspace(1, EMIR_NAXIS1, num=EMIR_NAXIS1) ydum1 = pol_lower_expected(xdum) ax.plot(xdum + xoff, ydum1 + yoff, tmpcolor + linetype) ydum2 = pol_upper_expected(xdum) ax.plot(xdum + xoff, ydum2 + yoff, tmpcolor + linetype) if alpha_fill is not None: ax.fill_between(xdum + xoff, ydum1 + yoff, ydum2 + yoff, facecolor=tmpcolor, alpha=alpha_fill) if labels: # slitlet label yc_lower = pol_lower_expected(EMIR_NAXIS1 / 2 + 0.5) yc_upper = pol_upper_expected(EMIR_NAXIS1 / 2 + 0.5) xcsu = EMIR_NAXIS1 * csu_bar_slit_center / 341.5 ax.text(xcsu + xoff, (yc_lower + yc_upper) / 2 + yoff, str(islitlet), fontsize=10, va='center', ha='center', bbox=dict(boxstyle="round,pad=0.1", fc="white", ec="grey"), color=tmpcolor, fontweight='bold', backgroundcolor='white') # return lists with boundaries return list_pol_lower_boundaries, list_pol_upper_boundaries
python
def overplot_boundaries_from_params(ax, params, parmodel, list_islitlet, list_csu_bar_slit_center, micolors=('m', 'c'), linetype='--', labels=True, alpha_fill=None, global_offset_x_pix=0, global_offset_y_pix=0): """Overplot boundaries computed from fitted parameters. Parameters ---------- ax : matplotlib axes Current plot axes. params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. list_islitlet : list of integers Slitlet numbers to be considered. longslits. list_csu_bar_slit_center : list of floats CSU bar slit centers of the considered slitlets. micolors : Python list List with two characters corresponding to alternating colors for odd and even slitlets. linetype : str Line type. labels : bool If True, display slilet label alpha_fill : float or None Alpha factor to be employed to fill slitlet region. global_integer_offset_x_pix : int or float Global offset in the X direction to be applied after computing the expected location. global_offset_y_pix : int or float Global offset in the Y direction to be applied after computing the expected location. Returns ------- list_pol_lower_boundaries : python list List of numpy.polynomial.Polynomial instances with the lower polynomial boundaries computed for the requested slitlets. list_pol_upper_boundaries : python list List of numpy.polynomial.Polynomial instances with the upper polynomial boundaries computed for the requested slitlets. """ # duplicate to shorten the variable names xoff = float(global_offset_x_pix) yoff = float(global_offset_y_pix) list_pol_lower_boundaries = [] list_pol_upper_boundaries = [] for islitlet, csu_bar_slit_center in \ zip(list_islitlet, list_csu_bar_slit_center): tmpcolor = micolors[islitlet % 2] pol_lower_expected = expected_distorted_boundaries( islitlet, csu_bar_slit_center, [0], params, parmodel, numpts=101, deg=5, debugplot=0 )[0].poly_funct list_pol_lower_boundaries.append(pol_lower_expected) pol_upper_expected = expected_distorted_boundaries( islitlet, csu_bar_slit_center, [1], params, parmodel, numpts=101, deg=5, debugplot=0 )[0].poly_funct list_pol_upper_boundaries.append(pol_upper_expected) xdum = np.linspace(1, EMIR_NAXIS1, num=EMIR_NAXIS1) ydum1 = pol_lower_expected(xdum) ax.plot(xdum + xoff, ydum1 + yoff, tmpcolor + linetype) ydum2 = pol_upper_expected(xdum) ax.plot(xdum + xoff, ydum2 + yoff, tmpcolor + linetype) if alpha_fill is not None: ax.fill_between(xdum + xoff, ydum1 + yoff, ydum2 + yoff, facecolor=tmpcolor, alpha=alpha_fill) if labels: # slitlet label yc_lower = pol_lower_expected(EMIR_NAXIS1 / 2 + 0.5) yc_upper = pol_upper_expected(EMIR_NAXIS1 / 2 + 0.5) xcsu = EMIR_NAXIS1 * csu_bar_slit_center / 341.5 ax.text(xcsu + xoff, (yc_lower + yc_upper) / 2 + yoff, str(islitlet), fontsize=10, va='center', ha='center', bbox=dict(boxstyle="round,pad=0.1", fc="white", ec="grey"), color=tmpcolor, fontweight='bold', backgroundcolor='white') # return lists with boundaries return list_pol_lower_boundaries, list_pol_upper_boundaries
[ "def", "overplot_boundaries_from_params", "(", "ax", ",", "params", ",", "parmodel", ",", "list_islitlet", ",", "list_csu_bar_slit_center", ",", "micolors", "=", "(", "'m'", ",", "'c'", ")", ",", "linetype", "=", "'--'", ",", "labels", "=", "True", ",", "alpha_fill", "=", "None", ",", "global_offset_x_pix", "=", "0", ",", "global_offset_y_pix", "=", "0", ")", ":", "# duplicate to shorten the variable names", "xoff", "=", "float", "(", "global_offset_x_pix", ")", "yoff", "=", "float", "(", "global_offset_y_pix", ")", "list_pol_lower_boundaries", "=", "[", "]", "list_pol_upper_boundaries", "=", "[", "]", "for", "islitlet", ",", "csu_bar_slit_center", "in", "zip", "(", "list_islitlet", ",", "list_csu_bar_slit_center", ")", ":", "tmpcolor", "=", "micolors", "[", "islitlet", "%", "2", "]", "pol_lower_expected", "=", "expected_distorted_boundaries", "(", "islitlet", ",", "csu_bar_slit_center", ",", "[", "0", "]", ",", "params", ",", "parmodel", ",", "numpts", "=", "101", ",", "deg", "=", "5", ",", "debugplot", "=", "0", ")", "[", "0", "]", ".", "poly_funct", "list_pol_lower_boundaries", ".", "append", "(", "pol_lower_expected", ")", "pol_upper_expected", "=", "expected_distorted_boundaries", "(", "islitlet", ",", "csu_bar_slit_center", ",", "[", "1", "]", ",", "params", ",", "parmodel", ",", "numpts", "=", "101", ",", "deg", "=", "5", ",", "debugplot", "=", "0", ")", "[", "0", "]", ".", "poly_funct", "list_pol_upper_boundaries", ".", "append", "(", "pol_upper_expected", ")", "xdum", "=", "np", ".", "linspace", "(", "1", ",", "EMIR_NAXIS1", ",", "num", "=", "EMIR_NAXIS1", ")", "ydum1", "=", "pol_lower_expected", "(", "xdum", ")", "ax", ".", "plot", "(", "xdum", "+", "xoff", ",", "ydum1", "+", "yoff", ",", "tmpcolor", "+", "linetype", ")", "ydum2", "=", "pol_upper_expected", "(", "xdum", ")", "ax", ".", "plot", "(", "xdum", "+", "xoff", ",", "ydum2", "+", "yoff", ",", "tmpcolor", "+", "linetype", ")", "if", "alpha_fill", "is", "not", "None", ":", "ax", ".", "fill_between", "(", "xdum", "+", "xoff", ",", "ydum1", "+", "yoff", ",", "ydum2", "+", "yoff", ",", "facecolor", "=", "tmpcolor", ",", "alpha", "=", "alpha_fill", ")", "if", "labels", ":", "# slitlet label", "yc_lower", "=", "pol_lower_expected", "(", "EMIR_NAXIS1", "/", "2", "+", "0.5", ")", "yc_upper", "=", "pol_upper_expected", "(", "EMIR_NAXIS1", "/", "2", "+", "0.5", ")", "xcsu", "=", "EMIR_NAXIS1", "*", "csu_bar_slit_center", "/", "341.5", "ax", ".", "text", "(", "xcsu", "+", "xoff", ",", "(", "yc_lower", "+", "yc_upper", ")", "/", "2", "+", "yoff", ",", "str", "(", "islitlet", ")", ",", "fontsize", "=", "10", ",", "va", "=", "'center'", ",", "ha", "=", "'center'", ",", "bbox", "=", "dict", "(", "boxstyle", "=", "\"round,pad=0.1\"", ",", "fc", "=", "\"white\"", ",", "ec", "=", "\"grey\"", ")", ",", "color", "=", "tmpcolor", ",", "fontweight", "=", "'bold'", ",", "backgroundcolor", "=", "'white'", ")", "# return lists with boundaries", "return", "list_pol_lower_boundaries", ",", "list_pol_upper_boundaries" ]
Overplot boundaries computed from fitted parameters.

    Parameters
    ----------
    ax : matplotlib axes
        Current plot axes.
    params : :class:`~lmfit.parameter.Parameters`
        Parameters to be employed in the prediction of the distorted
        boundaries.
    parmodel : str
        Model to be assumed. Allowed values are 'longslit' and
        'multislit'.
    list_islitlet : list of integers
        Slitlet numbers to be considered.
    list_csu_bar_slit_center : list of floats
        CSU bar slit centers of the considered slitlets.
    micolors : Python list
        List with two characters corresponding to alternating colors
        for odd and even slitlets.
    linetype : str
        Line type.
    labels : bool
        If True, display slitlet label.
    alpha_fill : float or None
        Alpha factor to be employed to fill slitlet region.
    global_offset_x_pix : int or float
        Global offset in the X direction to be applied after computing
        the expected location.
    global_offset_y_pix : int or float
        Global offset in the Y direction to be applied after computing
        the expected location.

    Returns
    -------
    list_pol_lower_boundaries : python list
        List of numpy.polynomial.Polynomial instances with the lower
        polynomial boundaries computed for the requested slitlets.
    list_pol_upper_boundaries : python list
        List of numpy.polynomial.Polynomial instances with the upper
        polynomial boundaries computed for the requested slitlets.
[ "Overplot", "boundaries", "computed", "from", "fitted", "parameters", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/fit_boundaries.py#L719-L811
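Sketch continuing the previous one, now drawing the model-predicted boundaries instead of the measured ones (slitlet numbers and CSU values are illustrative). The returned Polynomial lists can be reused afterwards, e.g. to measure the predicted slit height.

fig, ax = plt.subplots()
ax.imshow(image2d, origin='lower', aspect='auto', cmap='gray')
lower_pols, upper_pols = overplot_boundaries_from_params(
    ax, params, 'longslit',
    [10, 11, 12], [170.0, 170.0, 170.0],
    global_offset_x_pix=0, global_offset_y_pix=0)
# predicted slit height (pixels) at channel 1000 for the first slitlet
print(upper_pols[0](1000.0) - lower_pols[0](1000.0))
plt.show()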
guaix-ucm/pyemir
emirdrp/tools/fit_boundaries.py
save_boundaries_from_bounddict_ds9
def save_boundaries_from_bounddict_ds9(bounddict, ds9_filename, numpix=100): """Export to ds9 region file the boundaries in bounddict. Parameters ---------- bounddict : JSON structure Structure employed to store bounddict information. ds9_filename : str Output file name for the ds9 region file. numpix : int Number of points in which the X-range interval is subdivided in order to save each boundary as a connected set of line segments. """ ds9_file = open(ds9_filename, 'w') ds9_file.write('# Region file format: DS9 version 4.1\n') ds9_file.write('global color=green dashlist=2 4 width=2 ' 'font="helvetica 10 normal roman" select=1 ' 'highlite=1 dash=1 fixed=0 edit=1 ' 'move=1 delete=1 include=1 source=1\n') ds9_file.write('physical\n') uuid = bounddict['uuid'] spfilter = bounddict['tags']['filter'] grism = bounddict['tags']['grism'] ds9_file.write('#\n# uuid (boundict file): {0}\n'.format(uuid)) ds9_file.write('# filter..............: {0}\n'.format(spfilter)) ds9_file.write('# grism...............: {0}\n'.format(grism)) colorbox = ['green', 'green'] for islitlet in range(1, EMIR_NBARS + 1): tmp_slitlet = 'slitlet' + str(islitlet).zfill(2) if tmp_slitlet in bounddict['contents'].keys(): ds9_file.write('#\n# islitlet: {0}\n'.format(tmp_slitlet)) read_dateobs = list(bounddict['contents'][tmp_slitlet].keys()) read_dateobs.sort() for tmp_dateobs in read_dateobs: ds9_file.write('#\n# date-obs: {0}\n'.format(tmp_dateobs)) tmp_dict = bounddict['contents'][tmp_slitlet][tmp_dateobs] # lower boundary pol_lower_measured = np.polynomial.Polynomial( tmp_dict['boundary_coef_lower'] ) xmin_lower = tmp_dict['boundary_xmin_lower'] xmax_lower = tmp_dict['boundary_xmax_lower'] xdum = np.linspace(xmin_lower, xmax_lower, num=numpix) ydum = pol_lower_measured(xdum) for i in range(len(xdum) - 1): ds9_file.write( 'line {0} {1} {2} {3}'.format(xdum[i], ydum[i], xdum[i + 1], ydum[i + 1]) ) ds9_file.write( ' # color={0}\n'.format(colorbox[islitlet % 2])) # upper boundary pol_upper_measured = np.polynomial.Polynomial( tmp_dict['boundary_coef_upper'] ) xmin_upper = tmp_dict['boundary_xmin_upper'] xmax_upper = tmp_dict['boundary_xmax_upper'] xdum = np.linspace(xmin_upper, xmax_upper, num=numpix) ydum = pol_upper_measured(xdum) for i in range(len(xdum) - 1): ds9_file.write( 'line {0} {1} {2} {3}'.format(xdum[i], ydum[i], xdum[i + 1], ydum[i + 1]) ) ds9_file.write( ' # color={0}\n'.format(colorbox[islitlet % 2])) # slitlet label xlabel = xmax_lower + xmax_upper + xmin_lower + xmin_upper xlabel /= 4 yc_lower = pol_lower_measured(xlabel) yc_upper = pol_upper_measured(xlabel) ds9_file.write('text {0} {1} {{{2}}} # color={3} ' 'font="helvetica 10 bold ' 'roman"\n'.format(xlabel, (yc_lower + yc_upper) / 2, islitlet, colorbox[islitlet % 2])) ds9_file.close()
python
def save_boundaries_from_bounddict_ds9(bounddict, ds9_filename, numpix=100): """Export to ds9 region file the boundaries in bounddict. Parameters ---------- bounddict : JSON structure Structure employed to store bounddict information. ds9_filename : str Output file name for the ds9 region file. numpix : int Number of points in which the X-range interval is subdivided in order to save each boundary as a connected set of line segments. """ ds9_file = open(ds9_filename, 'w') ds9_file.write('# Region file format: DS9 version 4.1\n') ds9_file.write('global color=green dashlist=2 4 width=2 ' 'font="helvetica 10 normal roman" select=1 ' 'highlite=1 dash=1 fixed=0 edit=1 ' 'move=1 delete=1 include=1 source=1\n') ds9_file.write('physical\n') uuid = bounddict['uuid'] spfilter = bounddict['tags']['filter'] grism = bounddict['tags']['grism'] ds9_file.write('#\n# uuid (boundict file): {0}\n'.format(uuid)) ds9_file.write('# filter..............: {0}\n'.format(spfilter)) ds9_file.write('# grism...............: {0}\n'.format(grism)) colorbox = ['green', 'green'] for islitlet in range(1, EMIR_NBARS + 1): tmp_slitlet = 'slitlet' + str(islitlet).zfill(2) if tmp_slitlet in bounddict['contents'].keys(): ds9_file.write('#\n# islitlet: {0}\n'.format(tmp_slitlet)) read_dateobs = list(bounddict['contents'][tmp_slitlet].keys()) read_dateobs.sort() for tmp_dateobs in read_dateobs: ds9_file.write('#\n# date-obs: {0}\n'.format(tmp_dateobs)) tmp_dict = bounddict['contents'][tmp_slitlet][tmp_dateobs] # lower boundary pol_lower_measured = np.polynomial.Polynomial( tmp_dict['boundary_coef_lower'] ) xmin_lower = tmp_dict['boundary_xmin_lower'] xmax_lower = tmp_dict['boundary_xmax_lower'] xdum = np.linspace(xmin_lower, xmax_lower, num=numpix) ydum = pol_lower_measured(xdum) for i in range(len(xdum) - 1): ds9_file.write( 'line {0} {1} {2} {3}'.format(xdum[i], ydum[i], xdum[i + 1], ydum[i + 1]) ) ds9_file.write( ' # color={0}\n'.format(colorbox[islitlet % 2])) # upper boundary pol_upper_measured = np.polynomial.Polynomial( tmp_dict['boundary_coef_upper'] ) xmin_upper = tmp_dict['boundary_xmin_upper'] xmax_upper = tmp_dict['boundary_xmax_upper'] xdum = np.linspace(xmin_upper, xmax_upper, num=numpix) ydum = pol_upper_measured(xdum) for i in range(len(xdum) - 1): ds9_file.write( 'line {0} {1} {2} {3}'.format(xdum[i], ydum[i], xdum[i + 1], ydum[i + 1]) ) ds9_file.write( ' # color={0}\n'.format(colorbox[islitlet % 2])) # slitlet label xlabel = xmax_lower + xmax_upper + xmin_lower + xmin_upper xlabel /= 4 yc_lower = pol_lower_measured(xlabel) yc_upper = pol_upper_measured(xlabel) ds9_file.write('text {0} {1} {{{2}}} # color={3} ' 'font="helvetica 10 bold ' 'roman"\n'.format(xlabel, (yc_lower + yc_upper) / 2, islitlet, colorbox[islitlet % 2])) ds9_file.close()
[ "def", "save_boundaries_from_bounddict_ds9", "(", "bounddict", ",", "ds9_filename", ",", "numpix", "=", "100", ")", ":", "ds9_file", "=", "open", "(", "ds9_filename", ",", "'w'", ")", "ds9_file", ".", "write", "(", "'# Region file format: DS9 version 4.1\\n'", ")", "ds9_file", ".", "write", "(", "'global color=green dashlist=2 4 width=2 '", "'font=\"helvetica 10 normal roman\" select=1 '", "'highlite=1 dash=1 fixed=0 edit=1 '", "'move=1 delete=1 include=1 source=1\\n'", ")", "ds9_file", ".", "write", "(", "'physical\\n'", ")", "uuid", "=", "bounddict", "[", "'uuid'", "]", "spfilter", "=", "bounddict", "[", "'tags'", "]", "[", "'filter'", "]", "grism", "=", "bounddict", "[", "'tags'", "]", "[", "'grism'", "]", "ds9_file", ".", "write", "(", "'#\\n# uuid (boundict file): {0}\\n'", ".", "format", "(", "uuid", ")", ")", "ds9_file", ".", "write", "(", "'# filter..............: {0}\\n'", ".", "format", "(", "spfilter", ")", ")", "ds9_file", ".", "write", "(", "'# grism...............: {0}\\n'", ".", "format", "(", "grism", ")", ")", "colorbox", "=", "[", "'green'", ",", "'green'", "]", "for", "islitlet", "in", "range", "(", "1", ",", "EMIR_NBARS", "+", "1", ")", ":", "tmp_slitlet", "=", "'slitlet'", "+", "str", "(", "islitlet", ")", ".", "zfill", "(", "2", ")", "if", "tmp_slitlet", "in", "bounddict", "[", "'contents'", "]", ".", "keys", "(", ")", ":", "ds9_file", ".", "write", "(", "'#\\n# islitlet: {0}\\n'", ".", "format", "(", "tmp_slitlet", ")", ")", "read_dateobs", "=", "list", "(", "bounddict", "[", "'contents'", "]", "[", "tmp_slitlet", "]", ".", "keys", "(", ")", ")", "read_dateobs", ".", "sort", "(", ")", "for", "tmp_dateobs", "in", "read_dateobs", ":", "ds9_file", ".", "write", "(", "'#\\n# date-obs: {0}\\n'", ".", "format", "(", "tmp_dateobs", ")", ")", "tmp_dict", "=", "bounddict", "[", "'contents'", "]", "[", "tmp_slitlet", "]", "[", "tmp_dateobs", "]", "# lower boundary", "pol_lower_measured", "=", "np", ".", "polynomial", ".", "Polynomial", "(", "tmp_dict", "[", "'boundary_coef_lower'", "]", ")", "xmin_lower", "=", "tmp_dict", "[", "'boundary_xmin_lower'", "]", "xmax_lower", "=", "tmp_dict", "[", "'boundary_xmax_lower'", "]", "xdum", "=", "np", ".", "linspace", "(", "xmin_lower", ",", "xmax_lower", ",", "num", "=", "numpix", ")", "ydum", "=", "pol_lower_measured", "(", "xdum", ")", "for", "i", "in", "range", "(", "len", "(", "xdum", ")", "-", "1", ")", ":", "ds9_file", ".", "write", "(", "'line {0} {1} {2} {3}'", ".", "format", "(", "xdum", "[", "i", "]", ",", "ydum", "[", "i", "]", ",", "xdum", "[", "i", "+", "1", "]", ",", "ydum", "[", "i", "+", "1", "]", ")", ")", "ds9_file", ".", "write", "(", "' # color={0}\\n'", ".", "format", "(", "colorbox", "[", "islitlet", "%", "2", "]", ")", ")", "# upper boundary", "pol_upper_measured", "=", "np", ".", "polynomial", ".", "Polynomial", "(", "tmp_dict", "[", "'boundary_coef_upper'", "]", ")", "xmin_upper", "=", "tmp_dict", "[", "'boundary_xmin_upper'", "]", "xmax_upper", "=", "tmp_dict", "[", "'boundary_xmax_upper'", "]", "xdum", "=", "np", ".", "linspace", "(", "xmin_upper", ",", "xmax_upper", ",", "num", "=", "numpix", ")", "ydum", "=", "pol_upper_measured", "(", "xdum", ")", "for", "i", "in", "range", "(", "len", "(", "xdum", ")", "-", "1", ")", ":", "ds9_file", ".", "write", "(", "'line {0} {1} {2} {3}'", ".", "format", "(", "xdum", "[", "i", "]", ",", "ydum", "[", "i", "]", ",", "xdum", "[", "i", "+", "1", "]", ",", "ydum", "[", "i", "+", "1", "]", ")", ")", "ds9_file", ".", "write", "(", "' # color={0}\\n'", ".", "format", "(", 
"colorbox", "[", "islitlet", "%", "2", "]", ")", ")", "# slitlet label", "xlabel", "=", "xmax_lower", "+", "xmax_upper", "+", "xmin_lower", "+", "xmin_upper", "xlabel", "/=", "4", "yc_lower", "=", "pol_lower_measured", "(", "xlabel", ")", "yc_upper", "=", "pol_upper_measured", "(", "xlabel", ")", "ds9_file", ".", "write", "(", "'text {0} {1} {{{2}}} # color={3} '", "'font=\"helvetica 10 bold '", "'roman\"\\n'", ".", "format", "(", "xlabel", ",", "(", "yc_lower", "+", "yc_upper", ")", "/", "2", ",", "islitlet", ",", "colorbox", "[", "islitlet", "%", "2", "]", ")", ")", "ds9_file", ".", "close", "(", ")" ]
Export to ds9 region file the boundaries in bounddict.

    Parameters
    ----------
    bounddict : JSON structure
        Structure employed to store bounddict information.
    ds9_filename : str
        Output file name for the ds9 region file.
    numpix : int
        Number of points in which the X-range interval is subdivided
        in order to save each boundary as a connected set of line
        segments.
[ "Export", "to", "ds9", "region", "file", "the", "boundaries", "in", "bounddict", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/fit_boundaries.py#L907-L992
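Export sketch (file names hypothetical):

import json
from emirdrp.tools.fit_boundaries import save_boundaries_from_bounddict_ds9

with open('bounddict_J_J.json') as f:
    bounddict = json.load(f)
save_boundaries_from_bounddict_ds9(bounddict, 'boundaries_measured.reg')

The region file can then be overlaid on the frame in ds9, e.g. with something like: ds9 emir_frame.fits -regions boundaries_measured.reg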
guaix-ucm/pyemir
emirdrp/tools/fit_boundaries.py
save_boundaries_from_params_ds9
def save_boundaries_from_params_ds9(params, parmodel, list_islitlet, list_csu_bar_slit_center, uuid, grism, spfilter, ds9_filename, numpix=100, global_offset_x_pix=0, global_offset_y_pix=0): """Export to ds9 region file the boundaries parametrised with params. Parameters ---------- params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. list_islitlet : list (integers) Slitlet numbers to be considered. list_csu_bar_slit_center : list of floats CSU bar slit centers of the considered slitlets. uuid: str UUID associated to the parameters 'params'. grism : str Employed grism. spfilter : str Employed filter. ds9_filename : str Output file name for the ds9 region file. numpix : int Number of points in which the X-range interval is subdivided in order to save each boundary as a connected set of line segments. global_offset_x_pix : int or float Global offset in the X direction to be applied after computing the expected location. global_offset_y_pix : int or float Global offset in the Y direction to be applied after computing the expected location. """ ds9_file = open(ds9_filename, 'w') ds9_file.write('# Region file format: DS9 version 4.1\n') ds9_file.write('global color=green dashlist=2 4 width=2 ' 'font="helvetica 10 normal roman" select=1 ' 'highlite=1 dash=1 fixed=0 edit=1 ' 'move=1 delete=1 include=1 source=1\n') ds9_file.write('physical\n#\n') ds9_file.write('#\n# uuid..: {0}\n'.format(uuid)) ds9_file.write('# filter: {0}\n'.format(spfilter)) ds9_file.write('# grism.: {0}\n'.format(grism)) ds9_file.write('#\n# global_offset_x_pix: {0}\n'.format( global_offset_x_pix)) ds9_file.write('# global_offset_y_pix: {0}\n#\n'.format( global_offset_y_pix)) # duplicate to shorten the variable names xoff = float(global_offset_x_pix) yoff = float(global_offset_y_pix) if parmodel == "longslit": for dumpar in EXPECTED_PARAMETER_LIST: parvalue = params[dumpar].value ds9_file.write('# {0}: {1}\n'.format(dumpar, parvalue)) else: for dumpar in EXPECTED_PARAMETER_LIST_EXTENDED: parvalue = params[dumpar].value ds9_file.write('# {0}: {1}\n'.format(dumpar, parvalue)) for islitlet, csu_bar_slit_center in \ zip(list_islitlet, list_csu_bar_slit_center): if islitlet % 2 == 0: colorbox = '#ff00ff' # '#ff77ff' else: colorbox = '#00ffff' # '#4444ff' ds9_file.write( '#\n# islitlet...........: {0}\n'.format(islitlet) ) ds9_file.write( '# csu_bar_slit_center: {0}\n'.format(csu_bar_slit_center) ) pol_lower_expected = expected_distorted_boundaries( islitlet, csu_bar_slit_center, [0], params, parmodel, numpts=101, deg=5, debugplot=0 )[0].poly_funct pol_upper_expected = expected_distorted_boundaries( islitlet, csu_bar_slit_center, [1], params, parmodel, numpts=101, deg=5, debugplot=0 )[0].poly_funct xdum = np.linspace(1, EMIR_NAXIS1, num=numpix) ydum = pol_lower_expected(xdum) for i in range(len(xdum)-1): ds9_file.write( 'line {0} {1} {2} {3}'.format(xdum[i]+xoff, ydum[i]+yoff, xdum[i+1]+xoff, ydum[i+1]+yoff) ) ds9_file.write(' # color={0}\n'.format(colorbox)) ydum = pol_upper_expected(xdum) for i in range(len(xdum)-1): ds9_file.write( 'line {0} {1} {2} {3}'.format(xdum[i]+xoff, ydum[i]+yoff, xdum[i+1]+xoff, ydum[i+1]+yoff) ) ds9_file.write(' # color={0}\n'.format(colorbox)) # slitlet label yc_lower = pol_lower_expected(EMIR_NAXIS1 / 2 + 0.5) yc_upper = pol_upper_expected(EMIR_NAXIS1 / 2 + 0.5) ds9_file.write('text {0} {1} {{{2}}} # color={3} ' 'font="helvetica 10 bold ' 
'roman"\n'.format(EMIR_NAXIS1 / 2 + 0.5 + xoff, (yc_lower + yc_upper) / 2 + yoff, islitlet, colorbox)) ds9_file.close()
python
def save_boundaries_from_params_ds9(params, parmodel, list_islitlet, list_csu_bar_slit_center, uuid, grism, spfilter, ds9_filename, numpix=100, global_offset_x_pix=0, global_offset_y_pix=0): """Export to ds9 region file the boundaries parametrised with params. Parameters ---------- params : :class:`~lmfit.parameter.Parameters` Parameters to be employed in the prediction of the distorted boundaries. parmodel : str Model to be assumed. Allowed values are 'longslit' and 'multislit'. list_islitlet : list (integers) Slitlet numbers to be considered. list_csu_bar_slit_center : list of floats CSU bar slit centers of the considered slitlets. uuid: str UUID associated to the parameters 'params'. grism : str Employed grism. spfilter : str Employed filter. ds9_filename : str Output file name for the ds9 region file. numpix : int Number of points in which the X-range interval is subdivided in order to save each boundary as a connected set of line segments. global_offset_x_pix : int or float Global offset in the X direction to be applied after computing the expected location. global_offset_y_pix : int or float Global offset in the Y direction to be applied after computing the expected location. """ ds9_file = open(ds9_filename, 'w') ds9_file.write('# Region file format: DS9 version 4.1\n') ds9_file.write('global color=green dashlist=2 4 width=2 ' 'font="helvetica 10 normal roman" select=1 ' 'highlite=1 dash=1 fixed=0 edit=1 ' 'move=1 delete=1 include=1 source=1\n') ds9_file.write('physical\n#\n') ds9_file.write('#\n# uuid..: {0}\n'.format(uuid)) ds9_file.write('# filter: {0}\n'.format(spfilter)) ds9_file.write('# grism.: {0}\n'.format(grism)) ds9_file.write('#\n# global_offset_x_pix: {0}\n'.format( global_offset_x_pix)) ds9_file.write('# global_offset_y_pix: {0}\n#\n'.format( global_offset_y_pix)) # duplicate to shorten the variable names xoff = float(global_offset_x_pix) yoff = float(global_offset_y_pix) if parmodel == "longslit": for dumpar in EXPECTED_PARAMETER_LIST: parvalue = params[dumpar].value ds9_file.write('# {0}: {1}\n'.format(dumpar, parvalue)) else: for dumpar in EXPECTED_PARAMETER_LIST_EXTENDED: parvalue = params[dumpar].value ds9_file.write('# {0}: {1}\n'.format(dumpar, parvalue)) for islitlet, csu_bar_slit_center in \ zip(list_islitlet, list_csu_bar_slit_center): if islitlet % 2 == 0: colorbox = '#ff00ff' # '#ff77ff' else: colorbox = '#00ffff' # '#4444ff' ds9_file.write( '#\n# islitlet...........: {0}\n'.format(islitlet) ) ds9_file.write( '# csu_bar_slit_center: {0}\n'.format(csu_bar_slit_center) ) pol_lower_expected = expected_distorted_boundaries( islitlet, csu_bar_slit_center, [0], params, parmodel, numpts=101, deg=5, debugplot=0 )[0].poly_funct pol_upper_expected = expected_distorted_boundaries( islitlet, csu_bar_slit_center, [1], params, parmodel, numpts=101, deg=5, debugplot=0 )[0].poly_funct xdum = np.linspace(1, EMIR_NAXIS1, num=numpix) ydum = pol_lower_expected(xdum) for i in range(len(xdum)-1): ds9_file.write( 'line {0} {1} {2} {3}'.format(xdum[i]+xoff, ydum[i]+yoff, xdum[i+1]+xoff, ydum[i+1]+yoff) ) ds9_file.write(' # color={0}\n'.format(colorbox)) ydum = pol_upper_expected(xdum) for i in range(len(xdum)-1): ds9_file.write( 'line {0} {1} {2} {3}'.format(xdum[i]+xoff, ydum[i]+yoff, xdum[i+1]+xoff, ydum[i+1]+yoff) ) ds9_file.write(' # color={0}\n'.format(colorbox)) # slitlet label yc_lower = pol_lower_expected(EMIR_NAXIS1 / 2 + 0.5) yc_upper = pol_upper_expected(EMIR_NAXIS1 / 2 + 0.5) ds9_file.write('text {0} {1} {{{2}}} # color={3} ' 'font="helvetica 10 bold ' 
'roman"\n'.format(EMIR_NAXIS1 / 2 + 0.5 + xoff, (yc_lower + yc_upper) / 2 + yoff, islitlet, colorbox)) ds9_file.close()
[ "def", "save_boundaries_from_params_ds9", "(", "params", ",", "parmodel", ",", "list_islitlet", ",", "list_csu_bar_slit_center", ",", "uuid", ",", "grism", ",", "spfilter", ",", "ds9_filename", ",", "numpix", "=", "100", ",", "global_offset_x_pix", "=", "0", ",", "global_offset_y_pix", "=", "0", ")", ":", "ds9_file", "=", "open", "(", "ds9_filename", ",", "'w'", ")", "ds9_file", ".", "write", "(", "'# Region file format: DS9 version 4.1\\n'", ")", "ds9_file", ".", "write", "(", "'global color=green dashlist=2 4 width=2 '", "'font=\"helvetica 10 normal roman\" select=1 '", "'highlite=1 dash=1 fixed=0 edit=1 '", "'move=1 delete=1 include=1 source=1\\n'", ")", "ds9_file", ".", "write", "(", "'physical\\n#\\n'", ")", "ds9_file", ".", "write", "(", "'#\\n# uuid..: {0}\\n'", ".", "format", "(", "uuid", ")", ")", "ds9_file", ".", "write", "(", "'# filter: {0}\\n'", ".", "format", "(", "spfilter", ")", ")", "ds9_file", ".", "write", "(", "'# grism.: {0}\\n'", ".", "format", "(", "grism", ")", ")", "ds9_file", ".", "write", "(", "'#\\n# global_offset_x_pix: {0}\\n'", ".", "format", "(", "global_offset_x_pix", ")", ")", "ds9_file", ".", "write", "(", "'# global_offset_y_pix: {0}\\n#\\n'", ".", "format", "(", "global_offset_y_pix", ")", ")", "# duplicate to shorten the variable names", "xoff", "=", "float", "(", "global_offset_x_pix", ")", "yoff", "=", "float", "(", "global_offset_y_pix", ")", "if", "parmodel", "==", "\"longslit\"", ":", "for", "dumpar", "in", "EXPECTED_PARAMETER_LIST", ":", "parvalue", "=", "params", "[", "dumpar", "]", ".", "value", "ds9_file", ".", "write", "(", "'# {0}: {1}\\n'", ".", "format", "(", "dumpar", ",", "parvalue", ")", ")", "else", ":", "for", "dumpar", "in", "EXPECTED_PARAMETER_LIST_EXTENDED", ":", "parvalue", "=", "params", "[", "dumpar", "]", ".", "value", "ds9_file", ".", "write", "(", "'# {0}: {1}\\n'", ".", "format", "(", "dumpar", ",", "parvalue", ")", ")", "for", "islitlet", ",", "csu_bar_slit_center", "in", "zip", "(", "list_islitlet", ",", "list_csu_bar_slit_center", ")", ":", "if", "islitlet", "%", "2", "==", "0", ":", "colorbox", "=", "'#ff00ff'", "# '#ff77ff'", "else", ":", "colorbox", "=", "'#00ffff'", "# '#4444ff'", "ds9_file", ".", "write", "(", "'#\\n# islitlet...........: {0}\\n'", ".", "format", "(", "islitlet", ")", ")", "ds9_file", ".", "write", "(", "'# csu_bar_slit_center: {0}\\n'", ".", "format", "(", "csu_bar_slit_center", ")", ")", "pol_lower_expected", "=", "expected_distorted_boundaries", "(", "islitlet", ",", "csu_bar_slit_center", ",", "[", "0", "]", ",", "params", ",", "parmodel", ",", "numpts", "=", "101", ",", "deg", "=", "5", ",", "debugplot", "=", "0", ")", "[", "0", "]", ".", "poly_funct", "pol_upper_expected", "=", "expected_distorted_boundaries", "(", "islitlet", ",", "csu_bar_slit_center", ",", "[", "1", "]", ",", "params", ",", "parmodel", ",", "numpts", "=", "101", ",", "deg", "=", "5", ",", "debugplot", "=", "0", ")", "[", "0", "]", ".", "poly_funct", "xdum", "=", "np", ".", "linspace", "(", "1", ",", "EMIR_NAXIS1", ",", "num", "=", "numpix", ")", "ydum", "=", "pol_lower_expected", "(", "xdum", ")", "for", "i", "in", "range", "(", "len", "(", "xdum", ")", "-", "1", ")", ":", "ds9_file", ".", "write", "(", "'line {0} {1} {2} {3}'", ".", "format", "(", "xdum", "[", "i", "]", "+", "xoff", ",", "ydum", "[", "i", "]", "+", "yoff", ",", "xdum", "[", "i", "+", "1", "]", "+", "xoff", ",", "ydum", "[", "i", "+", "1", "]", "+", "yoff", ")", ")", "ds9_file", ".", "write", "(", "' # color={0}\\n'", ".", "format", "(", "colorbox", ")", 
")", "ydum", "=", "pol_upper_expected", "(", "xdum", ")", "for", "i", "in", "range", "(", "len", "(", "xdum", ")", "-", "1", ")", ":", "ds9_file", ".", "write", "(", "'line {0} {1} {2} {3}'", ".", "format", "(", "xdum", "[", "i", "]", "+", "xoff", ",", "ydum", "[", "i", "]", "+", "yoff", ",", "xdum", "[", "i", "+", "1", "]", "+", "xoff", ",", "ydum", "[", "i", "+", "1", "]", "+", "yoff", ")", ")", "ds9_file", ".", "write", "(", "' # color={0}\\n'", ".", "format", "(", "colorbox", ")", ")", "# slitlet label", "yc_lower", "=", "pol_lower_expected", "(", "EMIR_NAXIS1", "/", "2", "+", "0.5", ")", "yc_upper", "=", "pol_upper_expected", "(", "EMIR_NAXIS1", "/", "2", "+", "0.5", ")", "ds9_file", ".", "write", "(", "'text {0} {1} {{{2}}} # color={3} '", "'font=\"helvetica 10 bold '", "'roman\"\\n'", ".", "format", "(", "EMIR_NAXIS1", "/", "2", "+", "0.5", "+", "xoff", ",", "(", "yc_lower", "+", "yc_upper", ")", "/", "2", "+", "yoff", ",", "islitlet", ",", "colorbox", ")", ")", "ds9_file", ".", "close", "(", ")" ]
Export to ds9 region file the boundaries parametrised with params.

    Parameters
    ----------
    params : :class:`~lmfit.parameter.Parameters`
        Parameters to be employed in the prediction of the distorted
        boundaries.
    parmodel : str
        Model to be assumed. Allowed values are 'longslit' and
        'multislit'.
    list_islitlet : list (integers)
        Slitlet numbers to be considered.
    list_csu_bar_slit_center : list of floats
        CSU bar slit centers of the considered slitlets.
    uuid : str
        UUID associated to the parameters 'params'.
    grism : str
        Employed grism.
    spfilter : str
        Employed filter.
    ds9_filename : str
        Output file name for the ds9 region file.
    numpix : int
        Number of points in which the X-range interval is subdivided
        in order to save each boundary as a connected set of line
        segments.
    global_offset_x_pix : int or float
        Global offset in the X direction to be applied after computing
        the expected location.
    global_offset_y_pix : int or float
        Global offset in the Y direction to be applied after computing
        the expected location.
[ "Export", "to", "ds9", "region", "file", "the", "boundaries", "parametrised", "with", "params", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/fit_boundaries.py#L995-L1114
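Export sketch for the model-based variant (uuid, grism/filter tags and CSU values are placeholders, not real calibration metadata):

from emirdrp.tools.fit_boundaries import save_boundaries_from_params_ds9

nbars = 55  # assumed value of EMIR_NBARS
list_islitlet = list(range(1, nbars + 1))
list_csu_bar_slit_center = [170.0] * nbars  # hypothetical CSU configuration
save_boundaries_from_params_ds9(
    params, 'longslit', list_islitlet, list_csu_bar_slit_center,
    uuid='no-uuid', grism='J', spfilter='J',
    ds9_filename='boundaries_model.reg',
    global_offset_x_pix=0, global_offset_y_pix=0)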
guaix-ucm/pyemir
emirdrp/tools/fit_boundaries.py
bound_params_from_dict
def bound_params_from_dict(bound_param_dict):
    """Define `~lmfit.parameter.Parameters` object from dictionary.

    Parameters
    ----------
    bound_param_dict : dictionary
        Dictionary containing the JSON contents of a boundary parameter
        file.

    Returns
    -------
    params : :class:`~lmfit.parameter.Parameters`
        Parameters object.

    """

    params = Parameters()
    for mainpar in EXPECTED_PARAMETER_LIST:
        if mainpar not in bound_param_dict['contents'].keys():
            raise ValueError('Parameter ' + mainpar + ' not found!')
        if bound_param_dict['meta_info']['parmodel'] == "longslit":
            dumdict = bound_param_dict['contents'][mainpar]
            params.add(mainpar, value=dumdict["value"], vary=dumdict["vary"])
        elif bound_param_dict['meta_info']['parmodel'] == 'multislit':
            for subpar in ['a0s', 'a1s', 'a2s']:
                if subpar not in bound_param_dict['contents'][mainpar].keys():
                    raise ValueError('Subparameter ' + subpar + ' not found' +
                                     ' under parameter ' + mainpar)
                cpar = mainpar + '_' + subpar
                dumdict = bound_param_dict['contents'][mainpar][subpar]
                params.add(cpar, value=dumdict["value"], vary=dumdict["vary"])
        else:
            print('parmodel: ', bound_param_dict['meta_info']['parmodel'])
            raise ValueError('Unexpected parmodel')
    return params
python
def bound_params_from_dict(bound_param_dict):
    """Define `~lmfit.parameter.Parameters` object from dictionary.

    Parameters
    ----------
    bound_param_dict : dictionary
        Dictionary containing the JSON contents of a boundary parameter
        file.

    Returns
    -------
    params : :class:`~lmfit.parameter.Parameters`
        Parameters object.

    """

    params = Parameters()
    for mainpar in EXPECTED_PARAMETER_LIST:
        if mainpar not in bound_param_dict['contents'].keys():
            raise ValueError('Parameter ' + mainpar + ' not found!')
        if bound_param_dict['meta_info']['parmodel'] == "longslit":
            dumdict = bound_param_dict['contents'][mainpar]
            params.add(mainpar, value=dumdict["value"], vary=dumdict["vary"])
        elif bound_param_dict['meta_info']['parmodel'] == 'multislit':
            for subpar in ['a0s', 'a1s', 'a2s']:
                if subpar not in bound_param_dict['contents'][mainpar].keys():
                    raise ValueError('Subparameter ' + subpar + ' not found' +
                                     ' under parameter ' + mainpar)
                cpar = mainpar + '_' + subpar
                dumdict = bound_param_dict['contents'][mainpar][subpar]
                params.add(cpar, value=dumdict["value"], vary=dumdict["vary"])
        else:
            print('parmodel: ', bound_param_dict['meta_info']['parmodel'])
            raise ValueError('Unexpected parmodel')
    return params
[ "def", "bound_params_from_dict", "(", "bound_param_dict", ")", ":", "params", "=", "Parameters", "(", ")", "for", "mainpar", "in", "EXPECTED_PARAMETER_LIST", ":", "if", "mainpar", "not", "in", "bound_param_dict", "[", "'contents'", "]", ".", "keys", "(", ")", ":", "raise", "ValueError", "(", "'Parameter '", "+", "mainpar", "+", "' not found!'", ")", "if", "bound_param_dict", "[", "'meta_info'", "]", "[", "'parmodel'", "]", "==", "\"longslit\"", ":", "dumdict", "=", "bound_param_dict", "[", "'contents'", "]", "[", "mainpar", "]", "params", ".", "add", "(", "mainpar", ",", "value", "=", "dumdict", "[", "\"value\"", "]", ",", "vary", "=", "dumdict", "[", "\"vary\"", "]", ")", "elif", "bound_param_dict", "[", "'meta_info'", "]", "[", "'parmodel'", "]", "==", "'multislit'", ":", "for", "subpar", "in", "[", "'a0s'", ",", "'a1s'", ",", "'a2s'", "]", ":", "if", "subpar", "not", "in", "bound_param_dict", "[", "'contents'", "]", "[", "mainpar", "]", ".", "keys", "(", ")", ":", "raise", "ValueError", "(", "'Subparameter '", "+", "subpar", "+", "' not found'", "+", "' under parameter '", "+", "mainpar", ")", "cpar", "=", "mainpar", "+", "'_'", "+", "subpar", "dumdict", "=", "bound_param_dict", "[", "'contents'", "]", "[", "mainpar", "]", "[", "subpar", "]", "params", ".", "add", "(", "cpar", ",", "value", "=", "dumdict", "[", "\"value\"", "]", ",", "vary", "=", "dumdict", "[", "\"vary\"", "]", ")", "else", ":", "print", "(", "'parmodel: '", ",", "bound_param_dict", "[", "'meta_info'", "]", "[", "'parmodel'", "]", ")", "raise", "ValueError", "(", "'Unexpected parmodel'", ")", "return", "params" ]
Define `~lmfit.parameter.Parameters` object from dictionary.

    Parameters
    ----------
    bound_param_dict : dictionary
        Dictionary containing the JSON contents of a boundary parameter
        file.

    Returns
    -------
    params : :class:`~lmfit.parameter.Parameters`
        Parameters object.
[ "Define", "~lmfit", ".", "parameter", ".", "Parameters", "object", "from", "dictionary", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/fit_boundaries.py#L1237-L1273
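Loading sketch: this is the natural entry point producing the params object used in the sketches above (the file name is hypothetical; the file must follow the meta_info/contents layout the function expects).

import json
from emirdrp.tools.fit_boundaries import bound_params_from_dict

with open('refined_boundary_params.json') as f:
    bound_param_dict = json.load(f)
params = bound_params_from_dict(bound_param_dict)
parmodel = bound_param_dict['meta_info']['parmodel']  # 'longslit' or 'multislit'
params.pretty_print()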
guaix-ucm/pyemir
emirdrp/recipes/image/stare.py
StareImageRecipe2.set_base_headers
def set_base_headers(self, hdr):
        """Set metadata in FITS headers."""
        hdr = super(StareImageRecipe2, self).set_base_headers(hdr)
        # Set EXP to 0
        hdr['EXP'] = 0
        return hdr
python
def set_base_headers(self, hdr):
        """Set metadata in FITS headers."""
        hdr = super(StareImageRecipe2, self).set_base_headers(hdr)
        # Set EXP to 0
        hdr['EXP'] = 0
        return hdr
[ "def", "set_base_headers", "(", "self", ",", "hdr", ")", ":", "hdr", "=", "super", "(", "StareImageRecipe2", ",", "self", ")", ".", "set_base_headers", "(", "hdr", ")", "# Set EXP to 0", "hdr", "[", "'EXP'", "]", "=", "0", "return", "hdr" ]
Set metadata in FITS headers.
[ "Set", "metadata", "in", "FITS", "headers", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/recipes/image/stare.py#L82-L87
guaix-ucm/pyemir
emirdrp/recipes/image/stare.py
StareImageBaseRecipe.set_base_headers
def set_base_headers(self, hdr):
        """Set metadata in FITS headers."""
        hdr = super(StareImageBaseRecipe, self).set_base_headers(hdr)
        # Update EXP to 0
        hdr['EXP'] = 0
        return hdr
python
def set_base_headers(self, hdr):
        """Set metadata in FITS headers."""
        hdr = super(StareImageBaseRecipe, self).set_base_headers(hdr)
        # Update EXP to 0
        hdr['EXP'] = 0
        return hdr
[ "def", "set_base_headers", "(", "self", ",", "hdr", ")", ":", "hdr", "=", "super", "(", "StareImageBaseRecipe", ",", "self", ")", ".", "set_base_headers", "(", "hdr", ")", "# Update EXP to 0", "hdr", "[", "'EXP'", "]", "=", "0", "return", "hdr" ]
Set metadata in FITS headers.
[ "Set", "metadata", "in", "FITS", "headers", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/recipes/image/stare.py#L135-L140
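Both stare-image recipes above implement set_base_headers identically: delegate to the parent recipe's implementation, then force EXP to 0. A minimal stand-alone sketch of this cooperative-super pattern (the classes here are illustrative stand-ins, not the real numina base classes):

from astropy.io import fits

class BaseRecipe(object):
    def set_base_headers(self, hdr):
        hdr['INSTRUME'] = 'EMIR'  # illustrative parent behaviour
        return hdr

class StareLikeRecipe(BaseRecipe):
    def set_base_headers(self, hdr):
        hdr = super(StareLikeRecipe, self).set_base_headers(hdr)
        hdr['EXP'] = 0  # the EMIR recipes reset EXP on top of the parent headers
        return hdr

print(StareLikeRecipe().set_base_headers(fits.Header())['EXP'])  # -> 0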
guaix-ucm/pyemir
emirdrp/processing/wavecal/apply_rectwv_coeff.py
apply_rectwv_coeff
def apply_rectwv_coeff(reduced_image, rectwv_coeff, args_resampling=2,
                       args_ignore_dtu_configuration=True, debugplot=0):
    """Compute rectification and wavelength calibration coefficients.

    Parameters
    ----------
    reduced_image : HDUList object
        Image with preliminary basic reduction: bpm, bias, dark and
        flatfield.
    rectwv_coeff : RectWaveCoeff instance
        Rectification and wavelength calibration coefficients for the
        particular CSU configuration.
    args_resampling : int
        1: nearest neighbour, 2: flux preserving interpolation.
    args_ignore_dtu_configuration : bool
        If True, ignore differences in DTU configuration.
    debugplot : int
        Debugging level for messages and plots. For details see
        'numina.array.display.pause_debugplot.py'.

    Returns
    -------
    rectwv_image : HDUList object
        Rectified and wavelength calibrated image.

    """
    logger = logging.getLogger(__name__)

    # header and data array (use deepcopy to avoid modifying
    # reduced_image[0].header as a side effect)
    header = copy.deepcopy(reduced_image[0].header)
    image2d = reduced_image[0].data

    # apply global offsets
    image2d = apply_integer_offsets(
        image2d=image2d,
        offx=rectwv_coeff.global_integer_offset_x_pix,
        offy=rectwv_coeff.global_integer_offset_y_pix
    )

    # check grism and filter
    filter_name = header['filter']
    logger.info('Filter: ' + filter_name)
    if filter_name != rectwv_coeff.tags['filter']:
        raise ValueError('Filter name does not match!')
    grism_name = header['grism']
    logger.info('Grism: ' + grism_name)
    if grism_name != rectwv_coeff.tags['grism']:
        raise ValueError('Grism name does not match!')

    # read the DTU configuration from the image header
    dtu_conf = DtuConfiguration.define_from_header(header)
    # retrieve DTU configuration from RectWaveCoeff object
    dtu_conf_calib = DtuConfiguration.define_from_dictionary(
        rectwv_coeff.meta_info['dtu_configuration']
    )
    # check that the DTU configuration employed to obtain the calibration
    # corresponds to the DTU configuration in the input FITS file
    if dtu_conf != dtu_conf_calib:
        if args_ignore_dtu_configuration:
            logger.warning('DTU configuration differences found!')
        else:
            logger.warning('DTU configuration from image header:')
            logger.warning(dtu_conf)
            logger.warning('DTU configuration from master calibration:')
            logger.warning(dtu_conf_calib)
            raise ValueError("DTU configurations do not match!")
    else:
        logger.info('DTU configuration match!')

    # valid slitlet numbers
    list_valid_islitlets = list(range(1, EMIR_NBARS + 1))
    for idel in rectwv_coeff.missing_slitlets:
        list_valid_islitlets.remove(idel)
    logger.debug('Valid slitlet numbers:\n' + str(list_valid_islitlets))

    # ---

    # relevant wavelength calibration parameters for rectified and wavelength
    # calibrated image
    wv_parameters = set_wv_parameters(filter_name, grism_name)
    crpix1_enlarged = wv_parameters['crpix1_enlarged']
    crval1_enlarged = wv_parameters['crval1_enlarged']
    cdelt1_enlarged = wv_parameters['cdelt1_enlarged']
    naxis1_enlarged = wv_parameters['naxis1_enlarged']

    # initialize rectified and wavelength calibrated image
    naxis2_enlarged = EMIR_NBARS * EMIR_NPIXPERSLIT_RECTIFIED
    image2d_rectwv = np.zeros((naxis2_enlarged, naxis1_enlarged),
                              dtype='float32')

    # main loop
    logger.info('Applying rectification and wavelength calibration')
    logger.info('RectWaveCoeff uuid={}'.format(rectwv_coeff.uuid))
    cout = '0'
    for islitlet in range(1, EMIR_NBARS + 1):
        if islitlet in list_valid_islitlets:
            # define Slitlet2D object
            slt = Slitlet2D(islitlet=islitlet,
                            rectwv_coeff=rectwv_coeff,
                            debugplot=debugplot)

            # extract (distorted) slitlet from the initial image
            slitlet2d = slt.extract_slitlet2d(image2d)

            # rectify slitlet
            slitlet2d_rect = slt.rectify(slitlet2d,
                                         resampling=args_resampling)

            # wavelength calibration of the rectifed slitlet
            slitlet2d_rect_wv = resample_image2d_flux(
                image2d_orig=slitlet2d_rect,
                naxis1=naxis1_enlarged,
                cdelt1=cdelt1_enlarged,
                crval1=crval1_enlarged,
                crpix1=crpix1_enlarged,
                coeff=slt.wpoly
            )

            # minimum and maximum useful row in the full 2d rectified image
            # (starting from 0)
            i1 = slt.iminslt - 1
            i2 = slt.imaxslt

            # minimum and maximum scan in the rectified slitlet
            # (in pixels, from 1 to NAXIS2)
            ii1 = slt.min_row_rectified
            ii2 = slt.max_row_rectified + 1

            # save rectified slitlet in its corresponding location within
            # the full 2d rectified image
            image2d_rectwv[i1:i2, :] = slitlet2d_rect_wv[ii1:ii2, :]

            # include scan range in FITS header
            header['imnslt' + str(islitlet).zfill(2)] = \
                slt.iminslt, 'minimum Y pixel of useful slitlet region'
            header['imxslt' + str(islitlet).zfill(2)] = \
                slt.imaxslt, 'maximum Y pixel of useful slitlet region'

            # determine useful channel region in each spectrum and include
            # that information in FITS header
            jminslt = []
            jmaxslt = []
            for idum in range(ii1, ii2 + 1):
                jminmax = find_pix_borders(
                    slitlet2d_rect_wv[idum, :],
                    sought_value=0
                )
                if jminmax != (-1, naxis1_enlarged):
                    jminslt.append(jminmax[0])
                    jmaxslt.append(jminmax[1])
            if len(jminslt) > 0:
                slt.jminslt = min(jminslt) + 1
                slt.jmaxslt = max(jmaxslt) + 1
            header['jmnslt' + str(islitlet).zfill(2)] = \
                slt.jminslt, 'minimum X pixel of useful slitlet region'
            header['jmxslt' + str(islitlet).zfill(2)] = \
                slt.jmaxslt, 'maximum X pixel of useful slitlet region'
            cout += '.'
        else:
            # include scan and channel range in FITS header
            header['imnslt' + str(islitlet).zfill(2)] = \
                0, 'minimum Y pixel of useful slitlet region'
            header['imxslt' + str(islitlet).zfill(2)] = \
                0, 'maximum Y pixel of useful slitlet region'
            header['jmnslt' + str(islitlet).zfill(2)] = \
                0, 'minimum X pixel of useful slitlet region'
            header['jmxslt' + str(islitlet).zfill(2)] = \
                0, 'maximum X pixel of useful slitlet region'
            cout += 'i'
        if islitlet % 10 == 0:
            if cout != 'i':
                cout = str(islitlet // 10)
        logger.info(cout)

    # update wavelength calibration in FITS header
    logger.info('Updating image header')
    for keyword in ['crval1', 'crpix1', 'crval2', 'crpix2']:
        if keyword in header:
            header.remove(keyword)
    header['crpix1'] = (crpix1_enlarged, 'reference pixel')
    header['crval1'] = (crval1_enlarged, 'central wavelength at crpix1')
    header['cdelt1'] = (cdelt1_enlarged, 'linear dispersion (Angstrom/pixel)')
    header['cunit1'] = ('Angstrom', 'units along axis1')
    header['ctype1'] = 'WAVELENGTH'
    header['crpix2'] = (0.0, 'reference pixel')
    header['crval2'] = (0.0, 'central value at crpix2')
    header['cdelt2'] = (1.0, 'increment')
    header['ctype2'] = 'PIXEL'
    header['cunit2'] = ('Pixel', 'units along axis2')
    for keyword in ['cd1_1', 'cd1_2', 'cd2_1', 'cd2_2',
                    'PCD1_1', 'PCD1_2', 'PCD2_1', 'PCD2_2',
                    'PCRPIX1', 'PCRPIX2']:
        if keyword in header:
            header.remove(keyword)

    # update history in FITS header
    header['history'] = 'Boundary parameters uuid:' + \
                        rectwv_coeff.meta_info['origin']['bound_param'][4:]
    if 'master_rectwv' in rectwv_coeff.meta_info['origin']:
        header['history'] = \
            'MasterRectWave uuid:' + \
            rectwv_coeff.meta_info['origin']['master_rectwv'][4:]
    header['history'] = 'RectWaveCoeff uuid:' + rectwv_coeff.uuid
    header['history'] = 'Rectification and wavelength calibration time ' \
                        + datetime.now().isoformat()

    logger.info('Generating rectified and wavelength calibrated image')
    rectwv_image = fits.PrimaryHDU(data=image2d_rectwv, header=header)
    return fits.HDUList([rectwv_image])
python
def apply_rectwv_coeff(reduced_image, rectwv_coeff, args_resampling=2,
                       args_ignore_dtu_configuration=True, debugplot=0):
    """Compute rectification and wavelength calibration coefficients.

    Parameters
    ----------
    reduced_image : HDUList object
        Image with preliminary basic reduction: bpm, bias, dark and
        flatfield.
    rectwv_coeff : RectWaveCoeff instance
        Rectification and wavelength calibration coefficients for the
        particular CSU configuration.
    args_resampling : int
        1: nearest neighbour, 2: flux preserving interpolation.
    args_ignore_dtu_configuration : bool
        If True, ignore differences in DTU configuration.
    debugplot : int
        Debugging level for messages and plots. For details see
        'numina.array.display.pause_debugplot.py'.

    Returns
    -------
    rectwv_image : HDUList object
        Rectified and wavelength calibrated image.

    """
    logger = logging.getLogger(__name__)

    # header and data array (use deepcopy to avoid modifying
    # reduced_image[0].header as a side effect)
    header = copy.deepcopy(reduced_image[0].header)
    image2d = reduced_image[0].data

    # apply global offsets
    image2d = apply_integer_offsets(
        image2d=image2d,
        offx=rectwv_coeff.global_integer_offset_x_pix,
        offy=rectwv_coeff.global_integer_offset_y_pix
    )

    # check grism and filter
    filter_name = header['filter']
    logger.info('Filter: ' + filter_name)
    if filter_name != rectwv_coeff.tags['filter']:
        raise ValueError('Filter name does not match!')
    grism_name = header['grism']
    logger.info('Grism: ' + grism_name)
    if grism_name != rectwv_coeff.tags['grism']:
        raise ValueError('Grism name does not match!')

    # read the DTU configuration from the image header
    dtu_conf = DtuConfiguration.define_from_header(header)
    # retrieve DTU configuration from RectWaveCoeff object
    dtu_conf_calib = DtuConfiguration.define_from_dictionary(
        rectwv_coeff.meta_info['dtu_configuration']
    )
    # check that the DTU configuration employed to obtain the calibration
    # corresponds to the DTU configuration in the input FITS file
    if dtu_conf != dtu_conf_calib:
        if args_ignore_dtu_configuration:
            logger.warning('DTU configuration differences found!')
        else:
            logger.warning('DTU configuration from image header:')
            logger.warning(dtu_conf)
            logger.warning('DTU configuration from master calibration:')
            logger.warning(dtu_conf_calib)
            raise ValueError("DTU configurations do not match!")
    else:
        logger.info('DTU configuration match!')

    # valid slitlet numbers
    list_valid_islitlets = list(range(1, EMIR_NBARS + 1))
    for idel in rectwv_coeff.missing_slitlets:
        list_valid_islitlets.remove(idel)
    logger.debug('Valid slitlet numbers:\n' + str(list_valid_islitlets))

    # ---

    # relevant wavelength calibration parameters for rectified and wavelength
    # calibrated image
    wv_parameters = set_wv_parameters(filter_name, grism_name)
    crpix1_enlarged = wv_parameters['crpix1_enlarged']
    crval1_enlarged = wv_parameters['crval1_enlarged']
    cdelt1_enlarged = wv_parameters['cdelt1_enlarged']
    naxis1_enlarged = wv_parameters['naxis1_enlarged']

    # initialize rectified and wavelength calibrated image
    naxis2_enlarged = EMIR_NBARS * EMIR_NPIXPERSLIT_RECTIFIED
    image2d_rectwv = np.zeros((naxis2_enlarged, naxis1_enlarged),
                              dtype='float32')

    # main loop
    logger.info('Applying rectification and wavelength calibration')
    logger.info('RectWaveCoeff uuid={}'.format(rectwv_coeff.uuid))
    cout = '0'
    for islitlet in range(1, EMIR_NBARS + 1):
        if islitlet in list_valid_islitlets:
            # define Slitlet2D object
            slt = Slitlet2D(islitlet=islitlet,
                            rectwv_coeff=rectwv_coeff,
                            debugplot=debugplot)

            # extract (distorted) slitlet from the initial image
            slitlet2d = slt.extract_slitlet2d(image2d)

            # rectify slitlet
            slitlet2d_rect = slt.rectify(slitlet2d,
                                         resampling=args_resampling)

            # wavelength calibration of the rectifed slitlet
            slitlet2d_rect_wv = resample_image2d_flux(
                image2d_orig=slitlet2d_rect,
                naxis1=naxis1_enlarged,
                cdelt1=cdelt1_enlarged,
                crval1=crval1_enlarged,
                crpix1=crpix1_enlarged,
                coeff=slt.wpoly
            )

            # minimum and maximum useful row in the full 2d rectified image
            # (starting from 0)
            i1 = slt.iminslt - 1
            i2 = slt.imaxslt

            # minimum and maximum scan in the rectified slitlet
            # (in pixels, from 1 to NAXIS2)
            ii1 = slt.min_row_rectified
            ii2 = slt.max_row_rectified + 1

            # save rectified slitlet in its corresponding location within
            # the full 2d rectified image
            image2d_rectwv[i1:i2, :] = slitlet2d_rect_wv[ii1:ii2, :]

            # include scan range in FITS header
            header['imnslt' + str(islitlet).zfill(2)] = \
                slt.iminslt, 'minimum Y pixel of useful slitlet region'
            header['imxslt' + str(islitlet).zfill(2)] = \
                slt.imaxslt, 'maximum Y pixel of useful slitlet region'

            # determine useful channel region in each spectrum and include
            # that information in FITS header
            jminslt = []
            jmaxslt = []
            for idum in range(ii1, ii2 + 1):
                jminmax = find_pix_borders(
                    slitlet2d_rect_wv[idum, :],
                    sought_value=0
                )
                if jminmax != (-1, naxis1_enlarged):
                    jminslt.append(jminmax[0])
                    jmaxslt.append(jminmax[1])
            if len(jminslt) > 0:
                slt.jminslt = min(jminslt) + 1
                slt.jmaxslt = max(jmaxslt) + 1
            header['jmnslt' + str(islitlet).zfill(2)] = \
                slt.jminslt, 'minimum X pixel of useful slitlet region'
            header['jmxslt' + str(islitlet).zfill(2)] = \
                slt.jmaxslt, 'maximum X pixel of useful slitlet region'
            cout += '.'
        else:
            # include scan and channel range in FITS header
            header['imnslt' + str(islitlet).zfill(2)] = \
                0, 'minimum Y pixel of useful slitlet region'
            header['imxslt' + str(islitlet).zfill(2)] = \
                0, 'maximum Y pixel of useful slitlet region'
            header['jmnslt' + str(islitlet).zfill(2)] = \
                0, 'minimum X pixel of useful slitlet region'
            header['jmxslt' + str(islitlet).zfill(2)] = \
                0, 'maximum X pixel of useful slitlet region'
            cout += 'i'
        if islitlet % 10 == 0:
            if cout != 'i':
                cout = str(islitlet // 10)
        logger.info(cout)

    # update wavelength calibration in FITS header
    logger.info('Updating image header')
    for keyword in ['crval1', 'crpix1', 'crval2', 'crpix2']:
        if keyword in header:
            header.remove(keyword)
    header['crpix1'] = (crpix1_enlarged, 'reference pixel')
    header['crval1'] = (crval1_enlarged, 'central wavelength at crpix1')
    header['cdelt1'] = (cdelt1_enlarged, 'linear dispersion (Angstrom/pixel)')
    header['cunit1'] = ('Angstrom', 'units along axis1')
    header['ctype1'] = 'WAVELENGTH'
    header['crpix2'] = (0.0, 'reference pixel')
    header['crval2'] = (0.0, 'central value at crpix2')
    header['cdelt2'] = (1.0, 'increment')
    header['ctype2'] = 'PIXEL'
    header['cunit2'] = ('Pixel', 'units along axis2')
    for keyword in ['cd1_1', 'cd1_2', 'cd2_1', 'cd2_2',
                    'PCD1_1', 'PCD1_2', 'PCD2_1', 'PCD2_2',
                    'PCRPIX1', 'PCRPIX2']:
        if keyword in header:
            header.remove(keyword)

    # update history in FITS header
    header['history'] = 'Boundary parameters uuid:' + \
                        rectwv_coeff.meta_info['origin']['bound_param'][4:]
    if 'master_rectwv' in rectwv_coeff.meta_info['origin']:
        header['history'] = \
            'MasterRectWave uuid:' + \
            rectwv_coeff.meta_info['origin']['master_rectwv'][4:]
    header['history'] = 'RectWaveCoeff uuid:' + rectwv_coeff.uuid
    header['history'] = 'Rectification and wavelength calibration time ' \
                        + datetime.now().isoformat()

    logger.info('Generating rectified and wavelength calibrated image')
    rectwv_image = fits.PrimaryHDU(data=image2d_rectwv, header=header)
    return fits.HDUList([rectwv_image])
[ "def", "apply_rectwv_coeff", "(", "reduced_image", ",", "rectwv_coeff", ",", "args_resampling", "=", "2", ",", "args_ignore_dtu_configuration", "=", "True", ",", "debugplot", "=", "0", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "__name__", ")", "# header and data array (use deepcopy to avoid modifying", "# reduced_image[0].header as a side effect)", "header", "=", "copy", ".", "deepcopy", "(", "reduced_image", "[", "0", "]", ".", "header", ")", "image2d", "=", "reduced_image", "[", "0", "]", ".", "data", "# apply global offsets", "image2d", "=", "apply_integer_offsets", "(", "image2d", "=", "image2d", ",", "offx", "=", "rectwv_coeff", ".", "global_integer_offset_x_pix", ",", "offy", "=", "rectwv_coeff", ".", "global_integer_offset_y_pix", ")", "# check grism and filter", "filter_name", "=", "header", "[", "'filter'", "]", "logger", ".", "info", "(", "'Filter: '", "+", "filter_name", ")", "if", "filter_name", "!=", "rectwv_coeff", ".", "tags", "[", "'filter'", "]", ":", "raise", "ValueError", "(", "'Filter name does not match!'", ")", "grism_name", "=", "header", "[", "'grism'", "]", "logger", ".", "info", "(", "'Grism: '", "+", "grism_name", ")", "if", "grism_name", "!=", "rectwv_coeff", ".", "tags", "[", "'grism'", "]", ":", "raise", "ValueError", "(", "'Grism name does not match!'", ")", "# read the DTU configuration from the image header", "dtu_conf", "=", "DtuConfiguration", ".", "define_from_header", "(", "header", ")", "# retrieve DTU configuration from RectWaveCoeff object", "dtu_conf_calib", "=", "DtuConfiguration", ".", "define_from_dictionary", "(", "rectwv_coeff", ".", "meta_info", "[", "'dtu_configuration'", "]", ")", "# check that the DTU configuration employed to obtain the calibration", "# corresponds to the DTU configuration in the input FITS file", "if", "dtu_conf", "!=", "dtu_conf_calib", ":", "if", "args_ignore_dtu_configuration", ":", "logger", ".", "warning", "(", "'DTU configuration differences found!'", ")", "else", ":", "logger", ".", "warning", "(", "'DTU configuration from image header:'", ")", "logger", ".", "warning", "(", "dtu_conf", ")", "logger", ".", "warning", "(", "'DTU configuration from master calibration:'", ")", "logger", ".", "warning", "(", "dtu_conf_calib", ")", "raise", "ValueError", "(", "\"DTU configurations do not match!\"", ")", "else", ":", "logger", ".", "info", "(", "'DTU configuration match!'", ")", "# valid slitlet numbers", "list_valid_islitlets", "=", "list", "(", "range", "(", "1", ",", "EMIR_NBARS", "+", "1", ")", ")", "for", "idel", "in", "rectwv_coeff", ".", "missing_slitlets", ":", "list_valid_islitlets", ".", "remove", "(", "idel", ")", "logger", ".", "debug", "(", "'Valid slitlet numbers:\\n'", "+", "str", "(", "list_valid_islitlets", ")", ")", "# ---", "# relevant wavelength calibration parameters for rectified and wavelength", "# calibrated image", "wv_parameters", "=", "set_wv_parameters", "(", "filter_name", ",", "grism_name", ")", "crpix1_enlarged", "=", "wv_parameters", "[", "'crpix1_enlarged'", "]", "crval1_enlarged", "=", "wv_parameters", "[", "'crval1_enlarged'", "]", "cdelt1_enlarged", "=", "wv_parameters", "[", "'cdelt1_enlarged'", "]", "naxis1_enlarged", "=", "wv_parameters", "[", "'naxis1_enlarged'", "]", "# initialize rectified and wavelength calibrated image", "naxis2_enlarged", "=", "EMIR_NBARS", "*", "EMIR_NPIXPERSLIT_RECTIFIED", "image2d_rectwv", "=", "np", ".", "zeros", "(", "(", "naxis2_enlarged", ",", "naxis1_enlarged", ")", ",", "dtype", "=", "'float32'", ")", "# main loop", "logger", ".", 
"info", "(", "'Applying rectification and wavelength calibration'", ")", "logger", ".", "info", "(", "'RectWaveCoeff uuid={}'", ".", "format", "(", "rectwv_coeff", ".", "uuid", ")", ")", "cout", "=", "'0'", "for", "islitlet", "in", "range", "(", "1", ",", "EMIR_NBARS", "+", "1", ")", ":", "if", "islitlet", "in", "list_valid_islitlets", ":", "# define Slitlet2D object", "slt", "=", "Slitlet2D", "(", "islitlet", "=", "islitlet", ",", "rectwv_coeff", "=", "rectwv_coeff", ",", "debugplot", "=", "debugplot", ")", "# extract (distorted) slitlet from the initial image", "slitlet2d", "=", "slt", ".", "extract_slitlet2d", "(", "image2d", ")", "# rectify slitlet", "slitlet2d_rect", "=", "slt", ".", "rectify", "(", "slitlet2d", ",", "resampling", "=", "args_resampling", ")", "# wavelength calibration of the rectifed slitlet", "slitlet2d_rect_wv", "=", "resample_image2d_flux", "(", "image2d_orig", "=", "slitlet2d_rect", ",", "naxis1", "=", "naxis1_enlarged", ",", "cdelt1", "=", "cdelt1_enlarged", ",", "crval1", "=", "crval1_enlarged", ",", "crpix1", "=", "crpix1_enlarged", ",", "coeff", "=", "slt", ".", "wpoly", ")", "# minimum and maximum useful row in the full 2d rectified image", "# (starting from 0)", "i1", "=", "slt", ".", "iminslt", "-", "1", "i2", "=", "slt", ".", "imaxslt", "# minimum and maximum scan in the rectified slitlet", "# (in pixels, from 1 to NAXIS2)", "ii1", "=", "slt", ".", "min_row_rectified", "ii2", "=", "slt", ".", "max_row_rectified", "+", "1", "# save rectified slitlet in its corresponding location within", "# the full 2d rectified image", "image2d_rectwv", "[", "i1", ":", "i2", ",", ":", "]", "=", "slitlet2d_rect_wv", "[", "ii1", ":", "ii2", ",", ":", "]", "# include scan range in FITS header", "header", "[", "'imnslt'", "+", "str", "(", "islitlet", ")", ".", "zfill", "(", "2", ")", "]", "=", "slt", ".", "iminslt", ",", "'minimum Y pixel of useful slitlet region'", "header", "[", "'imxslt'", "+", "str", "(", "islitlet", ")", ".", "zfill", "(", "2", ")", "]", "=", "slt", ".", "imaxslt", ",", "'maximum Y pixel of useful slitlet region'", "# determine useful channel region in each spectrum and include", "# that information in FITS header", "jminslt", "=", "[", "]", "jmaxslt", "=", "[", "]", "for", "idum", "in", "range", "(", "ii1", ",", "ii2", "+", "1", ")", ":", "jminmax", "=", "find_pix_borders", "(", "slitlet2d_rect_wv", "[", "idum", ",", ":", "]", ",", "sought_value", "=", "0", ")", "if", "jminmax", "!=", "(", "-", "1", ",", "naxis1_enlarged", ")", ":", "jminslt", ".", "append", "(", "jminmax", "[", "0", "]", ")", "jmaxslt", ".", "append", "(", "jminmax", "[", "1", "]", ")", "if", "len", "(", "jminslt", ")", ">", "0", ":", "slt", ".", "jminslt", "=", "min", "(", "jminslt", ")", "+", "1", "slt", ".", "jmaxslt", "=", "max", "(", "jmaxslt", ")", "+", "1", "header", "[", "'jmnslt'", "+", "str", "(", "islitlet", ")", ".", "zfill", "(", "2", ")", "]", "=", "slt", ".", "jminslt", ",", "'minimum X pixel of useful slitlet region'", "header", "[", "'jmxslt'", "+", "str", "(", "islitlet", ")", ".", "zfill", "(", "2", ")", "]", "=", "slt", ".", "jmaxslt", ",", "'maximum X pixel of useful slitlet region'", "cout", "+=", "'.'", "else", ":", "# include scan and channel range in FITS header", "header", "[", "'imnslt'", "+", "str", "(", "islitlet", ")", ".", "zfill", "(", "2", ")", "]", "=", "0", ",", "'minimum Y pixel of useful slitlet region'", "header", "[", "'imxslt'", "+", "str", "(", "islitlet", ")", ".", "zfill", "(", "2", ")", "]", "=", "0", ",", "'maximum Y pixel of useful slitlet 
region'", "header", "[", "'jmnslt'", "+", "str", "(", "islitlet", ")", ".", "zfill", "(", "2", ")", "]", "=", "0", ",", "'minimum X pixel of useful slitlet region'", "header", "[", "'jmxslt'", "+", "str", "(", "islitlet", ")", ".", "zfill", "(", "2", ")", "]", "=", "0", ",", "'maximum X pixel of useful slitlet region'", "cout", "+=", "'i'", "if", "islitlet", "%", "10", "==", "0", ":", "if", "cout", "!=", "'i'", ":", "cout", "=", "str", "(", "islitlet", "//", "10", ")", "logger", ".", "info", "(", "cout", ")", "# update wavelength calibration in FITS header", "logger", ".", "info", "(", "'Updating image header'", ")", "for", "keyword", "in", "[", "'crval1'", ",", "'crpix1'", ",", "'crval2'", ",", "'crpix2'", "]", ":", "if", "keyword", "in", "header", ":", "header", ".", "remove", "(", "keyword", ")", "header", "[", "'crpix1'", "]", "=", "(", "crpix1_enlarged", ",", "'reference pixel'", ")", "header", "[", "'crval1'", "]", "=", "(", "crval1_enlarged", ",", "'central wavelength at crpix1'", ")", "header", "[", "'cdelt1'", "]", "=", "(", "cdelt1_enlarged", ",", "'linear dispersion (Angstrom/pixel)'", ")", "header", "[", "'cunit1'", "]", "=", "(", "'Angstrom'", ",", "'units along axis1'", ")", "header", "[", "'ctype1'", "]", "=", "'WAVELENGTH'", "header", "[", "'crpix2'", "]", "=", "(", "0.0", ",", "'reference pixel'", ")", "header", "[", "'crval2'", "]", "=", "(", "0.0", ",", "'central value at crpix2'", ")", "header", "[", "'cdelt2'", "]", "=", "(", "1.0", ",", "'increment'", ")", "header", "[", "'ctype2'", "]", "=", "'PIXEL'", "header", "[", "'cunit2'", "]", "=", "(", "'Pixel'", ",", "'units along axis2'", ")", "for", "keyword", "in", "[", "'cd1_1'", ",", "'cd1_2'", ",", "'cd2_1'", ",", "'cd2_2'", ",", "'PCD1_1'", ",", "'PCD1_2'", ",", "'PCD2_1'", ",", "'PCD2_2'", ",", "'PCRPIX1'", ",", "'PCRPIX2'", "]", ":", "if", "keyword", "in", "header", ":", "header", ".", "remove", "(", "keyword", ")", "# update history in FITS header", "header", "[", "'history'", "]", "=", "'Boundary parameters uuid:'", "+", "rectwv_coeff", ".", "meta_info", "[", "'origin'", "]", "[", "'bound_param'", "]", "[", "4", ":", "]", "if", "'master_rectwv'", "in", "rectwv_coeff", ".", "meta_info", "[", "'origin'", "]", ":", "header", "[", "'history'", "]", "=", "'MasterRectWave uuid:'", "+", "rectwv_coeff", ".", "meta_info", "[", "'origin'", "]", "[", "'master_rectwv'", "]", "[", "4", ":", "]", "header", "[", "'history'", "]", "=", "'RectWaveCoeff uuid:'", "+", "rectwv_coeff", ".", "uuid", "header", "[", "'history'", "]", "=", "'Rectification and wavelength calibration time '", "+", "datetime", ".", "now", "(", ")", ".", "isoformat", "(", ")", "logger", ".", "info", "(", "'Generating rectified and wavelength calibrated image'", ")", "rectwv_image", "=", "fits", ".", "PrimaryHDU", "(", "data", "=", "image2d_rectwv", ",", "header", "=", "header", ")", "return", "fits", ".", "HDUList", "(", "[", "rectwv_image", "]", ")" ]
Compute rectification and wavelength calibration coefficients.

Parameters
----------
reduced_image : HDUList object
    Image with preliminary basic reduction: bpm, bias, dark and
    flatfield.
rectwv_coeff : RectWaveCoeff instance
    Rectification and wavelength calibration coefficients for the
    particular CSU configuration.
args_resampling : int
    1: nearest neighbour, 2: flux preserving interpolation.
args_ignore_dtu_configuration : bool
    If True, ignore differences in DTU configuration.
debugplot : int
    Debugging level for messages and plots. For details see
    'numina.array.display.pause_debugplot.py'.

Returns
-------
rectwv_image : HDUList object
    Rectified and wavelength calibrated image.
[ "Compute", "rectification", "and", "wavelength", "calibration", "coefficients", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/wavecal/apply_rectwv_coeff.py#L48-L273
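Note: as orientation for the record above, the sketch below shows how the per-slitlet keywords written by apply_rectwv_coeff could be read back from the output FITS file. This is an illustration only; the file name is hypothetical and EMIR_NBARS is assumed to be 55 (the number of CSU bars in EMIR).

from astropy.io import fits

EMIR_NBARS = 55  # assumed value; pyemir defines this constant itself

with fits.open('rectwv_result.fits') as hdul:  # hypothetical file name
    header = hdul[0].header
    for islitlet in range(1, EMIR_NBARS + 1):
        num = str(islitlet).zfill(2)
        imin = header['imnslt' + num]  # minimum useful Y pixel (0 for missing slitlets)
        imax = header['imxslt' + num]  # maximum useful Y pixel
        if imax > 0:
            print('slitlet {}: useful rows {}-{}'.format(num, imin, imax))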
Jaymon/prom
prom/interface/base.py
Connection.transaction_start
def transaction_start(self, name):
        """
        start a transaction

        this will increment transaction semaphore and pass it to _transaction_start()
        """
        if not name:
            raise ValueError("Transaction name cannot be empty")

        #uid = id(self)
        self.transaction_count += 1
        logger.debug("{}. Start transaction {}".format(self.transaction_count, name))
        if self.transaction_count == 1:
            self._transaction_start()
        else:
            self._transaction_started(name)

        return self.transaction_count
python
def transaction_start(self, name):
        """
        start a transaction

        this will increment transaction semaphore and pass it to _transaction_start()
        """
        if not name:
            raise ValueError("Transaction name cannot be empty")

        #uid = id(self)
        self.transaction_count += 1
        logger.debug("{}. Start transaction {}".format(self.transaction_count, name))
        if self.transaction_count == 1:
            self._transaction_start()
        else:
            self._transaction_started(name)

        return self.transaction_count
[ "def", "transaction_start", "(", "self", ",", "name", ")", ":", "if", "not", "name", ":", "raise", "ValueError", "(", "\"Transaction name cannot be empty\"", ")", "#uid = id(self)", "self", ".", "transaction_count", "+=", "1", "logger", ".", "debug", "(", "\"{}. Start transaction {}\"", ".", "format", "(", "self", ".", "transaction_count", ",", "name", ")", ")", "if", "self", ".", "transaction_count", "==", "1", ":", "self", ".", "_transaction_start", "(", ")", "else", ":", "self", ".", "_transaction_started", "(", "name", ")", "return", "self", ".", "transaction_count" ]
start a transaction

this will increment transaction semaphore and pass it to _transaction_start()
[ "start", "a", "transaction" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L43-L60
Jaymon/prom
prom/interface/base.py
Connection.transaction_stop
def transaction_stop(self):
        """stop/commit a transaction if ready"""
        if self.transaction_count > 0:
            logger.debug("{}. Stop transaction".format(self.transaction_count))
            if self.transaction_count == 1:
                self._transaction_stop()

            self.transaction_count -= 1

        return self.transaction_count
python
def transaction_stop(self):
        """stop/commit a transaction if ready"""
        if self.transaction_count > 0:
            logger.debug("{}. Stop transaction".format(self.transaction_count))
            if self.transaction_count == 1:
                self._transaction_stop()

            self.transaction_count -= 1

        return self.transaction_count
[ "def", "transaction_stop", "(", "self", ")", ":", "if", "self", ".", "transaction_count", ">", "0", ":", "logger", ".", "debug", "(", "\"{}. Stop transaction\"", ".", "format", "(", "self", ".", "transaction_count", ")", ")", "if", "self", ".", "transaction_count", "==", "1", ":", "self", ".", "_transaction_stop", "(", ")", "self", ".", "transaction_count", "-=", "1", "return", "self", ".", "transaction_count" ]
stop/commit a transaction if ready
[ "stop", "/", "commit", "a", "transaction", "if", "ready" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L66-L75
Jaymon/prom
prom/interface/base.py
Connection.transaction_fail
def transaction_fail(self, name):
        """
        rollback a transaction if currently in one

        e -- Exception() -- if passed in, bubble up the exception by re-raising it
        """
        if not name:
            raise ValueError("Transaction name cannot be empty")

        if self.transaction_count > 0:
            logger.debug("{}. Failing transaction {}".format(self.transaction_count, name))
            if self.transaction_count == 1:
                self._transaction_fail()
            else:
                self._transaction_failing(name)

            self.transaction_count -= 1
python
def transaction_fail(self, name):
        """
        rollback a transaction if currently in one

        e -- Exception() -- if passed in, bubble up the exception by re-raising it
        """
        if not name:
            raise ValueError("Transaction name cannot be empty")

        if self.transaction_count > 0:
            logger.debug("{}. Failing transaction {}".format(self.transaction_count, name))
            if self.transaction_count == 1:
                self._transaction_fail()
            else:
                self._transaction_failing(name)

            self.transaction_count -= 1
[ "def", "transaction_fail", "(", "self", ",", "name", ")", ":", "if", "not", "name", ":", "raise", "ValueError", "(", "\"Transaction name cannot be empty\"", ")", "if", "self", ".", "transaction_count", ">", "0", ":", "logger", ".", "debug", "(", "\"{}. Failing transaction {}\"", ".", "format", "(", "self", ".", "transaction_count", ",", "name", ")", ")", "if", "self", ".", "transaction_count", "==", "1", ":", "self", ".", "_transaction_fail", "(", ")", "else", ":", "self", ".", "_transaction_failing", "(", "name", ")", "self", ".", "transaction_count", "-=", "1" ]
rollback a transaction if currently in one

e -- Exception() -- if passed in, bubble up the exception by re-raising it
[ "rollback", "a", "transaction", "if", "currently", "in", "one" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L79-L95
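Note: the three Connection methods above implement one pattern: a counter so that only the outermost start/stop/fail touches the real transaction, while inner levels are delegated to _transaction_started()/_transaction_failing(). Below is a standalone sketch of that pattern using sqlite3 savepoints for the nested case; it is an illustration of the idea, not prom's actual backend code, and a fuller version would also RELEASE savepoints on success.

import sqlite3

class CountingConnection(object):
    def __init__(self, path=':memory:'):
        self.conn = sqlite3.connect(path, isolation_level=None)
        self.transaction_count = 0

    def transaction_start(self, name):
        self.transaction_count += 1
        if self.transaction_count == 1:
            self.conn.execute('BEGIN')               # outermost: real transaction
        else:
            self.conn.execute('SAVEPOINT ' + name)   # nested: savepoint (name must be a valid identifier)
        return self.transaction_count

    def transaction_stop(self):
        if self.transaction_count > 0:
            if self.transaction_count == 1:
                self.conn.execute('COMMIT')          # only the outermost level commits
            self.transaction_count -= 1
        return self.transaction_count

    def transaction_fail(self, name):
        if self.transaction_count > 0:
            if self.transaction_count == 1:
                self.conn.execute('ROLLBACK')
            else:
                self.conn.execute('ROLLBACK TO SAVEPOINT ' + name)
            self.transaction_count -= 1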
Jaymon/prom
prom/interface/base.py
Interface.connect
def connect(self, connection_config=None, *args, **kwargs):
        """
        connect to the interface

        this will set the raw db connection to self.connection

        *args -- anything you want that will help the db connect
        **kwargs -- anything you want that the backend db connection will need to actually connect
        """
        if self.connected:
            return self.connected

        if connection_config:
            self.connection_config = connection_config

        self.connected = True
        try:
            self._connect(self.connection_config)
        except Exception as e:
            self.connected = False
            self.raise_error(e)

        self.log("Connected {}", self.connection_config.interface_name)
        return self.connected
python
def connect(self, connection_config=None, *args, **kwargs):
        """
        connect to the interface

        this will set the raw db connection to self.connection

        *args -- anything you want that will help the db connect
        **kwargs -- anything you want that the backend db connection will need to actually connect
        """
        if self.connected:
            return self.connected

        if connection_config:
            self.connection_config = connection_config

        self.connected = True
        try:
            self._connect(self.connection_config)
        except Exception as e:
            self.connected = False
            self.raise_error(e)

        self.log("Connected {}", self.connection_config.interface_name)
        return self.connected
[ "def", "connect", "(", "self", ",", "connection_config", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "connected", ":", "return", "self", ".", "connected", "if", "connection_config", ":", "self", ".", "connection_config", "=", "connection_config", "self", ".", "connected", "=", "True", "try", ":", "self", ".", "_connect", "(", "self", ".", "connection_config", ")", "except", "Exception", "as", "e", ":", "self", ".", "connected", "=", "False", "self", ".", "raise_error", "(", "e", ")", "self", ".", "log", "(", "\"Connected {}\"", ",", "self", ".", "connection_config", ".", "interface_name", ")", "return", "self", ".", "connected" ]
connect to the interface

this will set the raw db connection to self.connection

*args -- anything you want that will help the db connect
**kwargs -- anything you want that the backend db connection will need to actually connect
[ "connect", "to", "the", "interface" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L153-L175
Jaymon/prom
prom/interface/base.py
Interface.close
def close(self):
        """close an open connection"""
        if not self.connected:
            return True

        self._close()
        self.connected = False
        self.log("Closed Connection {}", self.connection_config.interface_name)
        return True
python
def close(self):
        """close an open connection"""
        if not self.connected:
            return True

        self._close()
        self.connected = False
        self.log("Closed Connection {}", self.connection_config.interface_name)
        return True
[ "def", "close", "(", "self", ")", ":", "if", "not", "self", ".", "connected", ":", "return", "True", "self", ".", "_close", "(", ")", "self", ".", "connected", "=", "False", "self", ".", "log", "(", "\"Closed Connection {}\"", ",", "self", ".", "connection_config", ".", "interface_name", ")", "return", "True" ]
close an open connection
[ "close", "an", "open", "connection" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L206-L213
Jaymon/prom
prom/interface/base.py
Interface.query
def query(self, query_str, *query_args, **query_options):
        """
        run a raw query on the db

        query_str -- string -- the query to run
        *query_args -- if the query_str is a formatting string, pass the values in this
        **query_options -- any query options can be passed in by using key=val syntax
        """
        with self.connection(**query_options) as connection:
            query_options['connection'] = connection
            return self._query(query_str, query_args, **query_options)
python
def query(self, query_str, *query_args, **query_options):
        """
        run a raw query on the db

        query_str -- string -- the query to run
        *query_args -- if the query_str is a formatting string, pass the values in this
        **query_options -- any query options can be passed in by using key=val syntax
        """
        with self.connection(**query_options) as connection:
            query_options['connection'] = connection
            return self._query(query_str, query_args, **query_options)
[ "def", "query", "(", "self", ",", "query_str", ",", "*", "query_args", ",", "*", "*", "query_options", ")", ":", "with", "self", ".", "connection", "(", "*", "*", "query_options", ")", "as", "connection", ":", "query_options", "[", "'connection'", "]", "=", "connection", "return", "self", ".", "_query", "(", "query_str", ",", "query_args", ",", "*", "*", "query_options", ")" ]
run a raw query on the db

query_str -- string -- the query to run
*query_args -- if the query_str is a formatting string, pass the values in this
**query_options -- any query options can be passed in by using key=val syntax
[ "run", "a", "raw", "query", "on", "the", "db" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L217-L227
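Note: a hedged usage sketch for the raw-query wrapper above; 'interface' stands for any connected Interface instance, and the table name and placeholder style are hypothetical (they depend on the configured backend).

# fetch all matching rows (placeholder style here is psycopg2's)
rows = interface.query("SELECT * FROM foo WHERE bar = %s", "some value")

# options recognized by the backend _query() can be passed as keywords
one = interface.query("SELECT * FROM foo LIMIT 1", fetchone=True)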
Jaymon/prom
prom/interface/base.py
Interface.transaction
def transaction(self, connection=None, **kwargs):
        """
        a simple context manager useful for when you want to wrap a bunch of db calls in a transaction
        http://docs.python.org/2/library/contextlib.html
        http://docs.python.org/release/2.5/whatsnew/pep-343.html

        example --
            with self.transaction()
                # do a bunch of calls
            # those db calls will be committed by this line
        """
        with self.connection(connection) as connection:
            name = connection.transaction_name()
            connection.transaction_start(name)
            try:
                yield connection
                connection.transaction_stop()
            except Exception as e:
                connection.transaction_fail(name)
                self.raise_error(e)
python
def transaction(self, connection=None, **kwargs):
        """
        a simple context manager useful for when you want to wrap a bunch of db calls in a transaction
        http://docs.python.org/2/library/contextlib.html
        http://docs.python.org/release/2.5/whatsnew/pep-343.html

        example --
            with self.transaction()
                # do a bunch of calls
            # those db calls will be committed by this line
        """
        with self.connection(connection) as connection:
            name = connection.transaction_name()
            connection.transaction_start(name)
            try:
                yield connection
                connection.transaction_stop()
            except Exception as e:
                connection.transaction_fail(name)
                self.raise_error(e)
[ "def", "transaction", "(", "self", ",", "connection", "=", "None", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "connection", "(", "connection", ")", "as", "connection", ":", "name", "=", "connection", ".", "transaction_name", "(", ")", "connection", ".", "transaction_start", "(", "name", ")", "try", ":", "yield", "connection", "connection", ".", "transaction_stop", "(", ")", "except", "Exception", "as", "e", ":", "connection", ".", "transaction_fail", "(", "name", ")", "self", ".", "raise_error", "(", "e", ")" ]
a simple context manager useful for when you want to wrap a bunch of db calls in a transaction
http://docs.python.org/2/library/contextlib.html
http://docs.python.org/release/2.5/whatsnew/pep-343.html

example --
    with self.transaction()
        # do a bunch of calls
    # those db calls will be committed by this line
[ "a", "simple", "context", "manager", "useful", "for", "when", "you", "want", "to", "wrap", "a", "bunch", "of", "db", "calls", "in", "a", "transaction", "http", ":", "//", "docs", ".", "python", ".", "org", "/", "2", "/", "library", "/", "contextlib", ".", "html", "http", ":", "//", "docs", ".", "python", ".", "org", "/", "release", "/", "2", ".", "5", "/", "whatsnew", "/", "pep", "-", "343", ".", "html" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L233-L253
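Note: fleshing out the docstring's example, a hedged usage sketch of the context manager above; 'interface', 'schema' and the field values are hypothetical.

with interface.transaction() as connection:
    interface.insert(schema, {"foo": 1}, connection=connection)
    interface.insert(schema, {"foo": 2}, connection=connection)
# reaching the end of the block commits both inserts; an exception inside
# the block triggers transaction_fail() and is then re-raised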
Jaymon/prom
prom/interface/base.py
Interface.set_table
def set_table(self, schema, **kwargs):
        """
        add the table to the db

        schema -- Schema() -- contains all the information about the table
        """
        with self.connection(**kwargs) as connection:
            kwargs['connection'] = connection
            if self.has_table(str(schema), **kwargs):
                return True

            try:
                with self.transaction(**kwargs):
                    self._set_table(schema, **kwargs)

                    for index_name, index in schema.indexes.items():
                        self.set_index(
                            schema,
                            name=index.name,
                            fields=index.fields,
                            connection=connection,
                            **index.options
                        )
            except InterfaceError:
                # check to see if this table now exists, it might have been created
                # in another thread
                if not self.has_table(schema, **kwargs):
                    raise
python
def set_table(self, schema, **kwargs):
        """
        add the table to the db

        schema -- Schema() -- contains all the information about the table
        """
        with self.connection(**kwargs) as connection:
            kwargs['connection'] = connection
            if self.has_table(str(schema), **kwargs):
                return True

            try:
                with self.transaction(**kwargs):
                    self._set_table(schema, **kwargs)

                    for index_name, index in schema.indexes.items():
                        self.set_index(
                            schema,
                            name=index.name,
                            fields=index.fields,
                            connection=connection,
                            **index.options
                        )
            except InterfaceError:
                # check to see if this table now exists, it might have been created
                # in another thread
                if not self.has_table(schema, **kwargs):
                    raise
[ "def", "set_table", "(", "self", ",", "schema", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "connection", "(", "*", "*", "kwargs", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connection", "if", "self", ".", "has_table", "(", "str", "(", "schema", ")", ",", "*", "*", "kwargs", ")", ":", "return", "True", "try", ":", "with", "self", ".", "transaction", "(", "*", "*", "kwargs", ")", ":", "self", ".", "_set_table", "(", "schema", ",", "*", "*", "kwargs", ")", "for", "index_name", ",", "index", "in", "schema", ".", "indexes", ".", "items", "(", ")", ":", "self", ".", "set_index", "(", "schema", ",", "name", "=", "index", ".", "name", ",", "fields", "=", "index", ".", "fields", ",", "connection", "=", "connection", ",", "*", "*", "index", ".", "options", ")", "except", "InterfaceError", ":", "# check to see if this table now exists, it might have been created", "# in another thread", "if", "not", "self", ".", "has_table", "(", "schema", ",", "*", "*", "kwargs", ")", ":", "raise" ]
add the table to the db

schema -- Schema() -- contains all the information about the table
[ "add", "the", "table", "to", "the", "db" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L255-L282
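Note: a standalone sqlite3 sketch of the same create-table-then-indexes flow wrapped in a single transaction, so a failure cannot leave a table behind without its indexes (illustration only, not prom code; the table and index names are made up).

import sqlite3

conn = sqlite3.connect(':memory:')
with conn:  # commits on success, rolls back if anything below raises
    conn.execute('CREATE TABLE IF NOT EXISTS foo (_id INTEGER PRIMARY KEY, bar TEXT)')
    conn.execute('CREATE INDEX IF NOT EXISTS foo_bar ON foo (bar)')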
Jaymon/prom
prom/interface/base.py
Interface.has_table
def has_table(self, table_name, **kwargs):
        """
        check to see if a table is in the db

        table_name -- string -- the table to check
        return -- boolean -- True if the table exists, false otherwise
        """
        with self.connection(kwargs.get('connection', None)) as connection:
            kwargs['connection'] = connection
            tables = self.get_tables(table_name, **kwargs)
            return len(tables) > 0
python
def has_table(self, table_name, **kwargs):
        """
        check to see if a table is in the db

        table_name -- string -- the table to check
        return -- boolean -- True if the table exists, false otherwise
        """
        with self.connection(kwargs.get('connection', None)) as connection:
            kwargs['connection'] = connection
            tables = self.get_tables(table_name, **kwargs)
            return len(tables) > 0
[ "def", "has_table", "(", "self", ",", "table_name", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "connection", "(", "kwargs", ".", "get", "(", "'connection'", ",", "None", ")", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connection", "tables", "=", "self", ".", "get_tables", "(", "table_name", ",", "*", "*", "kwargs", ")", "return", "len", "(", "tables", ")", ">", "0" ]
check to see if a table is in the db

table_name -- string -- the table to check
return -- boolean -- True if the table exists, false otherwise
[ "check", "to", "see", "if", "a", "table", "is", "in", "the", "db" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L286-L296
Jaymon/prom
prom/interface/base.py
Interface.get_tables
def get_tables(self, table_name="", **kwargs):
        """
        get all the tables of the currently connected db

        table_name -- string -- if you would like to filter the tables list to only include matches with this name
        return -- list -- a list of table names
        """
        with self.connection(**kwargs) as connection:
            kwargs['connection'] = connection
            return self._get_tables(str(table_name), **kwargs)
python
def get_tables(self, table_name="", **kwargs):
        """
        get all the tables of the currently connected db

        table_name -- string -- if you would like to filter the tables list to only include matches with this name
        return -- list -- a list of table names
        """
        with self.connection(**kwargs) as connection:
            kwargs['connection'] = connection
            return self._get_tables(str(table_name), **kwargs)
[ "def", "get_tables", "(", "self", ",", "table_name", "=", "\"\"", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "connection", "(", "*", "*", "kwargs", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connection", "return", "self", ".", "_get_tables", "(", "str", "(", "table_name", ")", ",", "*", "*", "kwargs", ")" ]
get all the tables of the currently connected db

table_name -- string -- if you would like to filter the tables list to only include matches with this name
return -- list -- a list of table names
[ "get", "all", "the", "tables", "of", "the", "currently", "connected", "db" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L298-L307
Jaymon/prom
prom/interface/base.py
Interface.delete_table
def delete_table(self, schema, **kwargs):
        """
        remove a table matching schema from the db

        schema -- Schema()
        """
        with self.connection(**kwargs) as connection:
            kwargs['connection'] = connection
            if not self.has_table(str(schema), **kwargs):
                return True

            with self.transaction(**kwargs):
                self._delete_table(schema, **kwargs)

        return True
python
def delete_table(self, schema, **kwargs):
        """
        remove a table matching schema from the db

        schema -- Schema()
        """
        with self.connection(**kwargs) as connection:
            kwargs['connection'] = connection
            if not self.has_table(str(schema), **kwargs):
                return True

            with self.transaction(**kwargs):
                self._delete_table(schema, **kwargs)

        return True
[ "def", "delete_table", "(", "self", ",", "schema", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "connection", "(", "*", "*", "kwargs", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connection", "if", "not", "self", ".", "has_table", "(", "str", "(", "schema", ")", ",", "*", "*", "kwargs", ")", ":", "return", "True", "with", "self", ".", "transaction", "(", "*", "*", "kwargs", ")", ":", "self", ".", "_delete_table", "(", "schema", ",", "*", "*", "kwargs", ")", "return", "True" ]
remove a table matching schema from the db

schema -- Schema()
[ "remove", "a", "table", "matching", "schema", "from", "the", "db" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L311-L323
Jaymon/prom
prom/interface/base.py
Interface.delete_tables
def delete_tables(self, **kwargs):
        """
        removes all the tables from the db

        this is, obviously, very bad if you didn't mean to call this, because of that, you
        have to pass in disable_protection=True, if it doesn't get that passed in, it won't
        run this method
        """
        if not kwargs.get('disable_protection', False):
            raise ValueError('In order to delete all the tables, pass in disable_protection=True')

        with self.connection(**kwargs) as connection:
            kwargs['connection'] = connection
            self._delete_tables(**kwargs)
python
def delete_tables(self, **kwargs):
        """
        removes all the tables from the db

        this is, obviously, very bad if you didn't mean to call this, because of that, you
        have to pass in disable_protection=True, if it doesn't get that passed in, it won't
        run this method
        """
        if not kwargs.get('disable_protection', False):
            raise ValueError('In order to delete all the tables, pass in disable_protection=True')

        with self.connection(**kwargs) as connection:
            kwargs['connection'] = connection
            self._delete_tables(**kwargs)
[ "def", "delete_tables", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "not", "kwargs", ".", "get", "(", "'disable_protection'", ",", "False", ")", ":", "raise", "ValueError", "(", "'In order to delete all the tables, pass in disable_protection=True'", ")", "with", "self", ".", "connection", "(", "*", "*", "kwargs", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connection", "self", ".", "_delete_tables", "(", "*", "*", "kwargs", ")" ]
removes all the tables from the db

this is, obviously, very bad if you didn't mean to call this, because of that, you
have to pass in disable_protection=True, if it doesn't get that passed in, it won't
run this method
[ "removes", "all", "the", "tables", "from", "the", "db" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L327-L340
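Note: the guard above means the destructive call has to be opted into explicitly; a hedged usage sketch, where 'interface' is a hypothetical connected Interface instance.

interface.delete_tables(disable_protection=True)  # actually drops every table
interface.delete_tables()                         # raises ValueError before touching the db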
Jaymon/prom
prom/interface/base.py
Interface.get_indexes
def get_indexes(self, schema, **kwargs):
        """
        get all the indexes

        schema -- Schema()

        return -- dict -- the indexes in {indexname: fields} format
        """
        with self.connection(**kwargs) as connection:
            kwargs['connection'] = connection
            return self._get_indexes(schema, **kwargs)
python
def get_indexes(self, schema, **kwargs):
        """
        get all the indexes

        schema -- Schema()

        return -- dict -- the indexes in {indexname: fields} format
        """
        with self.connection(**kwargs) as connection:
            kwargs['connection'] = connection
            return self._get_indexes(schema, **kwargs)
[ "def", "get_indexes", "(", "self", ",", "schema", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "connection", "(", "*", "*", "kwargs", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connection", "return", "self", ".", "_get_indexes", "(", "schema", ",", "*", "*", "kwargs", ")" ]
get all the indexes

schema -- Schema()

return -- dict -- the indexes in {indexname: fields} format
[ "get", "all", "the", "indexes" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L351-L361
Jaymon/prom
prom/interface/base.py
Interface.set_index
def set_index(self, schema, name, fields, **index_options):
        """
        add an index to the table

        schema -- Schema()
        name -- string -- the name of the index
        fields -- array -- the fields the index should be on
        **index_options -- dict -- any index options that might be useful to create the index
        """
        with self.transaction(**index_options) as connection:
            index_options['connection'] = connection
            self._set_index(schema, name, fields, **index_options)

        return True
python
def set_index(self, schema, name, fields, **index_options):
        """
        add an index to the table

        schema -- Schema()
        name -- string -- the name of the index
        fields -- array -- the fields the index should be on
        **index_options -- dict -- any index options that might be useful to create the index
        """
        with self.transaction(**index_options) as connection:
            index_options['connection'] = connection
            self._set_index(schema, name, fields, **index_options)

        return True
[ "def", "set_index", "(", "self", ",", "schema", ",", "name", ",", "fields", ",", "*", "*", "index_options", ")", ":", "with", "self", ".", "transaction", "(", "*", "*", "index_options", ")", "as", "connection", ":", "index_options", "[", "'connection'", "]", "=", "connection", "self", ".", "_set_index", "(", "schema", ",", "name", ",", "fields", ",", "*", "*", "index_options", ")", "return", "True" ]
add an index to the table

schema -- Schema()
name -- string -- the name of the index
fields -- array -- the fields the index should be on
**index_options -- dict -- any index options that might be useful to create the index
[ "add", "an", "index", "to", "the", "table" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L365-L378
Jaymon/prom
prom/interface/base.py
Interface.insert
def insert(self, schema, fields, **kwargs):
        """
        Persist d into the db

        schema -- Schema()
        fields -- dict -- the values to persist

        return -- int -- the primary key of the row just inserted
        """
        r = 0
        with self.connection(**kwargs) as connection:
            kwargs['connection'] = connection
            try:
                with self.transaction(**kwargs):
                    r = self._insert(schema, fields, **kwargs)
            except Exception as e:
                exc_info = sys.exc_info()
                if self.handle_error(schema, e, **kwargs):
                    r = self._insert(schema, fields, **kwargs)
                else:
                    self.raise_error(e, exc_info)

        return r
python
def insert(self, schema, fields, **kwargs):
        """
        Persist d into the db

        schema -- Schema()
        fields -- dict -- the values to persist

        return -- int -- the primary key of the row just inserted
        """
        r = 0
        with self.connection(**kwargs) as connection:
            kwargs['connection'] = connection
            try:
                with self.transaction(**kwargs):
                    r = self._insert(schema, fields, **kwargs)
            except Exception as e:
                exc_info = sys.exc_info()
                if self.handle_error(schema, e, **kwargs):
                    r = self._insert(schema, fields, **kwargs)
                else:
                    self.raise_error(e, exc_info)

        return r
[ "def", "insert", "(", "self", ",", "schema", ",", "fields", ",", "*", "*", "kwargs", ")", ":", "r", "=", "0", "with", "self", ".", "connection", "(", "*", "*", "kwargs", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connection", "try", ":", "with", "self", ".", "transaction", "(", "*", "*", "kwargs", ")", ":", "r", "=", "self", ".", "_insert", "(", "schema", ",", "fields", ",", "*", "*", "kwargs", ")", "except", "Exception", "as", "e", ":", "exc_info", "=", "sys", ".", "exc_info", "(", ")", "if", "self", ".", "handle_error", "(", "schema", ",", "e", ",", "*", "*", "kwargs", ")", ":", "r", "=", "self", ".", "_insert", "(", "schema", ",", "fields", ",", "*", "*", "kwargs", ")", "else", ":", "self", ".", "raise_error", "(", "e", ",", "exc_info", ")", "return", "r" ]
Persist d into the db

schema -- Schema()
fields -- dict -- the values to persist

return -- int -- the primary key of the row just inserted
[ "Persist", "d", "into", "the", "db" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L384-L408
Jaymon/prom
prom/interface/base.py
Interface.update
def update(self, schema, fields, query, **kwargs):
        """
        Persist the query.fields into the db that match query.fields_where

        schema -- Schema()
        fields -- dict -- the values to persist
        query -- Query() -- will be used to create the where clause

        return -- int -- how many rows where updated
        """
        with self.connection(**kwargs) as connection:
            kwargs['connection'] = connection
            try:
                with self.transaction(**kwargs):
                    r = self._update(schema, fields, query, **kwargs)
            except Exception as e:
                exc_info = sys.exc_info()
                if self.handle_error(schema, e, **kwargs):
                    r = self._update(schema, fields, query, **kwargs)
                else:
                    self.raise_error(e, exc_info)

        return r
python
def update(self, schema, fields, query, **kwargs):
        """
        Persist the query.fields into the db that match query.fields_where

        schema -- Schema()
        fields -- dict -- the values to persist
        query -- Query() -- will be used to create the where clause

        return -- int -- how many rows where updated
        """
        with self.connection(**kwargs) as connection:
            kwargs['connection'] = connection
            try:
                with self.transaction(**kwargs):
                    r = self._update(schema, fields, query, **kwargs)
            except Exception as e:
                exc_info = sys.exc_info()
                if self.handle_error(schema, e, **kwargs):
                    r = self._update(schema, fields, query, **kwargs)
                else:
                    self.raise_error(e, exc_info)

        return r
[ "def", "update", "(", "self", ",", "schema", ",", "fields", ",", "query", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "connection", "(", "*", "*", "kwargs", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connection", "try", ":", "with", "self", ".", "transaction", "(", "*", "*", "kwargs", ")", ":", "r", "=", "self", ".", "_update", "(", "schema", ",", "fields", ",", "query", ",", "*", "*", "kwargs", ")", "except", "Exception", "as", "e", ":", "exc_info", "=", "sys", ".", "exc_info", "(", ")", "if", "self", ".", "handle_error", "(", "schema", ",", "e", ",", "*", "*", "kwargs", ")", ":", "r", "=", "self", ".", "_update", "(", "schema", ",", "fields", ",", "query", ",", "*", "*", "kwargs", ")", "else", ":", "self", ".", "raise_error", "(", "e", ",", "exc_info", ")", "return", "r" ]
Persist the query.fields into the db that match query.fields_where

schema -- Schema()
fields -- dict -- the values to persist
query -- Query() -- will be used to create the where clause

return -- int -- how many rows where updated
[ "Persist", "the", "query", ".", "fields", "into", "the", "db", "that", "match", "query", ".", "fields_where" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L413-L436
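Note: insert() and update() above share one shape: run once inside a transaction, give handle_error() a chance to fix the problem (for example by creating a missing table), then retry exactly once. A standalone sketch of that shape (illustration only, not prom code):

def run_with_single_retry(op, handle_error):
    try:
        return op()
    except Exception:
        if handle_error():  # e.g. create the missing table
            return op()     # one retry after the attempted fix
        raise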
Jaymon/prom
prom/interface/base.py
Interface._get_query
def _get_query(self, callback, schema, query=None, *args, **kwargs):
        """this is just a common wrapper around all the get queries since they are
        all really similar in how they execute"""
        if not query:
            query = Query()

        ret = None
        with self.connection(**kwargs) as connection:
            kwargs['connection'] = connection
            try:
                if connection.in_transaction():
                    # we wrap SELECT queries in a transaction if we are in a transaction because
                    # it could cause data loss if it failed by causing the db to discard
                    # anything in the current transaction if the query isn't wrapped,
                    # go ahead, ask me how I know this
                    with self.transaction(**kwargs):
                        ret = callback(schema, query, *args, **kwargs)
                else:
                    ret = callback(schema, query, *args, **kwargs)
            except Exception as e:
                exc_info = sys.exc_info()
                if self.handle_error(schema, e, **kwargs):
                    ret = callback(schema, query, *args, **kwargs)
                else:
                    self.raise_error(e, exc_info)

        return ret
python
def _get_query(self, callback, schema, query=None, *args, **kwargs):
        """this is just a common wrapper around all the get queries since they are
        all really similar in how they execute"""
        if not query:
            query = Query()

        ret = None
        with self.connection(**kwargs) as connection:
            kwargs['connection'] = connection
            try:
                if connection.in_transaction():
                    # we wrap SELECT queries in a transaction if we are in a transaction because
                    # it could cause data loss if it failed by causing the db to discard
                    # anything in the current transaction if the query isn't wrapped,
                    # go ahead, ask me how I know this
                    with self.transaction(**kwargs):
                        ret = callback(schema, query, *args, **kwargs)
                else:
                    ret = callback(schema, query, *args, **kwargs)
            except Exception as e:
                exc_info = sys.exc_info()
                if self.handle_error(schema, e, **kwargs):
                    ret = callback(schema, query, *args, **kwargs)
                else:
                    self.raise_error(e, exc_info)

        return ret
[ "def", "_get_query", "(", "self", ",", "callback", ",", "schema", ",", "query", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "query", ":", "query", "=", "Query", "(", ")", "ret", "=", "None", "with", "self", ".", "connection", "(", "*", "*", "kwargs", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connection", "try", ":", "if", "connection", ".", "in_transaction", "(", ")", ":", "# we wrap SELECT queries in a transaction if we are in a transaction because", "# it could cause data loss if it failed by causing the db to discard", "# anything in the current transaction if the query isn't wrapped,", "# go ahead, ask me how I know this", "with", "self", ".", "transaction", "(", "*", "*", "kwargs", ")", ":", "ret", "=", "callback", "(", "schema", ",", "query", ",", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "ret", "=", "callback", "(", "schema", ",", "query", ",", "*", "args", ",", "*", "*", "kwargs", ")", "except", "Exception", "as", "e", ":", "exc_info", "=", "sys", ".", "exc_info", "(", ")", "if", "self", ".", "handle_error", "(", "schema", ",", "e", ",", "*", "*", "kwargs", ")", ":", "ret", "=", "callback", "(", "schema", ",", "query", ",", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "self", ".", "raise_error", "(", "e", ",", "exc_info", ")", "return", "ret" ]
this is just a common wrapper around all the get queries since they are all really similar in how they execute
[ "this", "is", "just", "a", "common", "wrapper", "around", "all", "the", "get", "queries", "since", "they", "are", "all", "really", "similar", "in", "how", "they", "execute" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L441-L468
Jaymon/prom
prom/interface/base.py
Interface.get_one
def get_one(self, schema, query=None, **kwargs):
        """
        get one row from the db matching filters set in query

        schema -- Schema()
        query -- Query()

        return -- dict -- the matching row
        """
        ret = self._get_query(self._get_one, schema, query, **kwargs)
        if not ret:
            ret = {}
        return ret
python
def get_one(self, schema, query=None, **kwargs):
        """
        get one row from the db matching filters set in query

        schema -- Schema()
        query -- Query()

        return -- dict -- the matching row
        """
        ret = self._get_query(self._get_one, schema, query, **kwargs)
        if not ret:
            ret = {}
        return ret
[ "def", "get_one", "(", "self", ",", "schema", ",", "query", "=", "None", ",", "*", "*", "kwargs", ")", ":", "ret", "=", "self", ".", "_get_query", "(", "self", ".", "_get_one", ",", "schema", ",", "query", ",", "*", "*", "kwargs", ")", "if", "not", "ret", ":", "ret", "=", "{", "}", "return", "ret" ]
get one row from the db matching filters set in query

schema -- Schema()
query -- Query()

return -- dict -- the matching row
[ "get", "one", "row", "from", "the", "db", "matching", "filters", "set", "in", "query" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L470-L481
Jaymon/prom
prom/interface/base.py
Interface.get
def get(self, schema, query=None, **kwargs):
        """
        get matching rows from the db matching filters set in query

        schema -- Schema()
        query -- Query()

        return -- list -- a list of matching dicts
        """
        ret = self._get_query(self._get, schema, query, **kwargs)
        if not ret:
            ret = []
        return ret
python
def get(self, schema, query=None, **kwargs):
        """
        get matching rows from the db matching filters set in query

        schema -- Schema()
        query -- Query()

        return -- list -- a list of matching dicts
        """
        ret = self._get_query(self._get, schema, query, **kwargs)
        if not ret:
            ret = []
        return ret
[ "def", "get", "(", "self", ",", "schema", ",", "query", "=", "None", ",", "*", "*", "kwargs", ")", ":", "ret", "=", "self", ".", "_get_query", "(", "self", ".", "_get", ",", "schema", ",", "query", ",", "*", "*", "kwargs", ")", "if", "not", "ret", ":", "ret", "=", "[", "]", "return", "ret" ]
get matching rows from the db matching filters set in query

schema -- Schema()
query -- Query()

return -- list -- a list of matching dicts
[ "get", "matching", "rows", "from", "the", "db", "matching", "filters", "set", "in", "query" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L485-L496
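Note: both getters above funnel through _get_query() and normalize empty results, so callers never see None; a hedged behavior sketch where 'interface' and 'schema' are hypothetical.

row = interface.get_one(schema)   # {} when nothing matches
rows = interface.get(schema)      # [] when nothing matches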
Jaymon/prom
prom/interface/base.py
Interface.log
def log(self, format_str, *format_args, **log_options):
        """
        wrapper around the module's logger

        format_str -- string -- the message to log
        *format_args -- list -- if format_str is a string containing {}, then format_str.format(*format_args) is ran
        **log_options --
            level -- something like logging.DEBUG
        """
        if isinstance(format_str, Exception):
            logger.exception(format_str, *format_args)
        else:
            log_level = log_options.get('level', logging.DEBUG)
            if logger.isEnabledFor(log_level):
                if format_args:
                    logger.log(log_level, format_str.format(*format_args))
                else:
                    logger.log(log_level, format_str)
python
def log(self, format_str, *format_args, **log_options):
        """
        wrapper around the module's logger

        format_str -- string -- the message to log
        *format_args -- list -- if format_str is a string containing {}, then format_str.format(*format_args) is ran
        **log_options --
            level -- something like logging.DEBUG
        """
        if isinstance(format_str, Exception):
            logger.exception(format_str, *format_args)
        else:
            log_level = log_options.get('level', logging.DEBUG)
            if logger.isEnabledFor(log_level):
                if format_args:
                    logger.log(log_level, format_str.format(*format_args))
                else:
                    logger.log(log_level, format_str)
[ "def", "log", "(", "self", ",", "format_str", ",", "*", "format_args", ",", "*", "*", "log_options", ")", ":", "if", "isinstance", "(", "format_str", ",", "Exception", ")", ":", "logger", ".", "exception", "(", "format_str", ",", "*", "format_args", ")", "else", ":", "log_level", "=", "log_options", ".", "get", "(", "'level'", ",", "logging", ".", "DEBUG", ")", "if", "logger", ".", "isEnabledFor", "(", "log_level", ")", ":", "if", "format_args", ":", "logger", ".", "log", "(", "log_level", ",", "format_str", ".", "format", "(", "*", "format_args", ")", ")", "else", ":", "logger", ".", "log", "(", "log_level", ",", "format_str", ")" ]
wrapper around the module's logger

format_str -- string -- the message to log
*format_args -- list -- if format_str is a string containing {}, then format_str.format(*format_args) is ran
**log_options --
    level -- something like logging.DEBUG
[ "wrapper", "around", "the", "module", "s", "logger" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L536-L553
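Note: the isEnabledFor() guard in log() above exists so the potentially costly str.format() call is skipped entirely when the level is filtered out; a minimal standalone demonstration of the same idea.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

if logger.isEnabledFor(logging.DEBUG):
    # never executed at INFO level, so the expensive sum is never computed
    logger.debug("expensive: {}".format(sum(range(10**6))))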
Jaymon/prom
prom/interface/base.py
Interface.raise_error
def raise_error(self, e, exc_info=None):
        """this is just a wrapper to make the passed in exception an InterfaceError"""
        if not exc_info:
            exc_info = sys.exc_info()

        if not isinstance(e, InterfaceError):
            # allow python's built in errors to filter up through
            # https://docs.python.org/2/library/exceptions.html
            #if not hasattr(exceptions, e.__class__.__name__):
            if not hasattr(builtins, e.__class__.__name__):
                e = self._create_error(e, exc_info)

        reraise(e.__class__, e, exc_info[2])
python
def raise_error(self, e, exc_info=None):
        """this is just a wrapper to make the passed in exception an InterfaceError"""
        if not exc_info:
            exc_info = sys.exc_info()

        if not isinstance(e, InterfaceError):
            # allow python's built in errors to filter up through
            # https://docs.python.org/2/library/exceptions.html
            #if not hasattr(exceptions, e.__class__.__name__):
            if not hasattr(builtins, e.__class__.__name__):
                e = self._create_error(e, exc_info)

        reraise(e.__class__, e, exc_info[2])
[ "def", "raise_error", "(", "self", ",", "e", ",", "exc_info", "=", "None", ")", ":", "if", "not", "exc_info", ":", "exc_info", "=", "sys", ".", "exc_info", "(", ")", "if", "not", "isinstance", "(", "e", ",", "InterfaceError", ")", ":", "# allow python's built in errors to filter up through", "# https://docs.python.org/2/library/exceptions.html", "#if not hasattr(exceptions, e.__class__.__name__):", "if", "not", "hasattr", "(", "builtins", ",", "e", ".", "__class__", ".", "__name__", ")", ":", "e", "=", "self", ".", "_create_error", "(", "e", ",", "exc_info", ")", "reraise", "(", "e", ".", "__class__", ",", "e", ",", "exc_info", "[", "2", "]", ")" ]
this is just a wrapper to make the passed in exception an InterfaceError
[ "this", "is", "just", "a", "wrapper", "to", "make", "the", "passed", "in", "exception", "an", "InterfaceError" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L555-L567
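The point of reraise(e.__class__, e, exc_info[2]) above is that the wrapping InterfaceError keeps the original traceback. A reduced, runnable sketch of the pattern (the builtins check and _create_error are simplified away here; six is assumed to be installed, as the original code's reraise implies):

import sys
import six

class InterfaceError(Exception):
    pass

def raise_error(e, exc_info=None):
    # Wrap foreign exceptions but re-raise with the original traceback.
    if not exc_info:
        exc_info = sys.exc_info()
    if not isinstance(e, InterfaceError):
        e = InterfaceError(str(e))
    six.reraise(e.__class__, e, exc_info[2])

try:
    try:
        {}['missing']
    except Exception as e:
        raise_error(e)
except InterfaceError as wrapped:
    print(repr(wrapped))  # InterfaceError carrying the KeyError's traceback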
Jaymon/prom
prom/interface/base.py
SQLInterface._query
def _query(self, query_str, query_args=None, **query_options): """ **query_options -- dict ignore_result -- boolean -- true to not attempt to fetch results fetchone -- boolean -- true to only fetch one result count_result -- boolean -- true to return the int count of rows affected """ ret = True # http://stackoverflow.com/questions/6739355/dictcursor-doesnt-seem-to-work-under-psycopg2 connection = query_options.get('connection', None) with self.connection(connection) as connection: cur = connection.cursor() ignore_result = query_options.get('ignore_result', False) count_result = query_options.get('count_result', False) one_result = query_options.get('fetchone', query_options.get('one_result', False)) cursor_result = query_options.get('cursor_result', False) try: if query_args: self.log("{}{}{}", query_str, os.linesep, query_args) cur.execute(query_str, query_args) else: self.log(query_str) cur.execute(query_str) if cursor_result: ret = cur elif not ignore_result: if one_result: ret = self._normalize_result_dict(cur.fetchone()) elif count_result: ret = cur.rowcount else: ret = self._normalize_result_list(cur.fetchall()) except Exception as e: self.log(e) raise return ret
python
def _query(self, query_str, query_args=None, **query_options): """ **query_options -- dict ignore_result -- boolean -- true to not attempt to fetch results fetchone -- boolean -- true to only fetch one result count_result -- boolean -- true to return the int count of rows affected """ ret = True # http://stackoverflow.com/questions/6739355/dictcursor-doesnt-seem-to-work-under-psycopg2 connection = query_options.get('connection', None) with self.connection(connection) as connection: cur = connection.cursor() ignore_result = query_options.get('ignore_result', False) count_result = query_options.get('count_result', False) one_result = query_options.get('fetchone', query_options.get('one_result', False)) cursor_result = query_options.get('cursor_result', False) try: if query_args: self.log("{}{}{}", query_str, os.linesep, query_args) cur.execute(query_str, query_args) else: self.log(query_str) cur.execute(query_str) if cursor_result: ret = cur elif not ignore_result: if one_result: ret = self._normalize_result_dict(cur.fetchone()) elif count_result: ret = cur.rowcount else: ret = self._normalize_result_list(cur.fetchall()) except Exception as e: self.log(e) raise return ret
[ "def", "_query", "(", "self", ",", "query_str", ",", "query_args", "=", "None", ",", "*", "*", "query_options", ")", ":", "ret", "=", "True", "# http://stackoverflow.com/questions/6739355/dictcursor-doesnt-seem-to-work-under-psycopg2", "connection", "=", "query_options", ".", "get", "(", "'connection'", ",", "None", ")", "with", "self", ".", "connection", "(", "connection", ")", "as", "connection", ":", "cur", "=", "connection", ".", "cursor", "(", ")", "ignore_result", "=", "query_options", ".", "get", "(", "'ignore_result'", ",", "False", ")", "count_result", "=", "query_options", ".", "get", "(", "'count_result'", ",", "False", ")", "one_result", "=", "query_options", ".", "get", "(", "'fetchone'", ",", "query_options", ".", "get", "(", "'one_result'", ",", "False", ")", ")", "cursor_result", "=", "query_options", ".", "get", "(", "'cursor_result'", ",", "False", ")", "try", ":", "if", "query_args", ":", "self", ".", "log", "(", "\"{}{}{}\"", ",", "query_str", ",", "os", ".", "linesep", ",", "query_args", ")", "cur", ".", "execute", "(", "query_str", ",", "query_args", ")", "else", ":", "self", ".", "log", "(", "query_str", ")", "cur", ".", "execute", "(", "query_str", ")", "if", "cursor_result", ":", "ret", "=", "cur", "elif", "not", "ignore_result", ":", "if", "one_result", ":", "ret", "=", "self", ".", "_normalize_result_dict", "(", "cur", ".", "fetchone", "(", ")", ")", "elif", "count_result", ":", "ret", "=", "cur", ".", "rowcount", "else", ":", "ret", "=", "self", ".", "_normalize_result_list", "(", "cur", ".", "fetchall", "(", ")", ")", "except", "Exception", "as", "e", ":", "self", ".", "log", "(", "e", ")", "raise", "return", "ret" ]
**query_options -- dict ignore_result -- boolean -- true to not attempt to fetch results fetchone -- boolean -- true to only fetch one result count_result -- boolean -- true to return the int count of rows affected
[ "**", "query_options", "--", "dict", "ignore_result", "--", "boolean", "--", "true", "to", "not", "attempt", "to", "fetch", "results", "fetchone", "--", "boolean", "--", "true", "to", "only", "fetch", "one", "result", "count_result", "--", "boolean", "--", "true", "to", "return", "the", "int", "count", "of", "rows", "affected" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L599-L639
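The keyword switches above (ignore_result, fetchone, count_result, cursor_result) select the fetch strategy for one shared execute path. A minimal sketch of the same dispatch against the stdlib sqlite3 driver, without prom's connection handling and logging:

import sqlite3

def query(connection, query_str, query_args=None, **options):
    # One execute path, several fetch strategies -- as in SQLInterface._query.
    cur = connection.cursor()
    cur.execute(query_str, query_args or [])
    if options.get('ignore_result'):
        return True
    if options.get('fetchone'):
        return cur.fetchone()
    if options.get('count_result'):
        return cur.rowcount
    return cur.fetchall()

conn = sqlite3.connect(':memory:')
query(conn, 'CREATE TABLE t (n INTEGER)', ignore_result=True)
query(conn, 'INSERT INTO t VALUES (?), (?)', [1, 2], ignore_result=True)
print(query(conn, 'SELECT n FROM t'))                 # [(1,), (2,)]
print(query(conn, 'SELECT n FROM t', fetchone=True))  # (1,)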
Jaymon/prom
prom/interface/base.py
SQLInterface.get_SQL
def get_SQL(self, schema, query, **sql_options): """ convert the query instance into SQL this is the glue method that translates the generic Query() instance to the SQL specific query, this is where the magic happens **sql_options -- dict count_query -- boolean -- true if this is a count query SELECT only_where_clause -- boolean -- true to only return after WHERE ... """ only_where_clause = sql_options.get('only_where_clause', False) symbol_map = { 'in': {'symbol': 'IN', 'list': True}, 'nin': {'symbol': 'NOT IN', 'list': True}, 'is': {'symbol': '=', 'none_symbol': 'IS'}, 'not': {'symbol': '!=', 'none_symbol': 'IS NOT'}, 'gt': {'symbol': '>'}, 'gte': {'symbol': '>='}, 'lt': {'symbol': '<'}, 'lte': {'symbol': '<='}, # https://www.tutorialspoint.com/postgresql/postgresql_like_clause.htm # https://www.tutorialspoint.com/sqlite/sqlite_like_clause.htm 'like': {'symbol': 'LIKE'}, 'nlike': {'symbol': 'NOT LIKE'}, } query_args = [] query_str = [] if not only_where_clause: query_str.append('SELECT') select_fields = query.fields_select if select_fields: distinct = "DISTINCT " if select_fields.options.get("unique", False) else "" select_fields_str = distinct + ',{}'.format(os.linesep).join( (self._normalize_name(f) for f in select_fields.names()) ) else: select_fields_str = "*" if sql_options.get('count_query', False): query_str.append(' count({}) as ct'.format(select_fields_str)) else: query_str.append(' {}'.format(select_fields_str)) query_str.append('FROM') query_str.append(" {}".format(self._normalize_table_name(schema))) if query.fields_where: query_str.append('WHERE') for i, field in enumerate(query.fields_where): if i > 0: query_str.append('AND') field_str = '' field_args = [] sd = symbol_map[field[0]] # field[0], field[1], field[2], field[3] _, field_name, field_val, field_kwargs = field field_str, field_args = self._normalize_val_SQL( schema, sd, field_name, field_val, field_kwargs ) query_str.append(' {}'.format(field_str)) query_args.extend(field_args) if query.fields_sort: query_sort_str = [] query_str.append('ORDER BY') for field in query.fields_sort: sort_dir_str = 'ASC' if field[0] > 0 else 'DESC' if field[2]: field_sort_str, field_sort_args = self._normalize_sort_SQL(field[1], field[2], sort_dir_str) query_sort_str.append(field_sort_str) query_args.extend(field_sort_args) else: query_sort_str.append(' {} {}'.format(field[1], sort_dir_str)) query_str.append(',{}'.format(os.linesep).join(query_sort_str)) if query.bounds: offset = query.bounds.offset limit = 1 if sql_options.get('one_query', False) else query.bounds.limit query_str.append('LIMIT {} OFFSET {}'.format( limit, offset )) query_str = os.linesep.join(query_str) return query_str, query_args
python
def get_SQL(self, schema, query, **sql_options): """ convert the query instance into SQL this is the glue method that translates the generic Query() instance to the SQL specific query, this is where the magic happens **sql_options -- dict count_query -- boolean -- true if this is a count query SELECT only_where_clause -- boolean -- true to only return after WHERE ... """ only_where_clause = sql_options.get('only_where_clause', False) symbol_map = { 'in': {'symbol': 'IN', 'list': True}, 'nin': {'symbol': 'NOT IN', 'list': True}, 'is': {'symbol': '=', 'none_symbol': 'IS'}, 'not': {'symbol': '!=', 'none_symbol': 'IS NOT'}, 'gt': {'symbol': '>'}, 'gte': {'symbol': '>='}, 'lt': {'symbol': '<'}, 'lte': {'symbol': '<='}, # https://www.tutorialspoint.com/postgresql/postgresql_like_clause.htm # https://www.tutorialspoint.com/sqlite/sqlite_like_clause.htm 'like': {'symbol': 'LIKE'}, 'nlike': {'symbol': 'NOT LIKE'}, } query_args = [] query_str = [] if not only_where_clause: query_str.append('SELECT') select_fields = query.fields_select if select_fields: distinct = "DISTINCT " if select_fields.options.get("unique", False) else "" select_fields_str = distinct + ',{}'.format(os.linesep).join( (self._normalize_name(f) for f in select_fields.names()) ) else: select_fields_str = "*" if sql_options.get('count_query', False): query_str.append(' count({}) as ct'.format(select_fields_str)) else: query_str.append(' {}'.format(select_fields_str)) query_str.append('FROM') query_str.append(" {}".format(self._normalize_table_name(schema))) if query.fields_where: query_str.append('WHERE') for i, field in enumerate(query.fields_where): if i > 0: query_str.append('AND') field_str = '' field_args = [] sd = symbol_map[field[0]] # field[0], field[1], field[2], field[3] _, field_name, field_val, field_kwargs = field field_str, field_args = self._normalize_val_SQL( schema, sd, field_name, field_val, field_kwargs ) query_str.append(' {}'.format(field_str)) query_args.extend(field_args) if query.fields_sort: query_sort_str = [] query_str.append('ORDER BY') for field in query.fields_sort: sort_dir_str = 'ASC' if field[0] > 0 else 'DESC' if field[2]: field_sort_str, field_sort_args = self._normalize_sort_SQL(field[1], field[2], sort_dir_str) query_sort_str.append(field_sort_str) query_args.extend(field_sort_args) else: query_sort_str.append(' {} {}'.format(field[1], sort_dir_str)) query_str.append(',{}'.format(os.linesep).join(query_sort_str)) if query.bounds: offset = query.bounds.offset limit = 1 if sql_options.get('one_query', False) else query.bounds.limit query_str.append('LIMIT {} OFFSET {}'.format( limit, offset )) query_str = os.linesep.join(query_str) return query_str, query_args
[ "def", "get_SQL", "(", "self", ",", "schema", ",", "query", ",", "*", "*", "sql_options", ")", ":", "only_where_clause", "=", "sql_options", ".", "get", "(", "'only_where_clause'", ",", "False", ")", "symbol_map", "=", "{", "'in'", ":", "{", "'symbol'", ":", "'IN'", ",", "'list'", ":", "True", "}", ",", "'nin'", ":", "{", "'symbol'", ":", "'NOT IN'", ",", "'list'", ":", "True", "}", ",", "'is'", ":", "{", "'symbol'", ":", "'='", ",", "'none_symbol'", ":", "'IS'", "}", ",", "'not'", ":", "{", "'symbol'", ":", "'!='", ",", "'none_symbol'", ":", "'IS NOT'", "}", ",", "'gt'", ":", "{", "'symbol'", ":", "'>'", "}", ",", "'gte'", ":", "{", "'symbol'", ":", "'>='", "}", ",", "'lt'", ":", "{", "'symbol'", ":", "'<'", "}", ",", "'lte'", ":", "{", "'symbol'", ":", "'<='", "}", ",", "# https://www.tutorialspoint.com/postgresql/postgresql_like_clause.htm", "# https://www.tutorialspoint.com/sqlite/sqlite_like_clause.htm", "'like'", ":", "{", "'symbol'", ":", "'LIKE'", "}", ",", "'nlike'", ":", "{", "'symbol'", ":", "'NOT LIKE'", "}", ",", "}", "query_args", "=", "[", "]", "query_str", "=", "[", "]", "if", "not", "only_where_clause", ":", "query_str", ".", "append", "(", "'SELECT'", ")", "select_fields", "=", "query", ".", "fields_select", "if", "select_fields", ":", "distinct", "=", "\"DISTINCT \"", "if", "select_fields", ".", "options", ".", "get", "(", "\"unique\"", ",", "False", ")", "else", "\"\"", "select_fields_str", "=", "distinct", "+", "',{}'", ".", "format", "(", "os", ".", "linesep", ")", ".", "join", "(", "(", "self", ".", "_normalize_name", "(", "f", ")", "for", "f", "in", "select_fields", ".", "names", "(", ")", ")", ")", "else", ":", "select_fields_str", "=", "\"*\"", "if", "sql_options", ".", "get", "(", "'count_query'", ",", "False", ")", ":", "query_str", ".", "append", "(", "' count({}) as ct'", ".", "format", "(", "select_fields_str", ")", ")", "else", ":", "query_str", ".", "append", "(", "' {}'", ".", "format", "(", "select_fields_str", ")", ")", "query_str", ".", "append", "(", "'FROM'", ")", "query_str", ".", "append", "(", "\" {}\"", ".", "format", "(", "self", ".", "_normalize_table_name", "(", "schema", ")", ")", ")", "if", "query", ".", "fields_where", ":", "query_str", ".", "append", "(", "'WHERE'", ")", "for", "i", ",", "field", "in", "enumerate", "(", "query", ".", "fields_where", ")", ":", "if", "i", ">", "0", ":", "query_str", ".", "append", "(", "'AND'", ")", "field_str", "=", "''", "field_args", "=", "[", "]", "sd", "=", "symbol_map", "[", "field", "[", "0", "]", "]", "# field[0], field[1], field[2], field[3]", "_", ",", "field_name", ",", "field_val", ",", "field_kwargs", "=", "field", "field_str", ",", "field_args", "=", "self", ".", "_normalize_val_SQL", "(", "schema", ",", "sd", ",", "field_name", ",", "field_val", ",", "field_kwargs", ")", "query_str", ".", "append", "(", "' {}'", ".", "format", "(", "field_str", ")", ")", "query_args", ".", "extend", "(", "field_args", ")", "if", "query", ".", "fields_sort", ":", "query_sort_str", "=", "[", "]", "query_str", ".", "append", "(", "'ORDER BY'", ")", "for", "field", "in", "query", ".", "fields_sort", ":", "sort_dir_str", "=", "'ASC'", "if", "field", "[", "0", "]", ">", "0", "else", "'DESC'", "if", "field", "[", "2", "]", ":", "field_sort_str", ",", "field_sort_args", "=", "self", ".", "_normalize_sort_SQL", "(", "field", "[", "1", "]", ",", "field", "[", "2", "]", ",", "sort_dir_str", ")", "query_sort_str", ".", "append", "(", "field_sort_str", ")", "query_args", ".", "extend", "(", "field_sort_args", ")", "else", 
":", "query_sort_str", ".", "append", "(", "' {} {}'", ".", "format", "(", "field", "[", "1", "]", ",", "sort_dir_str", ")", ")", "query_str", ".", "append", "(", "',{}'", ".", "format", "(", "os", ".", "linesep", ")", ".", "join", "(", "query_sort_str", ")", ")", "if", "query", ".", "bounds", ":", "offset", "=", "query", ".", "bounds", ".", "offset", "limit", "=", "1", "if", "sql_options", ".", "get", "(", "'one_query'", ",", "False", ")", "else", "query", ".", "bounds", ".", "limit", "query_str", ".", "append", "(", "'LIMIT {} OFFSET {}'", ".", "format", "(", "limit", ",", "offset", ")", ")", "query_str", "=", "os", ".", "linesep", ".", "join", "(", "query_str", ")", "return", "query_str", ",", "query_args" ]
convert the query instance into SQL this is the glue method that translates the generic Query() instance to the SQL specific query, this is where the magic happens **sql_options -- dict count_query -- boolean -- true if this is a count query SELECT only_where_clause -- boolean -- true to only return after WHERE ...
[ "convert", "the", "query", "instance", "into", "SQL" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L733-L830
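The symbol_map above is the heart of the WHERE translation: each query operator maps to an SQL comparison, with list and NULL variants. A simplified, runnable rendering of a single predicate (prom's real _normalize_val_SQL also handles LIKE escaping and per-backend placeholders; the double-quoted identifiers here are an assumption):

SYMBOL_MAP = {
    'in': {'symbol': 'IN', 'list': True},
    'nin': {'symbol': 'NOT IN', 'list': True},
    'is': {'symbol': '=', 'none_symbol': 'IS'},
    'not': {'symbol': '!=', 'none_symbol': 'IS NOT'},
    'gt': {'symbol': '>'},
    'gte': {'symbol': '>='},
    'lt': {'symbol': '<'},
    'lte': {'symbol': '<='},
}

def where_clause(op, field_name, field_val, placeholder='%s'):
    # Render one predicate plus its positional arguments.
    sd = SYMBOL_MAP[op]
    if sd.get('list'):
        marks = ', '.join([placeholder] * len(field_val))
        return '"{}" {} ({})'.format(field_name, sd['symbol'], marks), list(field_val)
    if field_val is None and 'none_symbol' in sd:
        return '"{}" {} NULL'.format(field_name, sd['none_symbol']), []
    return '"{}" {} {}'.format(field_name, sd['symbol'], placeholder), [field_val]

print(where_clause('in', 'bar', [1, 2, 3]))  # ('"bar" IN (%s, %s, %s)', [1, 2, 3])
print(where_clause('is', 'foo', None))       # ('"foo" IS NULL', [])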
Jaymon/prom
prom/interface/base.py
SQLInterface._set_all_tables
def _set_all_tables(self, schema, **kwargs): """ You can run into a problem when you are trying to set a table and it has a foreign key to a table that doesn't exist, so this method will go through all fk refs and make sure the tables exist """ with self.transaction(**kwargs) as connection: kwargs['connection'] = connection # go through and make sure all foreign key referenced tables exist for field_name, field_val in schema.fields.items(): s = field_val.schema if s: self._set_all_tables(s, **kwargs) # now that we know all fk tables exist, create this table self.set_table(schema, **kwargs) return True
python
def _set_all_tables(self, schema, **kwargs): """ You can run into a problem when you are trying to set a table and it has a foreign key to a table that doesn't exist, so this method will go through all fk refs and make sure the tables exist """ with self.transaction(**kwargs) as connection: kwargs['connection'] = connection # go through and make sure all foreign key referenced tables exist for field_name, field_val in schema.fields.items(): s = field_val.schema if s: self._set_all_tables(s, **kwargs) # now that we know all fk tables exist, create this table self.set_table(schema, **kwargs) return True
[ "def", "_set_all_tables", "(", "self", ",", "schema", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "transaction", "(", "*", "*", "kwargs", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connection", "# go through and make sure all foreign key referenced tables exist", "for", "field_name", ",", "field_val", "in", "schema", ".", "fields", ".", "items", "(", ")", ":", "s", "=", "field_val", ".", "schema", "if", "s", ":", "self", ".", "_set_all_tables", "(", "s", ",", "*", "*", "kwargs", ")", "# now that we know all fk tables exist, create this table", "self", ".", "set_table", "(", "schema", ",", "*", "*", "kwargs", ")", "return", "True" ]
You can run into a problem when you are trying to set a table and it has a foreign key to a table that doesn't exist, so this method will go through all fk refs and make sure the tables exist
[ "You", "can", "run", "into", "a", "problem", "when", "you", "are", "trying", "to", "set", "a", "table", "and", "it", "has", "a", "foreign", "key", "to", "a", "table", "that", "doesn", "t", "exist", "so", "this", "method", "will", "go", "through", "all", "fk", "refs", "and", "make", "sure", "the", "tables", "exist" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L856-L873
Jaymon/prom
prom/interface/base.py
SQLInterface._set_all_fields
def _set_all_fields(self, schema, **kwargs): """ this will add fields that don't exist in the table if they can be set to NULL; the reason they have to be NULL is that adding fields to Postgres that can be NULL is really light, but if they have a default value, then it can be costly """ current_fields = self.get_fields(schema, **kwargs) for field_name, field in schema.fields.items(): if field_name not in current_fields: if field.required: raise ValueError('Cannot safely add {} on the fly because it is required'.format(field_name)) else: query_str = [] query_str.append('ALTER TABLE') query_str.append(' {}'.format(schema)) query_str.append('ADD COLUMN') query_str.append(' {}'.format(self.get_field_SQL(field_name, field))) query_str = os.linesep.join(query_str) self.query(query_str, ignore_result=True, **kwargs) return True
python
def _set_all_fields(self, schema, **kwargs): """ this will add fields that don't exist in the table if they can be set to NULL; the reason they have to be NULL is that adding fields to Postgres that can be NULL is really light, but if they have a default value, then it can be costly """ current_fields = self.get_fields(schema, **kwargs) for field_name, field in schema.fields.items(): if field_name not in current_fields: if field.required: raise ValueError('Cannot safely add {} on the fly because it is required'.format(field_name)) else: query_str = [] query_str.append('ALTER TABLE') query_str.append(' {}'.format(schema)) query_str.append('ADD COLUMN') query_str.append(' {}'.format(self.get_field_SQL(field_name, field))) query_str = os.linesep.join(query_str) self.query(query_str, ignore_result=True, **kwargs) return True
[ "def", "_set_all_fields", "(", "self", ",", "schema", ",", "*", "*", "kwargs", ")", ":", "current_fields", "=", "self", ".", "get_fields", "(", "schema", ",", "*", "*", "kwargs", ")", "for", "field_name", ",", "field", "in", "schema", ".", "fields", ".", "items", "(", ")", ":", "if", "field_name", "not", "in", "current_fields", ":", "if", "field", ".", "required", ":", "raise", "ValueError", "(", "'Cannot safely add {} on the fly because it is required'", ".", "format", "(", "field_name", ")", ")", "else", ":", "query_str", "=", "[", "]", "query_str", ".", "append", "(", "'ALTER TABLE'", ")", "query_str", ".", "append", "(", "' {}'", ".", "format", "(", "schema", ")", ")", "query_str", ".", "append", "(", "'ADD COLUMN'", ")", "query_str", ".", "append", "(", "' {}'", ".", "format", "(", "self", ".", "get_field_SQL", "(", "field_name", ",", "field", ")", ")", ")", "query_str", "=", "os", ".", "linesep", ".", "join", "(", "query_str", ")", "self", ".", "query", "(", "query_str", ",", "ignore_result", "=", "True", ",", "*", "*", "kwargs", ")", "return", "True" ]
this will add fields that don't exist in the table if they can be set to NULL; the reason they have to be NULL is that adding fields to Postgres that can be NULL is really light, but if they have a default value, then it can be costly
[ "this", "will", "add", "fields", "that", "don", "t", "exist", "in", "the", "table", "if", "they", "can", "be", "set", "to", "NULL", "the", "reason", "they", "have", "to", "be", "NULL", "is", "adding", "fields", "to", "Postgres", "that", "can", "be", "NULL", "is", "really", "light", "but", "if", "they", "have", "a", "default", "value", "then", "it", "can", "be", "costly" ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/interface/base.py#L912-L933
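For a nullable field the loop above assembles a plain ALTER TABLE statement. Mirrored here as a tiny runnable helper (the table and column names are invented for illustration):

import os

def add_column_sql(table, field_sql):
    # Same statement shape as _set_all_fields builds above.
    return os.linesep.join([
        'ALTER TABLE',
        ' {}'.format(table),
        'ADD COLUMN',
        ' {}'.format(field_sql),
    ])

print(add_column_sql('users', 'nickname TEXT'))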
thorgate/tg-utils
tg_utils/files.py
random_path
def random_path(instance, filename): """ Random path generator for uploads, specify this for upload_to= argument of FileFields """ # Split the uuid into two parts so that we won't run into subdirectory count limits. First part has 3 hex chars, # thus 4k possible values. uuid_hex = get_uuid() return os.path.join(uuid_hex[:3], uuid_hex[3:], filename)
python
def random_path(instance, filename): """ Random path generator for uploads, specify this for upload_to= argument of FileFields """ # Split the uuid into two parts so that we won't run into subdirectory count limits. First part has 3 hex chars, # thus 4k possible values. uuid_hex = get_uuid() return os.path.join(uuid_hex[:3], uuid_hex[3:], filename)
[ "def", "random_path", "(", "instance", ",", "filename", ")", ":", "# Split the uuid into two parts so that we won't run into subdirectory count limits. First part has 3 hex chars,", "# thus 4k possible values.", "uuid_hex", "=", "get_uuid", "(", ")", "return", "os", ".", "path", ".", "join", "(", "uuid_hex", "[", ":", "3", "]", ",", "uuid_hex", "[", "3", ":", "]", ",", "filename", ")" ]
Random path generator for uploads, specify this for upload_to= argument of FileFields
[ "Random", "path", "generator", "for", "uploads", "specify", "this", "for", "upload_to", "=", "argument", "of", "FileFields" ]
train
https://github.com/thorgate/tg-utils/blob/81e404e837334b241686d9159cc3eb44de509a88/tg_utils/files.py#L6-L12
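random_path is meant to be passed, uncalled, as the upload_to callable of a Django FileField; Django then invokes it once per upload. A hedged usage sketch (the model and field names are invented; a configured Django project is assumed):

from django.db import models

from tg_utils.files import random_path

class Attachment(models.Model):
    # Files land under e.g. 'a3f/9c2d.../report.pdf', spreading
    # uploads across at most 4096 top-level directories.
    file = models.FileField(upload_to=random_path)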
Yelp/uwsgi_metrics
uwsgi_metrics/metrics.py
initialize
def initialize(signal_number=DEFAULT_TIMER_SIGNAL_NUMBER, update_period_s=DEFAULT_UPDATE_PERIOD_S): """Initialize metrics, must be invoked at least once prior to invoking any other method.""" global initialized if initialized: return initialized = True uwsgi.add_timer(signal_number, update_period_s) uwsgi.register_signal(signal_number, MULE, emit)
python
def initialize(signal_number=DEFAULT_TIMER_SIGNAL_NUMBER, update_period_s=DEFAULT_UPDATE_PERIOD_S): """Initialize metrics, must be invoked at least once prior to invoking any other method.""" global initialized if initialized: return initialized = True uwsgi.add_timer(signal_number, update_period_s) uwsgi.register_signal(signal_number, MULE, emit)
[ "def", "initialize", "(", "signal_number", "=", "DEFAULT_TIMER_SIGNAL_NUMBER", ",", "update_period_s", "=", "DEFAULT_UPDATE_PERIOD_S", ")", ":", "global", "initialized", "if", "initialized", ":", "return", "initialized", "=", "True", "uwsgi", ".", "add_timer", "(", "signal_number", ",", "update_period_s", ")", "uwsgi", ".", "register_signal", "(", "signal_number", ",", "MULE", ",", "emit", ")" ]
Initialize metrics, must be invoked at least once prior to invoking any other method.
[ "Initialize", "metrics", "must", "be", "invoked", "at", "least", "once", "prior", "to", "invoking", "any", "other", "method", "." ]
train
https://github.com/Yelp/uwsgi_metrics/blob/534966fd461ff711aecd1e3d4caaafdc23ac33f0/uwsgi_metrics/metrics.py#L91-L100
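initialize() must run once per worker before any metric is touched, typically next to the WSGI callable. A hedged sketch (assuming the function is re-exported at package level, as the project README shows):

import uwsgi_metrics

uwsgi_metrics.initialize()

def application(environ, start_response):
    # Metrics recorded from here on are flushed every
    # DEFAULT_UPDATE_PERIOD_S seconds by the uwsgi timer signal.
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'ok']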
Yelp/uwsgi_metrics
uwsgi_metrics/metrics.py
emit
def emit(_): """Serialize metrics to the memory mapped buffer.""" if not initialized: raise NotInitialized view = { 'version': __version__, 'counters': {}, 'gauges': {}, 'histograms': {}, 'meters': {}, 'timers': {}, } for (ty, module, name), metric in six.iteritems(all_metrics): view[ty]['%s.%s' % (module, name)] = metric.view() marshalled_view = marshal.dumps(view) if len(marshalled_view) > MAX_MARSHALLED_VIEW_SIZE: log.warn( 'Marshalled length too large, got %d, max %d. ' 'Try recording fewer metrics or increasing ' 'MAX_MARSHALLED_VIEW_SIZE' % (len(marshalled_view), MAX_MARSHALLED_VIEW_SIZE)) return marshalled_metrics_mmap.seek(0) try: # Reading and writing to/from an mmap'ed buffer is not guaranteed # to be atomic, so we must serialize access to it. uwsgi.lock() marshalled_metrics_mmap.write(marshalled_view) finally: uwsgi.unlock()
python
def emit(_): """Serialize metrics to the memory mapped buffer.""" if not initialized: raise NotInitialized view = { 'version': __version__, 'counters': {}, 'gauges': {}, 'histograms': {}, 'meters': {}, 'timers': {}, } for (ty, module, name), metric in six.iteritems(all_metrics): view[ty]['%s.%s' % (module, name)] = metric.view() marshalled_view = marshal.dumps(view) if len(marshalled_view) > MAX_MARSHALLED_VIEW_SIZE: log.warn( 'Marshalled length too large, got %d, max %d. ' 'Try recording fewer metrics or increasing ' 'MAX_MARSHALLED_VIEW_SIZE' % (len(marshalled_view), MAX_MARSHALLED_VIEW_SIZE)) return marshalled_metrics_mmap.seek(0) try: # Reading and writing to/from an mmap'ed buffer is not guaranteed # to be atomic, so we must serialize access to it. uwsgi.lock() marshalled_metrics_mmap.write(marshalled_view) finally: uwsgi.unlock()
[ "def", "emit", "(", "_", ")", ":", "if", "not", "initialized", ":", "raise", "NotInitialized", "view", "=", "{", "'version'", ":", "__version__", ",", "'counters'", ":", "{", "}", ",", "'gauges'", ":", "{", "}", ",", "'histograms'", ":", "{", "}", ",", "'meters'", ":", "{", "}", ",", "'timers'", ":", "{", "}", ",", "}", "for", "(", "ty", ",", "module", ",", "name", ")", ",", "metric", "in", "six", ".", "iteritems", "(", "all_metrics", ")", ":", "view", "[", "ty", "]", "[", "'%s.%s'", "%", "(", "module", ",", "name", ")", "]", "=", "metric", ".", "view", "(", ")", "marshalled_view", "=", "marshal", ".", "dumps", "(", "view", ")", "if", "len", "(", "marshalled_view", ")", ">", "MAX_MARSHALLED_VIEW_SIZE", ":", "log", ".", "warn", "(", "'Marshalled length too large, got %d, max %d. '", "'Try recording fewer metrics or increasing '", "'MAX_MARSHALLED_VIEW_SIZE'", "%", "(", "len", "(", "marshalled_view", ")", ",", "MAX_MARSHALLED_VIEW_SIZE", ")", ")", "return", "marshalled_metrics_mmap", ".", "seek", "(", "0", ")", "try", ":", "# Reading and writing to/from an mmap'ed buffer is not guaranteed", "# to be atomic, so we must serialize access to it.", "uwsgi", ".", "lock", "(", ")", "marshalled_metrics_mmap", ".", "write", "(", "marshalled_view", ")", "finally", ":", "uwsgi", ".", "unlock", "(", ")" ]
Serialize metrics to the memory mapped buffer.
[ "Serialize", "metrics", "to", "the", "memory", "mapped", "buffer", "." ]
train
https://github.com/Yelp/uwsgi_metrics/blob/534966fd461ff711aecd1e3d4caaafdc23ac33f0/uwsgi_metrics/metrics.py#L110-L142
Yelp/uwsgi_metrics
uwsgi_metrics/metrics.py
view
def view(): """Get a dictionary representation of current metrics.""" if not initialized: raise NotInitialized marshalled_metrics_mmap.seek(0) try: uwsgi.lock() marshalled_view = marshalled_metrics_mmap.read( MAX_MARSHALLED_VIEW_SIZE) finally: uwsgi.unlock() return marshal.loads(marshalled_view)
python
def view(): """Get a dictionary representation of current metrics.""" if not initialized: raise NotInitialized marshalled_metrics_mmap.seek(0) try: uwsgi.lock() marshalled_view = marshalled_metrics_mmap.read( MAX_MARSHALLED_VIEW_SIZE) finally: uwsgi.unlock() return marshal.loads(marshalled_view)
[ "def", "view", "(", ")", ":", "if", "not", "initialized", ":", "raise", "NotInitialized", "marshalled_metrics_mmap", ".", "seek", "(", "0", ")", "try", ":", "uwsgi", ".", "lock", "(", ")", "marshalled_view", "=", "marshalled_metrics_mmap", ".", "read", "(", "MAX_MARSHALLED_VIEW_SIZE", ")", "finally", ":", "uwsgi", ".", "unlock", "(", ")", "return", "marshal", ".", "loads", "(", "marshalled_view", ")" ]
Get a dictionary representation of current metrics.
[ "Get", "a", "dictionary", "representation", "of", "current", "metrics", "." ]
train
https://github.com/Yelp/uwsgi_metrics/blob/534966fd461ff711aecd1e3d4caaafdc23ac33f0/uwsgi_metrics/metrics.py#L145-L157
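A common consumer of view() is a status endpoint that serves the snapshot as JSON. A hedged sketch (the handler name is invented, and view() is assumed to be re-exported at package level):

import json

import uwsgi_metrics

def metrics_endpoint(environ, start_response):
    body = json.dumps(uwsgi_metrics.view()).encode('utf-8')
    start_response('200 OK', [('Content-Type', 'application/json')])
    return [body]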
Yelp/uwsgi_metrics
uwsgi_metrics/metrics.py
timing
def timing(module, name): """ Context manager to time a section of code:: with timing(__name__, 'my_timer'): do_some_operation() """ start_time_s = time.time() try: yield finally: end_time_s = time.time() delta_s = end_time_s - start_time_s delta_ms = delta_s * 1000 timer(module, name, delta_ms)
python
def timing(module, name): """ Context manager to time a section of code:: with timing(__name__, 'my_timer'): do_some_operation() """ start_time_s = time.time() try: yield finally: end_time_s = time.time() delta_s = end_time_s - start_time_s delta_ms = delta_s * 1000 timer(module, name, delta_ms)
[ "def", "timing", "(", "module", ",", "name", ")", ":", "start_time_s", "=", "time", ".", "time", "(", ")", "try", ":", "yield", "finally", ":", "end_time_s", "=", "time", ".", "time", "(", ")", "delta_s", "=", "end_time_s", "-", "start_time_s", "delta_ms", "=", "delta_s", "*", "1000", "timer", "(", "module", ",", "name", ",", "delta_ms", ")" ]
Context manager to time a section of code:: with timing(__name__, 'my_timer'): do_some_operation()
[ "Context", "manager", "to", "time", "a", "section", "of", "code", "::" ]
train
https://github.com/Yelp/uwsgi_metrics/blob/534966fd461ff711aecd1e3d4caaafdc23ac33f0/uwsgi_metrics/metrics.py#L161-L175
Yelp/uwsgi_metrics
uwsgi_metrics/metrics.py
timer
def timer(module, name, delta, duration_units='milliseconds'): """ Record a timing delta: :: start_time_s = time.time() do_some_operation() end_time_s = time.time() delta_s = end_time_s - start_time_s delta_ms = delta_s * 1000 timer(__name__, 'my_timer', delta_ms) """ timer = get_metric('timers', module, name, Timer(duration_units)) timer.update(delta)
python
def timer(module, name, delta, duration_units='milliseconds'): """ Record a timing delta: :: start_time_s = time.time() do_some_operation() end_time_s = time.time() delta_s = end_time_s - start_time_s delta_ms = delta_s * 1000 timer(__name__, 'my_timer', delta_ms) """ timer = get_metric('timers', module, name, Timer(duration_units)) timer.update(delta)
[ "def", "timer", "(", "module", ",", "name", ",", "delta", ",", "duration_units", "=", "'milliseconds'", ")", ":", "timer", "=", "get_metric", "(", "'timers'", ",", "module", ",", "name", ",", "Timer", "(", "duration_units", ")", ")", "timer", ".", "update", "(", "delta", ")" ]
Record a timing delta: :: start_time_s = time.time() do_some_operation() end_time_s = time.time() delta_s = end_time_s - start_time_s delta_ms = delta_s * 1000 timer(__name__, 'my_timer', delta_ms)
[ "Record", "a", "timing", "delta", ":", "::" ]
train
https://github.com/Yelp/uwsgi_metrics/blob/534966fd461ff711aecd1e3d4caaafdc23ac33f0/uwsgi_metrics/metrics.py#L179-L192
Yelp/uwsgi_metrics
uwsgi_metrics/metrics.py
histogram
def histogram(module, name, value): """ Record a value in a histogram: :: histogram(__name__, 'my_histogram', len(queue)) """ histogram = get_metric('histograms', module, name, Histogram()) histogram.update(value)
python
def histogram(module, name, value): """ Record a value in a histogram: :: histogram(__name__, 'my_histogram', len(queue)) """ histogram = get_metric('histograms', module, name, Histogram()) histogram.update(value)
[ "def", "histogram", "(", "module", ",", "name", ",", "value", ")", ":", "histogram", "=", "get_metric", "(", "'histograms'", ",", "module", ",", "name", ",", "Histogram", "(", ")", ")", "histogram", ".", "update", "(", "value", ")" ]
Record a value in a histogram: :: histogram(__name__, 'my_histogram', len(queue))
[ "Record", "a", "value", "in", "a", "histogram", ":", "::" ]
train
https://github.com/Yelp/uwsgi_metrics/blob/534966fd461ff711aecd1e3d4caaafdc23ac33f0/uwsgi_metrics/metrics.py#L196-L204
Yelp/uwsgi_metrics
uwsgi_metrics/metrics.py
counter
def counter(module, name, count=1): """ Record an event's occurrence in a counter: :: counter(__name__, 'my_counter') """ counter = get_metric('counters', module, name, Counter()) counter.inc(count)
python
def counter(module, name, count=1): """ Record an event's occurrence in a counter: :: counter(__name__, 'my_counter') """ counter = get_metric('counters', module, name, Counter()) counter.inc(count)
[ "def", "counter", "(", "module", ",", "name", ",", "count", "=", "1", ")", ":", "counter", "=", "get_metric", "(", "'counters'", ",", "module", ",", "name", ",", "Counter", "(", ")", ")", "counter", ".", "inc", "(", "count", ")" ]
Record an event's occurrence in a counter: :: counter(__name__, 'my_counter')
[ "Record", "an", "event", "s", "occurence", "in", "a", "counter", ":", "::" ]
train
https://github.com/Yelp/uwsgi_metrics/blob/534966fd461ff711aecd1e3d4caaafdc23ac33f0/uwsgi_metrics/metrics.py#L208-L216
Yelp/uwsgi_metrics
uwsgi_metrics/metrics.py
meter
def meter(module, name, count=1): """ Record an event rate: :: meter(__name__, 'my_meter') """ meter = get_metric('meters', module, name, Meter()) meter.mark(count)
python
def meter(module, name, count=1): """ Record an event rate: :: meter(__name__, 'my_meter') """ meter = get_metric('meters', module, name, Meter()) meter.mark(count)
[ "def", "meter", "(", "module", ",", "name", ",", "count", "=", "1", ")", ":", "meter", "=", "get_metric", "(", "'meters'", ",", "module", ",", "name", ",", "Meter", "(", ")", ")", "meter", ".", "mark", "(", "count", ")" ]
Record an event rate: :: meter(__name__, 'my_meter')
[ "Record", "an", "event", "rate", ":", "::" ]
train
https://github.com/Yelp/uwsgi_metrics/blob/534966fd461ff711aecd1e3d4caaafdc23ac33f0/uwsgi_metrics/metrics.py#L220-L228
BeyondTheClouds/enoslib
enoslib/infra/enos_g5k/remote.py
exec_command_on_nodes
def exec_command_on_nodes(nodes, cmd, label, conn_params=None): """Execute a command on a node (id or hostname) or on a set of nodes. :param nodes: list of targets of the command cmd. Each must be an execo.Host. :param cmd: string representing the command to run on the remote nodes. :param label: string for debugging purposes. :param conn_params: connection parameters passed to the execo.Remote function """ if isinstance(nodes, BASESTRING): nodes = [nodes] if conn_params is None: conn_params = DEFAULT_CONN_PARAMS logger.debug("Running %s on %s ", label, nodes) remote = ex.get_remote(cmd, nodes, conn_params) remote.run() if not remote.finished_ok: raise Exception('An error occurred during remote execution') return remote
python
def exec_command_on_nodes(nodes, cmd, label, conn_params=None): """Execute a command on a node (id or hostname) or on a set of nodes. :param nodes: list of targets of the command cmd. Each must be an execo.Host. :param cmd: string representing the command to run on the remote nodes. :param label: string for debugging purposes. :param conn_params: connection parameters passed to the execo.Remote function """ if isinstance(nodes, BASESTRING): nodes = [nodes] if conn_params is None: conn_params = DEFAULT_CONN_PARAMS logger.debug("Running %s on %s ", label, nodes) remote = ex.get_remote(cmd, nodes, conn_params) remote.run() if not remote.finished_ok: raise Exception('An error occurred during remote execution') return remote
[ "def", "exec_command_on_nodes", "(", "nodes", ",", "cmd", ",", "label", ",", "conn_params", "=", "None", ")", ":", "if", "isinstance", "(", "nodes", ",", "BASESTRING", ")", ":", "nodes", "=", "[", "nodes", "]", "if", "conn_params", "is", "None", ":", "conn_params", "=", "DEFAULT_CONN_PARAMS", "logger", ".", "debug", "(", "\"Running %s on %s \"", ",", "label", ",", "nodes", ")", "remote", "=", "ex", ".", "get_remote", "(", "cmd", ",", "nodes", ",", "conn_params", ")", "remote", ".", "run", "(", ")", "if", "not", "remote", ".", "finished_ok", ":", "raise", "Exception", "(", "'An error occcured during remote execution'", ")", "return", "remote" ]
Execute a command on a node (id or hostname) or on a set of nodes. :param nodes: list of targets of the command cmd. Each must be an execo.Host. :param cmd: string representing the command to run on the remote nodes. :param label: string for debugging purposes. :param conn_params: connection parameters passed to the execo.Remote function
[ "Execute", "a", "command", "on", "a", "node", "(", "id", "or", "hostname", ")", "or", "on", "a", "set", "of", "nodes", "." ]
train
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_g5k/remote.py#L17-L40
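A hedged usage sketch of the helper above (the hostname is a placeholder; DEFAULT_CONN_PARAMS is used because conn_params is omitted):

from enoslib.infra.enos_g5k.remote import exec_command_on_nodes

# Raises if the remote command fails on any node.
remote = exec_command_on_nodes(
    ['paravance-1.rennes.grid5000.fr'],
    'uname -a',
    label='kernel check')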
guaix-ucm/pyemir
emirdrp/products.py
ProcessedImageProduct.convert_out
def convert_out(self, obj): """Write EMIRUUID header on reduction""" newobj = super(ProcessedImageProduct, self).convert_out(obj) if newobj: hdulist = newobj.open() hdr = hdulist[0].header if 'EMIRUUID' not in hdr: hdr['EMIRUUID'] = str(uuid.uuid1()) return newobj
python
def convert_out(self, obj): """Write EMIRUUID header on reduction""" newobj = super(ProcessedImageProduct, self).convert_out(obj) if newobj: hdulist = newobj.open() hdr = hdulist[0].header if 'EMIRUUID' not in hdr: hdr['EMIRUUID'] = str(uuid.uuid1()) return newobj
[ "def", "convert_out", "(", "self", ",", "obj", ")", ":", "newobj", "=", "super", "(", "ProcessedImageProduct", ",", "self", ")", ".", "convert_out", "(", "obj", ")", "if", "newobj", ":", "hdulist", "=", "newobj", ".", "open", "(", ")", "hdr", "=", "hdulist", "[", "0", "]", ".", "header", "if", "'EMIRUUID'", "not", "in", "hdr", ":", "hdr", "[", "'EMIRUUID'", "]", "=", "str", "(", "uuid", ".", "uuid1", "(", ")", ")", "return", "newobj" ]
Write EMIRUUID header on reduction
[ "Write", "EMIRUUID", "header", "on", "reduction" ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/products.py#L97-L105
IdentityPython/oidcendpoint
src/oidcendpoint/client_authn.py
verify_client
def verify_client(endpoint_context, request, authorization_info): """ Initiated Guessing ! :param endpoint_context: SrvInfo instance :param request: The request :param authorization_info: Client authentication information :return: dictionary containing client id, client authentication method and possibly access token. """ if not authorization_info: if 'client_id' in request and 'client_secret' in request: auth_info = ClientSecretPost(endpoint_context).verify(request) auth_info['method'] = 'client_secret_post' elif 'client_assertion' in request: auth_info = JWSAuthnMethod(endpoint_context).verify(request) # If symmetric key was used # auth_method = 'client_secret_jwt' # If asymmetric key was used auth_info['method'] = 'private_key_jwt' elif 'access_token' in request: auth_info = BearerBody(endpoint_context).verify(request) auth_info['method'] = 'bearer_body' else: raise UnknownOrNoAuthnMethod() else: if authorization_info.startswith('Basic '): auth_info = ClientSecretBasic(endpoint_context).verify( request, authorization_info) auth_info['method'] = 'client_secret_basic' elif authorization_info.startswith('Bearer '): auth_info = BearerHeader(endpoint_context).verify( request, authorization_info) auth_info['method'] = 'bearer_header' else: raise UnknownOrNoAuthnMethod(authorization_info) try: client_id = auth_info['client_id'] except KeyError: client_id = '' try: _token = auth_info['token'] except KeyError: logger.warning('Unknown client ID') else: sinfo = endpoint_context.sdb[_token] auth_info['client_id'] = sinfo['authn_req']['client_id'] else: try: _cinfo = endpoint_context.cdb[client_id] except KeyError: raise ValueError('Unknown Client ID') else: if isinstance(_cinfo,str): try: _cinfo = endpoint_context.cdb[_cinfo] except KeyError: raise ValueError('Unknown Client ID') try: valid_client_info(_cinfo) except KeyError: logger.warning('Client registration has timed out') raise ValueError('Not valid client') else: # check that the expected authz method was used try: endpoint_context.cdb[client_id]['auth_method'][ request.__class__.__name__] = auth_info['method'] except KeyError: try: endpoint_context.cdb[client_id]['auth_method'] = { request.__class__.__name__: auth_info['method']} except KeyError: pass return auth_info
python
def verify_client(endpoint_context, request, authorization_info): """ Initiated Guessing ! :param endpoint_context: SrvInfo instance :param request: The request :param authorization_info: Client authentication information :return: dictionary containing client id, client authentication method and possibly access token. """ if not authorization_info: if 'client_id' in request and 'client_secret' in request: auth_info = ClientSecretPost(endpoint_context).verify(request) auth_info['method'] = 'client_secret_post' elif 'client_assertion' in request: auth_info = JWSAuthnMethod(endpoint_context).verify(request) # If symmetric key was used # auth_method = 'client_secret_jwt' # If asymmetric key was used auth_info['method'] = 'private_key_jwt' elif 'access_token' in request: auth_info = BearerBody(endpoint_context).verify(request) auth_info['method'] = 'bearer_body' else: raise UnknownOrNoAuthnMethod() else: if authorization_info.startswith('Basic '): auth_info = ClientSecretBasic(endpoint_context).verify( request, authorization_info) auth_info['method'] = 'client_secret_basic' elif authorization_info.startswith('Bearer '): auth_info = BearerHeader(endpoint_context).verify( request, authorization_info) auth_info['method'] = 'bearer_header' else: raise UnknownOrNoAuthnMethod(authorization_info) try: client_id = auth_info['client_id'] except KeyError: client_id = '' try: _token = auth_info['token'] except KeyError: logger.warning('Unknown client ID') else: sinfo = endpoint_context.sdb[_token] auth_info['client_id'] = sinfo['authn_req']['client_id'] else: try: _cinfo = endpoint_context.cdb[client_id] except KeyError: raise ValueError('Unknown Client ID') else: if isinstance(_cinfo,str): try: _cinfo = endpoint_context.cdb[_cinfo] except KeyError: raise ValueError('Unknown Client ID') try: valid_client_info(_cinfo) except KeyError: logger.warning('Client registration has timed out') raise ValueError('Not valid client') else: # check that the expected authz method was used try: endpoint_context.cdb[client_id]['auth_method'][ request.__class__.__name__] = auth_info['method'] except KeyError: try: endpoint_context.cdb[client_id]['auth_method'] = { request.__class__.__name__: auth_info['method']} except KeyError: pass return auth_info
[ "def", "verify_client", "(", "endpoint_context", ",", "request", ",", "authorization_info", ")", ":", "if", "not", "authorization_info", ":", "if", "'client_id'", "in", "request", "and", "'client_secret'", "in", "request", ":", "auth_info", "=", "ClientSecretPost", "(", "endpoint_context", ")", ".", "verify", "(", "request", ")", "auth_info", "[", "'method'", "]", "=", "'client_secret_post'", "elif", "'client_assertion'", "in", "request", ":", "auth_info", "=", "JWSAuthnMethod", "(", "endpoint_context", ")", ".", "verify", "(", "request", ")", "# If symmetric key was used", "# auth_method = 'client_secret_jwt'", "# If asymmetric key was used", "auth_info", "[", "'method'", "]", "=", "'private_key_jwt'", "elif", "'access_token'", "in", "request", ":", "auth_info", "=", "BearerBody", "(", "endpoint_context", ")", ".", "verify", "(", "request", ")", "auth_info", "[", "'method'", "]", "=", "'bearer_body'", "else", ":", "raise", "UnknownOrNoAuthnMethod", "(", ")", "else", ":", "if", "authorization_info", ".", "startswith", "(", "'Basic '", ")", ":", "auth_info", "=", "ClientSecretBasic", "(", "endpoint_context", ")", ".", "verify", "(", "request", ",", "authorization_info", ")", "auth_info", "[", "'method'", "]", "=", "'client_secret_basic'", "elif", "authorization_info", ".", "startswith", "(", "'Bearer '", ")", ":", "auth_info", "=", "BearerHeader", "(", "endpoint_context", ")", ".", "verify", "(", "request", ",", "authorization_info", ")", "auth_info", "[", "'method'", "]", "=", "'bearer_header'", "else", ":", "raise", "UnknownOrNoAuthnMethod", "(", "authorization_info", ")", "try", ":", "client_id", "=", "auth_info", "[", "'client_id'", "]", "except", "KeyError", ":", "client_id", "=", "''", "try", ":", "_token", "=", "auth_info", "[", "'token'", "]", "except", "KeyError", ":", "logger", ".", "warning", "(", "'Unknown client ID'", ")", "else", ":", "sinfo", "=", "endpoint_context", ".", "sdb", "[", "_token", "]", "auth_info", "[", "'client_id'", "]", "=", "sinfo", "[", "'authn_req'", "]", "[", "'client_id'", "]", "else", ":", "try", ":", "_cinfo", "=", "endpoint_context", ".", "cdb", "[", "client_id", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "'Unknown Client ID'", ")", "else", ":", "if", "isinstance", "(", "_cinfo", ",", "str", ")", ":", "try", ":", "_cinfo", "=", "endpoint_context", ".", "cdb", "[", "_cinfo", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "'Unknown Client ID'", ")", "try", ":", "valid_client_info", "(", "_cinfo", ")", "except", "KeyError", ":", "logger", ".", "warning", "(", "'Client registration has timed out'", ")", "raise", "ValueError", "(", "'Not valid client'", ")", "else", ":", "# check that the expected authz method was used", "try", ":", "endpoint_context", ".", "cdb", "[", "client_id", "]", "[", "'auth_method'", "]", "[", "request", ".", "__class__", ".", "__name__", "]", "=", "auth_info", "[", "'method'", "]", "except", "KeyError", ":", "try", ":", "endpoint_context", ".", "cdb", "[", "client_id", "]", "[", "'auth_method'", "]", "=", "{", "request", ".", "__class__", ".", "__name__", ":", "auth_info", "[", "'method'", "]", "}", "except", "KeyError", ":", "pass", "return", "auth_info" ]
Initiated Guessing ! :param endpoint_context: SrvInfo instance :param request: The request :param authorization_info: Client authentication information :return: dictionary containing client id, client authentication method and possibly access token.
[ "Initiated", "Guessing", "!" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/client_authn.py#L194-L272
IdentityPython/oidcendpoint
src/oidcendpoint/oidc/refresh_token.py
RefreshAccessToken._post_parse_request
def _post_parse_request(self, request, client_id='', **kwargs): """ This is where clients come to refresh their access tokens :param request: The request :param client_id: The client identifier :returns: """ request = RefreshAccessTokenRequest(**request.to_dict()) try: keyjar = self.endpoint_context.keyjar except AttributeError: keyjar = "" request.verify(keyjar=keyjar, opponent_id=client_id) if "client_id" not in request: # Optional for refresh access token request request["client_id"] = client_id logger.debug("%s: %s" % (request.__class__.__name__, sanitize(request))) return request
python
def _post_parse_request(self, request, client_id='', **kwargs): """ This is where clients come to refresh their access tokens :param request: The request :param client_id: The client identifier :returns: """ request = RefreshAccessTokenRequest(**request.to_dict()) try: keyjar = self.endpoint_context.keyjar except AttributeError: keyjar = "" request.verify(keyjar=keyjar, opponent_id=client_id) if "client_id" not in request: # Optional for refresh access token request request["client_id"] = client_id logger.debug("%s: %s" % (request.__class__.__name__, sanitize(request))) return request
[ "def", "_post_parse_request", "(", "self", ",", "request", ",", "client_id", "=", "''", ",", "*", "*", "kwargs", ")", ":", "request", "=", "RefreshAccessTokenRequest", "(", "*", "*", "request", ".", "to_dict", "(", ")", ")", "try", ":", "keyjar", "=", "self", ".", "endpoint_context", ".", "keyjar", "except", "AttributeError", ":", "keyjar", "=", "\"\"", "request", ".", "verify", "(", "keyjar", "=", "keyjar", ",", "opponent_id", "=", "client_id", ")", "if", "\"client_id\"", "not", "in", "request", ":", "# Optional for refresh access token request", "request", "[", "\"client_id\"", "]", "=", "client_id", "logger", ".", "debug", "(", "\"%s: %s\"", "%", "(", "request", ".", "__class__", ".", "__name__", ",", "sanitize", "(", "request", ")", ")", ")", "return", "request" ]
This is where clients come to refresh their access tokens :param request: The request :param client_id: The client identifier :returns:
[ "This", "is", "where", "clients", "come", "to", "refresh", "their", "access", "tokens" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/oidc/refresh_token.py#L77-L100
volafiled/python-volapi
volapi/utils.py
random_id
def random_id(length): """Generates a random ID of given length""" def char(): """Generate single random char""" return random.choice(string.ascii_letters + string.digits) return "".join(char() for _ in range(length))
python
def random_id(length): """Generates a random ID of given length""" def char(): """Generate single random char""" return random.choice(string.ascii_letters + string.digits) return "".join(char() for _ in range(length))
[ "def", "random_id", "(", "length", ")", ":", "def", "char", "(", ")", ":", "\"\"\"Generate single random char\"\"\"", "return", "random", ".", "choice", "(", "string", ".", "ascii_letters", "+", "string", ".", "digits", ")", "return", "\"\"", ".", "join", "(", "char", "(", ")", "for", "_", "in", "range", "(", "length", ")", ")" ]
Generates a random ID of given length
[ "Generates", "a", "random", "ID", "of", "given", "length" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/utils.py#L39-L47
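The ID alphabet is the 62 characters [A-Za-z0-9], so an ID of length n covers 62**n values. Usage (output is random; the value shown is illustrative):

from volapi.utils import random_id

print(random_id(8))  # e.g. 'aZ3kQ9xP'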
volafiled/python-volapi
volapi/utils.py
delayed_close
def delayed_close(closable): """Delay close until this contextmanager dies""" close = getattr(closable, "close", None) if close: # we do not want the library to close file in case we need to # resume, hence make close a no-op # pylint: disable=unused-argument def replacement_close(*args, **kw): """ No op """ pass # pylint: enable=unused-argument setattr(closable, "close", replacement_close) try: yield closable finally: if close: setattr(closable, "close", close) closable.close()
python
def delayed_close(closable): """Delay close until this contextmanager dies""" close = getattr(closable, "close", None) if close: # we do not want the library to close file in case we need to # resume, hence make close a no-op # pylint: disable=unused-argument def replacement_close(*args, **kw): """ No op """ pass # pylint: enable=unused-argument setattr(closable, "close", replacement_close) try: yield closable finally: if close: setattr(closable, "close", close) closable.close()
[ "def", "delayed_close", "(", "closable", ")", ":", "close", "=", "getattr", "(", "closable", ",", "\"close\"", ",", "None", ")", "if", "close", ":", "# we do not want the library to close file in case we need to", "# resume, hence make close a no-op", "# pylint: disable=unused-argument", "def", "replacement_close", "(", "*", "args", ",", "*", "*", "kw", ")", ":", "\"\"\" No op \"\"\"", "pass", "# pylint: enable=unused-argument", "setattr", "(", "closable", ",", "\"close\"", ",", "replacement_close", ")", "try", ":", "yield", "closable", "finally", ":", "if", "close", ":", "setattr", "(", "closable", ",", "\"close\"", ",", "close", ")", "closable", ".", "close", "(", ")" ]
Delay close until this contextmanager dies
[ "Delay", "close", "until", "this", "contextmanager", "dies" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/utils.py#L63-L82
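Since delayed_close yields, it is presumably wrapped with contextlib.contextmanager in the source (the decorator sits outside the snippet). A runnable sketch with a toy closable, showing the no-op-then-real-close behaviour:

from volapi.utils import delayed_close

class Resource:
    closed = False

    def close(self):
        self.closed = True

res = Resource()
with delayed_close(res) as r:
    r.close()          # swallowed: close is a no-op inside the block
    print(res.closed)  # False
print(res.closed)      # True -- the captured close ran on exit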
IdentityPython/oidcendpoint
src/oidcendpoint/sso_db.py
SSODb.map_sid2uid
def map_sid2uid(self, sid, uid): """ Store the connection between a Session ID and a User ID :param sid: Session ID :param uid: User ID """ self.set('sid2uid', sid, uid) self.set('uid2sid', uid, sid)
python
def map_sid2uid(self, sid, uid): """ Store the connection between a Session ID and a User ID :param sid: Session ID :param uid: User ID """ self.set('sid2uid', sid, uid) self.set('uid2sid', uid, sid)
[ "def", "map_sid2uid", "(", "self", ",", "sid", ",", "uid", ")", ":", "self", ".", "set", "(", "'sid2uid'", ",", "sid", ",", "uid", ")", "self", ".", "set", "(", "'uid2sid'", ",", "uid", ",", "sid", ")" ]
Store the connection between a Session ID and a User ID :param sid: Session ID :param uid: User ID
[ "Store", "the", "connection", "between", "a", "Session", "ID", "and", "a", "User", "ID" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/sso_db.py#L52-L60
IdentityPython/oidcendpoint
src/oidcendpoint/sso_db.py
SSODb.map_sid2sub
def map_sid2sub(self, sid, sub): """ Store the connection between a Session ID and a subject ID. :param sid: Session ID :param sub: subject ID """ self.set('sid2sub', sid, sub) self.set('sub2sid', sub, sid)
python
def map_sid2sub(self, sid, sub): """ Store the connection between a Session ID and a subject ID. :param sid: Session ID :param sub: subject ID """ self.set('sid2sub', sid, sub) self.set('sub2sid', sub, sid)
[ "def", "map_sid2sub", "(", "self", ",", "sid", ",", "sub", ")", ":", "self", ".", "set", "(", "'sid2sub'", ",", "sid", ",", "sub", ")", "self", ".", "set", "(", "'sub2sid'", ",", "sub", ",", "sid", ")" ]
Store the connection between a Session ID and a subject ID. :param sid: Session ID :param sub: subject ID
[ "Store", "the", "connection", "between", "a", "Session", "ID", "and", "a", "subject", "ID", "." ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/sso_db.py#L62-L70
IdentityPython/oidcendpoint
src/oidcendpoint/sso_db.py
SSODb.get_subs_by_uid
def get_subs_by_uid(self, uid): """ Find all subject identifiers that are connected to a User ID. :param uid: A User ID :return: A set of subject identifiers """ res = set() for sid in self.get('uid2sid', uid): res |= set(self.get('sid2sub', sid)) return res
python
def get_subs_by_uid(self, uid): """ Find all subject identifiers that are connected to a User ID. :param uid: A User ID :return: A set of subject identifiers """ res = set() for sid in self.get('uid2sid', uid): res |= set(self.get('sid2sub', sid)) return res
[ "def", "get_subs_by_uid", "(", "self", ",", "uid", ")", ":", "res", "=", "set", "(", ")", "for", "sid", "in", "self", ".", "get", "(", "'uid2sid'", ",", "uid", ")", ":", "res", "|=", "set", "(", "self", ".", "get", "(", "'sid2sub'", ",", "sid", ")", ")", "return", "res" ]
Find all subject identifiers that are connected to a User ID. :param uid: A User ID :return: A set of subject identifiers
[ "Find", "all", "subject", "identifiers", "that", "is", "connected", "to", "a", "User", "ID", "." ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/sso_db.py#L104-L114
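Because map_sid2uid and map_sid2sub maintain both directions of each mapping, get_subs_by_uid can pivot through the session IDs. A hedged sketch (the user, subject and session IDs are invented; SSODb is assumed to construct with its default in-memory store):

from oidcendpoint.sso_db import SSODb

db = SSODb()
db.map_sid2uid('sid-1', 'diana')
db.map_sid2sub('sid-1', 'pairwise-sub-abc')
db.map_sid2uid('sid-2', 'diana')
db.map_sid2sub('sid-2', 'pairwise-sub-def')

print(db.get_subs_by_uid('diana'))
# {'pairwise-sub-abc', 'pairwise-sub-def'}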
IdentityPython/oidcendpoint
src/oidcendpoint/sso_db.py
SSODb.remove_sid2sub
def remove_sid2sub(self, sid, sub): """ Remove the connection between a session ID and a Subject :param sid: Session ID :param sub: Subject identifier """ self.remove('sub2sid', sub, sid) self.remove('sid2sub', sid, sub)
python
def remove_sid2sub(self, sid, sub): """ Remove the connection between a session ID and a Subject :param sid: Session ID :param sub: Subject identifier """ self.remove('sub2sid', sub, sid) self.remove('sid2sub', sid, sub)
[ "def", "remove_sid2sub", "(", "self", ",", "sid", ",", "sub", ")", ":", "self", ".", "remove", "(", "'sub2sid'", ",", "sub", ",", "sid", ")", "self", ".", "remove", "(", "'sid2sub'", ",", "sid", ",", "sub", ")" ]
Remove the connection between a session ID and a Subject :param sid: Session ID :param sub: Subject identifier
[ "Remove", "the", "connection", "between", "a", "session", "ID", "and", "a", "Subject" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/sso_db.py#L116-L124
IdentityPython/oidcendpoint
src/oidcendpoint/sso_db.py
SSODb.remove_sid2uid
def remove_sid2uid(self, sid, uid): """ Remove the connection between a session ID and a User ID :param sid: Session ID :param uid: User identifier """ self.remove('uid2sid', uid, sid) self.remove('sid2uid', sid, uid)
python
def remove_sid2uid(self, sid, uid): """ Remove the connection between a session ID and a User ID :param sid: Session ID :param uid: User identifier """ self.remove('uid2sid', uid, sid) self.remove('sid2uid', sid, uid)
[ "def", "remove_sid2uid", "(", "self", ",", "sid", ",", "uid", ")", ":", "self", ".", "remove", "(", "'uid2sid'", ",", "uid", ",", "sid", ")", "self", ".", "remove", "(", "'sid2uid'", ",", "sid", ",", "uid", ")" ]
Remove the connection between a session ID and a User ID :param sid: Session ID :param uid: User identifier
[ "Remove", "the", "connection", "between", "a", "session", "ID", "and", "a", "Subject" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/sso_db.py#L126-L134
IdentityPython/oidcendpoint
src/oidcendpoint/sso_db.py
SSODb.remove_session_id
def remove_session_id(self, sid): """ Remove all references to a specific Session ID :param sid: A Session ID """ for uid in self.get('sid2uid', sid): self.remove('uid2sid', uid, sid) self.delete('sid2uid', sid) for sub in self.get('sid2sub', sid): self.remove('sub2sid', sub, sid) self.delete('sid2sub', sid)
python
def remove_session_id(self, sid): """ Remove all references to a specific Session ID :param sid: A Session ID """ for uid in self.get('sid2uid', sid): self.remove('uid2sid', uid, sid) self.delete('sid2uid', sid) for sub in self.get('sid2sub', sid): self.remove('sub2sid', sub, sid) self.delete('sid2sub', sid)
[ "def", "remove_session_id", "(", "self", ",", "sid", ")", ":", "for", "uid", "in", "self", ".", "get", "(", "'sid2uid'", ",", "sid", ")", ":", "self", ".", "remove", "(", "'uid2sid'", ",", "uid", ",", "sid", ")", "self", ".", "delete", "(", "'sid2uid'", ",", "sid", ")", "for", "sub", "in", "self", ".", "get", "(", "'sid2sub'", ",", "sid", ")", ":", "self", ".", "remove", "(", "'sub2sid'", ",", "sub", ",", "sid", ")", "self", ".", "delete", "(", "'sid2sub'", ",", "sid", ")" ]
Remove all references to a specific Session ID :param sid: A Session ID
[ "Remove", "all", "references", "to", "a", "specific", "Session", "ID" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/sso_db.py#L136-L148
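A sketch of the cascade: removing a session ID clears both the forward maps keyed on the sid and the reverse entries that point back at it (assumptions as in the earlier SSODb sketches; the exact return value of get() for a deleted key is not shown in the source, so the check below is deliberately loose):

from oidcendpoint.sso_db import SSODb

db = SSODb()
db.map_sid2uid('sid-1', 'alice')
db.map_sid2sub('sid-1', 'sub-rp1')
db.remove_session_id('sid-1')
# the sid no longer appears in alice's reverse index
assert 'sid-1' not in (db.get('uid2sid', 'alice') or [])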
IdentityPython/oidcendpoint
src/oidcendpoint/sso_db.py
SSODb.remove_uid
def remove_uid(self, uid): """ Remove all references to a specific User ID :param uid: A User ID """ for sid in self.get('uid2sid', uid): self.remove('sid2uid', sid, uid) self.delete('uid2sid', uid)
python
def remove_uid(self, uid): """ Remove all references to a specific User ID :param uid: A User ID """ for sid in self.get('uid2sid', uid): self.remove('sid2uid', sid, uid) self.delete('uid2sid', uid)
[ "def", "remove_uid", "(", "self", ",", "uid", ")", ":", "for", "sid", "in", "self", ".", "get", "(", "'uid2sid'", ",", "uid", ")", ":", "self", ".", "remove", "(", "'sid2uid'", ",", "sid", ",", "uid", ")", "self", ".", "delete", "(", "'uid2sid'", ",", "uid", ")" ]
Remove all references to a specific User ID :param uid: A User ID
[ "Remove", "all", "references", "to", "a", "specific", "User", "ID" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/sso_db.py#L150-L158
IdentityPython/oidcendpoint
src/oidcendpoint/sso_db.py
SSODb.remove_sub
def remove_sub(self, sub): """ Remove all references to a specific Subject ID :param sub: A Subject ID """ for _sid in self.get('sub2sid', sub): self.remove('sid2sub', _sid, sub) self.delete('sub2sid', sub)
python
def remove_sub(self, sub): """ Remove all references to a specific Subject ID :param sub: A Subject ID """ for _sid in self.get('sub2sid', sub): self.remove('sid2sub', _sid, sub) self.delete('sub2sid', sub)
[ "def", "remove_sub", "(", "self", ",", "sub", ")", ":", "for", "_sid", "in", "self", ".", "get", "(", "'sub2sid'", ",", "sub", ")", ":", "self", ".", "remove", "(", "'sid2sub'", ",", "_sid", ",", "sub", ")", "self", ".", "delete", "(", "'sub2sid'", ",", "sub", ")" ]
Remove all references to a specific Subject ID :param sub: A Subject ID
[ "Remove", "all", "references", "to", "a", "specific", "Subject", "ID" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/sso_db.py#L160-L168
BeyondTheClouds/enoslib
docs/tutorials/using-tasks/step2.py
up
def up(force=True, env=None, **kwargs): "Starts a new experiment" inventory = os.path.join(os.getcwd(), "hosts") conf = Configuration.from_dictionnary(provider_conf) provider = Enos_vagrant(conf) roles, networks = provider.init() check_networks(roles, networks) env["roles"] = roles env["networks"] = networks
python
def up(force=True, env=None, **kwargs): "Starts a new experiment" inventory = os.path.join(os.getcwd(), "hosts") conf = Configuration.from_dictionnary(provider_conf) provider = Enos_vagrant(conf) roles, networks = provider.init() check_networks(roles, networks) env["roles"] = roles env["networks"] = networks
[ "def", "up", "(", "force", "=", "True", ",", "env", "=", "None", ",", "*", "*", "kwargs", ")", ":", "inventory", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "\"hosts\"", ")", "conf", "=", "Configuration", ".", "from_dictionnary", "(", "provider_conf", ")", "provider", "=", "Enos_vagrant", "(", "conf", ")", "roles", ",", "networks", "=", "provider", ".", "init", "(", ")", "check_networks", "(", "roles", ",", "networks", ")", "env", "[", "\"roles\"", "]", "=", "roles", "env", "[", "\"networks\"", "]", "=", "networks" ]
Starts a new experiment
[ "Starts", "a", "new", "experiment" ]
train
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/docs/tutorials/using-tasks/step2.py#L32-L40
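A hedged sketch of driving the task above from the same script; it assumes provider_conf is defined earlier in step2.py, as in the tutorial, and that Vagrant is available locally:

env = {}           # enoslib tasks share state through this dict
up(env=env)        # reserves the boxes and records roles/networks
print(env['roles'])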
BreakingBytes/simkit
simkit/core/__init__.py
convert_args
def convert_args(test_fcn, *test_args): """ Decorator to be used in formulas to convert ``test_args`` depending on the ``test_fcn``. :param test_fcn: A test function that converts arguments. :type test_fcn: function :param test_args: Names of args to convert using ``test_fcn``. :type test_args: str The following test functions are available. * :func:`dimensionless_to_index` Example: Convert ``dawn_idx`` and ``eve_idx`` to indices:: @convert_args(dimensionless_to_index, 'dawn_idx', 'eve_idx') def f_max_T(Tcell24, dawn_idx, eve_idx): idx = dawn_idx + np.argmax(Tcell24[dawn_idx:eve_idx]) return Tcell24[idx], idx """ def wrapper(origfcn): @functools.wraps(origfcn) def newfcn(*args, **kwargs): argspec = getargspec(origfcn) # use ``inspect`` to get arg names kwargs.update(zip(argspec.args, args)) # convert args to kw # loop over test args for a in test_args: # convert a if it's in args if a in argspec.args: kwargs[a] = test_fcn(kwargs[a]) # update kwargs # call original function with converted args return origfcn(**kwargs) # return wrapped function return newfcn # return the wrapper function that consumes the original function return wrapper
python
def convert_args(test_fcn, *test_args): """ Decorator to be used in formulas to convert ``test_args`` depending on the ``test_fcn``. :param test_fcn: A test function that converts arguments. :type test_fcn: function :param test_args: Names of args to convert using ``test_fcn``. :type test_args: str The following test functions are available. * :func:`dimensionless_to_index` Example: Convert ``dawn_idx`` and ``eve_idx`` to indices:: @convert_args(dimensionless_to_index, 'dawn_idx', 'eve_idx') def f_max_T(Tcell24, dawn_idx, eve_idx): idx = dawn_idx + np.argmax(Tcell24[dawn_idx:eve_idx]) return Tcell24[idx], idx """ def wrapper(origfcn): @functools.wraps(origfcn) def newfcn(*args, **kwargs): argspec = getargspec(origfcn) # use ``inspect`` to get arg names kwargs.update(zip(argspec.args, args)) # convert args to kw # loop over test args for a in test_args: # convert a if it's in args if a in argspec.args: kwargs[a] = test_fcn(kwargs[a]) # update kwargs # call original function with converted args return origfcn(**kwargs) # return wrapped function return newfcn # return the wrapper function that consumes the original function return wrapper
[ "def", "convert_args", "(", "test_fcn", ",", "*", "test_args", ")", ":", "def", "wrapper", "(", "origfcn", ")", ":", "@", "functools", ".", "wraps", "(", "origfcn", ")", "def", "newfcn", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "argspec", "=", "getargspec", "(", "origfcn", ")", "# use ``inspect`` to get arg names", "kwargs", ".", "update", "(", "zip", "(", "argspec", ".", "args", ",", "args", ")", ")", "# convert args to kw", "# loop over test args", "for", "a", "in", "test_args", ":", "# convert a if it's in args", "if", "a", "in", "argspec", ".", "args", ":", "kwargs", "[", "a", "]", "=", "test_fcn", "(", "kwargs", "[", "a", "]", ")", "# update kwargs", "# call original function with converted args", "return", "origfcn", "(", "*", "*", "kwargs", ")", "# return wrapped function", "return", "newfcn", "# return the wrapper function that consumes the original function", "return", "wrapper" ]
Decorator to be used in formulas to convert ``test_args`` depending on the ``test_fcn``. :param test_fcn: A test function that converts arguments. :type test_fcn: function :param test_args: Names of args to convert using ``test_fcn``. :type test_args: str The following test functions are available. * :func:`dimensionless_to_index` Example: Convert ``dawn_idx`` and ``eve_idx`` to indices:: @convert_args(dimensionless_to_index, 'dawn_idx', 'eve_idx') def f_max_T(Tcell24, dawn_idx, eve_idx): idx = dawn_idx + np.argmax(Tcell24[dawn_idx:eve_idx]) return Tcell24[idx], idx
[ "Decorator", "to", "be", "using", "in", "formulas", "to", "convert", "test_args", "depending", "on", "the", "test_fcn", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/__init__.py#L157-L192
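To see the decorator in action, a short self-contained demo with a hypothetical converter (to_index and pick are illustrative names, not part of simkit):

from simkit.core import convert_args

def to_index(value):
    # hypothetical converter for the demo: coerce a float to an int index
    return int(round(value))

@convert_args(to_index, 'idx')
def pick(values, idx):
    return values[idx]

print(pick([10, 20, 30], 1.2))  # 'idx' becomes to_index(1.2) == 1, so this prints 20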
BreakingBytes/simkit
simkit/core/__init__.py
get_public_attributes
def get_public_attributes(cls, as_list=True): """ Return class attributes that are neither private nor magic. :param cls: class :param as_list: [True] set to False to return generator :return: only public attributes of class """ attrs = (a for a in dir(cls) if not a.startswith('_')) if as_list: return list(attrs) return attrs
python
def get_public_attributes(cls, as_list=True): """ Return class attributes that are neither private nor magic. :param cls: class :param as_list: [True] set to False to return generator :return: only public attributes of class """ attrs = (a for a in dir(cls) if not a.startswith('_')) if as_list: return list(attrs) return attrs
[ "def", "get_public_attributes", "(", "cls", ",", "as_list", "=", "True", ")", ":", "attrs", "=", "(", "a", "for", "a", "in", "dir", "(", "cls", ")", "if", "not", "a", ".", "startswith", "(", "'_'", ")", ")", "if", "as_list", ":", "return", "list", "(", "attrs", ")", "return", "attrs" ]
Return class attributes that are neither private nor magic. :param cls: class :param as_list: [True] set to False to return generator :return: only public attributes of class
[ "Return", "class", "attributes", "that", "are", "neither", "private", "nor", "magic", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/__init__.py#L226-L237
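A quick illustrative check of the filter (Widget is a throwaway class for the demo):

from simkit.core import get_public_attributes

class Widget(object):
    color = 'blue'
    _hidden = True
    def spin(self):
        pass

attrs = get_public_attributes(Widget)
assert 'color' in attrs and 'spin' in attrs  # public names survive
assert '_hidden' not in attrs                # leading-underscore names are dropped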
BreakingBytes/simkit
simkit/core/__init__.py
Registry.register
def register(self, newitems, *args, **kwargs): """ Register newitems in registry. :param newitems: New items to add to registry. When registering new items, keys are not allowed to override existing keys in the registry. :type newitems: mapping :param args: Positional arguments with meta data corresponding to order of meta names class attributes :param kwargs: Maps of corresponding meta for new keys. Each set of meta keys must be a subset of the new item keys. :raises: :exc:`~simkit.core.exceptions.DuplicateRegItemError`, :exc:`~simkit.core.exceptions.MismatchRegMetaKeysError` """ newkeys = newitems.viewkeys() # set of the new item keys if any(self.viewkeys() & newkeys): # duplicates raise DuplicateRegItemError(self.viewkeys() & newkeys) self.update(newitems) # register new item # update meta fields kwargs.update(zip(self.meta_names, args)) for k, v in kwargs.iteritems(): meta = getattr(self, k) # get the meta attribute if v: if not v.viewkeys() <= newkeys: raise MismatchRegMetaKeysError(newkeys - v.viewkeys()) meta.update(v)
python
def register(self, newitems, *args, **kwargs): """ Register newitems in registry. :param newitems: New items to add to registry. When registering new items, keys are not allowed to override existing keys in the registry. :type newitems: mapping :param args: Positional arguments with meta data corresponding to order of meta names class attributes :param kwargs: Maps of corresponding meta for new keys. Each set of meta keys must be a subset of the new item keys. :raises: :exc:`~simkit.core.exceptions.DuplicateRegItemError`, :exc:`~simkit.core.exceptions.MismatchRegMetaKeysError` """ newkeys = newitems.viewkeys() # set of the new item keys if any(self.viewkeys() & newkeys): # duplicates raise DuplicateRegItemError(self.viewkeys() & newkeys) self.update(newitems) # register new item # update meta fields kwargs.update(zip(self.meta_names, args)) for k, v in kwargs.iteritems(): meta = getattr(self, k) # get the meta attribute if v: if not v.viewkeys() <= newkeys: raise MismatchRegMetaKeysError(newkeys - v.viewkeys()) meta.update(v)
[ "def", "register", "(", "self", ",", "newitems", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "newkeys", "=", "newitems", ".", "viewkeys", "(", ")", "# set of the new item keys", "if", "any", "(", "self", ".", "viewkeys", "(", ")", "&", "newkeys", ")", ":", "# duplicates", "raise", "DuplicateRegItemError", "(", "self", ".", "viewkeys", "(", ")", "&", "newkeys", ")", "self", ".", "update", "(", "newitems", ")", "# register new item", "# update meta fields", "kwargs", ".", "update", "(", "zip", "(", "self", ".", "meta_names", ",", "args", ")", ")", "for", "k", ",", "v", "in", "kwargs", ".", "iteritems", "(", ")", ":", "meta", "=", "getattr", "(", "self", ",", "k", ")", "# get the meta attribute", "if", "v", ":", "if", "not", "v", ".", "viewkeys", "(", ")", "<=", "newkeys", ":", "raise", "MismatchRegMetaKeysError", "(", "newkeys", "-", "v", ".", "viewkeys", "(", ")", ")", "meta", ".", "update", "(", "v", ")" ]
Register newitems in registry. :param newitems: New items to add to registry. When registering new items, keys are not allowed to override existing keys in the registry. :type newitems: mapping :param args: Positional arguments with meta data corresponding to order of meta names class attributes :param kwargs: Maps of corresponding meta for new keys. Each set of meta keys must be a subset of the new item keys. :raises: :exc:`~simkit.core.exceptions.DuplicateRegItemError`, :exc:`~simkit.core.exceptions.MismatchRegMetaKeysError`
[ "Register", "newitems", "in", "registry", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/__init__.py#L103-L130
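A sketch of registering items with one meta field; it assumes a Registry subclass declares meta_names and initializes a dict attribute per meta name, as simkit's concrete registries appear to do (ToyRegistry is illustrative only):

from simkit.core import Registry

class ToyRegistry(Registry):
    meta_names = ['units']

    def __init__(self):
        super(ToyRegistry, self).__init__()
        self.units = {}  # one mapping per meta name

reg = ToyRegistry()
reg.register({'length': 42}, units={'length': 'm'})
print('%s %s' % (reg['length'], reg.units['length']))  # 42 m
# registering 'length' again would raise DuplicateRegItemError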
BreakingBytes/simkit
simkit/core/__init__.py
Registry.unregister
def unregister(self, items): """ Remove items from registry. :param items: """ items = _listify(items) # get all members of Registry except private, special or class meta_names = (m for m in vars(self).iterkeys() if (not m.startswith('_') and m not in dir(Registry))) # check that meta names match # FIXME: this is so lame. replace this with something more robust for m in meta_names: if m not in self.meta_names: raise AttributeError('Meta name %s not listed.' % m) # pop items from Registry and from meta for it in items: if it in self: self.pop(it) for m in (getattr(self, m_) for m_ in self.meta_names): if it in m: m.pop(it)
python
def unregister(self, items): """ Remove items from registry. :param items: """ items = _listify(items) # get all members of Registry except private, special or class meta_names = (m for m in vars(self).iterkeys() if (not m.startswith('_') and m not in dir(Registry))) # check that meta names match # FIXME: this is so lame. replace this with something more robust for m in meta_names: if m not in self.meta_names: raise AttributeError('Meta name %s not listed.' % m) # pop items from Registry and from meta for it in items: if it in self: self.pop(it) for m in (getattr(self, m_) for m_ in self.meta_names): if it in m: m.pop(it)
[ "def", "unregister", "(", "self", ",", "items", ")", ":", "items", "=", "_listify", "(", "items", ")", "# get all members of Registry except private, special or class", "meta_names", "=", "(", "m", "for", "m", "in", "vars", "(", "self", ")", ".", "iterkeys", "(", ")", "if", "(", "not", "m", ".", "startswith", "(", "'_'", ")", "and", "m", "not", "in", "dir", "(", "Registry", ")", ")", ")", "# check that meta names matches", "# FIXME: this is so lame. replace this with something more robust", "for", "m", "in", "meta_names", ":", "if", "m", "not", "in", "self", ".", "meta_names", ":", "raise", "AttributeError", "(", "'Meta name %s not listed.'", ")", "# pop items from Registry and from meta", "for", "it", "in", "items", ":", "if", "it", "in", "self", ":", "self", ".", "pop", "(", "it", ")", "for", "m", "in", "(", "getattr", "(", "self", ",", "m_", ")", "for", "m_", "in", "self", ".", "meta_names", ")", ":", "if", "it", "in", "m", ":", "m", ".", "pop", "(", "it", ")" ]
Remove items from registry. :param items:
[ "Remove", "items", "from", "registry", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/__init__.py#L132-L153
BreakingBytes/simkit
simkit/core/__init__.py
SimKitJSONEncoder.default
def default(self, o): """ JSONEncoder default method that converts NumPy arrays and quantities objects to lists. """ if isinstance(o, Q_): return o.magnitude elif isinstance(o, np.ndarray): return o.tolist() else: # raise TypeError if not serializable return super(SimKitJSONEncoder, self).default(o)
python
def default(self, o): """ JSONEncoder default method that converts NumPy arrays and quantities objects to lists. """ if isinstance(o, Q_): return o.magnitude elif isinstance(o, np.ndarray): return o.tolist() else: # raise TypeError if not serializable return super(SimKitJSONEncoder, self).default(o)
[ "def", "default", "(", "self", ",", "o", ")", ":", "if", "isinstance", "(", "o", ",", "Q_", ")", ":", "return", "o", ".", "magnitude", "elif", "isinstance", "(", "o", ",", "np", ".", "ndarray", ")", ":", "return", "o", ".", "tolist", "(", ")", "else", ":", "# raise TypeError if not serializable", "return", "super", "(", "SimKitJSONEncoder", ",", "self", ")", ".", "default", "(", "o", ")" ]
JSONEncoder default method that converts NumPy arrays and quantities objects to lists.
[ "JSONEncoder", "default", "method", "that", "converts", "NumPy", "arrays", "and", "quantities", "objects", "to", "lists", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/__init__.py#L212-L223
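A hedged round-trip showing the encoder handling both types; UREG is assumed to be the shared pint unit registry exported by simkit.core alongside Q_:

import json
import numpy as np
from simkit.core import SimKitJSONEncoder, UREG  # the UREG import is an assumption

data = {'angles': np.arange(3), 'power': 5.0 * UREG.watt}
print(json.dumps(data, cls=SimKitJSONEncoder, sort_keys=True))
# -> {"angles": [0, 1, 2], "power": 5.0}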
BreakingBytes/simkit
simkit/core/__init__.py
CommonBase.set_meta
def set_meta(mcs, bases, attr): """ Get all of the ``Meta`` classes from bases and combine them with this class. Pops or creates ``Meta`` from attributes, combines all bases, adds ``_meta`` to attributes with all meta :param bases: bases of this class :param attr: class attributes :return: attributes with ``Meta`` class from combined parents """ # pop the meta class from the attributes meta = attr.pop(mcs._meta_cls, types.ClassType(mcs._meta_cls, (), {})) # get a list of the meta public class attributes meta_attrs = get_public_attributes(meta) # check all bases for meta for base in bases: base_meta = getattr(base, mcs._meta_cls, None) # skip if base has no meta if base_meta is None: continue # loop over base meta for a in get_public_attributes(base_meta, as_list=False): # skip if already in meta if a in meta_attrs: continue # copy meta-option attribute from base setattr(meta, a, getattr(base_meta, a)) attr[mcs._meta_attr] = meta # set _meta combined from bases return attr
python
def set_meta(mcs, bases, attr): """ Get all of the ``Meta`` classes from bases and combine them with this class. Pops or creates ``Meta`` from attributes, combines all bases, adds ``_meta`` to attributes with all meta :param bases: bases of this class :param attr: class attributes :return: attributes with ``Meta`` class from combined parents """ # pop the meta class from the attributes meta = attr.pop(mcs._meta_cls, types.ClassType(mcs._meta_cls, (), {})) # get a list of the meta public class attributes meta_attrs = get_public_attributes(meta) # check all bases for meta for base in bases: base_meta = getattr(base, mcs._meta_cls, None) # skip if base has no meta if base_meta is None: continue # loop over base meta for a in get_public_attributes(base_meta, as_list=False): # skip if already in meta if a in meta_attrs: continue # copy meta-option attribute from base setattr(meta, a, getattr(base_meta, a)) attr[mcs._meta_attr] = meta # set _meta combined from bases return attr
[ "def", "set_meta", "(", "mcs", ",", "bases", ",", "attr", ")", ":", "# pop the meta class from the attributes", "meta", "=", "attr", ".", "pop", "(", "mcs", ".", "_meta_cls", ",", "types", ".", "ClassType", "(", "mcs", ".", "_meta_cls", ",", "(", ")", ",", "{", "}", ")", ")", "# get a list of the meta public class attributes", "meta_attrs", "=", "get_public_attributes", "(", "meta", ")", "# check all bases for meta", "for", "base", "in", "bases", ":", "base_meta", "=", "getattr", "(", "base", ",", "mcs", ".", "_meta_cls", ",", "None", ")", "# skip if base has no meta", "if", "base_meta", "is", "None", ":", "continue", "# loop over base meta", "for", "a", "in", "get_public_attributes", "(", "base_meta", ",", "as_list", "=", "False", ")", ":", "# skip if already in meta", "if", "a", "in", "meta_attrs", ":", "continue", "# copy meta-option attribute from base", "setattr", "(", "meta", ",", "a", ",", "getattr", "(", "base_meta", ",", "a", ")", ")", "attr", "[", "mcs", ".", "_meta_attr", "]", "=", "meta", "# set _meta combined from bases", "return", "attr" ]
Get all of the ``Meta`` classes from bases and combine them with this class. Pops or creates ``Meta`` from attributes, combines all bases, adds ``_meta`` to attributes with all meta :param bases: bases of this class :param attr: class attributes :return: attributes with ``Meta`` class from combined parents
[ "Get", "all", "of", "the", "Meta", "classes", "from", "bases", "and", "combine", "them", "with", "this", "class", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/__init__.py#L267-L297
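To make the merge rule concrete, a sketch that calls the helper directly, assuming it is exposed as a classmethod and that mcs._meta_cls and mcs._meta_attr resolve to 'Meta' and '_meta' respectively (consistent with how the code above reads them, but not confirmed here):

import types
from simkit.core import CommonBase

class Parent(object):
    class Meta:
        units = 'SI'  # option that the child should inherit

attr = {'Meta': types.ClassType('Meta', (), {'frequency': 60})}
attr = CommonBase.set_meta((Parent,), attr)
# the combined meta carries the child's option plus the parent's
print('%s %s' % (attr['_meta'].frequency, attr['_meta'].units))  # 60 SI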
BreakingBytes/simkit
simkit/core/__init__.py
CommonBase.set_param_file_or_parameters
def set_param_file_or_parameters(mcs, attr): """ Set parameters from class attributes that are instances of :class:`~simkit.core.Parameter` or from a parameter file. Any class attributes that are instances of :class:`~simkit.core.Parameter` are popped from the class and added to the ``parameters`` attribute, which is a dictionary of the parameters. :param attr: class attributes :return: new list of class attributes with parameters """ meta = attr[mcs._meta_attr] # look for parameter file path in meta cls_path = getattr(meta, mcs._path_attr, None) cls_file = getattr(meta, mcs._file_attr, None) # read parameters attr[mcs._param_attr] = {} attr[mcs._param_file] = None # read parameters from file if None not in [cls_path, cls_file]: param_file = os.path.join(cls_path, cls_file) attr[mcs._param_file] = param_file # read and load JSON parameter map file as "parameters" with open(param_file, 'r') as param_file: file_params = json.load(param_file) # update meta from file for k, v in file_params.pop(mcs._meta_cls, {}).iteritems(): setattr(meta, k, v) # dictionary of parameters for reading source file attr[mcs._param_attr] = { k: mcs._param_cls(**v) for k, v in file_params.iteritems() } # get parameters from class parameters = dict.fromkeys( k for k, v in attr.iteritems() if isinstance(v, Parameter) ) # update parameters for k in parameters: attr[mcs._param_attr][k] = attr.pop(k) return attr
python
def set_param_file_or_parameters(mcs, attr): """ Set parameters from class attributes that are instances of :class:`~simkit.core.Parameter` or from a parameter file. Any class attributes that are instances of :class:`~simkit.core.Parameter` are popped from the class and added to the ``parameters`` attribute, which is a dictionary of the parameters. :param attr: class attributes :return: new list of class attributes with parameters """ meta = attr[mcs._meta_attr] # look for parameter file path in meta cls_path = getattr(meta, mcs._path_attr, None) cls_file = getattr(meta, mcs._file_attr, None) # read parameters attr[mcs._param_attr] = {} attr[mcs._param_file] = None # read parameters from file if None not in [cls_path, cls_file]: param_file = os.path.join(cls_path, cls_file) attr[mcs._param_file] = param_file # read and load JSON parameter map file as "parameters" with open(param_file, 'r') as param_file: file_params = json.load(param_file) # update meta from file for k, v in file_params.pop(mcs._meta_cls, {}).iteritems(): setattr(meta, k, v) # dictionary of parameters for reading source file attr[mcs._param_attr] = { k: mcs._param_cls(**v) for k, v in file_params.iteritems() } # get parameters from class parameters = dict.fromkeys( k for k, v in attr.iteritems() if isinstance(v, Parameter) ) # update parameters for k in parameters: attr[mcs._param_attr][k] = attr.pop(k) return attr
[ "def", "set_param_file_or_parameters", "(", "mcs", ",", "attr", ")", ":", "meta", "=", "attr", "[", "mcs", ".", "_meta_attr", "]", "# look for parameter file path in meta", "cls_path", "=", "getattr", "(", "meta", ",", "mcs", ".", "_path_attr", ",", "None", ")", "cls_file", "=", "getattr", "(", "meta", ",", "mcs", ".", "_file_attr", ",", "None", ")", "# read parameters", "attr", "[", "mcs", ".", "_param_attr", "]", "=", "{", "}", "attr", "[", "mcs", ".", "_param_file", "]", "=", "None", "# read parameters from file", "if", "None", "not", "in", "[", "cls_path", ",", "cls_file", "]", ":", "param_file", "=", "os", ".", "path", ".", "join", "(", "cls_path", ",", "cls_file", ")", "attr", "[", "mcs", ".", "_param_file", "]", "=", "param_file", "# read and load JSON parameter map file as \"parameters\"", "with", "open", "(", "param_file", ",", "'r'", ")", "as", "param_file", ":", "file_params", "=", "json", ".", "load", "(", "param_file", ")", "# update meta from file", "for", "k", ",", "v", "in", "file_params", ".", "pop", "(", "mcs", ".", "_meta_cls", ",", "{", "}", ")", ".", "iteritems", "(", ")", ":", "setattr", "(", "meta", ",", "k", ",", "v", ")", "# dictionary of parameters for reading source file", "attr", "[", "mcs", ".", "_param_attr", "]", "=", "{", "k", ":", "mcs", ".", "_param_cls", "(", "*", "*", "v", ")", "for", "k", ",", "v", "in", "file_params", ".", "iteritems", "(", ")", "}", "# get parameters from class", "parameters", "=", "dict", ".", "fromkeys", "(", "k", "for", "k", ",", "v", "in", "attr", ".", "iteritems", "(", ")", "if", "isinstance", "(", "v", ",", "Parameter", ")", ")", "# update parameters", "for", "k", "in", "parameters", ":", "attr", "[", "mcs", ".", "_param_attr", "]", "[", "k", "]", "=", "attr", ".", "pop", "(", "k", ")", "return", "attr" ]
Set parameters from class attributes that are instances of :class:`~simkit.core.Parameter` or from a parameter file. Any class attributes that are instances of :class:`~simkit.core.Parameter` are popped from the class and added to the ``parameters`` attribute, which is a dictionary of the parameters. :param attr: class attributes :return: new list of class attributes with parameters
[ "Set", "parameters", "from", "class", "attributes", "that", "are", "instances", "of", ":", "class", ":", "~simkit", ".", "core", ".", "Parameter", "or", "from", "a", "parameter", "file", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/__init__.py#L300-L339
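A sketch of the class-attribute route through both helpers; it assumes Parameter accepts arbitrary keyword meta (as the Parameter(**v) call above implies), that the parameters dict lands under the key 'parameters', and that the classmethods can be invoked directly:

from simkit.core import CommonBase, Parameter

attr = {'latitude': Parameter(units='degrees')}  # Parameter instance as class attr
attr = CommonBase.set_meta((), attr)             # builds the _meta the next step reads
attr = CommonBase.set_param_file_or_parameters(attr)
print(attr['parameters'])  # {'latitude': <Parameter ...>} -- popped off the class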
IdentityPython/oidcendpoint
src/oidcendpoint/user_authn/user.py
factory
def factory(cls, **kwargs): """ Factory method that can be used to easily instantiate a class instance :param cls: The name of the class :param kwargs: Keyword arguments :return: An instance of the class or None if the name doesn't match any known class. """ for name, obj in inspect.getmembers(sys.modules[__name__]): if inspect.isclass(obj) and issubclass(obj, UserAuthnMethod): try: if obj.__name__ == cls: return obj(**kwargs) except AttributeError: pass
python
def factory(cls, **kwargs): """ Factory method that can be used to easily instantiate a class instance :param cls: The name of the class :param kwargs: Keyword arguments :return: An instance of the class or None if the name doesn't match any known class. """ for name, obj in inspect.getmembers(sys.modules[__name__]): if inspect.isclass(obj) and issubclass(obj, UserAuthnMethod): try: if obj.__name__ == cls: return obj(**kwargs) except AttributeError: pass
[ "def", "factory", "(", "cls", ",", "*", "*", "kwargs", ")", ":", "for", "name", ",", "obj", "in", "inspect", ".", "getmembers", "(", "sys", ".", "modules", "[", "__name__", "]", ")", ":", "if", "inspect", ".", "isclass", "(", "obj", ")", "and", "issubclass", "(", "obj", ",", "UserAuthnMethod", ")", ":", "try", ":", "if", "obj", ".", "__name__", "==", "cls", ":", "return", "obj", "(", "*", "*", "kwargs", ")", "except", "AttributeError", ":", "pass" ]
Factory method that can be used to easily instantiate a class instance :param cls: The name of the class :param kwargs: Keyword arguments :return: An instance of the class or None if the name doesn't match any known class.
[ "Factory", "method", "that", "can", "be", "used", "to", "easily", "instantiate", "a", "class", "instance" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/user_authn/user.py#L301-L316
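A short usage sketch; NoAuthn and its user keyword are assumptions about oidcendpoint's built-in methods, so substitute whichever UserAuthnMethod subclass your deployment defines:

from oidcendpoint.user_authn.user import factory

method = factory('NoAuthn', user='diana')  # name matched -> instance returned
assert method is not None
assert factory('NoSuchMethod') is None     # no match falls through to None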
BreakingBytes/simkit
simkit/core/formulas.py
FormulaRegistry.register
def register(self, new_formulas, *args, **kwargs): """ Register formula and meta data. * ``islinear`` - ``True`` if formula is linear, ``False`` if non-linear. * ``args`` - position of arguments * ``units`` - units of returns and arguments as pair of tuples * ``isconstant`` - constant arguments not included in covariance :param new_formulas: new formulas to add to registry. """ kwargs.update(zip(self.meta_names, args)) # call super method, meta must be passed as kwargs! super(FormulaRegistry, self).register(new_formulas, **kwargs)
python
def register(self, new_formulas, *args, **kwargs): """ Register formula and meta data. * ``islinear`` - ``True`` if formula is linear, ``False`` if non-linear. * ``args`` - position of arguments * ``units`` - units of returns and arguments as pair of tuples * ``isconstant`` - constant arguments not included in covariance :param new_formulas: new formulas to add to registry. """ kwargs.update(zip(self.meta_names, args)) # call super method, meta must be passed as kwargs! super(FormulaRegistry, self).register(new_formulas, **kwargs)
[ "def", "register", "(", "self", ",", "new_formulas", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "kwargs", ".", "update", "(", "zip", "(", "self", ".", "meta_names", ",", "args", ")", ")", "# call super method, meta must be passed as kwargs!", "super", "(", "FormulaRegistry", ",", "self", ")", ".", "register", "(", "new_formulas", ",", "*", "*", "kwargs", ")" ]
Register formula and meta data. * ``islinear`` - ``True`` if formula is linear, ``False`` if non-linear. * ``args`` - position of arguments * ``units`` - units of returns and arguments as pair of tuples * ``isconstant`` - constant arguments not included in covariance :param new_formulas: new formulas to add to registry.
[ "Register", "formula", "and", "meta", "data", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/formulas.py#L35-L48
BreakingBytes/simkit
simkit/core/formulas.py
PyModuleImporter.import_formulas
def import_formulas(self): """ Import formulas specified in :attr:`parameters`. :returns: formulas :rtype: dict """ # TODO: unit tests! # TODO: move this to somewhere else and call it "importy", maybe # core.__init__.py since a lot of modules might use it. module = self.meta.module # module read from parameters package = getattr(self.meta, 'package', None) # package read from meta name = package + module if package else module # concat pkg + name path = getattr(self.meta, 'path', None) # path read from parameters # import module using module and package mod = None # SEE ALSO: http://docs.python.org/2/library/imp.html#examples try: # fast path: see if module was already imported mod = sys.modules[name] except KeyError: try: # import module specified in parameters mod = importlib.import_module(module, package) except ImportError as err: if not path: msg = ('%s could not be imported either because it was not ' 'on the PYTHONPATH or path was not given.') LOGGER.exception(msg, name) raise err else: # import module using path # expand ~, environmental variables and make path absolute if not os.path.isabs(path): path = os.path.expanduser(os.path.expandvars(path)) path = os.path.abspath(path) # paths must be a list paths = [path] # imp does not find hierarchical module names, find and load # packages recursively, then load module, see last paragraph # https://docs.python.org/2/library/imp.html#imp.find_module pname = '' # full dotted name of package to load # traverse namespace while name: # if dot in name get first package if '.' in name: pkg, name = name.split('.', 1) else: pkg, name = name, None # pkg is the module # Find package or module by name and path fp, filename, desc = imp.find_module(pkg, paths) # full dotted name of package to load pname = pkg if not pname else '%s.%s' % (pname, pkg) LOGGER.debug('package name: %s', pname) # try to load the package or module try: mod = imp.load_module(pname, fp, filename, desc) finally: if fp: fp.close() # append package paths for imp.find_module if name: paths = mod.__path__ formulas = {} # an empty list of formulas formula_param = self.parameters # formulas key # FYI: iterating over dictionary is equivalent to iterkeys() if isinstance(formula_param, (list, tuple, dict)): # iterate through formulas for f in formula_param: formulas[f] = getattr(mod, f) elif isinstance(formula_param, basestring): # only one formula # FYI: use basestring to test for str and unicode # SEE: http://docs.python.org/2/library/functions.html#basestring formulas[formula_param] = getattr(mod, formula_param) else: # autodetect formulas assuming first letter is f formulas = {f: getattr(mod, f) for f in dir(mod) if f[:2] == 'f_'} if not len(formulas): for f in dir(mod): mod_attr = getattr(mod, f) if inspect.isfunction(mod_attr): formulas[f] = mod_attr return formulas
python
def import_formulas(self): """ Import formulas specified in :attr:`parameters`. :returns: formulas :rtype: dict """ # TODO: unit tests! # TODO: move this to somewhere else and call it "importy", maybe # core.__init__.py since a lot of modules might use it. module = self.meta.module # module read from parameters package = getattr(self.meta, 'package', None) # package read from meta name = package + module if package else module # concat pkg + name path = getattr(self.meta, 'path', None) # path read from parameters # import module using module and package mod = None # SEE ALSO: http://docs.python.org/2/library/imp.html#examples try: # fast path: see if module was already imported mod = sys.modules[name] except KeyError: try: # import module specified in parameters mod = importlib.import_module(module, package) except ImportError as err: if not path: msg = ('%s could not be imported either because it was not ' 'on the PYTHONPATH or path was not given.') LOGGER.exception(msg, name) raise err else: # import module using path # expand ~, environmental variables and make path absolute if not os.path.isabs(path): path = os.path.expanduser(os.path.expandvars(path)) path = os.path.abspath(path) # paths must be a list paths = [path] # imp does not find hierarchical module names, find and load # packages recursively, then load module, see last paragraph # https://docs.python.org/2/library/imp.html#imp.find_module pname = '' # full dotted name of package to load # traverse namespace while name: # if dot in name get first package if '.' in name: pkg, name = name.split('.', 1) else: pkg, name = name, None # pkg is the module # Find package or module by name and path fp, filename, desc = imp.find_module(pkg, paths) # full dotted name of package to load pname = pkg if not pname else '%s.%s' % (pname, pkg) LOGGER.debug('package name: %s', pname) # try to load the package or module try: mod = imp.load_module(pname, fp, filename, desc) finally: if fp: fp.close() # append package paths for imp.find_module if name: paths = mod.__path__ formulas = {} # an empty list of formulas formula_param = self.parameters # formulas key # FYI: iterating over dictionary is equivalent to iterkeys() if isinstance(formula_param, (list, tuple, dict)): # iterate through formulas for f in formula_param: formulas[f] = getattr(mod, f) elif isinstance(formula_param, basestring): # only one formula # FYI: use basestring to test for str and unicode # SEE: http://docs.python.org/2/library/functions.html#basestring formulas[formula_param] = getattr(mod, formula_param) else: # autodetect formulas assuming first letter is f formulas = {f: getattr(mod, f) for f in dir(mod) if f[:2] == 'f_'} if not len(formulas): for f in dir(mod): mod_attr = getattr(mod, f) if inspect.isfunction(mod_attr): formulas[f] = mod_attr return formulas
[ "def", "import_formulas", "(", "self", ")", ":", "# TODO: unit tests!", "# TODO: move this to somewhere else and call it \"importy\", maybe", "# core.__init__.py since a lot of modules might use it.", "module", "=", "self", ".", "meta", ".", "module", "# module read from parameters", "package", "=", "getattr", "(", "self", ".", "meta", ",", "'package'", ",", "None", ")", "# package read from meta", "name", "=", "package", "+", "module", "if", "package", "else", "module", "# concat pkg + name", "path", "=", "getattr", "(", "self", ".", "meta", ",", "'path'", ",", "None", ")", "# path read from parameters", "# import module using module and package", "mod", "=", "None", "# SEE ALSO: http://docs.python.org/2/library/imp.html#examples", "try", ":", "# fast path: see if module was already imported", "mod", "=", "sys", ".", "modules", "[", "name", "]", "except", "KeyError", ":", "try", ":", "# import module specified in parameters", "mod", "=", "importlib", ".", "import_module", "(", "module", ",", "package", ")", "except", "ImportError", "as", "err", ":", "if", "not", "path", ":", "msg", "=", "(", "'%s could not be imported either because it was not '", "'on the PYTHONPATH or path was not given.'", ")", "LOGGER", ".", "exception", "(", "msg", ",", "name", ")", "raise", "err", "else", ":", "# import module using path", "# expand ~, environmental variables and make path absolute", "if", "not", "os", ".", "path", ".", "isabs", "(", "path", ")", ":", "path", "=", "os", ".", "path", ".", "expanduser", "(", "os", ".", "path", ".", "expandvars", "(", "path", ")", ")", "path", "=", "os", ".", "path", ".", "abspath", "(", "path", ")", "# paths must be a list", "paths", "=", "[", "path", "]", "# imp does not find hierarchical module names, find and load", "# packages recursively, then load module, see last paragraph", "# https://docs.python.org/2/library/imp.html#imp.find_module", "pname", "=", "''", "# full dotted name of package to load", "# traverse namespace", "while", "name", ":", "# if dot in name get first package", "if", "'.'", "in", "name", ":", "pkg", ",", "name", "=", "name", ".", "split", "(", "'.'", ",", "1", ")", "else", ":", "pkg", ",", "name", "=", "name", ",", "None", "# pkg is the module", "# Find package or module by name and path", "fp", ",", "filename", ",", "desc", "=", "imp", ".", "find_module", "(", "pkg", ",", "paths", ")", "# full dotted name of package to load", "pname", "=", "pkg", "if", "not", "pname", "else", "'%s.%s'", "%", "(", "pname", ",", "pkg", ")", "LOGGER", ".", "debug", "(", "'package name: %s'", ",", "pname", ")", "# try to load the package or module", "try", ":", "mod", "=", "imp", ".", "load_module", "(", "pname", ",", "fp", ",", "filename", ",", "desc", ")", "finally", ":", "if", "fp", ":", "fp", ".", "close", "(", ")", "# append package paths for imp.find_module", "if", "name", ":", "paths", "=", "mod", ".", "__path__", "formulas", "=", "{", "}", "# an empty list of formulas", "formula_param", "=", "self", ".", "parameters", "# formulas key", "# FYI: iterating over dictionary is equivalent to iterkeys()", "if", "isinstance", "(", "formula_param", ",", "(", "list", ",", "tuple", ",", "dict", ")", ")", ":", "# iterate through formulas", "for", "f", "in", "formula_param", ":", "formulas", "[", "f", "]", "=", "getattr", "(", "mod", ",", "f", ")", "elif", "isinstance", "(", "formula_param", ",", "basestring", ")", ":", "# only one formula", "# FYI: use basestring to test for str and unicode", "# SEE: http://docs.python.org/2/library/functions.html#basestring", 
"formulas", "[", "formula_param", "]", "=", "getattr", "(", "mod", ",", "formula_param", ")", "else", ":", "# autodetect formulas assuming first letter is f", "formulas", "=", "{", "f", ":", "getattr", "(", "mod", ",", "f", ")", "for", "f", "in", "dir", "(", "mod", ")", "if", "f", "[", ":", "2", "]", "==", "'f_'", "}", "if", "not", "len", "(", "formulas", ")", ":", "for", "f", "in", "dir", "(", "mod", ")", ":", "mod_attr", "=", "getattr", "(", "mod", ",", "f", ")", "if", "inspect", ".", "isfunction", "(", "mod_attr", ")", ":", "formulas", "[", "f", "]", "=", "mod_attr", "return", "formulas" ]
Import formulas specified in :attr:`parameters`. :returns: formulas :rtype: dict
[ "Import", "formulas", "specified", "in", ":", "attr", ":", "parameters", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/formulas.py#L82-L165
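To make the autodetect fallback concrete, a standalone re-creation of just that branch against a throwaway module (autodetect_formulas and demo_formulas are illustrative, not simkit API):

import inspect
import types

mod = types.ModuleType('demo_formulas')  # stand-in for the imported module
exec("def f_area(w, h): return w * h\ndef helper(x): return x", mod.__dict__)

def autodetect_formulas(mod):
    # mirrors the final branch of import_formulas above
    formulas = {f: getattr(mod, f) for f in dir(mod) if f[:2] == 'f_'}
    if not formulas:
        formulas = {f: getattr(mod, f) for f in dir(mod)
                    if inspect.isfunction(getattr(mod, f))}
    return formulas

print(autodetect_formulas(mod))  # only f_area is picked up; helper is ignored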
BeyondTheClouds/enoslib
enoslib/infra/enos_chameleonbaremetal/provider.py
create_blazar_client
def create_blazar_client(config, session): """Check the reservation, create a new one if necessary.""" return blazar_client.Client(session=session, service_type="reservation", region_name=os.environ["OS_REGION_NAME"])
python
def create_blazar_client(config, session): """Check the reservation, create a new one if necessary.""" return blazar_client.Client(session=session, service_type="reservation", region_name=os.environ["OS_REGION_NAME"])
[ "def", "create_blazar_client", "(", "config", ",", "session", ")", ":", "return", "blazar_client", ".", "Client", "(", "session", "=", "session", ",", "service_type", "=", "\"reservation\"", ",", "region_name", "=", "os", ".", "environ", "[", "\"OS_REGION_NAME\"", "]", ")" ]
Check the reservation, create a new one if necessary.
[ "Check", "the", "reservation", "creates", "a", "new", "one", "if", "nescessary", "." ]
train
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_chameleonbaremetal/provider.py#L46-L50
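A hedged sketch of producing the session argument with keystoneauth1 (standard OpenStack auth plumbing, not part of enoslib); note that the config parameter is not actually used by the function above:

import os
from keystoneauth1 import identity, session

auth = identity.v3.Password(
    auth_url=os.environ['OS_AUTH_URL'],
    username=os.environ['OS_USERNAME'],
    password=os.environ['OS_PASSWORD'],
    project_name=os.environ['OS_PROJECT_NAME'],
    user_domain_id='default',      # assumption: a default-domain cloud
    project_domain_id='default',
)
sess = session.Session(auth=auth)
client = create_blazar_client(config={}, session=sess)  # config is ignored above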
Jaymon/prom
prom/decorators.py
reconnecting
def reconnecting(count=None, backoff=None): """this is a very specific decorator meant to be used on Interface classes. It will attempt to reconnect if the connection is closed and run the same method again. TODO -- I think this will have issues with transactions using passed in connections, ie, you pass in a transacting connection to the insert() method and that connection gets dropped, this will reconnect but the transaction will be hosed. count -- integer -- how many attempts to run the method, defaults to 3 backoff -- float -- how long to sleep on failure, defaults to 1.0 """ # we get trixxy here so we can manipulate these values in the wrapped function, # this is one of the first times I wish we were on Python 3 # http://stackoverflow.com/a/9264845/5006 reconn_params = { "count": count, "backoff": backoff } def retry_decorator(func): @wraps(func) def wrapper(self, *args, **kwargs): count = reconn_params["count"] backoff = reconn_params["backoff"] if count is None: count = self.connection_config.options.get('reconnect_attempts', 3) if backoff is None: backoff = self.connection_config.options.get('reconnect_backoff', 1.0) count = int(count) backoff = float(backoff) for attempt in range(1, count + 1): try: backoff_seconds = float(attempt - 1) * backoff if backoff_seconds: logger.debug("sleeping {} seconds before attempt {}".format( backoff_seconds, attempt )) time.sleep(backoff_seconds) return func(self, *args, **kwargs) except InterfaceError as e: e_msg = str(e.e) # TODO -- this gets us by SQLite and Postgres, but might not # work in the future, so this needs to be a tad more robust if "closed" in e_msg.lower(): if attempt == count: logger.debug("all {} attempts failed".format(count)) raise else: logger.debug("attempt {}/{} failed, retrying".format( attempt, count )) else: raise return wrapper return retry_decorator
python
def reconnecting(count=None, backoff=None): """this is a very specific decorator meant to be used on Interface classes. It will attempt to reconnect if the connection is closed and run the same method again. TODO -- I think this will have issues with transactions using passed in connections, ie, you pass in a transacting connection to the insert() method and that connection gets dropped, this will reconnect but the transaction will be hosed. count -- integer -- how many attempts to run the method, defaults to 3 backoff -- float -- how long to sleep on failure, defaults to 1.0 """ # we get trixxy here so we can manipulate these values in the wrapped function, # this is one of the first times I wish we were on Python 3 # http://stackoverflow.com/a/9264845/5006 reconn_params = { "count": count, "backoff": backoff } def retry_decorator(func): @wraps(func) def wrapper(self, *args, **kwargs): count = reconn_params["count"] backoff = reconn_params["backoff"] if count is None: count = self.connection_config.options.get('reconnect_attempts', 3) if backoff is None: backoff = self.connection_config.options.get('reconnect_backoff', 1.0) count = int(count) backoff = float(backoff) for attempt in range(1, count + 1): try: backoff_seconds = float(attempt - 1) * backoff if backoff_seconds: logger.debug("sleeping {} seconds before attempt {}".format( backoff_seconds, attempt )) time.sleep(backoff_seconds) return func(self, *args, **kwargs) except InterfaceError as e: e_msg = str(e.e) # TODO -- this gets us by SQLite and Postgres, but might not # work in the future, so this needs to be a tad more robust if "closed" in e_msg.lower(): if attempt == count: logger.debug("all {} attempts failed".format(count)) raise else: logger.debug("attempt {}/{} failed, retrying".format( attempt, count )) else: raise return wrapper return retry_decorator
[ "def", "reconnecting", "(", "count", "=", "None", ",", "backoff", "=", "None", ")", ":", "# we get trixxy here so we can manipulate these values in the wrapped function,", "# this is one of the first times I wish we were on Python 3", "# http://stackoverflow.com/a/9264845/5006", "reconn_params", "=", "{", "\"count\"", ":", "count", ",", "\"backoff\"", ":", "backoff", "}", "def", "retry_decorator", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "count", "=", "reconn_params", "[", "\"count\"", "]", "backoff", "=", "reconn_params", "[", "\"backoff\"", "]", "if", "count", "is", "None", ":", "count", "=", "self", ".", "connection_config", ".", "options", ".", "get", "(", "'reconnect_attempts'", ",", "3", ")", "if", "backoff", "is", "None", ":", "backoff", "=", "self", ".", "connection_config", ".", "options", ".", "get", "(", "'reconnect_backoff'", ",", "1.0", ")", "count", "=", "int", "(", "count", ")", "backoff", "=", "float", "(", "backoff", ")", "for", "attempt", "in", "range", "(", "1", ",", "count", "+", "1", ")", ":", "try", ":", "backoff_seconds", "=", "float", "(", "attempt", "-", "1", ")", "*", "backoff", "if", "backoff_seconds", ":", "logger", ".", "debug", "(", "\"sleeping {} seconds before attempt {}\"", ".", "format", "(", "backoff_seconds", ",", "attempt", ")", ")", "time", ".", "sleep", "(", "backoff_seconds", ")", "return", "func", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "except", "InterfaceError", "as", "e", ":", "e_msg", "=", "str", "(", "e", ".", "e", ")", "# TODO -- this gets us by SQLite and Postgres, but might not", "# work in the future, so this needs to be a tad more robust", "if", "\"closed\"", "in", "e_msg", ".", "lower", "(", ")", ":", "if", "attempt", "==", "count", ":", "logger", ".", "debug", "(", "\"all {} attempts failed\"", ".", "format", "(", "count", ")", ")", "raise", "else", ":", "logger", ".", "debug", "(", "\"attempt {}/{} failed, retrying\"", ".", "format", "(", "attempt", ",", "count", ")", ")", "else", ":", "raise", "return", "wrapper", "return", "retry_decorator" ]
this is a very specific decorator meant to be used on Interface classes. It will attempt to reconnect if the connection is closed and run the same method again. TODO -- I think this will have issues with transactions using passed in connections, ie, you pass in a transacting connection to the insert() method and that connection gets dropped, this will reconnect but the transaction will be hosed. count -- integer -- how many attempts to run the method, defaults to 3 backoff -- float -- how long to sleep on failure, defaults to 1.0
[ "this", "is", "a", "very", "specific", "decorator", "meant", "to", "be", "used", "on", "Interface", "classes", ".", "It", "will", "attempt", "to", "reconnect", "if", "the", "connection", "is", "closed", "and", "run", "the", "same", "method", "again", "." ]
train
https://github.com/Jaymon/prom/blob/b7ad2c259eca198da03e1e4bc7d95014c168c361/prom/decorators.py#L13-L81
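A self-contained sketch of wiring the decorator onto a class; FakeConfig stands in for what a real prom Interface exposes, and on a closed-connection InterfaceError the wrapper would sleep (attempt - 1) * backoff seconds before each retry, up to three attempts here:

from prom.decorators import reconnecting

class FakeConfig(object):
    # the decorator reads these when count/backoff are not passed explicitly
    options = {'reconnect_attempts': 3, 'reconnect_backoff': 0.1}

class Demo(object):
    connection_config = FakeConfig()

    @reconnecting()  # defaults pulled from connection_config at call time
    def query(self, sql):
        return 'rows for %s' % sql

print(Demo().query('SELECT 1'))  # first attempt succeeds, so no sleep happens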