Dataset columns (name, field type, min .. max over the split):

repository_name              stringlengths    7 .. 55
func_path_in_repository      stringlengths    4 .. 223
func_name                    stringlengths    1 .. 134
whole_func_string            stringlengths    75 .. 104k
language                     stringclasses    1 value
func_code_string             stringlengths    75 .. 104k
func_code_tokens             listlengths      19 .. 28.4k
func_documentation_string    stringlengths    1 .. 46.9k
func_documentation_tokens    listlengths      1 .. 1.97k
split_name                   stringclasses    1 value
func_code_url                stringlengths    87 .. 315
volafiled/python-volapi
volapi/multipart.py
escape_header
def escape_header(val): """Escapes a value so that it can be used in a mime header""" if val is None: return None try: return quote(val, encoding="ascii", safe="/ ") except ValueError: return "utf-8''" + quote(val, encoding="utf-8", safe="/ ")
python
def escape_header(val): """Escapes a value so that it can be used in a mime header""" if val is None: return None try: return quote(val, encoding="ascii", safe="/ ") except ValueError: return "utf-8''" + quote(val, encoding="utf-8", safe="/ ")
[ "def", "escape_header", "(", "val", ")", ":", "if", "val", "is", "None", ":", "return", "None", "try", ":", "return", "quote", "(", "val", ",", "encoding", "=", "\"ascii\"", ",", "safe", "=", "\"/ \"", ")", "except", "ValueError", ":", "return", "\"utf-8''\"", "+", "quote", "(", "val", ",", "encoding", "=", "\"utf-8\"", ",", "safe", "=", "\"/ \"", ")" ]
Escapes a value so that it can be used in a mime header
[ "Escapes", "a", "value", "so", "that", "it", "can", "be", "used", "in", "a", "mime", "header" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/multipart.py#L24-L32
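The fallback above is the RFC 2231 pattern for MIME header parameters: try plain ASCII percent-encoding first, and switch to a "utf-8''"-prefixed extended value only when the input is not ASCII. A self-contained sketch of the same logic, assuming quote is urllib.parse.quote (the record does not show its imports); the except ValueError clause works because UnicodeEncodeError is a subclass of ValueError:

from urllib.parse import quote

def escape_header(val):
    """Escape a value for use in a MIME header (sketch of the record above)."""
    if val is None:
        return None
    try:
        # quote() raises UnicodeEncodeError (a ValueError) for non-ASCII input
        return quote(val, encoding="ascii", safe="/ ")
    except ValueError:
        return "utf-8''" + quote(val, encoding="utf-8", safe="/ ")

print(escape_header("report 2019.pdf"))   # report 2019.pdf (space is in `safe`)
print(escape_header("résumé.pdf"))        # utf-8''r%C3%A9sum%C3%A9.pdf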
volafiled/python-volapi
volapi/multipart.py
make_streams
def make_streams(name, value, boundary, encoding): """Generates one or more streams for each name, value pair""" filename = None mime = None # user passed in a special dict. if isinstance(value, collections.Mapping) and "name" in value and "value" in value: filename = value["name"] try: mime = value["mime"] except KeyError: pass value = value["value"] if not filename: filename = getattr(value, "name", None) if filename: filename = os.path.split(filename)[1] mime = mime or "application/octet-stream" name, filename, mime = [escape_header(v) for v in (name, filename, mime)] stream = BytesIO() stream.write("--{}\r\n".format(boundary).encode(encoding)) if not filename: stream.write( 'Content-Disposition: form-data; name="{}"\r\n'.format(name).encode( encoding ) ) else: stream.write( 'Content-Disposition: form-data; name="{}"; filename="{}"\r\n'.format( name, filename ).encode(encoding) ) if mime: stream.write("Content-Type: {}\r\n".format(mime).encode(encoding)) stream.write(b"\r\n") if hasattr(value, "read"): stream.seek(0) return stream, value, BytesIO("\r\n".encode(encoding)) # not a file-like object, encode headers and value in one go value = value if isinstance(value, (str, bytes)) else json.dumps(value) if isinstance(value, bytes): stream.write(value) else: stream.write(value.encode(encoding)) stream.write(b"\r\n") stream.seek(0) return (stream,)
python
def make_streams(name, value, boundary, encoding): """Generates one or more streams for each name, value pair""" filename = None mime = None # user passed in a special dict. if isinstance(value, collections.Mapping) and "name" in value and "value" in value: filename = value["name"] try: mime = value["mime"] except KeyError: pass value = value["value"] if not filename: filename = getattr(value, "name", None) if filename: filename = os.path.split(filename)[1] mime = mime or "application/octet-stream" name, filename, mime = [escape_header(v) for v in (name, filename, mime)] stream = BytesIO() stream.write("--{}\r\n".format(boundary).encode(encoding)) if not filename: stream.write( 'Content-Disposition: form-data; name="{}"\r\n'.format(name).encode( encoding ) ) else: stream.write( 'Content-Disposition: form-data; name="{}"; filename="{}"\r\n'.format( name, filename ).encode(encoding) ) if mime: stream.write("Content-Type: {}\r\n".format(mime).encode(encoding)) stream.write(b"\r\n") if hasattr(value, "read"): stream.seek(0) return stream, value, BytesIO("\r\n".encode(encoding)) # not a file-like object, encode headers and value in one go value = value if isinstance(value, (str, bytes)) else json.dumps(value) if isinstance(value, bytes): stream.write(value) else: stream.write(value.encode(encoding)) stream.write(b"\r\n") stream.seek(0) return (stream,)
[ "def", "make_streams", "(", "name", ",", "value", ",", "boundary", ",", "encoding", ")", ":", "filename", "=", "None", "mime", "=", "None", "# user passed in a special dict.", "if", "isinstance", "(", "value", ",", "collections", ".", "Mapping", ")", "and", "\"name\"", "in", "value", "and", "\"value\"", "in", "value", ":", "filename", "=", "value", "[", "\"name\"", "]", "try", ":", "mime", "=", "value", "[", "\"mime\"", "]", "except", "KeyError", ":", "pass", "value", "=", "value", "[", "\"value\"", "]", "if", "not", "filename", ":", "filename", "=", "getattr", "(", "value", ",", "\"name\"", ",", "None", ")", "if", "filename", ":", "filename", "=", "os", ".", "path", ".", "split", "(", "filename", ")", "[", "1", "]", "mime", "=", "mime", "or", "\"application/octet-stream\"", "name", ",", "filename", ",", "mime", "=", "[", "escape_header", "(", "v", ")", "for", "v", "in", "(", "name", ",", "filename", ",", "mime", ")", "]", "stream", "=", "BytesIO", "(", ")", "stream", ".", "write", "(", "\"--{}\\r\\n\"", ".", "format", "(", "boundary", ")", ".", "encode", "(", "encoding", ")", ")", "if", "not", "filename", ":", "stream", ".", "write", "(", "'Content-Disposition: form-data; name=\"{}\"\\r\\n'", ".", "format", "(", "name", ")", ".", "encode", "(", "encoding", ")", ")", "else", ":", "stream", ".", "write", "(", "'Content-Disposition: form-data; name=\"{}\"; filename=\"{}\"\\r\\n'", ".", "format", "(", "name", ",", "filename", ")", ".", "encode", "(", "encoding", ")", ")", "if", "mime", ":", "stream", ".", "write", "(", "\"Content-Type: {}\\r\\n\"", ".", "format", "(", "mime", ")", ".", "encode", "(", "encoding", ")", ")", "stream", ".", "write", "(", "b\"\\r\\n\"", ")", "if", "hasattr", "(", "value", ",", "\"read\"", ")", ":", "stream", ".", "seek", "(", "0", ")", "return", "stream", ",", "value", ",", "BytesIO", "(", "\"\\r\\n\"", ".", "encode", "(", "encoding", ")", ")", "# not a file-like object, encode headers and value in one go", "value", "=", "value", "if", "isinstance", "(", "value", ",", "(", "str", ",", "bytes", ")", ")", "else", "json", ".", "dumps", "(", "value", ")", "if", "isinstance", "(", "value", ",", "bytes", ")", ":", "stream", ".", "write", "(", "value", ")", "else", ":", "stream", ".", "write", "(", "value", ".", "encode", "(", "encoding", ")", ")", "stream", ".", "write", "(", "b\"\\r\\n\"", ")", "stream", ".", "seek", "(", "0", ")", "return", "(", "stream", ",", ")" ]
Generates one or more streams for each name, value pair
[ "Generates", "one", "or", "more", "streams", "for", "each", "name", "value", "pair" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/multipart.py#L35-L89
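Two notes on the record above. First, isinstance(value, collections.Mapping) only works on Python versions before 3.10, where that alias was removed; collections.abc.Mapping is the current spelling. Second, the part layout it emits is ordinary multipart/form-data. A minimal, self-contained sketch of that layout for plain (non-file) fields, with a made-up boundary and field names:

import json
from io import BytesIO

BOUNDARY = "f8a1c2"   # hypothetical; real code should use a random boundary
ENCODING = "utf-8"

def make_part(name, value):
    # Same shape the record produces for a non-file field:
    # --boundary CRLF, Content-Disposition, blank line, body, CRLF.
    stream = BytesIO()
    stream.write("--{}\r\n".format(BOUNDARY).encode(ENCODING))
    stream.write(
        'Content-Disposition: form-data; name="{}"\r\n\r\n'.format(name).encode(ENCODING)
    )
    value = value if isinstance(value, (str, bytes)) else json.dumps(value)
    stream.write(value if isinstance(value, bytes) else value.encode(ENCODING))
    stream.write(b"\r\n")
    stream.seek(0)
    return stream

parts = (make_part("room", "abc"), make_part("meta", {"n": 1}))
print(b"".join(p.read() for p in parts).decode(ENCODING))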
volafiled/python-volapi
volapi/multipart.py
Data.len
def len(self): """Length of the data stream""" # The len property is needed for requests. # requests checks __len__, then len # Since we cannot implement __len__ because python 32-bit uses 32-bit # sizes, we implement this instead. def stream_len(stream): """Stream length""" cur = stream.tell() try: stream.seek(0, 2) return stream.tell() - cur finally: stream.seek(cur) return sum(stream_len(s) for s in self.streams)
python
def len(self): """Length of the data stream""" # The len property is needed for requests. # requests checks __len__, then len # Since we cannot implement __len__ because python 32-bit uses 32-bit # sizes, we implement this instead. def stream_len(stream): """Stream length""" cur = stream.tell() try: stream.seek(0, 2) return stream.tell() - cur finally: stream.seek(cur) return sum(stream_len(s) for s in self.streams)
[ "def", "len", "(", "self", ")", ":", "# The len property is needed for requests.", "# requests checks __len__, then len", "# Since we cannot implement __len__ because python 32-bit uses 32-bit", "# sizes, we implement this instead.", "def", "stream_len", "(", "stream", ")", ":", "\"\"\"Stream length\"\"\"", "cur", "=", "stream", ".", "tell", "(", ")", "try", ":", "stream", ".", "seek", "(", "0", ",", "2", ")", "return", "stream", ".", "tell", "(", ")", "-", "cur", "finally", ":", "stream", ".", "seek", "(", "cur", ")", "return", "sum", "(", "stream_len", "(", "s", ")", "for", "s", "in", "self", ".", "streams", ")" ]
Length of the data stream
[ "Length", "of", "the", "data", "stream" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/multipart.py#L132-L147
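Per the record's own comments, requests looks for a len attribute when __len__ is unavailable, and __len__ cannot be used here because 32-bit builds cap its result. The nested helper is the interesting part: it measures the bytes remaining in a stream without consuming them. A standalone demonstration:

from io import BytesIO

def stream_len(stream):
    """Bytes remaining from the current position, measured without consuming them."""
    cur = stream.tell()
    try:
        stream.seek(0, 2)        # whence=2: jump to the end
        return stream.tell() - cur
    finally:
        stream.seek(cur)         # always restore the read position

s = BytesIO(b"hello world")
s.read(6)                        # consume "hello "
print(stream_len(s))             # 5
print(s.read())                  # b'world' (position was restored)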
volafiled/python-volapi
volapi/multipart.py
Data.headers
def headers(self): """All headers needed to make a request""" return { "Content-Type": ("multipart/form-data; boundary={}".format(self.boundary)), "Content-Length": str(self.len), "Content-Encoding": self.encoding, }
python
def headers(self): """All headers needed to make a request""" return { "Content-Type": ("multipart/form-data; boundary={}".format(self.boundary)), "Content-Length": str(self.len), "Content-Encoding": self.encoding, }
[ "def", "headers", "(", "self", ")", ":", "return", "{", "\"Content-Type\"", ":", "(", "\"multipart/form-data; boundary={}\"", ".", "format", "(", "self", ".", "boundary", ")", ")", ",", "\"Content-Length\"", ":", "str", "(", "self", ".", "len", ")", ",", "\"Content-Encoding\"", ":", "self", ".", "encoding", ",", "}" ]
All headers needed to make a request
[ "All", "headers", "needed", "to", "make", "a", "request" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/multipart.py#L150-L156
BeyondTheClouds/enoslib
enoslib/infra/utils.py
mk_pools
def mk_pools(things, keyfnc=lambda x: x): "Indexes a thing by the keyfnc to construct pools of things." pools = {} sthings = sorted(things, key=keyfnc) for key, thingz in groupby(sthings, key=keyfnc): pools.setdefault(key, []).extend(list(thingz)) return pools
python
def mk_pools(things, keyfnc=lambda x: x): "Indexes a thing by the keyfnc to construct pools of things." pools = {} sthings = sorted(things, key=keyfnc) for key, thingz in groupby(sthings, key=keyfnc): pools.setdefault(key, []).extend(list(thingz)) return pools
[ "def", "mk_pools", "(", "things", ",", "keyfnc", "=", "lambda", "x", ":", "x", ")", ":", "pools", "=", "{", "}", "sthings", "=", "sorted", "(", "things", ",", "key", "=", "keyfnc", ")", "for", "key", ",", "thingz", "in", "groupby", "(", "sthings", ",", "key", "=", "keyfnc", ")", ":", "pools", ".", "setdefault", "(", "key", ",", "[", "]", ")", ".", "extend", "(", "list", "(", "thingz", ")", ")", "return", "pools" ]
Indexes a thing by the keyfnc to construct pools of things.
[ "Indexes", "a", "thing", "by", "the", "keyfnc", "to", "construct", "pools", "of", "things", "." ]
train
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/utils.py#L6-L12
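itertools.groupby only merges runs of equal keys, which is why the record sorts by the same key function first; without the sort, equal keys separated by other items would produce separate groups. A runnable usage sketch with made-up host tuples:

from itertools import groupby

def mk_pools(things, keyfnc=lambda x: x):
    pools = {}
    for key, group in groupby(sorted(things, key=keyfnc), key=keyfnc):
        pools.setdefault(key, []).extend(group)
    return pools

hosts = [("paravance", 1), ("parasilo", 2), ("paravance", 3)]
print(mk_pools(hosts, keyfnc=lambda h: h[0]))
# {'parasilo': [('parasilo', 2)], 'paravance': [('paravance', 1), ('paravance', 3)]}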
BeyondTheClouds/enoslib
enoslib/infra/utils.py
pick_things
def pick_things(pools, key, n): "Picks a maximum of n things in a dict of indexed pool of things." pool = pools.get(key) if not pool: return [] things = pool[:n] del pool[:n] return things
python
def pick_things(pools, key, n): "Picks a maximum of n things in a dict of indexed pool of things." pool = pools.get(key) if not pool: return [] things = pool[:n] del pool[:n] return things
[ "def", "pick_things", "(", "pools", ",", "key", ",", "n", ")", ":", "pool", "=", "pools", ".", "get", "(", "key", ")", "if", "not", "pool", ":", "return", "[", "]", "things", "=", "pool", "[", ":", "n", "]", "del", "pool", "[", ":", "n", "]", "return", "things" ]
Picks a maximum of n things in a dict of indexed pool of things.
[ "Picks", "a", "maximum", "of", "n", "things", "in", "a", "dict", "of", "indexed", "pool", "of", "things", "." ]
train
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/utils.py#L15-L22
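pick_things is the destructive counterpart of mk_pools: del pool[:n] removes the returned slice, so repeated picks never hand out the same item twice, and asking for more than remains simply returns what is left. A short demo:

def pick_things(pools, key, n):
    pool = pools.get(key)
    if not pool:
        return []
    things = pool[:n]
    del pool[:n]        # mutates the pool so later picks draw fresh items
    return things

pools = {"paravance": ["node-1", "node-2", "node-3"]}
print(pick_things(pools, "paravance", 2))   # ['node-1', 'node-2']
print(pick_things(pools, "paravance", 2))   # ['node-3'] (fewer than n remained)
print(pick_things(pools, "missing", 1))     # [] (unknown key)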
volafiled/python-volapi
docs/examples/parrot.py
listen
def listen(room): """Open a volafile room and start listening to it""" def onmessage(m): """Print the new message and respond to it.""" print(m) if m.admin or m.nick == r.user.name: return if "parrot" in m.msg.lower(): r.post_chat("ayy lmao") elif m.msg.lower() in ("lol", "lel", "kek"): r.post_chat("*kok") else: r.post_chat(re.sub(r"\blain\b", "purpleadmin", m.msg, flags=re.I)) with Room(room) as r: r.user.change_nick("DumbParrot") r.add_listener("chat", onmessage) r.listen()
python
def listen(room): """Open a volafile room and start listening to it""" def onmessage(m): """Print the new message and respond to it.""" print(m) if m.admin or m.nick == r.user.name: return if "parrot" in m.msg.lower(): r.post_chat("ayy lmao") elif m.msg.lower() in ("lol", "lel", "kek"): r.post_chat("*kok") else: r.post_chat(re.sub(r"\blain\b", "purpleadmin", m.msg, flags=re.I)) with Room(room) as r: r.user.change_nick("DumbParrot") r.add_listener("chat", onmessage) r.listen()
[ "def", "listen", "(", "room", ")", ":", "def", "onmessage", "(", "m", ")", ":", "\"\"\"Print the new message and respond to it.\"\"\"", "print", "(", "m", ")", "if", "m", ".", "admin", "or", "m", ".", "nick", "==", "r", ".", "user", ".", "name", ":", "return", "if", "\"parrot\"", "in", "m", ".", "msg", ".", "lower", "(", ")", ":", "r", ".", "post_chat", "(", "\"ayy lmao\"", ")", "elif", "m", ".", "msg", ".", "lower", "(", ")", "in", "(", "\"lol\"", ",", "\"lel\"", ",", "\"kek\"", ")", ":", "r", ".", "post_chat", "(", "\"*kok\"", ")", "else", ":", "r", ".", "post_chat", "(", "re", ".", "sub", "(", "r\"\\blain\\b\"", ",", "\"purpleadmin\"", ",", "m", ".", "msg", ",", "re", ".", "I", ")", ")", "with", "Room", "(", "room", ")", "as", "r", ":", "r", ".", "user", ".", "change_nick", "(", "\"DumbParrot\"", ")", "r", ".", "add_listener", "(", "\"chat\"", ",", "onmessage", ")", "r", ".", "listen", "(", ")" ]
Open a volafile room and start listening to it
[ "Open", "a", "volafile", "room", "and", "start", "listening", "to", "it" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/docs/examples/parrot.py#L11-L28
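The substitution passes re.I by keyword because the fourth positional parameter of re.sub is count, not flags; re.I is the integer 2, so passing it positionally silently caps the number of replacements instead of making the match case-insensitive. A demonstration of the pitfall:

import re

msg = "LAIN is here"
# count=re.I means count=2: at most two case-sensitive replacements, so no match.
print(re.sub(r"\blain\b", "purpleadmin", msg, count=re.I))   # LAIN is here
# Passed by keyword, the flag behaves as intended:
print(re.sub(r"\blain\b", "purpleadmin", msg, flags=re.I))   # purpleadmin is here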
guaix-ucm/pyemir
emirdrp/tools/nscan_minmax_frontiers.py
nscan_minmax_frontiers
def nscan_minmax_frontiers(y0_frontier_lower, y0_frontier_upper, resize=False): """Compute valid scan range for provided y0_frontier values. Parameters ---------- y0_frontier_lower : float Ordinate of the lower frontier. y0_frontier_upper : float Ordinate of the upper frontier. resize : bool If True, when the limits are beyond the expected values [1,EMIR_NAXIS2], the values are truncated. Returns ------- nscan_min : int Minimum useful scan for the image. nscan_max : int Maximum useful scan for the image. """ fraction_pixel = y0_frontier_lower - int(y0_frontier_lower) if fraction_pixel > 0.0: nscan_min = int(y0_frontier_lower) + 1 else: nscan_min = int(y0_frontier_lower) if nscan_min < 1: if resize: nscan_min = 1 else: raise ValueError("nscan_min=" + str(nscan_min) + " is < 1") fraction_pixel = y0_frontier_upper - int(y0_frontier_upper) if fraction_pixel > 0.0: nscan_max = int(y0_frontier_upper) else: nscan_max = int(y0_frontier_upper) - 1 if nscan_max > EMIR_NAXIS2: if resize: nscan_max = EMIR_NAXIS2 else: raise ValueError("nscan_max=" + str(nscan_max) + " is > NAXIS2_EMIR=" + str(EMIR_NAXIS2)) return nscan_min, nscan_max
python
def nscan_minmax_frontiers(y0_frontier_lower, y0_frontier_upper, resize=False): """Compute valid scan range for provided y0_frontier values. Parameters ---------- y0_frontier_lower : float Ordinate of the lower frontier. y0_frontier_upper : float Ordinate of the upper frontier. resize : bool If True, when the limits are beyond the expected values [1,EMIR_NAXIS2], the values are truncated. Returns ------- nscan_min : int Minimum useful scan for the image. nscan_max : int Maximum useful scan for the image. """ fraction_pixel = y0_frontier_lower - int(y0_frontier_lower) if fraction_pixel > 0.0: nscan_min = int(y0_frontier_lower) + 1 else: nscan_min = int(y0_frontier_lower) if nscan_min < 1: if resize: nscan_min = 1 else: raise ValueError("nscan_min=" + str(nscan_min) + " is < 1") fraction_pixel = y0_frontier_upper - int(y0_frontier_upper) if fraction_pixel > 0.0: nscan_max = int(y0_frontier_upper) else: nscan_max = int(y0_frontier_upper) - 1 if nscan_max > EMIR_NAXIS2: if resize: nscan_max = EMIR_NAXIS2 else: raise ValueError("nscan_max=" + str(nscan_max) + " is > NAXIS2_EMIR=" + str(EMIR_NAXIS2)) return nscan_min, nscan_max
[ "def", "nscan_minmax_frontiers", "(", "y0_frontier_lower", ",", "y0_frontier_upper", ",", "resize", "=", "False", ")", ":", "fraction_pixel", "=", "y0_frontier_lower", "-", "int", "(", "y0_frontier_lower", ")", "if", "fraction_pixel", ">", "0.0", ":", "nscan_min", "=", "int", "(", "y0_frontier_lower", ")", "+", "1", "else", ":", "nscan_min", "=", "int", "(", "y0_frontier_lower", ")", "if", "nscan_min", "<", "1", ":", "if", "resize", ":", "nscan_min", "=", "1", "else", ":", "raise", "ValueError", "(", "\"nscan_min=\"", "+", "str", "(", "nscan_min", ")", "+", "\" is < 1\"", ")", "fraction_pixel", "=", "y0_frontier_upper", "-", "int", "(", "y0_frontier_upper", ")", "if", "fraction_pixel", ">", "0.0", ":", "nscan_max", "=", "int", "(", "y0_frontier_upper", ")", "else", ":", "nscan_max", "=", "int", "(", "y0_frontier_upper", ")", "-", "1", "if", "nscan_max", ">", "EMIR_NAXIS2", ":", "if", "resize", ":", "nscan_max", "=", "EMIR_NAXIS2", "else", ":", "raise", "ValueError", "(", "\"nscan_max=\"", "+", "str", "(", "nscan_max", ")", "+", "\" is > NAXIS2_EMIR=\"", "+", "str", "(", "EMIR_NAXIS2", ")", ")", "return", "nscan_min", ",", "nscan_max" ]
Compute valid scan range for provided y0_frontier values. Parameters ---------- y0_frontier_lower : float Ordinate of the lower frontier. y0_frontier_upper : float Ordinate of the upper frontier. resize : bool If True, when the limits are beyond the expected values [1,EMIR_NAXIS2], the values are truncated. Returns ------- nscan_min : int Minimum useful scan for the image. nscan_max : int Maximum useful scan for the image.
[ "Compute", "valid", "scan", "range", "for", "provided", "y0_frontier", "values", ".", "Parameters", "----------", "y0_frontier_lower", ":", "float", "Ordinate", "of", "the", "lower", "frontier", ".", "y0_frontier_upper", ":", "float", "Ordinate", "of", "the", "upper", "frontier", ".", "resize", ":", "bool", "If", "True", "when", "the", "limits", "are", "beyond", "the", "expected", "values", "[", "1", "EMIR_NAXIS2", "]", "the", "values", "are", "truncated", ".", "Returns", "-------", "nscan_min", ":", "int", "Minimum", "useful", "scan", "for", "the", "image", ".", "nscan_max", ":", "int", "Maximum", "useful", "scan", "for", "the", "image", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/nscan_minmax_frontiers.py#L26-L69
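For positive frontier ordinates the branching above reduces to ceiling arithmetic: the first fully usable scan is ceil(y0_frontier_lower), and the last is ceil(y0_frontier_upper) - 1, since an exactly integer upper frontier excludes its own scan. A compact equivalent sketch (EMIR_NAXIS2 is assumed here to be the 2048-pixel detector axis; int() truncation equals floor only for non-negative values, hence the positivity assumption):

import math

def nscan_minmax(y0_lower, y0_upper, naxis2=2048, resize=False):
    nscan_min = math.ceil(y0_lower)        # first scan fully above the lower frontier
    nscan_max = math.ceil(y0_upper) - 1    # last scan fully below the upper frontier
    if resize:
        nscan_min = max(nscan_min, 1)
        nscan_max = min(nscan_max, naxis2)
    elif nscan_min < 1 or nscan_max > naxis2:
        raise ValueError("frontiers outside [1, {}]".format(naxis2))
    return nscan_min, nscan_max

print(nscan_minmax(10.3, 20.0))   # (11, 19)
print(nscan_minmax(10.0, 20.7))   # (10, 20)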
guaix-ucm/pyemir
emirdrp/instrument/dtu_configuration.py
average_dtu_configurations
def average_dtu_configurations(list_of_objects): """Return DtuConfiguration instance with averaged values. Parameters ---------- list_of_objects : python list List of DtuConfiguration instances to be averaged. Returns ------- result : DtuConfiguration instance Object with averaged values. """ result = DtuConfiguration() if len(list_of_objects) == 0: return result list_of_members = result.__dict__.keys() # compute average of all the members of the class for member in list_of_members: result.__dict__[member] = np.mean( [tmp_dtu.__dict__[member] for tmp_dtu in list_of_objects] ) return result
python
def average_dtu_configurations(list_of_objects): """Return DtuConfiguration instance with averaged values. Parameters ---------- list_of_objects : python list List of DtuConfiguration instances to be averaged. Returns ------- result : DtuConfiguration instance Object with averaged values. """ result = DtuConfiguration() if len(list_of_objects) == 0: return result list_of_members = result.__dict__.keys() # compute average of all the members of the class for member in list_of_members: result.__dict__[member] = np.mean( [tmp_dtu.__dict__[member] for tmp_dtu in list_of_objects] ) return result
[ "def", "average_dtu_configurations", "(", "list_of_objects", ")", ":", "result", "=", "DtuConfiguration", "(", ")", "if", "len", "(", "list_of_objects", ")", "==", "0", ":", "return", "result", "list_of_members", "=", "result", ".", "__dict__", ".", "keys", "(", ")", "# compute average of all the members of the class", "for", "member", "in", "list_of_members", ":", "result", ".", "__dict__", "[", "member", "]", "=", "np", ".", "mean", "(", "[", "tmp_dtu", ".", "__dict__", "[", "member", "]", "for", "tmp_dtu", "in", "list_of_objects", "]", ")", "return", "result" ]
Return DtuConfiguration instance with averaged values. Parameters ---------- list_of_objects : python list List of DtuConfiguration instances to be averaged. Returns ------- result : DtuConfiguration instance Object with averaged values.
[ "Return", "DtuConfiguration", "instance", "with", "averaged", "values", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/dtu_configuration.py#L208-L236
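This record and maxdiff_dtu_configurations below share one pattern: iterate over result.__dict__ so every member of the class is reduced without hard-coding attribute names. A minimal sketch with a hypothetical three-member stand-in class (the real DtuConfiguration also carries the xdtu_0/ydtu_0/zdtu_0 offsets):

import numpy as np

class DtuConf:
    """Hypothetical stand-in for DtuConfiguration, for illustration only."""
    def __init__(self, xdtu=0.0, ydtu=0.0, zdtu=0.0):
        self.xdtu, self.ydtu, self.zdtu = xdtu, ydtu, zdtu

confs = [DtuConf(1.0, 2.0, 0.5), DtuConf(3.0, 4.0, 1.5)]
result = DtuConf()
for member in result.__dict__:       # members discovered, not hard-coded
    values = [c.__dict__[member] for c in confs]
    result.__dict__[member] = float(np.mean(values))   # np.ptp(values) gives the maxdiff variant
print(result.__dict__)   # {'xdtu': 2.0, 'ydtu': 3.0, 'zdtu': 1.0}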
guaix-ucm/pyemir
emirdrp/instrument/dtu_configuration.py
maxdiff_dtu_configurations
def maxdiff_dtu_configurations(list_of_objects): """Return DtuConfiguration instance with maximum differences. Parameters ---------- list_of_objects : python list List of DtuConfiguration instances to be compared. Returns ------- result : DtuConfiguration instance Object with the maximum difference (maximum minus minimum) of each member. """ result = DtuConfiguration() if len(list_of_objects) == 0: return result list_of_members = result.__dict__.keys() # compute maximum difference for each member for member in list_of_members: tmp_array = np.array( [tmp_dtu.__dict__[member] for tmp_dtu in list_of_objects] ) minval = tmp_array.min() maxval = tmp_array.max() result.__dict__[member] = maxval - minval return result
python
def maxdiff_dtu_configurations(list_of_objects): """Return DtuConfiguration instance with maximum differences. Parameters ---------- list_of_objects : python list List of DtuConfiguration instances to be compared. Returns ------- result : DtuConfiguration instance Object with the maximum difference (maximum minus minimum) of each member. """ result = DtuConfiguration() if len(list_of_objects) == 0: return result list_of_members = result.__dict__.keys() # compute maximum difference for each member for member in list_of_members: tmp_array = np.array( [tmp_dtu.__dict__[member] for tmp_dtu in list_of_objects] ) minval = tmp_array.min() maxval = tmp_array.max() result.__dict__[member] = maxval - minval return result
[ "def", "maxdiff_dtu_configurations", "(", "list_of_objects", ")", ":", "result", "=", "DtuConfiguration", "(", ")", "if", "len", "(", "list_of_objects", ")", "==", "0", ":", "return", "result", "list_of_members", "=", "result", ".", "__dict__", ".", "keys", "(", ")", "# compute maximum difference for each member", "for", "member", "in", "list_of_members", ":", "tmp_array", "=", "np", ".", "array", "(", "[", "tmp_dtu", ".", "__dict__", "[", "member", "]", "for", "tmp_dtu", "in", "list_of_objects", "]", ")", "minval", "=", "tmp_array", ".", "min", "(", ")", "maxval", "=", "tmp_array", ".", "max", "(", ")", "result", ".", "__dict__", "[", "member", "]", "=", "maxval", "-", "minval", "return", "result" ]
Return DtuConfiguration instance with maximum differences. Parameters ---------- list_of_objects : python list List of DtuConfiguration instances to be compared. Returns ------- result : DtuConfiguration instance Object with the maximum difference (maximum minus minimum) of each member.
[ "Return", "DtuConfiguration", "instance", "with", "maximum", "differences", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/dtu_configuration.py#L239-L270
guaix-ucm/pyemir
emirdrp/instrument/dtu_configuration.py
DtuConfiguration.define_from_fits
def define_from_fits(cls, fitsobj, extnum=0): """Define class object from header information in FITS file. Parameters ---------- fitsobj: file object FITS file whose header contains the DTU information needed to initialise the members of this class. extnum : int Extension number (first extension is 0) """ # read input FITS file with fits.open(fitsobj) as hdulist: image_header = hdulist[extnum].header return cls.define_from_header(image_header)
python
def define_from_fits(cls, fitsobj, extnum=0): """Define class object from header information in FITS file. Parameters ---------- fitsobj: file object FITS file whose header contains the DTU information needed to initialise the members of this class. extnum : int Extension number (first extension is 0) """ # read input FITS file with fits.open(fitsobj) as hdulist: image_header = hdulist[extnum].header return cls.define_from_header(image_header)
[ "def", "define_from_fits", "(", "cls", ",", "fitsobj", ",", "extnum", "=", "0", ")", ":", "# read input FITS file", "with", "fits", ".", "open", "(", "fitsobj", ")", "as", "hdulist", ":", "image_header", "=", "hdulist", "[", "extnum", "]", ".", "header", "return", "cls", ".", "define_from_header", "(", "image_header", ")" ]
Define class object from header information in FITS file. Parameters ---------- fitsobj: file object FITS file whose header contains the DTU information needed to initialise the members of this class. extnum : int Extension number (first extension is 0)
[ "Define", "class", "object", "from", "header", "information", "in", "FITS", "file", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/dtu_configuration.py#L94-L110
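The classmethod simply opens the file and delegates to define_from_header, so the interesting object is the header itself. With astropy you can build one in memory to see the access pattern without touching disk; the keyword names follow the XDTU/YDTU convention the class documents:

from astropy.io import fits

hdr = fits.Header()
hdr["XDTU"] = 1.0
hdr["YDTU"] = -2.5
print(hdr["XDTU"], hdr["YDTU"])   # 1.0 -2.5

# Reading from a file mirrors the record above:
# with fits.open("frame.fits") as hdulist:   # hypothetical filename
#     image_header = hdulist[0].header       # extnum=0 is the primary HDU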
guaix-ucm/pyemir
emirdrp/instrument/dtu_configuration.py
DtuConfiguration.define_from_dictionary
def define_from_dictionary(cls, inputdict): """Define class object from dictionary. Parameters ---------- inputdict : dictionary like object Dictionary like object defining each member of the class. """ self = DtuConfiguration() for item in self.__dict__: self.__dict__[item] = inputdict[item] return self
python
def define_from_dictionary(cls, inputdict): """Define class object from dictionary. Parameters ---------- inputdict : dictionary like object Dictionary like object defining each member of the class. """ self = DtuConfiguration() for item in self.__dict__: self.__dict__[item] = inputdict[item] return self
[ "def", "define_from_dictionary", "(", "cls", ",", "inputdict", ")", ":", "self", "=", "DtuConfiguration", "(", ")", "for", "item", "in", "self", ".", "__dict__", ":", "self", ".", "__dict__", "[", "item", "]", "=", "inputdict", "[", "item", "]", "return", "self" ]
Define class object from dictionary. Parameters ---------- inputdict : dictionary like object Dictionary like object defining each member of the class.
[ "Define", "class", "object", "from", "dictionary", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/dtu_configuration.py#L126-L139
guaix-ucm/pyemir
emirdrp/instrument/dtu_configuration.py
DtuConfiguration.define_from_values
def define_from_values(cls, xdtu, ydtu, zdtu, xdtu_0, ydtu_0, zdtu_0): """Define class object from provided values. Parameters ---------- xdtu : float XDTU fits keyword value. ydtu : float YDTU fits keyword value. zdtu : float ZDTU fits keyword value. xdtu_0 : float XDTU_0 fits keyword value. ydtu_0 : float YDTU_0 fits keyword value. zdtu_0 : float ZDTU_0 fits keyword value. """ self = DtuConfiguration() # define DTU variables self.xdtu = xdtu self.ydtu = ydtu self.zdtu = zdtu self.xdtu_0 = xdtu_0 self.ydtu_0 = ydtu_0 self.zdtu_0 = zdtu_0 return self
python
def define_from_values(cls, xdtu, ydtu, zdtu, xdtu_0, ydtu_0, zdtu_0): """Define class object from provided values. Parameters ---------- xdtu : float XDTU fits keyword value. ydtu : float YDTU fits keyword value. zdtu : float ZDTU fits keyword value. xdtu_0 : float XDTU_0 fits keyword value. ydtu_0 : float YDTU_0 fits keyword value. zdtu_0 : float ZDTU_0 fits keyword value. """ self = DtuConfiguration() # define DTU variables self.xdtu = xdtu self.ydtu = ydtu self.zdtu = zdtu self.xdtu_0 = xdtu_0 self.ydtu_0 = ydtu_0 self.zdtu_0 = zdtu_0 return self
[ "def", "define_from_values", "(", "cls", ",", "xdtu", ",", "ydtu", ",", "zdtu", ",", "xdtu_0", ",", "ydtu_0", ",", "zdtu_0", ")", ":", "self", "=", "DtuConfiguration", "(", ")", "# define DTU variables", "self", ".", "xdtu", "=", "xdtu", "self", ".", "ydtu", "=", "ydtu", "self", ".", "zdtu", "=", "zdtu", "self", ".", "xdtu_0", "=", "xdtu_0", "self", ".", "ydtu_0", "=", "ydtu_0", "self", ".", "zdtu_0", "=", "zdtu_0", "return", "self" ]
Define class object from provided values. Parameters ---------- xdtu : float XDTU fits keyword value. ydtu : float YDTU fits keyword value. zdtu : float ZDTU fits keyword value. xdtu_0 : float XDTU_0 fits keyword value. ydtu_0 : float YDTU_0 fits keyword value. zdtu_0 : float ZDTU_0 fits keyword value.
[ "Define", "class", "object", "from", "from", "provided", "values", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/dtu_configuration.py#L142-L170
guaix-ucm/pyemir
emirdrp/instrument/dtu_configuration.py
DtuConfiguration.closeto
def closeto(self, other, abserror): """Check that all the members are equal within provided absolute error. Parameters ---------- other : DtuConfiguration object DTU configuration instance to be compared with self. abserror : float Absolute maximum allowed error. Returns ------- result : bool True if all members are within the specified maximum absolute error """ result = \ (abs(self.xdtu - other.xdtu) <= abserror) and \ (abs(self.ydtu - other.ydtu) <= abserror) and \ (abs(self.zdtu - other.zdtu) <= abserror) and \ (abs(self.xdtu_0 - other.xdtu_0) <= abserror) and \ (abs(self.ydtu_0 - other.ydtu_0) <= abserror) and \ (abs(self.zdtu_0 - other.zdtu_0) <= abserror) return result
python
def closeto(self, other, abserror): """Check that all the members are equal within provided absolute error. Parameters ---------- other : DtuConfiguration object DTU configuration instance to be compared with self. abserror : float Absolute maximum allowed error. Returns ------- result : bool True if all members are within the specified maximum absolute error """ result = \ (abs(self.xdtu - other.xdtu) <= abserror) and \ (abs(self.ydtu - other.ydtu) <= abserror) and \ (abs(self.zdtu - other.zdtu) <= abserror) and \ (abs(self.xdtu_0 - other.xdtu_0) <= abserror) and \ (abs(self.ydtu_0 - other.ydtu_0) <= abserror) and \ (abs(self.zdtu_0 - other.zdtu_0) <= abserror) return result
[ "def", "closeto", "(", "self", ",", "other", ",", "abserror", ")", ":", "result", "=", "(", "abs", "(", "self", ".", "xdtu", "-", "other", ".", "xdtu", ")", "<=", "abserror", ")", "and", "(", "abs", "(", "self", ".", "ydtu", "-", "other", ".", "ydtu", ")", "<=", "abserror", ")", "and", "(", "abs", "(", "self", ".", "zdtu", "-", "other", ".", "zdtu", ")", "<=", "abserror", ")", "and", "(", "abs", "(", "self", ".", "xdtu_0", "-", "other", ".", "xdtu_0", ")", "<=", "abserror", ")", "and", "(", "abs", "(", "self", ".", "ydtu_0", "-", "other", ".", "ydtu_0", ")", "<=", "abserror", ")", "and", "(", "abs", "(", "self", ".", "zdtu_0", "-", "other", ".", "zdtu_0", ")", "<=", "abserror", ")", "return", "result" ]
Check that all the members are equal within provided absolute error. Parameters ---------- other : DtuConfiguration object DTU configuration instance to be compared with self. abserror : float Absolute maximum allowed error. Returns ------- result : bool True if all members are within the specified maximum absolute error
[ "Check", "that", "all", "the", "members", "are", "equal", "within", "provided", "absolute", "error", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/dtu_configuration.py#L172-L197
guaix-ucm/pyemir
emirdrp/instrument/dtu_configuration.py
DtuConfiguration.outdict
def outdict(self, ndigits=3): """Return dictionary structure rounded to a given precision.""" output = self.__dict__.copy() for item in output: output[item] = round(output[item], ndigits) return output
python
def outdict(self, ndigits=3): """Return dictionary structure rounded to a given precision.""" output = self.__dict__.copy() for item in output: output[item] = round(output[item], ndigits) return output
[ "def", "outdict", "(", "self", ",", "ndigits", "=", "3", ")", ":", "output", "=", "self", ".", "__dict__", ".", "copy", "(", ")", "for", "item", "in", "output", ":", "output", "[", "item", "]", "=", "round", "(", "output", "[", "item", "]", ",", "ndigits", ")", "return", "output" ]
Return dictionary structure rounded to a given precision.
[ "Return", "dictionary", "structure", "rounded", "to", "a", "given", "precision", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/dtu_configuration.py#L199-L205
IdentityPython/oidcendpoint
src/oidcendpoint/util.py
build_endpoints
def build_endpoints(conf, endpoint_context, client_authn_method, issuer): """ conf typically contains:: 'provider_config': { 'path': '.well-known/openid-configuration', 'class': ProviderConfiguration, 'kwargs': {} }, :param conf: :param endpoint_context: :param client_authn_method: :param issuer: :return: """ if issuer.endswith('/'): _url = issuer[:-1] else: _url = issuer endpoint = {} for name, spec in conf.items(): try: kwargs = spec['kwargs'] except KeyError: kwargs = {} if isinstance(spec['class'], str): _instance = importer(spec['class'])( endpoint_context=endpoint_context, **kwargs) else: _instance = spec['class'](endpoint_context=endpoint_context, **kwargs) _instance.endpoint_path = spec['path'] _instance.full_path = '{}/{}'.format(_url, spec['path']) if 'provider_info' in spec: _instance.provider_info = spec['provider_info'] try: _client_authn_method = kwargs['client_authn_method'] except KeyError: _instance.client_auth_method = client_authn_method else: _instance.client_auth_method = _client_authn_method endpoint[name] = _instance return endpoint
python
def build_endpoints(conf, endpoint_context, client_authn_method, issuer): """ conf typically contains:: 'provider_config': { 'path': '.well-known/openid-configuration', 'class': ProviderConfiguration, 'kwargs': {} }, :param conf: :param endpoint_context: :param client_authn_method: :param issuer: :return: """ if issuer.endswith('/'): _url = issuer[:-1] else: _url = issuer endpoint = {} for name, spec in conf.items(): try: kwargs = spec['kwargs'] except KeyError: kwargs = {} if isinstance(spec['class'], str): _instance = importer(spec['class'])( endpoint_context=endpoint_context, **kwargs) else: _instance = spec['class'](endpoint_context=endpoint_context, **kwargs) _instance.endpoint_path = spec['path'] _instance.full_path = '{}/{}'.format(_url, spec['path']) if 'provider_info' in spec: _instance.provider_info = spec['provider_info'] try: _client_authn_method = kwargs['client_authn_method'] except KeyError: _instance.client_auth_method = client_authn_method else: _instance.client_auth_method = _client_authn_method endpoint[name] = _instance return endpoint
[ "def", "build_endpoints", "(", "conf", ",", "endpoint_context", ",", "client_authn_method", ",", "issuer", ")", ":", "if", "issuer", ".", "endswith", "(", "'/'", ")", ":", "_url", "=", "issuer", "[", ":", "-", "1", "]", "else", ":", "_url", "=", "issuer", "endpoint", "=", "{", "}", "for", "name", ",", "spec", "in", "conf", ".", "items", "(", ")", ":", "try", ":", "kwargs", "=", "spec", "[", "'kwargs'", "]", "except", "KeyError", ":", "kwargs", "=", "{", "}", "if", "isinstance", "(", "spec", "[", "'class'", "]", ",", "str", ")", ":", "_instance", "=", "importer", "(", "spec", "[", "'class'", "]", ")", "(", "endpoint_context", "=", "endpoint_context", ",", "*", "*", "kwargs", ")", "else", ":", "_instance", "=", "spec", "[", "'class'", "]", "(", "endpoint_context", "=", "endpoint_context", ",", "*", "*", "kwargs", ")", "_instance", ".", "endpoint_path", "=", "spec", "[", "'path'", "]", "_instance", ".", "full_path", "=", "'{}/{}'", ".", "format", "(", "_url", ",", "spec", "[", "'path'", "]", ")", "if", "'provider_info'", "in", "spec", ":", "_instance", ".", "provider_info", "=", "spec", "[", "'provider_info'", "]", "try", ":", "_client_authn_method", "=", "kwargs", "[", "'client_authn_method'", "]", "except", "KeyError", ":", "_instance", ".", "client_auth_method", "=", "client_authn_method", "else", ":", "_instance", ".", "client_auth_method", "=", "_client_authn_method", "endpoint", "[", "name", "]", "=", "_instance", "return", "endpoint" ]
conf typically contains:: 'provider_config': { 'path': '.well-known/openid-configuration', 'class': ProviderConfiguration, 'kwargs': {} }, :param conf: :param endpoint_context: :param client_authn_method: :param issuer: :return:
[ "conf", "typically", "contains", "::" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/util.py#L34-L84
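When spec['class'] is a string, the record resolves it through an importer() helper whose definition is not shown here. A plausible stand-in, assuming the conventional dotted-path behaviour (this is a guess at the helper, not the project's actual code):

import importlib

def importer(dotted_path):
    """Resolve 'package.module.Name' to the named attribute (assumed behaviour)."""
    module_path, _, name = dotted_path.rpartition(".")
    return getattr(importlib.import_module(module_path), name)

Fraction = importer("fractions.Fraction")
print(Fraction(1, 3))   # 1/3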
IdentityPython/oidcendpoint
src/oidcendpoint/util.py
lv_pack
def lv_pack(*args): """ Serializes using length:value format :param args: values :return: string """ s = [] for a in args: s.append('{}:{}'.format(len(a), a)) return ''.join(s)
python
def lv_pack(*args): """ Serializes using length:value format :param args: values :return: string """ s = [] for a in args: s.append('{}:{}'.format(len(a), a)) return ''.join(s)
[ "def", "lv_pack", "(", "*", "args", ")", ":", "s", "=", "[", "]", "for", "a", "in", "args", ":", "s", ".", "append", "(", "'{}:{}'", ".", "format", "(", "len", "(", "a", ")", ",", "a", ")", ")", "return", "''", ".", "join", "(", "s", ")" ]
Serializes using length:value format :param args: values :return: string
[ "Serializes", "using", "length", ":", "value", "format" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/util.py#L106-L116
IdentityPython/oidcendpoint
src/oidcendpoint/util.py
lv_unpack
def lv_unpack(txt): """ Deserializes a string of the length:value format :param txt: The input string :return: a list og values """ txt = txt.strip() res = [] while txt: l, v = txt.split(':', 1) res.append(v[:int(l)]) txt = v[int(l):] return res
python
def lv_unpack(txt): """ Deserializes a string of the length:value format :param txt: The input string :return: a list of values """ txt = txt.strip() res = [] while txt: l, v = txt.split(':', 1) res.append(v[:int(l)]) txt = v[int(l):] return res
[ "def", "lv_unpack", "(", "txt", ")", ":", "txt", "=", "txt", ".", "strip", "(", ")", "res", "=", "[", "]", "while", "txt", ":", "l", ",", "v", "=", "txt", ".", "split", "(", "':'", ",", "1", ")", "res", ".", "append", "(", "v", "[", ":", "int", "(", "l", ")", "]", ")", "txt", "=", "v", "[", "int", "(", "l", ")", ":", "]", "return", "res" ]
Deserializes a string of the length:value format :param txt: The input string :return: a list of values
[ "Deserializes", "a", "string", "of", "the", "length", ":", "value", "format" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/util.py#L119-L132
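The length prefix is what makes the format safe for values that themselves contain ':' characters, as a round trip shows (both functions restated so the snippet runs standalone):

def lv_pack(*args):
    return ''.join('{}:{}'.format(len(a), a) for a in args)

def lv_unpack(txt):
    txt = txt.strip()
    res = []
    while txt:
        length, rest = txt.split(':', 1)   # only the first ':' delimits the length
        res.append(rest[:int(length)])
        txt = rest[int(length):]
    return res

packed = lv_pack("sid:123", "", "user@example.org")
print(packed)              # 7:sid:1230:16:user@example.org
print(lv_unpack(packed))   # ['sid:123', '', 'user@example.org']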
BeyondTheClouds/enoslib
enoslib/infra/enos_vagrant/provider.py
Enos_vagrant.init
def init(self, force_deploy=False): """Reserve and deploys the vagrant boxes. Args: force_deploy (bool): True iff new machines should be started """ machines = self.provider_conf.machines networks = self.provider_conf.networks _networks = [] for network in networks: ipnet = IPNetwork(network.cidr) _networks.append({ "netpool": list(ipnet)[10:-10], "cidr": network.cidr, "roles": network.roles, "gateway": ipnet.ip }) vagrant_machines = [] vagrant_roles = {} j = 0 for machine in machines: for _ in range(machine.number): vagrant_machine = { "name": "enos-%s" % j, "cpu": machine.flavour_desc["core"], "mem": machine.flavour_desc["mem"], "ips": [n["netpool"].pop() for n in _networks], } vagrant_machines.append(vagrant_machine) # Assign the machines to the right roles for role in machine.roles: vagrant_roles.setdefault(role, []).append(vagrant_machine) j = j + 1 logger.debug(vagrant_roles) loader = FileSystemLoader(searchpath=TEMPLATE_DIR) env = Environment(loader=loader, autoescape=True) template = env.get_template('Vagrantfile.j2') vagrantfile = template.render(machines=vagrant_machines, provider_conf=self.provider_conf) vagrantfile_path = os.path.join(os.getcwd(), "Vagrantfile") with open(vagrantfile_path, 'w') as f: f.write(vagrantfile) # Build env for Vagrant with a copy of env variables (needed by # subprocess opened by vagrant v_env = dict(os.environ) v_env['VAGRANT_DEFAULT_PROVIDER'] = self.provider_conf.backend v = vagrant.Vagrant(root=os.getcwd(), quiet_stdout=False, quiet_stderr=False, env=v_env) if force_deploy: v.destroy() v.up() v.provision() roles = {} for role, machines in vagrant_roles.items(): for machine in machines: keyfile = v.keyfile(vm_name=machine['name']) port = v.port(vm_name=machine['name']) address = v.hostname(vm_name=machine['name']) roles.setdefault(role, []).append( Host(address, alias=machine['name'], user=self.provider_conf.user, port=port, keyfile=keyfile)) networks = [{ 'cidr': str(n["cidr"]), 'start': str(n["netpool"][0]), 'end': str(n["netpool"][-1]), 'dns': '8.8.8.8', 'gateway': str(n["gateway"]), 'roles': n["roles"] } for n in _networks] logger.debug(roles) logger.debug(networks) return (roles, networks)
python
def init(self, force_deploy=False): """Reserve and deploys the vagrant boxes. Args: force_deploy (bool): True iff new machines should be started """ machines = self.provider_conf.machines networks = self.provider_conf.networks _networks = [] for network in networks: ipnet = IPNetwork(network.cidr) _networks.append({ "netpool": list(ipnet)[10:-10], "cidr": network.cidr, "roles": network.roles, "gateway": ipnet.ip }) vagrant_machines = [] vagrant_roles = {} j = 0 for machine in machines: for _ in range(machine.number): vagrant_machine = { "name": "enos-%s" % j, "cpu": machine.flavour_desc["core"], "mem": machine.flavour_desc["mem"], "ips": [n["netpool"].pop() for n in _networks], } vagrant_machines.append(vagrant_machine) # Assign the machines to the right roles for role in machine.roles: vagrant_roles.setdefault(role, []).append(vagrant_machine) j = j + 1 logger.debug(vagrant_roles) loader = FileSystemLoader(searchpath=TEMPLATE_DIR) env = Environment(loader=loader, autoescape=True) template = env.get_template('Vagrantfile.j2') vagrantfile = template.render(machines=vagrant_machines, provider_conf=self.provider_conf) vagrantfile_path = os.path.join(os.getcwd(), "Vagrantfile") with open(vagrantfile_path, 'w') as f: f.write(vagrantfile) # Build env for Vagrant with a copy of env variables (needed by # subprocess opened by vagrant v_env = dict(os.environ) v_env['VAGRANT_DEFAULT_PROVIDER'] = self.provider_conf.backend v = vagrant.Vagrant(root=os.getcwd(), quiet_stdout=False, quiet_stderr=False, env=v_env) if force_deploy: v.destroy() v.up() v.provision() roles = {} for role, machines in vagrant_roles.items(): for machine in machines: keyfile = v.keyfile(vm_name=machine['name']) port = v.port(vm_name=machine['name']) address = v.hostname(vm_name=machine['name']) roles.setdefault(role, []).append( Host(address, alias=machine['name'], user=self.provider_conf.user, port=port, keyfile=keyfile)) networks = [{ 'cidr': str(n["cidr"]), 'start': str(n["netpool"][0]), 'end': str(n["netpool"][-1]), 'dns': '8.8.8.8', 'gateway': str(n["gateway"]), 'roles': n["roles"] } for n in _networks] logger.debug(roles) logger.debug(networks) return (roles, networks)
[ "def", "init", "(", "self", ",", "force_deploy", "=", "False", ")", ":", "machines", "=", "self", ".", "provider_conf", ".", "machines", "networks", "=", "self", ".", "provider_conf", ".", "networks", "_networks", "=", "[", "]", "for", "network", "in", "networks", ":", "ipnet", "=", "IPNetwork", "(", "network", ".", "cidr", ")", "_networks", ".", "append", "(", "{", "\"netpool\"", ":", "list", "(", "ipnet", ")", "[", "10", ":", "-", "10", "]", ",", "\"cidr\"", ":", "network", ".", "cidr", ",", "\"roles\"", ":", "network", ".", "roles", ",", "\"gateway\"", ":", "ipnet", ".", "ip", "}", ")", "vagrant_machines", "=", "[", "]", "vagrant_roles", "=", "{", "}", "j", "=", "0", "for", "machine", "in", "machines", ":", "for", "_", "in", "range", "(", "machine", ".", "number", ")", ":", "vagrant_machine", "=", "{", "\"name\"", ":", "\"enos-%s\"", "%", "j", ",", "\"cpu\"", ":", "machine", ".", "flavour_desc", "[", "\"core\"", "]", ",", "\"mem\"", ":", "machine", ".", "flavour_desc", "[", "\"mem\"", "]", ",", "\"ips\"", ":", "[", "n", "[", "\"netpool\"", "]", ".", "pop", "(", ")", "for", "n", "in", "_networks", "]", ",", "}", "vagrant_machines", ".", "append", "(", "vagrant_machine", ")", "# Assign the machines to the right roles", "for", "role", "in", "machine", ".", "roles", ":", "vagrant_roles", ".", "setdefault", "(", "role", ",", "[", "]", ")", ".", "append", "(", "vagrant_machine", ")", "j", "=", "j", "+", "1", "logger", ".", "debug", "(", "vagrant_roles", ")", "loader", "=", "FileSystemLoader", "(", "searchpath", "=", "TEMPLATE_DIR", ")", "env", "=", "Environment", "(", "loader", "=", "loader", ",", "autoescape", "=", "True", ")", "template", "=", "env", ".", "get_template", "(", "'Vagrantfile.j2'", ")", "vagrantfile", "=", "template", ".", "render", "(", "machines", "=", "vagrant_machines", ",", "provider_conf", "=", "self", ".", "provider_conf", ")", "vagrantfile_path", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "\"Vagrantfile\"", ")", "with", "open", "(", "vagrantfile_path", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "vagrantfile", ")", "# Build env for Vagrant with a copy of env variables (needed by", "# subprocess opened by vagrant", "v_env", "=", "dict", "(", "os", ".", "environ", ")", "v_env", "[", "'VAGRANT_DEFAULT_PROVIDER'", "]", "=", "self", ".", "provider_conf", ".", "backend", "v", "=", "vagrant", ".", "Vagrant", "(", "root", "=", "os", ".", "getcwd", "(", ")", ",", "quiet_stdout", "=", "False", ",", "quiet_stderr", "=", "False", ",", "env", "=", "v_env", ")", "if", "force_deploy", ":", "v", ".", "destroy", "(", ")", "v", ".", "up", "(", ")", "v", ".", "provision", "(", ")", "roles", "=", "{", "}", "for", "role", ",", "machines", "in", "vagrant_roles", ".", "items", "(", ")", ":", "for", "machine", "in", "machines", ":", "keyfile", "=", "v", ".", "keyfile", "(", "vm_name", "=", "machine", "[", "'name'", "]", ")", "port", "=", "v", ".", "port", "(", "vm_name", "=", "machine", "[", "'name'", "]", ")", "address", "=", "v", ".", "hostname", "(", "vm_name", "=", "machine", "[", "'name'", "]", ")", "roles", ".", "setdefault", "(", "role", ",", "[", "]", ")", ".", "append", "(", "Host", "(", "address", ",", "alias", "=", "machine", "[", "'name'", "]", ",", "user", "=", "self", ".", "provider_conf", ".", "user", ",", "port", "=", "port", ",", "keyfile", "=", "keyfile", ")", ")", "networks", "=", "[", "{", "'cidr'", ":", "str", "(", "n", "[", "\"cidr\"", "]", ")", ",", "'start'", ":", "str", "(", "n", "[", "\"netpool\"", "]", "[", "0", "]", 
")", ",", "'end'", ":", "str", "(", "n", "[", "\"netpool\"", "]", "[", "-", "1", "]", ")", ",", "'dns'", ":", "'8.8.8.8'", ",", "'gateway'", ":", "str", "(", "n", "[", "\"gateway\"", "]", ")", ",", "'roles'", ":", "n", "[", "\"roles\"", "]", "}", "for", "n", "in", "_networks", "]", "logger", ".", "debug", "(", "roles", ")", "logger", ".", "debug", "(", "networks", ")", "return", "(", "roles", ",", "networks", ")" ]
Reserve and deploys the vagrant boxes. Args: force_deploy (bool): True iff new machines should be started
[ "Reserve", "and", "deploys", "the", "vagrant", "boxes", "." ]
train
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_vagrant/provider.py#L22-L105
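The deploy step renders a Vagrantfile through Jinja2. The project's Vagrantfile.j2 is not shown in this record, so the inline template below only illustrates the render call, using the machine fields the record builds (name, cpu, mem):

from jinja2 import Template

vagrantfile = Template(
    "{% for m in machines %}{{ m.name }}: cpu={{ m.cpu }} mem={{ m.mem }}\n{% endfor %}"
).render(machines=[
    {"name": "enos-0", "cpu": 2, "mem": 2048},
    {"name": "enos-1", "cpu": 4, "mem": 4096},
])
print(vagrantfile)
# enos-0: cpu=2 mem=2048
# enos-1: cpu=4 mem=4096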
BeyondTheClouds/enoslib
enoslib/infra/enos_vagrant/provider.py
Enos_vagrant.destroy
def destroy(self): """Destroy all vagrant boxes involved in the deployment.""" v = vagrant.Vagrant(root=os.getcwd(), quiet_stdout=False, quiet_stderr=True) v.destroy()
python
def destroy(self): """Destroy all vagrant boxes involved in the deployment.""" v = vagrant.Vagrant(root=os.getcwd(), quiet_stdout=False, quiet_stderr=True) v.destroy()
[ "def", "destroy", "(", "self", ")", ":", "v", "=", "vagrant", ".", "Vagrant", "(", "root", "=", "os", ".", "getcwd", "(", ")", ",", "quiet_stdout", "=", "False", ",", "quiet_stderr", "=", "True", ")", "v", ".", "destroy", "(", ")" ]
Destroy all vagrant boxes involved in the deployment.
[ "Destroy", "all", "vagrant", "box", "involved", "in", "the", "deployment", "." ]
train
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/enoslib/infra/enos_vagrant/provider.py#L107-L112
Yelp/uwsgi_metrics
uwsgi_metrics/ewma.py
EWMA.tick
def tick(self): """Mark the passage of time and decay the current rate accordingly.""" instant_rate = self.count / float(self.tick_interval_s) self.count = 0 if self.initialized: self.rate += (self.alpha * (instant_rate - self.rate)) else: self.rate = instant_rate self.initialized = True
python
def tick(self): """Mark the passage of time and decay the current rate accordingly.""" instant_rate = self.count / float(self.tick_interval_s) self.count = 0 if self.initialized: self.rate += (self.alpha * (instant_rate - self.rate)) else: self.rate = instant_rate self.initialized = True
[ "def", "tick", "(", "self", ")", ":", "instant_rate", "=", "self", ".", "count", "/", "float", "(", "self", ".", "tick_interval_s", ")", "self", ".", "count", "=", "0", "if", "self", ".", "initialized", ":", "self", ".", "rate", "+=", "(", "self", ".", "alpha", "*", "(", "instant_rate", "-", "self", ".", "rate", ")", ")", "else", ":", "self", ".", "rate", "=", "instant_rate", "self", ".", "initialized", "=", "True" ]
Mark the passage of time and decay the current rate accordingly.
[ "Mark", "the", "passage", "of", "time", "and", "decay", "the", "current", "rate", "accordingly", "." ]
train
https://github.com/Yelp/uwsgi_metrics/blob/534966fd461ff711aecd1e3d4caaafdc23ac33f0/uwsgi_metrics/ewma.py#L66-L74
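The update rule is a standard exponentially weighted moving average: rate += alpha * (instant_rate - rate), seeded with the first observed rate. The record does not show how alpha is chosen; the sketch below assumes the usual convention alpha = 1 - exp(-tick_interval / window):

import math

class EWMA:
    def __init__(self, tick_interval_s=5.0, window_s=60.0):
        self.tick_interval_s = tick_interval_s
        # Assumed smoothing factor; not shown in the record.
        self.alpha = 1.0 - math.exp(-tick_interval_s / window_s)
        self.count = 0
        self.rate = 0.0
        self.initialized = False

    def mark(self, n=1):
        self.count += n

    def tick(self):
        instant_rate = self.count / float(self.tick_interval_s)
        self.count = 0
        if self.initialized:
            self.rate += self.alpha * (instant_rate - self.rate)
        else:
            self.rate = instant_rate   # first tick seeds the average
            self.initialized = True

e = EWMA()
e.mark(100)
e.tick()                      # 100 events over 5 s: seeded at 20.0 events/s
for _ in range(3):
    e.tick()                  # idle intervals decay the rate
    print(round(e.rate, 2))   # ~18.4, ~16.93, ~15.58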
guaix-ucm/pyemir
emirdrp/instrument/csu_configuration.py
merge_odd_even_csu_configurations
def merge_odd_even_csu_configurations(conf_odd, conf_even): """Merge CSU configuration using odd- and even-numbered values. The returned CSU configuration includes the odd-numbered values from 'conf_odd' and the even-numbered values from 'conf_even'. Parameters ---------- conf_odd : CsuConfiguration instance CSU configuration corresponding to odd-numbered slitlets. conf_even : CsuConfiguration instance CSU configuration corresponding to even-numbered slitlets. Returns ------- merged_conf : CsuConfiguration instance CSU configuration resulting from the merging process. """ # initialize resulting CsuConfiguration instance using one of the # input configuration corresponding to the odd-numbered slitlets merged_conf = deepcopy(conf_odd) # update the resulting configuration with the values corresponding # to the even-numbered slitlets for i in range(EMIR_NBARS): ibar = i + 1 if ibar % 2 == 0: merged_conf._csu_bar_left[i] = conf_even._csu_bar_left[i] merged_conf._csu_bar_right[i] = conf_even._csu_bar_right[i] merged_conf._csu_bar_slit_center[i] = \ conf_even._csu_bar_slit_center[i] merged_conf._csu_bar_slit_width[i] = \ conf_even._csu_bar_slit_width[i] # return merged configuration return merged_conf
python
def merge_odd_even_csu_configurations(conf_odd, conf_even): """Merge CSU configuration using odd- and even-numbered values. The returned CSU configuration includes the odd-numbered values from 'conf_odd' and the even-numbered values from 'conf_even'. Parameters ---------- conf_odd : CsuConfiguration instance CSU configuration corresponding to odd-numbered slitlets. conf_even : CsuConfiguration instance CSU configuration corresponding to even-numbered slitlets. Returns ------- merged_conf : CsuConfiguration instance CSU configuration resulting from the merging process. """ # initialize resulting CsuConfiguration instance using one of the # input configuration corresponding to the odd-numbered slitlets merged_conf = deepcopy(conf_odd) # update the resulting configuration with the values corresponding # to the even-numbered slitlets for i in range(EMIR_NBARS): ibar = i + 1 if ibar % 2 == 0: merged_conf._csu_bar_left[i] = conf_even._csu_bar_left[i] merged_conf._csu_bar_right[i] = conf_even._csu_bar_right[i] merged_conf._csu_bar_slit_center[i] = \ conf_even._csu_bar_slit_center[i] merged_conf._csu_bar_slit_width[i] = \ conf_even._csu_bar_slit_width[i] # return merged configuration return merged_conf
[ "def", "merge_odd_even_csu_configurations", "(", "conf_odd", ",", "conf_even", ")", ":", "# initialize resulting CsuConfiguration instance using one of the", "# input configuration corresponding to the odd-numbered slitlets", "merged_conf", "=", "deepcopy", "(", "conf_odd", ")", "# update the resulting configuration with the values corresponding", "# to the even-numbered slitlets", "for", "i", "in", "range", "(", "EMIR_NBARS", ")", ":", "ibar", "=", "i", "+", "1", "if", "ibar", "%", "2", "==", "0", ":", "merged_conf", ".", "_csu_bar_left", "[", "i", "]", "=", "conf_even", ".", "_csu_bar_left", "[", "i", "]", "merged_conf", ".", "_csu_bar_right", "[", "i", "]", "=", "conf_even", ".", "_csu_bar_right", "[", "i", "]", "merged_conf", ".", "_csu_bar_slit_center", "[", "i", "]", "=", "conf_even", ".", "_csu_bar_slit_center", "[", "i", "]", "merged_conf", ".", "_csu_bar_slit_width", "[", "i", "]", "=", "conf_even", ".", "_csu_bar_slit_width", "[", "i", "]", "# return merged configuration", "return", "merged_conf" ]
Merge CSU configuration using odd- and even-numbered values. The returned CSU configuration includes the odd-numbered values from 'conf_odd' and the even-numbered values from 'conf_even'. Parameters ---------- conf_odd : CsuConfiguration instance CSU configuration corresponding to odd-numbered slitlets. conf_even : CsuConfiguration instance CSU configuration corresponding to even-numbered slitlets. Returns ------- merged_conf : CsuConfiguration instance CSU configuration resulting from the merging process.
[ "Merge", "CSU", "configuration", "using", "odd", "-", "and", "even", "-", "numbered", "values", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/csu_configuration.py#L210-L247
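The slitlet numbering is 1-based while the lists are 0-indexed, so even-numbered slitlets live at odd indices; that is all the ibar = i + 1 bookkeeping does. The same parity rule on plain lists:

def merge_odd_even(odd_vals, even_vals):
    # index i holds slitlet i + 1, so even-numbered slitlets sit at odd indices
    return [even_vals[i] if (i + 1) % 2 == 0 else odd_vals[i]
            for i in range(len(odd_vals))]

print(merge_odd_even(["o1", "o2", "o3", "o4"], ["e1", "e2", "e3", "e4"]))
# ['o1', 'e2', 'o3', 'e4']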
guaix-ucm/pyemir
emirdrp/instrument/csu_configuration.py
CsuConfiguration.define_from_header
def define_from_header(cls, image_header): """Define class members directly from FITS header. Parameters ---------- image_header : instance of hdulist.header Header content from a FITS file. """ self = CsuConfiguration() # declare lists to store configuration of CSU bars self._csu_bar_left = [] self._csu_bar_right = [] self._csu_bar_slit_center = [] self._csu_bar_slit_width = [] for i in range(EMIR_NBARS): ibar = i + 1 keyword = 'CSUP{}'.format(ibar) if keyword in image_header: self._csu_bar_left.append(image_header[keyword]) else: raise ValueError("Expected keyword " + keyword + " not found!") keyword = 'CSUP{}'.format(ibar + EMIR_NBARS) if keyword in image_header: # set the same origin as the one employed for _csu_bar_left self._csu_bar_right.append(341.5 - image_header[keyword]) else: raise ValueError("Expected keyword " + keyword + " not found!") self._csu_bar_slit_center.append( (self._csu_bar_left[i] + self._csu_bar_right[i]) / 2 ) self._csu_bar_slit_width.append( self._csu_bar_right[i] - self._csu_bar_left[i] ) return self
python
def define_from_header(cls, image_header): """Define class members directly from FITS header. Parameters ---------- image_header : instance of hdulist.header Header content from a FITS file. """ self = CsuConfiguration() # declare lists to store configuration of CSU bars self._csu_bar_left = [] self._csu_bar_right = [] self._csu_bar_slit_center = [] self._csu_bar_slit_width = [] for i in range(EMIR_NBARS): ibar = i + 1 keyword = 'CSUP{}'.format(ibar) if keyword in image_header: self._csu_bar_left.append(image_header[keyword]) else: raise ValueError("Expected keyword " + keyword + " not found!") keyword = 'CSUP{}'.format(ibar + EMIR_NBARS) if keyword in image_header: # set the same origin as the one employed for _csu_bar_left self._csu_bar_right.append(341.5 - image_header[keyword]) else: raise ValueError("Expected keyword " + keyword + " not found!") self._csu_bar_slit_center.append( (self._csu_bar_left[i] + self._csu_bar_right[i]) / 2 ) self._csu_bar_slit_width.append( self._csu_bar_right[i] - self._csu_bar_left[i] ) return self
[ "def", "define_from_header", "(", "cls", ",", "image_header", ")", ":", "self", "=", "CsuConfiguration", "(", ")", "# declare lists to store configuration of CSU bars", "self", ".", "_csu_bar_left", "=", "[", "]", "self", ".", "_csu_bar_right", "=", "[", "]", "self", ".", "_csu_bar_slit_center", "=", "[", "]", "self", ".", "_csu_bar_slit_width", "=", "[", "]", "for", "i", "in", "range", "(", "EMIR_NBARS", ")", ":", "ibar", "=", "i", "+", "1", "keyword", "=", "'CSUP{}'", ".", "format", "(", "ibar", ")", "if", "keyword", "in", "image_header", ":", "self", ".", "_csu_bar_left", ".", "append", "(", "image_header", "[", "keyword", "]", ")", "else", ":", "raise", "ValueError", "(", "\"Expected keyword \"", "+", "keyword", "+", "\" not found!\"", ")", "keyword", "=", "'CSUP{}'", ".", "format", "(", "ibar", "+", "EMIR_NBARS", ")", "if", "keyword", "in", "image_header", ":", "# set the same origin as the one employed for _csu_bar_left", "self", ".", "_csu_bar_right", ".", "append", "(", "341.5", "-", "image_header", "[", "keyword", "]", ")", "else", ":", "raise", "ValueError", "(", "\"Expected keyword \"", "+", "keyword", "+", "\" not found!\"", ")", "self", ".", "_csu_bar_slit_center", ".", "append", "(", "(", "self", ".", "_csu_bar_left", "[", "i", "]", "+", "self", ".", "_csu_bar_right", "[", "i", "]", ")", "/", "2", ")", "self", ".", "_csu_bar_slit_width", ".", "append", "(", "self", ".", "_csu_bar_right", "[", "i", "]", "-", "self", ".", "_csu_bar_left", "[", "i", "]", ")", "return", "self" ]
Define class members directly from FITS header.

Parameters
----------
image_header : instance of hdulist.header
    Header content from a FITS file.
[ "Define", "class", "members", "directly", "from", "FITS", "header", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/csu_configuration.py#L100-L138
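For context, a sketch of how this classmethod would be used; the FITS file name is a hypothetical placeholder. Left bars come from the keywords CSUP1..CSUP{EMIR_NBARS} and right bars from the next EMIR_NBARS keywords, re-expressed as 341.5 - value so that both bars share the same origin:

from astropy.io import fits
from emirdrp.instrument.csu_configuration import CsuConfiguration

with fits.open('emir_exposure.fits') as hdul:   # hypothetical file name
    csu_conf = CsuConfiguration.define_from_header(hdul[0].header)
# csu_conf now holds per-slitlet left/right bar positions, slit
# centers and slit widths, all in mm.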
guaix-ucm/pyemir
emirdrp/instrument/csu_configuration.py
CsuConfiguration.outdict
def outdict(self, ndigits=3):
    """Return dictionary structure rounded to a given precision."""

    outdict = {}
    for i in range(EMIR_NBARS):
        ibar = i + 1
        cbar = 'slitlet' + str(ibar).zfill(2)
        outdict[cbar] = {}
        outdict[cbar]['_csu_bar_left'] = \
            round(self._csu_bar_left[i], ndigits)
        outdict[cbar]['_csu_bar_right'] = \
            round(self._csu_bar_right[i], ndigits)
        outdict[cbar]['_csu_bar_slit_center'] = \
            round(self._csu_bar_slit_center[i], ndigits)
        outdict[cbar]['_csu_bar_slit_width'] = \
            round(self._csu_bar_slit_width[i], ndigits)

    return outdict
python
def outdict(self, ndigits=3):
    """Return dictionary structure rounded to a given precision."""

    outdict = {}
    for i in range(EMIR_NBARS):
        ibar = i + 1
        cbar = 'slitlet' + str(ibar).zfill(2)
        outdict[cbar] = {}
        outdict[cbar]['_csu_bar_left'] = \
            round(self._csu_bar_left[i], ndigits)
        outdict[cbar]['_csu_bar_right'] = \
            round(self._csu_bar_right[i], ndigits)
        outdict[cbar]['_csu_bar_slit_center'] = \
            round(self._csu_bar_slit_center[i], ndigits)
        outdict[cbar]['_csu_bar_slit_width'] = \
            round(self._csu_bar_slit_width[i], ndigits)

    return outdict
[ "def", "outdict", "(", "self", ",", "ndigits", "=", "3", ")", ":", "outdict", "=", "{", "}", "for", "i", "in", "range", "(", "EMIR_NBARS", ")", ":", "ibar", "=", "i", "+", "1", "cbar", "=", "'slitlet'", "+", "str", "(", "ibar", ")", ".", "zfill", "(", "2", ")", "outdict", "[", "cbar", "]", "=", "{", "}", "outdict", "[", "cbar", "]", "[", "'_csu_bar_left'", "]", "=", "round", "(", "self", ".", "_csu_bar_left", "[", "i", "]", ",", "ndigits", ")", "outdict", "[", "cbar", "]", "[", "'_csu_bar_right'", "]", "=", "round", "(", "self", ".", "_csu_bar_right", "[", "i", "]", ",", "ndigits", ")", "outdict", "[", "cbar", "]", "[", "'_csu_bar_slit_center'", "]", "=", "round", "(", "self", ".", "_csu_bar_slit_center", "[", "i", "]", ",", "ndigits", ")", "outdict", "[", "cbar", "]", "[", "'_csu_bar_slit_width'", "]", "=", "round", "(", "self", ".", "_csu_bar_slit_width", "[", "i", "]", ",", "ndigits", ")", "return", "outdict" ]
Return dictionary structure rounded to a given precision.
[ "Return", "dictionary", "structure", "rounded", "to", "a", "given", "precision", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/csu_configuration.py#L160-L177
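A sketch serializing the rounded per-slitlet geometry to JSON; csu_conf is built as in the previous sketch, with a hypothetical file name:

import json

from astropy.io import fits
from emirdrp.instrument.csu_configuration import CsuConfiguration

with fits.open('emir_exposure.fits') as hdul:   # hypothetical file name
    csu_conf = CsuConfiguration.define_from_header(hdul[0].header)

# Keys are 'slitlet01', 'slitlet02', ...; every value is rounded to
# ndigits decimal places.
print(json.dumps(csu_conf.outdict(ndigits=3), indent=2, sort_keys=True))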
guaix-ucm/pyemir
emirdrp/instrument/csu_configuration.py
CsuConfiguration.widths_in_range_mm
def widths_in_range_mm(
        self,
        minwidth=EMIR_MINIMUM_SLITLET_WIDTH_MM,
        maxwidth=EMIR_MAXIMUM_SLITLET_WIDTH_MM
):
    """Return list of slitlets whose width is within given range

    Parameters
    ----------
    minwidth : float
        Minimum slit width (mm).
    maxwidth : float
        Maximum slit width (mm).

    Returns
    -------
    list_ok : list
        List of slitlet numbers (1-based) whose width is within the
        given range.

    """

    list_ok = []
    for i in range(EMIR_NBARS):
        slitlet_ok = minwidth <= self._csu_bar_slit_width[i] <= maxwidth
        if slitlet_ok:
            list_ok.append(i + 1)

    return list_ok
python
def widths_in_range_mm(
        self,
        minwidth=EMIR_MINIMUM_SLITLET_WIDTH_MM,
        maxwidth=EMIR_MAXIMUM_SLITLET_WIDTH_MM
):
    """Return list of slitlets whose width is within given range

    Parameters
    ----------
    minwidth : float
        Minimum slit width (mm).
    maxwidth : float
        Maximum slit width (mm).

    Returns
    -------
    list_ok : list
        List of slitlet numbers (1-based) whose width is within the
        given range.

    """

    list_ok = []
    for i in range(EMIR_NBARS):
        slitlet_ok = minwidth <= self._csu_bar_slit_width[i] <= maxwidth
        if slitlet_ok:
            list_ok.append(i + 1)

    return list_ok
[ "def", "widths_in_range_mm", "(", "self", ",", "minwidth", "=", "EMIR_MINIMUM_SLITLET_WIDTH_MM", ",", "maxwidth", "=", "EMIR_MAXIMUM_SLITLET_WIDTH_MM", ")", ":", "list_ok", "=", "[", "]", "for", "i", "in", "range", "(", "EMIR_NBARS", ")", ":", "slitlet_ok", "=", "minwidth", "<=", "self", ".", "_csu_bar_slit_width", "[", "i", "]", "<=", "maxwidth", "if", "slitlet_ok", ":", "list_ok", ".", "append", "(", "i", "+", "1", ")", "return", "list_ok" ]
Return list of slitlets whose width is within given range

Parameters
----------
minwidth : float
    Minimum slit width (mm).
maxwidth : float
    Maximum slit width (mm).

Returns
-------
list_ok : list
    List of slitlet numbers (1-based) whose width is within the
    given range.
[ "Return", "list", "of", "slitlets", "which", "width", "is", "within", "given", "range" ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/instrument/csu_configuration.py#L179-L207
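A short sketch; the width limits below are illustrative values in mm (the defaults come from EMIR_MINIMUM_SLITLET_WIDTH_MM and EMIR_MAXIMUM_SLITLET_WIDTH_MM), and csu_conf is built as in the previous sketches:

from astropy.io import fits
from emirdrp.instrument.csu_configuration import CsuConfiguration

with fits.open('emir_exposure.fits') as hdul:   # hypothetical file name
    csu_conf = CsuConfiguration.define_from_header(hdul[0].header)

# Note: the method returns 1-based slitlet numbers, not booleans.
good_slitlets = csu_conf.widths_in_range_mm(minwidth=0.8, maxwidth=1.2)
print(good_slitlets)   # e.g. [1, 2, 5, ...]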
guaix-ucm/pyemir
emirdrp/recipes/aiv/procedures.py
encloses_annulus
def encloses_annulus(x_min, x_max, y_min, y_max, nx, ny, r_in, r_out):
    '''Encloses function backported from old photutils'''
    gout = circular_overlap_grid(x_min, x_max, y_min, y_max, nx, ny,
                                 r_out, 1, 1)
    gin = circular_overlap_grid(x_min, x_max, y_min, y_max, nx, ny,
                                r_in, 1, 1)
    return gout - gin
python
def encloses_annulus(x_min, x_max, y_min, y_max, nx, ny, r_in, r_out):
    '''Encloses function backported from old photutils'''
    gout = circular_overlap_grid(x_min, x_max, y_min, y_max, nx, ny,
                                 r_out, 1, 1)
    gin = circular_overlap_grid(x_min, x_max, y_min, y_max, nx, ny,
                                r_in, 1, 1)
    return gout - gin
[ "def", "encloses_annulus", "(", "x_min", ",", "x_max", ",", "y_min", ",", "y_max", ",", "nx", ",", "ny", ",", "r_in", ",", "r_out", ")", ":", "gout", "=", "circular_overlap_grid", "(", "x_min", ",", "x_max", ",", "y_min", ",", "y_max", ",", "nx", ",", "ny", ",", "r_out", ",", "1", ",", "1", ")", "gin", "=", "circular_overlap_grid", "(", "x_min", ",", "x_max", ",", "y_min", ",", "y_max", ",", "nx", ",", "ny", ",", "r_in", ",", "1", ",", "1", ")", "return", "gout", "-", "gin" ]
Encloses function backported from old photutils
[ "Encloses", "function", "backported", "from", "old", "photutils" ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/recipes/aiv/procedures.py#L43-L48
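A sketch of the resulting weight map on a small grid; values are the fraction of each pixel covered by the annulus (about 1 between r_in and r_out, 0 elsewhere, fractional on the borders):

import numpy as np

from emirdrp.recipes.aiv.procedures import encloses_annulus

# 9x9 grid whose pixel centres run from -4 to +4 in both axes;
# annulus with r_in = 2 and r_out = 4 pixels centred on the grid.
weights = encloses_annulus(-4.5, 4.5, -4.5, 4.5, 9, 9, 2.0, 4.0)
print(weights.shape)         # (9, 9)
print(np.round(weights, 2))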
guaix-ucm/pyemir
emirdrp/recipes/aiv/procedures.py
comp_back_with_annulus
def comp_back_with_annulus(img, xc, yc, r_in, r_out, frac=0.1):
    '''
    center: [x,y], center of first pixel is [0,0]
    '''
    x_min = -0.5 - xc
    x_max = img.shape[1] - 0.5 - xc
    y_min = -0.5 - yc
    y_max = img.shape[0] - 0.5 - yc

    mm = encloses_annulus(x_min, x_max, y_min, y_max,
                          img.shape[1], img.shape[0],
                          r_in, r_out)

    valid = mm > frac
    rr = img[valid]

    if rr.size == 0:
        raise ValueError("Not enough points to compute background")

    # mode?
    bck = mode_half_sample(rr)

    return bck, mm
python
def comp_back_with_annulus(img, xc, yc, r_in, r_out, frac=0.1):
    '''
    center: [x,y], center of first pixel is [0,0]
    '''
    x_min = -0.5 - xc
    x_max = img.shape[1] - 0.5 - xc
    y_min = -0.5 - yc
    y_max = img.shape[0] - 0.5 - yc

    mm = encloses_annulus(x_min, x_max, y_min, y_max,
                          img.shape[1], img.shape[0],
                          r_in, r_out)

    valid = mm > frac
    rr = img[valid]

    if rr.size == 0:
        raise ValueError("Not enough points to compute background")

    # mode?
    bck = mode_half_sample(rr)

    return bck, mm
[ "def", "comp_back_with_annulus", "(", "img", ",", "xc", ",", "yc", ",", "r_in", ",", "r_out", ",", "frac", "=", "0.1", ")", ":", "x_min", "=", "-", "0.5", "-", "xc", "x_max", "=", "img", ".", "shape", "[", "1", "]", "-", "0.5", "-", "xc", "y_min", "=", "-", "0.5", "-", "yc", "y_max", "=", "img", ".", "shape", "[", "1", "]", "-", "0.5", "-", "yc", "mm", "=", "encloses_annulus", "(", "x_min", ",", "x_max", ",", "y_min", ",", "y_max", ",", "img", ".", "shape", "[", "1", "]", ",", "img", ".", "shape", "[", "0", "]", ",", "r_in", ",", "r_out", ")", "valid", "=", "mm", ">", "frac", "rr", "=", "img", "[", "valid", "]", "if", "rr", ".", "size", "==", "0", ":", "raise", "ValueError", "(", "\"Not enough points to compute background\"", ")", "# mode?", "bck", "=", "mode_half_sample", "(", "rr", ")", "return", "bck", ",", "mm" ]
center: [x,y], center of first pixel is [0,0]
[ "center", ":", "[", "x", "y", "]", "center", "of", "first", "pixel", "is", "[", "0", "0", "]" ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/recipes/aiv/procedures.py#L51-L73
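A sketch estimating the local background around a position on a synthetic frame; the half-sample mode of the annulus pixels should land close to the simulated background level:

import numpy as np

from emirdrp.recipes.aiv.procedures import comp_back_with_annulus

# Synthetic 64x64 frame: flat background of 100 counts plus Gaussian noise.
rng = np.random.default_rng(seed=0)
img = rng.normal(loc=100.0, scale=1.0, size=(64, 64))

# Background from pixels whose annulus overlap fraction exceeds frac.
bck, weights = comp_back_with_annulus(img, xc=32.0, yc=32.0,
                                      r_in=8.0, r_out=12.0, frac=0.1)
print(bck)   # close to 100.0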
guaix-ucm/pyemir
emirdrp/tools/rect_wpoly_for_mos.py
main
def main(args=None): # parse command-line options parser = argparse.ArgumentParser(prog='rect_wpoly_for_mos') # required arguments parser.add_argument("input_list", help="TXT file with list JSON files derived from " "longslit data") parser.add_argument("--fitted_bound_param", required=True, help="Input JSON with fitted boundary parameters", type=argparse.FileType('rt')) parser.add_argument("--out_MOSlibrary", required=True, help="Output JSON file with results", type=lambda x: arg_file_is_new(parser, x)) # optional arguments parser.add_argument("--debugplot", help="Integer indicating plotting & debugging options" " (default=0)", default=0, type=int, choices=DEBUGPLOT_CODES) parser.add_argument("--echo", help="Display full command line", action="store_true") args = parser.parse_args(args) if args.echo: print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n') # --- # Read input TXT file with list of JSON files list_json_files = list_fileinfo_from_txt(args.input_list) nfiles = len(list_json_files) if abs(args.debugplot) >= 10: print('>>> Number of input JSON files:', nfiles) for item in list_json_files: print(item) if nfiles < 2: raise ValueError("Insufficient number of input JSON files") # read fitted boundary parameters and check that all the longslit JSON # files have been computed using the same fitted boundary parameters refined_boundary_model = RefinedBoundaryModelParam._datatype_load( args.fitted_bound_param.name) for ifile in range(nfiles): coef_rect_wpoly = RectWaveCoeff._datatype_load( list_json_files[ifile].filename) uuid_tmp = coef_rect_wpoly.meta_info['origin']['bound_param'] if uuid_tmp[4:] != refined_boundary_model.uuid: print('Expected uuid:', refined_boundary_model.uuid) print('uuid for ifile #' + str(ifile + 1) + ": " + uuid_tmp) raise ValueError("Fitted boundary parameter uuid's do not match") # check consistency of grism, filter, DTU configuration and list of # valid slitlets coef_rect_wpoly_first_longslit = RectWaveCoeff._datatype_load( list_json_files[0].filename) filter_name = coef_rect_wpoly_first_longslit.tags['filter'] grism_name = coef_rect_wpoly_first_longslit.tags['grism'] dtu_conf = DtuConfiguration.define_from_dictionary( coef_rect_wpoly_first_longslit.meta_info['dtu_configuration'] ) list_valid_islitlets = list(range(1, EMIR_NBARS + 1)) for idel in coef_rect_wpoly_first_longslit.missing_slitlets: list_valid_islitlets.remove(idel) for ifile in range(1, nfiles): coef_rect_wpoly = RectWaveCoeff._datatype_load( list_json_files[ifile].filename) filter_tmp = coef_rect_wpoly.tags['filter'] if filter_name != filter_tmp: print(filter_name) print(filter_tmp) raise ValueError("Unexpected different filter found") grism_tmp = coef_rect_wpoly.tags['grism'] if grism_name != grism_tmp: print(grism_name) print(grism_tmp) raise ValueError("Unexpected different grism found") coef_rect_wpoly = RectWaveCoeff._datatype_load( list_json_files[ifile].filename) dtu_conf_tmp = DtuConfiguration.define_from_dictionary( coef_rect_wpoly.meta_info['dtu_configuration'] ) if dtu_conf != dtu_conf_tmp: print(dtu_conf) print(dtu_conf_tmp) raise ValueError("Unexpected different DTU configurations found") list_valid_islitlets_tmp = list(range(1, EMIR_NBARS + 1)) for idel in coef_rect_wpoly.missing_slitlets: list_valid_islitlets_tmp.remove(idel) if list_valid_islitlets != list_valid_islitlets_tmp: print(list_valid_islitlets) print(list_valid_islitlets_tmp) raise ValueError("Unexpected different list of valid slitlets") # check consistency of horizontal bounding box limits (bb_nc1_orig and # 
bb_nc2_orig) and ymargin_bb, and store the values for each slitlet dict_bb_param = {} print("Checking horizontal bounding box limits and ymargin_bb:") for islitlet in list_valid_islitlets: islitlet_progress(islitlet, EMIR_NBARS) cslitlet = 'slitlet' + str(islitlet).zfill(2) dict_bb_param[cslitlet] = {} for par in ['bb_nc1_orig', 'bb_nc2_orig', 'ymargin_bb']: value_initial = \ coef_rect_wpoly_first_longslit.contents[islitlet - 1][par] for ifile in range(1, nfiles): coef_rect_wpoly = RectWaveCoeff._datatype_load( list_json_files[ifile].filename) value_tmp = coef_rect_wpoly.contents[islitlet - 1][par] if value_initial != value_tmp: print(islitlet, value_initial, value_tmp) print(value_tmp) raise ValueError("Unexpected different " + par) dict_bb_param[cslitlet][par] = value_initial print('OK!') # --- # Read and store all the longslit data list_coef_rect_wpoly = [] for ifile in range(nfiles): coef_rect_wpoly = RectWaveCoeff._datatype_load( list_json_files[ifile].filename) list_coef_rect_wpoly.append(coef_rect_wpoly) # --- # Initialize structure to save results into an output JSON file outdict = {} outdict['refined_boundary_model'] = refined_boundary_model.__getstate__() outdict['instrument'] = 'EMIR' outdict['meta_info'] = {} outdict['meta_info']['creation_date'] = datetime.now().isoformat() outdict['meta_info']['description'] = \ 'rectification and wavelength calibration polynomial coefficients ' \ 'as a function of csu_bar_slit_center for MOS' outdict['meta_info']['recipe_name'] = 'undefined' outdict['meta_info']['origin'] = {} outdict['meta_info']['origin']['wpoly_longslits'] = {} for ifile in range(nfiles): cdum = 'longslit_' + str(ifile + 1).zfill(3) + '_uuid' outdict['meta_info']['origin']['wpoly_longslits'][cdum] = \ list_coef_rect_wpoly[ifile].uuid outdict['tags'] = {} outdict['tags']['grism'] = grism_name outdict['tags']['filter'] = filter_name outdict['dtu_configuration'] = dtu_conf.outdict() outdict['uuid'] = str(uuid4()) outdict['contents'] = {} # include bb_nc1_orig, bb_nc2_orig and ymargin_bb for each slitlet # (note that the values of bb_ns1_orig and bb_ns2_orig cannot be # computed at this stage because they depend on csu_bar_slit_center) for islitlet in list_valid_islitlets: cslitlet = 'slitlet' + str(islitlet).zfill(2) outdict['contents'][cslitlet] = dict_bb_param[cslitlet] # check that order for rectification transformations is the same for all # the slitlets and longslit configurations order_check_list = [] for ifile in range(nfiles): tmpdict = list_coef_rect_wpoly[ifile].contents for islitlet in list_valid_islitlets: ttd_order = tmpdict[islitlet - 1]['ttd_order'] if ttd_order is not None: order_check_list.append(ttd_order) ttd_order_modeled = \ tmpdict[islitlet - 1]['ttd_order_longslit_model'] order_check_list.append(ttd_order_modeled) # remove duplicates in list order_no_duplicates = list(set(order_check_list)) if len(order_no_duplicates) != 1: print('order_no_duplicates:', order_no_duplicates) raise ValueError('ttd_order is not constant!') ttd_order = int(order_no_duplicates[0]) ncoef_rect = ncoef_fmap(ttd_order) if abs(args.debugplot) >= 10: print('>>> ttd_order........:', ttd_order) print('>>> ncoef_rect.......:', ncoef_rect) # check that polynomial degree in frontiers and spectrails are the same poldeg_check_list = [] for ifile in range(nfiles): tmpdict = list_coef_rect_wpoly[ifile].contents for islitlet in list_valid_islitlets: tmppoly = tmpdict[islitlet - 1]['frontier']['poly_coef_lower'] poldeg_check_list.append(len(tmppoly) - 1) tmppoly = tmpdict[islitlet -
1]['frontier']['poly_coef_upper'] poldeg_check_list.append(len(tmppoly) - 1) tmppoly = tmpdict[islitlet - 1]['spectrail']['poly_coef_lower'] poldeg_check_list.append(len(tmppoly) - 1) tmppoly = tmpdict[islitlet - 1]['spectrail']['poly_coef_middle'] poldeg_check_list.append(len(tmppoly) - 1) tmppoly = tmpdict[islitlet - 1]['spectrail']['poly_coef_upper'] poldeg_check_list.append(len(tmppoly) - 1) # remove duplicates in list poldeg_no_duplicates = list(set(poldeg_check_list)) if len(poldeg_no_duplicates) != 1: print('poldeg_no_duplicates:', poldeg_no_duplicates) raise ValueError('poldeg is not constant in frontiers and ' 'spectrails!') poldeg_spectrails = int(poldeg_no_duplicates[0]) if abs(args.debugplot) >= 10: print('>>> poldeg spectrails:', poldeg_spectrails) # check that polynomial degree of wavelength calibration is the same for # all the slitlets poldeg_check_list = [] for ifile in range(nfiles): tmpdict = list_coef_rect_wpoly[ifile].contents for islitlet in list_valid_islitlets: tmppoly = tmpdict[islitlet - 1]['wpoly_coeff'] poldeg_check_list.append(len(tmppoly) - 1) tmppoly = tmpdict[islitlet - 1]['wpoly_coeff_longslit_model'] poldeg_check_list.append(len(tmppoly) - 1) # remove duplicates in list poldeg_no_duplicates = list(set(poldeg_check_list)) if len(poldeg_no_duplicates) != 1: print('poldeg_no_duplicates:', poldeg_no_duplicates) raise ValueError('poldeg is not constant in wavelength calibration ' 'polynomials!') poldeg_wavecal = int(poldeg_no_duplicates[0]) if abs(args.debugplot) >= 10: print('>>> poldeg wavecal...:', poldeg_wavecal) # --- # csu_bar_slit_center values for each slitlet print("CSU_bar_slit_center values:") for islitlet in list_valid_islitlets: islitlet_progress(islitlet, EMIR_NBARS) cslitlet = 'slitlet' + str(islitlet).zfill(2) list_csu_bar_slit_center = [] for ifile in range(nfiles): tmpdict = list_coef_rect_wpoly[ifile].contents[islitlet - 1] csu_bar_slit_center = tmpdict['csu_bar_slit_center'] list_csu_bar_slit_center.append(csu_bar_slit_center) # check that list_csu_bar_slit_center is properly sorted if not np.all(list_csu_bar_slit_center[:-1] <= list_csu_bar_slit_center[1:]): print('cslitlet: ', cslitlet) print('list_csu_bar_slit_center: ', list_csu_bar_slit_center) raise ValueError('Unsorted list_csu_bar_slit_center') outdict['contents'][cslitlet]['list_csu_bar_slit_center'] = \ list_csu_bar_slit_center print('OK!') # --- # rectification polynomial coefficients # note: when aij and bij have not been computed, we use the modeled # version aij_longslit_model and bij_longslit_model print("Rectification polynomial coefficients:") for islitlet in list_valid_islitlets: islitlet_progress(islitlet, EMIR_NBARS) cslitlet = 'slitlet' + str(islitlet).zfill(2) outdict['contents'][cslitlet]['ttd_order'] = ttd_order outdict['contents'][cslitlet]['ncoef_rect'] = ncoef_rect for keycoef in ['ttd_aij', 'ttd_bij', 'tti_aij', 'tti_bij']: for icoef in range(ncoef_rect): ccoef = str(icoef).zfill(2) list_cij = [] for ifile in range(nfiles): tmpdict = \ list_coef_rect_wpoly[ifile].contents[islitlet - 1] cij = tmpdict[keycoef] if cij is not None: list_cij.append(cij[icoef]) else: cij_modeled = tmpdict[keycoef + '_longslit_model'] if cij_modeled is None: raise ValueError("Unexpected cij_modeled=None!") else: list_cij.append(cij_modeled[icoef]) if abs(args.debugplot) >= 10: print("Warning: using " + keycoef + "_longslit_model for " + cslitlet + " in file " + list_json_files[ifile].filename) cdum = 'list_' + keycoef + '_' + ccoef outdict['contents'][cslitlet][cdum] = list_cij 
print('OK!') # --- # wavelength calibration polynomial coefficients # note: when wpoly_coeff have not been computed, we use the # wpoly_coeff_longslit_model print("Wavelength calibration polynomial coefficients:") for islitlet in list_valid_islitlets: islitlet_progress(islitlet, EMIR_NBARS) cslitlet = 'slitlet' + str(islitlet).zfill(2) outdict['contents'][cslitlet]['wpoly_degree'] = poldeg_wavecal for icoef in range(poldeg_wavecal + 1): ccoef = str(icoef).zfill(2) list_cij = [] for ifile in range(nfiles): tmpdict = list_coef_rect_wpoly[ifile].contents[islitlet - 1] cij = tmpdict['wpoly_coeff'] if cij is not None: list_cij.append(cij[icoef]) else: cij_modeled = tmpdict['wpoly_coeff_longslit_model'] if cij_modeled is None: raise ValueError("Unexpected cij_modeled=None!") else: list_cij.append(cij_modeled[icoef]) if abs(args.debugplot) >= 10: print("Warning: using wpoly_coeff_longslit_model" + " for " + cslitlet + " in file " + list_json_files[ifile].filename) outdict['contents'][cslitlet]['list_wpoly_coeff_' + ccoef] = \ list_cij print('OK!') # --- # OBSOLETE # Save resulting JSON structure ''' with open(args.out_MOSlibrary.name + '_old', 'w') as fstream: json.dump(outdict, fstream, indent=2, sort_keys=True) print('>>> Saving file ' + args.out_MOSlibrary.name + '_old') ''' # -- # Create object of type MasterRectWave with library of coefficients # for rectification and wavelength calibration master_rectwv = MasterRectWave(instrument='EMIR') master_rectwv.quality_control = numina.types.qc.QC.GOOD master_rectwv.tags['grism'] = grism_name master_rectwv.tags['filter'] = filter_name master_rectwv.meta_info['dtu_configuration'] = outdict['dtu_configuration'] master_rectwv.meta_info['refined_boundary_model'] = { 'parmodel': refined_boundary_model.meta_info['parmodel'] } master_rectwv.meta_info['refined_boundary_model'].update( outdict['refined_boundary_model']['contents'] ) master_rectwv.total_slitlets = EMIR_NBARS master_rectwv.meta_info['origin'] = { 'bound_param': 'uuid' + refined_boundary_model.uuid, 'longslit_frames': ['uuid:' + list_coef_rect_wpoly[ifile].uuid for ifile in range(nfiles)] } for i in range(EMIR_NBARS): islitlet = i + 1 dumdict = {'islitlet': islitlet} cslitlet = 'slitlet' + str(islitlet).zfill(2) if cslitlet in outdict['contents']: dumdict.update(outdict['contents'][cslitlet]) else: dumdict.update({ 'bb_nc1_orig': 0, 'bb_nc2_orig': 0, 'ymargin_bb': 0, 'list_csu_bar_slit_center': [], 'ttd_order': 0, 'ncoef_rect': 0, 'wpolydegree': 0 }) master_rectwv.missing_slitlets.append(islitlet) master_rectwv.contents.append(dumdict) master_rectwv.writeto(args.out_MOSlibrary.name) print('>>> Saving file ' + args.out_MOSlibrary.name)
python
def main(args=None): # parse command-line options parser = argparse.ArgumentParser(prog='rect_wpoly_for_mos') # required arguments parser.add_argument("input_list", help="TXT file with list JSON files derived from " "longslit data") parser.add_argument("--fitted_bound_param", required=True, help="Input JSON with fitted boundary parameters", type=argparse.FileType('rt')) parser.add_argument("--out_MOSlibrary", required=True, help="Output JSON file with results", type=lambda x: arg_file_is_new(parser, x)) # optional arguments parser.add_argument("--debugplot", help="Integer indicating plotting & debugging options" " (default=0)", default=0, type=int, choices=DEBUGPLOT_CODES) parser.add_argument("--echo", help="Display full command line", action="store_true") args = parser.parse_args(args) if args.echo: print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n') # --- # Read input TXT file with list of JSON files list_json_files = list_fileinfo_from_txt(args.input_list) nfiles = len(list_json_files) if abs(args.debugplot) >= 10: print('>>> Number of input JSON files:', nfiles) for item in list_json_files: print(item) if nfiles < 2: raise ValueError("Insufficient number of input JSON files") # read fitted boundary parameters and check that all the longslit JSON # files have been computed using the same fitted boundary parameters refined_boundary_model = RefinedBoundaryModelParam._datatype_load( args.fitted_bound_param.name) for ifile in range(nfiles): coef_rect_wpoly = RectWaveCoeff._datatype_load( list_json_files[ifile].filename) uuid_tmp = coef_rect_wpoly.meta_info['origin']['bound_param'] if uuid_tmp[4:] != refined_boundary_model.uuid: print('Expected uuid:', refined_boundary_model.uuid) print('uuid for ifile #' + str(ifile + 1) + ": " + uuid_tmp) raise ValueError("Fitted boundary parameter uuid's do not match") # check consistency of grism, filter, DTU configuration and list of # valid slitlets coef_rect_wpoly_first_longslit = RectWaveCoeff._datatype_load( list_json_files[0].filename) filter_name = coef_rect_wpoly_first_longslit.tags['filter'] grism_name = coef_rect_wpoly_first_longslit.tags['grism'] dtu_conf = DtuConfiguration.define_from_dictionary( coef_rect_wpoly_first_longslit.meta_info['dtu_configuration'] ) list_valid_islitlets = list(range(1, EMIR_NBARS + 1)) for idel in coef_rect_wpoly_first_longslit.missing_slitlets: list_valid_islitlets.remove(idel) for ifile in range(1, nfiles): coef_rect_wpoly = RectWaveCoeff._datatype_load( list_json_files[ifile].filename) filter_tmp = coef_rect_wpoly.tags['filter'] if filter_name != filter_tmp: print(filter_name) print(filter_tmp) raise ValueError("Unexpected different filter found") grism_tmp = coef_rect_wpoly.tags['grism'] if grism_name != grism_tmp: print(grism_name) print(grism_tmp) raise ValueError("Unexpected different grism found") coef_rect_wpoly = RectWaveCoeff._datatype_load( list_json_files[ifile].filename) dtu_conf_tmp = DtuConfiguration.define_from_dictionary( coef_rect_wpoly.meta_info['dtu_configuration'] ) if dtu_conf != dtu_conf_tmp: print(dtu_conf) print(dtu_conf_tmp) raise ValueError("Unexpected different DTU configurations found") list_valid_islitlets_tmp = list(range(1, EMIR_NBARS + 1)) for idel in coef_rect_wpoly.missing_slitlets: list_valid_islitlets_tmp.remove(idel) if list_valid_islitlets != list_valid_islitlets_tmp: print(list_valid_islitlets) print(list_valid_islitlets_tmp) raise ValueError("Unexpected different list of valid slitlets") # check consistency of horizontal bounding box limits (bb_nc1_orig and # 
bb_nc2_orig) and ymargin_bb, and store the values for each slitlet dict_bb_param = {} print("Checking horizontal bounding box limits and ymargin_bb:") for islitlet in list_valid_islitlets: islitlet_progress(islitlet, EMIR_NBARS) cslitlet = 'slitlet' + str(islitlet).zfill(2) dict_bb_param[cslitlet] = {} for par in ['bb_nc1_orig', 'bb_nc2_orig', 'ymargin_bb']: value_initial = \ coef_rect_wpoly_first_longslit.contents[islitlet - 1][par] for ifile in range(1, nfiles): coef_rect_wpoly = RectWaveCoeff._datatype_load( list_json_files[ifile].filename) value_tmp = coef_rect_wpoly.contents[islitlet - 1][par] if value_initial != value_tmp: print(islitlet, value_initial, value_tmp) print(value_tmp) raise ValueError("Unexpected different " + par) dict_bb_param[cslitlet][par] = value_initial print('OK!') # --- # Read and store all the longslit data list_coef_rect_wpoly = [] for ifile in range(nfiles): coef_rect_wpoly = RectWaveCoeff._datatype_load( list_json_files[ifile].filename) list_coef_rect_wpoly.append(coef_rect_wpoly) # --- # Initialize structure to save results into an output JSON file outdict = {} outdict['refined_boundary_model'] = refined_boundary_model.__getstate__() outdict['instrument'] = 'EMIR' outdict['meta_info'] = {} outdict['meta_info']['creation_date'] = datetime.now().isoformat() outdict['meta_info']['description'] = \ 'rectification and wavelength calibration polynomial coefficients ' \ 'as a function of csu_bar_slit_center for MOS' outdict['meta_info']['recipe_name'] = 'undefined' outdict['meta_info']['origin'] = {} outdict['meta_info']['origin']['wpoly_longslits'] = {} for ifile in range(nfiles): cdum = 'longslit_' + str(ifile + 1).zfill(3) + '_uuid' outdict['meta_info']['origin']['wpoly_longslits'][cdum] = \ list_coef_rect_wpoly[ifile].uuid outdict['tags'] = {} outdict['tags']['grism'] = grism_name outdict['tags']['filter'] = filter_name outdict['dtu_configuration'] = dtu_conf.outdict() outdict['uuid'] = str(uuid4()) outdict['contents'] = {} # include bb_nc1_orig, bb_nc2_orig and ymargin_bb for each slitlet # (note that the values of bb_ns1_orig and bb_ns2_orig cannot be # computed at this stage because they depend on csu_bar_slit_center) for islitlet in list_valid_islitlets: cslitlet = 'slitlet' + str(islitlet).zfill(2) outdict['contents'][cslitlet] = dict_bb_param[cslitlet] # check that order for rectification transformations is the same for all # the slitlets and longslit configurations order_check_list = [] for ifile in range(nfiles): tmpdict = list_coef_rect_wpoly[ifile].contents for islitlet in list_valid_islitlets: ttd_order = tmpdict[islitlet - 1]['ttd_order'] if ttd_order is not None: order_check_list.append(ttd_order) ttd_order_modeled = \ tmpdict[islitlet - 1]['ttd_order_longslit_model'] order_check_list.append(ttd_order_modeled) # remove duplicates in list order_no_duplicates = list(set(order_check_list)) if len(order_no_duplicates) != 1: print('order_no_duplicates:', order_no_duplicates) raise ValueError('ttd_order is not constant!') ttd_order = int(order_no_duplicates[0]) ncoef_rect = ncoef_fmap(ttd_order) if abs(args.debugplot) >= 10: print('>>> ttd_order........:', ttd_order) print('>>> ncoef_rect.......:', ncoef_rect) # check that polynomial degree in frontiers and spectrails are the same poldeg_check_list = [] for ifile in range(nfiles): tmpdict = list_coef_rect_wpoly[ifile].contents for islitlet in list_valid_islitlets: tmppoly = tmpdict[islitlet - 1]['frontier']['poly_coef_lower'] poldeg_check_list.append(len(tmppoly) - 1) tmppoly = tmpdict[islitlet -
1]['frontier']['poly_coef_upper'] poldeg_check_list.append(len(tmppoly) - 1) tmppoly = tmpdict[islitlet - 1]['spectrail']['poly_coef_lower'] poldeg_check_list.append(len(tmppoly) - 1) tmppoly = tmpdict[islitlet - 1]['spectrail']['poly_coef_middle'] poldeg_check_list.append(len(tmppoly) - 1) tmppoly = tmpdict[islitlet - 1]['spectrail']['poly_coef_upper'] poldeg_check_list.append(len(tmppoly) - 1) # remove duplicates in list poldeg_no_duplicates = list(set(poldeg_check_list)) if len(poldeg_no_duplicates) != 1: print('poldeg_no_duplicates:', poldeg_no_duplicates) raise ValueError('poldeg is not constant in frontiers and ' 'spectrails!') poldeg_spectrails = int(poldeg_no_duplicates[0]) if abs(args.debugplot) >= 10: print('>>> poldeg spectrails:', poldeg_spectrails) # check that polynomial degree of wavelength calibration is the same for # all the slitlets poldeg_check_list = [] for ifile in range(nfiles): tmpdict = list_coef_rect_wpoly[ifile].contents for islitlet in list_valid_islitlets: tmppoly = tmpdict[islitlet - 1]['wpoly_coeff'] poldeg_check_list.append(len(tmppoly) - 1) tmppoly = tmpdict[islitlet - 1]['wpoly_coeff_longslit_model'] poldeg_check_list.append(len(tmppoly) - 1) # remove duplicates in list poldeg_no_duplicates = list(set(poldeg_check_list)) if len(poldeg_no_duplicates) != 1: print('poldeg_no_duplicates:', poldeg_no_duplicates) raise ValueError('poldeg is not constant in wavelength calibration ' 'polynomials!') poldeg_wavecal = int(poldeg_no_duplicates[0]) if abs(args.debugplot) >= 10: print('>>> poldeg wavecal...:', poldeg_wavecal) # --- # csu_bar_slit_center values for each slitlet print("CSU_bar_slit_center values:") for islitlet in list_valid_islitlets: islitlet_progress(islitlet, EMIR_NBARS) cslitlet = 'slitlet' + str(islitlet).zfill(2) list_csu_bar_slit_center = [] for ifile in range(nfiles): tmpdict = list_coef_rect_wpoly[ifile].contents[islitlet - 1] csu_bar_slit_center = tmpdict['csu_bar_slit_center'] list_csu_bar_slit_center.append(csu_bar_slit_center) # check that list_csu_bar_slit_center is properly sorted if not np.all(list_csu_bar_slit_center[:-1] <= list_csu_bar_slit_center[1:]): print('cslitlet: ', cslitlet) print('list_csu_bar_slit_center: ', list_csu_bar_slit_center) raise ValueError('Unsorted list_csu_bar_slit_center') outdict['contents'][cslitlet]['list_csu_bar_slit_center'] = \ list_csu_bar_slit_center print('OK!') # --- # rectification polynomial coefficients # note: when aij and bij have not been computed, we use the modeled # version aij_longslit_model and bij_longslit_model print("Rectification polynomial coefficients:") for islitlet in list_valid_islitlets: islitlet_progress(islitlet, EMIR_NBARS) cslitlet = 'slitlet' + str(islitlet).zfill(2) outdict['contents'][cslitlet]['ttd_order'] = ttd_order outdict['contents'][cslitlet]['ncoef_rect'] = ncoef_rect for keycoef in ['ttd_aij', 'ttd_bij', 'tti_aij', 'tti_bij']: for icoef in range(ncoef_rect): ccoef = str(icoef).zfill(2) list_cij = [] for ifile in range(nfiles): tmpdict = \ list_coef_rect_wpoly[ifile].contents[islitlet - 1] cij = tmpdict[keycoef] if cij is not None: list_cij.append(cij[icoef]) else: cij_modeled = tmpdict[keycoef + '_longslit_model'] if cij_modeled is None: raise ValueError("Unexpected cij_modeled=None!") else: list_cij.append(cij_modeled[icoef]) if abs(args.debugplot) >= 10: print("Warning: using " + keycoef + "_longslit_model for " + cslitlet + " in file " + list_json_files[ifile].filename) cdum = 'list_' + keycoef + '_' + ccoef outdict['contents'][cslitlet][cdum] = list_cij 
print('OK!') # --- # wavelength calibration polynomial coefficients # note: when wpoly_coeff have not been computed, we use the # wpoly_coeff_longslit_model print("Wavelength calibration polynomial coefficients:") for islitlet in list_valid_islitlets: islitlet_progress(islitlet, EMIR_NBARS) cslitlet = 'slitlet' + str(islitlet).zfill(2) outdict['contents'][cslitlet]['wpoly_degree'] = poldeg_wavecal for icoef in range(poldeg_wavecal + 1): ccoef = str(icoef).zfill(2) list_cij = [] for ifile in range(nfiles): tmpdict = list_coef_rect_wpoly[ifile].contents[islitlet - 1] cij = tmpdict['wpoly_coeff'] if cij is not None: list_cij.append(cij[icoef]) else: cij_modeled = tmpdict['wpoly_coeff_longslit_model'] if cij_modeled is None: raise ValueError("Unexpected cij_modeled=None!") else: list_cij.append(cij_modeled[icoef]) if abs(args.debugplot) >= 10: print("Warning: using wpoly_coeff_longslit_model" + " for " + cslitlet + " in file " + list_json_files[ifile].filename) outdict['contents'][cslitlet]['list_wpoly_coeff_' + ccoef] = \ list_cij print('OK!') # --- # OBSOLETE # Save resulting JSON structure ''' with open(args.out_MOSlibrary.name + '_old', 'w') as fstream: json.dump(outdict, fstream, indent=2, sort_keys=True) print('>>> Saving file ' + args.out_MOSlibrary.name + '_old') ''' # -- # Create object of type MasterRectWave with library of coefficients # for rectification and wavelength calibration master_rectwv = MasterRectWave(instrument='EMIR') master_rectwv.quality_control = numina.types.qc.QC.GOOD master_rectwv.tags['grism'] = grism_name master_rectwv.tags['filter'] = filter_name master_rectwv.meta_info['dtu_configuration'] = outdict['dtu_configuration'] master_rectwv.meta_info['refined_boundary_model'] = { 'parmodel': refined_boundary_model.meta_info['parmodel'] } master_rectwv.meta_info['refined_boundary_model'].update( outdict['refined_boundary_model']['contents'] ) master_rectwv.total_slitlets = EMIR_NBARS master_rectwv.meta_info['origin'] = { 'bound_param': 'uuid' + refined_boundary_model.uuid, 'longslit_frames': ['uuid:' + list_coef_rect_wpoly[ifile].uuid for ifile in range(nfiles)] } for i in range(EMIR_NBARS): islitlet = i + 1 dumdict = {'islitlet': islitlet} cslitlet = 'slitlet' + str(islitlet).zfill(2) if cslitlet in outdict['contents']: dumdict.update(outdict['contents'][cslitlet]) else: dumdict.update({ 'bb_nc1_orig': 0, 'bb_nc2_orig': 0, 'ymargin_bb': 0, 'list_csu_bar_slit_center': [], 'ttd_order': 0, 'ncoef_rect': 0, 'wpolydegree': 0 }) master_rectwv.missing_slitlets.append(islitlet) master_rectwv.contents.append(dumdict) master_rectwv.writeto(args.out_MOSlibrary.name) print('>>> Saving file ' + args.out_MOSlibrary.name)
[ "def", "main", "(", "args", "=", "None", ")", ":", "# parse command-line options", "parser", "=", "argparse", ".", "ArgumentParser", "(", "prog", "=", "'rect_wpoly_for_mos'", ")", "# required arguments", "parser", ".", "add_argument", "(", "\"input_list\"", ",", "help", "=", "\"TXT file with list JSON files derived from \"", "\"longslit data\"", ")", "parser", ".", "add_argument", "(", "\"--fitted_bound_param\"", ",", "required", "=", "True", ",", "help", "=", "\"Input JSON with fitted boundary parameters\"", ",", "type", "=", "argparse", ".", "FileType", "(", "'rt'", ")", ")", "parser", ".", "add_argument", "(", "\"--out_MOSlibrary\"", ",", "required", "=", "True", ",", "help", "=", "\"Output JSON file with results\"", ",", "type", "=", "lambda", "x", ":", "arg_file_is_new", "(", "parser", ",", "x", ")", ")", "# optional arguments", "parser", ".", "add_argument", "(", "\"--debugplot\"", ",", "help", "=", "\"Integer indicating plotting & debugging options\"", "\" (default=0)\"", ",", "default", "=", "0", ",", "type", "=", "int", ",", "choices", "=", "DEBUGPLOT_CODES", ")", "parser", ".", "add_argument", "(", "\"--echo\"", ",", "help", "=", "\"Display full command line\"", ",", "action", "=", "\"store_true\"", ")", "args", "=", "parser", ".", "parse_args", "(", "args", ")", "if", "args", ".", "echo", ":", "print", "(", "'\\033[1m\\033[31m% '", "+", "' '", ".", "join", "(", "sys", ".", "argv", ")", "+", "'\\033[0m\\n'", ")", "# ---", "# Read input TXT file with list of JSON files", "list_json_files", "=", "list_fileinfo_from_txt", "(", "args", ".", "input_list", ")", "nfiles", "=", "len", "(", "list_json_files", ")", "if", "abs", "(", "args", ".", "debugplot", ")", ">=", "10", ":", "print", "(", "'>>> Number of input JSON files:'", ",", "nfiles", ")", "for", "item", "in", "list_json_files", ":", "print", "(", "item", ")", "if", "nfiles", "<", "2", ":", "raise", "ValueError", "(", "\"Insufficient number of input JSON files\"", ")", "# read fitted boundary parameters and check that all the longslit JSON", "# files have been computed using the same fitted boundary parameters", "refined_boundary_model", "=", "RefinedBoundaryModelParam", ".", "_datatype_load", "(", "args", ".", "fitted_bound_param", ".", "name", ")", "for", "ifile", "in", "range", "(", "nfiles", ")", ":", "coef_rect_wpoly", "=", "RectWaveCoeff", ".", "_datatype_load", "(", "list_json_files", "[", "ifile", "]", ".", "filename", ")", "uuid_tmp", "=", "coef_rect_wpoly", ".", "meta_info", "[", "'origin'", "]", "[", "'bound_param'", "]", "if", "uuid_tmp", "[", "4", ":", "]", "!=", "refined_boundary_model", ".", "uuid", ":", "print", "(", "'Expected uuid:'", ",", "refined_boundary_model", ".", "uuid", ")", "print", "(", "'uuid for ifile #'", "+", "str", "(", "ifile", "+", "1", ")", "+", "\": \"", "+", "uuid_tmp", ")", "raise", "ValueError", "(", "\"Fitted boundary parameter uuid's do not match\"", ")", "# check consistency of grism, filter, DTU configuration and list of", "# valid slitlets", "coef_rect_wpoly_first_longslit", "=", "RectWaveCoeff", ".", "_datatype_load", "(", "list_json_files", "[", "0", "]", ".", "filename", ")", "filter_name", "=", "coef_rect_wpoly_first_longslit", ".", "tags", "[", "'filter'", "]", "grism_name", "=", "coef_rect_wpoly_first_longslit", ".", "tags", "[", "'grism'", "]", "dtu_conf", "=", "DtuConfiguration", ".", "define_from_dictionary", "(", "coef_rect_wpoly_first_longslit", ".", "meta_info", "[", "'dtu_configuration'", "]", ")", "list_valid_islitlets", "=", "list", "(", "range", "(", "1", ",", 
"EMIR_NBARS", "+", "1", ")", ")", "for", "idel", "in", "coef_rect_wpoly_first_longslit", ".", "missing_slitlets", ":", "list_valid_islitlets", ".", "remove", "(", "idel", ")", "for", "ifile", "in", "range", "(", "1", ",", "nfiles", ")", ":", "coef_rect_wpoly", "=", "RectWaveCoeff", ".", "_datatype_load", "(", "list_json_files", "[", "ifile", "]", ".", "filename", ")", "filter_tmp", "=", "coef_rect_wpoly", ".", "tags", "[", "'filter'", "]", "if", "filter_name", "!=", "filter_tmp", ":", "print", "(", "filter_name", ")", "print", "(", "filter_tmp", ")", "raise", "ValueError", "(", "\"Unexpected different filter found\"", ")", "grism_tmp", "=", "coef_rect_wpoly", ".", "tags", "[", "'grism'", "]", "if", "grism_name", "!=", "grism_tmp", ":", "print", "(", "grism_name", ")", "print", "(", "grism_tmp", ")", "raise", "ValueError", "(", "\"Unexpected different grism found\"", ")", "coef_rect_wpoly", "=", "RectWaveCoeff", ".", "_datatype_load", "(", "list_json_files", "[", "ifile", "]", ".", "filename", ")", "dtu_conf_tmp", "=", "DtuConfiguration", ".", "define_from_dictionary", "(", "coef_rect_wpoly", ".", "meta_info", "[", "'dtu_configuration'", "]", ")", "if", "dtu_conf", "!=", "dtu_conf_tmp", ":", "print", "(", "dtu_conf", ")", "print", "(", "dtu_conf_tmp", ")", "raise", "ValueError", "(", "\"Unexpected different DTU configurations found\"", ")", "list_valid_islitlets_tmp", "=", "list", "(", "range", "(", "1", ",", "EMIR_NBARS", "+", "1", ")", ")", "for", "idel", "in", "coef_rect_wpoly", ".", "missing_slitlets", ":", "list_valid_islitlets_tmp", ".", "remove", "(", "idel", ")", "if", "list_valid_islitlets", "!=", "list_valid_islitlets_tmp", ":", "print", "(", "list_valid_islitlets", ")", "print", "(", "list_valid_islitlets_tmp", ")", "raise", "ValueError", "(", "\"Unexpected different list of valid slitlets\"", ")", "# check consistency of horizontal bounding box limits (bb_nc1_orig and", "# bb_nc2_orig) and ymargin_bb, and store the values for each slitlet", "dict_bb_param", "=", "{", "}", "print", "(", "\"Checking horizontal bounding box limits and ymargin_bb:\"", ")", "for", "islitlet", "in", "list_valid_islitlets", ":", "islitlet_progress", "(", "islitlet", ",", "EMIR_NBARS", ")", "cslitlet", "=", "'slitlet'", "+", "str", "(", "islitlet", ")", ".", "zfill", "(", "2", ")", "dict_bb_param", "[", "cslitlet", "]", "=", "{", "}", "for", "par", "in", "[", "'bb_nc1_orig'", ",", "'bb_nc2_orig'", ",", "'ymargin_bb'", "]", ":", "value_initial", "=", "coef_rect_wpoly_first_longslit", ".", "contents", "[", "islitlet", "-", "1", "]", "[", "par", "]", "for", "ifile", "in", "range", "(", "1", ",", "nfiles", ")", ":", "coef_rect_wpoly", "=", "RectWaveCoeff", ".", "_datatype_load", "(", "list_json_files", "[", "ifile", "]", ".", "filename", ")", "value_tmp", "=", "coef_rect_wpoly", ".", "contents", "[", "islitlet", "-", "1", "]", "[", "par", "]", "if", "value_initial", "!=", "value_tmp", ":", "print", "(", "islitlet", ",", "value_initial", ",", "value_tmp", ")", "print", "(", "value_tmp", ")", "raise", "ValueError", "(", "\"Unexpected different \"", "+", "par", ")", "dict_bb_param", "[", "cslitlet", "]", "[", "par", "]", "=", "value_initial", "print", "(", "'OK!'", ")", "# ---", "# Read and store all the longslit data", "list_coef_rect_wpoly", "=", "[", "]", "for", "ifile", "in", "range", "(", "nfiles", ")", ":", "coef_rect_wpoly", "=", "RectWaveCoeff", ".", "_datatype_load", "(", "list_json_files", "[", "ifile", "]", ".", "filename", ")", "list_coef_rect_wpoly", ".", "append", "(", "coef_rect_wpoly", ")", "# 
---", "# Initialize structure to save results into an ouptut JSON file", "outdict", "=", "{", "}", "outdict", "[", "'refined_boundary_model'", "]", "=", "refined_boundary_model", ".", "__getstate__", "(", ")", "outdict", "[", "'instrument'", "]", "=", "'EMIR'", "outdict", "[", "'meta_info'", "]", "=", "{", "}", "outdict", "[", "'meta_info'", "]", "[", "'creation_date'", "]", "=", "datetime", ".", "now", "(", ")", ".", "isoformat", "(", ")", "outdict", "[", "'meta_info'", "]", "[", "'description'", "]", "=", "'rectification and wavelength calibration polynomial coefficients '", "'as a function of csu_bar_slit_center for MOS'", "outdict", "[", "'meta_info'", "]", "[", "'recipe_name'", "]", "=", "'undefined'", "outdict", "[", "'meta_info'", "]", "[", "'origin'", "]", "=", "{", "}", "outdict", "[", "'meta_info'", "]", "[", "'origin'", "]", "[", "'wpoly_longslits'", "]", "=", "{", "}", "for", "ifile", "in", "range", "(", "nfiles", ")", ":", "cdum", "=", "'longslit_'", "+", "str", "(", "ifile", "+", "1", ")", ".", "zfill", "(", "3", ")", "+", "'_uuid'", "outdict", "[", "'meta_info'", "]", "[", "'origin'", "]", "[", "'wpoly_longslits'", "]", "[", "cdum", "]", "=", "list_coef_rect_wpoly", "[", "ifile", "]", ".", "uuid", "outdict", "[", "'tags'", "]", "=", "{", "}", "outdict", "[", "'tags'", "]", "[", "'grism'", "]", "=", "grism_name", "outdict", "[", "'tags'", "]", "[", "'filter'", "]", "=", "filter_name", "outdict", "[", "'dtu_configuration'", "]", "=", "dtu_conf", ".", "outdict", "(", ")", "outdict", "[", "'uuid'", "]", "=", "str", "(", "uuid4", "(", ")", ")", "outdict", "[", "'contents'", "]", "=", "{", "}", "# include bb_nc1_orig, bb_nc2_orig and ymargin_bb for each slitlet", "# (note that the values of bb_ns1_orig and bb_ns2_orig cannot be", "# computed at this stage because they depend on csu_bar_slit_center)", "for", "islitlet", "in", "list_valid_islitlets", ":", "cslitlet", "=", "'slitlet'", "+", "str", "(", "islitlet", ")", ".", "zfill", "(", "2", ")", "outdict", "[", "'contents'", "]", "[", "cslitlet", "]", "=", "dict_bb_param", "[", "cslitlet", "]", "# check that order for rectification transformations is the same for all", "# the slitlets and longslit configurations", "order_check_list", "=", "[", "]", "for", "ifile", "in", "range", "(", "nfiles", ")", ":", "tmpdict", "=", "list_coef_rect_wpoly", "[", "ifile", "]", ".", "contents", "for", "islitlet", "in", "list_valid_islitlets", ":", "ttd_order", "=", "tmpdict", "[", "islitlet", "-", "1", "]", "[", "'ttd_order'", "]", "if", "ttd_order", "is", "not", "None", ":", "order_check_list", ".", "append", "(", "ttd_order", ")", "ttd_order_modeled", "=", "tmpdict", "[", "islitlet", "-", "1", "]", "[", "'ttd_order_longslit_model'", "]", "order_check_list", ".", "append", "(", "ttd_order_modeled", ")", "# remove duplicates in list", "order_no_duplicates", "=", "list", "(", "set", "(", "order_check_list", ")", ")", "if", "len", "(", "order_no_duplicates", ")", "!=", "1", ":", "print", "(", "'order_no_duplicates:'", ",", "order_no_duplicates", ")", "raise", "ValueError", "(", "'tdd_order is not constant!'", ")", "ttd_order", "=", "int", "(", "order_no_duplicates", "[", "0", "]", ")", "ncoef_rect", "=", "ncoef_fmap", "(", "ttd_order", ")", "if", "abs", "(", "args", ".", "debugplot", ")", ">=", "10", ":", "print", "(", "'>>> ttd_order........:'", ",", "ttd_order", ")", "print", "(", "'>>> ncoef_rect.......:'", ",", "ncoef_rect", ")", "# check that polynomial degree in frontiers and spectrails are the same", "poldeg_check_list", "=", "[", "]", "for", 
"ifile", "in", "range", "(", "nfiles", ")", ":", "tmpdict", "=", "list_coef_rect_wpoly", "[", "ifile", "]", ".", "contents", "for", "islitlet", "in", "list_valid_islitlets", ":", "tmppoly", "=", "tmpdict", "[", "islitlet", "-", "1", "]", "[", "'frontier'", "]", "[", "'poly_coef_lower'", "]", "poldeg_check_list", ".", "append", "(", "len", "(", "tmppoly", ")", "-", "1", ")", "tmppoly", "=", "tmpdict", "[", "islitlet", "-", "1", "]", "[", "'frontier'", "]", "[", "'poly_coef_upper'", "]", "poldeg_check_list", ".", "append", "(", "len", "(", "tmppoly", ")", "-", "1", ")", "tmppoly", "=", "tmpdict", "[", "islitlet", "-", "1", "]", "[", "'spectrail'", "]", "[", "'poly_coef_lower'", "]", "poldeg_check_list", ".", "append", "(", "len", "(", "tmppoly", ")", "-", "1", ")", "tmppoly", "=", "tmpdict", "[", "islitlet", "-", "1", "]", "[", "'spectrail'", "]", "[", "'poly_coef_middle'", "]", "poldeg_check_list", ".", "append", "(", "len", "(", "tmppoly", ")", "-", "1", ")", "tmppoly", "=", "tmpdict", "[", "islitlet", "-", "1", "]", "[", "'spectrail'", "]", "[", "'poly_coef_upper'", "]", "poldeg_check_list", ".", "append", "(", "len", "(", "tmppoly", ")", "-", "1", ")", "# remove duplicates in list", "poldeg_no_duplicates", "=", "list", "(", "set", "(", "poldeg_check_list", ")", ")", "if", "len", "(", "poldeg_no_duplicates", ")", "!=", "1", ":", "print", "(", "'poldeg_no_duplicates:'", ",", "poldeg_no_duplicates", ")", "raise", "ValueError", "(", "'poldeg is not constant in frontiers and '", "'spectrails!'", ")", "poldeg_spectrails", "=", "int", "(", "poldeg_no_duplicates", "[", "0", "]", ")", "if", "abs", "(", "args", ".", "debugplot", ")", ">=", "10", ":", "print", "(", "'>>> poldeg spectrails:'", ",", "poldeg_spectrails", ")", "# check that polynomial degree of wavelength calibration is the same for", "# all the slitlets", "poldeg_check_list", "=", "[", "]", "for", "ifile", "in", "range", "(", "nfiles", ")", ":", "tmpdict", "=", "list_coef_rect_wpoly", "[", "ifile", "]", ".", "contents", "for", "islitlet", "in", "list_valid_islitlets", ":", "tmppoly", "=", "tmpdict", "[", "islitlet", "-", "1", "]", "[", "'wpoly_coeff'", "]", "poldeg_check_list", ".", "append", "(", "len", "(", "tmppoly", ")", "-", "1", ")", "tmppoly", "=", "tmpdict", "[", "islitlet", "-", "1", "]", "[", "'wpoly_coeff_longslit_model'", "]", "poldeg_check_list", ".", "append", "(", "len", "(", "tmppoly", ")", "-", "1", ")", "# remove duplicates in list", "poldeg_no_duplicates", "=", "list", "(", "set", "(", "poldeg_check_list", ")", ")", "if", "len", "(", "poldeg_no_duplicates", ")", "!=", "1", ":", "print", "(", "'poldeg_no_duplicates:'", ",", "poldeg_no_duplicates", ")", "raise", "ValueError", "(", "'poldeg is not constant in wavelength calibration '", "'polynomials!'", ")", "poldeg_wavecal", "=", "int", "(", "poldeg_no_duplicates", "[", "0", "]", ")", "if", "abs", "(", "args", ".", "debugplot", ")", ">=", "10", ":", "print", "(", "'>>> poldeg wavecal...:'", ",", "poldeg_wavecal", ")", "# ---", "# csu_bar_slit_center values for each slitlet", "print", "(", "\"CSU_bar_slit_center values:\"", ")", "for", "islitlet", "in", "list_valid_islitlets", ":", "islitlet_progress", "(", "islitlet", ",", "EMIR_NBARS", ")", "cslitlet", "=", "'slitlet'", "+", "str", "(", "islitlet", ")", ".", "zfill", "(", "2", ")", "list_csu_bar_slit_center", "=", "[", "]", "for", "ifile", "in", "range", "(", "nfiles", ")", ":", "tmpdict", "=", "list_coef_rect_wpoly", "[", "ifile", "]", ".", "contents", "[", "islitlet", "-", "1", "]", "csu_bar_slit_center", "=", 
"tmpdict", "[", "'csu_bar_slit_center'", "]", "list_csu_bar_slit_center", ".", "append", "(", "csu_bar_slit_center", ")", "# check that list_csu_bar_slit_center is properly sorted", "if", "not", "np", ".", "all", "(", "list_csu_bar_slit_center", "[", ":", "-", "1", "]", "<=", "list_csu_bar_slit_center", "[", "1", ":", "]", ")", ":", "print", "(", "'cslitlet: '", ",", "cslitlet", ")", "print", "(", "'list_csu_bar_slit_center: '", ",", "list_csu_bar_slit_center", ")", "raise", "ValueError", "(", "'Unsorted list_csu_bar_slit_center'", ")", "outdict", "[", "'contents'", "]", "[", "cslitlet", "]", "[", "'list_csu_bar_slit_center'", "]", "=", "list_csu_bar_slit_center", "print", "(", "'OK!'", ")", "# ---", "# rectification polynomial coefficients", "# note: when aij and bij have not been computed, we use the modeled", "# version aij_longslit_model and bij_longslit_model", "print", "(", "\"Rectification polynomial coefficients:\"", ")", "for", "islitlet", "in", "list_valid_islitlets", ":", "islitlet_progress", "(", "islitlet", ",", "EMIR_NBARS", ")", "cslitlet", "=", "'slitlet'", "+", "str", "(", "islitlet", ")", ".", "zfill", "(", "2", ")", "outdict", "[", "'contents'", "]", "[", "cslitlet", "]", "[", "'ttd_order'", "]", "=", "ttd_order", "outdict", "[", "'contents'", "]", "[", "cslitlet", "]", "[", "'ncoef_rect'", "]", "=", "ncoef_rect", "for", "keycoef", "in", "[", "'ttd_aij'", ",", "'ttd_bij'", ",", "'tti_aij'", ",", "'tti_bij'", "]", ":", "for", "icoef", "in", "range", "(", "ncoef_rect", ")", ":", "ccoef", "=", "str", "(", "icoef", ")", ".", "zfill", "(", "2", ")", "list_cij", "=", "[", "]", "for", "ifile", "in", "range", "(", "nfiles", ")", ":", "tmpdict", "=", "list_coef_rect_wpoly", "[", "ifile", "]", ".", "contents", "[", "islitlet", "-", "1", "]", "cij", "=", "tmpdict", "[", "keycoef", "]", "if", "cij", "is", "not", "None", ":", "list_cij", ".", "append", "(", "cij", "[", "icoef", "]", ")", "else", ":", "cij_modeled", "=", "tmpdict", "[", "keycoef", "+", "'_longslit_model'", "]", "if", "cij_modeled", "is", "None", ":", "raise", "ValueError", "(", "\"Unexpected cij_modeled=None!\"", ")", "else", ":", "list_cij", ".", "append", "(", "cij_modeled", "[", "icoef", "]", ")", "if", "abs", "(", "args", ".", "debugplot", ")", ">=", "10", ":", "print", "(", "\"Warning: using \"", "+", "keycoef", "+", "\"_longslit_model for \"", "+", "cslitlet", "+", "\" in file \"", "+", "list_json_files", "[", "ifile", "]", ".", "filename", ")", "cdum", "=", "'list_'", "+", "keycoef", "+", "'_'", "+", "ccoef", "outdict", "[", "'contents'", "]", "[", "cslitlet", "]", "[", "cdum", "]", "=", "list_cij", "print", "(", "'OK!'", ")", "# ---", "# wavelength calibration polynomial coefficients", "# note: when wpoly_coeff have not been computed, we use the", "# wpoly_coeff_longslit_model", "print", "(", "\"Wavelength calibration polynomial coefficients:\"", ")", "for", "islitlet", "in", "list_valid_islitlets", ":", "islitlet_progress", "(", "islitlet", ",", "EMIR_NBARS", ")", "cslitlet", "=", "'slitlet'", "+", "str", "(", "islitlet", ")", ".", "zfill", "(", "2", ")", "outdict", "[", "'contents'", "]", "[", "cslitlet", "]", "[", "'wpoly_degree'", "]", "=", "poldeg_wavecal", "for", "icoef", "in", "range", "(", "poldeg_wavecal", "+", "1", ")", ":", "ccoef", "=", "str", "(", "icoef", ")", ".", "zfill", "(", "2", ")", "list_cij", "=", "[", "]", "for", "ifile", "in", "range", "(", "nfiles", ")", ":", "tmpdict", "=", "list_coef_rect_wpoly", "[", "ifile", "]", ".", "contents", "[", "islitlet", "-", "1", "]", "cij", "=", 
"tmpdict", "[", "'wpoly_coeff'", "]", "if", "cij", "is", "not", "None", ":", "list_cij", ".", "append", "(", "cij", "[", "icoef", "]", ")", "else", ":", "cij_modeled", "=", "tmpdict", "[", "'wpoly_coeff_longslit_model'", "]", "if", "cij_modeled", "is", "None", ":", "raise", "ValueError", "(", "\"Unexpected cij_modeled=None!\"", ")", "else", ":", "list_cij", ".", "append", "(", "cij_modeled", "[", "icoef", "]", ")", "if", "abs", "(", "args", ".", "debugplot", ")", ">=", "10", ":", "print", "(", "\"Warning: using wpoly_coeff_longslit_model\"", "+", "\" for \"", "+", "cslitlet", "+", "\" in file \"", "+", "list_json_files", "[", "ifile", "]", ".", "filename", ")", "outdict", "[", "'contents'", "]", "[", "cslitlet", "]", "[", "'list_wpoly_coeff_'", "+", "ccoef", "]", "=", "list_cij", "print", "(", "'OK!'", ")", "# ---", "# OBSOLETE", "# Save resulting JSON structure", "# --", "# Create object of type MasterRectWave with library of coefficients", "# for rectification and wavelength calibration", "master_rectwv", "=", "MasterRectWave", "(", "instrument", "=", "'EMIR'", ")", "master_rectwv", ".", "quality_control", "=", "numina", ".", "types", ".", "qc", ".", "QC", ".", "GOOD", "master_rectwv", ".", "tags", "[", "'grism'", "]", "=", "grism_name", "master_rectwv", ".", "tags", "[", "'filter'", "]", "=", "filter_name", "master_rectwv", ".", "meta_info", "[", "'dtu_configuration'", "]", "=", "outdict", "[", "'dtu_configuration'", "]", "master_rectwv", ".", "meta_info", "[", "'refined_boundary_model'", "]", "=", "{", "'parmodel'", ":", "refined_boundary_model", ".", "meta_info", "[", "'parmodel'", "]", "}", "master_rectwv", ".", "meta_info", "[", "'refined_boundary_model'", "]", ".", "update", "(", "outdict", "[", "'refined_boundary_model'", "]", "[", "'contents'", "]", ")", "master_rectwv", ".", "total_slitlets", "=", "EMIR_NBARS", "master_rectwv", ".", "meta_info", "[", "'origin'", "]", "=", "{", "'bound_param'", ":", "'uuid'", "+", "refined_boundary_model", ".", "uuid", ",", "'longslit_frames'", ":", "[", "'uuid:'", "+", "list_coef_rect_wpoly", "[", "ifile", "]", ".", "uuid", "for", "ifile", "in", "range", "(", "nfiles", ")", "]", "}", "for", "i", "in", "range", "(", "EMIR_NBARS", ")", ":", "islitlet", "=", "i", "+", "1", "dumdict", "=", "{", "'islitlet'", ":", "islitlet", "}", "cslitlet", "=", "'slitlet'", "+", "str", "(", "islitlet", ")", ".", "zfill", "(", "2", ")", "if", "cslitlet", "in", "outdict", "[", "'contents'", "]", ":", "dumdict", ".", "update", "(", "outdict", "[", "'contents'", "]", "[", "cslitlet", "]", ")", "else", ":", "dumdict", ".", "update", "(", "{", "'bb_nc1_orig'", ":", "0", ",", "'bb_nc2_orig'", ":", "0", ",", "'ymargin_bb'", ":", "0", ",", "'list_csu_bar_slit_center'", ":", "[", "]", ",", "'ttd_order'", ":", "0", ",", "'ncoef_rect'", ":", "0", ",", "'wpolydegree'", ":", "0", "}", ")", "master_rectwv", ".", "missing_slitlets", ".", "append", "(", "islitlet", ")", "master_rectwv", ".", "contents", ".", "append", "(", "dumdict", ")", "master_rectwv", ".", "writeto", "(", "args", ".", "out_MOSlibrary", ".", "name", ")", "print", "(", "'>>> Saving file '", "+", "args", ".", "out_MOSlibrary", ".", "name", ")" ]
with open(args.out_MOSlibrary.name + '_old', 'w') as fstream: json.dump(outdict, fstream, indent=2, sort_keys=True) print('>>> Saving file ' + args.out_MOSlibrary.name + '_old')
[ "with", "open", "(", "args", ".", "out_MOSlibrary", ".", "name", "+", "_old", "w", ")", "as", "fstream", ":", "json", ".", "dump", "(", "outdict", "fstream", "indent", "=", "2", "sort_keys", "=", "True", ")", "print", "(", ">>>", "Saving", "file", "+", "args", ".", "out_MOSlibrary", ".", "name", "+", "_old", ")" ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/rect_wpoly_for_mos.py#L65-L434
guaix-ucm/pyemir
emirdrp/tools/display_slitlet_arrangement.py
display_slitlet_arrangement
def display_slitlet_arrangement(fileobj, grism=None, spfilter=None, bbox=None, adjust=None, geometry=None, debugplot=0): """Display slitlet arrangment from CSUP keywords in FITS header. Parameters ---------- fileobj : file object FITS or TXT file object. grism : str Grism. grism : str Filter. bbox : tuple of 4 floats If not None, values for xmin, xmax, ymin and ymax. adjust : bool Adjust X range according to minimum and maximum csu_bar_left and csu_bar_right (note that this option is overriden by 'bbox') geometry : tuple (4 integers) or None x, y, dx, dy values employed to set the Qt backend geometry. debugplot : int Determines whether intermediate computations and/or plots are displayed. The valid codes are defined in numina.array.display.pause_debugplot Returns ------- csu_bar_left : list of floats Location (mm) of the left bar for each slitlet. csu_bar_right : list of floats Location (mm) of the right bar for each slitlet, using the same origin employed for csu_bar_left (which is not the value stored in the FITS keywords. csu_bar_slit_center : list of floats Middle point (mm) in between the two bars defining a slitlet. csu_bar_slit_width : list of floats Slitlet width (mm), computed as the distance between the two bars defining the slitlet. """ if fileobj.name[-4:] == ".txt": if grism is None: raise ValueError("Undefined grism!") if spfilter is None: raise ValueError("Undefined filter!") # define CsuConfiguration object csu_config = CsuConfiguration() csu_config._csu_bar_left = [] csu_config._csu_bar_right = [] csu_config._csu_bar_slit_center = [] csu_config._csu_bar_slit_width = [] # since the input filename has been opened with argparse in binary # mode, it is necessary to close it and open it in text mode fileobj.close() # read TXT file with open(fileobj.name, mode='rt') as f: file_content = f.read().splitlines() next_id_bar = 1 for line in file_content: if len(line) > 0: if line[0] not in ['#']: line_contents = line.split() id_bar = int(line_contents[0]) position = float(line_contents[1]) if id_bar == next_id_bar: if id_bar <= EMIR_NBARS: csu_config._csu_bar_left.append(position) next_id_bar = id_bar + EMIR_NBARS else: csu_config._csu_bar_right.append(341.5 - position) next_id_bar = id_bar - EMIR_NBARS + 1 else: raise ValueError("Unexpected id_bar:" + str(id_bar)) # compute slit width and center for i in range(EMIR_NBARS): csu_config._csu_bar_slit_center.append( (csu_config._csu_bar_left[i] + csu_config._csu_bar_right[i])/2 ) csu_config._csu_bar_slit_width.append( csu_config._csu_bar_right[i] - csu_config._csu_bar_left[i] ) else: # read input FITS file hdulist = fits.open(fileobj.name) image_header = hdulist[0].header hdulist.close() # additional info from header grism = image_header['grism'] spfilter = image_header['filter'] # define slitlet arrangement csu_config = CsuConfiguration.define_from_fits(fileobj) # determine calibration if grism in ["J", "OPEN"] and spfilter == "J": wv_parameters = set_wv_parameters("J", "J") elif grism in ["H", "OPEN"] and spfilter == "H": wv_parameters = set_wv_parameters("H", "H") elif grism in ["K", "OPEN"] and spfilter == "Ksp": wv_parameters = set_wv_parameters("Ksp", "K") elif grism in ["LR", "OPEN"] and spfilter == "YJ": wv_parameters = set_wv_parameters("YJ", "LR") elif grism in ["LR", "OPEN"] and spfilter == "HK": wv_parameters = set_wv_parameters("HK", "LR") else: raise ValueError("Invalid grism + filter configuration") crval1 = wv_parameters['poly_crval1_linear'] cdelt1 = wv_parameters['poly_cdelt1_linear'] wvmin_useful = 
wv_parameters['wvmin_useful'] wvmax_useful = wv_parameters['wvmax_useful'] # display arrangement if abs(debugplot) >= 10: print("slit left right center width min.wave max.wave") print("==== ======= ======= ======= ===== ======== ========") for i in range(EMIR_NBARS): ibar = i + 1 csu_crval1 = crval1(csu_config.csu_bar_slit_center(ibar)) csu_cdelt1 = cdelt1(csu_config.csu_bar_slit_center(ibar)) csu_crvaln = csu_crval1 + (EMIR_NAXIS1 - 1) * csu_cdelt1 if wvmin_useful is not None: csu_crval1 = np.amax([csu_crval1, wvmin_useful]) if wvmax_useful is not None: csu_crvaln = np.amin([csu_crvaln, wvmax_useful]) print("{0:4d} {1:8.3f} {2:8.3f} {3:8.3f} {4:7.3f} " "{5:8.2f} {6:8.2f}".format( ibar, csu_config.csu_bar_left(ibar), csu_config.csu_bar_right(ibar), csu_config.csu_bar_slit_center(ibar), csu_config.csu_bar_slit_width(ibar), csu_crval1, csu_crvaln) ) print( "---> {0:8.3f} {1:8.3f} {2:8.3f} {3:7.3f} <- mean (all)".format( np.mean(csu_config._csu_bar_left), np.mean(csu_config._csu_bar_right), np.mean(csu_config._csu_bar_slit_center), np.mean(csu_config._csu_bar_slit_width) ) ) print( "---> {0:8.3f} {1:8.3f} {2:8.3f} {3:7.3f} <- mean (odd)".format( np.mean(csu_config._csu_bar_left[::2]), np.mean(csu_config._csu_bar_right[::2]), np.mean(csu_config._csu_bar_slit_center[::2]), np.mean(csu_config._csu_bar_slit_width[::2]) ) ) print( "---> {0:8.3f} {1:8.3f} {2:8.3f} {3:7.3f} <- mean (even)".format( np.mean(csu_config._csu_bar_left[1::2]), np.mean(csu_config._csu_bar_right[1::2]), np.mean(csu_config._csu_bar_slit_center[1::2]), np.mean(csu_config._csu_bar_slit_width[1::2]) ) ) # display slit arrangement if abs(debugplot) % 10 != 0: fig = plt.figure() set_window_geometry(geometry) ax = fig.add_subplot(111) if bbox is None: if adjust: xmin = min(csu_config._csu_bar_left) xmax = max(csu_config._csu_bar_right) dx = xmax - xmin if dx == 0: dx = 1 xmin -= dx/20 xmax += dx/20 ax.set_xlim(xmin, xmax) else: ax.set_xlim(0., 341.5) ax.set_ylim(0, 56) else: ax.set_xlim(bbox[0], bbox[1]) ax.set_ylim(bbox[2], bbox[3]) ax.set_xlabel('csu_bar_position (mm)') ax.set_ylabel('slit number') for i in range(EMIR_NBARS): ibar = i + 1 ax.add_patch(patches.Rectangle( (csu_config.csu_bar_left(ibar), ibar-0.5), csu_config.csu_bar_slit_width(ibar), 1.0)) ax.plot([0., csu_config.csu_bar_left(ibar)], [ibar, ibar], '-', color='gray') ax.plot([csu_config.csu_bar_right(ibar), 341.5], [ibar, ibar], '-', color='gray') plt.title("File: " + fileobj.name + "\ngrism=" + grism + ", filter=" + spfilter) pause_debugplot(debugplot, pltshow=True) # return results return csu_config._csu_bar_left, csu_config._csu_bar_right, \ csu_config._csu_bar_slit_center, csu_config._csu_bar_slit_width
python
def display_slitlet_arrangement(fileobj, grism=None, spfilter=None, bbox=None, adjust=None, geometry=None, debugplot=0): """Display slitlet arrangment from CSUP keywords in FITS header. Parameters ---------- fileobj : file object FITS or TXT file object. grism : str Grism. grism : str Filter. bbox : tuple of 4 floats If not None, values for xmin, xmax, ymin and ymax. adjust : bool Adjust X range according to minimum and maximum csu_bar_left and csu_bar_right (note that this option is overriden by 'bbox') geometry : tuple (4 integers) or None x, y, dx, dy values employed to set the Qt backend geometry. debugplot : int Determines whether intermediate computations and/or plots are displayed. The valid codes are defined in numina.array.display.pause_debugplot Returns ------- csu_bar_left : list of floats Location (mm) of the left bar for each slitlet. csu_bar_right : list of floats Location (mm) of the right bar for each slitlet, using the same origin employed for csu_bar_left (which is not the value stored in the FITS keywords. csu_bar_slit_center : list of floats Middle point (mm) in between the two bars defining a slitlet. csu_bar_slit_width : list of floats Slitlet width (mm), computed as the distance between the two bars defining the slitlet. """ if fileobj.name[-4:] == ".txt": if grism is None: raise ValueError("Undefined grism!") if spfilter is None: raise ValueError("Undefined filter!") # define CsuConfiguration object csu_config = CsuConfiguration() csu_config._csu_bar_left = [] csu_config._csu_bar_right = [] csu_config._csu_bar_slit_center = [] csu_config._csu_bar_slit_width = [] # since the input filename has been opened with argparse in binary # mode, it is necessary to close it and open it in text mode fileobj.close() # read TXT file with open(fileobj.name, mode='rt') as f: file_content = f.read().splitlines() next_id_bar = 1 for line in file_content: if len(line) > 0: if line[0] not in ['#']: line_contents = line.split() id_bar = int(line_contents[0]) position = float(line_contents[1]) if id_bar == next_id_bar: if id_bar <= EMIR_NBARS: csu_config._csu_bar_left.append(position) next_id_bar = id_bar + EMIR_NBARS else: csu_config._csu_bar_right.append(341.5 - position) next_id_bar = id_bar - EMIR_NBARS + 1 else: raise ValueError("Unexpected id_bar:" + str(id_bar)) # compute slit width and center for i in range(EMIR_NBARS): csu_config._csu_bar_slit_center.append( (csu_config._csu_bar_left[i] + csu_config._csu_bar_right[i])/2 ) csu_config._csu_bar_slit_width.append( csu_config._csu_bar_right[i] - csu_config._csu_bar_left[i] ) else: # read input FITS file hdulist = fits.open(fileobj.name) image_header = hdulist[0].header hdulist.close() # additional info from header grism = image_header['grism'] spfilter = image_header['filter'] # define slitlet arrangement csu_config = CsuConfiguration.define_from_fits(fileobj) # determine calibration if grism in ["J", "OPEN"] and spfilter == "J": wv_parameters = set_wv_parameters("J", "J") elif grism in ["H", "OPEN"] and spfilter == "H": wv_parameters = set_wv_parameters("H", "H") elif grism in ["K", "OPEN"] and spfilter == "Ksp": wv_parameters = set_wv_parameters("Ksp", "K") elif grism in ["LR", "OPEN"] and spfilter == "YJ": wv_parameters = set_wv_parameters("YJ", "LR") elif grism in ["LR", "OPEN"] and spfilter == "HK": wv_parameters = set_wv_parameters("HK", "LR") else: raise ValueError("Invalid grism + filter configuration") crval1 = wv_parameters['poly_crval1_linear'] cdelt1 = wv_parameters['poly_cdelt1_linear'] wvmin_useful = 
wv_parameters['wvmin_useful'] wvmax_useful = wv_parameters['wvmax_useful'] # display arrangement if abs(debugplot) >= 10: print("slit left right center width min.wave max.wave") print("==== ======= ======= ======= ===== ======== ========") for i in range(EMIR_NBARS): ibar = i + 1 csu_crval1 = crval1(csu_config.csu_bar_slit_center(ibar)) csu_cdelt1 = cdelt1(csu_config.csu_bar_slit_center(ibar)) csu_crvaln = csu_crval1 + (EMIR_NAXIS1 - 1) * csu_cdelt1 if wvmin_useful is not None: csu_crval1 = np.amax([csu_crval1, wvmin_useful]) if wvmax_useful is not None: csu_crvaln = np.amin([csu_crvaln, wvmax_useful]) print("{0:4d} {1:8.3f} {2:8.3f} {3:8.3f} {4:7.3f} " "{5:8.2f} {6:8.2f}".format( ibar, csu_config.csu_bar_left(ibar), csu_config.csu_bar_right(ibar), csu_config.csu_bar_slit_center(ibar), csu_config.csu_bar_slit_width(ibar), csu_crval1, csu_crvaln) ) print( "---> {0:8.3f} {1:8.3f} {2:8.3f} {3:7.3f} <- mean (all)".format( np.mean(csu_config._csu_bar_left), np.mean(csu_config._csu_bar_right), np.mean(csu_config._csu_bar_slit_center), np.mean(csu_config._csu_bar_slit_width) ) ) print( "---> {0:8.3f} {1:8.3f} {2:8.3f} {3:7.3f} <- mean (odd)".format( np.mean(csu_config._csu_bar_left[::2]), np.mean(csu_config._csu_bar_right[::2]), np.mean(csu_config._csu_bar_slit_center[::2]), np.mean(csu_config._csu_bar_slit_width[::2]) ) ) print( "---> {0:8.3f} {1:8.3f} {2:8.3f} {3:7.3f} <- mean (even)".format( np.mean(csu_config._csu_bar_left[1::2]), np.mean(csu_config._csu_bar_right[1::2]), np.mean(csu_config._csu_bar_slit_center[1::2]), np.mean(csu_config._csu_bar_slit_width[1::2]) ) ) # display slit arrangement if abs(debugplot) % 10 != 0: fig = plt.figure() set_window_geometry(geometry) ax = fig.add_subplot(111) if bbox is None: if adjust: xmin = min(csu_config._csu_bar_left) xmax = max(csu_config._csu_bar_right) dx = xmax - xmin if dx == 0: dx = 1 xmin -= dx/20 xmax += dx/20 ax.set_xlim(xmin, xmax) else: ax.set_xlim(0., 341.5) ax.set_ylim(0, 56) else: ax.set_xlim(bbox[0], bbox[1]) ax.set_ylim(bbox[2], bbox[3]) ax.set_xlabel('csu_bar_position (mm)') ax.set_ylabel('slit number') for i in range(EMIR_NBARS): ibar = i + 1 ax.add_patch(patches.Rectangle( (csu_config.csu_bar_left(ibar), ibar-0.5), csu_config.csu_bar_slit_width(ibar), 1.0)) ax.plot([0., csu_config.csu_bar_left(ibar)], [ibar, ibar], '-', color='gray') ax.plot([csu_config.csu_bar_right(ibar), 341.5], [ibar, ibar], '-', color='gray') plt.title("File: " + fileobj.name + "\ngrism=" + grism + ", filter=" + spfilter) pause_debugplot(debugplot, pltshow=True) # return results return csu_config._csu_bar_left, csu_config._csu_bar_right, \ csu_config._csu_bar_slit_center, csu_config._csu_bar_slit_width
[ "def", "display_slitlet_arrangement", "(", "fileobj", ",", "grism", "=", "None", ",", "spfilter", "=", "None", ",", "bbox", "=", "None", ",", "adjust", "=", "None", ",", "geometry", "=", "None", ",", "debugplot", "=", "0", ")", ":", "if", "fileobj", ".", "name", "[", "-", "4", ":", "]", "==", "\".txt\"", ":", "if", "grism", "is", "None", ":", "raise", "ValueError", "(", "\"Undefined grism!\"", ")", "if", "spfilter", "is", "None", ":", "raise", "ValueError", "(", "\"Undefined filter!\"", ")", "# define CsuConfiguration object", "csu_config", "=", "CsuConfiguration", "(", ")", "csu_config", ".", "_csu_bar_left", "=", "[", "]", "csu_config", ".", "_csu_bar_right", "=", "[", "]", "csu_config", ".", "_csu_bar_slit_center", "=", "[", "]", "csu_config", ".", "_csu_bar_slit_width", "=", "[", "]", "# since the input filename has been opened with argparse in binary", "# mode, it is necessary to close it and open it in text mode", "fileobj", ".", "close", "(", ")", "# read TXT file", "with", "open", "(", "fileobj", ".", "name", ",", "mode", "=", "'rt'", ")", "as", "f", ":", "file_content", "=", "f", ".", "read", "(", ")", ".", "splitlines", "(", ")", "next_id_bar", "=", "1", "for", "line", "in", "file_content", ":", "if", "len", "(", "line", ")", ">", "0", ":", "if", "line", "[", "0", "]", "not", "in", "[", "'#'", "]", ":", "line_contents", "=", "line", ".", "split", "(", ")", "id_bar", "=", "int", "(", "line_contents", "[", "0", "]", ")", "position", "=", "float", "(", "line_contents", "[", "1", "]", ")", "if", "id_bar", "==", "next_id_bar", ":", "if", "id_bar", "<=", "EMIR_NBARS", ":", "csu_config", ".", "_csu_bar_left", ".", "append", "(", "position", ")", "next_id_bar", "=", "id_bar", "+", "EMIR_NBARS", "else", ":", "csu_config", ".", "_csu_bar_right", ".", "append", "(", "341.5", "-", "position", ")", "next_id_bar", "=", "id_bar", "-", "EMIR_NBARS", "+", "1", "else", ":", "raise", "ValueError", "(", "\"Unexpected id_bar:\"", "+", "str", "(", "id_bar", ")", ")", "# compute slit width and center", "for", "i", "in", "range", "(", "EMIR_NBARS", ")", ":", "csu_config", ".", "_csu_bar_slit_center", ".", "append", "(", "(", "csu_config", ".", "_csu_bar_left", "[", "i", "]", "+", "csu_config", ".", "_csu_bar_right", "[", "i", "]", ")", "/", "2", ")", "csu_config", ".", "_csu_bar_slit_width", ".", "append", "(", "csu_config", ".", "_csu_bar_right", "[", "i", "]", "-", "csu_config", ".", "_csu_bar_left", "[", "i", "]", ")", "else", ":", "# read input FITS file", "hdulist", "=", "fits", ".", "open", "(", "fileobj", ".", "name", ")", "image_header", "=", "hdulist", "[", "0", "]", ".", "header", "hdulist", ".", "close", "(", ")", "# additional info from header", "grism", "=", "image_header", "[", "'grism'", "]", "spfilter", "=", "image_header", "[", "'filter'", "]", "# define slitlet arrangement", "csu_config", "=", "CsuConfiguration", ".", "define_from_fits", "(", "fileobj", ")", "# determine calibration", "if", "grism", "in", "[", "\"J\"", ",", "\"OPEN\"", "]", "and", "spfilter", "==", "\"J\"", ":", "wv_parameters", "=", "set_wv_parameters", "(", "\"J\"", ",", "\"J\"", ")", "elif", "grism", "in", "[", "\"H\"", ",", "\"OPEN\"", "]", "and", "spfilter", "==", "\"H\"", ":", "wv_parameters", "=", "set_wv_parameters", "(", "\"H\"", ",", "\"H\"", ")", "elif", "grism", "in", "[", "\"K\"", ",", "\"OPEN\"", "]", "and", "spfilter", "==", "\"Ksp\"", ":", "wv_parameters", "=", "set_wv_parameters", "(", "\"Ksp\"", ",", "\"K\"", ")", "elif", "grism", "in", "[", "\"LR\"", ",", "\"OPEN\"", "]", "and", "spfilter", "==", 
"\"YJ\"", ":", "wv_parameters", "=", "set_wv_parameters", "(", "\"YJ\"", ",", "\"LR\"", ")", "elif", "grism", "in", "[", "\"LR\"", ",", "\"OPEN\"", "]", "and", "spfilter", "==", "\"HK\"", ":", "wv_parameters", "=", "set_wv_parameters", "(", "\"HK\"", ",", "\"LR\"", ")", "else", ":", "raise", "ValueError", "(", "\"Invalid grism + filter configuration\"", ")", "crval1", "=", "wv_parameters", "[", "'poly_crval1_linear'", "]", "cdelt1", "=", "wv_parameters", "[", "'poly_cdelt1_linear'", "]", "wvmin_useful", "=", "wv_parameters", "[", "'wvmin_useful'", "]", "wvmax_useful", "=", "wv_parameters", "[", "'wvmax_useful'", "]", "# display arrangement", "if", "abs", "(", "debugplot", ")", ">=", "10", ":", "print", "(", "\"slit left right center width min.wave max.wave\"", ")", "print", "(", "\"==== ======= ======= ======= ===== ======== ========\"", ")", "for", "i", "in", "range", "(", "EMIR_NBARS", ")", ":", "ibar", "=", "i", "+", "1", "csu_crval1", "=", "crval1", "(", "csu_config", ".", "csu_bar_slit_center", "(", "ibar", ")", ")", "csu_cdelt1", "=", "cdelt1", "(", "csu_config", ".", "csu_bar_slit_center", "(", "ibar", ")", ")", "csu_crvaln", "=", "csu_crval1", "+", "(", "EMIR_NAXIS1", "-", "1", ")", "*", "csu_cdelt1", "if", "wvmin_useful", "is", "not", "None", ":", "csu_crval1", "=", "np", ".", "amax", "(", "[", "csu_crval1", ",", "wvmin_useful", "]", ")", "if", "wvmax_useful", "is", "not", "None", ":", "csu_crvaln", "=", "np", ".", "amin", "(", "[", "csu_crvaln", ",", "wvmax_useful", "]", ")", "print", "(", "\"{0:4d} {1:8.3f} {2:8.3f} {3:8.3f} {4:7.3f} \"", "\"{5:8.2f} {6:8.2f}\"", ".", "format", "(", "ibar", ",", "csu_config", ".", "csu_bar_left", "(", "ibar", ")", ",", "csu_config", ".", "csu_bar_right", "(", "ibar", ")", ",", "csu_config", ".", "csu_bar_slit_center", "(", "ibar", ")", ",", "csu_config", ".", "csu_bar_slit_width", "(", "ibar", ")", ",", "csu_crval1", ",", "csu_crvaln", ")", ")", "print", "(", "\"---> {0:8.3f} {1:8.3f} {2:8.3f} {3:7.3f} <- mean (all)\"", ".", "format", "(", "np", ".", "mean", "(", "csu_config", ".", "_csu_bar_left", ")", ",", "np", ".", "mean", "(", "csu_config", ".", "_csu_bar_right", ")", ",", "np", ".", "mean", "(", "csu_config", ".", "_csu_bar_slit_center", ")", ",", "np", ".", "mean", "(", "csu_config", ".", "_csu_bar_slit_width", ")", ")", ")", "print", "(", "\"---> {0:8.3f} {1:8.3f} {2:8.3f} {3:7.3f} <- mean (odd)\"", ".", "format", "(", "np", ".", "mean", "(", "csu_config", ".", "_csu_bar_left", "[", ":", ":", "2", "]", ")", ",", "np", ".", "mean", "(", "csu_config", ".", "_csu_bar_right", "[", ":", ":", "2", "]", ")", ",", "np", ".", "mean", "(", "csu_config", ".", "_csu_bar_slit_center", "[", ":", ":", "2", "]", ")", ",", "np", ".", "mean", "(", "csu_config", ".", "_csu_bar_slit_width", "[", ":", ":", "2", "]", ")", ")", ")", "print", "(", "\"---> {0:8.3f} {1:8.3f} {2:8.3f} {3:7.3f} <- mean (even)\"", ".", "format", "(", "np", ".", "mean", "(", "csu_config", ".", "_csu_bar_left", "[", "1", ":", ":", "2", "]", ")", ",", "np", ".", "mean", "(", "csu_config", ".", "_csu_bar_right", "[", "1", ":", ":", "2", "]", ")", ",", "np", ".", "mean", "(", "csu_config", ".", "_csu_bar_slit_center", "[", "1", ":", ":", "2", "]", ")", ",", "np", ".", "mean", "(", "csu_config", ".", "_csu_bar_slit_width", "[", "1", ":", ":", "2", "]", ")", ")", ")", "# display slit arrangement", "if", "abs", "(", "debugplot", ")", "%", "10", "!=", "0", ":", "fig", "=", "plt", ".", "figure", "(", ")", "set_window_geometry", "(", "geometry", ")", "ax", "=", "fig", ".", "add_subplot", "(", 
"111", ")", "if", "bbox", "is", "None", ":", "if", "adjust", ":", "xmin", "=", "min", "(", "csu_config", ".", "_csu_bar_left", ")", "xmax", "=", "max", "(", "csu_config", ".", "_csu_bar_right", ")", "dx", "=", "xmax", "-", "xmin", "if", "dx", "==", "0", ":", "dx", "=", "1", "xmin", "-=", "dx", "/", "20", "xmax", "+=", "dx", "/", "20", "ax", ".", "set_xlim", "(", "xmin", ",", "xmax", ")", "else", ":", "ax", ".", "set_xlim", "(", "0.", ",", "341.5", ")", "ax", ".", "set_ylim", "(", "0", ",", "56", ")", "else", ":", "ax", ".", "set_xlim", "(", "bbox", "[", "0", "]", ",", "bbox", "[", "1", "]", ")", "ax", ".", "set_ylim", "(", "bbox", "[", "2", "]", ",", "bbox", "[", "3", "]", ")", "ax", ".", "set_xlabel", "(", "'csu_bar_position (mm)'", ")", "ax", ".", "set_ylabel", "(", "'slit number'", ")", "for", "i", "in", "range", "(", "EMIR_NBARS", ")", ":", "ibar", "=", "i", "+", "1", "ax", ".", "add_patch", "(", "patches", ".", "Rectangle", "(", "(", "csu_config", ".", "csu_bar_left", "(", "ibar", ")", ",", "ibar", "-", "0.5", ")", ",", "csu_config", ".", "csu_bar_slit_width", "(", "ibar", ")", ",", "1.0", ")", ")", "ax", ".", "plot", "(", "[", "0.", ",", "csu_config", ".", "csu_bar_left", "(", "ibar", ")", "]", ",", "[", "ibar", ",", "ibar", "]", ",", "'-'", ",", "color", "=", "'gray'", ")", "ax", ".", "plot", "(", "[", "csu_config", ".", "csu_bar_right", "(", "ibar", ")", ",", "341.5", "]", ",", "[", "ibar", ",", "ibar", "]", ",", "'-'", ",", "color", "=", "'gray'", ")", "plt", ".", "title", "(", "\"File: \"", "+", "fileobj", ".", "name", "+", "\"\\ngrism=\"", "+", "grism", "+", "\", filter=\"", "+", "spfilter", ")", "pause_debugplot", "(", "debugplot", ",", "pltshow", "=", "True", ")", "# return results", "return", "csu_config", ".", "_csu_bar_left", ",", "csu_config", ".", "_csu_bar_right", ",", "csu_config", ".", "_csu_bar_slit_center", ",", "csu_config", ".", "_csu_bar_slit_width" ]
Display slitlet arrangement from CSUP keywords in FITS header.

Parameters
----------
fileobj : file object
    FITS or TXT file object.
grism : str
    Grism.
spfilter : str
    Filter.
bbox : tuple of 4 floats
    If not None, values for xmin, xmax, ymin and ymax.
adjust : bool
    Adjust X range according to minimum and maximum csu_bar_left and
    csu_bar_right (note that this option is overridden by 'bbox').
geometry : tuple (4 integers) or None
    x, y, dx, dy values employed to set the Qt backend geometry.
debugplot : int
    Determines whether intermediate computations and/or plots are displayed.
    The valid codes are defined in numina.array.display.pause_debugplot.

Returns
-------
csu_bar_left : list of floats
    Location (mm) of the left bar for each slitlet.
csu_bar_right : list of floats
    Location (mm) of the right bar for each slitlet, using the same origin
    employed for csu_bar_left (which is not the value stored in the FITS
    keywords).
csu_bar_slit_center : list of floats
    Middle point (mm) in between the two bars defining a slitlet.
csu_bar_slit_width : list of floats
    Slitlet width (mm), computed as the distance between the two bars
    defining the slitlet.
[ "Display", "slitlet", "arrangment", "from", "CSUP", "keywords", "in", "FITS", "header", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/tools/display_slitlet_arrangement.py#L42-L248
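A minimal usage sketch for the record above, assuming display_slitlet_arrangement is imported from emirdrp.tools.display_slitlet_arrangement; the FITS file name is hypothetical and debugplot=0 suppresses all plotting.

from emirdrp.tools.display_slitlet_arrangement import display_slitlet_arrangement

# hypothetical CSU configuration frame; the function reads fileobj.name
with open('csu_configuration.fits', 'rb') as fileobj:
    bar_left, bar_right, slit_center, slit_width = \
        display_slitlet_arrangement(fileobj, debugplot=0)
print(slit_center[:3])   # middle point (mm) of the first three slitlets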
guaix-ucm/pyemir
emirdrp/processing/wavecal/slitlet2d.py
Slitlet2D.extract_slitlet2d
def extract_slitlet2d(self, image_2k2k):
    """Extract slitlet 2d image from image with original EMIR dimensions.

    Parameters
    ----------
    image_2k2k : numpy array
        Original image (dimensions EMIR_NAXIS1 * EMIR_NAXIS2)

    Returns
    -------
    slitlet2d : numpy array
        Image corresponding to the slitlet region defined by its
        bounding box.

    """
    # protections
    naxis2, naxis1 = image_2k2k.shape
    if naxis1 != EMIR_NAXIS1:
        raise ValueError('Unexpected naxis1')
    if naxis2 != EMIR_NAXIS2:
        raise ValueError('Unexpected naxis2')

    # extract slitlet region
    slitlet2d = image_2k2k[(self.bb_ns1_orig - 1):self.bb_ns2_orig,
                           (self.bb_nc1_orig - 1):self.bb_nc2_orig]

    # transform to float
    slitlet2d = slitlet2d.astype(np.float)

    # display slitlet2d with boundaries and middle spectrum trail
    if abs(self.debugplot) in [21, 22]:
        self.ximshow_unrectified(slitlet2d)

    # return slitlet image
    return slitlet2d
python
def extract_slitlet2d(self, image_2k2k):
    """Extract slitlet 2d image from image with original EMIR dimensions.

    Parameters
    ----------
    image_2k2k : numpy array
        Original image (dimensions EMIR_NAXIS1 * EMIR_NAXIS2)

    Returns
    -------
    slitlet2d : numpy array
        Image corresponding to the slitlet region defined by its
        bounding box.

    """
    # protections
    naxis2, naxis1 = image_2k2k.shape
    if naxis1 != EMIR_NAXIS1:
        raise ValueError('Unexpected naxis1')
    if naxis2 != EMIR_NAXIS2:
        raise ValueError('Unexpected naxis2')

    # extract slitlet region
    slitlet2d = image_2k2k[(self.bb_ns1_orig - 1):self.bb_ns2_orig,
                           (self.bb_nc1_orig - 1):self.bb_nc2_orig]

    # transform to float
    slitlet2d = slitlet2d.astype(np.float)

    # display slitlet2d with boundaries and middle spectrum trail
    if abs(self.debugplot) in [21, 22]:
        self.ximshow_unrectified(slitlet2d)

    # return slitlet image
    return slitlet2d
[ "def", "extract_slitlet2d", "(", "self", ",", "image_2k2k", ")", ":", "# protections", "naxis2", ",", "naxis1", "=", "image_2k2k", ".", "shape", "if", "naxis1", "!=", "EMIR_NAXIS1", ":", "raise", "ValueError", "(", "'Unexpected naxis1'", ")", "if", "naxis2", "!=", "EMIR_NAXIS2", ":", "raise", "ValueError", "(", "'Unexpected naxis2'", ")", "# extract slitlet region", "slitlet2d", "=", "image_2k2k", "[", "(", "self", ".", "bb_ns1_orig", "-", "1", ")", ":", "self", ".", "bb_ns2_orig", ",", "(", "self", ".", "bb_nc1_orig", "-", "1", ")", ":", "self", ".", "bb_nc2_orig", "]", "# transform to float", "slitlet2d", "=", "slitlet2d", ".", "astype", "(", "np", ".", "float", ")", "# display slitlet2d with boundaries and middle spectrum trail", "if", "abs", "(", "self", ".", "debugplot", ")", "in", "[", "21", ",", "22", "]", ":", "self", ".", "ximshow_unrectified", "(", "slitlet2d", ")", "# return slitlet image", "return", "slitlet2d" ]
Extract slitlet 2d image from image with original EMIR dimensions.

Parameters
----------
image_2k2k : numpy array
    Original image (dimensions EMIR_NAXIS1 * EMIR_NAXIS2)

Returns
-------
slitlet2d : numpy array
    Image corresponding to the slitlet region defined by its bounding box.
[ "Extract", "slitlet", "2d", "image", "from", "image", "with", "original", "EMIR", "dimensions", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/wavecal/slitlet2d.py#L316-L351
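A hedged sketch of the call above: slt stands for an already-configured Slitlet2D instance and the FITS file name is hypothetical; the method only needs a full EMIR_NAXIS1 x EMIR_NAXIS2 frame.

from astropy.io import fits

with fits.open('emir_exposure.fits') as hdul:   # hypothetical file name
    image_2k2k = hdul[0].data                   # full 2048x2048 EMIR frame
slitlet2d = slt.extract_slitlet2d(image_2k2k)   # slt: a configured Slitlet2D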
guaix-ucm/pyemir
emirdrp/processing/wavecal/slitlet2d.py
Slitlet2D.rectify
def rectify(self, slitlet2d, resampling, inverse=False):
    """Rectify slitlet using computed transformation.

    Parameters
    ----------
    slitlet2d : numpy array
        Image containing the 2d slitlet image.
    resampling : int
        1: nearest neighbour, 2: flux preserving interpolation.
    inverse : bool
        If true, the inverse rectification transformation is employed.

    Returns
    -------
    slitlet2d_rect : numpy array
        Rectified slitlet image.

    """
    if resampling not in [1, 2]:
        raise ValueError("Unexpected resampling value=" + str(resampling))

    # check image dimension
    naxis2, naxis1 = slitlet2d.shape
    if naxis1 != self.bb_nc2_orig - self.bb_nc1_orig + 1:
        raise ValueError("Unexpected slitlet2d_rect naxis1")
    if naxis2 != self.bb_ns2_orig - self.bb_ns1_orig + 1:
        raise ValueError("Unexpected slitlet2d_rect naxis2")

    if inverse:
        aij = self.tti_aij
        bij = self.tti_bij
    else:
        aij = self.ttd_aij
        bij = self.ttd_bij

    # rectify image
    slitlet2d_rect = rectify2d(
        image2d=slitlet2d,
        aij=aij,
        bij=bij,
        resampling=resampling
    )

    if abs(self.debugplot % 10) != 0:
        if inverse:
            self.ximshow_unrectified(slitlet2d_rect)
        else:
            self.ximshow_rectified(slitlet2d_rect)

    return slitlet2d_rect
python
def rectify(self, slitlet2d, resampling, inverse=False):
    """Rectify slitlet using computed transformation.

    Parameters
    ----------
    slitlet2d : numpy array
        Image containing the 2d slitlet image.
    resampling : int
        1: nearest neighbour, 2: flux preserving interpolation.
    inverse : bool
        If true, the inverse rectification transformation is employed.

    Returns
    -------
    slitlet2d_rect : numpy array
        Rectified slitlet image.

    """
    if resampling not in [1, 2]:
        raise ValueError("Unexpected resampling value=" + str(resampling))

    # check image dimension
    naxis2, naxis1 = slitlet2d.shape
    if naxis1 != self.bb_nc2_orig - self.bb_nc1_orig + 1:
        raise ValueError("Unexpected slitlet2d_rect naxis1")
    if naxis2 != self.bb_ns2_orig - self.bb_ns1_orig + 1:
        raise ValueError("Unexpected slitlet2d_rect naxis2")

    if inverse:
        aij = self.tti_aij
        bij = self.tti_bij
    else:
        aij = self.ttd_aij
        bij = self.ttd_bij

    # rectify image
    slitlet2d_rect = rectify2d(
        image2d=slitlet2d,
        aij=aij,
        bij=bij,
        resampling=resampling
    )

    if abs(self.debugplot % 10) != 0:
        if inverse:
            self.ximshow_unrectified(slitlet2d_rect)
        else:
            self.ximshow_rectified(slitlet2d_rect)

    return slitlet2d_rect
[ "def", "rectify", "(", "self", ",", "slitlet2d", ",", "resampling", ",", "inverse", "=", "False", ")", ":", "if", "resampling", "not", "in", "[", "1", ",", "2", "]", ":", "raise", "ValueError", "(", "\"Unexpected resampling value=\"", "+", "str", "(", "resampling", ")", ")", "# check image dimension", "naxis2", ",", "naxis1", "=", "slitlet2d", ".", "shape", "if", "naxis1", "!=", "self", ".", "bb_nc2_orig", "-", "self", ".", "bb_nc1_orig", "+", "1", ":", "raise", "ValueError", "(", "\"Unexpected slitlet2d_rect naxis1\"", ")", "if", "naxis2", "!=", "self", ".", "bb_ns2_orig", "-", "self", ".", "bb_ns1_orig", "+", "1", ":", "raise", "ValueError", "(", "\"Unexpected slitlet2d_rect naxis2\"", ")", "if", "inverse", ":", "aij", "=", "self", ".", "tti_aij", "bij", "=", "self", ".", "tti_bij", "else", ":", "aij", "=", "self", ".", "ttd_aij", "bij", "=", "self", ".", "ttd_bij", "# rectify image", "slitlet2d_rect", "=", "rectify2d", "(", "image2d", "=", "slitlet2d", ",", "aij", "=", "aij", ",", "bij", "=", "bij", ",", "resampling", "=", "resampling", ")", "if", "abs", "(", "self", ".", "debugplot", "%", "10", ")", "!=", "0", ":", "if", "inverse", ":", "self", ".", "ximshow_unrectified", "(", "slitlet2d_rect", ")", "else", ":", "self", ".", "ximshow_rectified", "(", "slitlet2d_rect", ")", "return", "slitlet2d_rect" ]
Rectify slitlet using computed transformation.

Parameters
----------
slitlet2d : numpy array
    Image containing the 2d slitlet image.
resampling : int
    1: nearest neighbour, 2: flux preserving interpolation.
inverse : bool
    If true, the inverse rectification transformation is employed.

Returns
-------
slitlet2d_rect : numpy array
    Rectified slitlet image.
[ "Rectify", "slitlet", "using", "computed", "transformation", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/wavecal/slitlet2d.py#L353-L404
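A short sketch of both directions of the transform, with slt and slitlet2d as in the previous example; resampling=2 selects the flux-preserving mode named in the docstring.

slitlet2d_rect = slt.rectify(slitlet2d, resampling=2)
slitlet2d_back = slt.rectify(slitlet2d_rect, resampling=2, inverse=True)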
guaix-ucm/pyemir
emirdrp/processing/wavecal/slitlet2d.py
Slitlet2D.ximshow_unrectified
def ximshow_unrectified(self, slitlet2d):
    """Display unrectified image with spectrails and frontiers.

    Parameters
    ----------
    slitlet2d : numpy array
        Array containing the unrectified slitlet image.

    """
    title = "Slitlet#" + str(self.islitlet)
    ax = ximshow(slitlet2d, title=title,
                 first_pixel=(self.bb_nc1_orig, self.bb_ns1_orig),
                 show=False)
    xdum = np.linspace(1, EMIR_NAXIS1, num=EMIR_NAXIS1)
    ylower = self.list_spectrails[0](xdum)
    ax.plot(xdum, ylower, 'b-')
    ymiddle = self.list_spectrails[1](xdum)
    ax.plot(xdum, ymiddle, 'b--')
    yupper = self.list_spectrails[2](xdum)
    ax.plot(xdum, yupper, 'b-')
    ylower_frontier = self.list_frontiers[0](xdum)
    ax.plot(xdum, ylower_frontier, 'b:')
    yupper_frontier = self.list_frontiers[1](xdum)
    ax.plot(xdum, yupper_frontier, 'b:')
    pause_debugplot(debugplot=self.debugplot, pltshow=True)
python
def ximshow_unrectified(self, slitlet2d):
    """Display unrectified image with spectrails and frontiers.

    Parameters
    ----------
    slitlet2d : numpy array
        Array containing the unrectified slitlet image.

    """
    title = "Slitlet#" + str(self.islitlet)
    ax = ximshow(slitlet2d, title=title,
                 first_pixel=(self.bb_nc1_orig, self.bb_ns1_orig),
                 show=False)
    xdum = np.linspace(1, EMIR_NAXIS1, num=EMIR_NAXIS1)
    ylower = self.list_spectrails[0](xdum)
    ax.plot(xdum, ylower, 'b-')
    ymiddle = self.list_spectrails[1](xdum)
    ax.plot(xdum, ymiddle, 'b--')
    yupper = self.list_spectrails[2](xdum)
    ax.plot(xdum, yupper, 'b-')
    ylower_frontier = self.list_frontiers[0](xdum)
    ax.plot(xdum, ylower_frontier, 'b:')
    yupper_frontier = self.list_frontiers[1](xdum)
    ax.plot(xdum, yupper_frontier, 'b:')
    pause_debugplot(debugplot=self.debugplot, pltshow=True)
[ "def", "ximshow_unrectified", "(", "self", ",", "slitlet2d", ")", ":", "title", "=", "\"Slitlet#\"", "+", "str", "(", "self", ".", "islitlet", ")", "ax", "=", "ximshow", "(", "slitlet2d", ",", "title", "=", "title", ",", "first_pixel", "=", "(", "self", ".", "bb_nc1_orig", ",", "self", ".", "bb_ns1_orig", ")", ",", "show", "=", "False", ")", "xdum", "=", "np", ".", "linspace", "(", "1", ",", "EMIR_NAXIS1", ",", "num", "=", "EMIR_NAXIS1", ")", "ylower", "=", "self", ".", "list_spectrails", "[", "0", "]", "(", "xdum", ")", "ax", ".", "plot", "(", "xdum", ",", "ylower", ",", "'b-'", ")", "ymiddle", "=", "self", ".", "list_spectrails", "[", "1", "]", "(", "xdum", ")", "ax", ".", "plot", "(", "xdum", ",", "ymiddle", ",", "'b--'", ")", "yupper", "=", "self", ".", "list_spectrails", "[", "2", "]", "(", "xdum", ")", "ax", ".", "plot", "(", "xdum", ",", "yupper", ",", "'b-'", ")", "ylower_frontier", "=", "self", ".", "list_frontiers", "[", "0", "]", "(", "xdum", ")", "ax", ".", "plot", "(", "xdum", ",", "ylower_frontier", ",", "'b:'", ")", "yupper_frontier", "=", "self", ".", "list_frontiers", "[", "1", "]", "(", "xdum", ")", "ax", ".", "plot", "(", "xdum", ",", "yupper_frontier", ",", "'b:'", ")", "pause_debugplot", "(", "debugplot", "=", "self", ".", "debugplot", ",", "pltshow", "=", "True", ")" ]
Display unrectified image with spectrails and frontiers.

Parameters
----------
slitlet2d : numpy array
    Array containing the unrectified slitlet image.
[ "Display", "unrectified", "image", "with", "spectrails", "and", "frontiers", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/wavecal/slitlet2d.py#L406-L431
guaix-ucm/pyemir
emirdrp/processing/wavecal/slitlet2d.py
Slitlet2D.ximshow_rectified
def ximshow_rectified(self, slitlet2d_rect):
    """Display rectified image with spectrails and frontiers.

    Parameters
    ----------
    slitlet2d_rect : numpy array
        Array containing the rectified slitlet image.

    """
    title = "Slitlet#" + str(self.islitlet) + " (rectify)"
    ax = ximshow(slitlet2d_rect, title=title,
                 first_pixel=(self.bb_nc1_orig, self.bb_ns1_orig),
                 show=False)
    # grid with fitted transformation: spectrum trails
    xx = np.arange(0, self.bb_nc2_orig - self.bb_nc1_orig + 1,
                   dtype=np.float)
    for spectrail in self.list_spectrails:
        yy0 = self.corr_yrect_a + \
              self.corr_yrect_b * spectrail(self.x0_reference)
        yy = np.tile([yy0 - self.bb_ns1_orig], xx.size)
        ax.plot(xx + self.bb_nc1_orig, yy + self.bb_ns1_orig, "b")
    for spectrail in self.list_frontiers:
        yy0 = self.corr_yrect_a + \
              self.corr_yrect_b * spectrail(self.x0_reference)
        yy = np.tile([yy0 - self.bb_ns1_orig], xx.size)
        ax.plot(xx + self.bb_nc1_orig, yy + self.bb_ns1_orig, "b:")
    # show plot
    pause_debugplot(self.debugplot, pltshow=True)
python
def ximshow_rectified(self, slitlet2d_rect):
    """Display rectified image with spectrails and frontiers.

    Parameters
    ----------
    slitlet2d_rect : numpy array
        Array containing the rectified slitlet image.

    """
    title = "Slitlet#" + str(self.islitlet) + " (rectify)"
    ax = ximshow(slitlet2d_rect, title=title,
                 first_pixel=(self.bb_nc1_orig, self.bb_ns1_orig),
                 show=False)
    # grid with fitted transformation: spectrum trails
    xx = np.arange(0, self.bb_nc2_orig - self.bb_nc1_orig + 1,
                   dtype=np.float)
    for spectrail in self.list_spectrails:
        yy0 = self.corr_yrect_a + \
              self.corr_yrect_b * spectrail(self.x0_reference)
        yy = np.tile([yy0 - self.bb_ns1_orig], xx.size)
        ax.plot(xx + self.bb_nc1_orig, yy + self.bb_ns1_orig, "b")
    for spectrail in self.list_frontiers:
        yy0 = self.corr_yrect_a + \
              self.corr_yrect_b * spectrail(self.x0_reference)
        yy = np.tile([yy0 - self.bb_ns1_orig], xx.size)
        ax.plot(xx + self.bb_nc1_orig, yy + self.bb_ns1_orig, "b:")
    # show plot
    pause_debugplot(self.debugplot, pltshow=True)
[ "def", "ximshow_rectified", "(", "self", ",", "slitlet2d_rect", ")", ":", "title", "=", "\"Slitlet#\"", "+", "str", "(", "self", ".", "islitlet", ")", "+", "\" (rectify)\"", "ax", "=", "ximshow", "(", "slitlet2d_rect", ",", "title", "=", "title", ",", "first_pixel", "=", "(", "self", ".", "bb_nc1_orig", ",", "self", ".", "bb_ns1_orig", ")", ",", "show", "=", "False", ")", "# grid with fitted transformation: spectrum trails", "xx", "=", "np", ".", "arange", "(", "0", ",", "self", ".", "bb_nc2_orig", "-", "self", ".", "bb_nc1_orig", "+", "1", ",", "dtype", "=", "np", ".", "float", ")", "for", "spectrail", "in", "self", ".", "list_spectrails", ":", "yy0", "=", "self", ".", "corr_yrect_a", "+", "self", ".", "corr_yrect_b", "*", "spectrail", "(", "self", ".", "x0_reference", ")", "yy", "=", "np", ".", "tile", "(", "[", "yy0", "-", "self", ".", "bb_ns1_orig", "]", ",", "xx", ".", "size", ")", "ax", ".", "plot", "(", "xx", "+", "self", ".", "bb_nc1_orig", ",", "yy", "+", "self", ".", "bb_ns1_orig", ",", "\"b\"", ")", "for", "spectrail", "in", "self", ".", "list_frontiers", ":", "yy0", "=", "self", ".", "corr_yrect_a", "+", "self", ".", "corr_yrect_b", "*", "spectrail", "(", "self", ".", "x0_reference", ")", "yy", "=", "np", ".", "tile", "(", "[", "yy0", "-", "self", ".", "bb_ns1_orig", "]", ",", "xx", ".", "size", ")", "ax", ".", "plot", "(", "xx", "+", "self", ".", "bb_nc1_orig", ",", "yy", "+", "self", ".", "bb_ns1_orig", ",", "\"b:\"", ")", "# show plot", "pause_debugplot", "(", "self", ".", "debugplot", ",", "pltshow", "=", "True", ")" ]
Display rectified image with spectrails and frontiers.

Parameters
----------
slitlet2d_rect : numpy array
    Array containing the rectified slitlet image.
[ "Display", "rectified", "image", "with", "spectrails", "and", "frontiers", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/wavecal/slitlet2d.py#L433-L461
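Both display helpers are driven by the instance's debugplot attribute; a sketch, where 12 is only an illustrative code whose non-zero last digit enables plotting (the valid codes are defined in numina.array.display.pause_debugplot).

slt.debugplot = 12
slt.ximshow_unrectified(slitlet2d)      # overlays spectrails and frontiers
slt.ximshow_rectified(slitlet2d_rect)   # overlays the fitted trails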
IdentityPython/oidcendpoint
src/oidcendpoint/jwt_token.py
JWTToken.info
def info(self, token):
    """
    Return type of Token (A=Access code, T=Token, R=Refresh token)
    and the session id.

    :param token: A token
    :return: tuple of session id and token type
    """
    verifier = JWT(key_jar=self.key_jar, allowed_sign_algs=[self.alg])
    _payload = verifier.unpack(token)
    if is_expired(_payload['exp']):
        raise ToOld('Token has expired')
    return _payload['sid'], _payload['ttype']
python
def info(self, token):
    """
    Return type of Token (A=Access code, T=Token, R=Refresh token)
    and the session id.

    :param token: A token
    :return: tuple of session id and token type
    """
    verifier = JWT(key_jar=self.key_jar, allowed_sign_algs=[self.alg])
    _payload = verifier.unpack(token)
    if is_expired(_payload['exp']):
        raise ToOld('Token has expired')
    return _payload['sid'], _payload['ttype']
[ "def", "info", "(", "self", ",", "token", ")", ":", "verifier", "=", "JWT", "(", "key_jar", "=", "self", ".", "key_jar", ",", "allowed_sign_algs", "=", "[", "self", ".", "alg", "]", ")", "_payload", "=", "verifier", ".", "unpack", "(", "token", ")", "if", "is_expired", "(", "_payload", "[", "'exp'", "]", ")", ":", "raise", "ToOld", "(", "'Token has expired'", ")", "return", "_payload", "[", "'sid'", "]", ",", "_payload", "[", "'ttype'", "]" ]
Return type of Token (A=Access code, T=Token, R=Refresh token)
and the session id.

:param token: A token
:return: tuple of session id and token type
[ "Return", "type", "of", "Token", "(", "A", "=", "Access", "code", "T", "=", "Token", "R", "=", "Refresh", "token", ")", "and", "the", "session", "id", "." ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/jwt_token.py#L53-L65
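A usage sketch, assuming token_handler is a configured JWTToken instance; the 'A'/'T'/'R' codes come from the docstring above.

sid, ttype = token_handler.info(token)
if ttype == 'A':
    ...   # access code path
elif ttype == 'R':
    ...   # refresh token path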
IdentityPython/oidcendpoint
src/oidcendpoint/jwt_token.py
JWTToken.is_expired
def is_expired(self, token, when=0):
    """
    Evaluate whether the token has expired or not

    :param token: The token
    :param when: The time against which to check the expiration.
        0 means now.
    :return: True/False
    """
    verifier = JWT(key_jar=self.key_jar, allowed_sign_algs=[self.alg])
    _payload = verifier.unpack(token)
    return is_expired(_payload['exp'], when)
python
def is_expired(self, token, when=0):
    """
    Evaluate whether the token has expired or not

    :param token: The token
    :param when: The time against which to check the expiration.
        0 means now.
    :return: True/False
    """
    verifier = JWT(key_jar=self.key_jar, allowed_sign_algs=[self.alg])
    _payload = verifier.unpack(token)
    return is_expired(_payload['exp'], when)
[ "def", "is_expired", "(", "self", ",", "token", ",", "when", "=", "0", ")", ":", "verifier", "=", "JWT", "(", "key_jar", "=", "self", ".", "key_jar", ",", "allowed_sign_algs", "=", "[", "self", ".", "alg", "]", ")", "_payload", "=", "verifier", ".", "unpack", "(", "token", ")", "return", "is_expired", "(", "_payload", "[", "'exp'", "]", ",", "when", ")" ]
Evaluate whether the token has expired or not

:param token: The token
:param when: The time against which to check the expiration.
    0 means now.
:return: True/False
[ "Evaluate", "whether", "the", "token", "has", "expired", "or", "not" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/jwt_token.py#L67-L78
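A sketch of both call forms; treating when as an epoch timestamp is an assumption based on the '0 means now' default.

import time

if token_handler.is_expired(token):       # check against now
    ...                                   # reject the token
still_valid_soon = not token_handler.is_expired(
    token, when=time.time() + 300)        # five minutes ahead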
Fuyukai/asyncwebsockets
asyncwebsockets/client.py
open_websocket
async def open_websocket(url: str, headers: Optional[list] = None,
                         subprotocols: Optional[list] = None):
    """
    Opens a websocket.
    """
    ws = await create_websocket(
        url, headers=headers, subprotocols=subprotocols)
    try:
        yield ws
    finally:
        await ws.close()
python
async def open_websocket(url: str, headers: Optional[list] = None,
                         subprotocols: Optional[list] = None):
    """
    Opens a websocket.
    """
    ws = await create_websocket(
        url, headers=headers, subprotocols=subprotocols)
    try:
        yield ws
    finally:
        await ws.close()
[ "async", "def", "open_websocket", "(", "url", ":", "str", ",", "headers", ":", "Optional", "[", "list", "]", "=", "None", ",", "subprotocols", ":", "Optional", "[", "list", "]", "=", "None", ")", ":", "ws", "=", "await", "create_websocket", "(", "url", ",", "headers", "=", "headers", ",", "subprotocols", "=", "subprotocols", ")", "try", ":", "yield", "ws", "finally", ":", "await", "ws", ".", "close", "(", ")" ]
Opens a websocket.
[ "Opens", "a", "websocket", "." ]
train
https://github.com/Fuyukai/asyncwebsockets/blob/e33e75fd51ce5ae0feac244e8407d2672c5b4745/asyncwebsockets/client.py#L20-L31
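A usage sketch; the URL is hypothetical, and the send() call and async iteration are assumptions about the returned Websocket object's interface.

import anyio

async def main():
    async with open_websocket('wss://echo.example.invalid') as ws:
        await ws.send('ping')
        async for event in ws:
            break

anyio.run(main)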
Fuyukai/asyncwebsockets
asyncwebsockets/client.py
create_websocket
async def create_websocket(url: str, ssl: Optional[SSLContext] = None,
                           headers: Optional[list] = None,
                           subprotocols: Optional[list] = None):
    """
    A more low-level form of open_websocket.

    You are responsible for closing this websocket.
    """
    url = yarl.URL(url)
    args = {}
    if headers:
        args["headers"] = headers

    # automatically use ssl if it's websocket secure
    if ssl is None:
        ssl = url.scheme == "wss"
    if ssl:
        if ssl is True:
            ssl = SSLContext()
        args["ssl_context"] = ssl
        args["autostart_tls"] = True
        args["tls_standard_compatible"] = False

    addr = (url.host, int(url.port))
    ws = Websocket()
    await ws.__ainit__(
        addr=addr, path=url.path_qs, subprotocols=subprotocols, **args)
    return ws
python
async def create_websocket(url: str, ssl: Optional[SSLContext] = None,
                           headers: Optional[list] = None,
                           subprotocols: Optional[list] = None):
    """
    A more low-level form of open_websocket.

    You are responsible for closing this websocket.
    """
    url = yarl.URL(url)
    args = {}
    if headers:
        args["headers"] = headers

    # automatically use ssl if it's websocket secure
    if ssl is None:
        ssl = url.scheme == "wss"
    if ssl:
        if ssl is True:
            ssl = SSLContext()
        args["ssl_context"] = ssl
        args["autostart_tls"] = True
        args["tls_standard_compatible"] = False

    addr = (url.host, int(url.port))
    ws = Websocket()
    await ws.__ainit__(
        addr=addr, path=url.path_qs, subprotocols=subprotocols, **args)
    return ws
[ "async", "def", "create_websocket", "(", "url", ":", "str", ",", "ssl", ":", "Optional", "[", "SSLContext", "]", "=", "None", ",", "headers", ":", "Optional", "[", "list", "]", "=", "None", ",", "subprotocols", ":", "Optional", "[", "list", "]", "=", "None", ")", ":", "url", "=", "yarl", ".", "URL", "(", "url", ")", "args", "=", "{", "}", "if", "headers", ":", "args", "[", "\"headers\"", "]", "=", "headers", "# automatically use ssl if it's websocket secure", "if", "ssl", "is", "None", ":", "ssl", "=", "url", ".", "scheme", "==", "\"wss\"", "if", "ssl", ":", "if", "ssl", "is", "True", ":", "ssl", "=", "SSLContext", "(", ")", "args", "[", "\"ssl_context\"", "]", "=", "ssl", "args", "[", "\"autostart_tls\"", "]", "=", "True", "args", "[", "\"tls_standard_compatible\"", "]", "=", "False", "addr", "=", "(", "url", ".", "host", ",", "int", "(", "url", ".", "port", ")", ")", "ws", "=", "Websocket", "(", ")", "await", "ws", ".", "__ainit__", "(", "addr", "=", "addr", ",", "path", "=", "url", ".", "path_qs", ",", "subprotocols", "=", "subprotocols", ",", "*", "*", "args", ")", "return", "ws" ]
A more low-level form of open_websocket. You are responsible for closing this websocket.
[ "A", "more", "low", "-", "level", "form", "of", "open_websocket", ".", "You", "are", "responsible", "for", "closing", "this", "websocket", "." ]
train
https://github.com/Fuyukai/asyncwebsockets/blob/e33e75fd51ce5ae0feac244e8407d2672c5b4745/asyncwebsockets/client.py#L34-L61
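Since the docstring makes the caller responsible for closing, a sketch of the manual lifetime (hypothetical URL, assumed send() method):

async def fetch_once():
    ws = await create_websocket('wss://echo.example.invalid')
    try:
        await ws.send('ping')
    finally:
        await ws.close()   # the caller owns the lifetime here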
Fuyukai/asyncwebsockets
asyncwebsockets/client.py
open_websocket_client
async def open_websocket_client(sock: anyio.abc.SocketStream, addr,
                                path: str,
                                headers: Optional[list] = None,
                                subprotocols: Optional[list] = None):
    """Create a websocket on top of a socket."""
    ws = await create_websocket_client(
        sock, addr=addr, path=path, headers=headers,
        subprotocols=subprotocols)
    try:
        yield ws
    finally:
        await ws.close()
python
async def open_websocket_client(sock: anyio.abc.SocketStream, addr,
                                path: str,
                                headers: Optional[list] = None,
                                subprotocols: Optional[list] = None):
    """Create a websocket on top of a socket."""
    ws = await create_websocket_client(
        sock, addr=addr, path=path, headers=headers,
        subprotocols=subprotocols)
    try:
        yield ws
    finally:
        await ws.close()
[ "async", "def", "open_websocket_client", "(", "sock", ":", "anyio", ".", "abc", ".", "SocketStream", ",", "addr", ",", "path", ":", "str", ",", "headers", ":", "Optional", "[", "list", "]", "=", "None", ",", "subprotocols", ":", "Optional", "[", "list", "]", "=", "None", ")", ":", "ws", "=", "await", "create_websocket_client", "(", "sock", ",", "addr", "=", "addr", ",", "path", "=", "path", ",", "headers", "=", "headers", ",", "subprotocols", "=", "subprotocols", ")", "try", ":", "yield", "ws", "finally", ":", "await", "ws", ".", "close", "(", ")" ]
Create a websocket on top of a socket.
[ "Create", "a", "websocket", "on", "top", "of", "a", "socket", "." ]
train
https://github.com/Fuyukai/asyncwebsockets/blob/e33e75fd51ce5ae0feac244e8407d2672c5b4745/asyncwebsockets/client.py#L65-L76
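A sketch of layering the websocket over an existing anyio socket; the endpoint is hypothetical and the anyio.connect_tcp call is an assumption about how the stream was obtained.

import anyio

async def over_existing_socket():
    sock = await anyio.connect_tcp('example.invalid', 80)
    async with open_websocket_client(sock, ('example.invalid', 80), '/ws') as ws:
        ...   # use ws; it is closed on exit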
Fuyukai/asyncwebsockets
asyncwebsockets/client.py
create_websocket_client
async def create_websocket_client(sock: anyio.abc.SocketStream, addr,
                                  path: str,
                                  headers: Optional[List] = None,
                                  subprotocols: Optional[List[str]] = None):
    """
    A more low-level form of open_websocket_client.

    You are responsible for closing this websocket.
    """
    ws = Websocket()
    await ws.start_client(
        sock, addr=addr, path=path, headers=headers,
        subprotocols=subprotocols)
    return ws
python
async def create_websocket_client(sock: anyio.abc.SocketStream, addr,
                                  path: str,
                                  headers: Optional[List] = None,
                                  subprotocols: Optional[List[str]] = None):
    """
    A more low-level form of open_websocket_client.

    You are responsible for closing this websocket.
    """
    ws = Websocket()
    await ws.start_client(
        sock, addr=addr, path=path, headers=headers,
        subprotocols=subprotocols)
    return ws
[ "async", "def", "create_websocket_client", "(", "sock", ":", "anyio", ".", "abc", ".", "SocketStream", ",", "addr", ",", "path", ":", "str", ",", "headers", ":", "Optional", "[", "List", "]", "=", "None", ",", "subprotocols", ":", "Optional", "[", "List", "[", "str", "]", "]", "=", "None", ")", ":", "ws", "=", "Websocket", "(", ")", "await", "ws", ".", "start_client", "(", "sock", ",", "addr", "=", "addr", ",", "path", "=", "path", ",", "headers", "=", "headers", ",", "subprotocols", "=", "subprotocols", ")", "return", "ws" ]
A more low-level form of open_websocket_client. You are responsible for closing this websocket.
[ "A", "more", "low", "-", "level", "form", "of", "create_websocket_client", ".", "You", "are", "responsible", "for", "closing", "this", "websocket", "." ]
train
https://github.com/Fuyukai/asyncwebsockets/blob/e33e75fd51ce5ae0feac244e8407d2672c5b4745/asyncwebsockets/client.py#L79-L91
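The manual counterpart of the previous sketch; sock is an already-connected anyio stream, and the path and subprotocol are illustrative.

async def manual_client(sock):
    ws = await create_websocket_client(sock, ('example.invalid', 80), '/chat',
                                       subprotocols=['chat'])
    try:
        ...   # use ws
    finally:
        await ws.close()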
BeyondTheClouds/enoslib
docs/tutorials/grid5000/virt/tuto_grid5000_virt.py
range_mac
def range_mac(mac_start, mac_end, step=1):
    """Iterate over mac addresses (given as string)."""
    start = int(EUI(mac_start))
    end = int(EUI(mac_end))
    for i_mac in range(start, end, step):
        mac = EUI(int(EUI(i_mac)) + 1)
        ip = ['10'] + [str(int(i, 2)) for i in mac.bits().split('-')[-3:]]
        yield str(mac).replace('-', ':'), '.'.join(ip)
python
def range_mac(mac_start, mac_end, step=1):
    """Iterate over mac addresses (given as string)."""
    start = int(EUI(mac_start))
    end = int(EUI(mac_end))
    for i_mac in range(start, end, step):
        mac = EUI(int(EUI(i_mac)) + 1)
        ip = ['10'] + [str(int(i, 2)) for i in mac.bits().split('-')[-3:]]
        yield str(mac).replace('-', ':'), '.'.join(ip)
[ "def", "range_mac", "(", "mac_start", ",", "mac_end", ",", "step", "=", "1", ")", ":", "start", "=", "int", "(", "EUI", "(", "mac_start", ")", ")", "end", "=", "int", "(", "EUI", "(", "mac_end", ")", ")", "for", "i_mac", "in", "range", "(", "start", ",", "end", ",", "step", ")", ":", "mac", "=", "EUI", "(", "int", "(", "EUI", "(", "i_mac", ")", ")", "+", "1", ")", "ip", "=", "[", "'10'", "]", "+", "[", "str", "(", "int", "(", "i", ",", "2", ")", ")", "for", "i", "in", "mac", ".", "bits", "(", ")", ".", "split", "(", "'-'", ")", "[", "-", "3", ":", "]", "]", "yield", "str", "(", "mac", ")", ".", "replace", "(", "'-'", ",", "':'", ")", ",", "'.'", ".", "join", "(", "ip", ")" ]
Iterate over mac addresses (given as string).
[ "Iterate", "over", "mac", "addresses", "(", "given", "as", "string", ")", "." ]
train
https://github.com/BeyondTheClouds/enoslib/blob/fb00be58e56a7848cfe482187d659744919fe2f7/docs/tutorials/grid5000/virt/tuto_grid5000_virt.py#L16-L23
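A worked example; the MAC range is illustrative. Note that the generator yields the address after i_mac (the int(EUI(i_mac)) + 1 step), and each IP is a fixed '10.' prefix followed by the last three MAC octets in decimal.

for mac, ip in range_mac('00:16:3e:00:00:00', '00:16:3e:00:00:03'):
    print(mac, ip)
# yields pairs in the style of ('00:16:3E:00:00:01', '10.0.0.1'),
# ('00:16:3E:00:00:02', '10.0.0.2'), ('00:16:3E:00:00:03', '10.0.0.3')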
meraki-analytics/cassiopeia-datastores
cassiopeia-sqlstore/cassiopeia_sqlstore/SQLStore.py
SQLStore._one
def _one(self, query):
    """Gets one row from the query. Raises NotFoundError if there isn't a row or if there are multiple rows"""
    try:
        result = query.one()
        if result.has_expired(self._expirations):
            raise NotFoundError
        return result
    except (NoResultFound, MultipleResultsFound):
        raise NotFoundError
python
def _one(self, query):
    """Gets one row from the query. Raises NotFoundError if there isn't a row or if there are multiple rows"""
    try:
        result = query.one()
        if result.has_expired(self._expirations):
            raise NotFoundError
        return result
    except (NoResultFound, MultipleResultsFound):
        raise NotFoundError
[ "def", "_one", "(", "self", ",", "query", ")", ":", "try", ":", "result", "=", "query", ".", "one", "(", ")", "if", "result", ".", "has_expired", "(", "self", ".", "_expirations", ")", ":", "raise", "NotFoundError", "return", "result", "except", "(", "NoResultFound", ",", "MultipleResultsFound", ")", ":", "raise", "NotFoundError" ]
Gets one row from the query. Raises NotFoundError if there isn't a row or if there are multiple rows
[ "Gets", "one", "row", "from", "the", "query", ".", "Raises", "NotFoundError", "if", "there", "isn", "t", "a", "row", "or", "if", "there", "are", "multiple", "rows" ]
train
https://github.com/meraki-analytics/cassiopeia-datastores/blob/1919b79b8b036d48818eb648e712df41f8a1299c/cassiopeia-sqlstore/cassiopeia_sqlstore/SQLStore.py#L112-L120
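A hypothetical internal call site for the helper above; SQLSummoner and the filter column are assumptions, and the session access mirrors how _put uses self._session().

summoner_row = self._one(
    self._session().query(SQLSummoner).filter_by(id=summoner_id)
)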
meraki-analytics/cassiopeia-datastores
cassiopeia-sqlstore/cassiopeia_sqlstore/SQLStore.py
SQLStore._first
def _first(self, query):
    """Gets the first row of the query. Raises NotFoundError if there isn't a row"""
    result = query.first()
    if result is None:
        raise NotFoundError
    else:
        if result.has_expired(self._expirations):
            raise NotFoundError
        return result
python
def _first(self, query):
    """Gets the first row of the query. Raises NotFoundError if there isn't a row"""
    result = query.first()
    if result is None:
        raise NotFoundError
    else:
        if result.has_expired(self._expirations):
            raise NotFoundError
        return result
[ "def", "_first", "(", "self", ",", "query", ")", ":", "result", "=", "query", ".", "first", "(", ")", "if", "result", "is", "None", ":", "raise", "NotFoundError", "else", ":", "if", "result", ".", "has_expired", "(", "self", ".", "_expirations", ")", ":", "raise", "NotFoundError", "return", "result" ]
Gets the first row of the query. Raises NotFoundError if there isn't a row
[ "Gets", "the", "first", "row", "of", "the", "query", ".", "Raises", "NotFoundError", "if", "there", "isn", "t", "a", "row" ]
train
https://github.com/meraki-analytics/cassiopeia-datastores/blob/1919b79b8b036d48818eb648e712df41f8a1299c/cassiopeia-sqlstore/cassiopeia_sqlstore/SQLStore.py#L122-L130
meraki-analytics/cassiopeia-datastores
cassiopeia-sqlstore/cassiopeia_sqlstore/SQLStore.py
SQLStore._all
def _all(self, query):
    """Gets all rows of the query. Raises a NotFoundError if there are 0 rows"""
    if query.count() > 0:
        results = query.all()
        for result in results:
            if result.has_expired(self._expirations):
                raise NotFoundError
        return results
    else:
        raise NotFoundError
python
def _all(self, query):
    """Gets all rows of the query. Raises a NotFoundError if there are 0 rows"""
    if query.count() > 0:
        results = query.all()
        for result in results:
            if result.has_expired(self._expirations):
                raise NotFoundError
        return results
    else:
        raise NotFoundError
[ "def", "_all", "(", "self", ",", "query", ")", ":", "if", "query", ".", "count", "(", ")", ">", "0", ":", "results", "=", "query", ".", "all", "(", ")", "for", "result", "in", "results", ":", "if", "result", ".", "has_expired", "(", "self", ".", "_expirations", ")", ":", "raise", "NotFoundError", "return", "results", "else", ":", "raise", "NotFoundError" ]
Gets all rows of the query. Raises a NotFoundError if there are 0 rows
[ "Gets", "all", "rows", "of", "the", "query", ".", "Raises", "a", "NotFoundError", "if", "there", "are", "0", "rows" ]
train
https://github.com/meraki-analytics/cassiopeia-datastores/blob/1919b79b8b036d48818eb648e712df41f8a1299c/cassiopeia-sqlstore/cassiopeia_sqlstore/SQLStore.py#L132-L141
meraki-analytics/cassiopeia-datastores
cassiopeia-sqlstore/cassiopeia_sqlstore/SQLStore.py
SQLStore._put
def _put(self, item: SQLBaseObject):
    """Puts an item into the database. Updates lastUpdate column"""
    if item._dto_type in self._expirations and self._expirations[item._dto_type] == 0:
        # The expiration time has been set to 0 -> should not be cached
        return
    item.updated()
    self._session().merge(item)
python
def _put(self, item: SQLBaseObject):
    """Puts an item into the database. Updates lastUpdate column"""
    if item._dto_type in self._expirations and self._expirations[item._dto_type] == 0:
        # The expiration time has been set to 0 -> should not be cached
        return
    item.updated()
    self._session().merge(item)
[ "def", "_put", "(", "self", ",", "item", ":", "SQLBaseObject", ")", ":", "if", "item", ".", "_dto_type", "in", "self", ".", "_expirations", "and", "self", ".", "_expirations", "[", "item", ".", "_dto_type", "]", "==", "0", ":", "# The expiration time has been set to 0 -> shoud not be cached", "return", "item", ".", "updated", "(", ")", "self", ".", "_session", "(", ")", ".", "merge", "(", "item", ")" ]
Puts an item into the database. Updates lastUpdate column
[ "Puts", "a", "item", "into", "the", "database", ".", "Updates", "lastUpdate", "column" ]
train
https://github.com/meraki-analytics/cassiopeia-datastores/blob/1919b79b8b036d48818eb648e712df41f8a1299c/cassiopeia-sqlstore/cassiopeia_sqlstore/SQLStore.py#L144-L150
meraki-analytics/cassiopeia-datastores
cassiopeia-sqlstore/cassiopeia_sqlstore/SQLStore.py
SQLStore._put_many
def _put_many(self, items: Iterable[DtoObject], cls):
    """Puts many items into the database. Updates lastUpdate column for each of them"""
    if cls._dto_type in self._expirations and self._expirations[cls._dto_type] == 0:
        # The expiration time has been set to 0 -> should not be cached
        return
    session = self._session
    for item in items:
        item = cls(**item)
        item.updated()
        session.merge(item)
python
def _put_many(self, items: Iterable[DtoObject], cls): """Puts many items into the database. Updates lastUpdate column for each of them""" if cls._dto_type in self._expirations and self._expirations[cls._dto_type] == 0: # The expiration time has been set to 0 -> should not be cached return session = self._session for item in items: item = cls(**item) item.updated() session.merge(item)
[ "def", "_put_many", "(", "self", ",", "items", ":", "Iterable", "[", "DtoObject", "]", ",", "cls", ")", ":", "if", "cls", ".", "_dto_type", "in", "self", ".", "_expirations", "and", "self", ".", "_expirations", "[", "cls", ".", "_dto_type", "]", "==", "0", ":", "# The expiration time has been set to 0 -> should not be cached", "return", "session", "=", "self", ".", "_session", "for", "item", "in", "items", ":", "item", "=", "cls", "(", "*", "*", "item", ")", "item", ".", "updated", "(", ")", "session", ".", "merge", "(", "item", ")" ]
Puts many items into the database. Updates lastUpdate column for each of them
[ "Puts", "many", "items", "into", "the", "database", ".", "Updates", "lastUpdate", "column", "for", "each", "of", "them" ]
train
https://github.com/meraki-analytics/cassiopeia-datastores/blob/1919b79b8b036d48818eb648e712df41f8a1299c/cassiopeia-sqlstore/cassiopeia_sqlstore/SQLStore.py#L153-L162
IdentityPython/oidcendpoint
src/oidcendpoint/user_info/__init__.py
UserInfo.filter
def filter(self, userinfo, user_info_claims=None): """ Return only those claims that are asked for. It's a best effort task; if essential claims are not present no error is flagged. :param userinfo: A dictionary containing the available info for one user :param user_info_claims: A dictionary specifying the asked for claims :return: A dictionary of filtered claims. """ if user_info_claims is None: return copy.copy(userinfo) else: result = {} missing = [] optional = [] for key, restr in user_info_claims.items(): try: result[key] = userinfo[key] except KeyError: if restr == {"essential": True}: missing.append(key) else: optional.append(key) return result
python
def filter(self, userinfo, user_info_claims=None): """ Return only those claims that are asked for. It's a best effort task; if essential claims are not present no error is flagged. :param userinfo: A dictionary containing the available info for one user :param user_info_claims: A dictionary specifying the asked for claims :return: A dictionary of filtered claims. """ if user_info_claims is None: return copy.copy(userinfo) else: result = {} missing = [] optional = [] for key, restr in user_info_claims.items(): try: result[key] = userinfo[key] except KeyError: if restr == {"essential": True}: missing.append(key) else: optional.append(key) return result
[ "def", "filter", "(", "self", ",", "userinfo", ",", "user_info_claims", "=", "None", ")", ":", "if", "user_info_claims", "is", "None", ":", "return", "copy", ".", "copy", "(", "userinfo", ")", "else", ":", "result", "=", "{", "}", "missing", "=", "[", "]", "optional", "=", "[", "]", "for", "key", ",", "restr", "in", "user_info_claims", ".", "items", "(", ")", ":", "try", ":", "result", "[", "key", "]", "=", "userinfo", "[", "key", "]", "except", "KeyError", ":", "if", "restr", "==", "{", "\"essential\"", ":", "True", "}", ":", "missing", ".", "append", "(", "key", ")", "else", ":", "optional", ".", "append", "(", "key", ")", "return", "result" ]
Return only those claims that are asked for. It's a best effort task; if essential claims are not present no error is flagged. :param userinfo: A dictionary containing the available info for one user :param user_info_claims: A dictionary specifying the asked for claims :return: A dictionary of filtered claims.
[ "Return", "only", "those", "claims", "that", "are", "asked", "for", ".", "It", "s", "a", "best", "effort", "task", ";", "if", "essential", "claims", "are", "not", "present", "no", "error", "is", "flagged", "." ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/user_info/__init__.py#L41-L66
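The filtering contract above is easy to exercise with plain dicts. This standalone sketch restates the keep-what-you-have logic; `userinfo` and `claims` are made-up sample data, not part of the oidcendpoint API.

```python
import copy

def filter_claims(userinfo, user_info_claims=None):
    # No claims requested: hand back a shallow copy of everything.
    if user_info_claims is None:
        return copy.copy(userinfo)
    result = {}
    for key in user_info_claims:
        if key in userinfo:
            result[key] = userinfo[key]
        # Missing claims are skipped silently, even essential ones,
        # matching the best-effort contract of UserInfo.filter.
    return result

userinfo = {"sub": "alice", "email": "alice@example.com", "age": 42}
claims = {"email": None, "phone_number": {"essential": True}}
print(filter_claims(userinfo, claims))  # {'email': 'alice@example.com'}
print(filter_claims(userinfo))          # full copy when nothing is asked for
```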
volafiled/python-volapi
volapi/chat.py
ChatMessage.from_data
def from_data(room, conn, data): """Construct a ChatMessage instance from raw protocol data""" files = list() rooms = dict() msg = str() for part in data["message"]: ptype = part["type"] if ptype == "text": val = part["value"] msg += val elif ptype == "break": msg += "\n" elif ptype == "file": fileid = part["id"] fileobj = room.filedict.get(fileid) if fileobj: files += (fileobj,) fileid = f"@{fileid}" msg += fileid elif ptype == "room": roomid = part["id"] rooms[roomid] = part["name"] roomid = f"#{roomid}" msg += roomid elif ptype == "url": msg += part["text"] elif ptype == "raw": msg += html_to_text(part["value"]) else: import warnings warnings.warn(f"unknown message type '{ptype}'", Warning) nick = data.get("nick") or data.get("user") options = data.get("options", dict()) data = data.get("data", dict()) message = ChatMessage( room, conn, nick, msg, roles=Roles.from_options(options), options=options, data=data, files=files, rooms=rooms, ) return message
python
def from_data(room, conn, data): """Construct a ChatMessage instance from raw protocol data""" files = list() rooms = dict() msg = str() for part in data["message"]: ptype = part["type"] if ptype == "text": val = part["value"] msg += val elif ptype == "break": msg += "\n" elif ptype == "file": fileid = part["id"] fileobj = room.filedict.get(fileid) if fileobj: files += (fileobj,) fileid = f"@{fileid}" msg += fileid elif ptype == "room": roomid = part["id"] rooms[roomid] = part["name"] roomid = f"#{roomid}" msg += roomid elif ptype == "url": msg += part["text"] elif ptype == "raw": msg += html_to_text(part["value"]) else: import warnings warnings.warn(f"unknown message type '{ptype}'", Warning) nick = data.get("nick") or data.get("user") options = data.get("options", dict()) data = data.get("data", dict()) message = ChatMessage( room, conn, nick, msg, roles=Roles.from_options(options), options=options, data=data, files=files, rooms=rooms, ) return message
[ "def", "from_data", "(", "room", ",", "conn", ",", "data", ")", ":", "files", "=", "list", "(", ")", "rooms", "=", "dict", "(", ")", "msg", "=", "str", "(", ")", "for", "part", "in", "data", "[", "\"message\"", "]", ":", "ptype", "=", "part", "[", "\"type\"", "]", "if", "ptype", "==", "\"text\"", ":", "val", "=", "part", "[", "\"value\"", "]", "msg", "+=", "val", "elif", "ptype", "==", "\"break\"", ":", "msg", "+=", "\"\\n\"", "elif", "ptype", "==", "\"file\"", ":", "fileid", "=", "part", "[", "\"id\"", "]", "fileobj", "=", "room", ".", "filedict", ".", "get", "(", "fileid", ")", "if", "fileobj", ":", "files", "+=", "(", "fileobj", ",", ")", "fileid", "=", "f\"@{fileid}\"", "msg", "+=", "fileid", "elif", "ptype", "==", "\"room\"", ":", "roomid", "=", "part", "[", "\"id\"", "]", "rooms", "[", "roomid", "]", "=", "part", "[", "\"name\"", "]", "roomid", "=", "f\"#{roomid}\"", "msg", "+=", "roomid", "elif", "ptype", "==", "\"url\"", ":", "msg", "+=", "part", "[", "\"text\"", "]", "elif", "ptype", "==", "\"raw\"", ":", "msg", "+=", "html_to_text", "(", "part", "[", "\"value\"", "]", ")", "else", ":", "import", "warnings", "warnings", ".", "warn", "(", "f\"unknown message type '{ptype}'\"", ",", "Warning", ")", "nick", "=", "data", ".", "get", "(", "\"nick\"", ")", "or", "data", ".", "get", "(", "\"user\"", ")", "options", "=", "data", ".", "get", "(", "\"options\"", ",", "dict", "(", ")", ")", "data", "=", "data", ".", "get", "(", "\"data\"", ",", "dict", "(", ")", ")", "message", "=", "ChatMessage", "(", "room", ",", "conn", ",", "nick", ",", "msg", ",", "roles", "=", "Roles", ".", "from_options", "(", "options", ")", ",", "options", "=", "options", ",", "data", "=", "data", ",", "files", "=", "files", ",", "rooms", "=", "rooms", ",", ")", "return", "message" ]
Construct a ChatMessage instance from raw protocol data
[ "Construct", "a", "ChatMessage", "instance", "from", "raw", "protocol", "data" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/chat.py#L80-L129
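The part-type dispatch in `from_data` reduces to a loop over typed fragments. A toy run on a fabricated payload (this `parts` list imitates the protocol shape; it is not captured wire data):

```python
parts = [
    {"type": "text", "value": "check this out "},
    {"type": "file", "id": "abc123"},
    {"type": "break"},
    {"type": "room", "id": "mainroom", "name": "Main"},
]

msg = ""
for part in parts:
    ptype = part["type"]
    if ptype == "text":
        msg += part["value"]
    elif ptype == "break":
        msg += "\n"
    elif ptype == "file":
        msg += f"@{part['id']}"   # files render as @<id>
    elif ptype == "room":
        msg += f"#{part['id']}"   # rooms render as #<id>

print(msg)
# check this out @abc123
# #mainroom
```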
guaix-ucm/pyemir
emirdrp/recipes/acquisition/maskcheck.py
create_rot2d
def create_rot2d(angle): """Create 2D rotation matrix""" ca = math.cos(angle) sa = math.sin(angle) return np.array([[ca, -sa], [sa, ca]])
python
def create_rot2d(angle): """Create 2D rotation matrix""" ca = math.cos(angle) sa = math.sin(angle) return np.array([[ca, -sa], [sa, ca]])
[ "def", "create_rot2d", "(", "angle", ")", ":", "ca", "=", "math", ".", "cos", "(", "angle", ")", "sa", "=", "math", ".", "sin", "(", "angle", ")", "return", "np", ".", "array", "(", "[", "[", "ca", ",", "-", "sa", "]", ",", "[", "sa", ",", "ca", "]", "]", ")" ]
Create 2D rotation matrix
[ "Create", "2D", "rotation", "matrix" ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/recipes/acquisition/maskcheck.py#L47-L51
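A quick sanity check of `create_rot2d`: rotating the x unit vector by 90 degrees should yield the y unit vector (counter-clockwise convention). The function is restated inline so the snippet runs on its own.

```python
import math
import numpy as np

def create_rot2d(angle):
    ca = math.cos(angle)
    sa = math.sin(angle)
    return np.array([[ca, -sa], [sa, ca]])

rot90 = create_rot2d(math.pi / 2)
point = np.array([1.0, 0.0])
print(np.round(rot90 @ point, 12))  # [0. 1.]
```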
guaix-ucm/pyemir
emirdrp/recipes/acquisition/maskcheck.py
comp_centroid
def comp_centroid(data, bounding_box, debug_plot=False, plot_reference=None, logger=None): """Detect objects in a region and return the centroid of the brightest one""" from matplotlib.patches import Ellipse if logger is None: logger = logging.getLogger(__name__) region = bounding_box.slice ref_x = region[1].start ref_y = region[0].start logger.debug('region offset is %s, %s', ref_x, ref_y) subimage = data[region].copy() bkg = sep.Background(subimage) data_sub = subimage - bkg objects = sep.extract(data_sub, 1.5, err=bkg.globalrms) # Select brightest object logger.debug('%d objects found', len(objects)) if len(objects) == 0: # print('No objects') return None iadx = objects['flux'].argmax() # plot background-subtracted image maxflux = objects[iadx] if debug_plot: fig, ax = plt.subplots() m, s = np.mean(data_sub), np.std(data_sub) ax.imshow(data_sub, interpolation='nearest', cmap='gray', vmin=m - s, vmax=m + s, origin='lower', extent=bounding_box.extent) if plot_reference: e = Ellipse(xy=(plot_reference[0], plot_reference[1]), width=6, height=6, angle=0) e.set_facecolor('none') e.set_edgecolor('green') ax.add_artist(e) # plot an ellipse for each object for idx, obj in enumerate(objects): e = Ellipse(xy=(obj['x'] + ref_x, obj['y'] + ref_y), width=6 * obj['a'], height=6 * obj['b'], angle=obj['theta'] * 180. / np.pi) e.set_facecolor('none') if idx == iadx: e.set_edgecolor('blue') else: e.set_edgecolor('red') ax.add_artist(e) return maxflux['x'], maxflux['y'], ax else: return maxflux['x'], maxflux['y']
python
def comp_centroid(data, bounding_box, debug_plot=False, plot_reference=None, logger=None): """Detect objects in a region and return the centroid of the brightest one""" from matplotlib.patches import Ellipse if logger is None: logger = logging.getLogger(__name__) region = bounding_box.slice ref_x = region[1].start ref_y = region[0].start logger.debug('region offset is %s, %s', ref_x, ref_y) subimage = data[region].copy() bkg = sep.Background(subimage) data_sub = subimage - bkg objects = sep.extract(data_sub, 1.5, err=bkg.globalrms) # Select brightest object logger.debug('%d objects found', len(objects)) if len(objects) == 0: # print('No objects') return None iadx = objects['flux'].argmax() # plot background-subtracted image maxflux = objects[iadx] if debug_plot: fig, ax = plt.subplots() m, s = np.mean(data_sub), np.std(data_sub) ax.imshow(data_sub, interpolation='nearest', cmap='gray', vmin=m - s, vmax=m + s, origin='lower', extent=bounding_box.extent) if plot_reference: e = Ellipse(xy=(plot_reference[0], plot_reference[1]), width=6, height=6, angle=0) e.set_facecolor('none') e.set_edgecolor('green') ax.add_artist(e) # plot an ellipse for each object for idx, obj in enumerate(objects): e = Ellipse(xy=(obj['x'] + ref_x, obj['y'] + ref_y), width=6 * obj['a'], height=6 * obj['b'], angle=obj['theta'] * 180. / np.pi) e.set_facecolor('none') if idx == iadx: e.set_edgecolor('blue') else: e.set_edgecolor('red') ax.add_artist(e) return maxflux['x'], maxflux['y'], ax else: return maxflux['x'], maxflux['y']
[ "def", "comp_centroid", "(", "data", ",", "bounding_box", ",", "debug_plot", "=", "False", ",", "plot_reference", "=", "None", ",", "logger", "=", "None", ")", ":", "from", "matplotlib", ".", "patches", "import", "Ellipse", "if", "logger", "is", "None", ":", "logger", "=", "logging", ".", "getLogger", "(", "__name__", ")", "region", "=", "bounding_box", ".", "slice", "ref_x", "=", "region", "[", "1", "]", ".", "start", "ref_y", "=", "region", "[", "0", "]", ".", "start", "logger", ".", "debug", "(", "'region offset is %s, %s'", ",", "ref_x", ",", "ref_y", ")", "subimage", "=", "data", "[", "region", "]", ".", "copy", "(", ")", "bkg", "=", "sep", ".", "Background", "(", "subimage", ")", "data_sub", "=", "subimage", "-", "bkg", "objects", "=", "sep", ".", "extract", "(", "data_sub", ",", "1.5", ",", "err", "=", "bkg", ".", "globalrms", ")", "# Select brightest object", "logger", ".", "debug", "(", "'%d objects found'", ",", "len", "(", "objects", ")", ")", "if", "len", "(", "objects", ")", "==", "0", ":", "# print('No objects')", "return", "None", "iadx", "=", "objects", "[", "'flux'", "]", ".", "argmax", "(", ")", "# plot background-subtracted image", "maxflux", "=", "objects", "[", "iadx", "]", "if", "debug_plot", ":", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", ")", "m", ",", "s", "=", "np", ".", "mean", "(", "data_sub", ")", ",", "np", ".", "std", "(", "data_sub", ")", "ax", ".", "imshow", "(", "data_sub", ",", "interpolation", "=", "'nearest'", ",", "cmap", "=", "'gray'", ",", "vmin", "=", "m", "-", "s", ",", "vmax", "=", "m", "+", "s", ",", "origin", "=", "'lower'", ",", "extent", "=", "bounding_box", ".", "extent", ")", "if", "plot_reference", ":", "e", "=", "Ellipse", "(", "xy", "=", "(", "plot_reference", "[", "0", "]", ",", "plot_reference", "[", "1", "]", ")", ",", "width", "=", "6", ",", "height", "=", "6", ",", "angle", "=", "0", ")", "e", ".", "set_facecolor", "(", "'none'", ")", "e", ".", "set_edgecolor", "(", "'green'", ")", "ax", ".", "add_artist", "(", "e", ")", "# plot an ellipse for each object", "for", "idx", ",", "obj", "in", "enumerate", "(", "objects", ")", ":", "e", "=", "Ellipse", "(", "xy", "=", "(", "obj", "[", "'x'", "]", "+", "ref_x", ",", "obj", "[", "'y'", "]", "+", "ref_y", ")", ",", "width", "=", "6", "*", "obj", "[", "'a'", "]", ",", "height", "=", "6", "*", "obj", "[", "'b'", "]", ",", "angle", "=", "obj", "[", "'theta'", "]", "*", "180.", "/", "np", ".", "pi", ")", "e", ".", "set_facecolor", "(", "'none'", ")", "if", "idx", "==", "iadx", ":", "e", ".", "set_edgecolor", "(", "'blue'", ")", "else", ":", "e", ".", "set_edgecolor", "(", "'red'", ")", "ax", ".", "add_artist", "(", "e", ")", "return", "maxflux", "[", "'x'", "]", ",", "maxflux", "[", "'y'", "]", ",", "ax", "else", ":", "return", "maxflux", "[", "'x'", "]", ",", "maxflux", "[", "'y'", "]" ]
Detect objects in a region and return the centroid of the brightest one
[ "Detect", "objects", "in", "a", "region", "and", "return", "the", "centroid", "of", "the", "brightest", "one" ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/recipes/acquisition/maskcheck.py#L63-L119
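The core of `comp_centroid` (background subtraction, source extraction, picking the brightest object) can be tried on synthetic data. A hedged sketch assuming the `sep` package is installed; the Gaussian blob and its position are fabricated test inputs.

```python
import numpy as np
import sep

rng = np.random.default_rng(0)
yy, xx = np.mgrid[0:64, 0:64]
blob = 200.0 * np.exp(-((xx - 40.5) ** 2 + (yy - 22.0) ** 2) / (2 * 2.0 ** 2))
image = (blob + rng.normal(0.0, 1.0, blob.shape)).astype(np.float64)

bkg = sep.Background(image)       # estimate the sky background
data_sub = image - bkg            # sep.Background supports subtraction
objects = sep.extract(data_sub, 1.5, err=bkg.globalrms)

brightest = objects[objects['flux'].argmax()]
print(brightest['x'], brightest['y'])  # close to (40.5, 22.0)
```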
IdentityPython/oidcendpoint
src/oidcendpoint/id_token.py
IDToken.sign_encrypt
def sign_encrypt(self, session_info, client_id, code=None, access_token=None, user_info=None, sign=True, encrypt=False, extra_claims=None): """ Sign and/or encrypt an IDToken :param session_info: Session information :param client_id: Client ID :param code: Access grant :param access_token: Access Token :param user_info: User information :param sign: If the JWT should be signed :param encrypt: If the JWT should be encrypted :param extra_claims: Extra claims to be added to the ID Token :return: IDToken as a signed and/or encrypted JWT """ _cntx = self.endpoint_context client_info = _cntx.cdb[client_id] alg_dict = get_sign_and_encrypt_algorithms(_cntx, client_info, 'id_token', sign=sign, encrypt=encrypt) _authn_event = session_info['authn_event'] _idt_info = self.payload(session_info, acr=_authn_event["authn_info"], alg=alg_dict['sign_alg'], code=code, access_token=access_token, user_info=user_info, auth_time=_authn_event["authn_time"], extra_claims=extra_claims) _jwt = JWT(_cntx.keyjar, iss=_cntx.issuer, lifetime=_idt_info['lifetime'], **alg_dict) return _jwt.pack(_idt_info['payload'], recv=client_id)
python
def sign_encrypt(self, session_info, client_id, code=None, access_token=None, user_info=None, sign=True, encrypt=False, extra_claims=None): """ Sign and/or encrypt an IDToken :param session_info: Session information :param client_id: Client ID :param code: Access grant :param access_token: Access Token :param user_info: User information :param sign: If the JWT should be signed :param encrypt: If the JWT should be encrypted :param extra_claims: Extra claims to be added to the ID Token :return: IDToken as a signed and/or encrypted JWT """ _cntx = self.endpoint_context client_info = _cntx.cdb[client_id] alg_dict = get_sign_and_encrypt_algorithms(_cntx, client_info, 'id_token', sign=sign, encrypt=encrypt) _authn_event = session_info['authn_event'] _idt_info = self.payload(session_info, acr=_authn_event["authn_info"], alg=alg_dict['sign_alg'], code=code, access_token=access_token, user_info=user_info, auth_time=_authn_event["authn_time"], extra_claims=extra_claims) _jwt = JWT(_cntx.keyjar, iss=_cntx.issuer, lifetime=_idt_info['lifetime'], **alg_dict) return _jwt.pack(_idt_info['payload'], recv=client_id)
[ "def", "sign_encrypt", "(", "self", ",", "session_info", ",", "client_id", ",", "code", "=", "None", ",", "access_token", "=", "None", ",", "user_info", "=", "None", ",", "sign", "=", "True", ",", "encrypt", "=", "False", ",", "extra_claims", "=", "None", ")", ":", "_cntx", "=", "self", ".", "endpoint_context", "client_info", "=", "_cntx", ".", "cdb", "[", "client_id", "]", "alg_dict", "=", "get_sign_and_encrypt_algorithms", "(", "_cntx", ",", "client_info", ",", "'id_token'", ",", "sign", "=", "sign", ",", "encrypt", "=", "encrypt", ")", "_authn_event", "=", "session_info", "[", "'authn_event'", "]", "_idt_info", "=", "self", ".", "payload", "(", "session_info", ",", "acr", "=", "_authn_event", "[", "\"authn_info\"", "]", ",", "alg", "=", "alg_dict", "[", "'sign_alg'", "]", ",", "code", "=", "code", ",", "access_token", "=", "access_token", ",", "user_info", "=", "user_info", ",", "auth_time", "=", "_authn_event", "[", "\"authn_time\"", "]", ",", "extra_claims", "=", "extra_claims", ")", "_jwt", "=", "JWT", "(", "_cntx", ".", "keyjar", ",", "iss", "=", "_cntx", ".", "issuer", ",", "lifetime", "=", "_idt_info", "[", "'lifetime'", "]", ",", "*", "*", "alg_dict", ")", "return", "_jwt", ".", "pack", "(", "_idt_info", "[", "'payload'", "]", ",", "recv", "=", "client_id", ")" ]
Sign and/or encrypt an IDToken :param session_info: Session information :param client_id: Client ID :param code: Access grant :param access_token: Access Token :param user_info: User information :param sign: If the JWT should be signed :param encrypt: If the JWT should be encrypted :param extra_claims: Extra claims to be added to the ID Token :return: IDToken as a signed and/or encrypted JWT
[ "Sign", "and", "/", "or", "encrypt", "an", "IDToken" ]
train
https://github.com/IdentityPython/oidcendpoint/blob/6c1d729d51bfb6332816117fe476073df7a1d823/src/oidcendpoint/id_token.py#L135-L171
BreakingBytes/simkit
simkit/core/layers.py
Layer.add
def add(self, src_cls, module, package=None): """ Add layer class to model. This method may be overloaded by layer. :param src_cls: layer class to add, should not start with underscores :type src_cls: str :param module: Python module that contains layer class :type module: str :param package: optional package containing module with layer class :type package: str :raises: :exc:`~exceptions.NotImplementedError` """ # import module containing the layer class mod = importlib.import_module(module, package) # get layer class definition from the module self.sources[src_cls] = getattr(mod, src_cls)
python
def add(self, src_cls, module, package=None): """ Add layer class to model. This method may be overloaded by layer. :param src_cls: layer class to add, should not start with underscores :type src_cls: str :param module: Python module that contains layer class :type module: str :param package: optional package containing module with layer class :type package: str :raises: :exc:`~exceptions.NotImplementedError` """ # import module containing the layer class mod = importlib.import_module(module, package) # get layer class definition from the module self.sources[src_cls] = getattr(mod, src_cls)
[ "def", "add", "(", "self", ",", "src_cls", ",", "module", ",", "package", "=", "None", ")", ":", "# import module containing the layer class", "mod", "=", "importlib", ".", "import_module", "(", "module", ",", "package", ")", "# get layer class definition from the module", "self", ".", "sources", "[", "src_cls", "]", "=", "getattr", "(", "mod", ",", "src_cls", ")" ]
Add layer class to model. This method may be overloaded by layer. :param src_cls: layer class to add, should not start with underscores :type src_cls: str :param module: Python module that contains layer class :type module: str :param package: optional package containing module with layer class :type package: str :raises: :exc:`~exceptions.NotImplementedError`
[ "Add", "layer", "class", "to", "model", ".", "This", "method", "may", "be", "overloaded", "by", "layer", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/layers.py#L64-L79
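The dynamic-import idiom at the heart of `Layer.add`, shown in isolation. `collections.OrderedDict` is just a convenient stand-in for a layer-source class looked up by name.

```python
import importlib

def load_class(class_name, module, package=None):
    # Import the module by dotted name, then fetch the class from it.
    mod = importlib.import_module(module, package)
    return getattr(mod, class_name)

cls = load_class('OrderedDict', 'collections')
print(cls)       # <class 'collections.OrderedDict'>
print(cls(a=1))  # OrderedDict([('a', 1)])
```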
BreakingBytes/simkit
simkit/core/layers.py
Data.add
def add(self, data_source, module, package=None): """ Add data_source to model. Tries to import module, then looks for data source class definition. :param data_source: Name of data source to add. :type data_source: str :param module: Module in which data source resides. Can be absolute or relative. See :func:`importlib.import_module` :type module: str :param package: Optional, but must be used if module is relative. :type package: str .. seealso:: :func:`importlib.import_module` """ super(Data, self).add(data_source, module, package) # only update layer info if it is missing! if data_source not in self.layer: # copy data source parameters to :attr:`Layer.layer` self.layer[data_source] = {'module': module, 'package': package} # add a place holder for the data source object when it's constructed self.objects[data_source] = None
python
def add(self, data_source, module, package=None): """ Add data_source to model. Tries to import module, then looks for data source class definition. :param data_source: Name of data source to add. :type data_source: str :param module: Module in which data source resides. Can be absolute or relative. See :func:`importlib.import_module` :type module: str :param package: Optional, but must be used if module is relative. :type package: str .. seealso:: :func:`importlib.import_module` """ super(Data, self).add(data_source, module, package) # only update layer info if it is missing! if data_source not in self.layer: # copy data source parameters to :attr:`Layer.layer` self.layer[data_source] = {'module': module, 'package': package} # add a place holder for the data source object when it's constructed self.objects[data_source] = None
[ "def", "add", "(", "self", ",", "data_source", ",", "module", ",", "package", "=", "None", ")", ":", "super", "(", "Data", ",", "self", ")", ".", "add", "(", "data_source", ",", "module", ",", "package", ")", "# only update layer info if it is missing!", "if", "data_source", "not", "in", "self", ".", "layer", ":", "# copy data source parameters to :attr:`Layer.layer`", "self", ".", "layer", "[", "data_source", "]", "=", "{", "'module'", ":", "module", ",", "'package'", ":", "package", "}", "# add a place holder for the data source object when it's constructed", "self", ".", "objects", "[", "data_source", "]", "=", "None" ]
Add data_source to model. Tries to import module, then looks for data source class definition. :param data_source: Name of data source to add. :type data_source: str :param module: Module in which data source resides. Can be absolute or relative. See :func:`importlib.import_module` :type module: str :param package: Optional, but must be used if module is relative. :type package: str .. seealso:: :func:`importlib.import_module`
[ "Add", "data_source", "to", "model", ".", "Tries", "to", "import", "module", "then", "looks", "for", "data", "source", "class", "definition", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/layers.py#L125-L147
BreakingBytes/simkit
simkit/core/layers.py
Data.open
def open(self, data_source, *args, **kwargs): """ Open filename to get data for data_source. :param data_source: Data source for which the file contains data. :type data_source: str Positional and keyword arguments can contain either the data to use for the data source or the full path of the file which contains data for the data source. """ if self.sources[data_source]._meta.data_reader.is_file_reader: filename = kwargs.get('filename') path = kwargs.get('path', '') rel_path = kwargs.get('rel_path', '') if len(args) > 0: filename = args[0] if len(args) > 1: path = args[1] if len(args) > 2: rel_path = args[2] args = () kwargs = {'filename': os.path.join(rel_path, path, filename)} LOGGER.debug('filename: %s', kwargs['filename']) # call constructor of data source with filename argument self.objects[data_source] = self.sources[data_source](*args, **kwargs) # register data and uncertainty in registry data_src_obj = self.objects[data_source] meta = [getattr(data_src_obj, m) for m in self.reg.meta_names] self.reg.register(data_src_obj.data, *meta)
python
def open(self, data_source, *args, **kwargs): """ Open filename to get data for data_source. :param data_source: Data source for which the file contains data. :type data_source: str Positional and keyword arguments can contain either the data to use for the data source or the full path of the file which contains data for the data source. """ if self.sources[data_source]._meta.data_reader.is_file_reader: filename = kwargs.get('filename') path = kwargs.get('path', '') rel_path = kwargs.get('rel_path', '') if len(args) > 0: filename = args[0] if len(args) > 1: path = args[1] if len(args) > 2: rel_path = args[2] args = () kwargs = {'filename': os.path.join(rel_path, path, filename)} LOGGER.debug('filename: %s', kwargs['filename']) # call constructor of data source with filename argument self.objects[data_source] = self.sources[data_source](*args, **kwargs) # register data and uncertainty in registry data_src_obj = self.objects[data_source] meta = [getattr(data_src_obj, m) for m in self.reg.meta_names] self.reg.register(data_src_obj.data, *meta)
[ "def", "open", "(", "self", ",", "data_source", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "sources", "[", "data_source", "]", ".", "_meta", ".", "data_reader", ".", "is_file_reader", ":", "filename", "=", "kwargs", ".", "get", "(", "'filename'", ")", "path", "=", "kwargs", ".", "get", "(", "'path'", ",", "''", ")", "rel_path", "=", "kwargs", ".", "get", "(", "'rel_path'", ",", "''", ")", "if", "len", "(", "args", ")", ">", "0", ":", "filename", "=", "args", "[", "0", "]", "if", "len", "(", "args", ")", ">", "1", ":", "path", "=", "args", "[", "1", "]", "if", "len", "(", "args", ")", ">", "2", ":", "rel_path", "=", "args", "[", "2", "]", "args", "=", "(", ")", "kwargs", "=", "{", "'filename'", ":", "os", ".", "path", ".", "join", "(", "rel_path", ",", "path", ",", "filename", ")", "}", "LOGGER", ".", "debug", "(", "'filename: %s'", ",", "kwargs", "[", "'filename'", "]", ")", "# call constructor of data source with filename argument", "self", ".", "objects", "[", "data_source", "]", "=", "self", ".", "sources", "[", "data_source", "]", "(", "*", "args", ",", "*", "*", "kwargs", ")", "# register data and uncertainty in registry", "data_src_obj", "=", "self", ".", "objects", "[", "data_source", "]", "meta", "=", "[", "getattr", "(", "data_src_obj", ",", "m", ")", "for", "m", "in", "self", ".", "reg", ".", "meta_names", "]", "self", ".", "reg", ".", "register", "(", "data_src_obj", ".", "data", ",", "*", "meta", ")" ]
Open filename to get data for data_source. :param data_source: Data source for which the file contains data. :type data_source: str Positional and keyword arguments can contain either the data to use for the data source or the full path of the file which contains data for the data source.
[ "Open", "filename", "to", "get", "data", "for", "data_source", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/layers.py#L149-L178
BreakingBytes/simkit
simkit/core/layers.py
Data.load
def load(self, rel_path=None): """ Add data_sources to layer and open files with data for the data_source. """ for k, v in self.layer.iteritems(): self.add(k, v['module'], v.get('package')) filename = v.get('filename') path = v.get('path') if filename: # default path for data is in ../data if not path: path = rel_path else: path = os.path.join(rel_path, path) # filename can be a list or a string, concatenate list with # os.pathsep and append the full path to strings. if isinstance(filename, basestring): filename = os.path.join(path, filename) else: file_list = [os.path.join(path, f) for f in filename] filename = os.path.pathsep.join(file_list) self.open(k, filename)
python
def load(self, rel_path=None): """ Add data_sources to layer and open files with data for the data_source. """ for k, v in self.layer.iteritems(): self.add(k, v['module'], v.get('package')) filename = v.get('filename') path = v.get('path') if filename: # default path for data is in ../data if not path: path = rel_path else: path = os.path.join(rel_path, path) # filename can be a list or a string, concatenate list with # os.pathsep and append the full path to strings. if isinstance(filename, basestring): filename = os.path.join(path, filename) else: file_list = [os.path.join(path, f) for f in filename] filename = os.path.pathsep.join(file_list) self.open(k, filename)
[ "def", "load", "(", "self", ",", "rel_path", "=", "None", ")", ":", "for", "k", ",", "v", "in", "self", ".", "layer", ".", "iteritems", "(", ")", ":", "self", ".", "add", "(", "k", ",", "v", "[", "'module'", "]", ",", "v", ".", "get", "(", "'package'", ")", ")", "filename", "=", "v", ".", "get", "(", "'filename'", ")", "path", "=", "v", ".", "get", "(", "'path'", ")", "if", "filename", ":", "# default path for data is in ../data", "if", "not", "path", ":", "path", "=", "rel_path", "else", ":", "path", "=", "os", ".", "path", ".", "join", "(", "rel_path", ",", "path", ")", "# filename can be a list or a string, concatenate list with", "# os.pathsep and append the full path to strings.", "if", "isinstance", "(", "filename", ",", "basestring", ")", ":", "filename", "=", "os", ".", "path", ".", "join", "(", "path", ",", "filename", ")", "else", ":", "file_list", "=", "[", "os", ".", "path", ".", "join", "(", "path", ",", "f", ")", "for", "f", "in", "filename", "]", "filename", "=", "os", ".", "path", ".", "pathsep", ".", "join", "(", "file_list", ")", "self", ".", "open", "(", "k", ",", "filename", ")" ]
Add data_sources to layer and open files with data for the data_source.
[ "Add", "data_sources", "to", "layer", "and", "open", "files", "with", "data", "for", "the", "data_source", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/layers.py#L180-L201
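How the multi-file branch of `Data.load` ends up encoding several paths in one string: it packs them with the platform path separator (`:` on POSIX, `;` on Windows). The directory and file names here are made up for illustration.

```python
import os

path = os.path.join('project', 'data')
filenames = ['tilt.csv', 'weather.csv']  # the list case from Data.load
packed = os.pathsep.join(os.path.join(path, f) for f in filenames)
print(packed)                    # POSIX: project/data/tilt.csv:project/data/weather.csv
print(packed.split(os.pathsep))  # a reader can split it back apart
```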
BreakingBytes/simkit
simkit/core/layers.py
Data.edit
def edit(self, data_src, value): """ Edit data layer. :param data_src: Name of :class:`DataSource` to edit. :type data_src: str :param value: Values to edit. :type value: dict """ # check if opening file if 'filename' in value: items = [k for k, v in self.reg.data_source.iteritems() if v == data_src] self.reg.unregister(items) # remove items from Registry # open file and register new data self.open(data_src, value['filename'], value.get('path')) self.layer[data_src].update(value)
python
def edit(self, data_src, value): """ Edit data layer. :param data_src: Name of :class:`DataSource` to edit. :type data_src: str :param value: Values to edit. :type value: dict """ # check if opening file if 'filename' in value: items = [k for k, v in self.reg.data_source.iteritems() if v == data_src] self.reg.unregister(items) # remove items from Registry # open file and register new data self.open(data_src, value['filename'], value.get('path')) self.layer[data_src].update(value)
[ "def", "edit", "(", "self", ",", "data_src", ",", "value", ")", ":", "# check if opening file", "if", "'filename'", "in", "value", ":", "items", "=", "[", "k", "for", "k", ",", "v", "in", "self", ".", "reg", ".", "data_source", ".", "iteritems", "(", ")", "if", "v", "==", "data_src", "]", "self", ".", "reg", ".", "unregister", "(", "items", ")", "# remove items from Registry", "# open file and register new data", "self", ".", "open", "(", "data_src", ",", "value", "[", "'filename'", "]", ",", "value", ".", "get", "(", "'path'", ")", ")", "self", ".", "layer", "[", "data_src", "]", ".", "update", "(", "value", ")" ]
Edit data layer. :param data_src: Name of :class:`DataSource` to edit. :type data_src: str :param value: Values to edit. :type value: dict
[ "Edit", "data", "layer", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/layers.py#L203-L219
BreakingBytes/simkit
simkit/core/layers.py
Data.delete
def delete(self, data_src): """ Delete data sources. """ items = self.objects[data_src].data.keys() # items to remove self.reg.unregister(items) # remove items from Registry self.layer.pop(data_src) # remove data source from layer self.objects.pop(data_src) # remove data_source object self.sources.pop(data_src)
python
def delete(self, data_src): """ Delete data sources. """ items = self.objects[data_src].data.keys() # items to remove self.reg.unregister(items) # remove items from Registry self.layer.pop(data_src) # remove data source from layer self.objects.pop(data_src) # remove data_source object self.sources.pop(data_src)
[ "def", "delete", "(", "self", ",", "data_src", ")", ":", "items", "=", "self", ".", "objects", "[", "data_src", "]", ".", "data", ".", "keys", "(", ")", "# items to remove", "self", ".", "reg", ".", "unregister", "(", "items", ")", "# remove items from Registry", "self", ".", "layer", ".", "pop", "(", "data_src", ")", "# remove data source from layer", "self", ".", "objects", ".", "pop", "(", "data_src", ")", "# remove data_source object", "self", ".", "sources", ".", "pop", "(", "data_src", ")" ]
Delete data sources.
[ "Delete", "data", "sources", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/layers.py#L221-L229
BreakingBytes/simkit
simkit/core/layers.py
Formulas.add
def add(self, formula, module, package=None): """ Import module (from package) with formulas, import formulas and add them to formula registry. :param formula: Name of the formula source to add/open. :param module: Module containing formula source. :param package: [Optional] Package of formula source module. .. seealso:: :func:`importlib.import_module` """ super(Formulas, self).add(formula, module, package) # only update layer info if it is missing! if formula not in self.layer: # copy formula source parameters to :attr:`Layer.layer` self.layer[formula] = {'module': module, 'package': package} self.objects[formula] = self.sources[formula]() # register formula and linearity in registry formula_src_obj = self.objects[formula] meta = [getattr(formula_src_obj, m) for m in self.reg.meta_names] self.reg.register(formula_src_obj.formulas, *meta)
python
def add(self, formula, module, package=None): """ Import module (from package) with formulas, import formulas and add them to formula registry. :param formula: Name of the formula source to add/open. :param module: Module containing formula source. :param package: [Optional] Package of formula source module. .. seealso:: :func:`importlib.import_module` """ super(Formulas, self).add(formula, module, package) # only update layer info if it is missing! if formula not in self.layer: # copy formula source parameters to :attr:`Layer.layer` self.layer[formula] = {'module': module, 'package': package} self.objects[formula] = self.sources[formula]() # register formula and linearity in registry formula_src_obj = self.objects[formula] meta = [getattr(formula_src_obj, m) for m in self.reg.meta_names] self.reg.register(formula_src_obj.formulas, *meta)
[ "def", "add", "(", "self", ",", "formula", ",", "module", ",", "package", "=", "None", ")", ":", "super", "(", "Formulas", ",", "self", ")", ".", "add", "(", "formula", ",", "module", ",", "package", ")", "# only update layer info if it is missing!", "if", "formula", "not", "in", "self", ".", "layer", ":", "# copy formula source parameters to :attr:`Layer.layer`", "self", ".", "layer", "[", "formula", "]", "=", "{", "'module'", ":", "module", ",", "'package'", ":", "package", "}", "self", ".", "objects", "[", "formula", "]", "=", "self", ".", "sources", "[", "formula", "]", "(", ")", "# register formula and linearity in registry", "formula_src_obj", "=", "self", ".", "objects", "[", "formula", "]", "meta", "=", "[", "getattr", "(", "formula_src_obj", ",", "m", ")", "for", "m", "in", "self", ".", "reg", ".", "meta_names", "]", "self", ".", "reg", ".", "register", "(", "formula_src_obj", ".", "formulas", ",", "*", "meta", ")" ]
Import module (from package) with formulas, import formulas and add them to formula registry. :param formula: Name of the formula source to add/open. :param module: Module containing formula source. :param package: [Optional] Package of formula source module. .. seealso:: :func:`importlib.import_module`
[ "Import", "module", "(", "from", "package", ")", "with", "formulas", "import", "formulas", "and", "add", "them", "to", "formula", "registry", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/layers.py#L239-L260
BreakingBytes/simkit
simkit/core/layers.py
Formulas.load
def load(self, _=None): """ Add formulas to layer. """ for k, v in self.layer.iteritems(): self.add(k, v['module'], v.get('package'))
python
def load(self, _=None): """ Add formulas to layer. """ for k, v in self.layer.iteritems(): self.add(k, v['module'], v.get('package'))
[ "def", "load", "(", "self", ",", "_", "=", "None", ")", ":", "for", "k", ",", "v", "in", "self", ".", "layer", ".", "iteritems", "(", ")", ":", "self", ".", "add", "(", "k", ",", "v", "[", "'module'", "]", ",", "v", ".", "get", "(", "'package'", ")", ")" ]
Add formulas to layer.
[ "Add", "formulas", "to", "layer", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/layers.py#L265-L270
BreakingBytes/simkit
simkit/core/layers.py
Calculations.add
def add(self, calc, module, package=None): """ Add calc to layer. """ super(Calculations, self).add(calc, module, package) # only update layer info if it is missing! if calc not in self.layer: # copy calc source parameters to :attr:`Layer.layer` self.layer[calc] = {'module': module, 'package': package} # instantiate the calc object self.objects[calc] = self.sources[calc]() # register calc and dependencies in registry calc_src_obj = self.objects[calc] meta = [getattr(calc_src_obj, m) for m in self.reg.meta_names] self.reg.register(calc_src_obj.calcs, *meta)
python
def add(self, calc, module, package=None): """ Add calc to layer. """ super(Calculations, self).add(calc, module, package) # only update layer info if it is missing! if calc not in self.layer: # copy calc source parameters to :attr:`Layer.layer` self.layer[calc] = {'module': module, 'package': package} # instantiate the calc object self.objects[calc] = self.sources[calc]() # register calc and dependencies in registry calc_src_obj = self.objects[calc] meta = [getattr(calc_src_obj, m) for m in self.reg.meta_names] self.reg.register(calc_src_obj.calcs, *meta)
[ "def", "add", "(", "self", ",", "calc", ",", "module", ",", "package", "=", "None", ")", ":", "super", "(", "Calculations", ",", "self", ")", ".", "add", "(", "calc", ",", "module", ",", "package", ")", "# only update layer info if it is missing!", "if", "calc", "not", "in", "self", ".", "layer", ":", "# copy calc source parameters to :attr:`Layer.layer`", "self", ".", "layer", "[", "calc", "]", "=", "{", "'module'", ":", "module", ",", "'package'", ":", "package", "}", "# instantiate the calc object", "self", ".", "objects", "[", "calc", "]", "=", "self", ".", "sources", "[", "calc", "]", "(", ")", "# register calc and dependencies in registry", "calc_src_obj", "=", "self", ".", "objects", "[", "calc", "]", "meta", "=", "[", "getattr", "(", "calc_src_obj", ",", "m", ")", "for", "m", "in", "self", ".", "reg", ".", "meta_names", "]", "self", ".", "reg", ".", "register", "(", "calc_src_obj", ".", "calcs", ",", "*", "meta", ")" ]
Add calc to layer.
[ "Add", "calc", "to", "layer", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/layers.py#L286-L300
BreakingBytes/simkit
simkit/core/layers.py
Outputs.add
def add(self, output, module, package=None): """ Add output to layer. """ super(Outputs, self).add(output, module, package) # only update layer info if it is missing! if output not in self.layer: # copy output source parameters to :attr:`Layer.layer` self.layer[output] = {'module': module, 'package': package} # instantiate the output object self.objects[output] = self.sources[output]() # register outputs and meta-data in registry out_src_obj = self.objects[output] meta = [getattr(out_src_obj, m) for m in self.reg.meta_names] self.reg.register(out_src_obj.outputs, *meta)
python
def add(self, output, module, package=None): """ Add output to layer. """ super(Outputs, self).add(output, module, package) # only update layer info if it is missing! if output not in self.layer: # copy output source parameters to :attr:`Layer.layer` self.layer[output] = {'module': module, 'package': package} # instantiate the output object self.objects[output] = self.sources[output]() # register outputs and meta-data in registry out_src_obj = self.objects[output] meta = [getattr(out_src_obj, m) for m in self.reg.meta_names] self.reg.register(out_src_obj.outputs, *meta)
[ "def", "add", "(", "self", ",", "output", ",", "module", ",", "package", "=", "None", ")", ":", "super", "(", "Outputs", ",", "self", ")", ".", "add", "(", "output", ",", "module", ",", "package", ")", "# only update layer info if it is missing!", "if", "output", "not", "in", "self", ".", "layer", ":", "# copy output source parameters to :attr:`Layer.layer`", "self", ".", "layer", "[", "output", "]", "=", "{", "'module'", ":", "module", ",", "'package'", ":", "package", "}", "# instantiate the output object", "self", ".", "objects", "[", "output", "]", "=", "self", ".", "sources", "[", "output", "]", "(", ")", "# register outputs and meta-data in registry", "out_src_obj", "=", "self", ".", "objects", "[", "output", "]", "meta", "=", "[", "getattr", "(", "out_src_obj", ",", "m", ")", "for", "m", "in", "self", ".", "reg", ".", "meta_names", "]", "self", ".", "reg", ".", "register", "(", "out_src_obj", ".", "outputs", ",", "*", "meta", ")" ]
Add output to layer.
[ "Add", "output", "to", "layer", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/layers.py#L326-L340
BreakingBytes/simkit
simkit/core/layers.py
Simulations.add
def add(self, sim, module, package=None): """ Add simulation to layer. """ super(Simulations, self).add(sim, module, package) # only update layer info if it is missing! if sim not in self.layer: # copy simulation source parameters to :attr:`Layer.layer` self.layer[sim] = {'module': module, 'package': package}
python
def add(self, sim, module, package=None): """ Add simulation to layer. """ super(Simulations, self).add(sim, module, package) # only update layer info if it is missing! if sim not in self.layer: # copy simulation source parameters to :attr:`Layer.layer` self.layer[sim] = {'module': module, 'package': package}
[ "def", "add", "(", "self", ",", "sim", ",", "module", ",", "package", "=", "None", ")", ":", "super", "(", "Simulations", ",", "self", ")", ".", "add", "(", "sim", ",", "module", ",", "package", ")", "# only update layer info if it is missing!", "if", "sim", "not", "in", "self", ".", "layer", ":", "# copy simulation source parameters to :attr:`Layer.layer`", "self", ".", "layer", "[", "sim", "]", "=", "{", "'module'", ":", "module", ",", "'package'", ":", "package", "}" ]
Add simulation to layer.
[ "Add", "simulation", "to", "layer", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/layers.py#L366-L374
BreakingBytes/simkit
simkit/core/layers.py
Simulations.load
def load(self, rel_path=None): """ Add sim_src to layer. """ for k, v in self.layer.iteritems(): self.add(k, v['module'], v.get('package')) filename = v.get('filename') path = v.get('path') if filename: warnings.warn(DeprecationWarning(SIMFILE_LOAD_WARNING)) # default path for data is in ../simulations if not path: path = rel_path else: path = os.path.join(rel_path, path) filename = os.path.join(path, filename) self.open(k, filename)
python
def load(self, rel_path=None): """ Add sim_src to layer. """ for k, v in self.layer.iteritems(): self.add(k, v['module'], v.get('package')) filename = v.get('filename') path = v.get('path') if filename: warnings.warn(DeprecationWarning(SIMFILE_LOAD_WARNING)) # default path for data is in ../simulations if not path: path = rel_path else: path = os.path.join(rel_path, path) filename = os.path.join(path, filename) self.open(k, filename)
[ "def", "load", "(", "self", ",", "rel_path", "=", "None", ")", ":", "for", "k", ",", "v", "in", "self", ".", "layer", ".", "iteritems", "(", ")", ":", "self", ".", "add", "(", "k", ",", "v", "[", "'module'", "]", ",", "v", ".", "get", "(", "'package'", ")", ")", "filename", "=", "v", ".", "get", "(", "'filename'", ")", "path", "=", "v", ".", "get", "(", "'path'", ")", "if", "filename", ":", "warnings", ".", "warn", "(", "DeprecationWarning", "(", "SIMFILE_LOAD_WARNING", ")", ")", "# default path for data is in ../simulations", "if", "not", "path", ":", "path", "=", "rel_path", "else", ":", "path", "=", "os", ".", "path", ".", "join", "(", "rel_path", ",", "path", ")", "filename", "=", "os", ".", "path", ".", "join", "(", "path", ",", "filename", ")", "self", ".", "open", "(", "k", ",", "filename", ")" ]
Add sim_src to layer.
[ "Add", "sim_src", "to", "layer", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/core/layers.py#L386-L402
BreakingBytes/simkit
examples/PVPower/pvpower/formulas/irradiance.py
f_solpos
def f_solpos(times, latitude, longitude): """ Calculate solar position for lat/long at times. :param times: Python :class:`datetime.datetime` objects. :type times: list :param latitude: latitude [degrees] :type latitude: float :param longitude: longitude [degrees] :type longitude: float :returns: apparent zenith, azimuth """ # pvlib converts Python datetime objects to pandas DatetimeIndex solpos = pvlib.solarposition.get_solarposition(times, latitude, longitude) # solpos is a pandas DataFrame, so unpack the desired values # return shape is (2, NOBS), so unc_wrapper sees 2 dependent variables return solpos['apparent_zenith'].values, solpos['azimuth'].values
python
def f_solpos(times, latitude, longitude): """ Calculate solar position for lat/long at times. :param times: Python :class:`datetime.datetime` objects. :type times: list :param latitude: latitude [degrees] :type latitude: float :param longitude: longitude [degrees] :type longitude: float :returns: apparent zenith, azimuth """ # pvlib converts Python datetime objects to pandas DatetimeIndex solpos = pvlib.solarposition.get_solarposition(times, latitude, longitude) # solpos is a pandas DataFrame, so unpack the desired values # return shape is (2, NOBS), so unc_wrapper sees 2 dependent variables return solpos['apparent_zenith'].values, solpos['azimuth'].values
[ "def", "f_solpos", "(", "times", ",", "latitude", ",", "longitude", ")", ":", "# pvlib converts Python datetime objects to pandas DatetimeIndex", "solpos", "=", "pvlib", ".", "solarposition", ".", "get_solarposition", "(", "times", ",", "latitude", ",", "longitude", ")", "# solpos is a pandas DataFrame, so unpack the desired values", "# return shape is (2, NOBS), so unc_wrapper sees 2 dependent variables", "return", "solpos", "[", "'apparent_zenith'", "]", ".", "values", ",", "solpos", "[", "'azimuth'", "]", ".", "values" ]
Calculate solar position for lat/long at times. :param times: Python :class:`datetime.datetime` objects. :type times: list :param latitude: latitude [degrees] :type latitude: float :param longitude: longitude [degrees] :type longitude: float :returns: apparent zenith, azimuth
[ "Calculate", "solar", "position", "for", "lat", "/", "long", "at", "times", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/examples/PVPower/pvpower/formulas/irradiance.py#L25-L41
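A minimal usage sketch for the solar-position call wrapped by `f_solpos`, assuming `pvlib` and `pandas` are installed; the Madrid coordinates and timestamps are example inputs.

```python
import pandas as pd
import pvlib

times = pd.date_range('2019-06-21 06:00', periods=3, freq='3H', tz='UTC')
solpos = pvlib.solarposition.get_solarposition(times, 40.4, -3.7)
# The same two columns f_solpos unpacks into ndarrays:
print(solpos[['apparent_zenith', 'azimuth']])
```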
BreakingBytes/simkit
examples/PVPower/pvpower/formulas/irradiance.py
f_total_irrad
def f_total_irrad(times, surface_tilt, surface_azimuth, solar_zenith, solar_azimuth, dni, ghi, dhi, dni_extra, am_abs, model='haydavies'): """ Calculate total irradiance :param times: timestamps :param surface_tilt: panel tilt from horizontal [deg] :param surface_azimuth: panel azimuth from north [deg] :param solar_zenith: refracted solar zenith angle [deg] :param solar_azimuth: solar azimuth [deg] :param dni: direct normal irradiance [W/m**2] :param ghi: global horizontal irradiance [W/m**2] :param dhi: diffuse horizontal irradiance [W/m**2] :param dni_extra: extraterrestrial irradiance [W/m**2] :param am_abs: absolute airmass [dimensionless] :param model: irradiance model name, default is ``'haydavies'`` :type model: str :return: global, direct and diffuse plane of array irradiance [W/m**2] """ am_abs = am_abs.squeeze() # make a DataFrame for time series arguments df = pd.DataFrame( {'solar_zenith': solar_zenith, 'solar_azimuth': solar_azimuth, 'dni': dni, 'ghi': ghi, 'dhi': dhi, 'dni_extra': dni_extra, 'am_abs': am_abs}, index=times ) # calculate total irradiance using PVLIB total_irrad = pvlib.irradiance.total_irrad( surface_tilt, surface_azimuth, df['solar_zenith'], df['solar_azimuth'], df['dni'], df['ghi'], df['dhi'], dni_extra=df['dni_extra'], airmass=df['am_abs'], model=model ).fillna(0.0) # convert to ndarrays poa_global = total_irrad['poa_global'].values poa_direct = total_irrad['poa_direct'].values poa_diffuse = total_irrad['poa_diffuse'].values return poa_global, poa_direct, poa_diffuse
python
def f_total_irrad(times, surface_tilt, surface_azimuth, solar_zenith, solar_azimuth, dni, ghi, dhi, dni_extra, am_abs, model='haydavies'): """ Calculate total irradiance :param times: timestamps :param surface_tilt: panel tilt from horizontal [deg] :param surface_azimuth: panel azimuth from north [deg] :param solar_zenith: refracted solar zenith angle [deg] :param solar_azimuth: solar azimuth [deg] :param dni: direct normal irradiance [W/m**2] :param ghi: global horizontal irradiance [W/m**2] :param dhi: diffuse horizontal irradiance [W/m**2] :param dni_extra: extraterrestrial irradiance [W/m**2] :param am_abs: absolute airmass [dimensionless] :param model: irradiance model name, default is ``'haydavies'`` :type model: str :return: global, direct and diffuse plane of array irradiance [W/m**2] """ am_abs = am_abs.squeeze() # make a DataFrame for time series arguments df = pd.DataFrame( {'solar_zenith': solar_zenith, 'solar_azimuth': solar_azimuth, 'dni': dni, 'ghi': ghi, 'dhi': dhi, 'dni_extra': dni_extra, 'am_abs': am_abs}, index=times ) # calculate total irradiance using PVLIB total_irrad = pvlib.irradiance.total_irrad( surface_tilt, surface_azimuth, df['solar_zenith'], df['solar_azimuth'], df['dni'], df['ghi'], df['dhi'], dni_extra=df['dni_extra'], airmass=df['am_abs'], model=model ).fillna(0.0) # convert to ndarrays poa_global = total_irrad['poa_global'].values poa_direct = total_irrad['poa_direct'].values poa_diffuse = total_irrad['poa_diffuse'].values return poa_global, poa_direct, poa_diffuse
[ "def", "f_total_irrad", "(", "times", ",", "surface_tilt", ",", "surface_azimuth", ",", "solar_zenith", ",", "solar_azimuth", ",", "dni", ",", "ghi", ",", "dhi", ",", "dni_extra", ",", "am_abs", ",", "model", "=", "'haydavies'", ")", ":", "am_abs", "=", "am_abs", ".", "squeeze", "(", ")", "# make a DataFrame for time series arguments", "df", "=", "pd", ".", "DataFrame", "(", "{", "'solar_zenith'", ":", "solar_zenith", ",", "'solar_azimuth'", ":", "solar_azimuth", ",", "'dni'", ":", "dni", ",", "'ghi'", ":", "ghi", ",", "'dhi'", ":", "dhi", ",", "'dni_extra'", ":", "dni_extra", ",", "'am_abs'", ":", "am_abs", "}", ",", "index", "=", "times", ")", "# calculate total irradiance using PVLIB", "total_irrad", "=", "pvlib", ".", "irradiance", ".", "total_irrad", "(", "surface_tilt", ",", "surface_azimuth", ",", "df", "[", "'solar_zenith'", "]", ",", "df", "[", "'solar_azimuth'", "]", ",", "df", "[", "'dni'", "]", ",", "df", "[", "'ghi'", "]", ",", "df", "[", "'dhi'", "]", ",", "dni_extra", "=", "df", "[", "'dni_extra'", "]", ",", "airmass", "=", "df", "[", "'am_abs'", "]", ",", "model", "=", "model", ")", ".", "fillna", "(", "0.0", ")", "# convert to ndarrays", "poa_global", "=", "total_irrad", "[", "'poa_global'", "]", ".", "values", "poa_direct", "=", "total_irrad", "[", "'poa_direct'", "]", ".", "values", "poa_diffuse", "=", "total_irrad", "[", "'poa_diffuse'", "]", ".", "values", "return", "poa_global", ",", "poa_direct", ",", "poa_diffuse" ]
Calculate total irradiance :param times: timestamps :param surface_tilt: panel tilt from horizontal [deg] :param surface_azimuth: panel azimuth from north [deg] :param solar_zenith: refracted solar zenith angle [deg] :param solar_azimuth: solar azimuth [deg] :param dni: direct normal irradiance [W/m**2] :param ghi: global horizontal irradiance [W/m**2] :param dhi: diffuse horizontal irradiance [W/m**2] :param dni_extra: extraterrestrial irradiance [W/m**2] :param am_abs: absolute airmass [dimensionless] :param model: irradiance model name, default is ``'haydavies'`` :type model: str :return: global, direct and diffuse plane of array irradiance [W/m**2]
[ "Calculate", "total", "irradiance" ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/examples/PVPower/pvpower/formulas/irradiance.py#L63-L101
guaix-ucm/pyemir
emirdrp/processing/wavecal/rescale_array_z1z2.py
rescale_array_to_z1z2
def rescale_array_to_z1z2(array, z1z2=(-1.0, 1.0)): """Rescale the values in a numpy array to the [z1,z2] interval. The transformation is carried out following the relation array_rs = b_flux * array - c_flux as explained in Appendix B1 of Cardiel (2009, MNRAS, 396, 680) Parameters ---------- array : numpy array Numpy array to be rescaled. z1z2 : tuple, floats Minimum and maximum values in the returned array. Returns ------- array_rs : numpy array Array with rescaled values. coef_rs : tuple, floats Coefficients b_flux and c_flux employed in the rescaling operation. """ if type(array) is not np.ndarray: raise ValueError("array=" + str(array) + " must be a numpy.ndarray") array_min = array.min() array_max = array.max() z1, z2 = z1z2 delta = array_max - array_min b_flux = (z2 - z1) / delta c_flux = (z2 * array_min - z1 * array_max) / delta array_rs = b_flux * array - c_flux return array_rs, (b_flux, c_flux)
python
def rescale_array_to_z1z2(array, z1z2=(-1.0, 1.0)): """Rescale the values in a numpy array to the [z1,z2] interval. The transformation is carried out following the relation array_rs = b_flux * array - c_flux as explained in Appendix B1 of Cardiel (2009, MNRAS, 396, 680) Parameters ---------- array : numpy array Numpy array to be rescaled. z1z2 : tuple, floats Minimum and maximum values in the returned array. Returns ------- array_rs : numpy array Array with rescaled values. coef_rs : tuple, floats Coefficients b_flux and c_flux employed in the rescaling operation. """ if type(array) is not np.ndarray: raise ValueError("array=" + str(array) + " must be a numpy.ndarray") array_min = array.min() array_max = array.max() z1, z2 = z1z2 delta = array_max - array_min b_flux = (z2 - z1) / delta c_flux = (z2 * array_min - z1 * array_max) / delta array_rs = b_flux * array - c_flux return array_rs, (b_flux, c_flux)
[ "def", "rescale_array_to_z1z2", "(", "array", ",", "z1z2", "=", "(", "-", "1.0", ",", "1.0", ")", ")", ":", "if", "type", "(", "array", ")", "is", "not", "np", ".", "ndarray", ":", "raise", "ValueError", "(", "\"array=\"", "+", "str", "(", "array", ")", "+", "\" must be a numpy.ndarray\"", ")", "array_min", "=", "array", ".", "min", "(", ")", "array_max", "=", "array", ".", "max", "(", ")", "z1", ",", "z2", "=", "z1z2", "delta", "=", "array_max", "-", "array_min", "b_flux", "=", "(", "z2", "-", "z1", ")", "/", "delta", "c_flux", "=", "(", "z2", "*", "array_min", "-", "z1", "*", "array_max", ")", "/", "delta", "array_rs", "=", "b_flux", "*", "array", "-", "c_flux", "return", "array_rs", ",", "(", "b_flux", ",", "c_flux", ")" ]
Rescale the values in a numpy array to the [z1,z2] interval.

The transformation is carried out following the relation
array_rs = b_flux * array - c_flux
as explained in Appendix B1 of Cardiel (2009, MNRAS, 396, 680)

Parameters
----------
array : numpy array
    Numpy array to be rescaled.
z1z2 : tuple, floats
    Minimum and maximum values in the returned array.

Returns
-------
array_rs : numpy array
    Array with rescaled values.
coef_rs : tuple, floats
    Coefficients b_flux and c_flux employed in the rescaling operation.
[ "Rescale", "the", "values", "in", "a", "numpy", "array", "to", "the", "[", "z1", "z2", "]", "interval", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/wavecal/rescale_array_z1z2.py#L27-L64
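A short usage sketch for rescale_array_to_z1z2, assuming emirdrp is installed so the module path in this record is importable; the input array is illustrative.

import numpy as np
from emirdrp.processing.wavecal.rescale_array_z1z2 import rescale_array_to_z1z2

x = np.array([10.0, 15.0, 20.0])
x_rs, (b_flux, c_flux) = rescale_array_to_z1z2(x)  # default interval is (-1.0, 1.0)
print(x_rs)  # [-1.  0.  1.], since b_flux = 0.2 and c_flux = 3.0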
guaix-ucm/pyemir
emirdrp/processing/wavecal/rescale_array_z1z2.py
rescale_array_from_z1z2
def rescale_array_from_z1z2(array_rs, coef_rs=None):
    """Restore the values in a numpy array rescaled to the [z1,z2] interval.

    The transformation is carried out following the relation
    array = (array_rs + c_flux)/b_flux
    as explained in Appendix B1 of Cardiel (2009, MNRAS, 396, 680)

    Parameters
    ----------
    array_rs : numpy array
        Numpy array previously rescaled to the [z1,z2] interval with the
        function rescale_array_to_z1z2().
    coef_rs : tuple, floats
        Coefficients b_flux and c_flux previously employed in the
        rescaling operation. This tuple is one of the parameters
        returned by function_rescale_array_to_z1z2().

    Returns
    -------
    array : numpy array
        Array with restored values.

    """

    if type(array_rs) is not np.ndarray:
        raise ValueError(
            "array_rs=" + str(array_rs) + "must be a numpy.ndarray")

    b_flux, c_flux = coef_rs
    array = (array_rs + c_flux) / b_flux

    return array
python
def rescale_array_from_z1z2(array_rs, coef_rs=None):
    """Restore the values in a numpy array rescaled to the [z1,z2] interval.

    The transformation is carried out following the relation
    array = (array_rs + c_flux)/b_flux
    as explained in Appendix B1 of Cardiel (2009, MNRAS, 396, 680)

    Parameters
    ----------
    array_rs : numpy array
        Numpy array previously rescaled to the [z1,z2] interval with the
        function rescale_array_to_z1z2().
    coef_rs : tuple, floats
        Coefficients b_flux and c_flux previously employed in the
        rescaling operation. This tuple is one of the parameters
        returned by function_rescale_array_to_z1z2().

    Returns
    -------
    array : numpy array
        Array with restored values.

    """

    if type(array_rs) is not np.ndarray:
        raise ValueError(
            "array_rs=" + str(array_rs) + "must be a numpy.ndarray")

    b_flux, c_flux = coef_rs
    array = (array_rs + c_flux) / b_flux

    return array
[ "def", "rescale_array_from_z1z2", "(", "array_rs", ",", "coef_rs", "=", "None", ")", ":", "if", "type", "(", "array_rs", ")", "is", "not", "np", ".", "ndarray", ":", "raise", "ValueError", "(", "\"array_rs=\"", "+", "str", "(", "array_rs", ")", "+", "\"must be a numpy.ndarray\"", ")", "b_flux", ",", "c_flux", "=", "coef_rs", "array", "=", "(", "array_rs", "+", "c_flux", ")", "/", "b_flux", "return", "array" ]
Restore the values in a numpy array rescaled to the [z1,z2] interval.

The transformation is carried out following the relation
array = (array_rs + c_flux)/b_flux
as explained in Appendix B1 of Cardiel (2009, MNRAS, 396, 680)

Parameters
----------
array_rs : numpy array
    Numpy array previously rescaled to the [z1,z2] interval with the
    function rescale_array_to_z1z2().
coef_rs : tuple, floats
    Coefficients b_flux and c_flux previously employed in the rescaling
    operation. This tuple is one of the parameters returned by
    function_rescale_array_to_z1z2().

Returns
-------
array : numpy array
    Array with restored values.
[ "Restore", "the", "values", "in", "a", "numpy", "array", "rescaled", "to", "the", "[", "z1", "z2", "]", "interval", "." ]
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/wavecal/rescale_array_z1z2.py#L67-L99
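The two helpers invert each other, so a round trip recovers the original values. A minimal sketch under the same installation assumption as above.

import numpy as np
from emirdrp.processing.wavecal.rescale_array_z1z2 import (
    rescale_array_from_z1z2, rescale_array_to_z1z2)

x = np.array([10.0, 15.0, 20.0])
x_rs, coef_rs = rescale_array_to_z1z2(x)
x_back = rescale_array_from_z1z2(x_rs, coef_rs)  # applies (x_rs + c_flux) / b_flux
assert np.allclose(x_back, x)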
TriOptima/tri.declarative
lib/tri/declarative/__init__.py
with_meta
def with_meta(class_to_decorate=None, add_init_kwargs=True):
    """
    Class decorator to enable a class (and its sub-classes) to have a 'Meta' class attribute.

    :type class_to_decorate: class
    :param bool add_init_kwargs: Pass Meta class members to constructor

    :rtype: class
    """
    if class_to_decorate is None:
        return functools.partial(with_meta, add_init_kwargs=add_init_kwargs)

    if add_init_kwargs:
        def get_extra_args_function(self):
            return {k: v for k, v in self.get_meta().items() if not k.startswith('_')}
        add_args_to_init_call(class_to_decorate, get_extra_args_function)

    setattr(class_to_decorate, 'get_meta', classmethod(get_meta))

    return class_to_decorate
python
def with_meta(class_to_decorate=None, add_init_kwargs=True):
    """
    Class decorator to enable a class (and its sub-classes) to have a 'Meta' class attribute.

    :type class_to_decorate: class
    :param bool add_init_kwargs: Pass Meta class members to constructor

    :rtype: class
    """
    if class_to_decorate is None:
        return functools.partial(with_meta, add_init_kwargs=add_init_kwargs)

    if add_init_kwargs:
        def get_extra_args_function(self):
            return {k: v for k, v in self.get_meta().items() if not k.startswith('_')}
        add_args_to_init_call(class_to_decorate, get_extra_args_function)

    setattr(class_to_decorate, 'get_meta', classmethod(get_meta))

    return class_to_decorate
[ "def", "with_meta", "(", "class_to_decorate", "=", "None", ",", "add_init_kwargs", "=", "True", ")", ":", "if", "class_to_decorate", "is", "None", ":", "return", "functools", ".", "partial", "(", "with_meta", ",", "add_init_kwargs", "=", "add_init_kwargs", ")", "if", "add_init_kwargs", ":", "def", "get_extra_args_function", "(", "self", ")", ":", "return", "{", "k", ":", "v", "for", "k", ",", "v", "in", "self", ".", "get_meta", "(", ")", ".", "items", "(", ")", "if", "not", "k", ".", "startswith", "(", "'_'", ")", "}", "add_args_to_init_call", "(", "class_to_decorate", ",", "get_extra_args_function", ")", "setattr", "(", "class_to_decorate", ",", "'get_meta'", ",", "classmethod", "(", "get_meta", ")", ")", "return", "class_to_decorate" ]
Class decorator to enable a class (and its sub-classes) to have a 'Meta' class attribute.

:type class_to_decorate: class
:param bool add_init_kwargs: Pass Meta class members to constructor

:rtype: class
[ "Class", "decorator", "to", "enable", "a", "class", "(", "and", "it", "s", "sub", "-", "classes", ")", "to", "have", "a", "Meta", "class", "attribute", "." ]
train
https://github.com/TriOptima/tri.declarative/blob/13d90d4c2a10934e37a4139e63d51a859fb3e303/lib/tri/declarative/__init__.py#L27-L48
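A minimal sketch of @with_meta, assuming tri.declarative is installed: Meta members are passed to the constructor as keyword arguments, and the installed get_meta classmethod (documented in the next record) returns the merged Meta members.

from tri.declarative import with_meta

@with_meta
class Widget(object):
    class Meta:
        color = 'red'

    def __init__(self, color):
        self.color = color

assert Widget().color == 'red'  # Meta member injected into __init__
assert Widget.get_meta()['color'] == 'red'  # merged Meta members, see get_meta below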
TriOptima/tri.declarative
lib/tri/declarative/__init__.py
get_meta
def get_meta(cls):
    """
    Collect all members of any contained :code:`Meta` class declarations from the given class or any of its base classes.
    (Sub class values take precedence.)

    :type cls: class
    :rtype: Struct
    """
    merged_attributes = Struct()
    for class_ in reversed(cls.mro()):
        if hasattr(class_, 'Meta'):
            for key, value in class_.Meta.__dict__.items():
                merged_attributes[key] = value
    return merged_attributes
python
def get_meta(cls):
    """
    Collect all members of any contained :code:`Meta` class declarations from the given class or any of its base classes.
    (Sub class values take precedence.)

    :type cls: class
    :rtype: Struct
    """
    merged_attributes = Struct()
    for class_ in reversed(cls.mro()):
        if hasattr(class_, 'Meta'):
            for key, value in class_.Meta.__dict__.items():
                merged_attributes[key] = value
    return merged_attributes
[ "def", "get_meta", "(", "cls", ")", ":", "merged_attributes", "=", "Struct", "(", ")", "for", "class_", "in", "reversed", "(", "cls", ".", "mro", "(", ")", ")", ":", "if", "hasattr", "(", "class_", ",", "'Meta'", ")", ":", "for", "key", ",", "value", "in", "class_", ".", "Meta", ".", "__dict__", ".", "items", "(", ")", ":", "merged_attributes", "[", "key", "]", "=", "value", "return", "merged_attributes" ]
Collect all members of any contained :code:`Meta` class declarations from the given class or any of its base classes.
(Sub class values take precedence.)

:type cls: class
:rtype: Struct
[ "Collect", "all", "members", "of", "any", "contained", ":", "code", ":", "Meta", "class", "declarations", "from", "the", "given", "class", "or", "any", "of", "its", "base", "classes", ".", "(", "Sub", "class", "values", "take", "precedence", ".", ")" ]
train
https://github.com/TriOptima/tri.declarative/blob/13d90d4c2a10934e37a4139e63d51a859fb3e303/lib/tri/declarative/__init__.py#L51-L64
TriOptima/tri.declarative
lib/tri/declarative/__init__.py
creation_ordered
def creation_ordered(class_to_decorate):
    """
    Class decorator that ensures that instances will be ordered after creation order when sorted.

    :type class_to_decorate: class
    :rtype: class
    """

    next_index = functools.partial(next, itertools.count())
    __init__orig = class_to_decorate.__init__

    @functools.wraps(__init__orig, assigned=['__doc__'])
    def __init__(self, *args, **kwargs):
        object.__setattr__(self, '_index', next_index())
        __init__orig(self, *args, **kwargs)

    setattr(class_to_decorate, '__init__', __init__)

    # noinspection PyProtectedMember
    def __lt__(self, other):
        return self._index < other._index  # pragma: no mutate

    setattr(class_to_decorate, '__lt__', __lt__)

    class_to_decorate = functools.total_ordering(class_to_decorate)

    return class_to_decorate
python
def creation_ordered(class_to_decorate):
    """
    Class decorator that ensures that instances will be ordered after creation order when sorted.

    :type class_to_decorate: class
    :rtype: class
    """

    next_index = functools.partial(next, itertools.count())
    __init__orig = class_to_decorate.__init__

    @functools.wraps(__init__orig, assigned=['__doc__'])
    def __init__(self, *args, **kwargs):
        object.__setattr__(self, '_index', next_index())
        __init__orig(self, *args, **kwargs)

    setattr(class_to_decorate, '__init__', __init__)

    # noinspection PyProtectedMember
    def __lt__(self, other):
        return self._index < other._index  # pragma: no mutate

    setattr(class_to_decorate, '__lt__', __lt__)

    class_to_decorate = functools.total_ordering(class_to_decorate)

    return class_to_decorate
[ "def", "creation_ordered", "(", "class_to_decorate", ")", ":", "next_index", "=", "functools", ".", "partial", "(", "next", ",", "itertools", ".", "count", "(", ")", ")", "__init__orig", "=", "class_to_decorate", ".", "__init__", "@", "functools", ".", "wraps", "(", "__init__orig", ",", "assigned", "=", "[", "'__doc__'", "]", ")", "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "object", ".", "__setattr__", "(", "self", ",", "'_index'", ",", "next_index", "(", ")", ")", "__init__orig", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "setattr", "(", "class_to_decorate", ",", "'__init__'", ",", "__init__", ")", "# noinspection PyProtectedMember", "def", "__lt__", "(", "self", ",", "other", ")", ":", "return", "self", ".", "_index", "<", "other", ".", "_index", "# pragma: no mutate", "setattr", "(", "class_to_decorate", ",", "'__lt__'", ",", "__lt__", ")", "class_to_decorate", "=", "functools", ".", "total_ordering", "(", "class_to_decorate", ")", "return", "class_to_decorate" ]
Class decorator that ensures that instances will be ordered after creation order when sorted.

:type class_to_decorate: class
:rtype: class
[ "Class", "decorator", "that", "ensures", "that", "instances", "will", "be", "ordered", "after", "creation", "order", "when", "sorted", "." ]
train
https://github.com/TriOptima/tri.declarative/blob/13d90d4c2a10934e37a4139e63d51a859fb3e303/lib/tri/declarative/__init__.py#L67-L94
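A minimal sketch of @creation_ordered, assuming tri.declarative is installed: sorting uses the hidden creation index, not any attribute of the instances.

from tri.declarative import creation_ordered

@creation_ordered
class Token(object):
    def __init__(self, name):
        self.name = name

b = Token('b')
a = Token('a')
assert b < a  # b was created first, regardless of the names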
TriOptima/tri.declarative
lib/tri/declarative/__init__.py
get_members
def get_members(cls, member_class=None, is_member=None, sort_key=None, _parameter=None):
    """
    Collect all class level attributes matching the given criteria.

    :param class member_class: Class(es) to collect
    :param is_member: Function to determine if an object should be collected
    :param sort_key: Function to invoke on members to obtain ordering (Default is to use ordering from `creation_ordered`)

    :type is_member: (object) -> bool
    :type sort_key: (object) -> object
    """
    if member_class is None and is_member is None:
        raise TypeError("get_members either needs a member_class parameter or an is_member check function (or both)")

    members = OrderedDict()
    for base in cls.__bases__:
        if _parameter is None:
            inherited_members = get_members(base, member_class=member_class, is_member=is_member, sort_key=sort_key)
        else:
            # When used by @declarative, only traverse up the class inheritance to the decorated class.
            inherited_members = get_declared(base, _parameter)
        members.update(inherited_members)

    def generate_member_bindings():
        for name in cls.__dict__:
            if name.startswith('__'):
                continue
            obj = getattr(cls, name)
            if member_class is not None and isinstance(obj, member_class):
                yield name, obj
            elif is_member is not None and is_member(obj):
                yield name, obj
            elif type(obj) is tuple and len(obj) == 1 and isinstance(obj[0], member_class):
                raise TypeError("'%s' is a one-tuple containing what we are looking for. Trailing comma much? Don't... just don't." % name)  # pragma: no mutate

    bindings = generate_member_bindings()
    if sort_key is not None:
        try:
            sorted_bindings = sorted(bindings, key=lambda x: sort_key(x[1]))
        except AttributeError:
            if sort_key is default_sort_key:
                raise TypeError('Missing member ordering definition. Use @creation_ordered or specify sort_key')
            else:  # pragma: no covererage
                raise
        members.update(sorted_bindings)
    else:
        members.update(bindings)

    return members
python
def get_members(cls, member_class=None, is_member=None, sort_key=None, _parameter=None):
    """
    Collect all class level attributes matching the given criteria.

    :param class member_class: Class(es) to collect
    :param is_member: Function to determine if an object should be collected
    :param sort_key: Function to invoke on members to obtain ordering (Default is to use ordering from `creation_ordered`)

    :type is_member: (object) -> bool
    :type sort_key: (object) -> object
    """
    if member_class is None and is_member is None:
        raise TypeError("get_members either needs a member_class parameter or an is_member check function (or both)")

    members = OrderedDict()
    for base in cls.__bases__:
        if _parameter is None:
            inherited_members = get_members(base, member_class=member_class, is_member=is_member, sort_key=sort_key)
        else:
            # When used by @declarative, only traverse up the class inheritance to the decorated class.
            inherited_members = get_declared(base, _parameter)
        members.update(inherited_members)

    def generate_member_bindings():
        for name in cls.__dict__:
            if name.startswith('__'):
                continue
            obj = getattr(cls, name)
            if member_class is not None and isinstance(obj, member_class):
                yield name, obj
            elif is_member is not None and is_member(obj):
                yield name, obj
            elif type(obj) is tuple and len(obj) == 1 and isinstance(obj[0], member_class):
                raise TypeError("'%s' is a one-tuple containing what we are looking for. Trailing comma much? Don't... just don't." % name)  # pragma: no mutate

    bindings = generate_member_bindings()
    if sort_key is not None:
        try:
            sorted_bindings = sorted(bindings, key=lambda x: sort_key(x[1]))
        except AttributeError:
            if sort_key is default_sort_key:
                raise TypeError('Missing member ordering definition. Use @creation_ordered or specify sort_key')
            else:  # pragma: no covererage
                raise
        members.update(sorted_bindings)
    else:
        members.update(bindings)

    return members
[ "def", "get_members", "(", "cls", ",", "member_class", "=", "None", ",", "is_member", "=", "None", ",", "sort_key", "=", "None", ",", "_parameter", "=", "None", ")", ":", "if", "member_class", "is", "None", "and", "is_member", "is", "None", ":", "raise", "TypeError", "(", "\"get_members either needs a member_class parameter or an is_member check function (or both)\"", ")", "members", "=", "OrderedDict", "(", ")", "for", "base", "in", "cls", ".", "__bases__", ":", "if", "_parameter", "is", "None", ":", "inherited_members", "=", "get_members", "(", "base", ",", "member_class", "=", "member_class", ",", "is_member", "=", "is_member", ",", "sort_key", "=", "sort_key", ")", "else", ":", "# When user by @declarative, only traverse up the class inheritance to the decorated class.", "inherited_members", "=", "get_declared", "(", "base", ",", "_parameter", ")", "members", ".", "update", "(", "inherited_members", ")", "def", "generate_member_bindings", "(", ")", ":", "for", "name", "in", "cls", ".", "__dict__", ":", "if", "name", ".", "startswith", "(", "'__'", ")", ":", "continue", "obj", "=", "getattr", "(", "cls", ",", "name", ")", "if", "member_class", "is", "not", "None", "and", "isinstance", "(", "obj", ",", "member_class", ")", ":", "yield", "name", ",", "obj", "elif", "is_member", "is", "not", "None", "and", "is_member", "(", "obj", ")", ":", "yield", "name", ",", "obj", "elif", "type", "(", "obj", ")", "is", "tuple", "and", "len", "(", "obj", ")", "==", "1", "and", "isinstance", "(", "obj", "[", "0", "]", ",", "member_class", ")", ":", "raise", "TypeError", "(", "\"'%s' is a one-tuple containing what we are looking for. Trailing comma much? Don't... just don't.\"", "%", "name", ")", "# pragma: no mutate", "bindings", "=", "generate_member_bindings", "(", ")", "if", "sort_key", "is", "not", "None", ":", "try", ":", "sorted_bindings", "=", "sorted", "(", "bindings", ",", "key", "=", "lambda", "x", ":", "sort_key", "(", "x", "[", "1", "]", ")", ")", "except", "AttributeError", ":", "if", "sort_key", "is", "default_sort_key", ":", "raise", "TypeError", "(", "'Missing member ordering definition. Use @creation_ordered or specify sort_key'", ")", "else", ":", "# pragma: no covererage", "raise", "members", ".", "update", "(", "sorted_bindings", ")", "else", ":", "members", ".", "update", "(", "bindings", ")", "return", "members" ]
Collect all class level attributes matching the given criteria.

:param class member_class: Class(es) to collect
:param is_member: Function to determine if an object should be collected
:param sort_key: Function to invoke on members to obtain ordering (Default is to use ordering from `creation_ordered`)

:type is_member: (object) -> bool
:type sort_key: (object) -> object
[ "Collect", "all", "class", "level", "attributes", "matching", "the", "given", "criteria", "." ]
train
https://github.com/TriOptima/tri.declarative/blob/13d90d4c2a10934e37a4139e63d51a859fb3e303/lib/tri/declarative/__init__.py#L102-L151
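A minimal sketch of get_members, assuming tri.declarative is installed; members declared on base classes are collected too, and the sort_key here simply reads the index set by @creation_ordered.

from tri.declarative import creation_ordered, get_members

@creation_ordered
class Field(object):
    pass

class Base(object):
    a = Field()

class Form(Base):
    b = Field()

members = get_members(Form, member_class=Field, sort_key=lambda f: f._index)
assert list(members.keys()) == ['a', 'b']  # base class member first, then creation order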
TriOptima/tri.declarative
lib/tri/declarative/__init__.py
declarative
def declarative(member_class=None, parameter='members', add_init_kwargs=True, sort_key=default_sort_key, is_member=None):
    """
    Class decorator to enable classes to be defined in the style of django models.
    That is, @declarative classes will get an additional argument to constructor,
    containing an OrderedDict with all class members matching the specified type.

    :param class member_class: Class(es) to collect
    :param is_member: Function to determine if an object should be collected
    :param str parameter: Name of constructor parameter to inject
    :param bool add_init_kwargs: If constructor parameter should be injected (Default: True)
    :param sort_key: Function to invoke on members to obtain ordering (Default is to use ordering from `creation_ordered`)

    :type is_member: (object) -> bool
    :type sort_key: (object) -> object
    """
    if member_class is None and is_member is None:
        raise TypeError("The @declarative decorator needs either a member_class parameter or an is_member check function (or both)")

    def decorator(class_to_decorate):
        class DeclarativeMeta(class_to_decorate.__class__):
            # noinspection PyTypeChecker
            def __init__(cls, name, bases, dict):
                members = get_members(cls, member_class=member_class, is_member=is_member, sort_key=sort_key, _parameter=parameter)
                set_declared(cls, members, parameter)
                super(DeclarativeMeta, cls).__init__(name, bases, dict)

        new_class = DeclarativeMeta(class_to_decorate.__name__,
                                    class_to_decorate.__bases__,
                                    {k: v for k, v in class_to_decorate.__dict__.items() if k not in ['__dict__', '__weakref__']})

        def get_extra_args_function(self):
            declared = get_declared(self, parameter)

            def copy_declared():
                for k, v in declared.items():
                    try:
                        v = copy(v)
                    except TypeError:
                        pass  # Not always possible to copy methods
                    yield (k, v)

            copied_members = OrderedDict(copy_declared())
            self.__dict__.update(copied_members)
            return {parameter: copied_members}

        if add_init_kwargs:
            add_args_to_init_call(new_class, get_extra_args_function)
        else:
            add_init_call_hook(new_class, get_extra_args_function)

        setattr(new_class, 'get_declared', classmethod(get_declared))
        setattr(new_class, 'set_declared', classmethod(set_declared))

        return new_class

    return decorator
python
def declarative(member_class=None, parameter='members', add_init_kwargs=True, sort_key=default_sort_key, is_member=None):
    """
    Class decorator to enable classes to be defined in the style of django models.
    That is, @declarative classes will get an additional argument to constructor,
    containing an OrderedDict with all class members matching the specified type.

    :param class member_class: Class(es) to collect
    :param is_member: Function to determine if an object should be collected
    :param str parameter: Name of constructor parameter to inject
    :param bool add_init_kwargs: If constructor parameter should be injected (Default: True)
    :param sort_key: Function to invoke on members to obtain ordering (Default is to use ordering from `creation_ordered`)

    :type is_member: (object) -> bool
    :type sort_key: (object) -> object
    """
    if member_class is None and is_member is None:
        raise TypeError("The @declarative decorator needs either a member_class parameter or an is_member check function (or both)")

    def decorator(class_to_decorate):
        class DeclarativeMeta(class_to_decorate.__class__):
            # noinspection PyTypeChecker
            def __init__(cls, name, bases, dict):
                members = get_members(cls, member_class=member_class, is_member=is_member, sort_key=sort_key, _parameter=parameter)
                set_declared(cls, members, parameter)
                super(DeclarativeMeta, cls).__init__(name, bases, dict)

        new_class = DeclarativeMeta(class_to_decorate.__name__,
                                    class_to_decorate.__bases__,
                                    {k: v for k, v in class_to_decorate.__dict__.items() if k not in ['__dict__', '__weakref__']})

        def get_extra_args_function(self):
            declared = get_declared(self, parameter)

            def copy_declared():
                for k, v in declared.items():
                    try:
                        v = copy(v)
                    except TypeError:
                        pass  # Not always possible to copy methods
                    yield (k, v)

            copied_members = OrderedDict(copy_declared())
            self.__dict__.update(copied_members)
            return {parameter: copied_members}

        if add_init_kwargs:
            add_args_to_init_call(new_class, get_extra_args_function)
        else:
            add_init_call_hook(new_class, get_extra_args_function)

        setattr(new_class, 'get_declared', classmethod(get_declared))
        setattr(new_class, 'set_declared', classmethod(set_declared))

        return new_class

    return decorator
[ "def", "declarative", "(", "member_class", "=", "None", ",", "parameter", "=", "'members'", ",", "add_init_kwargs", "=", "True", ",", "sort_key", "=", "default_sort_key", ",", "is_member", "=", "None", ")", ":", "if", "member_class", "is", "None", "and", "is_member", "is", "None", ":", "raise", "TypeError", "(", "\"The @declarative decorator needs either a member_class parameter or an is_member check function (or both)\"", ")", "def", "decorator", "(", "class_to_decorate", ")", ":", "class", "DeclarativeMeta", "(", "class_to_decorate", ".", "__class__", ")", ":", "# noinspection PyTypeChecker", "def", "__init__", "(", "cls", ",", "name", ",", "bases", ",", "dict", ")", ":", "members", "=", "get_members", "(", "cls", ",", "member_class", "=", "member_class", ",", "is_member", "=", "is_member", ",", "sort_key", "=", "sort_key", ",", "_parameter", "=", "parameter", ")", "set_declared", "(", "cls", ",", "members", ",", "parameter", ")", "super", "(", "DeclarativeMeta", ",", "cls", ")", ".", "__init__", "(", "name", ",", "bases", ",", "dict", ")", "new_class", "=", "DeclarativeMeta", "(", "class_to_decorate", ".", "__name__", ",", "class_to_decorate", ".", "__bases__", ",", "{", "k", ":", "v", "for", "k", ",", "v", "in", "class_to_decorate", ".", "__dict__", ".", "items", "(", ")", "if", "k", "not", "in", "[", "'__dict__'", ",", "'__weakref__'", "]", "}", ")", "def", "get_extra_args_function", "(", "self", ")", ":", "declared", "=", "get_declared", "(", "self", ",", "parameter", ")", "def", "copy_declared", "(", ")", ":", "for", "k", ",", "v", "in", "declared", ".", "items", "(", ")", ":", "try", ":", "v", "=", "copy", "(", "v", ")", "except", "TypeError", ":", "pass", "# Not always possible to copy methods", "yield", "(", "k", ",", "v", ")", "copied_members", "=", "OrderedDict", "(", "copy_declared", "(", ")", ")", "self", ".", "__dict__", ".", "update", "(", "copied_members", ")", "return", "{", "parameter", ":", "copied_members", "}", "if", "add_init_kwargs", ":", "add_args_to_init_call", "(", "new_class", ",", "get_extra_args_function", ")", "else", ":", "add_init_call_hook", "(", "new_class", ",", "get_extra_args_function", ")", "setattr", "(", "new_class", ",", "'get_declared'", ",", "classmethod", "(", "get_declared", ")", ")", "setattr", "(", "new_class", ",", "'set_declared'", ",", "classmethod", "(", "set_declared", ")", ")", "return", "new_class", "return", "decorator" ]
Class decorator to enable classes to be defined in the style of django models.
That is, @declarative classes will get an additional argument to constructor,
containing an OrderedDict with all class members matching the specified type.

:param class member_class: Class(es) to collect
:param is_member: Function to determine if an object should be collected
:param str parameter: Name of constructor parameter to inject
:param bool add_init_kwargs: If constructor parameter should be injected (Default: True)
:param sort_key: Function to invoke on members to obtain ordering (Default is to use ordering from `creation_ordered`)

:type is_member: (object) -> bool
:type sort_key: (object) -> object
[ "Class", "decorator", "to", "enable", "classes", "to", "be", "defined", "in", "the", "style", "of", "django", "models", ".", "That", "is", "@declarative", "classes", "will", "get", "an", "additional", "argument", "to", "constructor", "containing", "an", "OrderedDict", "with", "all", "class", "members", "matching", "the", "specified", "type", "." ]
train
https://github.com/TriOptima/tri.declarative/blob/13d90d4c2a10934e37a4139e63d51a859fb3e303/lib/tri/declarative/__init__.py#L154-L209
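A minimal sketch of @declarative in the django-models style the docstring describes, assuming tri.declarative is installed; the collected members arrive in the constructor under the default 'members' parameter.

from tri.declarative import creation_ordered, declarative

@creation_ordered
class Field(object):
    pass

@declarative(Field)
class Form(object):
    username = Field()
    password = Field()

    def __init__(self, members):
        self.members = members  # OrderedDict of the declared Field instances

assert list(Form().members.keys()) == ['username', 'password']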
TriOptima/tri.declarative
lib/tri/declarative/__init__.py
get_signature
def get_signature(func):
    """
    :type func: Callable
    :rtype: str
    """
    try:
        return object.__getattribute__(func, '__tri_declarative_signature')
    except AttributeError:
        pass

    try:
        if sys.version_info[0] < 3:  # pragma: no mutate
            names, _, varkw, defaults = inspect.getargspec(func)  # pragma: no mutate
        else:
            names, _, varkw, defaults, _, _, _ = inspect.getfullargspec(func)  # pragma: no covererage
    except TypeError:
        return None

    first_arg_index = 1 if inspect.ismethod(func) else 0  # Skip self argument on methods

    number_of_defaults = len(defaults) if defaults else 0
    if number_of_defaults > 0:
        required = ','.join(sorted(names[first_arg_index:-number_of_defaults]))
        optional = ','.join(sorted(names[-number_of_defaults:]))
    else:
        required = ','.join(sorted(names[first_arg_index:]))
        optional = ''
    wildcard = '*' if varkw is not None else ''

    signature = '|'.join((required, optional, wildcard))
    try:
        object.__setattr__(func, '__tri_declarative_signature', signature)
    except TypeError:
        # For classes
        type.__setattr__(func, '__tri_declarative_signature', signature)
    except AttributeError:
        pass
    return signature
python
def get_signature(func):
    """
    :type func: Callable
    :rtype: str
    """
    try:
        return object.__getattribute__(func, '__tri_declarative_signature')
    except AttributeError:
        pass

    try:
        if sys.version_info[0] < 3:  # pragma: no mutate
            names, _, varkw, defaults = inspect.getargspec(func)  # pragma: no mutate
        else:
            names, _, varkw, defaults, _, _, _ = inspect.getfullargspec(func)  # pragma: no covererage
    except TypeError:
        return None

    first_arg_index = 1 if inspect.ismethod(func) else 0  # Skip self argument on methods

    number_of_defaults = len(defaults) if defaults else 0
    if number_of_defaults > 0:
        required = ','.join(sorted(names[first_arg_index:-number_of_defaults]))
        optional = ','.join(sorted(names[-number_of_defaults:]))
    else:
        required = ','.join(sorted(names[first_arg_index:]))
        optional = ''
    wildcard = '*' if varkw is not None else ''

    signature = '|'.join((required, optional, wildcard))
    try:
        object.__setattr__(func, '__tri_declarative_signature', signature)
    except TypeError:
        # For classes
        type.__setattr__(func, '__tri_declarative_signature', signature)
    except AttributeError:
        pass
    return signature
[ "def", "get_signature", "(", "func", ")", ":", "try", ":", "return", "object", ".", "__getattribute__", "(", "func", ",", "'__tri_declarative_signature'", ")", "except", "AttributeError", ":", "pass", "try", ":", "if", "sys", ".", "version_info", "[", "0", "]", "<", "3", ":", "# pragma: no mutate", "names", ",", "_", ",", "varkw", ",", "defaults", "=", "inspect", ".", "getargspec", "(", "func", ")", "# pragma: no mutate", "else", ":", "names", ",", "_", ",", "varkw", ",", "defaults", ",", "_", ",", "_", ",", "_", "=", "inspect", ".", "getfullargspec", "(", "func", ")", "# pragma: no covererage", "except", "TypeError", ":", "return", "None", "first_arg_index", "=", "1", "if", "inspect", ".", "ismethod", "(", "func", ")", "else", "0", "# Skip self argument on methods", "number_of_defaults", "=", "len", "(", "defaults", ")", "if", "defaults", "else", "0", "if", "number_of_defaults", ">", "0", ":", "required", "=", "','", ".", "join", "(", "sorted", "(", "names", "[", "first_arg_index", ":", "-", "number_of_defaults", "]", ")", ")", "optional", "=", "','", ".", "join", "(", "sorted", "(", "names", "[", "-", "number_of_defaults", ":", "]", ")", ")", "else", ":", "required", "=", "','", ".", "join", "(", "sorted", "(", "names", "[", "first_arg_index", ":", "]", ")", ")", "optional", "=", "''", "wildcard", "=", "'*'", "if", "varkw", "is", "not", "None", "else", "''", "signature", "=", "'|'", ".", "join", "(", "(", "required", ",", "optional", ",", "wildcard", ")", ")", "try", ":", "object", ".", "__setattr__", "(", "func", ",", "'__tri_declarative_signature'", ",", "signature", ")", "except", "TypeError", ":", "# For classes", "type", ".", "__setattr__", "(", "func", ",", "'__tri_declarative_signature'", ",", "signature", ")", "except", "AttributeError", ":", "pass", "return", "signature" ]
:type func: Callable
:rtype: str
[ ":", "type", "func", ":", "Callable", ":", "rtype", ":", "str" ]
train
https://github.com/TriOptima/tri.declarative/blob/13d90d4c2a10934e37a4139e63d51a859fb3e303/lib/tri/declarative/__init__.py#L299-L336
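A minimal sketch of get_signature, assuming it is importable from tri.declarative (it lives in the package __init__): the result encodes 'required|optional|wildcard', with names sorted alphabetically within each group.

from tri.declarative import get_signature

def f(b, a, x=1, **kwargs):
    pass

assert get_signature(f) == 'a,b|x|*'  # sorted required args, one default, **kwargs present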
TriOptima/tri.declarative
lib/tri/declarative/__init__.py
collect_namespaces
def collect_namespaces(values):
    """
    Gather mappings with keys of the shape '<base_key>__<sub_key>' as new dicts under '<base_key>', indexed by '<sub_key>'.

    >>> foo = dict(
    ...     foo__foo=1,
    ...     foo__bar=2,
    ...     bar__foo=3,
    ...     bar__bar=4,
    ...     foo_baz=5,
    ...     baz=6
    ... )
    >>> assert collect_namespaces(foo) == dict(
    ...     foo=dict(foo=1, bar=2),
    ...     bar=dict(foo=3, bar=4),
    ...     foo_baz=5,
    ...     baz=6
    ... )

    :type values: dict
    :rtype: dict
    """
    namespaces = {}
    result = dict(values)
    for key, value in values.items():
        parts = key.split('__', 1)  # pragma: no mutate
        if len(parts) == 2:
            prefix, name = parts
            if prefix not in namespaces:
                initial_namespace = values.get(prefix)
                if initial_namespace is None:
                    initial_namespace = {}
                elif not isinstance(initial_namespace, dict):
                    initial_namespace = {initial_namespace: True}
                namespaces[prefix] = initial_namespace
            namespaces[prefix][name] = result.pop(key)
    for prefix, namespace in namespaces.items():
        result[prefix] = namespace
    return result
python
def collect_namespaces(values):
    """
    Gather mappings with keys of the shape '<base_key>__<sub_key>' as new dicts under '<base_key>', indexed by '<sub_key>'.

    >>> foo = dict(
    ...     foo__foo=1,
    ...     foo__bar=2,
    ...     bar__foo=3,
    ...     bar__bar=4,
    ...     foo_baz=5,
    ...     baz=6
    ... )
    >>> assert collect_namespaces(foo) == dict(
    ...     foo=dict(foo=1, bar=2),
    ...     bar=dict(foo=3, bar=4),
    ...     foo_baz=5,
    ...     baz=6
    ... )

    :type values: dict
    :rtype: dict
    """
    namespaces = {}
    result = dict(values)
    for key, value in values.items():
        parts = key.split('__', 1)  # pragma: no mutate
        if len(parts) == 2:
            prefix, name = parts
            if prefix not in namespaces:
                initial_namespace = values.get(prefix)
                if initial_namespace is None:
                    initial_namespace = {}
                elif not isinstance(initial_namespace, dict):
                    initial_namespace = {initial_namespace: True}
                namespaces[prefix] = initial_namespace
            namespaces[prefix][name] = result.pop(key)
    for prefix, namespace in namespaces.items():
        result[prefix] = namespace
    return result
[ "def", "collect_namespaces", "(", "values", ")", ":", "namespaces", "=", "{", "}", "result", "=", "dict", "(", "values", ")", "for", "key", ",", "value", "in", "values", ".", "items", "(", ")", ":", "parts", "=", "key", ".", "split", "(", "'__'", ",", "1", ")", "# pragma: no mutate", "if", "len", "(", "parts", ")", "==", "2", ":", "prefix", ",", "name", "=", "parts", "if", "prefix", "not", "in", "namespaces", ":", "initial_namespace", "=", "values", ".", "get", "(", "prefix", ")", "if", "initial_namespace", "is", "None", ":", "initial_namespace", "=", "{", "}", "elif", "not", "isinstance", "(", "initial_namespace", ",", "dict", ")", ":", "initial_namespace", "=", "{", "initial_namespace", ":", "True", "}", "namespaces", "[", "prefix", "]", "=", "initial_namespace", "namespaces", "[", "prefix", "]", "[", "name", "]", "=", "result", ".", "pop", "(", "key", ")", "for", "prefix", ",", "namespace", "in", "namespaces", ".", "items", "(", ")", ":", "result", "[", "prefix", "]", "=", "namespace", "return", "result" ]
Gather mappings with keys of the shape '<base_key>__<sub_key>' as new dicts under '<base_key>', indexed by '<sub_key>'.

>>> foo = dict(
...     foo__foo=1,
...     foo__bar=2,
...     bar__foo=3,
...     bar__bar=4,
...     foo_baz=5,
...     baz=6
... )
>>> assert collect_namespaces(foo) == dict(
...     foo=dict(foo=1, bar=2),
...     bar=dict(foo=3, bar=4),
...     foo_baz=5,
...     baz=6
... )

:type values: dict
:rtype: dict
[ "Gather", "mappings", "with", "keys", "of", "the", "shape", "<base_key", ">", "__<sub_key", ">", "as", "new", "dicts", "under", "<base_key", ">", "indexed", "by", "<sub_key", ">", "." ]
train
https://github.com/TriOptima/tri.declarative/blob/13d90d4c2a10934e37a4139e63d51a859fb3e303/lib/tri/declarative/__init__.py#L440-L479
TriOptima/tri.declarative
lib/tri/declarative/__init__.py
extract_subkeys
def extract_subkeys(kwargs, prefix, defaults=None):
    """
    Extract mappings of the shape '<base_key>__<sub_key>' to new mappings under '<sub_key>'.

    >>> foo = {
    ...     'foo__foo': 1,
    ...     'foo__bar': 2,
    ...     'baz': 3,
    ... }
    >>> assert extract_subkeys(foo, 'foo', defaults={'quux': 4}) == {
    ...     'foo': 1,
    ...     'bar': 2,
    ...     'quux': 4,
    ... }

    :type kwargs: dict
    :rtype: dict
    """

    prefix += '__'
    result = {k[len(prefix):]: v for k, v in kwargs.items() if k.startswith(prefix)}
    if defaults is not None:
        return setdefaults(result, defaults)
    else:
        return result
python
def extract_subkeys(kwargs, prefix, defaults=None):
    """
    Extract mappings of the shape '<base_key>__<sub_key>' to new mappings under '<sub_key>'.

    >>> foo = {
    ...     'foo__foo': 1,
    ...     'foo__bar': 2,
    ...     'baz': 3,
    ... }
    >>> assert extract_subkeys(foo, 'foo', defaults={'quux': 4}) == {
    ...     'foo': 1,
    ...     'bar': 2,
    ...     'quux': 4,
    ... }

    :type kwargs: dict
    :rtype: dict
    """

    prefix += '__'
    result = {k[len(prefix):]: v for k, v in kwargs.items() if k.startswith(prefix)}
    if defaults is not None:
        return setdefaults(result, defaults)
    else:
        return result
[ "def", "extract_subkeys", "(", "kwargs", ",", "prefix", ",", "defaults", "=", "None", ")", ":", "prefix", "+=", "'__'", "result", "=", "{", "k", "[", "len", "(", "prefix", ")", ":", "]", ":", "v", "for", "k", ",", "v", "in", "kwargs", ".", "items", "(", ")", "if", "k", ".", "startswith", "(", "prefix", ")", "}", "if", "defaults", "is", "not", "None", ":", "return", "setdefaults", "(", "result", ",", "defaults", ")", "else", ":", "return", "result" ]
Extract mappings of the shape '<base_key>__<sub_key>' to new mappings under '<sub_key>'.

>>> foo = {
...     'foo__foo': 1,
...     'foo__bar': 2,
...     'baz': 3,
... }
>>> assert extract_subkeys(foo, 'foo', defaults={'quux': 4}) == {
...     'foo': 1,
...     'bar': 2,
...     'quux': 4,
... }

:type kwargs: dict
:rtype: dict
[ "Extract", "mappings", "of", "the", "shape", "<base_key", ">", "__<sub_key", ">", "to", "new", "mappings", "under", "<sub_key", ">", "." ]
train
https://github.com/TriOptima/tri.declarative/blob/13d90d4c2a10934e37a4139e63d51a859fb3e303/lib/tri/declarative/__init__.py#L482-L506
TriOptima/tri.declarative
lib/tri/declarative/__init__.py
setdefaults
def setdefaults(d, d2):
    """
    :type d: dict
    :type d2: dict
    :rtype: dict
    """
    for k, v in d2.items():
        d.setdefault(k, v)
    return d
python
def setdefaults(d, d2):
    """
    :type d: dict
    :type d2: dict
    :rtype: dict
    """
    for k, v in d2.items():
        d.setdefault(k, v)
    return d
[ "def", "setdefaults", "(", "d", ",", "d2", ")", ":", "for", "k", ",", "v", "in", "d2", ".", "items", "(", ")", ":", "d", ".", "setdefault", "(", "k", ",", "v", ")", "return", "d" ]
:type d: dict
:type d2: dict
:rtype: dict
[ ":", "type", "d", ":", "dict", ":", "type", "d2", ":", "dict", ":", "rtype", ":", "dict" ]
train
https://github.com/TriOptima/tri.declarative/blob/13d90d4c2a10934e37a4139e63d51a859fb3e303/lib/tri/declarative/__init__.py#L509-L517
TriOptima/tri.declarative
lib/tri/declarative/__init__.py
getattr_path
def getattr_path(obj, path):
    """
    Get an attribute path, as defined by a string separated by '__'.
    getattr_path(foo, 'a__b__c') is roughly equivalent to foo.a.b.c but
    will short circuit to return None if something on the path is None.
    """
    path = path.split('__')
    for name in path:
        obj = getattr(obj, name)
        if obj is None:
            return None
    return obj
python
def getattr_path(obj, path):
    """
    Get an attribute path, as defined by a string separated by '__'.
    getattr_path(foo, 'a__b__c') is roughly equivalent to foo.a.b.c but
    will short circuit to return None if something on the path is None.
    """
    path = path.split('__')
    for name in path:
        obj = getattr(obj, name)
        if obj is None:
            return None
    return obj
[ "def", "getattr_path", "(", "obj", ",", "path", ")", ":", "path", "=", "path", ".", "split", "(", "'__'", ")", "for", "name", "in", "path", ":", "obj", "=", "getattr", "(", "obj", ",", "name", ")", "if", "obj", "is", "None", ":", "return", "None", "return", "obj" ]
Get an attribute path, as defined by a string separated by '__'.
getattr_path(foo, 'a__b__c') is roughly equivalent to foo.a.b.c but
will short circuit to return None if something on the path is None.
[ "Get", "an", "attribute", "path", "as", "defined", "by", "a", "string", "separated", "by", "__", ".", "getattr_path", "(", "foo", "a__b__c", ")", "is", "roughly", "equivalent", "to", "foo", ".", "a", ".", "b", ".", "c", "but", "will", "short", "circuit", "to", "return", "None", "if", "something", "on", "the", "path", "is", "None", "." ]
train
https://github.com/TriOptima/tri.declarative/blob/13d90d4c2a10934e37a4139e63d51a859fb3e303/lib/tri/declarative/__init__.py#L704-L715
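A minimal sketch of getattr_path, assuming tri.declarative exports it; note the short circuit to None instead of an AttributeError when a link on the path is None.

from tri.declarative import getattr_path

class Node(object):
    pass

foo = Node()
foo.a = Node()
foo.a.b = Node()
foo.a.b.c = 17
assert getattr_path(foo, 'a__b__c') == 17

foo.a.b = None
assert getattr_path(foo, 'a__b__c') is None  # short circuits at the None link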
TriOptima/tri.declarative
lib/tri/declarative/__init__.py
setattr_path
def setattr_path(obj, path, value):
    """
    Set an attribute path, as defined by a string separated by '__'.
    setattr_path(foo, 'a__b__c', value) is equivalent to "foo.a.b.c = value".
    """
    path = path.split('__')
    o = obj
    for name in path[:-1]:
        o = getattr(o, name)
    setattr(o, path[-1], value)
    return obj
python
def setattr_path(obj, path, value):
    """
    Set an attribute path, as defined by a string separated by '__'.
    setattr_path(foo, 'a__b__c', value) is equivalent to "foo.a.b.c = value".
    """
    path = path.split('__')
    o = obj
    for name in path[:-1]:
        o = getattr(o, name)
    setattr(o, path[-1], value)
    return obj
[ "def", "setattr_path", "(", "obj", ",", "path", ",", "value", ")", ":", "path", "=", "path", ".", "split", "(", "'__'", ")", "o", "=", "obj", "for", "name", "in", "path", "[", ":", "-", "1", "]", ":", "o", "=", "getattr", "(", "o", ",", "name", ")", "setattr", "(", "o", ",", "path", "[", "-", "1", "]", ",", "value", ")", "return", "obj" ]
Set an attribute path, as defined by a string separated by '__'.
setattr_path(foo, 'a__b__c', value) is equivalent to "foo.a.b.c = value".
[ "Set", "an", "attribute", "path", "as", "defined", "by", "a", "string", "separated", "by", "__", ".", "setattr_path", "(", "foo", "a__b__c", "value", ")", "is", "equivalent", "to", "foo", ".", "a", ".", "b", ".", "c", "=", "value", "." ]
train
https://github.com/TriOptima/tri.declarative/blob/13d90d4c2a10934e37a4139e63d51a859fb3e303/lib/tri/declarative/__init__.py#L718-L728
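A companion sketch for setattr_path under the same assumptions.

from tri.declarative import getattr_path, setattr_path

class Node(object):
    pass

foo = Node()
foo.a = Node()
foo.a.b = Node()
setattr_path(foo, 'a__b__c', 42)  # same effect as foo.a.b.c = 42
assert getattr_path(foo, 'a__b__c') == 42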
TriOptima/tri.declarative
lib/tri/declarative/__init__.py
generate_rst_docs
def generate_rst_docs(directory, classes, missing_objects=None):
    """
    Generate documentation for tri.declarative APIs

    :param directory: directory to write the .rst files into
    :param classes: list of classes to generate documentation for
    :param missing_objects: tuple of objects to count as missing markers, if applicable
    """

    doc_by_filename = _generate_rst_docs(classes=classes, missing_objects=missing_objects)  # pragma: no mutate
    for filename, doc in doc_by_filename:  # pragma: no mutate
        with open(directory + filename, 'w') as f2:  # pragma: no mutate
            f2.write(doc)
python
def generate_rst_docs(directory, classes, missing_objects=None):
    """
    Generate documentation for tri.declarative APIs

    :param directory: directory to write the .rst files into
    :param classes: list of classes to generate documentation for
    :param missing_objects: tuple of objects to count as missing markers, if applicable
    """

    doc_by_filename = _generate_rst_docs(classes=classes, missing_objects=missing_objects)  # pragma: no mutate
    for filename, doc in doc_by_filename:  # pragma: no mutate
        with open(directory + filename, 'w') as f2:  # pragma: no mutate
            f2.write(doc)
[ "def", "generate_rst_docs", "(", "directory", ",", "classes", ",", "missing_objects", "=", "None", ")", ":", "doc_by_filename", "=", "_generate_rst_docs", "(", "classes", "=", "classes", ",", "missing_objects", "=", "missing_objects", ")", "# pragma: no mutate", "for", "filename", ",", "doc", "in", "doc_by_filename", ":", "# pragma: no mutate", "with", "open", "(", "directory", "+", "filename", ",", "'w'", ")", "as", "f2", ":", "# pragma: no mutate", "f2", ".", "write", "(", "doc", ")" ]
Generate documentation for tri.declarative APIs

:param directory: directory to write the .rst files into
:param classes: list of classes to generate documentation for
:param missing_objects: tuple of objects to count as missing markers, if applicable
[ "Generate", "documentation", "for", "tri", ".", "declarative", "APIs" ]
train
https://github.com/TriOptima/tri.declarative/blob/13d90d4c2a10934e37a4139e63d51a859fb3e303/lib/tri/declarative/__init__.py#L844-L856
BreakingBytes/simkit
simkit/contrib/lazy_looping_calculator.py
reg_copy
def reg_copy(reg, keys=None):
    """
    Make a copy of a subset of a registry.

    :param reg: source registry
    :param keys: keys of registry items to copy
    :return: copied registry subset
    """
    if keys is None:
        keys = reg.keys()
    reg_cls = type(reg)
    new_reg = reg_cls()
    mk = {}  # empty dictionary for meta keys
    # loop over registry meta names
    for m in reg_cls.meta_names:
        mstar = getattr(reg, m, None)  # current value of metakey in registry
        if not mstar:
            # if there is no value, the value is empty or None, set it to None
            # it's never false or zero, should be dictionary of reg items
            mk[m] = None
            continue
        mk[m] = {}  # empty dictionary of registry meta
        # loop over keys to copy and set values of meta keys for each reg item
        for k in keys:
            kstar = mstar.get(k)
            # if key exists in registry meta and is not None, then copy it
            if kstar is not None:
                mk[m][k] = kstar
    new_reg.register({k: reg[k] for k in keys}, **mk)
    return new_reg
python
def reg_copy(reg, keys=None):
    """
    Make a copy of a subset of a registry.

    :param reg: source registry
    :param keys: keys of registry items to copy
    :return: copied registry subset
    """
    if keys is None:
        keys = reg.keys()
    reg_cls = type(reg)
    new_reg = reg_cls()
    mk = {}  # empty dictionary for meta keys
    # loop over registry meta names
    for m in reg_cls.meta_names:
        mstar = getattr(reg, m, None)  # current value of metakey in registry
        if not mstar:
            # if there is no value, the value is empty or None, set it to None
            # it's never false or zero, should be dictionary of reg items
            mk[m] = None
            continue
        mk[m] = {}  # empty dictionary of registry meta
        # loop over keys to copy and set values of meta keys for each reg item
        for k in keys:
            kstar = mstar.get(k)
            # if key exists in registry meta and is not None, then copy it
            if kstar is not None:
                mk[m][k] = kstar
    new_reg.register({k: reg[k] for k in keys}, **mk)
    return new_reg
[ "def", "reg_copy", "(", "reg", ",", "keys", "=", "None", ")", ":", "if", "keys", "is", "None", ":", "keys", "=", "reg", ".", "keys", "(", ")", "reg_cls", "=", "type", "(", "reg", ")", "new_reg", "=", "reg_cls", "(", ")", "mk", "=", "{", "}", "# empty dictionary for meta keys", "# loop over registry meta names", "for", "m", "in", "reg_cls", ".", "meta_names", ":", "mstar", "=", "getattr", "(", "reg", ",", "m", ",", "None", ")", "# current value of metakey in registry", "if", "not", "mstar", ":", "# if there is no value, the value is empty or None, set it to None", "# it's never false or zero, should be dictionary of reg items", "mk", "[", "m", "]", "=", "None", "continue", "mk", "[", "m", "]", "=", "{", "}", "# emtpy dictionary of registry meta", "# loop over keys to copy and set values of meta keys for each reg item", "for", "k", "in", "keys", ":", "kstar", "=", "mstar", ".", "get", "(", "k", ")", "# if key exists in registry meta and is not None, then copy it", "if", "kstar", "is", "not", "None", ":", "mk", "[", "m", "]", "[", "k", "]", "=", "kstar", "new_reg", ".", "register", "(", "{", "k", ":", "reg", "[", "k", "]", "for", "k", "in", "keys", "}", ",", "*", "*", "mk", ")", "return", "new_reg" ]
Make a copy of a subset of a registry.

:param reg: source registry
:param keys: keys of registry items to copy
:return: copied registry subset
[ "Make", "a", "copy", "of", "a", "subset", "of", "a", "registry", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/contrib/lazy_looping_calculator.py#L14-L43
BreakingBytes/simkit
simkit/contrib/lazy_looping_calculator.py
LazyLoopingCalculator.get_covariance
def get_covariance(datargs, outargs, vargs, datvar, outvar):
    """
    Get covariance matrix.

    :param datargs: data arguments
    :param outargs: output arguments
    :param vargs: variable arguments
    :param datvar: variance of data arguments
    :param outvar: variance of output arguments
    :return: covariance
    """
    # number of formula arguments that are not constant
    argn = len(vargs)
    # number of observations must be the same for all vargs
    nobs = 1
    c = []
    # FIXME: can just loop over vargs, don't need indices I think, do we?
    for m in xrange(argn):
        a = vargs[m]  # get the variable formula arg in vargs at idx=m
        try:
            a = datargs[a]  # get the calculation data arg
        except (KeyError, TypeError):
            a = outargs[a]  # get the calculation output arg
            if not isinstance(a, basestring):
                # calculation arg might be sequence (arg, idx, [unit])
                a = a[0]  # if a is a sequence, get just the arg from a[0]
            LOGGER.debug('using output variance key: %r', a)
            avar = outvar[a]  # get variance from output registry
        else:
            if not isinstance(a, basestring):
                # calculation arg might be sequence (arg, idx, [unit])
                a = a[0]  # if a is a sequence, get just the arg from a[0]
            LOGGER.debug('using data variance key: %r', a)
            avar = datvar[a]  # get variance from data registry
        d = []  # avar is a dictionary with the variance of "a" vs all other vargs
        for n in xrange(argn):
            # FIXME: just get all of the calculation args one time
            b = vargs[n]  # get the variable formula arg in vargs at idx=n
            try:
                b = datargs[b]  # get the calculation data arg
            except (KeyError, TypeError):
                b = outargs[b]  # get variance from output registry
            if not isinstance(b, basestring):
                # calculation arg might be sequence (arg, idx, [unit])
                b = b[0]  # if b is a sequence, get just the arg from b[0]
            LOGGER.debug('using variance key: %r', b)
            d.append(avar.get(b, 0.0))  # add covariance to sequence
            # figure out number of observations from longest covariance
            # only works if nobs is in one of the covariance
            # fails if nobs > 1, but covariance shape doesn't have nobs!!!
            # eg: if variance for data is uniform for all observations!!!
            try:
                nobs = max(nobs, len(d[-1]))
            except (TypeError, ValueError):
                LOGGER.debug('c of %s vs %s = %g', a, b, d[-1])
        LOGGER.debug('d:\n%r', d)
        c.append(d)
    # covariance matrix is initially zeros
    cov = np.zeros((nobs, argn, argn))
    # loop over arguments in both directions, fill in covariance
    for m in xrange(argn):
        d = c.pop()
        LOGGER.debug('pop row %d:\n%r', argn-1-m, d)
        for n in xrange(argn):
            LOGGER.debug('pop col %d:\n%r', argn - 1 - n, d[-1])
            cov[:, argn-1-m, argn-1-n] = d.pop()
    if nobs == 1:
        cov = cov.squeeze()  # squeeze out any extra dimensions
    LOGGER.debug('covariance:\n%r', cov)
    return cov
python
def get_covariance(datargs, outargs, vargs, datvar, outvar):
    """
    Get covariance matrix.

    :param datargs: data arguments
    :param outargs: output arguments
    :param vargs: variable arguments
    :param datvar: variance of data arguments
    :param outvar: variance of output arguments
    :return: covariance
    """
    # number of formula arguments that are not constant
    argn = len(vargs)
    # number of observations must be the same for all vargs
    nobs = 1
    c = []
    # FIXME: can just loop over vargs, don't need indices I think, do we?
    for m in xrange(argn):
        a = vargs[m]  # get the variable formula arg in vargs at idx=m
        try:
            a = datargs[a]  # get the calculation data arg
        except (KeyError, TypeError):
            a = outargs[a]  # get the calculation output arg
            if not isinstance(a, basestring):
                # calculation arg might be sequence (arg, idx, [unit])
                a = a[0]  # if a is a sequence, get just the arg from a[0]
            LOGGER.debug('using output variance key: %r', a)
            avar = outvar[a]  # get variance from output registry
        else:
            if not isinstance(a, basestring):
                # calculation arg might be sequence (arg, idx, [unit])
                a = a[0]  # if a is a sequence, get just the arg from a[0]
            LOGGER.debug('using data variance key: %r', a)
            avar = datvar[a]  # get variance from data registry
        d = []  # avar is a dictionary with the variance of "a" vs all other vargs
        for n in xrange(argn):
            # FIXME: just get all of the calculation args one time
            b = vargs[n]  # get the variable formula arg in vargs at idx=n
            try:
                b = datargs[b]  # get the calculation data arg
            except (KeyError, TypeError):
                b = outargs[b]  # get variance from output registry
            if not isinstance(b, basestring):
                # calculation arg might be sequence (arg, idx, [unit])
                b = b[0]  # if b is a sequence, get just the arg from b[0]
            LOGGER.debug('using variance key: %r', b)
            d.append(avar.get(b, 0.0))  # add covariance to sequence
            # figure out number of observations from longest covariance
            # only works if nobs is in one of the covariance
            # fails if nobs > 1, but covariance shape doesn't have nobs!!!
            # eg: if variance for data is uniform for all observations!!!
            try:
                nobs = max(nobs, len(d[-1]))
            except (TypeError, ValueError):
                LOGGER.debug('c of %s vs %s = %g', a, b, d[-1])
        LOGGER.debug('d:\n%r', d)
        c.append(d)
    # covariance matrix is initially zeros
    cov = np.zeros((nobs, argn, argn))
    # loop over arguments in both directions, fill in covariance
    for m in xrange(argn):
        d = c.pop()
        LOGGER.debug('pop row %d:\n%r', argn-1-m, d)
        for n in xrange(argn):
            LOGGER.debug('pop col %d:\n%r', argn - 1 - n, d[-1])
            cov[:, argn-1-m, argn-1-n] = d.pop()
    if nobs == 1:
        cov = cov.squeeze()  # squeeze out any extra dimensions
    LOGGER.debug('covariance:\n%r', cov)
    return cov
[ "def", "get_covariance", "(", "datargs", ",", "outargs", ",", "vargs", ",", "datvar", ",", "outvar", ")", ":", "# number of formula arguments that are not constant", "argn", "=", "len", "(", "vargs", ")", "# number of observations must be the same for all vargs", "nobs", "=", "1", "c", "=", "[", "]", "# FIXME: can just loop ver varg, don't need indices I think, do we?", "for", "m", "in", "xrange", "(", "argn", ")", ":", "a", "=", "vargs", "[", "m", "]", "# get the variable formula arg in vargs at idx=m", "try", ":", "a", "=", "datargs", "[", "a", "]", "# get the calculation data arg", "except", "(", "KeyError", ",", "TypeError", ")", ":", "a", "=", "outargs", "[", "a", "]", "# get the calculation output arg", "if", "not", "isinstance", "(", "a", ",", "basestring", ")", ":", "# calculation arg might be sequence (arg, idx, [unit])", "a", "=", "a", "[", "0", "]", "# if a is a sequence, get just the arg from a[0]", "LOGGER", ".", "debug", "(", "'using output variance key: %r'", ",", "a", ")", "avar", "=", "outvar", "[", "a", "]", "# get variance from output registry", "else", ":", "if", "not", "isinstance", "(", "a", ",", "basestring", ")", ":", "# calculation arg might be sequence (arg, idx, [unit])", "a", "=", "a", "[", "0", "]", "# if a is a sequence, get just the arg from a[0]", "LOGGER", ".", "debug", "(", "'using data variance key: %r'", ",", "a", ")", "avar", "=", "datvar", "[", "a", "]", "# get variance from data registry", "d", "=", "[", "]", "# avar is a dictionary with the variance of \"a\" vs all other vargs", "for", "n", "in", "xrange", "(", "argn", ")", ":", "# FIXME: just get all of the calculation args one time", "b", "=", "vargs", "[", "n", "]", "# get the variable formula arg in vargs at idx=n", "try", ":", "b", "=", "datargs", "[", "b", "]", "# get the calculation data arg", "except", "(", "KeyError", ",", "TypeError", ")", ":", "b", "=", "outargs", "[", "b", "]", "# get variance from output registry", "if", "not", "isinstance", "(", "b", ",", "basestring", ")", ":", "# calculation arg might be sequence (arg, idx, [unit])", "b", "=", "b", "[", "0", "]", "# if a is a sequence, get just the arg from b[0]", "LOGGER", ".", "debug", "(", "'using variance key: %r'", ",", "b", ")", "d", ".", "append", "(", "avar", ".", "get", "(", "b", ",", "0.0", ")", ")", "# add covariance to sequence", "# figure out number of observations from longest covariance", "# only works if nobs is in one of the covariance", "# fails if nobs > 1, but covariance shape doesn't have nobs!!!", "# eg: if variance for data is uniform for all observations!!!", "try", ":", "nobs", "=", "max", "(", "nobs", ",", "len", "(", "d", "[", "-", "1", "]", ")", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "LOGGER", ".", "debug", "(", "'c of %s vs %s = %g'", ",", "a", ",", "b", ",", "d", "[", "-", "1", "]", ")", "LOGGER", ".", "debug", "(", "'d:\\n%r'", ",", "d", ")", "c", ".", "append", "(", "d", ")", "# covariance matrix is initially zeros", "cov", "=", "np", ".", "zeros", "(", "(", "nobs", ",", "argn", ",", "argn", ")", ")", "# loop over arguments in both directions, fill in covariance", "for", "m", "in", "xrange", "(", "argn", ")", ":", "d", "=", "c", ".", "pop", "(", ")", "LOGGER", ".", "debug", "(", "'pop row %d:\\n%r'", ",", "argn", "-", "1", "-", "m", ",", "d", ")", "for", "n", "in", "xrange", "(", "argn", ")", ":", "LOGGER", ".", "debug", "(", "'pop col %d:\\n%r'", ",", "argn", "-", "1", "-", "n", ",", "d", "[", "-", "1", "]", ")", "cov", "[", ":", ",", "argn", "-", "1", "-", "m", ",", "argn", 
"-", "1", "-", "n", "]", "=", "d", ".", "pop", "(", ")", "if", "nobs", "==", "1", ":", "cov", "=", "cov", ".", "squeeze", "(", ")", "# squeeze out any extra dimensions", "LOGGER", ".", "debug", "(", "'covariance:\\n%r'", ",", "cov", ")", "return", "cov" ]
Get covariance matrix.

:param datargs: data arguments
:param outargs: output arguments
:param vargs: variable arguments
:param datvar: variance of data arguments
:param outvar: variance of output arguments
:return: covariance
[ "Get", "covariance", "matrix", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/contrib/lazy_looping_calculator.py#L64-L134
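A self-contained numpy sketch of the assembly pattern get_covariance implements, without calling simkit: per-pair covariance vectors of length nobs are stacked into an (nobs, argn, argn) array, one covariance matrix per observation, with missing pairs defaulting to zero. All names and values here are illustrative.

import numpy as np

vargs = ['x', 'y']  # the non-constant formula arguments
nobs = 3            # observations per argument
argn = len(vargs)
# var[a][b] plays the role of the registry variance: cov(a, b) per observation
var = {'x': {'x': [1.0, 1.0, 1.0], 'y': [0.1, 0.2, 0.3]},
       'y': {'x': [0.1, 0.2, 0.3], 'y': [2.0, 2.0, 2.0]}}

cov = np.zeros((nobs, argn, argn))
for m, a in enumerate(vargs):
    for n, b in enumerate(vargs):
        cov[:, m, n] = var[a].get(b, 0.0)  # zero where no covariance is recorded
assert cov.shape == (3, 2, 2)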
BreakingBytes/simkit
simkit/contrib/lazy_looping_calculator.py
LazyLoopingCalculator.calculate
def calculate(self, calc, formula_reg, data_reg, out_reg, timestep=None, idx=None):
    """
    Calculate looping over specified repeat arguments.

    :param calc: Calculation to loop over.
    :param formula_reg: Formula registry
    :param data_reg: Data registry
    :param out_reg: Outputs registry
    :param timestep: timestep used for dynamic calcs
    :param idx: index used in dynamic calcs
    """
    # the superclass Calculator.calculate() method
    base_calculator = super(LazyLoopingCalculator, self).calculate
    # call base calculator and return if there are no repeat args
    if not self.repeat_args:
        base_calculator(calc, formula_reg, data_reg, out_reg, timestep, idx)
        return
    # make dictionaries of the calculation data and outputs argument maps
    # this maps what the formulas and registries call the repeats arguments
    data_rargs, out_rargs = {}, {}  # allocate dictionaries for repeat args
    calc_data = calc['args'].get('data')
    calc_outs = calc['args'].get('outputs')
    # get dictionaries of repeat args from calculation data and outputs
    for rarg in self.repeat_args:
        # rarg could be either data or output so try both
        try:
            data_rargs[rarg] = calc_data[rarg]
        except (KeyError, TypeError):
            out_rargs[rarg] = calc_outs[rarg]
    # get values of repeat data and outputs from registries
    rargs = dict(index_registry(data_rargs, data_reg, timestep, idx),
                 **index_registry(out_rargs, out_reg, timestep, idx))
    rargkeys, rargvals = zip(*rargs.iteritems())  # split keys and values
    rargvals = zip(*rargvals)  # reshuffle values, should be same size?
    # allocate dictionary of empty numpy arrays for each return value
    returns = calc['returns']  # return keys
    retvals = {rv: [] for rv in returns}  # empty dictionary of return vals
    retvalu = {rv: None for rv in returns}  # dictionary of return units
    ret_var = {rv: {rv: [] for rv in returns} for rv in returns}  # variances
    ret_unc = {rv: {rv: [] for rv in returns} for rv in returns}  # uncertainty
    ret_jac = dict.fromkeys(returns)  # jacobian
    # get calc data and outputs keys to copy from registries
    try:
        calc_data_keys = calc_data.values()
    except (AttributeError, TypeError):
        calc_data_keys = []  # if there are no data, leave it empty
    try:
        calc_outs_keys = calc_outs.values()
    except (AttributeError, TypeError):
        calc_outs_keys = []  # if there are no outputs, leave it empty
    # copy returns and this calculation's output arguments from output reg
    data_reg_copy = reg_copy(data_reg, calc_data_keys)
    out_reg_copy = reg_copy(out_reg, returns + calc_outs_keys)
    # loop over first repeat arg values and enumerate numpy indices as n
    for vals in rargvals:
        rargs_keys = dict(zip(rargkeys, vals))
        # this is the magic or garbage depending on how you look at it,
        # change the registry copies to only contain the values for this
        # iteration of the repeats
        # TODO: instead of using copies rewrite index_registry to do this
        # copies means that calculations can't use a registry backend that
        # uses shared memory, which will limit ability to run asynchronously
        for k, v in data_rargs.iteritems():
            data_reg_copy[v] = rargs_keys[k]
        for k, v in out_rargs.iteritems():
            out_reg_copy[v] = rargs_keys[k]
        # run base calculator to get retvals, var, unc and jac
        base_calculator(calc, formula_reg, data_reg_copy, out_reg_copy,
                        timestep, idx)
        # re-assign retvals for this index of repeats
        for rv, rval in retvals.iteritems():
            rval.append(out_reg_copy[rv].m)  # append magnitude to returns
            retvalu[rv] = out_reg_copy[rv].u  # save units for this repeat
            # re-assign variance for this index of repeats
            if out_reg_copy.variance.get(rv) is None:
                continue
            for rv2, rval2 in ret_var.iteritems():
                rval2[rv].append(out_reg_copy.variance[rv2][rv])
                # uncertainty only on diagonal of variance
                if rv == rv2:
                    ret_unc[rv][rv2].append(out_reg_copy.uncertainty[rv][rv2])
                else:
                    # FIXME: inefficient to get length every iteration!
                    unc_size = len(out_reg_copy.uncertainty[rv][rv])
                    ret_unc[rv][rv2].append(Q_([0.]*unc_size, 'percent'))
            # jacobian is dictionary of returns versus arguments
            if ret_jac[rv] is None:
                # first time through create dictionary of sensitivities
                ret_jac[rv] = {o: v for o, v in
                               out_reg_copy.jacobian[rv].iteritems()}
            else:
                # next time through, vstack the sensitivities to existing
                for o, v in out_reg_copy.jacobian[rv].iteritems():
                    ret_jac[rv][o] = np.vstack((ret_jac[rv][o], v))
            LOGGER.debug('ret_jac:\n%r', ret_jac)
    # TODO: handle jacobian for repeat args and for dynamic simulations
    # apply units if they were
    for k in retvals:
        if retvalu[k] is not None:
            if retvalu[k] == out_reg[k].u:
                retvals[k] = Q_(retvals[k], retvalu[k])
            else:
                retvals[k] = Q_(retvals[k], retvalu[k]).to(out_reg[k].u)
    # put return values into output registry
    if idx is None:
        out_reg.update(retvals)
        out_reg.variance.update(ret_var)
        out_reg.uncertainty.update(ret_unc)
        out_reg.jacobian.update(ret_jac)
    else:
        for k, v in retvals:
            out_reg[k][idx] = v
python
def calculate(self, calc, formula_reg, data_reg, out_reg, timestep=None,
              idx=None):
    """
    Calculate looping over specified repeat arguments.

    :param calc: Calculation to loop over.
    :param formula_reg: Formula registry
    :param data_reg: Data registry
    :param out_reg: Outputs registry
    :param timestep: timestep used for dynamic calcs
    :param idx: index used in dynamic calcs
    """
    # the superclass Calculator.calculate() method
    base_calculator = super(LazyLoopingCalculator, self).calculate
    # call base calculator and return if there are no repeat args
    if not self.repeat_args:
        base_calculator(calc, formula_reg, data_reg, out_reg, timestep, idx)
        return
    # make dictionaries of the calculation data and outputs argument maps
    # this maps what the formulas and registries call the repeats arguments
    data_rargs, out_rargs = {}, {}  # allocate dictionaries for repeat args
    calc_data = calc['args'].get('data')
    calc_outs = calc['args'].get('outputs')
    # get dictionaries of repeat args from calculation data and outputs
    for rarg in self.repeat_args:
        # rarg could be either data or output so try both
        try:
            data_rargs[rarg] = calc_data[rarg]
        except (KeyError, TypeError):
            out_rargs[rarg] = calc_outs[rarg]
    # get values of repeat data and outputs from registries
    rargs = dict(index_registry(data_rargs, data_reg, timestep, idx),
                 **index_registry(out_rargs, out_reg, timestep, idx))
    rargkeys, rargvals = zip(*rargs.iteritems())  # split keys and values
    rargvals = zip(*rargvals)  # reshuffle values, should be same size?
    # allocate dictionary of empty numpy arrays for each return value
    returns = calc['returns']  # return keys
    retvals = {rv: [] for rv in returns}  # empty dictionary of return vals
    retvalu = {rv: None for rv in returns}  # dictionary of return units
    ret_var = {rv: {rv: [] for rv in returns} for rv in returns}  # variances
    ret_unc = {rv: {rv: [] for rv in returns} for rv in returns}  # uncertainty
    ret_jac = dict.fromkeys(returns)  # jacobian
    # get calc data and outputs keys to copy from registries
    try:
        calc_data_keys = calc_data.values()
    except (AttributeError, TypeError):
        calc_data_keys = []  # if there are no data, leave it empty
    try:
        calc_outs_keys = calc_outs.values()
    except (AttributeError, TypeError):
        calc_outs_keys = []  # if there are no outputs, leave it empty
    # copy returns and this calculations output arguments from output reg
    data_reg_copy = reg_copy(data_reg, calc_data_keys)
    out_reg_copy = reg_copy(out_reg, returns + calc_outs_keys)
    # loop over first repeat arg values and enumerate numpy indices as n
    for vals in rargvals:
        rargs_keys = dict(zip(rargkeys, vals))
        # this is the magic or garbage depending on how you look at it,
        # change the registry copies to only contain the values for this
        # iteration of the repeats
        # TODO: instead of using copies rewrite index_registry to do this
        # copies means that calculations can't use a registry backend that
        # uses shared memory, which will limit ability to run asynchronously
        for k, v in data_rargs.iteritems():
            data_reg_copy[v] = rargs_keys[k]
        for k, v in out_rargs.iteritems():
            out_reg_copy[v] = rargs_keys[k]
        # run base calculator to get retvals, var, unc and jac
        base_calculator(calc, formula_reg, data_reg_copy, out_reg_copy,
                        timestep, idx)
        # re-assign retvals for this index of repeats
        for rv, rval in retvals.iteritems():
            rval.append(out_reg_copy[rv].m)  # append magnitude to returns
            retvalu[rv] = out_reg_copy[rv].u  # save units for this repeat
            # re-assign variance for this index of repeats
            if out_reg_copy.variance.get(rv) is None:
                continue
            for rv2, rval2 in ret_var.iteritems():
                rval2[rv].append(out_reg_copy.variance[rv2][rv])
                # uncertainty only on diagonal of variance
                if rv == rv2:
                    ret_unc[rv][rv2].append(
                        out_reg_copy.uncertainty[rv][rv2])
                else:
                    # FIXME: inefficient to get length every iteration!
                    unc_size = len(out_reg_copy.uncertainty[rv][rv])
                    ret_unc[rv][rv2].append(Q_([0.]*unc_size, 'percent'))
            # jacobian is dictionary of returns versus arguments
            if ret_jac[rv] is None:
                # first time through create dictionary of sensitivities
                ret_jac[rv] = {
                    o: v for o, v in out_reg_copy.jacobian[rv].iteritems()}
            else:
                # next time through, vstack the sensitivities to existing
                for o, v in out_reg_copy.jacobian[rv].iteritems():
                    ret_jac[rv][o] = np.vstack((ret_jac[rv][o], v))
            LOGGER.debug('ret_jac:\n%r', ret_jac)
    # TODO: handle jacobian for repeat args and for dynamic simulations
    # apply units if they were returned
    for k in retvals:
        if retvalu[k] is not None:
            if retvalu[k] == out_reg[k].u:
                retvals[k] = Q_(retvals[k], retvalu[k])
            else:
                retvals[k] = Q_(retvals[k], retvalu[k]).to(out_reg[k].u)
    # put return values into output registry
    if idx is None:
        out_reg.update(retvals)
        out_reg.variance.update(ret_var)
        out_reg.uncertainty.update(ret_unc)
        out_reg.jacobian.update(ret_jac)
    else:
        for k, v in retvals.iteritems():
            out_reg[k][idx] = v
[ "def", "calculate", "(", "self", ",", "calc", ",", "formula_reg", ",", "data_reg", ",", "out_reg", ",", "timestep", "=", "None", ",", "idx", "=", "None", ")", ":", "# the superclass Calculator.calculate() method", "base_calculator", "=", "super", "(", "LazyLoopingCalculator", ",", "self", ")", ".", "calculate", "# call base calculator and return if there are no repeat args", "if", "not", "self", ".", "repeat_args", ":", "base_calculator", "(", "calc", ",", "formula_reg", ",", "data_reg", ",", "out_reg", ",", "timestep", ",", "idx", ")", "return", "# make dictionaries of the calculation data and outputs argument maps", "# this maps what the formulas and registries call the repeats arguments", "data_rargs", ",", "out_rargs", "=", "{", "}", ",", "{", "}", "# allocate dictionaries for repeat args", "calc_data", "=", "calc", "[", "'args'", "]", ".", "get", "(", "'data'", ")", "calc_outs", "=", "calc", "[", "'args'", "]", ".", "get", "(", "'outputs'", ")", "# get dictionaries of repeat args from calculation data and outputs", "for", "rarg", "in", "self", ".", "repeat_args", ":", "# rarg could be either data or output so try both", "try", ":", "data_rargs", "[", "rarg", "]", "=", "calc_data", "[", "rarg", "]", "except", "(", "KeyError", ",", "TypeError", ")", ":", "out_rargs", "[", "rarg", "]", "=", "calc_outs", "[", "rarg", "]", "# get values of repeat data and outputs from registries", "rargs", "=", "dict", "(", "index_registry", "(", "data_rargs", ",", "data_reg", ",", "timestep", ",", "idx", ")", ",", "*", "*", "index_registry", "(", "out_rargs", ",", "out_reg", ",", "timestep", ",", "idx", ")", ")", "rargkeys", ",", "rargvals", "=", "zip", "(", "*", "rargs", ".", "iteritems", "(", ")", ")", "# split keys and values", "rargvals", "=", "zip", "(", "*", "rargvals", ")", "# reshuffle values, should be same size?", "# allocate dictionary of empty numpy arrays for each return value", "returns", "=", "calc", "[", "'returns'", "]", "# return keys", "retvals", "=", "{", "rv", ":", "[", "]", "for", "rv", "in", "returns", "}", "# empty dictionary of return vals", "retvalu", "=", "{", "rv", ":", "None", "for", "rv", "in", "returns", "}", "# dictionary of return units", "ret_var", "=", "{", "rv", ":", "{", "rv", ":", "[", "]", "for", "rv", "in", "returns", "}", "for", "rv", "in", "returns", "}", "# variances", "ret_unc", "=", "{", "rv", ":", "{", "rv", ":", "[", "]", "for", "rv", "in", "returns", "}", "for", "rv", "in", "returns", "}", "# uncertainty", "ret_jac", "=", "dict", ".", "fromkeys", "(", "returns", ")", "# jacobian", "# get calc data and outputs keys to copy from registries", "try", ":", "calc_data_keys", "=", "calc_data", ".", "values", "(", ")", "except", "(", "AttributeError", ",", "TypeError", ")", ":", "calc_data_keys", "=", "[", "]", "# if there are no data, leave it empty", "try", ":", "calc_outs_keys", "=", "calc_outs", ".", "values", "(", ")", "except", "(", "AttributeError", ",", "TypeError", ")", ":", "calc_outs_keys", "=", "[", "]", "# if there are no outputs, leave it empty", "# copy returns and this calculations output arguments from output reg", "data_reg_copy", "=", "reg_copy", "(", "data_reg", ",", "calc_data_keys", ")", "out_reg_copy", "=", "reg_copy", "(", "out_reg", ",", "returns", "+", "calc_outs_keys", ")", "# loop over first repeat arg values and enumerate numpy indices as n", "for", "vals", "in", "rargvals", ":", "rargs_keys", "=", "dict", "(", "zip", "(", "rargkeys", ",", "vals", ")", ")", "# this is the magic or garbage depending on how you look at it,", "# change 
the registry copies to only contain the values for this", "# iteration of the repeats", "# TODO: instead of using copies rewrite index_registry to do this", "# copies means that calculations can't use a registry backend that", "# uses shared memory, which will limit ability to run asynchronously", "for", "k", ",", "v", "in", "data_rargs", ".", "iteritems", "(", ")", ":", "data_reg_copy", "[", "v", "]", "=", "rargs_keys", "[", "k", "]", "for", "k", ",", "v", "in", "out_rargs", ".", "iteritems", "(", ")", ":", "out_reg_copy", "[", "v", "]", "=", "rargs_keys", "[", "k", "]", "# run base calculator to get retvals, var, unc and jac", "base_calculator", "(", "calc", ",", "formula_reg", ",", "data_reg_copy", ",", "out_reg_copy", ",", "timestep", ",", "idx", ")", "# re-assign retvals for this index of repeats", "for", "rv", ",", "rval", "in", "retvals", ".", "iteritems", "(", ")", ":", "rval", ".", "append", "(", "out_reg_copy", "[", "rv", "]", ".", "m", ")", "# append magnitude to returns", "retvalu", "[", "rv", "]", "=", "out_reg_copy", "[", "rv", "]", ".", "u", "# save units for this repeat", "# re-assign variance for this index of repeats", "if", "out_reg_copy", ".", "variance", ".", "get", "(", "rv", ")", "is", "None", ":", "continue", "for", "rv2", ",", "rval2", "in", "ret_var", ".", "iteritems", "(", ")", ":", "rval2", "[", "rv", "]", ".", "append", "(", "out_reg_copy", ".", "variance", "[", "rv2", "]", "[", "rv", "]", ")", "# uncertainty only on diagonal of variance", "if", "rv", "==", "rv2", ":", "ret_unc", "[", "rv", "]", "[", "rv2", "]", ".", "append", "(", "out_reg_copy", ".", "uncertainty", "[", "rv", "]", "[", "rv2", "]", ")", "else", ":", "# FIXME: inefficient to get length every iteration!", "unc_size", "=", "len", "(", "out_reg_copy", ".", "uncertainty", "[", "rv", "]", "[", "rv", "]", ")", "ret_unc", "[", "rv", "]", "[", "rv2", "]", ".", "append", "(", "Q_", "(", "[", "0.", "]", "*", "unc_size", ",", "'percent'", ")", ")", "# jacobian is dictionary of returns versus arguments", "if", "ret_jac", "[", "rv", "]", "is", "None", ":", "# first time through create dictionary of sensitivities", "ret_jac", "[", "rv", "]", "=", "{", "o", ":", "v", "for", "o", ",", "v", "in", "out_reg_copy", ".", "jacobian", "[", "rv", "]", ".", "iteritems", "(", ")", "}", "else", ":", "# next time through, vstack the sensitivities to existing", "for", "o", ",", "v", "in", "out_reg_copy", ".", "jacobian", "[", "rv", "]", ".", "iteritems", "(", ")", ":", "ret_jac", "[", "rv", "]", "[", "o", "]", "=", "np", ".", "vstack", "(", "(", "ret_jac", "[", "rv", "]", "[", "o", "]", ",", "v", ")", ")", "LOGGER", ".", "debug", "(", "'ret_jac:\\n%r'", ",", "ret_jac", ")", "# TODO: handle jacobian for repeat args and for dynamic simulations", "# apply units if they were", "for", "k", "in", "retvals", ":", "if", "retvalu", "[", "k", "]", "is", "not", "None", ":", "if", "retvalu", "[", "k", "]", "==", "out_reg", "[", "k", "]", ".", "u", ":", "retvals", "[", "k", "]", "=", "Q_", "(", "retvals", "[", "k", "]", ",", "retvalu", "[", "k", "]", ")", "else", ":", "retvals", "[", "k", "]", "=", "Q_", "(", "retvals", "[", "k", "]", ",", "retvalu", "[", "k", "]", ")", ".", "to", "(", "out_reg", "[", "k", "]", ".", "u", ")", "# put return values into output registry", "if", "idx", "is", "None", ":", "out_reg", ".", "update", "(", "retvals", ")", "out_reg", ".", "variance", ".", "update", "(", "ret_var", ")", "out_reg", ".", "uncertainty", ".", "update", "(", "ret_unc", ")", "out_reg", ".", "jacobian", ".", "update", "(", "ret_jac", 
")", "else", ":", "for", "k", ",", "v", "in", "retvals", ":", "out_reg", "[", "k", "]", "[", "idx", "]", "=", "v" ]
Calculate looping over specified repeat arguments.

:param calc: Calculation to loop over.
:param formula_reg: Formula registry
:param data_reg: Data registry
:param out_reg: Outputs registry
:param timestep: timestep used for dynamic calcs
:param idx: index used in dynamic calcs
[ "Calculate", "looping", "over", "specified", "repeat", "arguments", "." ]
train
https://github.com/BreakingBytes/simkit/blob/205163d879d3880b6c9ef609f1b723a58773026b/simkit/contrib/lazy_looping_calculator.py#L136-L250
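The zip/transpose step in `calculate` above is easy to miss. A minimal, self-contained sketch of the same reshuffle, with made-up repeat-argument names and Python 3 `items()` standing in for the Python 2 `iteritems()` used by simkit:

# rargs maps each repeat argument to its sequence of values (illustrative names)
rargs = {"surface_tilt": [0, 10, 20], "surface_azimuth": [180, 180, 180]}
rargkeys, rargvals = zip(*rargs.items())  # split keys and value sequences
rargvals = list(zip(*rargvals))  # transpose: one tuple per repeat iteration
for vals in rargvals:
    # rebuild the per-iteration mapping, exactly as the loop body does
    print(dict(zip(rargkeys, vals)))
# -> {'surface_tilt': 0, 'surface_azimuth': 180}, then 10/180, then 20/180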
volafiled/python-volapi
volapi/volapi.py
listen_many
def listen_many(*rooms):
    """Listen for changes in all registered listeners in all specified rooms"""
    rooms = set(r.conn for r in rooms)
    for room in rooms:
        room.validate_listeners()
    with ARBITRATOR.condition:
        while any(r.connected for r in rooms):
            ARBITRATOR.condition.wait()
            rooms = [r for r in rooms if r.run_queues()]
            if not rooms:
                return
python
def listen_many(*rooms):
    """Listen for changes in all registered listeners in all specified rooms"""
    rooms = set(r.conn for r in rooms)
    for room in rooms:
        room.validate_listeners()
    with ARBITRATOR.condition:
        while any(r.connected for r in rooms):
            ARBITRATOR.condition.wait()
            rooms = [r for r in rooms if r.run_queues()]
            if not rooms:
                return
[ "def", "listen_many", "(", "*", "rooms", ")", ":", "rooms", "=", "set", "(", "r", ".", "conn", "for", "r", "in", "rooms", ")", "for", "room", "in", "rooms", ":", "room", ".", "validate_listeners", "(", ")", "with", "ARBITRATOR", ".", "condition", ":", "while", "any", "(", "r", ".", "connected", "for", "r", "in", "rooms", ")", ":", "ARBITRATOR", ".", "condition", ".", "wait", "(", ")", "rooms", "=", "[", "r", "for", "r", "in", "rooms", "if", "r", ".", "run_queues", "(", ")", "]", "if", "not", "rooms", ":", "return" ]
Listen for changes in all registered listeners in all specified rooms
[ "Listen", "for", "changes", "in", "all", "registered", "listeners", "in", "all", "specified", "rooms" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L907-L918
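A hedged usage sketch for `listen_many`. The room names, the "chat" event type, and the assumption that `Room` objects proxy `add_listener` to their connection are illustrative, not confirmed by this record:

from volapi import Room, listen_many

with Room("ROOMA") as room_a, Room("ROOMB") as room_b:  # hypothetical rooms
    for room in (room_a, room_b):
        # register a callback on each room before blocking
        room.add_listener("chat", lambda msg: print(msg))
    listen_many(room_a, room_b)  # dispatch events until both rooms disconnect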
volafiled/python-volapi
volapi/volapi.py
Connection.connect
def connect(self, username, checksum, password=None, key=None):
    """Connect to websocket through asyncio http interface"""
    ws_url = (
        f"{BASE_WS_URL}?room={self.room.room_id}&cs={checksum}&nick={username}"
        f"&rn={random_id(6)}&t={int(time.time() * 1000)}&transport=websocket&EIO=3"
    )
    if password:
        ws_url += f"&password={password}"
    elif key:
        ws_url += f"&key={key}"
    ARBITRATOR.create_connection(
        self.proto, ws_url, self.headers["User-Agent"], self.cookies
    )
    self.__conn_barrier.wait()
python
def connect(self, username, checksum, password=None, key=None):
    """Connect to websocket through asyncio http interface"""
    ws_url = (
        f"{BASE_WS_URL}?room={self.room.room_id}&cs={checksum}&nick={username}"
        f"&rn={random_id(6)}&t={int(time.time() * 1000)}&transport=websocket&EIO=3"
    )
    if password:
        ws_url += f"&password={password}"
    elif key:
        ws_url += f"&key={key}"
    ARBITRATOR.create_connection(
        self.proto, ws_url, self.headers["User-Agent"], self.cookies
    )
    self.__conn_barrier.wait()
[ "def", "connect", "(", "self", ",", "username", ",", "checksum", ",", "password", "=", "None", ",", "key", "=", "None", ")", ":", "ws_url", "=", "(", "f\"{BASE_WS_URL}?room={self.room.room_id}&cs={checksum}&nick={username}\"", "f\"&rn={random_id(6)}&t={int(time.time() * 1000)}&transport=websocket&EIO=3\"", ")", "if", "password", ":", "ws_url", "+=", "f\"&password={password}\"", "elif", "key", ":", "ws_url", "+=", "f\"&key={key}\"", "ARBITRATOR", ".", "create_connection", "(", "self", ".", "proto", ",", "ws_url", ",", "self", ".", "headers", "[", "\"User-Agent\"", "]", ",", "self", ".", "cookies", ")", "self", ".", "__conn_barrier", ".", "wait", "(", ")" ]
Connect to websocket through asyncio http interface
[ "Connect", "to", "websocket", "through", "asyncio", "http", "interface" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L74-L89
volafiled/python-volapi
volapi/volapi.py
Connection.send_ack
def send_ack(self):
    """Send an ack message"""
    if self.last_ack == self.proto.max_id:
        return
    LOGGER.debug("ack (%d)", self.proto.max_id)
    self.last_ack = self.proto.max_id
    self.send_message(f"4{to_json([self.proto.max_id])}")
python
def send_ack(self):
    """Send an ack message"""
    if self.last_ack == self.proto.max_id:
        return
    LOGGER.debug("ack (%d)", self.proto.max_id)
    self.last_ack = self.proto.max_id
    self.send_message(f"4{to_json([self.proto.max_id])}")
[ "def", "send_ack", "(", "self", ")", ":", "if", "self", ".", "last_ack", "==", "self", ".", "proto", ".", "max_id", ":", "return", "LOGGER", ".", "debug", "(", "\"ack (%d)\"", ",", "self", ".", "proto", ".", "max_id", ")", "self", ".", "last_ack", "=", "self", ".", "proto", ".", "max_id", "self", ".", "send_message", "(", "f\"4{to_json([self.proto.max_id])}\"", ")" ]
Send an ack message
[ "Send", "an", "ack", "message" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L112-L119
volafiled/python-volapi
volapi/volapi.py
Connection.make_call
def make_call(self, fun, *args):
    """Makes a regular API call"""
    obj = {"fn": fun, "args": list(args)}
    obj = [self.proto.max_id, [[0, ["call", obj]], self.proto.send_count]]
    self.send_message(f"4{to_json(obj)}")
    self.proto.send_count += 1
python
def make_call(self, fun, *args):
    """Makes a regular API call"""
    obj = {"fn": fun, "args": list(args)}
    obj = [self.proto.max_id, [[0, ["call", obj]], self.proto.send_count]]
    self.send_message(f"4{to_json(obj)}")
    self.proto.send_count += 1
[ "def", "make_call", "(", "self", ",", "fun", ",", "*", "args", ")", ":", "obj", "=", "{", "\"fn\"", ":", "fun", ",", "\"args\"", ":", "list", "(", "args", ")", "}", "obj", "=", "[", "self", ".", "proto", ".", "max_id", ",", "[", "[", "0", ",", "[", "\"call\"", ",", "obj", "]", "]", ",", "self", ".", "proto", ".", "send_count", "]", "]", "self", ".", "send_message", "(", "f\"4{to_json(obj)}\"", ")", "self", ".", "proto", ".", "send_count", "+=", "1" ]
Makes a regular API call
[ "Makes", "a", "regular", "API", "call" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L121-L127
volafiled/python-volapi
volapi/volapi.py
Connection.make_call_with_cb
def make_call_with_cb(self, fun, *args):
    """Makes an API call with a callback to wait for"""
    cid, event = self.handler.register_callback()
    argscp = list(args)
    argscp.append(cid)
    self.make_call(fun, *argscp)
    return event
python
def make_call_with_cb(self, fun, *args):
    """Makes an API call with a callback to wait for"""
    cid, event = self.handler.register_callback()
    argscp = list(args)
    argscp.append(cid)
    self.make_call(fun, *argscp)
    return event
[ "def", "make_call_with_cb", "(", "self", ",", "fun", ",", "*", "args", ")", ":", "cid", ",", "event", "=", "self", ".", "handler", ".", "register_callback", "(", ")", "argscp", "=", "list", "(", "args", ")", "argscp", ".", "append", "(", "cid", ")", "self", ".", "make_call", "(", "fun", ",", "*", "argscp", ")", "return", "event" ]
Makes an API call with a callback to wait for
[ "Makes", "an", "API", "call", "with", "a", "callback", "to", "wait", "for" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L129-L136
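The callback variant wraps `make_call` so the caller can block on the server's reply: it registers a callback id with the handler, appends it to the arguments, and returns the event to wait on. A sketch of the presumed usage pattern; the function name "getFileinfo" and the event's wait API are assumptions, not confirmed by this record:

# conn is an established Connection instance (assumption)
event = conn.make_call_with_cb("getFileinfo", file_id)  # cid appended for us
result = event.wait()  # presumed: block until the registered callback fires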
volafiled/python-volapi
volapi/volapi.py
Connection.make_api_call
def make_api_call(self, call, params):
    """Make a REST API call"""
    if not isinstance(params, dict):
        raise ValueError("params argument must be a dictionary")
    kw = dict(
        params=params,
        headers={"Origin": BASE_URL, "Referer": f"{BASE_URL}/r/{self.room.name}"},
    )
    return self.get(BASE_REST_URL + call, **kw).json()
python
def make_api_call(self, call, params):
    """Make a REST API call"""
    if not isinstance(params, dict):
        raise ValueError("params argument must be a dictionary")
    kw = dict(
        params=params,
        headers={"Origin": BASE_URL, "Referer": f"{BASE_URL}/r/{self.room.name}"},
    )
    return self.get(BASE_REST_URL + call, **kw).json()
[ "def", "make_api_call", "(", "self", ",", "call", ",", "params", ")", ":", "if", "not", "isinstance", "(", "params", ",", "dict", ")", ":", "raise", "ValueError", "(", "\"params argument must be a dictionary\"", ")", "kw", "=", "dict", "(", "params", "=", "params", ",", "headers", "=", "{", "\"Origin\"", ":", "BASE_URL", ",", "\"Referer\"", ":", "f\"{BASE_URL}/r/{self.room.name}\"", "}", ",", ")", "return", "self", ".", "get", "(", "BASE_REST_URL", "+", "call", ",", "*", "*", "kw", ")", ".", "json", "(", ")" ]
Make a REST API call
[ "Make", "a", "REST", "API", "call" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L138-L147
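The one usage of `make_api_call` confirmed later in this section is `Room.__set_config_value`, which issues a GET against the "setRoomConfig" endpoint with a params dict, roughly (the specific config key is illustrative):

# mirrors __set_config_value at the end of this section
params = {"room": room_id, "config": to_json({"name": "new name"})}
resp = conn.make_api_call("setRoomConfig", params)
if "error" in resp:
    raise RuntimeError(resp["error"])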
volafiled/python-volapi
volapi/volapi.py
Connection.reraise
def reraise(self, ex):
    """Reraise an exception passed by the event thread"""
    self.exception = ex
    self.process_queues(forced=True)
python
def reraise(self, ex):
    """Reraise an exception passed by the event thread"""
    self.exception = ex
    self.process_queues(forced=True)
[ "def", "reraise", "(", "self", ",", "ex", ")", ":", "self", ".", "exception", "=", "ex", "self", ".", "process_queues", "(", "forced", "=", "True", ")" ]
Reraise an exception passed by the event thread
[ "Reraise", "an", "exception", "passed", "by", "the", "event", "thread" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L149-L153
volafiled/python-volapi
volapi/volapi.py
Connection.close
def close(self):
    """Closes connection pair"""
    if self.connected:
        obj = [self.proto.max_id, [[2], self.proto.send_count]]
        ARBITRATOR.send_sync_message(self.proto, f"4{to_json(obj)}")
        self.proto.send_count += 1
    ARBITRATOR.close(self.proto)
    self.listeners.clear()
    self.proto.connected = False
    super().close()
    del self.room
    del self.proto
python
def close(self):
    """Closes connection pair"""
    if self.connected:
        obj = [self.proto.max_id, [[2], self.proto.send_count]]
        ARBITRATOR.send_sync_message(self.proto, f"4{to_json(obj)}")
        self.proto.send_count += 1
    ARBITRATOR.close(self.proto)
    self.listeners.clear()
    self.proto.connected = False
    super().close()
    del self.room
    del self.proto
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "connected", ":", "obj", "=", "[", "self", ".", "proto", ".", "max_id", ",", "[", "[", "2", "]", ",", "self", ".", "proto", ".", "send_count", "]", "]", "ARBITRATOR", ".", "send_sync_message", "(", "self", ".", "proto", ",", "f\"4{to_json(obj)}\"", ")", "self", ".", "proto", ".", "send_count", "+=", "1", "ARBITRATOR", ".", "close", "(", "self", ".", "proto", ")", "self", ".", "listeners", ".", "clear", "(", ")", "self", ".", "proto", ".", "connected", "=", "False", "super", "(", ")", ".", "close", "(", ")", "del", "self", ".", "room", "del", "self", ".", "proto" ]
Closes connection pair
[ "Closes", "connection", "pair" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L155-L167
volafiled/python-volapi
volapi/volapi.py
Connection.on_open
async def on_open(self):
    """DingDongmaster the connection is open"""
    self.__ensure_barrier()
    while self.connected:
        try:
            if self.__lastping > self.__lastpong:
                raise IOError("Last ping remained unanswered")
            self.send_message("2")
            self.send_ack()
            self.__lastping = time.time()
            await asyncio.sleep(self.ping_interval)
        except Exception as ex:
            LOGGER.exception("Failed to ping")
            try:
                self.reraise(ex)
            except Exception:
                LOGGER.exception(
                    "failed to force close connection after ping error"
                )
            break
python
async def on_open(self):
    """DingDongmaster the connection is open"""
    self.__ensure_barrier()
    while self.connected:
        try:
            if self.__lastping > self.__lastpong:
                raise IOError("Last ping remained unanswered")
            self.send_message("2")
            self.send_ack()
            self.__lastping = time.time()
            await asyncio.sleep(self.ping_interval)
        except Exception as ex:
            LOGGER.exception("Failed to ping")
            try:
                self.reraise(ex)
            except Exception:
                LOGGER.exception(
                    "failed to force close connection after ping error"
                )
            break
[ "async", "def", "on_open", "(", "self", ")", ":", "self", ".", "__ensure_barrier", "(", ")", "while", "self", ".", "connected", ":", "try", ":", "if", "self", ".", "__lastping", ">", "self", ".", "__lastpong", ":", "raise", "IOError", "(", "\"Last ping remained unanswered\"", ")", "self", ".", "send_message", "(", "\"2\"", ")", "self", ".", "send_ack", "(", ")", "self", ".", "__lastping", "=", "time", ".", "time", "(", ")", "await", "asyncio", ".", "sleep", "(", "self", ".", "ping_interval", ")", "except", "Exception", "as", "ex", ":", "LOGGER", ".", "exception", "(", "\"Failed to ping\"", ")", "try", ":", "self", ".", "reraise", "(", "ex", ")", "except", "Exception", ":", "LOGGER", ".", "exception", "(", "\"failed to force close connection after ping error\"", ")", "break" ]
DingDongmaster the connection is open
[ "DingDongmaster", "the", "connection", "is", "open" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L174-L195
volafiled/python-volapi
volapi/volapi.py
Connection.on_message
def on_message(self, new_data):
    """Processes incoming messages according to engine-io rules"""
    # https://github.com/socketio/engine.io-protocol
    LOGGER.debug("new frame [%r]", new_data)
    try:
        what = int(new_data[0])
        data = new_data[1:]
        data = data and from_json(data)
        if what == 0:
            self.ping_interval = float(data["pingInterval"]) / 1000
            LOGGER.debug("adjusted ping interval")
            return
        if what == 1:
            LOGGER.debug("received close")
            self.reraise(IOError("Connection closed remotely"))
            return
        if what == 3:
            self.__lastpong = time.time()
            LOGGER.debug("received a pong")
            return
        if what == 4:
            self._on_frame(data)
            return
        if what == 6:
            LOGGER.debug("received noop")
            self.send_message("5")
            return
        LOGGER.debug("unhandled message: [%d] [%r]", what, data)
    except Exception as ex:
        self.reraise(ex)
python
def on_message(self, new_data):
    """Processes incoming messages according to engine-io rules"""
    # https://github.com/socketio/engine.io-protocol
    LOGGER.debug("new frame [%r]", new_data)
    try:
        what = int(new_data[0])
        data = new_data[1:]
        data = data and from_json(data)
        if what == 0:
            self.ping_interval = float(data["pingInterval"]) / 1000
            LOGGER.debug("adjusted ping interval")
            return
        if what == 1:
            LOGGER.debug("received close")
            self.reraise(IOError("Connection closed remotely"))
            return
        if what == 3:
            self.__lastpong = time.time()
            LOGGER.debug("received a pong")
            return
        if what == 4:
            self._on_frame(data)
            return
        if what == 6:
            LOGGER.debug("received noop")
            self.send_message("5")
            return
        LOGGER.debug("unhandled message: [%d] [%r]", what, data)
    except Exception as ex:
        self.reraise(ex)
[ "def", "on_message", "(", "self", ",", "new_data", ")", ":", "# https://github.com/socketio/engine.io-protocol", "LOGGER", ".", "debug", "(", "\"new frame [%r]\"", ",", "new_data", ")", "try", ":", "what", "=", "int", "(", "new_data", "[", "0", "]", ")", "data", "=", "new_data", "[", "1", ":", "]", "data", "=", "data", "and", "from_json", "(", "data", ")", "if", "what", "==", "0", ":", "self", ".", "ping_interval", "=", "float", "(", "data", "[", "\"pingInterval\"", "]", ")", "/", "1000", "LOGGER", ".", "debug", "(", "\"adjusted ping interval\"", ")", "return", "if", "what", "==", "1", ":", "LOGGER", ".", "debug", "(", "\"received close\"", ")", "self", ".", "reraise", "(", "IOError", "(", "\"Connection closed remotely\"", ")", ")", "return", "if", "what", "==", "3", ":", "self", ".", "__lastpong", "=", "time", ".", "time", "(", ")", "LOGGER", ".", "debug", "(", "\"received a pong\"", ")", "return", "if", "what", "==", "4", ":", "self", ".", "_on_frame", "(", "data", ")", "return", "if", "what", "==", "6", ":", "LOGGER", ".", "debug", "(", "\"received noop\"", ")", "self", ".", "send_message", "(", "\"5\"", ")", "return", "LOGGER", ".", "debug", "(", "\"unhandled message: [%d] [%r]\"", ",", "what", ",", "data", ")", "except", "Exception", "as", "ex", ":", "self", ".", "reraise", "(", "ex", ")" ]
Processes incoming messages according to engine-io rules
[ "Processes", "incoming", "messages", "according", "to", "engine", "-", "io", "rules" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L232-L267
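The first character of each frame is the engine.io packet type that `on_message` switches on. A standalone decoder sketch following the engine.io protocol numbering (0 open, 1 close, 2 ping, 3 pong, 4 message, 5 upgrade, 6 noop); the helper is illustrative, not part of volapi:

import json

PACKET_TYPES = {0: "open", 1: "close", 2: "ping", 3: "pong", 4: "message", 5: "upgrade", 6: "noop"}

def decode_frame(frame):
    # split an engine.io frame into (type name, decoded payload)
    what = int(frame[0])
    payload = frame[1:]
    return PACKET_TYPES[what], json.loads(payload) if payload else None

print(decode_frame('0{"pingInterval": 25000}'))  # ('open', {'pingInterval': 25000})
print(decode_frame("3"))                         # ('pong', None)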
volafiled/python-volapi
volapi/volapi.py
Connection.add_listener
def add_listener(self, event_type, callback):
    """Add a listener for specific event type.
    You'll need to actually listen for changes using the listen method"""
    if not self.connected:
        # wait for errors set by reraise method
        time.sleep(1)
        if self.exception:
            # pylint: disable=raising-bad-type
            raise self.exception
        raise ConnectionError(f"{self.room} is not connected")
    thread = get_thread_ident()
    with self.lock:
        listener = self.listeners[thread]
        listener.add(event_type, callback)
    # use "initial_files" event to listen for whole filelist on room join
    self.process_queues()
python
def add_listener(self, event_type, callback):
    """Add a listener for specific event type.
    You'll need to actually listen for changes using the listen method"""
    if not self.connected:
        # wait for errors set by reraise method
        time.sleep(1)
        if self.exception:
            # pylint: disable=raising-bad-type
            raise self.exception
        raise ConnectionError(f"{self.room} is not connected")
    thread = get_thread_ident()
    with self.lock:
        listener = self.listeners[thread]
        listener.add(event_type, callback)
    # use "initial_files" event to listen for whole filelist on room join
    self.process_queues()
[ "def", "add_listener", "(", "self", ",", "event_type", ",", "callback", ")", ":", "if", "not", "self", ".", "connected", ":", "# wait for errors set by reraise method", "time", ".", "sleep", "(", "1", ")", "if", "self", ".", "exception", ":", "# pylint: disable=raising-bad-type", "raise", "self", ".", "exception", "raise", "ConnectionError", "(", "f\"{self.room} is not connected\"", ")", "thread", "=", "get_thread_ident", "(", ")", "with", "self", ".", "lock", ":", "listener", "=", "self", ".", "listeners", "[", "thread", "]", "listener", ".", "add", "(", "event_type", ",", "callback", ")", "# use \"initial_files\" event to listen for whole filelist on room join", "self", ".", "process_queues", "(", ")" ]
Add a listener for specific event type. You'll need to actually listen for changes using the listen method
[ "Add", "a", "listener", "for", "specific", "event", "type", ".", "You", "ll", "need", "to", "actually", "listen", "for", "changes", "using", "the", "listen", "method" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L269-L286
volafiled/python-volapi
volapi/volapi.py
Connection.enqueue_data
def enqueue_data(self, event_type, data):
    """Enqueue a data item for specific event type"""
    with self.lock:
        listeners = self.listeners.values()
        for listener in listeners:
            listener.enqueue(event_type, data)
        self.must_process = True
python
def enqueue_data(self, event_type, data):
    """Enqueue a data item for specific event type"""
    with self.lock:
        listeners = self.listeners.values()
        for listener in listeners:
            listener.enqueue(event_type, data)
        self.must_process = True
[ "def", "enqueue_data", "(", "self", ",", "event_type", ",", "data", ")", ":", "with", "self", ".", "lock", ":", "listeners", "=", "self", ".", "listeners", ".", "values", "(", ")", "for", "listener", "in", "listeners", ":", "listener", ".", "enqueue", "(", "event_type", ",", "data", ")", "self", ".", "must_process", "=", "True" ]
Enqueue a data item for specific event type
[ "Enqueue", "a", "data", "item", "for", "specific", "event", "type" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L288-L295
volafiled/python-volapi
volapi/volapi.py
Connection.process_queues
def process_queues(self, forced=False):
    """Process queues if any have data queued"""
    with self.lock:
        if (not forced and not self.must_process) or not self.queues_enabled:
            return
        self.must_process = False
    ARBITRATOR.awaken()
python
def process_queues(self, forced=False):
    """Process queues if any have data queued"""
    with self.lock:
        if (not forced and not self.must_process) or not self.queues_enabled:
            return
        self.must_process = False
    ARBITRATOR.awaken()
[ "def", "process_queues", "(", "self", ",", "forced", "=", "False", ")", ":", "with", "self", ".", "lock", ":", "if", "(", "not", "forced", "and", "not", "self", ".", "must_process", ")", "or", "not", "self", ".", "queues_enabled", ":", "return", "self", ".", "must_process", "=", "False", "ARBITRATOR", ".", "awaken", "(", ")" ]
Process queues if any have data queued
[ "Process", "queues", "if", "any", "have", "data", "queued" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L310-L317
volafiled/python-volapi
volapi/volapi.py
Connection.__listeners_for_thread
def __listeners_for_thread(self):
    """All Listeners for the current thread"""
    thread = get_thread_ident()
    with self.lock:
        return [l for tid, l in self.listeners.items() if tid == thread]
python
def __listeners_for_thread(self):
    """All Listeners for the current thread"""
    thread = get_thread_ident()
    with self.lock:
        return [l for tid, l in self.listeners.items() if tid == thread]
[ "def", "__listeners_for_thread", "(", "self", ")", ":", "thread", "=", "get_thread_ident", "(", ")", "with", "self", ".", "lock", ":", "return", "[", "l", "for", "tid", ",", "l", "in", "self", ".", "listeners", ".", "items", "(", ")", "if", "tid", "==", "thread", "]" ]
All Listeners for the current thread
[ "All", "Listeners", "for", "the", "current", "thread" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L320-L325
volafiled/python-volapi
volapi/volapi.py
Connection.validate_listeners
def validate_listeners(self):
    """Validates that some listeners are actually registered"""
    if self.exception:
        # pylint: disable=raising-bad-type
        raise self.exception
    listeners = self.__listeners_for_thread
    if not sum(len(l) for l in listeners):
        raise ValueError("No active listeners")
python
def validate_listeners(self):
    """Validates that some listeners are actually registered"""
    if self.exception:
        # pylint: disable=raising-bad-type
        raise self.exception
    listeners = self.__listeners_for_thread
    if not sum(len(l) for l in listeners):
        raise ValueError("No active listeners")
[ "def", "validate_listeners", "(", "self", ")", ":", "if", "self", ".", "exception", ":", "# pylint: disable=raising-bad-type", "raise", "self", ".", "exception", "listeners", "=", "self", ".", "__listeners_for_thread", "if", "not", "sum", "(", "len", "(", "l", ")", "for", "l", "in", "listeners", ")", ":", "raise", "ValueError", "(", "\"No active listeners\"", ")" ]
Validates that some listeners are actually registered
[ "Validates", "that", "some", "listeners", "are", "actually", "registered" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L327-L336
volafiled/python-volapi
volapi/volapi.py
Connection.listen
def listen(self):
    """Listen for changes in all registered listeners."""
    self.validate_listeners()
    with ARBITRATOR.condition:
        while self.connected:
            ARBITRATOR.condition.wait()
            if not self.run_queues():
                break
python
def listen(self):
    """Listen for changes in all registered listeners."""
    self.validate_listeners()
    with ARBITRATOR.condition:
        while self.connected:
            ARBITRATOR.condition.wait()
            if not self.run_queues():
                break
[ "def", "listen", "(", "self", ")", ":", "self", ".", "validate_listeners", "(", ")", "with", "ARBITRATOR", ".", "condition", ":", "while", "self", ".", "connected", ":", "ARBITRATOR", ".", "condition", ".", "wait", "(", ")", "if", "not", "self", ".", "run_queues", "(", ")", ":", "break" ]
Listen for changes in all registered listeners.
[ "Listen", "for", "changes", "in", "all", "registered", "listeners", "." ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L338-L346
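Combining `add_listener` and `listen` gives the basic single-room event loop. A hedged sketch; the room name, nick argument, and the assumption that `Room` proxies both methods are illustrative rather than confirmed by these records:

from volapi import Room

with Room("ROOMID", "somenick") as room:  # hypothetical room and nick
    room.add_listener("chat", lambda msg: print(msg))  # queue chat events
    room.listen()  # block, dispatching queued events until disconnected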
volafiled/python-volapi
volapi/volapi.py
Connection.run_queues
def run_queues(self):
    """Run all queues that have data queued"""
    if self.exception:
        # pylint: disable=raising-bad-type
        raise self.exception
    listeners = self.__listeners_for_thread
    return sum(l.process() for l in listeners) > 0
python
def run_queues(self):
    """Run all queues that have data queued"""
    if self.exception:
        # pylint: disable=raising-bad-type
        raise self.exception
    listeners = self.__listeners_for_thread
    return sum(l.process() for l in listeners) > 0
[ "def", "run_queues", "(", "self", ")", ":", "if", "self", ".", "exception", ":", "# pylint: disable=raising-bad-type", "raise", "self", ".", "exception", "listeners", "=", "self", ".", "__listeners_for_thread", "return", "sum", "(", "l", ".", "process", "(", ")", "for", "l", "in", "listeners", ")", ">", "0" ]
Run all queues that have data queued
[ "Run", "all", "queues", "that", "have", "data", "queued" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L348-L355
volafiled/python-volapi
volapi/volapi.py
Room.__add_prop
def __add_prop(self, key, admin=False):
    """Add gettable and settable room config property during runtime"""

    def getter(self):
        return self.config[key]

    def setter(self, val):
        if admin and not self.admin:
            raise RuntimeError(
                f"You can't set the {key} key without mod privileges"
            )
        self.__set_config_value(self.config.get_real_key(key), val)

    setattr(self.__class__, key, property(getter, setter))
python
def __add_prop(self, key, admin=False):
    """Add gettable and settable room config property during runtime"""

    def getter(self):
        return self.config[key]

    def setter(self, val):
        if admin and not self.admin:
            raise RuntimeError(
                f"You can't set the {key} key without mod privileges"
            )
        self.__set_config_value(self.config.get_real_key(key), val)

    setattr(self.__class__, key, property(getter, setter))
[ "def", "__add_prop", "(", "self", ",", "key", ",", "admin", "=", "False", ")", ":", "def", "getter", "(", "self", ")", ":", "return", "self", ".", "config", "[", "key", "]", "def", "setter", "(", "self", ",", "val", ")", ":", "if", "admin", "and", "not", "self", ".", "admin", ":", "raise", "RuntimeError", "(", "f\"You can't set the {key} key without mod privileges\"", ")", "self", ".", "__set_config_value", "(", "self", ".", "config", ".", "get_real_key", "(", "key", ")", ",", "val", ")", "setattr", "(", "self", ".", "__class__", ",", "key", ",", "property", "(", "getter", ",", "setter", ")", ")" ]
Add gettable and settable room config property during runtime
[ "Add", "gettable", "and", "settable", "room", "config", "property", "during", "runtime" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L417-L430
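A self-contained sketch of the technique `__add_prop` relies on: building getter/setter closures over the config key and attaching them to the class with `setattr` and `property` at runtime. The classes and key here are made up for illustration:

class RoomLike:
    def __init__(self):
        self.config = {"motd": "hello"}

def add_prop(cls, key):
    def getter(self):
        return self.config[key]  # closure over key

    def setter(self, val):
        self.config[key] = val  # the real code routes through the REST API

    # class-level attribute, so it affects every instance of cls
    setattr(cls, key, property(getter, setter))

add_prop(RoomLike, "motd")
room = RoomLike()
print(room.motd)   # -> hello
room.motd = "bye"  # routed through the setter into config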
volafiled/python-volapi
volapi/volapi.py
Room.__set_config_value
def __set_config_value(self, key, value):
    """Sets a value for a room config"""
    self.check_owner()
    params = {"room": self.room_id, "config": to_json({key: value})}
    resp = self.conn.make_api_call("setRoomConfig", params)
    if "error" in resp:
        raise RuntimeError(f"{resp['error'].get('message') or resp['error']}")
    return resp
python
def __set_config_value(self, key, value):
    """Sets a value for a room config"""
    self.check_owner()
    params = {"room": self.room_id, "config": to_json({key: value})}
    resp = self.conn.make_api_call("setRoomConfig", params)
    if "error" in resp:
        raise RuntimeError(f"{resp['error'].get('message') or resp['error']}")
    return resp
[ "def", "__set_config_value", "(", "self", ",", "key", ",", "value", ")", ":", "self", ".", "check_owner", "(", ")", "params", "=", "{", "\"room\"", ":", "self", ".", "room_id", ",", "\"config\"", ":", "to_json", "(", "{", "key", ":", "value", "}", ")", "}", "resp", "=", "self", ".", "conn", ".", "make_api_call", "(", "\"setRoomConfig\"", ",", "params", ")", "if", "\"error\"", "in", "resp", ":", "raise", "RuntimeError", "(", "f\"{resp['error'].get('message') or resp['error']}\"", ")", "return", "resp" ]
Sets a value for a room config
[ "Sets", "a", "value", "for", "a", "room", "config" ]
train
https://github.com/volafiled/python-volapi/blob/5f0bc03dbde703264ac6ed494e2050761f688a3e/volapi/volapi.py#L432-L440