Dataset schema (one record per function; the rows below list the fields in this order):

  Field             Type      Values
  id                int32     0 .. 252k
  repo              string    7 .. 55 chars
  path              string    4 .. 127 chars
  func_name         string    1 .. 88 chars
  original_string   string    75 .. 19.8k chars
  language          string    1 distinct value ("python")
  code              string    75 .. 19.8k chars (identical to original_string in the rows shown)
  code_tokens       sequence  token list of code
  docstring         string    3 .. 17.3k chars
  docstring_tokens  sequence  token list of docstring
  sha               string    exactly 40 chars (commit hash)
  url               string    87 .. 242 chars (GitHub permalink)
300
gem/oq-engine
openquake/baselib/parallel.py
Result.get
def get(self): """ Returns the underlying value or raise the underlying exception """ val = self.pik.unpickle() if self.tb_str: etype = val.__class__ msg = '\n%s%s: %s' % (self.tb_str, etype.__name__, val) if issubclass(etype, KeyError): raise RuntimeError(msg) # nicer message else: raise etype(msg) return val
python
def get(self): """ Returns the underlying value or raise the underlying exception """ val = self.pik.unpickle() if self.tb_str: etype = val.__class__ msg = '\n%s%s: %s' % (self.tb_str, etype.__name__, val) if issubclass(etype, KeyError): raise RuntimeError(msg) # nicer message else: raise etype(msg) return val
[ "def", "get", "(", "self", ")", ":", "val", "=", "self", ".", "pik", ".", "unpickle", "(", ")", "if", "self", ".", "tb_str", ":", "etype", "=", "val", ".", "__class__", "msg", "=", "'\\n%s%s: %s'", "%", "(", "self", ".", "tb_str", ",", "etype", ".", "__name__", ",", "val", ")", "if", "issubclass", "(", "etype", ",", "KeyError", ")", ":", "raise", "RuntimeError", "(", "msg", ")", "# nicer message", "else", ":", "raise", "etype", "(", "msg", ")", "return", "val" ]
Returns the underlying value or raise the underlying exception
[ "Returns", "the", "underlying", "value", "or", "raise", "the", "underlying", "exception" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L337-L349
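A hedged sketch of the pattern Result.get implements: the worker ships back either a value or the raised exception plus its formatted traceback, and the master re-raises with the remote traceback prepended. run_remote and the standalone get below are illustrative stand-ins, not oq-engine API.

import traceback

def run_remote(func):
    # stand-in worker: return (value, tb_str), with tb_str empty on success
    try:
        return func(), ''
    except Exception as exc:
        return exc, traceback.format_exc()

def get(val, tb_str):
    # same re-raise logic as Result.get, minus the unpickling
    if tb_str:
        etype = val.__class__
        msg = '\n%s%s: %s' % (tb_str, etype.__name__, val)
        if issubclass(etype, KeyError):
            raise RuntimeError(msg)  # nicer message than a bare KeyError
        raise etype(msg)
    return val

val, tb = run_remote(lambda: {}['missing'])
try:
    get(val, tb)
except RuntimeError as err:
    print(err)  # remote traceback, then "KeyError: 'missing'"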
301
gem/oq-engine
openquake/baselib/parallel.py
IterResult.sum
def sum(cls, iresults):
    """
    Sum the data transfer information of a set of results
    """
    res = object.__new__(cls)
    res.received = []
    res.sent = 0
    for iresult in iresults:
        res.received.extend(iresult.received)
        res.sent += iresult.sent
        name = iresult.name.split('#', 1)[0]
        if hasattr(res, 'name'):
            assert res.name.split('#', 1)[0] == name, (res.name, name)
        else:
            res.name = iresult.name.split('#')[0]
    return res
python
[ "def", "sum", "(", "cls", ",", "iresults", ")", ":", "res", "=", "object", ".", "__new__", "(", "cls", ")", "res", ".", "received", "=", "[", "]", "res", ".", "sent", "=", "0", "for", "iresult", "in", "iresults", ":", "res", ".", "received", ".", "extend", "(", "iresult", ".", "received", ")", "res", ".", "sent", "+=", "iresult", ".", "sent", "name", "=", "iresult", ".", "name", ".", "split", "(", "'#'", ",", "1", ")", "[", "0", "]", "if", "hasattr", "(", "res", ",", "'name'", ")", ":", "assert", "res", ".", "name", ".", "split", "(", "'#'", ",", "1", ")", "[", "0", "]", "==", "name", ",", "(", "res", ".", "name", ",", "name", ")", "else", ":", "res", ".", "name", "=", "iresult", ".", "name", ".", "split", "(", "'#'", ")", "[", "0", "]", "return", "res" ]
Sum the data transfer information of a set of results
[ "Sum", "the", "data", "transfer", "information", "of", "a", "set", "of", "results" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L532-L547
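The aggregation above reduces to: concatenate received, add sent, and assert that every partial result shares the same base name before the '#' suffix. A minimal sketch with a stand-in class (not the real IterResult):

class Info:
    def __init__(self, name, received, sent):
        self.name, self.received, self.sent = name, received, sent

def sum_infos(infos):
    res = Info(None, [], 0)
    for info in infos:
        res.received.extend(info.received)
        res.sent += info.sent
        name = info.name.split('#', 1)[0]  # strip the '#<task_no>' suffix
        if res.name is None:
            res.name = name
        else:
            assert res.name == name, (res.name, name)
    return res

tot = sum_infos([Info('classical#1', [10], 100),
                 Info('classical#2', [20], 200)])
print(tot.name, tot.received, tot.sent)  # classical [10, 20] 300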
302
gem/oq-engine
openquake/baselib/parallel.py
Starmap.log_percent
def log_percent(self):
    """
    Log the progress of the computation in percentage
    """
    done = self.total - self.todo
    percent = int(float(done) / self.total * 100)
    if not hasattr(self, 'prev_percent'):  # first time
        self.prev_percent = 0
        self.progress('Sent %s of data in %d %s task(s)',
                      humansize(self.sent.sum()), self.total, self.name)
    elif percent > self.prev_percent:
        self.progress('%s %3d%% [of %d tasks]',
                      self.name, percent, len(self.tasks))
        self.prev_percent = percent
    return done
python
[ "def", "log_percent", "(", "self", ")", ":", "done", "=", "self", ".", "total", "-", "self", ".", "todo", "percent", "=", "int", "(", "float", "(", "done", ")", "/", "self", ".", "total", "*", "100", ")", "if", "not", "hasattr", "(", "self", ",", "'prev_percent'", ")", ":", "# first time", "self", ".", "prev_percent", "=", "0", "self", ".", "progress", "(", "'Sent %s of data in %d %s task(s)'", ",", "humansize", "(", "self", ".", "sent", ".", "sum", "(", ")", ")", ",", "self", ".", "total", ",", "self", ".", "name", ")", "elif", "percent", ">", "self", ".", "prev_percent", ":", "self", ".", "progress", "(", "'%s %3d%% [of %d tasks]'", ",", "self", ".", "name", ",", "percent", ",", "len", "(", "self", ".", "tasks", ")", ")", "self", ".", "prev_percent", "=", "percent", "return", "done" ]
Log the progress of the computation in percentage
[ "Log", "the", "progress", "of", "the", "computation", "in", "percentage" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L691-L705
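The point of prev_percent is throttling: a line is emitted only when the integer percentage actually advances, so even a very long task loop logs at most ~100 progress lines. A self-contained sketch of the same idea (names illustrative):

def make_progress(total):
    prev = -1
    def update(done):
        nonlocal prev
        percent = int(float(done) / total * 100)
        if percent > prev:  # log only when a new integer percentage is reached
            print('%3d%% [of %d tasks]' % (percent, total))
            prev = percent
    return update

update = make_progress(1000)
for done in range(0, 1001, 7):  # 144 calls, but at most 101 printed lines
    update(done)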
303
gem/oq-engine
openquake/baselib/parallel.py
Starmap.submit
def submit(self, *args, func=None, monitor=None):
    """
    Submit the given arguments to the underlying task
    """
    monitor = monitor or self.monitor
    func = func or self.task_func
    if not hasattr(self, 'socket'):  # first time
        self.__class__.running_tasks = self.tasks
        self.socket = Socket(self.receiver, zmq.PULL, 'bind').__enter__()
        monitor.backurl = 'tcp://%s:%s' % (
            config.dbserver.host, self.socket.port)
    assert not isinstance(args[-1], Monitor)  # sanity check
    dist = 'no' if self.num_tasks == 1 else self.distribute
    if dist != 'no':
        args = pickle_sequence(args)
        self.sent += numpy.array([len(p) for p in args])
    res = submit[dist](self, func, args, monitor)
    self.tasks.append(res)
python
[ "def", "submit", "(", "self", ",", "*", "args", ",", "func", "=", "None", ",", "monitor", "=", "None", ")", ":", "monitor", "=", "monitor", "or", "self", ".", "monitor", "func", "=", "func", "or", "self", ".", "task_func", "if", "not", "hasattr", "(", "self", ",", "'socket'", ")", ":", "# first time", "self", ".", "__class__", ".", "running_tasks", "=", "self", ".", "tasks", "self", ".", "socket", "=", "Socket", "(", "self", ".", "receiver", ",", "zmq", ".", "PULL", ",", "'bind'", ")", ".", "__enter__", "(", ")", "monitor", ".", "backurl", "=", "'tcp://%s:%s'", "%", "(", "config", ".", "dbserver", ".", "host", ",", "self", ".", "socket", ".", "port", ")", "assert", "not", "isinstance", "(", "args", "[", "-", "1", "]", ",", "Monitor", ")", "# sanity check", "dist", "=", "'no'", "if", "self", ".", "num_tasks", "==", "1", "else", "self", ".", "distribute", "if", "dist", "!=", "'no'", ":", "args", "=", "pickle_sequence", "(", "args", ")", "self", ".", "sent", "+=", "numpy", ".", "array", "(", "[", "len", "(", "p", ")", "for", "p", "in", "args", "]", ")", "res", "=", "submit", "[", "dist", "]", "(", "self", ",", "func", ",", "args", ",", "monitor", ")", "self", ".", "tasks", ".", "append", "(", "res", ")" ]
Submit the given arguments to the underlying task
[ "Submit", "the", "given", "arguments", "to", "the", "underlying", "task" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L707-L724
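submit[dist] above is table-based dispatch: the distribution mode selects the submission backend from a dict of callables, and a single task short-circuits to in-process execution. A minimal sketch of the idiom with made-up modes (the real table maps names such as the configured distribution mechanisms; exact keys are an assumption here):

def run_inline(func, args):
    # in-process backend, like the 'no' distribution mode
    return func(*args)

def run_logged(func, args):
    # stand-in for a remote backend; just logs the dispatch
    print('dispatching %s%s' % (func.__name__, args))
    return func(*args)

submit = {'no': run_inline, 'logged': run_logged}  # illustrative backends

def double(x):
    return 2 * x

num_tasks = 1
dist = 'no' if num_tasks == 1 else 'logged'  # mirrors the single-task shortcut
print(submit[dist](double, (21,)))  # 42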
304
gem/oq-engine
openquake/baselib/parallel.py
Starmap.reduce
def reduce(self, agg=operator.add, acc=None):
    """
    Submit all tasks and reduce the results
    """
    return self.submit_all().reduce(agg, acc)
python
[ "def", "reduce", "(", "self", ",", "agg", "=", "operator", ".", "add", ",", "acc", "=", "None", ")", ":", "return", "self", ".", "submit_all", "(", ")", ".", "reduce", "(", "agg", ",", "acc", ")" ]
Submit all tasks and reduce the results
[ "Submit", "all", "tasks", "and", "reduce", "the", "results" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L748-L752
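reduce just delegates to the IterResult returned by submit_all; the aggregation it names is an ordinary fold with operator.add as the default combiner. A sketch of that fold (not the oq-engine implementation):

import operator

def reduce_results(results, agg=operator.add, acc=None):
    # fold each result into the accumulator, seeding it from the first value
    for value in results:
        acc = value if acc is None else agg(acc, value)
    return acc

print(reduce_results([1, 2, 3]))                  # 6
print(reduce_results([1, 2, 3], agg=max, acc=0))  # 3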
305
gem/oq-engine
openquake/hazardlib/probability_map.py
ProbabilityCurve.convert
def convert(self, imtls, idx=0):
    """
    Convert a probability curve into a record of dtype `imtls.dt`.

    :param imtls: DictArray instance
    :param idx: extract the data corresponding to the given inner index
    """
    curve = numpy.zeros(1, imtls.dt)
    for imt in imtls:
        curve[imt] = self.array[imtls(imt), idx]
    return curve[0]
python
[ "def", "convert", "(", "self", ",", "imtls", ",", "idx", "=", "0", ")", ":", "curve", "=", "numpy", ".", "zeros", "(", "1", ",", "imtls", ".", "dt", ")", "for", "imt", "in", "imtls", ":", "curve", "[", "imt", "]", "=", "self", ".", "array", "[", "imtls", "(", "imt", ")", ",", "idx", "]", "return", "curve", "[", "0", "]" ]
Convert a probability curve into a record of dtype `imtls.dt`.

:param imtls: DictArray instance
:param idx: extract the data corresponding to the given inner index
[ "Convert", "a", "probability", "curve", "into", "a", "record", "of", "dtype", "imtls", ".", "dt", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/probability_map.py#L96-L106
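What convert produces is a numpy structured record with one field per IMT; imtls.dt supplies the dtype and imtls(imt) the slice of levels belonging to that IMT. Here both are faked with plain numpy, so the names and slices are illustrative:

import numpy

dt = numpy.dtype([('PGA', (float, 3)), ('SA(0.1)', (float, 3))])
array = numpy.arange(12.0).reshape(6, 2)  # 6 levels x 2 inner indices
slices = {'PGA': slice(0, 3), 'SA(0.1)': slice(3, 6)}  # stand-in for imtls(imt)

curve = numpy.zeros(1, dt)
for imt in dt.names:
    curve[imt] = array[slices[imt], 0]  # idx=0 picks the first inner column
print(curve[0])  # ([0., 2., 4.], [6., 8., 10.])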
306
gem/oq-engine
openquake/hazardlib/probability_map.py
ProbabilityMap.nbytes
def nbytes(self):
    """The size of the underlying array"""
    try:
        N, L, I = get_shape([self])
    except AllEmptyProbabilityMaps:
        return 0
    return BYTES_PER_FLOAT * N * L * I
python
[ "def", "nbytes", "(", "self", ")", ":", "try", ":", "N", ",", "L", ",", "I", "=", "get_shape", "(", "[", "self", "]", ")", "except", "AllEmptyProbabilityMaps", ":", "return", "0", "return", "BYTES_PER_FLOAT", "*", "N", "*", "L", "*", "I" ]
The size of the underlying array
[ "The", "size", "of", "the", "underlying", "array" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/probability_map.py#L194-L200
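The estimate is pure arithmetic: N sites times L levels times I inner values, each an 8-byte float. A quick check of the formula with illustrative sizes:

BYTES_PER_FLOAT = 8             # float64, as in numpy
N, L, I = 10000, 20, 2          # sites, levels, inner dimension (illustrative)
print(BYTES_PER_FLOAT * N * L * I)  # 3200000 bytes, about 3.05 MiB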
307
gem/oq-engine
openquake/hazardlib/probability_map.py
ProbabilityMap.convert
def convert(self, imtls, nsites, idx=0):
    """
    Convert a probability map into a composite array of length `nsites`
    and dtype `imtls.dt`.

    :param imtls: DictArray instance
    :param nsites: the total number of sites
    :param idx: index on the z-axis (default 0)
    """
    curves = numpy.zeros(nsites, imtls.dt)
    for imt in curves.dtype.names:
        curves_by_imt = curves[imt]
        for sid in self:
            curves_by_imt[sid] = self[sid].array[imtls(imt), idx]
    return curves
python
[ "def", "convert", "(", "self", ",", "imtls", ",", "nsites", ",", "idx", "=", "0", ")", ":", "curves", "=", "numpy", ".", "zeros", "(", "nsites", ",", "imtls", ".", "dt", ")", "for", "imt", "in", "curves", ".", "dtype", ".", "names", ":", "curves_by_imt", "=", "curves", "[", "imt", "]", "for", "sid", "in", "self", ":", "curves_by_imt", "[", "sid", "]", "=", "self", "[", "sid", "]", ".", "array", "[", "imtls", "(", "imt", ")", ",", "idx", "]", "return", "curves" ]
Convert a probability map into a composite array of length `nsites`
and dtype `imtls.dt`.

:param imtls: DictArray instance
:param nsites: the total number of sites
:param idx: index on the z-axis (default 0)
[ "Convert", "a", "probability", "map", "into", "a", "composite", "array", "of", "length", "nsites", "and", "dtype", "imtls", ".", "dt", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/probability_map.py#L203-L220
308
gem/oq-engine
openquake/hazardlib/probability_map.py
ProbabilityMap.filter
def filter(self, sids):
    """
    Extracts a submap of self for the given sids.
    """
    dic = self.__class__(self.shape_y, self.shape_z)
    for sid in sids:
        try:
            dic[sid] = self[sid]
        except KeyError:
            pass
    return dic
python
[ "def", "filter", "(", "self", ",", "sids", ")", ":", "dic", "=", "self", ".", "__class__", "(", "self", ".", "shape_y", ",", "self", ".", "shape_z", ")", "for", "sid", "in", "sids", ":", "try", ":", "dic", "[", "sid", "]", "=", "self", "[", "sid", "]", "except", "KeyError", ":", "pass", "return", "dic" ]
Extracts a submap of self for the given sids.
[ "Extracts", "a", "submap", "of", "self", "for", "the", "given", "sids", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/probability_map.py#L247-L257
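Since the code above indexes and iterates a ProbabilityMap by site id like a dict, filter is the keep-if-present subset pattern; missing sids are silently skipped. The same idea with a plain dict:

pmap = {0: 'curve0', 2: 'curve2', 5: 'curve5'}
sids = [0, 1, 5]                # site 1 is absent and gets skipped
sub = {sid: pmap[sid] for sid in sids if sid in pmap}
print(sub)                      # {0: 'curve0', 5: 'curve5'}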
309
gem/oq-engine
openquake/hazardlib/probability_map.py
ProbabilityMap.extract
def extract(self, inner_idx):
    """
    Extracts a component of the underlying ProbabilityCurves,
    specified by the index `inner_idx`.
    """
    out = self.__class__(self.shape_y, 1)
    for sid in self:
        curve = self[sid]
        array = curve.array[:, inner_idx].reshape(-1, 1)
        out[sid] = ProbabilityCurve(array)
    return out
python
[ "def", "extract", "(", "self", ",", "inner_idx", ")", ":", "out", "=", "self", ".", "__class__", "(", "self", ".", "shape_y", ",", "1", ")", "for", "sid", "in", "self", ":", "curve", "=", "self", "[", "sid", "]", "array", "=", "curve", ".", "array", "[", ":", ",", "inner_idx", "]", ".", "reshape", "(", "-", "1", ",", "1", ")", "out", "[", "sid", "]", "=", "ProbabilityCurve", "(", "array", ")", "return", "out" ]
Extracts a component of the underlying ProbabilityCurves, specified by the index `inner_idx`.
[ "Extracts", "a", "component", "of", "the", "underlying", "ProbabilityCurves", "specified", "by", "the", "index", "inner_idx", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/probability_map.py#L259-L269
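The one subtlety in extract is the reshape: taking column inner_idx of an (L, I) array yields a flat (L,) vector, and .reshape(-1, 1) restores the (L, 1) column shape that a ProbabilityCurve expects. In isolation:

import numpy

arr = numpy.arange(8.0).reshape(4, 2)   # L=4 levels, I=2 inner components
col = arr[:, 1]                         # shape (4,): the 2D structure is lost
print(col.shape, col.reshape(-1, 1).shape)  # (4,) (4, 1)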
310
gem/oq-engine
openquake/commands/compare.py
compare
def compare(what, imt, calc_ids, files, samplesites=100, rtol=.1, atol=1E-4):
    """
    Compare the hazard curves or maps of two or more calculations
    """
    sids, imtls, poes, arrays = getdata(what, calc_ids, samplesites)
    try:
        levels = imtls[imt]
    except KeyError:
        sys.exit(
            '%s not found. The available IMTs are %s' % (imt, list(imtls)))
    imt2idx = {imt: i for i, imt in enumerate(imtls)}
    head = ['site_id'] if files else ['site_id', 'calc_id']
    if what == 'hcurves':
        array_imt = arrays[:, :, imtls(imt)]
        header = head + ['%.5f' % lvl for lvl in levels]
    else:  # hmaps
        array_imt = arrays[:, :, imt2idx[imt]]
        header = head + [str(poe) for poe in poes]
    rows = collections.defaultdict(list)
    diff_idxs = get_diff_idxs(array_imt, rtol, atol)
    if len(diff_idxs) == 0:
        print('There are no differences within the tolerance of %d%%' %
              (rtol * 100))
        return
    arr = array_imt.transpose(1, 0, 2)  # shape (N, C, L)
    for sid, array in sorted(zip(sids[diff_idxs], arr[diff_idxs])):
        for calc_id, cols in zip(calc_ids, array):
            if files:
                rows[calc_id].append([sid] + list(cols))
            else:
                rows['all'].append([sid, calc_id] + list(cols))
    if files:
        fdict = {calc_id: open('%s.txt' % calc_id, 'w')
                 for calc_id in calc_ids}
        for calc_id, f in fdict.items():
            f.write(views.rst_table(rows[calc_id], header))
            print('Generated %s' % f.name)
    else:
        print(views.rst_table(rows['all'], header))
python
[ "def", "compare", "(", "what", ",", "imt", ",", "calc_ids", ",", "files", ",", "samplesites", "=", "100", ",", "rtol", "=", ".1", ",", "atol", "=", "1E-4", ")", ":", "sids", ",", "imtls", ",", "poes", ",", "arrays", "=", "getdata", "(", "what", ",", "calc_ids", ",", "samplesites", ")", "try", ":", "levels", "=", "imtls", "[", "imt", "]", "except", "KeyError", ":", "sys", ".", "exit", "(", "'%s not found. The available IMTs are %s'", "%", "(", "imt", ",", "list", "(", "imtls", ")", ")", ")", "imt2idx", "=", "{", "imt", ":", "i", "for", "i", ",", "imt", "in", "enumerate", "(", "imtls", ")", "}", "head", "=", "[", "'site_id'", "]", "if", "files", "else", "[", "'site_id'", ",", "'calc_id'", "]", "if", "what", "==", "'hcurves'", ":", "array_imt", "=", "arrays", "[", ":", ",", ":", ",", "imtls", "(", "imt", ")", "]", "header", "=", "head", "+", "[", "'%.5f'", "%", "lvl", "for", "lvl", "in", "levels", "]", "else", ":", "# hmaps", "array_imt", "=", "arrays", "[", ":", ",", ":", ",", "imt2idx", "[", "imt", "]", "]", "header", "=", "head", "+", "[", "str", "(", "poe", ")", "for", "poe", "in", "poes", "]", "rows", "=", "collections", ".", "defaultdict", "(", "list", ")", "diff_idxs", "=", "get_diff_idxs", "(", "array_imt", ",", "rtol", ",", "atol", ")", "if", "len", "(", "diff_idxs", ")", "==", "0", ":", "print", "(", "'There are no differences within the tolerance of %d%%'", "%", "(", "rtol", "*", "100", ")", ")", "return", "arr", "=", "array_imt", ".", "transpose", "(", "1", ",", "0", ",", "2", ")", "# shape (N, C, L)", "for", "sid", ",", "array", "in", "sorted", "(", "zip", "(", "sids", "[", "diff_idxs", "]", ",", "arr", "[", "diff_idxs", "]", ")", ")", ":", "for", "calc_id", ",", "cols", "in", "zip", "(", "calc_ids", ",", "array", ")", ":", "if", "files", ":", "rows", "[", "calc_id", "]", ".", "append", "(", "[", "sid", "]", "+", "list", "(", "cols", ")", ")", "else", ":", "rows", "[", "'all'", "]", ".", "append", "(", "[", "sid", ",", "calc_id", "]", "+", "list", "(", "cols", ")", ")", "if", "files", ":", "fdict", "=", "{", "calc_id", ":", "open", "(", "'%s.txt'", "%", "calc_id", ",", "'w'", ")", "for", "calc_id", "in", "calc_ids", "}", "for", "calc_id", ",", "f", "in", "fdict", ".", "items", "(", ")", ":", "f", ".", "write", "(", "views", ".", "rst_table", "(", "rows", "[", "calc_id", "]", ",", "header", ")", ")", "print", "(", "'Generated %s'", "%", "f", ".", "name", ")", "else", ":", "print", "(", "views", ".", "rst_table", "(", "rows", "[", "'all'", "]", ",", "header", ")", ")" ]
Compare the hazard curves or maps of two or more calculations
[ "Compare", "the", "hazard", "curves", "or", "maps", "of", "two", "or", "more", "calculations" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/compare.py#L69-L107
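The heavy lifting in compare is done by get_diff_idxs, which flags the sites where the calculations disagree beyond rtol/atol. A hedged sketch of that kind of check with numpy.allclose; the real helper lives in openquake/commands/compare.py and its exact semantics are assumed here:

import numpy

def diff_idxs(array, rtol, atol):
    # array shape (C, N, L): C calculations, N sites, L values per site;
    # keep the sites where some calculation differs from the first one
    C, N, _ = array.shape
    return numpy.array([n for n in range(N)
                        if not all(numpy.allclose(array[0, n], array[c, n],
                                                  rtol=rtol, atol=atol)
                                   for c in range(1, C))])

arr = numpy.zeros((2, 3, 4))
arr[1, 2] += 0.5                           # calc 1 disagrees on site 2
print(diff_idxs(arr, rtol=.1, atol=1E-4))  # [2]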
311
gem/oq-engine
openquake/hmtk/plotting/seismicity/catalogue_plots.py
build_filename
def build_filename(filename, filetype='png', resolution=300):
    """
    Uses the input properties to create the string of the filename

    :param str filename:
        Name of the file
    :param str filetype:
        Type of file
    :param int resolution:
        DPI resolution of the output figure
    """
    filevals = os.path.splitext(filename)
    if filevals[1]:
        filetype = filevals[1][1:]
    if not filetype:
        filetype = 'png'
    filename = filevals[0] + '.' + filetype
    if not resolution:
        resolution = 300
    return filename, filetype, resolution
python
[ "def", "build_filename", "(", "filename", ",", "filetype", "=", "'png'", ",", "resolution", "=", "300", ")", ":", "filevals", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "if", "filevals", "[", "1", "]", ":", "filetype", "=", "filevals", "[", "1", "]", "[", "1", ":", "]", "if", "not", "filetype", ":", "filetype", "=", "'png'", "filename", "=", "filevals", "[", "0", "]", "+", "'.'", "+", "filetype", "if", "not", "resolution", ":", "resolution", "=", "300", "return", "filename", ",", "filetype", ",", "resolution" ]
Uses the input properties to create the string of the filename

:param str filename:
    Name of the file
:param str filetype:
    Type of file
:param int resolution:
    DPI resolution of the output figure
[ "Uses", "the", "input", "properties", "to", "create", "the", "string", "of", "the", "filename" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L61-L81
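Because build_filename only needs os.path.splitext, its behaviour is easy to check standalone: an explicit extension inside the filename wins over the filetype argument, and the extension is re-attached after the split.

import os

def build_filename(filename, filetype='png', resolution=300):
    # copied from the record above
    filevals = os.path.splitext(filename)
    if filevals[1]:
        filetype = filevals[1][1:]  # extension in the name wins
    if not filetype:
        filetype = 'png'
    filename = filevals[0] + '.' + filetype
    if not resolution:
        resolution = 300
    return filename, filetype, resolution

print(build_filename('map.pdf', filetype='png'))  # ('map.pdf', 'pdf', 300)
print(build_filename('map', filetype='eps'))      # ('map.eps', 'eps', 300)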
312
gem/oq-engine
openquake/hmtk/plotting/seismicity/catalogue_plots.py
_get_catalogue_bin_limits
def _get_catalogue_bin_limits(catalogue, dmag):
    """
    Returns the magnitude bins corresponding to the catalogue
    """
    mag_bins = np.arange(
        float(np.floor(np.min(catalogue.data['magnitude']))) - dmag,
        float(np.ceil(np.max(catalogue.data['magnitude']))) + dmag,
        dmag)
    counter = np.histogram(catalogue.data['magnitude'], mag_bins)[0]
    idx = np.where(counter > 0)[0]
    mag_bins = mag_bins[idx[0]:(idx[-1] + 2)]
    return mag_bins
python
[ "def", "_get_catalogue_bin_limits", "(", "catalogue", ",", "dmag", ")", ":", "mag_bins", "=", "np", ".", "arange", "(", "float", "(", "np", ".", "floor", "(", "np", ".", "min", "(", "catalogue", ".", "data", "[", "'magnitude'", "]", ")", ")", ")", "-", "dmag", ",", "float", "(", "np", ".", "ceil", "(", "np", ".", "max", "(", "catalogue", ".", "data", "[", "'magnitude'", "]", ")", ")", ")", "+", "dmag", ",", "dmag", ")", "counter", "=", "np", ".", "histogram", "(", "catalogue", ".", "data", "[", "'magnitude'", "]", ",", "mag_bins", ")", "[", "0", "]", "idx", "=", "np", ".", "where", "(", "counter", ">", "0", ")", "[", "0", "]", "mag_bins", "=", "mag_bins", "[", "idx", "[", "0", "]", ":", "(", "idx", "[", "-", "1", "]", "+", "2", ")", "]", "return", "mag_bins" ]
Returns the magnitude bins corresponding to the catalogue
[ "Returns", "the", "magnitude", "bins", "corresponding", "to", "the", "catalogue" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L103-L114
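The helper pads the magnitude range by one bin width on each side, then trims leading and trailing empty bins using the histogram counts. Checked here with a stand-in catalogue object; only the data dict is used, so the class is hypothetical:

import numpy as np

class FakeCatalogue:  # hypothetical stand-in; only .data is needed
    def __init__(self, mags):
        self.data = {'magnitude': np.asarray(mags)}

def get_bin_limits(catalogue, dmag):
    # same logic as _get_catalogue_bin_limits above
    mag_bins = np.arange(
        float(np.floor(np.min(catalogue.data['magnitude']))) - dmag,
        float(np.ceil(np.max(catalogue.data['magnitude']))) + dmag,
        dmag)
    counter = np.histogram(catalogue.data['magnitude'], mag_bins)[0]
    idx = np.where(counter > 0)[0]
    return mag_bins[idx[0]:(idx[-1] + 2)]

cat = FakeCatalogue([4.2, 4.3, 5.1])
print(get_bin_limits(cat, 0.5))  # [4.  4.5 5.  5.5], empty edge bins trimmed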
313
gem/oq-engine
openquake/hmtk/plotting/seismicity/catalogue_plots.py
plot_depth_histogram
def plot_depth_histogram(
        catalogue, bin_width, normalisation=False, bootstrap=None,
        filename=None, figure_size=(8, 6), filetype='png', dpi=300, ax=None):
    """
    Creates a histogram of the depths in the catalogue

    :param catalogue:
        Earthquake catalogue as instance of :class:
        openquake.hmtk.seismicity.catalogue.Catalogue
    :param float bin_width:
        Width of the histogram for the depth bins
    :param bool normalisation:
        Normalise the histogram to give output as PMF (True) or count (False)
    :param int bootstrap:
        To sample depth uncertainty choose number of samples
    """
    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()
    # Create depth range
    if len(catalogue.data['depth']) == 0:  # pylint: disable=len-as-condition
        raise ValueError('No depths reported in catalogue!')
    depth_bins = np.arange(0.,
                           np.max(catalogue.data['depth']) + bin_width,
                           bin_width)
    depth_hist = catalogue.get_depth_distribution(depth_bins,
                                                  normalisation,
                                                  bootstrap)
    ax.bar(depth_bins[:-1], depth_hist, width=0.95 * bin_width,
           edgecolor='k')
    ax.set_xlabel('Depth (km)')
    if normalisation:
        ax.set_ylabel('Probability Mass Function')
    else:
        ax.set_ylabel('Count')
    ax.set_title('Depth Histogram')
    _save_image(fig, filename, filetype, dpi)
python
[ "def", "plot_depth_histogram", "(", "catalogue", ",", "bin_width", ",", "normalisation", "=", "False", ",", "bootstrap", "=", "None", ",", "filename", "=", "None", ",", "figure_size", "=", "(", "8", ",", "6", ")", ",", "filetype", "=", "'png'", ",", "dpi", "=", "300", ",", "ax", "=", "None", ")", ":", "if", "ax", "is", "None", ":", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", "figsize", "=", "figure_size", ")", "else", ":", "fig", "=", "ax", ".", "get_figure", "(", ")", "# Create depth range", "if", "len", "(", "catalogue", ".", "data", "[", "'depth'", "]", ")", "==", "0", ":", "# pylint: disable=len-as-condition", "raise", "ValueError", "(", "'No depths reported in catalogue!'", ")", "depth_bins", "=", "np", ".", "arange", "(", "0.", ",", "np", ".", "max", "(", "catalogue", ".", "data", "[", "'depth'", "]", ")", "+", "bin_width", ",", "bin_width", ")", "depth_hist", "=", "catalogue", ".", "get_depth_distribution", "(", "depth_bins", ",", "normalisation", ",", "bootstrap", ")", "ax", ".", "bar", "(", "depth_bins", "[", ":", "-", "1", "]", ",", "depth_hist", ",", "width", "=", "0.95", "*", "bin_width", ",", "edgecolor", "=", "'k'", ")", "ax", ".", "set_xlabel", "(", "'Depth (km)'", ")", "if", "normalisation", ":", "ax", ".", "set_ylabel", "(", "'Probability Mass Function'", ")", "else", ":", "ax", ".", "set_ylabel", "(", "'Count'", ")", "ax", ".", "set_title", "(", "'Depth Histogram'", ")", "_save_image", "(", "fig", ",", "filename", ",", "filetype", ",", "dpi", ")" ]
Creates a histogram of the depths in the catalogue

:param catalogue:
    Earthquake catalogue as instance of :class:
    openquake.hmtk.seismicity.catalogue.Catalogue
:param float bin_width:
    Width of the histogram for the depth bins
:param bool normalisation:
    Normalise the histogram to give output as PMF (True) or count (False)
:param int bootstrap:
    To sample depth uncertainty choose number of samples
[ "Creates", "a", "histogram", "of", "the", "depths", "in", "the", "catalogue" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L117-L158
314
gem/oq-engine
openquake/hmtk/plotting/seismicity/catalogue_plots.py
plot_magnitude_depth_density
def plot_magnitude_depth_density(
        catalogue, mag_int, depth_int, logscale=False, normalisation=False,
        bootstrap=None, filename=None, figure_size=(8, 6), filetype='png',
        dpi=300, ax=None):
    """
    Creates a density plot of the magnitude and depth distribution

    :param catalogue:
        Earthquake catalogue as instance of :class:
        openquake.hmtk.seismicity.catalogue.Catalogue
    :param float mag_int:
        Width of the histogram for the magnitude bins
    :param float depth_int:
        Width of the histogram for the depth bins
    :param bool logscale:
        Choose to scale the colours in a log-scale (True) or linear (False)
    :param bool normalisation:
        Normalise the histogram to give output as PMF (True) or count (False)
    :param int bootstrap:
        To sample magnitude and depth uncertainties choose number of samples
    """
    if len(catalogue.data['depth']) == 0:  # pylint: disable=len-as-condition
        raise ValueError('No depths reported in catalogue!')
    depth_bins = np.arange(0.,
                           np.max(catalogue.data['depth']) + depth_int,
                           depth_int)
    mag_bins = _get_catalogue_bin_limits(catalogue, mag_int)
    mag_depth_dist = catalogue.get_magnitude_depth_distribution(
        mag_bins, depth_bins, normalisation, bootstrap)
    vmin_val = np.min(mag_depth_dist[mag_depth_dist > 0.])
    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()
    if logscale:
        normaliser = LogNorm(vmin=vmin_val, vmax=np.max(mag_depth_dist))
    else:
        normaliser = Normalize(vmin=0, vmax=np.max(mag_depth_dist))
    im = ax.pcolor(mag_bins[:-1], depth_bins[:-1], mag_depth_dist.T,
                   norm=normaliser)
    ax.set_xlabel('Magnitude')
    ax.set_ylabel('Depth (km)')
    ax.set_xlim(mag_bins[0], mag_bins[-1])
    ax.set_ylim(depth_bins[0], depth_bins[-1])
    fig.colorbar(im, ax=ax)
    if normalisation:
        ax.set_title('Magnitude-Depth Density')
    else:
        ax.set_title('Magnitude-Depth Count')
    _save_image(fig, filename, filetype, dpi)
python
[ "def", "plot_magnitude_depth_density", "(", "catalogue", ",", "mag_int", ",", "depth_int", ",", "logscale", "=", "False", ",", "normalisation", "=", "False", ",", "bootstrap", "=", "None", ",", "filename", "=", "None", ",", "figure_size", "=", "(", "8", ",", "6", ")", ",", "filetype", "=", "'png'", ",", "dpi", "=", "300", ",", "ax", "=", "None", ")", ":", "if", "len", "(", "catalogue", ".", "data", "[", "'depth'", "]", ")", "==", "0", ":", "# pylint: disable=len-as-condition", "raise", "ValueError", "(", "'No depths reported in catalogue!'", ")", "depth_bins", "=", "np", ".", "arange", "(", "0.", ",", "np", ".", "max", "(", "catalogue", ".", "data", "[", "'depth'", "]", ")", "+", "depth_int", ",", "depth_int", ")", "mag_bins", "=", "_get_catalogue_bin_limits", "(", "catalogue", ",", "mag_int", ")", "mag_depth_dist", "=", "catalogue", ".", "get_magnitude_depth_distribution", "(", "mag_bins", ",", "depth_bins", ",", "normalisation", ",", "bootstrap", ")", "vmin_val", "=", "np", ".", "min", "(", "mag_depth_dist", "[", "mag_depth_dist", ">", "0.", "]", ")", "if", "ax", "is", "None", ":", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", "figsize", "=", "figure_size", ")", "else", ":", "fig", "=", "ax", ".", "get_figure", "(", ")", "if", "logscale", ":", "normaliser", "=", "LogNorm", "(", "vmin", "=", "vmin_val", ",", "vmax", "=", "np", ".", "max", "(", "mag_depth_dist", ")", ")", "else", ":", "normaliser", "=", "Normalize", "(", "vmin", "=", "0", ",", "vmax", "=", "np", ".", "max", "(", "mag_depth_dist", ")", ")", "im", "=", "ax", ".", "pcolor", "(", "mag_bins", "[", ":", "-", "1", "]", ",", "depth_bins", "[", ":", "-", "1", "]", ",", "mag_depth_dist", ".", "T", ",", "norm", "=", "normaliser", ")", "ax", ".", "set_xlabel", "(", "'Magnitude'", ")", "ax", ".", "set_ylabel", "(", "'Depth (km)'", ")", "ax", ".", "set_xlim", "(", "mag_bins", "[", "0", "]", ",", "mag_bins", "[", "-", "1", "]", ")", "ax", ".", "set_ylim", "(", "depth_bins", "[", "0", "]", ",", "depth_bins", "[", "-", "1", "]", ")", "fig", ".", "colorbar", "(", "im", ",", "ax", "=", "ax", ")", "if", "normalisation", ":", "ax", ".", "set_title", "(", "'Magnitude-Depth Density'", ")", "else", ":", "ax", ".", "set_title", "(", "'Magnitude-Depth Count'", ")", "_save_image", "(", "fig", ",", "filename", ",", "filetype", ",", "dpi", ")" ]
Creates a density plot of the magnitude and depth distribution

:param catalogue:
    Earthquake catalogue as instance of :class:
    openquake.hmtk.seismicity.catalogue.Catalogue
:param float mag_int:
    Width of the histogram for the magnitude bins
:param float depth_int:
    Width of the histogram for the depth bins
:param bool logscale:
    Choose to scale the colours in a log-scale (True) or linear (False)
:param bool normalisation:
    Normalise the histogram to give output as PMF (True) or count (False)
:param int bootstrap:
    To sample magnitude and depth uncertainties choose number of samples
[ "Creates", "a", "density", "plot", "of", "the", "magnitude", "and", "depth", "distribution" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L161-L218
315
gem/oq-engine
openquake/hmtk/plotting/seismicity/catalogue_plots.py
plot_magnitude_time_scatter
def plot_magnitude_time_scatter(
        catalogue, plot_error=False, fmt_string='o', filename=None,
        figure_size=(8, 6), filetype='png', dpi=300, ax=None):
    """
    Creates a simple scatter plot of magnitude with time

    :param catalogue:
        Earthquake catalogue as instance of :class:
        openquake.hmtk.seismicity.catalogue.Catalogue
    :param bool plot_error:
        Choose to plot error bars (True) or not (False)
    :param str fmt_string:
        Symbology of plot
    """
    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()
    dtime = catalogue.get_decimal_time()
    # pylint: disable=len-as-condition
    if len(catalogue.data['sigmaMagnitude']) == 0:
        print('Magnitude Error is missing - neglecting error bars!')
        plot_error = False
    if plot_error:
        ax.errorbar(dtime, catalogue.data['magnitude'],
                    xerr=None, yerr=catalogue.data['sigmaMagnitude'],
                    fmt=fmt_string)
    else:
        ax.plot(dtime, catalogue.data['magnitude'], fmt_string)
    ax.set_xlabel('Year')
    ax.set_ylabel('Magnitude')
    ax.set_title('Magnitude-Time Plot')
    _save_image(fig, filename, filetype, dpi)
python
[ "def", "plot_magnitude_time_scatter", "(", "catalogue", ",", "plot_error", "=", "False", ",", "fmt_string", "=", "'o'", ",", "filename", "=", "None", ",", "figure_size", "=", "(", "8", ",", "6", ")", ",", "filetype", "=", "'png'", ",", "dpi", "=", "300", ",", "ax", "=", "None", ")", ":", "if", "ax", "is", "None", ":", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", "figsize", "=", "figure_size", ")", "else", ":", "fig", "=", "ax", ".", "get_figure", "(", ")", "dtime", "=", "catalogue", ".", "get_decimal_time", "(", ")", "# pylint: disable=len-as-condition", "if", "len", "(", "catalogue", ".", "data", "[", "'sigmaMagnitude'", "]", ")", "==", "0", ":", "print", "(", "'Magnitude Error is missing - neglecting error bars!'", ")", "plot_error", "=", "False", "if", "plot_error", ":", "ax", ".", "errorbar", "(", "dtime", ",", "catalogue", ".", "data", "[", "'magnitude'", "]", ",", "xerr", "=", "None", ",", "yerr", "=", "catalogue", ".", "data", "[", "'sigmaMagnitude'", "]", ",", "fmt", "=", "fmt_string", ")", "else", ":", "ax", ".", "plot", "(", "dtime", ",", "catalogue", ".", "data", "[", "'magnitude'", "]", ",", "fmt_string", ")", "ax", ".", "set_xlabel", "(", "'Year'", ")", "ax", ".", "set_ylabel", "(", "'Magnitude'", ")", "ax", ".", "set_title", "(", "'Magnitude-Time Plot'", ")", "_save_image", "(", "fig", ",", "filename", ",", "filetype", ",", "dpi", ")" ]
Creates a simple scatter plot of magnitude with time

:param catalogue:
    Earthquake catalogue as instance of :class:
    openquake.hmtk.seismicity.catalogue.Catalogue
:param bool plot_error:
    Choose to plot error bars (True) or not (False)
:param str fmt_string:
    Symbology of plot
[ "Creates", "a", "simple", "scatter", "plot", "of", "magnitude", "with", "time" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L221-L258
316
gem/oq-engine
openquake/hmtk/plotting/seismicity/catalogue_plots.py
plot_magnitude_time_density
def plot_magnitude_time_density(
        catalogue, mag_int, time_int, completeness=None, normalisation=False,
        logscale=True, bootstrap=None, xlim=[], ylim=[], filename=None,
        figure_size=(8, 6), filetype='png', dpi=300, ax=None):
    """
    Creates a plot of magnitude-time density

    :param catalogue:
        Earthquake catalogue as instance of :class:
        openquake.hmtk.seismicity.catalogue.Catalogue
    :param float mag_int:
        Width of the histogram for the magnitude bins
    :param float time_int:
        Width of the histogram for the time bin (in decimal years)
    :param bool normalisation:
        Normalise the histogram to give output as PMF (True) or count (False)
    :param int bootstrap:
        To sample magnitude and depth uncertainties choose number of samples
    """
    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()
    # Create the magnitude bins
    if isinstance(mag_int, (np.ndarray, list)):
        mag_bins = mag_int
    else:
        mag_bins = np.arange(
            np.min(catalogue.data['magnitude']),
            np.max(catalogue.data['magnitude']) + mag_int / 2.,
            mag_int)
    # Creates the time bins
    if isinstance(time_int, (np.ndarray, list)):
        time_bins = time_int
    else:
        time_bins = np.arange(
            float(np.min(catalogue.data['year'])),
            float(np.max(catalogue.data['year'])) + 1.,
            float(time_int))
    # Get magnitude-time distribution
    mag_time_dist = catalogue.get_magnitude_time_distribution(
        mag_bins, time_bins, normalisation, bootstrap)
    # Get smallest non-zero value
    vmin_val = np.min(mag_time_dist[mag_time_dist > 0.])
    # Create plot
    if logscale:
        norm_data = LogNorm(vmin=vmin_val, vmax=np.max(mag_time_dist))
    else:
        if normalisation:
            norm_data = Normalize(vmin=vmin_val,
                                  vmax=np.max(mag_time_dist))
        else:
            norm_data = Normalize(vmin=1.0, vmax=np.max(mag_time_dist))
    im = ax.pcolor(time_bins[:-1], mag_bins[:-1], mag_time_dist.T,
                   norm=norm_data)
    ax.set_xlabel('Time (year)')
    ax.set_ylabel('Magnitude')
    if len(xlim) == 2:
        ax.set_xlim(xlim[0], xlim[1])
    else:
        ax.set_xlim(time_bins[0], time_bins[-1])
    if len(ylim) == 2:
        ax.set_ylim(ylim[0], ylim[1])
    else:
        ax.set_ylim(mag_bins[0],
                    mag_bins[-1] + (mag_bins[-1] - mag_bins[-2]))
    # Fix the title
    if normalisation:
        fig.colorbar(im, label='Event Density', shrink=0.9, ax=ax)
    else:
        fig.colorbar(im, label='Event Count', shrink=0.9, ax=ax)
    ax.grid(True)
    # Plot completeness
    if completeness is not None:
        _plot_completeness(ax, completeness, time_bins[0], time_bins[-1])
    _save_image(fig, filename, filetype, dpi)
python
[ "def", "plot_magnitude_time_density", "(", "catalogue", ",", "mag_int", ",", "time_int", ",", "completeness", "=", "None", ",", "normalisation", "=", "False", ",", "logscale", "=", "True", ",", "bootstrap", "=", "None", ",", "xlim", "=", "[", "]", ",", "ylim", "=", "[", "]", ",", "filename", "=", "None", ",", "figure_size", "=", "(", "8", ",", "6", ")", ",", "filetype", "=", "'png'", ",", "dpi", "=", "300", ",", "ax", "=", "None", ")", ":", "if", "ax", "is", "None", ":", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", "figsize", "=", "figure_size", ")", "else", ":", "fig", "=", "ax", ".", "get_figure", "(", ")", "# Create the magnitude bins", "if", "isinstance", "(", "mag_int", ",", "(", "np", ".", "ndarray", ",", "list", ")", ")", ":", "mag_bins", "=", "mag_int", "else", ":", "mag_bins", "=", "np", ".", "arange", "(", "np", ".", "min", "(", "catalogue", ".", "data", "[", "'magnitude'", "]", ")", ",", "np", ".", "max", "(", "catalogue", ".", "data", "[", "'magnitude'", "]", ")", "+", "mag_int", "/", "2.", ",", "mag_int", ")", "# Creates the time bins", "if", "isinstance", "(", "time_int", ",", "(", "np", ".", "ndarray", ",", "list", ")", ")", ":", "time_bins", "=", "time_int", "else", ":", "time_bins", "=", "np", ".", "arange", "(", "float", "(", "np", ".", "min", "(", "catalogue", ".", "data", "[", "'year'", "]", ")", ")", ",", "float", "(", "np", ".", "max", "(", "catalogue", ".", "data", "[", "'year'", "]", ")", ")", "+", "1.", ",", "float", "(", "time_int", ")", ")", "# Get magnitude-time distribution", "mag_time_dist", "=", "catalogue", ".", "get_magnitude_time_distribution", "(", "mag_bins", ",", "time_bins", ",", "normalisation", ",", "bootstrap", ")", "# Get smallest non-zero value", "vmin_val", "=", "np", ".", "min", "(", "mag_time_dist", "[", "mag_time_dist", ">", "0.", "]", ")", "# Create plot", "if", "logscale", ":", "norm_data", "=", "LogNorm", "(", "vmin", "=", "vmin_val", ",", "vmax", "=", "np", ".", "max", "(", "mag_time_dist", ")", ")", "else", ":", "if", "normalisation", ":", "norm_data", "=", "Normalize", "(", "vmin", "=", "vmin_val", ",", "vmax", "=", "np", ".", "max", "(", "mag_time_dist", ")", ")", "else", ":", "norm_data", "=", "Normalize", "(", "vmin", "=", "1.0", ",", "vmax", "=", "np", ".", "max", "(", "mag_time_dist", ")", ")", "im", "=", "ax", ".", "pcolor", "(", "time_bins", "[", ":", "-", "1", "]", ",", "mag_bins", "[", ":", "-", "1", "]", ",", "mag_time_dist", ".", "T", ",", "norm", "=", "norm_data", ")", "ax", ".", "set_xlabel", "(", "'Time (year)'", ")", "ax", ".", "set_ylabel", "(", "'Magnitude'", ")", "if", "len", "(", "xlim", ")", "==", "2", ":", "ax", ".", "set_xlim", "(", "xlim", "[", "0", "]", ",", "xlim", "[", "1", "]", ")", "else", ":", "ax", ".", "set_xlim", "(", "time_bins", "[", "0", "]", ",", "time_bins", "[", "-", "1", "]", ")", "if", "len", "(", "ylim", ")", "==", "2", ":", "ax", ".", "set_ylim", "(", "ylim", "[", "0", "]", ",", "ylim", "[", "1", "]", ")", "else", ":", "ax", ".", "set_ylim", "(", "mag_bins", "[", "0", "]", ",", "mag_bins", "[", "-", "1", "]", "+", "(", "mag_bins", "[", "-", "1", "]", "-", "mag_bins", "[", "-", "2", "]", ")", ")", "# Fix the title", "if", "normalisation", ":", "fig", ".", "colorbar", "(", "im", ",", "label", "=", "'Event Density'", ",", "shrink", "=", "0.9", ",", "ax", "=", "ax", ")", "else", ":", "fig", ".", "colorbar", "(", "im", ",", "label", "=", "'Event Count'", ",", "shrink", "=", "0.9", ",", "ax", "=", "ax", ")", "ax", ".", "grid", "(", "True", ")", "# Plot completeness", "if", "completeness", 
"is", "not", "None", ":", "_plot_completeness", "(", "ax", ",", "completeness", ",", "time_bins", "[", "0", "]", ",", "time_bins", "[", "-", "1", "]", ")", "_save_image", "(", "fig", ",", "filename", ",", "filetype", ",", "dpi", ")" ]
Creates a plot of magnitude-time density

:param catalogue:
    Earthquake catalogue as instance of :class:
    openquake.hmtk.seismicity.catalogue.Catalogue
:param float mag_int:
    Width of the histogram for the magnitude bins
:param float time_int:
    Width of the histogram for the time bin (in decimal years)
:param bool normalisation:
    Normalise the histogram to give output as PMF (True) or count (False)
:param int bootstrap:
    To sample magnitude and depth uncertainties choose number of samples
[ "Creates", "a", "plot", "of", "magnitude", "-", "time", "density" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L261-L342
317
gem/oq-engine
openquake/hmtk/plotting/seismicity/catalogue_plots.py
_plot_completeness
def _plot_completeness(ax, comw, start_time, end_time):
    '''
    Adds completeness intervals to a plot
    '''
    comw = np.array(comw)
    comp = np.column_stack([np.hstack([end_time, comw[:, 0], start_time]),
                            np.hstack([comw[0, 1], comw[:, 1], comw[-1, 1]])])
    ax.step(comp[:-1, 0], comp[1:, 1], linestyle='-',
            where="post", linewidth=3, color='brown')
python
[ "def", "_plot_completeness", "(", "ax", ",", "comw", ",", "start_time", ",", "end_time", ")", ":", "comw", "=", "np", ".", "array", "(", "comw", ")", "comp", "=", "np", ".", "column_stack", "(", "[", "np", ".", "hstack", "(", "[", "end_time", ",", "comw", "[", ":", ",", "0", "]", ",", "start_time", "]", ")", ",", "np", ".", "hstack", "(", "[", "comw", "[", "0", ",", "1", "]", ",", "comw", "[", ":", ",", "1", "]", ",", "comw", "[", "-", "1", ",", "1", "]", "]", ")", "]", ")", "ax", ".", "step", "(", "comp", "[", ":", "-", "1", ",", "0", "]", ",", "comp", "[", "1", ":", ",", "1", "]", ",", "linestyle", "=", "'-'", ",", "where", "=", "\"post\"", ",", "linewidth", "=", "3", ",", "color", "=", "'brown'", ")" ]
Adds completeness intervals to a plot
[ "Adds", "completeness", "intervals", "to", "a", "plot" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L345-L353
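The geometry in _plot_completeness is worth seeing without matplotlib: the completeness table of (year, magnitude) rows is padded with the plot's end and start times and with the first/last magnitudes, giving the vertices of a step polyline. Numpy only, with illustrative values:

import numpy as np

comw = np.array([(1990., 4.0), (1960., 5.0), (1900., 6.0)])  # illustrative
start_time, end_time = 1880., 2015.
comp = np.column_stack([np.hstack([end_time, comw[:, 0], start_time]),
                        np.hstack([comw[0, 1], comw[:, 1], comw[-1, 1]])])
print(comp)
# [[2015.    4.]
#  [1990.    4.]
#  [1960.    5.]
#  [1900.    6.]
#  [1880.    6.]]  -> vertices fed to ax.step(..., where="post")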
318
gem/oq-engine
openquake/hmtk/plotting/seismicity/catalogue_plots.py
get_completeness_adjusted_table
def get_completeness_adjusted_table(catalogue, completeness, dmag,
                                    offset=1.0E-5, end_year=None, plot=False,
                                    figure_size=(8, 6), filename=None,
                                    filetype='png', dpi=300, ax=None):
    """
    Counts the number of earthquakes in each magnitude bin and normalises
    the rate to annual rates, taking into account the completeness
    """
    if not end_year:
        end_year = catalogue.end_year
    # Find the natural bin limits
    mag_bins = _get_catalogue_bin_limits(catalogue, dmag)
    obs_time = end_year - completeness[:, 0] + 1.
    obs_rates = np.zeros_like(mag_bins)
    durations = np.zeros_like(mag_bins)
    n_comp = np.shape(completeness)[0]
    for iloc in range(n_comp):
        low_mag = completeness[iloc, 1]
        comp_year = completeness[iloc, 0]
        if iloc == (n_comp - 1):
            idx = np.logical_and(
                catalogue.data['magnitude'] >= low_mag - offset,
                catalogue.data['year'] >= comp_year)
            high_mag = mag_bins[-1]
            obs_idx = mag_bins >= (low_mag - offset)
        else:
            high_mag = completeness[iloc + 1, 1]
            mag_idx = np.logical_and(
                catalogue.data['magnitude'] >= low_mag - offset,
                catalogue.data['magnitude'] < (high_mag - offset))
            idx = np.logical_and(
                mag_idx, catalogue.data['year'] >= (comp_year - offset))
            obs_idx = np.logical_and(mag_bins >= (low_mag - offset),
                                     mag_bins < (high_mag + offset))
        temp_rates = np.histogram(catalogue.data['magnitude'][idx],
                                  mag_bins[obs_idx])[0]
        temp_rates = temp_rates.astype(float) / obs_time[iloc]
        obs_rates[obs_idx[:-1]] = temp_rates
        durations[obs_idx[:-1]] = obs_time[iloc]
    selector = np.where(obs_rates > 0.)[0]
    mag_bins = mag_bins[selector]
    obs_rates = obs_rates[selector]
    durations = durations[selector]
    # Get cumulative rates
    cum_rates = np.array([sum(obs_rates[iloc:])
                          for iloc in range(0, len(obs_rates))])
    if plot:
        plt.figure(figsize=figure_size)
        plt.semilogy(mag_bins + dmag / 2., obs_rates, "bo",
                     label="Incremental")
        plt.semilogy(mag_bins + dmag / 2., cum_rates, "rs",
                     label="Cumulative")
        plt.xlabel("Magnitude (M)", fontsize=16)
        plt.ylabel("Annual Rate", fontsize=16)
        plt.grid(True)
        plt.legend(fontsize=16)
        if filename:
            plt.savefig(filename, format=filetype, dpi=dpi,
                        bbox_inches="tight")
    return np.column_stack([mag_bins, durations, obs_rates, cum_rates,
                            np.log10(cum_rates)])
python
[ "def", "get_completeness_adjusted_table", "(", "catalogue", ",", "completeness", ",", "dmag", ",", "offset", "=", "1.0E-5", ",", "end_year", "=", "None", ",", "plot", "=", "False", ",", "figure_size", "=", "(", "8", ",", "6", ")", ",", "filename", "=", "None", ",", "filetype", "=", "'png'", ",", "dpi", "=", "300", ",", "ax", "=", "None", ")", ":", "if", "not", "end_year", ":", "end_year", "=", "catalogue", ".", "end_year", "# Find the natural bin limits", "mag_bins", "=", "_get_catalogue_bin_limits", "(", "catalogue", ",", "dmag", ")", "obs_time", "=", "end_year", "-", "completeness", "[", ":", ",", "0", "]", "+", "1.", "obs_rates", "=", "np", ".", "zeros_like", "(", "mag_bins", ")", "durations", "=", "np", ".", "zeros_like", "(", "mag_bins", ")", "n_comp", "=", "np", ".", "shape", "(", "completeness", ")", "[", "0", "]", "for", "iloc", "in", "range", "(", "n_comp", ")", ":", "low_mag", "=", "completeness", "[", "iloc", ",", "1", "]", "comp_year", "=", "completeness", "[", "iloc", ",", "0", "]", "if", "iloc", "==", "(", "n_comp", "-", "1", ")", ":", "idx", "=", "np", ".", "logical_and", "(", "catalogue", ".", "data", "[", "'magnitude'", "]", ">=", "low_mag", "-", "offset", ",", "catalogue", ".", "data", "[", "'year'", "]", ">=", "comp_year", ")", "high_mag", "=", "mag_bins", "[", "-", "1", "]", "obs_idx", "=", "mag_bins", ">=", "(", "low_mag", "-", "offset", ")", "else", ":", "high_mag", "=", "completeness", "[", "iloc", "+", "1", ",", "1", "]", "mag_idx", "=", "np", ".", "logical_and", "(", "catalogue", ".", "data", "[", "'magnitude'", "]", ">=", "low_mag", "-", "offset", ",", "catalogue", ".", "data", "[", "'magnitude'", "]", "<", "(", "high_mag", "-", "offset", ")", ")", "idx", "=", "np", ".", "logical_and", "(", "mag_idx", ",", "catalogue", ".", "data", "[", "'year'", "]", ">=", "(", "comp_year", "-", "offset", ")", ")", "obs_idx", "=", "np", ".", "logical_and", "(", "mag_bins", ">=", "(", "low_mag", "-", "offset", ")", ",", "mag_bins", "<", "(", "high_mag", "+", "offset", ")", ")", "temp_rates", "=", "np", ".", "histogram", "(", "catalogue", ".", "data", "[", "'magnitude'", "]", "[", "idx", "]", ",", "mag_bins", "[", "obs_idx", "]", ")", "[", "0", "]", "temp_rates", "=", "temp_rates", ".", "astype", "(", "float", ")", "/", "obs_time", "[", "iloc", "]", "obs_rates", "[", "obs_idx", "[", ":", "-", "1", "]", "]", "=", "temp_rates", "durations", "[", "obs_idx", "[", ":", "-", "1", "]", "]", "=", "obs_time", "[", "iloc", "]", "selector", "=", "np", ".", "where", "(", "obs_rates", ">", "0.", ")", "[", "0", "]", "mag_bins", "=", "mag_bins", "[", "selector", "]", "obs_rates", "=", "obs_rates", "[", "selector", "]", "durations", "=", "durations", "[", "selector", "]", "# Get cumulative rates", "cum_rates", "=", "np", ".", "array", "(", "[", "sum", "(", "obs_rates", "[", "iloc", ":", "]", ")", "for", "iloc", "in", "range", "(", "0", ",", "len", "(", "obs_rates", ")", ")", "]", ")", "if", "plot", ":", "plt", ".", "figure", "(", "figsize", "=", "figure_size", ")", "plt", ".", "semilogy", "(", "mag_bins", "+", "dmag", "/", "2.", ",", "obs_rates", ",", "\"bo\"", ",", "label", "=", "\"Incremental\"", ")", "plt", ".", "semilogy", "(", "mag_bins", "+", "dmag", "/", "2.", ",", "cum_rates", ",", "\"rs\"", ",", "label", "=", "\"Cumulative\"", ")", "plt", ".", "xlabel", "(", "\"Magnitude (M)\"", ",", "fontsize", "=", "16", ")", "plt", ".", "ylabel", "(", "\"Annual Rate\"", ",", "fontsize", "=", "16", ")", "plt", ".", "grid", "(", "True", ")", "plt", ".", "legend", "(", "fontsize", "=", "16", ")", "if", 
"filename", ":", "plt", ".", "savefig", "(", "filename", ",", "format", "=", "filetype", ",", "dpi", "=", "dpi", ",", "bbox_inches", "=", "\"tight\"", ")", "return", "np", ".", "column_stack", "(", "[", "mag_bins", ",", "durations", ",", "obs_rates", ",", "cum_rates", ",", "np", ".", "log10", "(", "cum_rates", ")", "]", ")" ]
Counts the number of earthquakes in each magnitude bin and normalises the rate to annual rates, taking into account the completeness
[ "Counts", "the", "number", "of", "earthquakes", "in", "each", "magnitude", "bin", "and", "normalises", "the", "rate", "to", "annual", "rates", "taking", "into", "account", "the", "completeness" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L356-L417
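The normalisation step in the record above reduces to a per-window event count divided by the observation time. A minimal standalone sketch with numpy, using a synthetic catalogue and a two-row [year, magnitude] completeness table (all values illustrative):

import numpy as np

# Synthetic catalogue: magnitudes and years of events (illustrative only)
mags = np.array([4.1, 4.3, 5.2, 4.7, 6.1, 4.4, 5.5])
years = np.array([1965, 1980, 1975, 1990, 1950, 2000, 1995])
end_year = 2010
# Completeness table: each row is [completeness year, lower magnitude]
completeness = np.array([[1960., 4.0], [1940., 5.0]])

# Annual rate for the first completeness window, as in the function above:
# count events in the magnitude band observed since the completeness year
# and divide by the observation time
obs_time = end_year - completeness[:, 0] + 1.
idx = ((mags >= completeness[0, 1]) & (mags < completeness[1, 1]) &
       (years >= completeness[0, 0]))
rate_low = np.sum(idx) / obs_time[0]
print(rate_low)  # events per year in the 4.0-5.0 band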
319
gem/oq-engine
openquake/hmtk/plotting/seismicity/catalogue_plots.py
plot_observed_recurrence
def plot_observed_recurrence(
        catalogue, completeness, dmag, end_year=None, filename=None,
        figure_size=(8, 6), filetype='png', dpi=300, ax=None):
    """
    Plots the observed recurrence taking into account the completeness
    """
    # Get completeness adjusted recurrence table
    if isinstance(completeness, float):
        # Unique completeness
        completeness = np.array([[np.min(catalogue.data['year']),
                                  completeness]])
    if not end_year:
        end_year = catalogue.update_end_year()
    catalogue.data["dtime"] = catalogue.get_decimal_time()
    cent_mag, t_per, n_obs = get_completeness_counts(catalogue,
                                                     completeness,
                                                     dmag)
    obs_rates = n_obs / t_per
    cum_obs_rates = np.array([np.sum(obs_rates[i:])
                              for i in range(len(obs_rates))])

    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()

    ax.semilogy(cent_mag, obs_rates, 'bo', label="Incremental")
    ax.semilogy(cent_mag, cum_obs_rates, 'rs', label="Cumulative")
    ax.set_xlim([cent_mag[0] - 0.1, cent_mag[-1] + 0.1])
    ax.set_xlabel('Magnitude')
    ax.set_ylabel('Annual Rate')
    ax.legend()
    _save_image(fig, filename, filetype, dpi)
python
def plot_observed_recurrence(
        catalogue, completeness, dmag, end_year=None, filename=None,
        figure_size=(8, 6), filetype='png', dpi=300, ax=None):
    """
    Plots the observed recurrence taking into account the completeness
    """
    # Get completeness adjusted recurrence table
    if isinstance(completeness, float):
        # Unique completeness
        completeness = np.array([[np.min(catalogue.data['year']),
                                  completeness]])
    if not end_year:
        end_year = catalogue.update_end_year()
    catalogue.data["dtime"] = catalogue.get_decimal_time()
    cent_mag, t_per, n_obs = get_completeness_counts(catalogue,
                                                     completeness,
                                                     dmag)
    obs_rates = n_obs / t_per
    cum_obs_rates = np.array([np.sum(obs_rates[i:])
                              for i in range(len(obs_rates))])

    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()

    ax.semilogy(cent_mag, obs_rates, 'bo', label="Incremental")
    ax.semilogy(cent_mag, cum_obs_rates, 'rs', label="Cumulative")
    ax.set_xlim([cent_mag[0] - 0.1, cent_mag[-1] + 0.1])
    ax.set_xlabel('Magnitude')
    ax.set_ylabel('Annual Rate')
    ax.legend()
    _save_image(fig, filename, filetype, dpi)
[ "def", "plot_observed_recurrence", "(", "catalogue", ",", "completeness", ",", "dmag", ",", "end_year", "=", "None", ",", "filename", "=", "None", ",", "figure_size", "=", "(", "8", ",", "6", ")", ",", "filetype", "=", "'png'", ",", "dpi", "=", "300", ",", "ax", "=", "None", ")", ":", "# Get completeness adjusted recurrence table", "if", "isinstance", "(", "completeness", ",", "float", ")", ":", "# Unique completeness", "completeness", "=", "np", ".", "array", "(", "[", "[", "np", ".", "min", "(", "catalogue", ".", "data", "[", "'year'", "]", ")", ",", "completeness", "]", "]", ")", "if", "not", "end_year", ":", "end_year", "=", "catalogue", ".", "update_end_year", "(", ")", "catalogue", ".", "data", "[", "\"dtime\"", "]", "=", "catalogue", ".", "get_decimal_time", "(", ")", "cent_mag", ",", "t_per", ",", "n_obs", "=", "get_completeness_counts", "(", "catalogue", ",", "completeness", ",", "dmag", ")", "obs_rates", "=", "n_obs", "/", "t_per", "cum_obs_rates", "=", "np", ".", "array", "(", "[", "np", ".", "sum", "(", "obs_rates", "[", "i", ":", "]", ")", "for", "i", "in", "range", "(", "len", "(", "obs_rates", ")", ")", "]", ")", "if", "ax", "is", "None", ":", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", "figsize", "=", "figure_size", ")", "else", ":", "fig", "=", "ax", ".", "get_figure", "(", ")", "ax", ".", "semilogy", "(", "cent_mag", ",", "obs_rates", ",", "'bo'", ",", "label", "=", "\"Incremental\"", ")", "ax", ".", "semilogy", "(", "cent_mag", ",", "cum_obs_rates", ",", "'rs'", ",", "label", "=", "\"Cumulative\"", ")", "ax", ".", "set_xlim", "(", "[", "cent_mag", "[", "0", "]", "-", "0.1", ",", "cent_mag", "[", "-", "1", "]", "+", "0.1", "]", ")", "ax", ".", "set_xlabel", "(", "'Magnitude'", ")", "ax", ".", "set_ylabel", "(", "'Annual Rate'", ")", "ax", ".", "legend", "(", ")", "_save_image", "(", "fig", ",", "filename", ",", "filetype", ",", "dpi", ")" ]
Plots the observed recurrence taking into account the completeness
[ "Plots", "the", "observed", "recurrence", "taking", "into", "account", "the", "completeness" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L420-L452
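In the record above, cum_obs_rates[i] sums the incremental rates from bin i upwards, i.e. a reversed cumulative sum. A standalone equivalence check (rates illustrative):

import numpy as np

obs_rates = np.array([0.50, 0.20, 0.05, 0.01])  # incremental annual rates
# Cumulative rate of exceedance, as computed in the function above
cum = np.array([np.sum(obs_rates[i:]) for i in range(len(obs_rates))])
# Equivalent vectorised form: reversed cumulative sum
assert np.allclose(cum, np.cumsum(obs_rates[::-1])[::-1])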
320
gem/oq-engine
openquake/hmtk/strain/geodetic_strain.py
GeodeticStrain.get_number_observations
def get_number_observations(self):
        '''
        Returns the number of observations in the data file
        '''
        if isinstance(self.data, dict) and ('exx' in self.data.keys()):
            return len(self.data['exx'])
        else:
            return 0
python
def get_number_observations(self):
        '''
        Returns the number of observations in the data file
        '''
        if isinstance(self.data, dict) and ('exx' in self.data.keys()):
            return len(self.data['exx'])
        else:
            return 0
[ "def", "get_number_observations", "(", "self", ")", ":", "if", "isinstance", "(", "self", ".", "data", ",", "dict", ")", "and", "(", "'exx'", "in", "self", ".", "data", ".", "keys", "(", ")", ")", ":", "return", "len", "(", "self", ".", "data", "[", "'exx'", "]", ")", "else", ":", "return", "0" ]
Returns the number of observations in the data file
[ "Returns", "the", "number", "of", "observations", "in", "the", "data", "file" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/strain/geodetic_strain.py#L137-L144
321
gem/oq-engine
openquake/commands/plot_lc.py
plot_lc
def plot_lc(calc_id, aid=None):
    """
    Plot loss curves given a calculation id and an asset ordinal.
    """
    # read the hazard data
    dstore = util.read(calc_id)
    dset = dstore['agg_curves-rlzs']
    if aid is None:  # plot the global curves
        plt = make_figure(dset.attrs['return_periods'], dset.value)
    else:
        sys.exit('Not implemented yet')
    plt.show()
python
def plot_lc(calc_id, aid=None):
    """
    Plot loss curves given a calculation id and an asset ordinal.
    """
    # read the hazard data
    dstore = util.read(calc_id)
    dset = dstore['agg_curves-rlzs']
    if aid is None:  # plot the global curves
        plt = make_figure(dset.attrs['return_periods'], dset.value)
    else:
        sys.exit('Not implemented yet')
    plt.show()
[ "def", "plot_lc", "(", "calc_id", ",", "aid", "=", "None", ")", ":", "# read the hazard data", "dstore", "=", "util", ".", "read", "(", "calc_id", ")", "dset", "=", "dstore", "[", "'agg_curves-rlzs'", "]", "if", "aid", "is", "None", ":", "# plot the global curves", "plt", "=", "make_figure", "(", "dset", ".", "attrs", "[", "'return_periods'", "]", ",", "dset", ".", "value", ")", "else", ":", "sys", ".", "exit", "(", "'Not implemented yet'", ")", "plt", ".", "show", "(", ")" ]
Plot loss curves given a calculation id and an asset ordinal.
[ "Plot", "loss", "curves", "given", "a", "calculation", "id", "and", "an", "asset", "ordinal", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/plot_lc.py#L41-L52
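A minimal usage sketch, assuming the engine and its dependencies are installed and that a datastore for the given calculation exists; the calculation id is illustrative:

from openquake.commands.plot_lc import plot_lc

# Plots the aggregate loss curves for calculation 42
plot_lc(42)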
322
gem/oq-engine
openquake/hazardlib/gsim/nshmp_2014.py
get_weighted_poes
def get_weighted_poes(gsim, sctx, rctx, dctx, imt, imls, truncation_level,
                      weighting=DEFAULT_WEIGHTING):
    """
    This function implements the NGA West 2 GMPE epistemic uncertainty
    adjustment factor without re-calculating the actual GMPE each time.

    :param gsim:
        Instance of the GMPE
    :param list weighting:
        Weightings as a list of tuples of (weight, number standard
        deviations of the epistemic uncertainty adjustment)
    """
    if truncation_level is not None and truncation_level < 0:
        raise ValueError('truncation level must be zero, positive number '
                         'or None')
    gsim._check_imt(imt)
    adjustment = nga_west2_epistemic_adjustment(rctx.mag, dctx.rrup)
    adjustment = adjustment.reshape(adjustment.shape + (1, ))
    if truncation_level == 0:
        # zero truncation mode, just compare imls to mean
        imls = gsim.to_distribution_values(imls)
        mean, _ = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt, [])
        mean = mean.reshape(mean.shape + (1, ))
        output = np.zeros([mean.shape[0], imls.shape[0]])
        for (wgt, fct) in weighting:
            output += (wgt *
                       (imls <= (mean + (fct * adjustment))).astype(float))
        return output
    else:
        # use real normal distribution
        assert (const.StdDev.TOTAL
                in gsim.DEFINED_FOR_STANDARD_DEVIATION_TYPES)
        imls = gsim.to_distribution_values(imls)
        mean, [stddev] = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt,
                                                   [const.StdDev.TOTAL])
        mean = mean.reshape(mean.shape + (1, ))
        stddev = stddev.reshape(stddev.shape + (1, ))
        output = np.zeros([mean.shape[0], imls.shape[0]])
        for (wgt, fct) in weighting:
            values = (imls - (mean + (fct * adjustment))) / stddev
            if truncation_level is None:
                output += (wgt * _norm_sf(values))
            else:
                output += (wgt * _truncnorm_sf(truncation_level, values))
        return output
python
def get_weighted_poes(gsim, sctx, rctx, dctx, imt, imls, truncation_level,
                      weighting=DEFAULT_WEIGHTING):
    """
    This function implements the NGA West 2 GMPE epistemic uncertainty
    adjustment factor without re-calculating the actual GMPE each time.

    :param gsim:
        Instance of the GMPE
    :param list weighting:
        Weightings as a list of tuples of (weight, number standard
        deviations of the epistemic uncertainty adjustment)
    """
    if truncation_level is not None and truncation_level < 0:
        raise ValueError('truncation level must be zero, positive number '
                         'or None')
    gsim._check_imt(imt)
    adjustment = nga_west2_epistemic_adjustment(rctx.mag, dctx.rrup)
    adjustment = adjustment.reshape(adjustment.shape + (1, ))
    if truncation_level == 0:
        # zero truncation mode, just compare imls to mean
        imls = gsim.to_distribution_values(imls)
        mean, _ = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt, [])
        mean = mean.reshape(mean.shape + (1, ))
        output = np.zeros([mean.shape[0], imls.shape[0]])
        for (wgt, fct) in weighting:
            output += (wgt *
                       (imls <= (mean + (fct * adjustment))).astype(float))
        return output
    else:
        # use real normal distribution
        assert (const.StdDev.TOTAL
                in gsim.DEFINED_FOR_STANDARD_DEVIATION_TYPES)
        imls = gsim.to_distribution_values(imls)
        mean, [stddev] = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt,
                                                   [const.StdDev.TOTAL])
        mean = mean.reshape(mean.shape + (1, ))
        stddev = stddev.reshape(stddev.shape + (1, ))
        output = np.zeros([mean.shape[0], imls.shape[0]])
        for (wgt, fct) in weighting:
            values = (imls - (mean + (fct * adjustment))) / stddev
            if truncation_level is None:
                output += (wgt * _norm_sf(values))
            else:
                output += (wgt * _truncnorm_sf(truncation_level, values))
        return output
[ "def", "get_weighted_poes", "(", "gsim", ",", "sctx", ",", "rctx", ",", "dctx", ",", "imt", ",", "imls", ",", "truncation_level", ",", "weighting", "=", "DEFAULT_WEIGHTING", ")", ":", "if", "truncation_level", "is", "not", "None", "and", "truncation_level", "<", "0", ":", "raise", "ValueError", "(", "'truncation level must be zero, positive number '", "'or None'", ")", "gsim", ".", "_check_imt", "(", "imt", ")", "adjustment", "=", "nga_west2_epistemic_adjustment", "(", "rctx", ".", "mag", ",", "dctx", ".", "rrup", ")", "adjustment", "=", "adjustment", ".", "reshape", "(", "adjustment", ".", "shape", "+", "(", "1", ",", ")", ")", "if", "truncation_level", "==", "0", ":", "# zero truncation mode, just compare imls to mean", "imls", "=", "gsim", ".", "to_distribution_values", "(", "imls", ")", "mean", ",", "_", "=", "gsim", ".", "get_mean_and_stddevs", "(", "sctx", ",", "rctx", ",", "dctx", ",", "imt", ",", "[", "]", ")", "mean", "=", "mean", ".", "reshape", "(", "mean", ".", "shape", "+", "(", "1", ",", ")", ")", "output", "=", "np", ".", "zeros", "(", "[", "mean", ".", "shape", "[", "0", "]", ",", "imls", ".", "shape", "[", "0", "]", "]", ")", "for", "(", "wgt", ",", "fct", ")", "in", "weighting", ":", "output", "+=", "(", "wgt", "*", "(", "imls", "<=", "(", "mean", "+", "(", "fct", "*", "adjustment", ")", ")", ")", ".", "astype", "(", "float", ")", ")", "return", "output", "else", ":", "# use real normal distribution", "assert", "(", "const", ".", "StdDev", ".", "TOTAL", "in", "gsim", ".", "DEFINED_FOR_STANDARD_DEVIATION_TYPES", ")", "imls", "=", "gsim", ".", "to_distribution_values", "(", "imls", ")", "mean", ",", "[", "stddev", "]", "=", "gsim", ".", "get_mean_and_stddevs", "(", "sctx", ",", "rctx", ",", "dctx", ",", "imt", ",", "[", "const", ".", "StdDev", ".", "TOTAL", "]", ")", "mean", "=", "mean", ".", "reshape", "(", "mean", ".", "shape", "+", "(", "1", ",", ")", ")", "stddev", "=", "stddev", ".", "reshape", "(", "stddev", ".", "shape", "+", "(", "1", ",", ")", ")", "output", "=", "np", ".", "zeros", "(", "[", "mean", ".", "shape", "[", "0", "]", ",", "imls", ".", "shape", "[", "0", "]", "]", ")", "for", "(", "wgt", ",", "fct", ")", "in", "weighting", ":", "values", "=", "(", "imls", "-", "(", "mean", "+", "(", "fct", "*", "adjustment", ")", ")", ")", "/", "stddev", "if", "truncation_level", "is", "None", ":", "output", "+=", "(", "wgt", "*", "_norm_sf", "(", "values", ")", ")", "else", ":", "output", "+=", "(", "wgt", "*", "_truncnorm_sf", "(", "truncation_level", ",", "values", ")", ")", "return", "output" ]
This function implements the NGA West 2 GMPE epistemic uncertainty
adjustment factor without re-calculating the actual GMPE each time.

:param gsim:
    Instance of the GMPE
:param list weighting:
    Weightings as a list of tuples of (weight, number standard
    deviations of the epistemic uncertainty adjustment)
[ "This", "function", "implements", "the", "NGA", "West", "2", "GMPE", "epistemic", "uncertainty", "adjustment", "factor", "without", "re", "-", "calculating", "the", "actual", "GMPE", "each", "time", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/nshmp_2014.py#L102-L146
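The core of the record above is a weighted mixture of exceedance probabilities, each branch shifting the mean by a multiple of the epistemic adjustment. A scalar sketch with a hypothetical three-branch weighting (the weights, adjustment and ground-motion values are illustrative, and norm_sf stands in for the module's _norm_sf):

from math import erf, sqrt

def norm_sf(x):
    # Normal survival function via erf
    return 0.5 * (1.0 - erf(x / sqrt(2.0)))

# Hypothetical (weight, number of std. devs of adjustment) tuples,
# mirroring the (wgt, fct) pairs consumed by the function above
weighting = [(0.185, -1.0), (0.63, 0.0), (0.185, 1.0)]
mean, stddev, adjustment, iml = 0.0, 0.5, 0.2, 0.4
poe = sum(w * norm_sf((iml - (mean + f * adjustment)) / stddev)
          for w, f in weighting)
print(poe)  # weighted probability of exceeding iml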
323
gem/oq-engine
openquake/commonlib/shapefileparser.py
register_fields
def register_fields(w):
    """
    Register shapefile fields.
    """
    PARAMS_LIST = [BASE_PARAMS, GEOMETRY_PARAMS, MFD_PARAMS]
    for PARAMS in PARAMS_LIST:
        for _, param, dtype in PARAMS:
            w.field(param, fieldType=dtype, size=FIELD_SIZE)

    PARAMS_LIST = [
        RATE_PARAMS, STRIKE_PARAMS, DIP_PARAMS, RAKE_PARAMS, NPW_PARAMS,
        HDEPTH_PARAMS, HDW_PARAMS, PLANES_STRIKES_PARAM, PLANES_DIPS_PARAM]
    for PARAMS in PARAMS_LIST:
        for param, dtype in PARAMS:
            w.field(param, fieldType=dtype, size=FIELD_SIZE)

    # source typology
    w.field('sourcetype', 'C')
python
def register_fields(w):
    """
    Register shapefile fields.
    """
    PARAMS_LIST = [BASE_PARAMS, GEOMETRY_PARAMS, MFD_PARAMS]
    for PARAMS in PARAMS_LIST:
        for _, param, dtype in PARAMS:
            w.field(param, fieldType=dtype, size=FIELD_SIZE)

    PARAMS_LIST = [
        RATE_PARAMS, STRIKE_PARAMS, DIP_PARAMS, RAKE_PARAMS, NPW_PARAMS,
        HDEPTH_PARAMS, HDW_PARAMS, PLANES_STRIKES_PARAM, PLANES_DIPS_PARAM]
    for PARAMS in PARAMS_LIST:
        for param, dtype in PARAMS:
            w.field(param, fieldType=dtype, size=FIELD_SIZE)

    # source typology
    w.field('sourcetype', 'C')
[ "def", "register_fields", "(", "w", ")", ":", "PARAMS_LIST", "=", "[", "BASE_PARAMS", ",", "GEOMETRY_PARAMS", ",", "MFD_PARAMS", "]", "for", "PARAMS", "in", "PARAMS_LIST", ":", "for", "_", ",", "param", ",", "dtype", "in", "PARAMS", ":", "w", ".", "field", "(", "param", ",", "fieldType", "=", "dtype", ",", "size", "=", "FIELD_SIZE", ")", "PARAMS_LIST", "=", "[", "RATE_PARAMS", ",", "STRIKE_PARAMS", ",", "DIP_PARAMS", ",", "RAKE_PARAMS", ",", "NPW_PARAMS", ",", "HDEPTH_PARAMS", ",", "HDW_PARAMS", ",", "PLANES_STRIKES_PARAM", ",", "PLANES_DIPS_PARAM", "]", "for", "PARAMS", "in", "PARAMS_LIST", ":", "for", "param", ",", "dtype", "in", "PARAMS", ":", "w", ".", "field", "(", "param", ",", "fieldType", "=", "dtype", ",", "size", "=", "FIELD_SIZE", ")", "# source typology", "w", ".", "field", "(", "'sourcetype'", ",", "'C'", ")" ]
Register shapefile fields.
[ "Register", "shapefile", "fields", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L86-L103
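A minimal sketch of the kind of Writer the function above expects, using pyshp (the `shapefile` package, version >= 2); the target name, field names and sizes are illustrative, not the module's actual parameter tables:

import shapefile

w = shapefile.Writer("sources", shapeType=shapefile.POINT)
w.field("src_id", fieldType="C", size=30)
w.field("mmin", fieldType="N", size=14, decimal=2)
w.point(30.0, 40.0)
w.record(src_id="src001", mmin=4.5)
w.close()  # writes sources.shp / .shx / .dbf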
324
gem/oq-engine
openquake/commonlib/shapefileparser.py
extract_source_params
def extract_source_params(src):
    """
    Extract params from source object.
    """
    tags = get_taglist(src)
    data = []
    for key, param, vtype in BASE_PARAMS:
        if key in src.attrib:
            if vtype == "c":
                data.append((param, src.attrib[key]))
            elif vtype == "f":
                data.append((param, float(src.attrib[key])))
            else:
                data.append((param, None))
        elif key in tags:
            if vtype == "c":
                data.append((param, src.nodes[tags.index(key)].text))
            elif vtype == "f":
                data.append((param, float(src.nodes[tags.index(key)].text)))
            else:
                data.append((param, None))
        else:
            data.append((param, None))
    return dict(data)
python
def extract_source_params(src):
    """
    Extract params from source object.
    """
    tags = get_taglist(src)
    data = []
    for key, param, vtype in BASE_PARAMS:
        if key in src.attrib:
            if vtype == "c":
                data.append((param, src.attrib[key]))
            elif vtype == "f":
                data.append((param, float(src.attrib[key])))
            else:
                data.append((param, None))
        elif key in tags:
            if vtype == "c":
                data.append((param, src.nodes[tags.index(key)].text))
            elif vtype == "f":
                data.append((param, float(src.nodes[tags.index(key)].text)))
            else:
                data.append((param, None))
        else:
            data.append((param, None))
    return dict(data)
[ "def", "extract_source_params", "(", "src", ")", ":", "tags", "=", "get_taglist", "(", "src", ")", "data", "=", "[", "]", "for", "key", ",", "param", ",", "vtype", "in", "BASE_PARAMS", ":", "if", "key", "in", "src", ".", "attrib", ":", "if", "vtype", "==", "\"c\"", ":", "data", ".", "append", "(", "(", "param", ",", "src", ".", "attrib", "[", "key", "]", ")", ")", "elif", "vtype", "==", "\"f\"", ":", "data", ".", "append", "(", "(", "param", ",", "float", "(", "src", ".", "attrib", "[", "key", "]", ")", ")", ")", "else", ":", "data", ".", "append", "(", "(", "param", ",", "None", ")", ")", "elif", "key", "in", "tags", ":", "if", "vtype", "==", "\"c\"", ":", "data", ".", "append", "(", "(", "param", ",", "src", ".", "nodes", "[", "tags", ".", "index", "(", "key", ")", "]", ".", "text", ")", ")", "elif", "vtype", "==", "\"f\"", ":", "data", ".", "append", "(", "(", "param", ",", "float", "(", "src", ".", "nodes", "[", "tags", ".", "index", "(", "key", ")", "]", ".", "text", ")", ")", ")", "else", ":", "data", ".", "append", "(", "(", "param", ",", "None", ")", ")", "else", ":", "data", ".", "append", "(", "(", "param", ",", "None", ")", ")", "return", "dict", "(", "data", ")" ]
Extract params from source object.
[ "Extract", "params", "from", "source", "object", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L120-L143
325
gem/oq-engine
openquake/commonlib/shapefileparser.py
parse_complex_fault_geometry
def parse_complex_fault_geometry(node):
    """
    Parses a complex fault geometry node returning both the attributes and
    parameters in a dictionary
    """
    assert "complexFaultGeometry" in node.tag
    # Get general attributes
    geometry = {"intermediateEdges": []}
    for subnode in node:
        crds = subnode.nodes[0].nodes[0].text
        if "faultTopEdge" in subnode.tag:
            geometry["faultTopEdge"] = numpy.array(
                [[crds[i], crds[i + 1], crds[i + 2]]
                 for i in range(0, len(crds), 3)])
            geometry["upperSeismoDepth"] = numpy.min(
                geometry["faultTopEdge"][:, 2])
        elif "faultBottomEdge" in subnode.tag:
            geometry["faultBottomEdge"] = numpy.array(
                [[crds[i], crds[i + 1], crds[i + 2]]
                 for i in range(0, len(crds), 3)])
            geometry["lowerSeismoDepth"] = numpy.max(
                geometry["faultBottomEdge"][:, 2])
        elif "intermediateEdge" in subnode.tag:
            geometry["intermediateEdges"].append(
                numpy.array([[crds[i], crds[i + 1], crds[i + 2]]
                             for i in range(0, len(crds), 3)]))
        else:
            pass
    geometry["dip"] = None
    return geometry
python
def parse_complex_fault_geometry(node):
    """
    Parses a complex fault geometry node returning both the attributes and
    parameters in a dictionary
    """
    assert "complexFaultGeometry" in node.tag
    # Get general attributes
    geometry = {"intermediateEdges": []}
    for subnode in node:
        crds = subnode.nodes[0].nodes[0].text
        if "faultTopEdge" in subnode.tag:
            geometry["faultTopEdge"] = numpy.array(
                [[crds[i], crds[i + 1], crds[i + 2]]
                 for i in range(0, len(crds), 3)])
            geometry["upperSeismoDepth"] = numpy.min(
                geometry["faultTopEdge"][:, 2])
        elif "faultBottomEdge" in subnode.tag:
            geometry["faultBottomEdge"] = numpy.array(
                [[crds[i], crds[i + 1], crds[i + 2]]
                 for i in range(0, len(crds), 3)])
            geometry["lowerSeismoDepth"] = numpy.max(
                geometry["faultBottomEdge"][:, 2])
        elif "intermediateEdge" in subnode.tag:
            geometry["intermediateEdges"].append(
                numpy.array([[crds[i], crds[i + 1], crds[i + 2]]
                             for i in range(0, len(crds), 3)]))
        else:
            pass
    geometry["dip"] = None
    return geometry
[ "def", "parse_complex_fault_geometry", "(", "node", ")", ":", "assert", "\"complexFaultGeometry\"", "in", "node", ".", "tag", "# Get general attributes", "geometry", "=", "{", "\"intermediateEdges\"", ":", "[", "]", "}", "for", "subnode", "in", "node", ":", "crds", "=", "subnode", ".", "nodes", "[", "0", "]", ".", "nodes", "[", "0", "]", ".", "text", "if", "\"faultTopEdge\"", "in", "subnode", ".", "tag", ":", "geometry", "[", "\"faultTopEdge\"", "]", "=", "numpy", ".", "array", "(", "[", "[", "crds", "[", "i", "]", ",", "crds", "[", "i", "+", "1", "]", ",", "crds", "[", "i", "+", "2", "]", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "crds", ")", ",", "3", ")", "]", ")", "geometry", "[", "\"upperSeismoDepth\"", "]", "=", "numpy", ".", "min", "(", "geometry", "[", "\"faultTopEdge\"", "]", "[", ":", ",", "2", "]", ")", "elif", "\"faultBottomEdge\"", "in", "subnode", ".", "tag", ":", "geometry", "[", "\"faultBottomEdge\"", "]", "=", "numpy", ".", "array", "(", "[", "[", "crds", "[", "i", "]", ",", "crds", "[", "i", "+", "1", "]", ",", "crds", "[", "i", "+", "2", "]", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "crds", ")", ",", "3", ")", "]", ")", "geometry", "[", "\"lowerSeismoDepth\"", "]", "=", "numpy", ".", "max", "(", "geometry", "[", "\"faultBottomEdge\"", "]", "[", ":", ",", "2", "]", ")", "elif", "\"intermediateEdge\"", "in", "subnode", ".", "tag", ":", "geometry", "[", "\"intermediateEdges\"", "]", ".", "append", "(", "numpy", ".", "array", "(", "[", "[", "crds", "[", "i", "]", ",", "crds", "[", "i", "+", "1", "]", ",", "crds", "[", "i", "+", "2", "]", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "crds", ")", ",", "3", ")", "]", ")", ")", "else", ":", "pass", "geometry", "[", "\"dip\"", "]", "=", "None", "return", "geometry" ]
Parses a complex fault geometry node returning both the attributes and parameters in a dictionary
[ "Parses", "a", "complex", "fault", "geometry", "node", "returning", "both", "the", "attributes", "and", "parameters", "in", "a", "dictionary" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L201-L231
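The edge-building loop in the record above groups a flat lon/lat/depth list into coordinate triplets. A standalone numpy sketch with illustrative coordinates, including the equivalent reshape:

import numpy as np

# Flat coordinate list as stored on the NRML node: lon, lat, depth triplets
crds = [10.0, 45.0, 0.0, 10.5, 45.2, 2.0, 11.0, 45.4, 5.0]
edge = np.array([[crds[i], crds[i + 1], crds[i + 2]]
                 for i in range(0, len(crds), 3)])
# Equivalent vectorised form
assert np.allclose(edge, np.reshape(crds, (-1, 3)))
upper = np.min(edge[:, 2])  # depth extremum, as for upperSeismoDepth above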
326
gem/oq-engine
openquake/commonlib/shapefileparser.py
parse_planar_fault_geometry
def parse_planar_fault_geometry(node):
    """
    Parses a planar fault geometry node returning both the attributes and
    parameters in a dictionary
    """
    assert "planarSurface" in node.tag
    geometry = {"strike": node.attrib["strike"],
                "dip": node.attrib["dip"]}
    upper_depth = numpy.inf
    lower_depth = 0.0
    tags = get_taglist(node)
    corner_points = []
    for locn in ["topLeft", "topRight", "bottomRight", "bottomLeft"]:
        plane = node.nodes[tags.index(locn)]
        upper_depth = plane["depth"] if plane["depth"] < upper_depth else\
            upper_depth
        lower_depth = plane["depth"] if plane["depth"] > lower_depth else\
            lower_depth
        corner_points.append([plane["lon"], plane["lat"], plane["depth"]])
    geometry["upperSeismoDepth"] = upper_depth
    geometry["lowerSeismoDepth"] = lower_depth
    geometry["corners"] = numpy.array(corner_points)
    return geometry
python
def parse_planar_fault_geometry(node):
    """
    Parses a planar fault geometry node returning both the attributes and
    parameters in a dictionary
    """
    assert "planarSurface" in node.tag
    geometry = {"strike": node.attrib["strike"],
                "dip": node.attrib["dip"]}
    upper_depth = numpy.inf
    lower_depth = 0.0
    tags = get_taglist(node)
    corner_points = []
    for locn in ["topLeft", "topRight", "bottomRight", "bottomLeft"]:
        plane = node.nodes[tags.index(locn)]
        upper_depth = plane["depth"] if plane["depth"] < upper_depth else\
            upper_depth
        lower_depth = plane["depth"] if plane["depth"] > lower_depth else\
            lower_depth
        corner_points.append([plane["lon"], plane["lat"], plane["depth"]])
    geometry["upperSeismoDepth"] = upper_depth
    geometry["lowerSeismoDepth"] = lower_depth
    geometry["corners"] = numpy.array(corner_points)
    return geometry
[ "def", "parse_planar_fault_geometry", "(", "node", ")", ":", "assert", "\"planarSurface\"", "in", "node", ".", "tag", "geometry", "=", "{", "\"strike\"", ":", "node", ".", "attrib", "[", "\"strike\"", "]", ",", "\"dip\"", ":", "node", ".", "attrib", "[", "\"dip\"", "]", "}", "upper_depth", "=", "numpy", ".", "inf", "lower_depth", "=", "0.0", "tags", "=", "get_taglist", "(", "node", ")", "corner_points", "=", "[", "]", "for", "locn", "in", "[", "\"topLeft\"", ",", "\"topRight\"", ",", "\"bottomRight\"", ",", "\"bottomLeft\"", "]", ":", "plane", "=", "node", ".", "nodes", "[", "tags", ".", "index", "(", "locn", ")", "]", "upper_depth", "=", "plane", "[", "\"depth\"", "]", "if", "plane", "[", "\"depth\"", "]", "<", "upper_depth", "else", "upper_depth", "lower_depth", "=", "plane", "[", "\"depth\"", "]", "if", "plane", "[", "\"depth\"", "]", ">", "lower_depth", "else", "lower_depth", "corner_points", ".", "append", "(", "[", "plane", "[", "\"lon\"", "]", ",", "plane", "[", "\"lat\"", "]", ",", "plane", "[", "\"depth\"", "]", "]", ")", "geometry", "[", "\"upperSeismoDepth\"", "]", "=", "upper_depth", "geometry", "[", "\"lowerSeismoDepth\"", "]", "=", "lower_depth", "geometry", "[", "\"corners\"", "]", "=", "numpy", ".", "array", "(", "corner_points", ")", "return", "geometry" ]
Parses a planar fault geometry node returning both the attributes and parameters in a dictionary
[ "Parses", "a", "planar", "fault", "geometry", "node", "returning", "both", "the", "attributes", "and", "parameters", "in", "a", "dictionary" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L234-L256
327
gem/oq-engine
openquake/commonlib/shapefileparser.py
extract_mfd_params
def extract_mfd_params(src):
    """
    Extracts the MFD parameters from an object
    """
    tags = get_taglist(src)
    if "incrementalMFD" in tags:
        mfd_node = src.nodes[tags.index("incrementalMFD")]
    elif "truncGutenbergRichterMFD" in tags:
        mfd_node = src.nodes[tags.index("truncGutenbergRichterMFD")]
    elif "arbitraryMFD" in tags:
        mfd_node = src.nodes[tags.index("arbitraryMFD")]
    elif "YoungsCoppersmithMFD" in tags:
        mfd_node = src.nodes[tags.index("YoungsCoppersmithMFD")]
    else:
        raise ValueError("Source %s contains no supported MFD type!"
                         % src.tag)
    data = []
    rates = []
    for key, param, vtype in MFD_PARAMS:
        if key in mfd_node.attrib and mfd_node.attrib[key] is not None:
            data.append((param, mfd_node.attrib[key]))
        else:
            data.append((param, None))
    # test both tags explicitly: ("incrementalMFD" or "arbitraryMFD")
    # evaluates to "incrementalMFD" alone, so arbitraryMFD would be missed
    if "incrementalMFD" in mfd_node.tag or "arbitraryMFD" in mfd_node.tag:
        # Extract Rates
        rates = ~mfd_node.occurRates
        n_r = len(rates)
        if n_r > MAX_RATES:
            raise ValueError("Number of rates in source %s too large "
                             "to be placed into shapefile" % src.tag)
        rate_dict = dict([(key, rates[i] if i < n_r else None)
                          for i, (key, _) in enumerate(RATE_PARAMS)])
    elif "YoungsCoppersmithMFD" in mfd_node.tag:
        rate_dict = dict([(key, mfd_node.attrib['characteristicRate'])
                          for i, (key, _) in enumerate(RATE_PARAMS)])
    else:
        rate_dict = dict([(key, None)
                          for i, (key, _) in enumerate(RATE_PARAMS)])
    return dict(data), rate_dict
python
def extract_mfd_params(src):
    """
    Extracts the MFD parameters from an object
    """
    tags = get_taglist(src)
    if "incrementalMFD" in tags:
        mfd_node = src.nodes[tags.index("incrementalMFD")]
    elif "truncGutenbergRichterMFD" in tags:
        mfd_node = src.nodes[tags.index("truncGutenbergRichterMFD")]
    elif "arbitraryMFD" in tags:
        mfd_node = src.nodes[tags.index("arbitraryMFD")]
    elif "YoungsCoppersmithMFD" in tags:
        mfd_node = src.nodes[tags.index("YoungsCoppersmithMFD")]
    else:
        raise ValueError("Source %s contains no supported MFD type!"
                         % src.tag)
    data = []
    rates = []
    for key, param, vtype in MFD_PARAMS:
        if key in mfd_node.attrib and mfd_node.attrib[key] is not None:
            data.append((param, mfd_node.attrib[key]))
        else:
            data.append((param, None))
    # test both tags explicitly: ("incrementalMFD" or "arbitraryMFD")
    # evaluates to "incrementalMFD" alone, so arbitraryMFD would be missed
    if "incrementalMFD" in mfd_node.tag or "arbitraryMFD" in mfd_node.tag:
        # Extract Rates
        rates = ~mfd_node.occurRates
        n_r = len(rates)
        if n_r > MAX_RATES:
            raise ValueError("Number of rates in source %s too large "
                             "to be placed into shapefile" % src.tag)
        rate_dict = dict([(key, rates[i] if i < n_r else None)
                          for i, (key, _) in enumerate(RATE_PARAMS)])
    elif "YoungsCoppersmithMFD" in mfd_node.tag:
        rate_dict = dict([(key, mfd_node.attrib['characteristicRate'])
                          for i, (key, _) in enumerate(RATE_PARAMS)])
    else:
        rate_dict = dict([(key, None)
                          for i, (key, _) in enumerate(RATE_PARAMS)])
    return dict(data), rate_dict
[ "def", "extract_mfd_params", "(", "src", ")", ":", "tags", "=", "get_taglist", "(", "src", ")", "if", "\"incrementalMFD\"", "in", "tags", ":", "mfd_node", "=", "src", ".", "nodes", "[", "tags", ".", "index", "(", "\"incrementalMFD\"", ")", "]", "elif", "\"truncGutenbergRichterMFD\"", "in", "tags", ":", "mfd_node", "=", "src", ".", "nodes", "[", "tags", ".", "index", "(", "\"truncGutenbergRichterMFD\"", ")", "]", "elif", "\"arbitraryMFD\"", "in", "tags", ":", "mfd_node", "=", "src", ".", "nodes", "[", "tags", ".", "index", "(", "\"arbitraryMFD\"", ")", "]", "elif", "\"YoungsCoppersmithMFD\"", "in", "tags", ":", "mfd_node", "=", "src", ".", "nodes", "[", "tags", ".", "index", "(", "\"YoungsCoppersmithMFD\"", ")", "]", "else", ":", "raise", "ValueError", "(", "\"Source %s contains no supported MFD type!\"", "%", "src", ".", "tag", ")", "data", "=", "[", "]", "rates", "=", "[", "]", "for", "key", ",", "param", ",", "vtype", "in", "MFD_PARAMS", ":", "if", "key", "in", "mfd_node", ".", "attrib", "and", "mfd_node", ".", "attrib", "[", "key", "]", "is", "not", "None", ":", "data", ".", "append", "(", "(", "param", ",", "mfd_node", ".", "attrib", "[", "key", "]", ")", ")", "else", ":", "data", ".", "append", "(", "(", "param", ",", "None", ")", ")", "if", "(", "\"incrementalMFD\"", "or", "\"arbitraryMFD\"", ")", "in", "mfd_node", ".", "tag", ":", "# Extract Rates", "rates", "=", "~", "mfd_node", ".", "occurRates", "n_r", "=", "len", "(", "rates", ")", "if", "n_r", ">", "MAX_RATES", ":", "raise", "ValueError", "(", "\"Number of rates in source %s too large \"", "\"to be placed into shapefile\"", "%", "src", ".", "tag", ")", "rate_dict", "=", "dict", "(", "[", "(", "key", ",", "rates", "[", "i", "]", "if", "i", "<", "n_r", "else", "None", ")", "for", "i", ",", "(", "key", ",", "_", ")", "in", "enumerate", "(", "RATE_PARAMS", ")", "]", ")", "elif", "\"YoungsCoppersmithMFD\"", "in", "mfd_node", ".", "tag", ":", "rate_dict", "=", "dict", "(", "[", "(", "key", ",", "mfd_node", ".", "attrib", "[", "'characteristicRate'", "]", ")", "for", "i", ",", "(", "key", ",", "_", ")", "in", "enumerate", "(", "RATE_PARAMS", ")", "]", ")", "else", ":", "rate_dict", "=", "dict", "(", "[", "(", "key", ",", "None", ")", "for", "i", ",", "(", "key", ",", "_", ")", "in", "enumerate", "(", "RATE_PARAMS", ")", "]", ")", "return", "dict", "(", "data", ")", ",", "rate_dict" ]
Extracts the MFD parameters from an object
[ "Extracts", "the", "MFD", "parameters", "from", "an", "object" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L322-L359
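Why the membership test in the record above must name both tags explicitly: a short demonstration of the `("a" or "b") in s` pitfall, with an illustrative tag value.

tag = "arbitraryMFD"  # hypothetical node tag
print("incrementalMFD" or "arbitraryMFD")            # -> 'incrementalMFD'
print(("incrementalMFD" or "arbitraryMFD") in tag)   # False: tests one string
print("incrementalMFD" in tag or "arbitraryMFD" in tag)  # True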
328
gem/oq-engine
openquake/commonlib/shapefileparser.py
extract_source_hypocentral_depths
def extract_source_hypocentral_depths(src):
    """
    Extract source hypocentral depths.
    """
    if "pointSource" not in src.tag and "areaSource" not in src.tag:
        hds = dict([(key, None) for key, _ in HDEPTH_PARAMS])
        hdsw = dict([(key, None) for key, _ in HDW_PARAMS])
        return hds, hdsw

    tags = get_taglist(src)
    hdd_nodeset = src.nodes[tags.index("hypoDepthDist")]
    if len(hdd_nodeset) > MAX_HYPO_DEPTHS:
        raise ValueError("Number of hypocentral depths %s exceeds stated "
                         "maximum of %s" % (str(len(hdd_nodeset)),
                                            str(MAX_HYPO_DEPTHS)))
    if len(hdd_nodeset):
        hds = []
        hdws = []
        for hdd_node in hdd_nodeset:
            hds.append(float(hdd_node.attrib["depth"]))
            hdws.append(float(hdd_node.attrib["probability"]))
        hds = expand_src_param(hds, HDEPTH_PARAMS)
        hdsw = expand_src_param(hdws, HDW_PARAMS)
    else:
        hds = dict([(key, None) for key, _ in HDEPTH_PARAMS])
        hdsw = dict([(key, None) for key, _ in HDW_PARAMS])

    return hds, hdsw
python
def extract_source_hypocentral_depths(src):
    """
    Extract source hypocentral depths.
    """
    if "pointSource" not in src.tag and "areaSource" not in src.tag:
        hds = dict([(key, None) for key, _ in HDEPTH_PARAMS])
        hdsw = dict([(key, None) for key, _ in HDW_PARAMS])
        return hds, hdsw

    tags = get_taglist(src)
    hdd_nodeset = src.nodes[tags.index("hypoDepthDist")]
    if len(hdd_nodeset) > MAX_HYPO_DEPTHS:
        raise ValueError("Number of hypocentral depths %s exceeds stated "
                         "maximum of %s" % (str(len(hdd_nodeset)),
                                            str(MAX_HYPO_DEPTHS)))
    if len(hdd_nodeset):
        hds = []
        hdws = []
        for hdd_node in hdd_nodeset:
            hds.append(float(hdd_node.attrib["depth"]))
            hdws.append(float(hdd_node.attrib["probability"]))
        hds = expand_src_param(hds, HDEPTH_PARAMS)
        hdsw = expand_src_param(hdws, HDW_PARAMS)
    else:
        hds = dict([(key, None) for key, _ in HDEPTH_PARAMS])
        hdsw = dict([(key, None) for key, _ in HDW_PARAMS])

    return hds, hdsw
[ "def", "extract_source_hypocentral_depths", "(", "src", ")", ":", "if", "\"pointSource\"", "not", "in", "src", ".", "tag", "and", "\"areaSource\"", "not", "in", "src", ".", "tag", ":", "hds", "=", "dict", "(", "[", "(", "key", ",", "None", ")", "for", "key", ",", "_", "in", "HDEPTH_PARAMS", "]", ")", "hdsw", "=", "dict", "(", "[", "(", "key", ",", "None", ")", "for", "key", ",", "_", "in", "HDW_PARAMS", "]", ")", "return", "hds", ",", "hdsw", "tags", "=", "get_taglist", "(", "src", ")", "hdd_nodeset", "=", "src", ".", "nodes", "[", "tags", ".", "index", "(", "\"hypoDepthDist\"", ")", "]", "if", "len", "(", "hdd_nodeset", ")", ">", "MAX_HYPO_DEPTHS", ":", "raise", "ValueError", "(", "\"Number of hypocentral depths %s exceeds stated \"", "\"maximum of %s\"", "%", "(", "str", "(", "len", "(", "hdd_nodeset", ")", ")", ",", "str", "(", "MAX_HYPO_DEPTHS", ")", ")", ")", "if", "len", "(", "hdd_nodeset", ")", ":", "hds", "=", "[", "]", "hdws", "=", "[", "]", "for", "hdd_node", "in", "hdd_nodeset", ":", "hds", ".", "append", "(", "float", "(", "hdd_node", ".", "attrib", "[", "\"depth\"", "]", ")", ")", "hdws", ".", "append", "(", "float", "(", "hdd_node", ".", "attrib", "[", "\"probability\"", "]", ")", ")", "hds", "=", "expand_src_param", "(", "hds", ",", "HDEPTH_PARAMS", ")", "hdsw", "=", "expand_src_param", "(", "hdws", ",", "HDW_PARAMS", ")", "else", ":", "hds", "=", "dict", "(", "[", "(", "key", ",", "None", ")", "for", "key", ",", "_", "in", "HDEPTH_PARAMS", "]", ")", "hdsw", "=", "dict", "(", "[", "(", "key", ",", "None", ")", "for", "key", ",", "_", "in", "HDW_PARAMS", "]", ")", "return", "hds", ",", "hdsw" ]
Extract source hypocentral depths.
[ "Extract", "source", "hypocentral", "depths", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L397-L425
329
gem/oq-engine
openquake/commonlib/shapefileparser.py
extract_source_planes_strikes_dips
def extract_source_planes_strikes_dips(src):
    """
    Extract strike and dip angles for source defined by multiple planes.
    """
    if "characteristicFaultSource" not in src.tag:
        strikes = dict([(key, None) for key, _ in PLANES_STRIKES_PARAM])
        dips = dict([(key, None) for key, _ in PLANES_DIPS_PARAM])
        return strikes, dips

    tags = get_taglist(src)
    surface_set = src.nodes[tags.index("surface")]
    strikes = []
    dips = []
    num_planes = 0
    for surface in surface_set:
        if "planarSurface" in surface.tag:
            strikes.append(float(surface.attrib["strike"]))
            dips.append(float(surface.attrib["dip"]))
            num_planes += 1
    if num_planes > MAX_PLANES:
        raise ValueError("Number of planes in source %s exceeds maximum "
                         "of %s" % (str(num_planes), str(MAX_PLANES)))
    if num_planes:
        strikes = expand_src_param(strikes, PLANES_STRIKES_PARAM)
        dips = expand_src_param(dips, PLANES_DIPS_PARAM)
    else:
        strikes = dict([(key, None) for key, _ in PLANES_STRIKES_PARAM])
        dips = dict([(key, None) for key, _ in PLANES_DIPS_PARAM])

    return strikes, dips
python
def extract_source_planes_strikes_dips(src):
    """
    Extract strike and dip angles for source defined by multiple planes.
    """
    if "characteristicFaultSource" not in src.tag:
        strikes = dict([(key, None) for key, _ in PLANES_STRIKES_PARAM])
        dips = dict([(key, None) for key, _ in PLANES_DIPS_PARAM])
        return strikes, dips

    tags = get_taglist(src)
    surface_set = src.nodes[tags.index("surface")]
    strikes = []
    dips = []
    num_planes = 0
    for surface in surface_set:
        if "planarSurface" in surface.tag:
            strikes.append(float(surface.attrib["strike"]))
            dips.append(float(surface.attrib["dip"]))
            num_planes += 1
    if num_planes > MAX_PLANES:
        raise ValueError("Number of planes in source %s exceeds maximum "
                         "of %s" % (str(num_planes), str(MAX_PLANES)))
    if num_planes:
        strikes = expand_src_param(strikes, PLANES_STRIKES_PARAM)
        dips = expand_src_param(dips, PLANES_DIPS_PARAM)
    else:
        strikes = dict([(key, None) for key, _ in PLANES_STRIKES_PARAM])
        dips = dict([(key, None) for key, _ in PLANES_DIPS_PARAM])

    return strikes, dips
[ "def", "extract_source_planes_strikes_dips", "(", "src", ")", ":", "if", "\"characteristicFaultSource\"", "not", "in", "src", ".", "tag", ":", "strikes", "=", "dict", "(", "[", "(", "key", ",", "None", ")", "for", "key", ",", "_", "in", "PLANES_STRIKES_PARAM", "]", ")", "dips", "=", "dict", "(", "[", "(", "key", ",", "None", ")", "for", "key", ",", "_", "in", "PLANES_DIPS_PARAM", "]", ")", "return", "strikes", ",", "dips", "tags", "=", "get_taglist", "(", "src", ")", "surface_set", "=", "src", ".", "nodes", "[", "tags", ".", "index", "(", "\"surface\"", ")", "]", "strikes", "=", "[", "]", "dips", "=", "[", "]", "num_planes", "=", "0", "for", "surface", "in", "surface_set", ":", "if", "\"planarSurface\"", "in", "surface", ".", "tag", ":", "strikes", ".", "append", "(", "float", "(", "surface", ".", "attrib", "[", "\"strike\"", "]", ")", ")", "dips", ".", "append", "(", "float", "(", "surface", ".", "attrib", "[", "\"dip\"", "]", ")", ")", "num_planes", "+=", "1", "if", "num_planes", ">", "MAX_PLANES", ":", "raise", "ValueError", "(", "\"Number of planes in sourcs %s exceededs maximum \"", "\"of %s\"", "%", "(", "str", "(", "num_planes", ")", ",", "str", "(", "MAX_PLANES", ")", ")", ")", "if", "num_planes", ":", "strikes", "=", "expand_src_param", "(", "strikes", ",", "PLANES_STRIKES_PARAM", ")", "dips", "=", "expand_src_param", "(", "dips", ",", "PLANES_DIPS_PARAM", ")", "else", ":", "strikes", "=", "dict", "(", "[", "(", "key", ",", "None", ")", "for", "key", ",", "_", "in", "PLANES_STRIKES_PARAM", "]", ")", "dips", "=", "dict", "(", "[", "(", "key", ",", "None", ")", "for", "key", ",", "_", "in", "PLANES_DIPS_PARAM", "]", ")", "return", "strikes", ",", "dips" ]
Extract strike and dip angles for source defined by multiple planes.
[ "Extract", "strike", "and", "dip", "angles", "for", "source", "defined", "by", "multiple", "planes", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L428-L456
330
gem/oq-engine
openquake/commonlib/shapefileparser.py
set_params
def set_params(w, src):
    """
    Set source parameters.
    """
    params = extract_source_params(src)
    # this is done because for characteristic sources geometry is in
    # 'surface' attribute
    params.update(extract_geometry_params(src))

    mfd_pars, rate_pars = extract_mfd_params(src)
    params.update(mfd_pars)
    params.update(rate_pars)

    strikes, dips, rakes, np_weights = extract_source_nodal_planes(src)
    params.update(strikes)
    params.update(dips)
    params.update(rakes)
    params.update(np_weights)

    hds, hdsw = extract_source_hypocentral_depths(src)
    params.update(hds)
    params.update(hdsw)

    pstrikes, pdips = extract_source_planes_strikes_dips(src)
    params.update(pstrikes)
    params.update(pdips)
    params['sourcetype'] = striptag(src.tag)
    w.record(**params)
python
def set_params(w, src):
    """
    Set source parameters.
    """
    params = extract_source_params(src)
    # this is done because for characteristic sources geometry is in
    # 'surface' attribute
    params.update(extract_geometry_params(src))

    mfd_pars, rate_pars = extract_mfd_params(src)
    params.update(mfd_pars)
    params.update(rate_pars)

    strikes, dips, rakes, np_weights = extract_source_nodal_planes(src)
    params.update(strikes)
    params.update(dips)
    params.update(rakes)
    params.update(np_weights)

    hds, hdsw = extract_source_hypocentral_depths(src)
    params.update(hds)
    params.update(hdsw)

    pstrikes, pdips = extract_source_planes_strikes_dips(src)
    params.update(pstrikes)
    params.update(pdips)
    params['sourcetype'] = striptag(src.tag)
    w.record(**params)
[ "def", "set_params", "(", "w", ",", "src", ")", ":", "params", "=", "extract_source_params", "(", "src", ")", "# this is done because for characteristic sources geometry is in", "# 'surface' attribute", "params", ".", "update", "(", "extract_geometry_params", "(", "src", ")", ")", "mfd_pars", ",", "rate_pars", "=", "extract_mfd_params", "(", "src", ")", "params", ".", "update", "(", "mfd_pars", ")", "params", ".", "update", "(", "rate_pars", ")", "strikes", ",", "dips", ",", "rakes", ",", "np_weights", "=", "extract_source_nodal_planes", "(", "src", ")", "params", ".", "update", "(", "strikes", ")", "params", ".", "update", "(", "dips", ")", "params", ".", "update", "(", "rakes", ")", "params", ".", "update", "(", "np_weights", ")", "hds", ",", "hdsw", "=", "extract_source_hypocentral_depths", "(", "src", ")", "params", ".", "update", "(", "hds", ")", "params", ".", "update", "(", "hdsw", ")", "pstrikes", ",", "pdips", "=", "extract_source_planes_strikes_dips", "(", "src", ")", "params", ".", "update", "(", "pstrikes", ")", "params", ".", "update", "(", "pdips", ")", "params", "[", "'sourcetype'", "]", "=", "striptag", "(", "src", ".", "tag", ")", "w", ".", "record", "(", "*", "*", "params", ")" ]
Set source parameters.
[ "Set", "source", "parameters", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L459-L486
331
gem/oq-engine
openquake/commonlib/shapefileparser.py
set_area_geometry
def set_area_geometry(w, src):
    """
    Set area polygon as shapefile geometry
    """
    assert "areaSource" in src.tag
    geometry_node = src.nodes[get_taglist(src).index("areaGeometry")]
    area_attrs = parse_area_geometry(geometry_node)
    w.poly(parts=[area_attrs["polygon"].tolist()])
python
def set_area_geometry(w, src):
    """
    Set area polygon as shapefile geometry
    """
    assert "areaSource" in src.tag
    geometry_node = src.nodes[get_taglist(src).index("areaGeometry")]
    area_attrs = parse_area_geometry(geometry_node)
    w.poly(parts=[area_attrs["polygon"].tolist()])
[ "def", "set_area_geometry", "(", "w", ",", "src", ")", ":", "assert", "\"areaSource\"", "in", "src", ".", "tag", "geometry_node", "=", "src", ".", "nodes", "[", "get_taglist", "(", "src", ")", ".", "index", "(", "\"areaGeometry\"", ")", "]", "area_attrs", "=", "parse_area_geometry", "(", "geometry_node", ")", "w", ".", "poly", "(", "parts", "=", "[", "area_attrs", "[", "\"polygon\"", "]", ".", "tolist", "(", ")", "]", ")" ]
Set area polygon as shapefile geometry
[ "Set", "area", "polygon", "as", "shapefile", "geometry" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L489-L496
332
gem/oq-engine
openquake/commonlib/shapefileparser.py
set_point_geometry
def set_point_geometry(w, src):
    """
    Set point location as shapefile geometry.
    """
    assert "pointSource" in src.tag
    geometry_node = src.nodes[get_taglist(src).index("pointGeometry")]
    point_attrs = parse_point_geometry(geometry_node)
    w.point(point_attrs["point"][0], point_attrs["point"][1])
python
def set_point_geometry(w, src):
    """
    Set point location as shapefile geometry.
    """
    assert "pointSource" in src.tag
    geometry_node = src.nodes[get_taglist(src).index("pointGeometry")]
    point_attrs = parse_point_geometry(geometry_node)
    w.point(point_attrs["point"][0], point_attrs["point"][1])
[ "def", "set_point_geometry", "(", "w", ",", "src", ")", ":", "assert", "\"pointSource\"", "in", "src", ".", "tag", "geometry_node", "=", "src", ".", "nodes", "[", "get_taglist", "(", "src", ")", ".", "index", "(", "\"pointGeometry\"", ")", "]", "point_attrs", "=", "parse_point_geometry", "(", "geometry_node", ")", "w", ".", "point", "(", "point_attrs", "[", "\"point\"", "]", "[", "0", "]", ",", "point_attrs", "[", "\"point\"", "]", "[", "1", "]", ")" ]
Set point location as shapefile geometry.
[ "Set", "point", "location", "as", "shapefile", "geometry", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L499-L506
333
gem/oq-engine
openquake/commonlib/shapefileparser.py
set_simple_fault_geometry
def set_simple_fault_geometry(w, src):
    """
    Set simple fault trace coordinates as shapefile geometry.

    :parameter w:
        Writer
    :parameter src:
        source
    """
    assert "simpleFaultSource" in src.tag
    geometry_node = src.nodes[get_taglist(src).index("simpleFaultGeometry")]
    fault_attrs = parse_simple_fault_geometry(geometry_node)
    w.line(parts=[fault_attrs["trace"].tolist()])
python
def set_simple_fault_geometry(w, src):
    """
    Set simple fault trace coordinates as shapefile geometry.

    :parameter w:
        Writer
    :parameter src:
        source
    """
    assert "simpleFaultSource" in src.tag
    geometry_node = src.nodes[get_taglist(src).index("simpleFaultGeometry")]
    fault_attrs = parse_simple_fault_geometry(geometry_node)
    w.line(parts=[fault_attrs["trace"].tolist()])
[ "def", "set_simple_fault_geometry", "(", "w", ",", "src", ")", ":", "assert", "\"simpleFaultSource\"", "in", "src", ".", "tag", "geometry_node", "=", "src", ".", "nodes", "[", "get_taglist", "(", "src", ")", ".", "index", "(", "\"simpleFaultGeometry\"", ")", "]", "fault_attrs", "=", "parse_simple_fault_geometry", "(", "geometry_node", ")", "w", ".", "line", "(", "parts", "=", "[", "fault_attrs", "[", "\"trace\"", "]", ".", "tolist", "(", ")", "]", ")" ]
Set simple fault trace coordinates as shapefile geometry.

:parameter w:
    Writer
:parameter src:
    source
[ "Set", "simple", "fault", "trace", "coordinates", "as", "shapefile", "geometry", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L509-L521
334
gem/oq-engine
openquake/commonlib/shapefileparser.py
set_simple_fault_geometry_3D
def set_simple_fault_geometry_3D(w, src):
    """
    Builds a 3D polygon from a node instance
    """
    assert "simpleFaultSource" in src.tag
    geometry_node = src.nodes[get_taglist(src).index("simpleFaultGeometry")]
    fault_attrs = parse_simple_fault_geometry(geometry_node)
    build_polygon_from_fault_attrs(w, fault_attrs)
python
def set_simple_fault_geometry_3D(w, src):
    """
    Builds a 3D polygon from a node instance
    """
    assert "simpleFaultSource" in src.tag
    geometry_node = src.nodes[get_taglist(src).index("simpleFaultGeometry")]
    fault_attrs = parse_simple_fault_geometry(geometry_node)
    build_polygon_from_fault_attrs(w, fault_attrs)
[ "def", "set_simple_fault_geometry_3D", "(", "w", ",", "src", ")", ":", "assert", "\"simpleFaultSource\"", "in", "src", ".", "tag", "geometry_node", "=", "src", ".", "nodes", "[", "get_taglist", "(", "src", ")", ".", "index", "(", "\"simpleFaultGeometry\"", ")", "]", "fault_attrs", "=", "parse_simple_fault_geometry", "(", "geometry_node", ")", "build_polygon_from_fault_attrs", "(", "w", ",", "fault_attrs", ")" ]
Builds a 3D polygon from a node instance
[ "Builds", "a", "3D", "polygon", "from", "a", "node", "instance" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L543-L550
335
gem/oq-engine
openquake/commonlib/shapefileparser.py
SourceModel.appraise_source_model
def appraise_source_model(self):
        """
        Identify parameters defined in NRML source model file, so that
        shapefile contains only source model specific fields.
        """
        for src in self.sources:
            # source params
            src_taglist = get_taglist(src)
            if "areaSource" in src.tag:
                self.has_area_source = True
                npd_node = src.nodes[src_taglist.index("nodalPlaneDist")]
                npd_size = len(npd_node)
                hdd_node = src.nodes[src_taglist.index("hypoDepthDist")]
                hdd_size = len(hdd_node)
                self.num_np = (npd_size if npd_size > self.num_np
                               else self.num_np)
                self.num_hd = (hdd_size if hdd_size > self.num_hd
                               else self.num_hd)
            elif "pointSource" in src.tag:
                self.has_point_source = True
                npd_node = src.nodes[src_taglist.index("nodalPlaneDist")]
                npd_size = len(npd_node)
                hdd_node = src.nodes[src_taglist.index("hypoDepthDist")]
                hdd_size = len(hdd_node)
                self.num_np = (npd_size if npd_size > self.num_np
                               else self.num_np)
                self.num_hd = (hdd_size if hdd_size > self.num_hd
                               else self.num_hd)
            elif "simpleFaultSource" in src.tag:
                self.has_simple_fault_geometry = True
            elif "complexFaultSource" in src.tag:
                self.has_complex_fault_geometry = True
            elif "characteristicFaultSource" in src.tag:
                # Get the surface node
                surface_node = src.nodes[src_taglist.index("surface")]
                p_size = 0
                for surface in surface_node.nodes:
                    if "simpleFaultGeometry" in surface.tag:
                        self.has_simple_fault_geometry = True
                    elif "complexFaultGeometry" in surface.tag:
                        self.has_complex_fault_geometry = True
                    elif "planarSurface" in surface.tag:
                        self.has_planar_geometry = True
                        p_size += 1
                self.num_p = p_size if p_size > self.num_p else self.num_p
            else:
                pass

            # MFD params
            if "truncGutenbergRichterMFD" in src_taglist:
                self.has_mfd_gr = True
            elif "incrementalMFD" in src_taglist:
                self.has_mfd_incremental = True
                # Get rate size
                mfd_node = src.nodes[src_taglist.index("incrementalMFD")]
                r_size = len(mfd_node.nodes[0].text)
                self.num_r = r_size if r_size > self.num_r else self.num_r
            else:
                pass
python
def appraise_source_model(self):
        """
        Identify parameters defined in NRML source model file, so that
        shapefile contains only source model specific fields.
        """
        for src in self.sources:
            # source params
            src_taglist = get_taglist(src)
            if "areaSource" in src.tag:
                self.has_area_source = True
                npd_node = src.nodes[src_taglist.index("nodalPlaneDist")]
                npd_size = len(npd_node)
                hdd_node = src.nodes[src_taglist.index("hypoDepthDist")]
                hdd_size = len(hdd_node)
                self.num_np = (npd_size if npd_size > self.num_np
                               else self.num_np)
                self.num_hd = (hdd_size if hdd_size > self.num_hd
                               else self.num_hd)
            elif "pointSource" in src.tag:
                self.has_point_source = True
                npd_node = src.nodes[src_taglist.index("nodalPlaneDist")]
                npd_size = len(npd_node)
                hdd_node = src.nodes[src_taglist.index("hypoDepthDist")]
                hdd_size = len(hdd_node)
                self.num_np = (npd_size if npd_size > self.num_np
                               else self.num_np)
                self.num_hd = (hdd_size if hdd_size > self.num_hd
                               else self.num_hd)
            elif "simpleFaultSource" in src.tag:
                self.has_simple_fault_geometry = True
            elif "complexFaultSource" in src.tag:
                self.has_complex_fault_geometry = True
            elif "characteristicFaultSource" in src.tag:
                # Get the surface node
                surface_node = src.nodes[src_taglist.index("surface")]
                p_size = 0
                for surface in surface_node.nodes:
                    if "simpleFaultGeometry" in surface.tag:
                        self.has_simple_fault_geometry = True
                    elif "complexFaultGeometry" in surface.tag:
                        self.has_complex_fault_geometry = True
                    elif "planarSurface" in surface.tag:
                        self.has_planar_geometry = True
                        p_size += 1
                self.num_p = p_size if p_size > self.num_p else self.num_p
            else:
                pass

            # MFD params
            if "truncGutenbergRichterMFD" in src_taglist:
                self.has_mfd_gr = True
            elif "incrementalMFD" in src_taglist:
                self.has_mfd_incremental = True
                # Get rate size
                mfd_node = src.nodes[src_taglist.index("incrementalMFD")]
                r_size = len(mfd_node.nodes[0].text)
                self.num_r = r_size if r_size > self.num_r else self.num_r
            else:
                pass
[ "def", "appraise_source_model", "(", "self", ")", ":", "for", "src", "in", "self", ".", "sources", ":", "# source params", "src_taglist", "=", "get_taglist", "(", "src", ")", "if", "\"areaSource\"", "in", "src", ".", "tag", ":", "self", ".", "has_area_source", "=", "True", "npd_node", "=", "src", ".", "nodes", "[", "src_taglist", ".", "index", "(", "\"nodalPlaneDist\"", ")", "]", "npd_size", "=", "len", "(", "npd_node", ")", "hdd_node", "=", "src", ".", "nodes", "[", "src_taglist", ".", "index", "(", "\"hypoDepthDist\"", ")", "]", "hdd_size", "=", "len", "(", "hdd_node", ")", "self", ".", "num_np", "=", "(", "npd_size", "if", "npd_size", ">", "self", ".", "num_np", "else", "self", ".", "num_np", ")", "self", ".", "num_hd", "=", "(", "hdd_size", "if", "hdd_size", ">", "self", ".", "num_hd", "else", "self", ".", "num_hd", ")", "elif", "\"pointSource\"", "in", "src", ".", "tag", ":", "self", ".", "has_point_source", "=", "True", "npd_node", "=", "src", ".", "nodes", "[", "src_taglist", ".", "index", "(", "\"nodalPlaneDist\"", ")", "]", "npd_size", "=", "len", "(", "npd_node", ")", "hdd_node", "=", "src", ".", "nodes", "[", "src_taglist", ".", "index", "(", "\"hypoDepthDist\"", ")", "]", "hdd_size", "=", "len", "(", "hdd_node", ")", "self", ".", "num_np", "=", "(", "npd_size", "if", "npd_size", ">", "self", ".", "num_np", "else", "self", ".", "num_np", ")", "self", ".", "num_hd", "=", "(", "hdd_size", "if", "hdd_size", ">", "self", ".", "num_hd", "else", "self", ".", "num_hd", ")", "elif", "\"simpleFaultSource\"", "in", "src", ".", "tag", ":", "self", ".", "has_simple_fault_geometry", "=", "True", "elif", "\"complexFaultSource\"", "in", "src", ".", "tag", ":", "self", ".", "has_complex_fault_geometry", "=", "True", "elif", "\"characteristicFaultSource\"", "in", "src", ".", "tag", ":", "# Get the surface node", "surface_node", "=", "src", ".", "nodes", "[", "src_taglist", ".", "index", "(", "\"surface\"", ")", "]", "p_size", "=", "0", "for", "surface", "in", "surface_node", ".", "nodes", ":", "if", "\"simpleFaultGeometry\"", "in", "surface", ".", "tag", ":", "self", ".", "has_simple_fault_geometry", "=", "True", "elif", "\"complexFaultGeometry\"", "in", "surface", ".", "tag", ":", "self", ".", "has_complex_fault_geometry", "=", "True", "elif", "\"planarSurface\"", "in", "surface", ".", "tag", ":", "self", ".", "has_planar_geometry", "=", "True", "p_size", "+=", "1", "self", ".", "num_p", "=", "p_size", "if", "p_size", ">", "self", ".", "num_p", "else", "self", ".", "num_p", "else", ":", "pass", "# MFD params", "if", "\"truncGutenbergRichterMFD\"", "in", "src_taglist", ":", "self", ".", "has_mfd_gr", "=", "True", "elif", "\"incrementalMFD\"", "in", "src_taglist", ":", "self", ".", "has_mfd_incremental", "=", "True", "# Get rate size", "mfd_node", "=", "src", ".", "nodes", "[", "src_taglist", ".", "index", "(", "\"incrementalMFD\"", ")", "]", "r_size", "=", "len", "(", "mfd_node", ".", "nodes", "[", "0", "]", ".", "text", ")", "self", ".", "num_r", "=", "r_size", "if", "r_size", ">", "self", ".", "num_r", "else", "self", ".", "num_r", "else", ":", "pass" ]
Identify the parameters defined in the NRML source model file, so that the shapefile contains only source-model-specific fields.
[ "Identify", "the", "parameters", "defined", "in", "the", "NRML", "source", "model", "file", "so", "that", "the", "shapefile", "contains", "only", "source", "-", "model", "-", "specific", "fields", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L826-L884
336
gem/oq-engine
openquake/commonlib/shapefileparser.py
SourceModelParser.write
def write(self, destination, source_model, name=None): """ Exports to NRML """ if os.path.exists(destination): os.remove(destination) self.destination = destination if name: source_model.name = name output_source_model = Node("sourceModel", {"name": name}) dic = groupby(source_model.sources, operator.itemgetter('tectonicRegion')) for i, (trt, srcs) in enumerate(dic.items(), 1): output_source_model.append( Node('sourceGroup', {'tectonicRegion': trt, 'name': 'group %d' % i}, nodes=srcs)) print("Exporting Source Model to %s" % self.destination) with open(self.destination, "wb") as f: nrml.write([output_source_model], f, "%s")
python
def write(self, destination, source_model, name=None): """ Exports to NRML """ if os.path.exists(destination): os.remove(destination) self.destination = destination if name: source_model.name = name output_source_model = Node("sourceModel", {"name": name}) dic = groupby(source_model.sources, operator.itemgetter('tectonicRegion')) for i, (trt, srcs) in enumerate(dic.items(), 1): output_source_model.append( Node('sourceGroup', {'tectonicRegion': trt, 'name': 'group %d' % i}, nodes=srcs)) print("Exporting Source Model to %s" % self.destination) with open(self.destination, "wb") as f: nrml.write([output_source_model], f, "%s")
[ "def", "write", "(", "self", ",", "destination", ",", "source_model", ",", "name", "=", "None", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "destination", ")", ":", "os", ".", "remove", "(", "destination", ")", "self", ".", "destination", "=", "destination", "if", "name", ":", "source_model", ".", "name", "=", "name", "output_source_model", "=", "Node", "(", "\"sourceModel\"", ",", "{", "\"name\"", ":", "name", "}", ")", "dic", "=", "groupby", "(", "source_model", ".", "sources", ",", "operator", ".", "itemgetter", "(", "'tectonicRegion'", ")", ")", "for", "i", ",", "(", "trt", ",", "srcs", ")", "in", "enumerate", "(", "dic", ".", "items", "(", ")", ",", "1", ")", ":", "output_source_model", ".", "append", "(", "Node", "(", "'sourceGroup'", ",", "{", "'tectonicRegion'", ":", "trt", ",", "'name'", ":", "'group %d'", "%", "i", "}", ",", "nodes", "=", "srcs", ")", ")", "print", "(", "\"Exporting Source Model to %s\"", "%", "self", ".", "destination", ")", "with", "open", "(", "self", ".", "destination", ",", "\"wb\"", ")", "as", "f", ":", "nrml", ".", "write", "(", "[", "output_source_model", "]", ",", "f", ",", "\"%s\"", ")" ]
Exports to NRML
[ "Exports", "to", "NRML" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L937-L956
337
gem/oq-engine
openquake/commonlib/shapefileparser.py
ShapefileParser.filter_params
def filter_params(self, src_mod): """ Remove params uneeded by source_model """ # point and area related params STRIKE_PARAMS[src_mod.num_np:] = [] DIP_PARAMS[src_mod.num_np:] = [] RAKE_PARAMS[src_mod.num_np:] = [] NPW_PARAMS[src_mod.num_np:] = [] HDEPTH_PARAMS[src_mod.num_hd:] = [] HDW_PARAMS[src_mod.num_hd:] = [] # planar rupture related params PLANES_STRIKES_PARAM[src_mod.num_p:] = [] PLANES_DIPS_PARAM[src_mod.num_p:] = [] # rate params RATE_PARAMS[src_mod.num_r:] = [] if src_mod.has_simple_fault_geometry is False: GEOMETRY_PARAMS.remove(('dip', 'dip', 'f')) if (src_mod.has_simple_fault_geometry is False and src_mod.has_complex_fault_geometry is False and src_mod.has_planar_geometry is False): BASE_PARAMS.remove(('rake', 'rake', 'f')) if (src_mod.has_simple_fault_geometry is False and src_mod.has_complex_fault_geometry is False and src_mod.has_area_source is False and src_mod.has_point_source is False): GEOMETRY_PARAMS[:] = [] if src_mod.has_mfd_incremental is False: MFD_PARAMS.remove(('binWidth', 'bin_width', 'f'))
python
def filter_params(self, src_mod): """ Remove params uneeded by source_model """ # point and area related params STRIKE_PARAMS[src_mod.num_np:] = [] DIP_PARAMS[src_mod.num_np:] = [] RAKE_PARAMS[src_mod.num_np:] = [] NPW_PARAMS[src_mod.num_np:] = [] HDEPTH_PARAMS[src_mod.num_hd:] = [] HDW_PARAMS[src_mod.num_hd:] = [] # planar rupture related params PLANES_STRIKES_PARAM[src_mod.num_p:] = [] PLANES_DIPS_PARAM[src_mod.num_p:] = [] # rate params RATE_PARAMS[src_mod.num_r:] = [] if src_mod.has_simple_fault_geometry is False: GEOMETRY_PARAMS.remove(('dip', 'dip', 'f')) if (src_mod.has_simple_fault_geometry is False and src_mod.has_complex_fault_geometry is False and src_mod.has_planar_geometry is False): BASE_PARAMS.remove(('rake', 'rake', 'f')) if (src_mod.has_simple_fault_geometry is False and src_mod.has_complex_fault_geometry is False and src_mod.has_area_source is False and src_mod.has_point_source is False): GEOMETRY_PARAMS[:] = [] if src_mod.has_mfd_incremental is False: MFD_PARAMS.remove(('binWidth', 'bin_width', 'f'))
[ "def", "filter_params", "(", "self", ",", "src_mod", ")", ":", "# point and area related params", "STRIKE_PARAMS", "[", "src_mod", ".", "num_np", ":", "]", "=", "[", "]", "DIP_PARAMS", "[", "src_mod", ".", "num_np", ":", "]", "=", "[", "]", "RAKE_PARAMS", "[", "src_mod", ".", "num_np", ":", "]", "=", "[", "]", "NPW_PARAMS", "[", "src_mod", ".", "num_np", ":", "]", "=", "[", "]", "HDEPTH_PARAMS", "[", "src_mod", ".", "num_hd", ":", "]", "=", "[", "]", "HDW_PARAMS", "[", "src_mod", ".", "num_hd", ":", "]", "=", "[", "]", "# planar rupture related params", "PLANES_STRIKES_PARAM", "[", "src_mod", ".", "num_p", ":", "]", "=", "[", "]", "PLANES_DIPS_PARAM", "[", "src_mod", ".", "num_p", ":", "]", "=", "[", "]", "# rate params", "RATE_PARAMS", "[", "src_mod", ".", "num_r", ":", "]", "=", "[", "]", "if", "src_mod", ".", "has_simple_fault_geometry", "is", "False", ":", "GEOMETRY_PARAMS", ".", "remove", "(", "(", "'dip'", ",", "'dip'", ",", "'f'", ")", ")", "if", "(", "src_mod", ".", "has_simple_fault_geometry", "is", "False", "and", "src_mod", ".", "has_complex_fault_geometry", "is", "False", "and", "src_mod", ".", "has_planar_geometry", "is", "False", ")", ":", "BASE_PARAMS", ".", "remove", "(", "(", "'rake'", ",", "'rake'", ",", "'f'", ")", ")", "if", "(", "src_mod", ".", "has_simple_fault_geometry", "is", "False", "and", "src_mod", ".", "has_complex_fault_geometry", "is", "False", "and", "src_mod", ".", "has_area_source", "is", "False", "and", "src_mod", ".", "has_point_source", "is", "False", ")", ":", "GEOMETRY_PARAMS", "[", ":", "]", "=", "[", "]", "if", "src_mod", ".", "has_mfd_incremental", "is", "False", ":", "MFD_PARAMS", ".", "remove", "(", "(", "'binWidth'", ",", "'bin_width'", ",", "'f'", ")", ")" ]
Remove params unneeded by source_model
[ "Remove", "params", "unneeded", "by", "source_model" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/shapefileparser.py#L960-L992
338
gem/oq-engine
openquake/baselib/node.py
tostring
def tostring(node, indent=4, nsmap=None): """ Convert a node into an XML string by using the StreamingXMLWriter. This is useful for testing purposes. :param node: a node object (typically an ElementTree object) :param indent: the indentation to use in the XML (default 4 spaces) """ out = io.BytesIO() writer = StreamingXMLWriter(out, indent, nsmap=nsmap) writer.serialize(node) return out.getvalue()
python
def tostring(node, indent=4, nsmap=None): """ Convert a node into an XML string by using the StreamingXMLWriter. This is useful for testing purposes. :param node: a node object (typically an ElementTree object) :param indent: the indentation to use in the XML (default 4 spaces) """ out = io.BytesIO() writer = StreamingXMLWriter(out, indent, nsmap=nsmap) writer.serialize(node) return out.getvalue()
[ "def", "tostring", "(", "node", ",", "indent", "=", "4", ",", "nsmap", "=", "None", ")", ":", "out", "=", "io", ".", "BytesIO", "(", ")", "writer", "=", "StreamingXMLWriter", "(", "out", ",", "indent", ",", "nsmap", "=", "nsmap", ")", "writer", ".", "serialize", "(", "node", ")", "return", "out", ".", "getvalue", "(", ")" ]
Convert a node into an XML string by using the StreamingXMLWriter. This is useful for testing purposes. :param node: a node object (typically an ElementTree object) :param indent: the indentation to use in the XML (default 4 spaces)
[ "Convert", "a", "node", "into", "an", "XML", "string", "by", "using", "the", "StreamingXMLWriter", ".", "This", "is", "useful", "for", "testing", "purposes", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L216-L227
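A minimal usage sketch for tostring, assuming oq-engine is installed and that Node takes (tag, attrib, text, nodes) positionally, as node_copy further down in this file suggests:

from openquake.baselib.node import Node, tostring

# build a two-level tree: Node(tag, attrib, text, nodes)
child = Node('child', {}, 'hello')
root = Node('root', {'version': '1.0'}, None, [child])
print(tostring(root, indent=2).decode('utf-8'))  # prints the indented XML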
339
gem/oq-engine
openquake/baselib/node.py
parse
def parse(source, remove_comments=True, **kw): """Thin wrapper around ElementTree.parse""" return ElementTree.parse(source, SourceLineParser(), **kw)
python
def parse(source, remove_comments=True, **kw): """Thin wrapper around ElementTree.parse""" return ElementTree.parse(source, SourceLineParser(), **kw)
[ "def", "parse", "(", "source", ",", "remove_comments", "=", "True", ",", "*", "*", "kw", ")", ":", "return", "ElementTree", ".", "parse", "(", "source", ",", "SourceLineParser", "(", ")", ",", "*", "*", "kw", ")" ]
Thin wrapper around ElementTree.parse
[ "Thin", "wrapper", "around", "ElementTree", ".", "parse" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L350-L352
340
gem/oq-engine
openquake/baselib/node.py
iterparse
def iterparse(source, events=('end',), remove_comments=True, **kw): """Thin wrapper around ElementTree.iterparse""" return ElementTree.iterparse(source, events, SourceLineParser(), **kw)
python
def iterparse(source, events=('end',), remove_comments=True, **kw): """Thin wrapper around ElementTree.iterparse""" return ElementTree.iterparse(source, events, SourceLineParser(), **kw)
[ "def", "iterparse", "(", "source", ",", "events", "=", "(", "'end'", ",", ")", ",", "remove_comments", "=", "True", ",", "*", "*", "kw", ")", ":", "return", "ElementTree", ".", "iterparse", "(", "source", ",", "events", ",", "SourceLineParser", "(", ")", ",", "*", "*", "kw", ")" ]
Thin wrapper around ElementTree.iterparse
[ "Thin", "wrapper", "around", "ElementTree", ".", "iterparse" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L355-L357
341
gem/oq-engine
openquake/baselib/node.py
_displayattrs
def _displayattrs(attrib, expandattrs): """ Helper function to display the attributes of a Node object in lexicographic order. :param attrib: dictionary with the attributes :param expandattrs: if True also displays the value of the attributes """ if not attrib: return '' if expandattrs: alist = ['%s=%r' % item for item in sorted(attrib.items())] else: alist = list(attrib) return '{%s}' % ', '.join(alist)
python
def _displayattrs(attrib, expandattrs): """ Helper function to display the attributes of a Node object in lexicographic order. :param attrib: dictionary with the attributes :param expandattrs: if True also displays the value of the attributes """ if not attrib: return '' if expandattrs: alist = ['%s=%r' % item for item in sorted(attrib.items())] else: alist = list(attrib) return '{%s}' % ', '.join(alist)
[ "def", "_displayattrs", "(", "attrib", ",", "expandattrs", ")", ":", "if", "not", "attrib", ":", "return", "''", "if", "expandattrs", ":", "alist", "=", "[", "'%s=%r'", "%", "item", "for", "item", "in", "sorted", "(", "attrib", ".", "items", "(", ")", ")", "]", "else", ":", "alist", "=", "list", "(", "attrib", ")", "return", "'{%s}'", "%", "', '", ".", "join", "(", "alist", ")" ]
Helper function to display the attributes of a Node object in lexicographic order. :param attrib: dictionary with the attributes :param expandattrs: if True also displays the value of the attributes
[ "Helper", "function", "to", "display", "the", "attributes", "of", "a", "Node", "object", "in", "lexicographic", "order", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L363-L377
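The behaviour is easy to pin down with a doctest-style sketch (calling the private helper directly, purely for illustration); note that only the expanded form is sorted, the name-only form keeps dict insertion order:

from openquake.baselib.node import _displayattrs  # private helper

print(_displayattrs({'b': 2, 'a': 1}, expandattrs=True))   # {a=1, b=2}  (sorted, with values)
print(_displayattrs({'b': 2, 'a': 1}, expandattrs=False))  # {b, a}      (names only, insertion order)
print(repr(_displayattrs({}, True)))                       # ''          (empty attrib -> empty string)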
342
gem/oq-engine
openquake/baselib/node.py
_display
def _display(node, indent, expandattrs, expandvals, output): """Core function to display a Node object""" attrs = _displayattrs(node.attrib, expandattrs) if node.text is None or not expandvals: val = '' elif isinstance(node.text, str): val = ' %s' % repr(node.text.strip()) else: val = ' %s' % repr(node.text) # node.text can be a tuple output.write(encode(indent + striptag(node.tag) + attrs + val + '\n')) for sub_node in node: _display(sub_node, indent + ' ', expandattrs, expandvals, output)
python
def _display(node, indent, expandattrs, expandvals, output): """Core function to display a Node object""" attrs = _displayattrs(node.attrib, expandattrs) if node.text is None or not expandvals: val = '' elif isinstance(node.text, str): val = ' %s' % repr(node.text.strip()) else: val = ' %s' % repr(node.text) # node.text can be a tuple output.write(encode(indent + striptag(node.tag) + attrs + val + '\n')) for sub_node in node: _display(sub_node, indent + ' ', expandattrs, expandvals, output)
[ "def", "_display", "(", "node", ",", "indent", ",", "expandattrs", ",", "expandvals", ",", "output", ")", ":", "attrs", "=", "_displayattrs", "(", "node", ".", "attrib", ",", "expandattrs", ")", "if", "node", ".", "text", "is", "None", "or", "not", "expandvals", ":", "val", "=", "''", "elif", "isinstance", "(", "node", ".", "text", ",", "str", ")", ":", "val", "=", "' %s'", "%", "repr", "(", "node", ".", "text", ".", "strip", "(", ")", ")", "else", ":", "val", "=", "' %s'", "%", "repr", "(", "node", ".", "text", ")", "# node.text can be a tuple", "output", ".", "write", "(", "encode", "(", "indent", "+", "striptag", "(", "node", ".", "tag", ")", "+", "attrs", "+", "val", "+", "'\\n'", ")", ")", "for", "sub_node", "in", "node", ":", "_display", "(", "sub_node", ",", "indent", "+", "' '", ",", "expandattrs", ",", "expandvals", ",", "output", ")" ]
Core function to display a Node object
[ "Core", "function", "to", "display", "a", "Node", "object" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L380-L391
343
gem/oq-engine
openquake/baselib/node.py
to_literal
def to_literal(self): """ Convert the node into a literal Python object """ if not self.nodes: return (self.tag, self.attrib, self.text, []) else: return (self.tag, self.attrib, self.text, list(map(to_literal, self.nodes)))
python
def to_literal(self): """ Convert the node into a literal Python object """ if not self.nodes: return (self.tag, self.attrib, self.text, []) else: return (self.tag, self.attrib, self.text, list(map(to_literal, self.nodes)))
[ "def", "to_literal", "(", "self", ")", ":", "if", "not", "self", ".", "nodes", ":", "return", "(", "self", ".", "tag", ",", "self", ".", "attrib", ",", "self", ".", "text", ",", "[", "]", ")", "else", ":", "return", "(", "self", ".", "tag", ",", "self", ".", "attrib", ",", "self", ".", "text", ",", "list", "(", "map", "(", "to_literal", ",", "self", ".", "nodes", ")", ")", ")" ]
Convert the node into a literal Python object
[ "Convert", "the", "node", "into", "a", "literal", "Python", "object" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L574-L582
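A short sketch of the resulting shape, assuming to_literal is importable at module level, as its recursive use in the code above suggests:

from openquake.baselib.node import Node, to_literal

tree = Node('a', {'x': '1'}, None, [Node('b', {}, 'text')])
print(to_literal(tree))
# ('a', {'x': '1'}, None, [('b', {}, 'text', [])])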
344
gem/oq-engine
openquake/baselib/node.py
pprint
def pprint(self, stream=None, indent=1, width=80, depth=None): """ Pretty print the underlying literal Python object """ pp.pprint(to_literal(self), stream, indent, width, depth)
python
def pprint(self, stream=None, indent=1, width=80, depth=None): """ Pretty print the underlying literal Python object """ pp.pprint(to_literal(self), stream, indent, width, depth)
[ "def", "pprint", "(", "self", ",", "stream", "=", "None", ",", "indent", "=", "1", ",", "width", "=", "80", ",", "depth", "=", "None", ")", ":", "pp", ".", "pprint", "(", "to_literal", "(", "self", ")", ",", "stream", ",", "indent", ",", "width", ",", "depth", ")" ]
Pretty print the underlying literal Python object
[ "Pretty", "print", "the", "underlying", "literal", "Python", "object" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L585-L589
345
gem/oq-engine
openquake/baselib/node.py
read_nodes
def read_nodes(fname, filter_elem, nodefactory=Node, remove_comments=True): """ Convert an XML file into a lazy iterator over Node objects satifying the given specification, i.e. a function element -> boolean. :param fname: file name of file object :param filter_elem: element specification In case of errors, add the file name to the error message. """ try: for _, el in iterparse(fname, remove_comments=remove_comments): if filter_elem(el): yield node_from_elem(el, nodefactory) el.clear() # save memory except Exception: etype, exc, tb = sys.exc_info() msg = str(exc) if not str(fname) in msg: msg = '%s in %s' % (msg, fname) raise_(etype, msg, tb)
python
def read_nodes(fname, filter_elem, nodefactory=Node, remove_comments=True): """ Convert an XML file into a lazy iterator over Node objects satifying the given specification, i.e. a function element -> boolean. :param fname: file name of file object :param filter_elem: element specification In case of errors, add the file name to the error message. """ try: for _, el in iterparse(fname, remove_comments=remove_comments): if filter_elem(el): yield node_from_elem(el, nodefactory) el.clear() # save memory except Exception: etype, exc, tb = sys.exc_info() msg = str(exc) if not str(fname) in msg: msg = '%s in %s' % (msg, fname) raise_(etype, msg, tb)
[ "def", "read_nodes", "(", "fname", ",", "filter_elem", ",", "nodefactory", "=", "Node", ",", "remove_comments", "=", "True", ")", ":", "try", ":", "for", "_", ",", "el", "in", "iterparse", "(", "fname", ",", "remove_comments", "=", "remove_comments", ")", ":", "if", "filter_elem", "(", "el", ")", ":", "yield", "node_from_elem", "(", "el", ",", "nodefactory", ")", "el", ".", "clear", "(", ")", "# save memory", "except", "Exception", ":", "etype", ",", "exc", ",", "tb", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "str", "(", "exc", ")", "if", "not", "str", "(", "fname", ")", "in", "msg", ":", "msg", "=", "'%s in %s'", "%", "(", "msg", ",", "fname", ")", "raise_", "(", "etype", ",", "msg", ",", "tb", ")" ]
Convert an XML file into a lazy iterator over Node objects satisfying the given specification, i.e. a function element -> boolean. :param fname: file name or file object :param filter_elem: element specification In case of errors, add the file name to the error message.
[ "Convert", "an", "XML", "file", "into", "a", "lazy", "iterator", "over", "Node", "objects", "satisfying", "the", "given", "specification", "i", ".", "e", ".", "a", "function", "element", "-", ">", "boolean", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L666-L686
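Because the iterator is lazy and clears each element after yielding, it suits very large files. A hypothetical sketch ('sources.xml' is a placeholder file name; the filter receives the raw parsed elements, so it matches on their tag):

from openquake.baselib.node import read_nodes

# yield only the pointSource elements from a (hypothetical) NRML file
for src in read_nodes('sources.xml',
                      lambda elem: elem.tag.endswith('pointSource')):
    print(src.tag, src.attrib.get('id'))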
346
gem/oq-engine
openquake/baselib/node.py
node_from_xml
def node_from_xml(xmlfile, nodefactory=Node): """ Convert a .xml file into a Node object. :param xmlfile: a file name or file object open for reading """ root = parse(xmlfile).getroot() return node_from_elem(root, nodefactory)
python
def node_from_xml(xmlfile, nodefactory=Node): """ Convert a .xml file into a Node object. :param xmlfile: a file name or file object open for reading """ root = parse(xmlfile).getroot() return node_from_elem(root, nodefactory)
[ "def", "node_from_xml", "(", "xmlfile", ",", "nodefactory", "=", "Node", ")", ":", "root", "=", "parse", "(", "xmlfile", ")", ".", "getroot", "(", ")", "return", "node_from_elem", "(", "root", ",", "nodefactory", ")" ]
Convert a .xml file into a Node object. :param xmlfile: a file name or file object open for reading
[ "Convert", "a", ".", "xml", "file", "into", "a", "Node", "object", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L689-L696
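Since parse wraps ElementTree.parse, any readable binary file object works; a self-contained sketch using an in-memory buffer:

import io
from openquake.baselib.node import node_from_xml

buf = io.BytesIO(b"<root><child a='1'>text</child></root>")
root = node_from_xml(buf)
print(root.tag)              # root
print(root.nodes[0].attrib)  # {'a': '1'}
print(root.nodes[0].text)    # text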
347
gem/oq-engine
openquake/baselib/node.py
node_from_ini
def node_from_ini(ini_file, nodefactory=Node, root_name='ini'): """ Convert a .ini file into a Node object. :param ini_file: a filename or a file like object in read mode """ fileobj = open(ini_file) if isinstance(ini_file, str) else ini_file cfp = configparser.RawConfigParser() cfp.read_file(fileobj) root = nodefactory(root_name) sections = cfp.sections() for section in sections: params = dict(cfp.items(section)) root.append(Node(section, params)) return root
python
def node_from_ini(ini_file, nodefactory=Node, root_name='ini'): """ Convert a .ini file into a Node object. :param ini_file: a filename or a file like object in read mode """ fileobj = open(ini_file) if isinstance(ini_file, str) else ini_file cfp = configparser.RawConfigParser() cfp.read_file(fileobj) root = nodefactory(root_name) sections = cfp.sections() for section in sections: params = dict(cfp.items(section)) root.append(Node(section, params)) return root
[ "def", "node_from_ini", "(", "ini_file", ",", "nodefactory", "=", "Node", ",", "root_name", "=", "'ini'", ")", ":", "fileobj", "=", "open", "(", "ini_file", ")", "if", "isinstance", "(", "ini_file", ",", "str", ")", "else", "ini_file", "cfp", "=", "configparser", ".", "RawConfigParser", "(", ")", "cfp", ".", "read_file", "(", "fileobj", ")", "root", "=", "nodefactory", "(", "root_name", ")", "sections", "=", "cfp", ".", "sections", "(", ")", "for", "section", "in", "sections", ":", "params", "=", "dict", "(", "cfp", ".", "items", "(", "section", ")", ")", "root", ".", "append", "(", "Node", "(", "section", ",", "params", ")", ")", "return", "root" ]
Convert a .ini file into a Node object. :param ini_file: a filename or a file like object in read mode
[ "Convert", "a", ".", "ini", "file", "into", "a", "Node", "object", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L719-L733
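Each .ini section becomes a subnode whose attributes are the section's options. A minimal sketch with an in-memory file:

import io
from openquake.baselib.node import node_from_ini

ini = u"[general]\ncalculation_mode = classical\n"
root = node_from_ini(io.StringIO(ini))
section = root.nodes[0]
print(section.tag, section.attrib)  # general {'calculation_mode': 'classical'}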
348
gem/oq-engine
openquake/baselib/node.py
node_to_ini
def node_to_ini(node, output=sys.stdout): """ Convert a Node object with the right structure into a .ini file. :params node: a Node object :params output: a file-like object opened in write mode """ for subnode in node: output.write(u'\n[%s]\n' % subnode.tag) for name, value in sorted(subnode.attrib.items()): output.write(u'%s=%s\n' % (name, value)) output.flush()
python
def node_to_ini(node, output=sys.stdout): """ Convert a Node object with the right structure into a .ini file. :params node: a Node object :params output: a file-like object opened in write mode """ for subnode in node: output.write(u'\n[%s]\n' % subnode.tag) for name, value in sorted(subnode.attrib.items()): output.write(u'%s=%s\n' % (name, value)) output.flush()
[ "def", "node_to_ini", "(", "node", ",", "output", "=", "sys", ".", "stdout", ")", ":", "for", "subnode", "in", "node", ":", "output", ".", "write", "(", "u'\\n[%s]\\n'", "%", "subnode", ".", "tag", ")", "for", "name", ",", "value", "in", "sorted", "(", "subnode", ".", "attrib", ".", "items", "(", ")", ")", ":", "output", ".", "write", "(", "u'%s=%s\\n'", "%", "(", "name", ",", "value", ")", ")", "output", ".", "flush", "(", ")" ]
Convert a Node object with the right structure into a .ini file. :param node: a Node object :param output: a file-like object opened in write mode
[ "Convert", "a", "Node", "object", "with", "the", "right", "structure", "into", "a", ".", "ini", "file", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L736-L747
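node_to_ini is the inverse of node_from_ini, so the two round-trip; a sketch writing to stdout (the default output):

from openquake.baselib.node import Node, node_to_ini

root = Node('ini', {}, nodes=[Node('general', {'calculation_mode': 'classical'})])
node_to_ini(root)
# [general]
# calculation_mode=classical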
349
gem/oq-engine
openquake/baselib/node.py
node_copy
def node_copy(node, nodefactory=Node): """Make a deep copy of the node""" return nodefactory(node.tag, node.attrib.copy(), node.text, [node_copy(n, nodefactory) for n in node])
python
def node_copy(node, nodefactory=Node): """Make a deep copy of the node""" return nodefactory(node.tag, node.attrib.copy(), node.text, [node_copy(n, nodefactory) for n in node])
[ "def", "node_copy", "(", "node", ",", "nodefactory", "=", "Node", ")", ":", "return", "nodefactory", "(", "node", ".", "tag", ",", "node", ".", "attrib", ".", "copy", "(", ")", ",", "node", ".", "text", ",", "[", "node_copy", "(", "n", ",", "nodefactory", ")", "for", "n", "in", "node", "]", ")" ]
Make a deep copy of the node
[ "Make", "a", "deep", "copy", "of", "the", "node" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L750-L753
350
gem/oq-engine
openquake/baselib/node.py
context
def context(fname, node): """ Context manager managing exceptions and adding line number of the current node and name of the current file to the error message. :param fname: the current file being processed :param node: the current node being processed """ try: yield node except Exception: etype, exc, tb = sys.exc_info() msg = 'node %s: %s, line %s of %s' % ( striptag(node.tag), exc, getattr(node, 'lineno', '?'), fname) raise_(etype, msg, tb)
python
def context(fname, node): """ Context manager managing exceptions and adding line number of the current node and name of the current file to the error message. :param fname: the current file being processed :param node: the current node being processed """ try: yield node except Exception: etype, exc, tb = sys.exc_info() msg = 'node %s: %s, line %s of %s' % ( striptag(node.tag), exc, getattr(node, 'lineno', '?'), fname) raise_(etype, msg, tb)
[ "def", "context", "(", "fname", ",", "node", ")", ":", "try", ":", "yield", "node", "except", "Exception", ":", "etype", ",", "exc", ",", "tb", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'node %s: %s, line %s of %s'", "%", "(", "striptag", "(", "node", ".", "tag", ")", ",", "exc", ",", "getattr", "(", "node", ",", "'lineno'", ",", "'?'", ")", ",", "fname", ")", "raise_", "(", "etype", ",", "msg", ",", "tb", ")" ]
Context manager managing exceptions and adding line number of the current node and name of the current file to the error message. :param fname: the current file being processed :param node: the current node being processed
[ "Context", "manager", "managing", "exceptions", "and", "adding", "line", "number", "of", "the", "current", "node", "and", "name", "of", "the", "current", "file", "to", "the", "error", "message", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L757-L771
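As listed here, context is a plain generator; in the library it is presumably wrapped with contextlib.contextmanager, since its docstring calls it a context manager. Under that assumption, a sketch of how it decorates an exception:

from openquake.baselib.node import Node, context

node = Node('magnitude', {}, 'not-a-number')
with context('job.xml', node):  # assumed usable in a with-statement
    float(node.text)
# ValueError: node magnitude: could not convert string to float: 'not-a-number',
# line ? of job.xml  ('?' because this hand-built node carries no lineno)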
351
gem/oq-engine
openquake/baselib/node.py
StreamingXMLWriter.shorten
def shorten(self, tag): """ Get the short representation of a fully qualified tag :param str tag: a (fully qualified or not) XML tag """ if tag.startswith('{'): ns, _tag = tag.rsplit('}') tag = self.nsmap.get(ns[1:], '') + _tag return tag
python
def shorten(self, tag): """ Get the short representation of a fully qualified tag :param str tag: a (fully qualified or not) XML tag """ if tag.startswith('{'): ns, _tag = tag.rsplit('}') tag = self.nsmap.get(ns[1:], '') + _tag return tag
[ "def", "shorten", "(", "self", ",", "tag", ")", ":", "if", "tag", ".", "startswith", "(", "'{'", ")", ":", "ns", ",", "_tag", "=", "tag", ".", "rsplit", "(", "'}'", ")", "tag", "=", "self", ".", "nsmap", ".", "get", "(", "ns", "[", "1", ":", "]", ",", "''", ")", "+", "_tag", "return", "tag" ]
Get the short representation of a fully qualified tag :param str tag: a (fully qualified or not) XML tag
[ "Get", "the", "short", "representation", "of", "a", "fully", "qualified", "tag" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L254-L263
352
gem/oq-engine
openquake/baselib/node.py
StreamingXMLWriter._write
def _write(self, text): """Write text by respecting the current indentlevel""" spaces = ' ' * (self.indent * self.indentlevel) t = spaces + text.strip() + '\n' if hasattr(t, 'encode'): t = t.encode(self.encoding, 'xmlcharrefreplace') self.stream.write(t)
python
def _write(self, text): """Write text by respecting the current indentlevel""" spaces = ' ' * (self.indent * self.indentlevel) t = spaces + text.strip() + '\n' if hasattr(t, 'encode'): t = t.encode(self.encoding, 'xmlcharrefreplace') self.stream.write(t)
[ "def", "_write", "(", "self", ",", "text", ")", ":", "spaces", "=", "' '", "*", "(", "self", ".", "indent", "*", "self", ".", "indentlevel", ")", "t", "=", "spaces", "+", "text", ".", "strip", "(", ")", "+", "'\\n'", "if", "hasattr", "(", "t", ",", "'encode'", ")", ":", "t", "=", "t", ".", "encode", "(", "self", ".", "encoding", ",", "'xmlcharrefreplace'", ")", "self", ".", "stream", ".", "write", "(", "t", ")" ]
Write text by respecting the current indentlevel
[ "Write", "text", "by", "respecting", "the", "current", "indentlevel" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L265-L271
353
gem/oq-engine
openquake/baselib/node.py
StreamingXMLWriter.start_tag
def start_tag(self, name, attrs=None): """Open an XML tag""" if not attrs: self._write('<%s>' % name) else: self._write('<' + name) for (name, value) in sorted(attrs.items()): self._write( ' %s=%s' % (name, quoteattr(scientificformat(value)))) self._write('>') self.indentlevel += 1
python
def start_tag(self, name, attrs=None): """Open an XML tag""" if not attrs: self._write('<%s>' % name) else: self._write('<' + name) for (name, value) in sorted(attrs.items()): self._write( ' %s=%s' % (name, quoteattr(scientificformat(value)))) self._write('>') self.indentlevel += 1
[ "def", "start_tag", "(", "self", ",", "name", ",", "attrs", "=", "None", ")", ":", "if", "not", "attrs", ":", "self", ".", "_write", "(", "'<%s>'", "%", "name", ")", "else", ":", "self", ".", "_write", "(", "'<'", "+", "name", ")", "for", "(", "name", ",", "value", ")", "in", "sorted", "(", "attrs", ".", "items", "(", ")", ")", ":", "self", ".", "_write", "(", "' %s=%s'", "%", "(", "name", ",", "quoteattr", "(", "scientificformat", "(", "value", ")", ")", ")", ")", "self", ".", "_write", "(", "'>'", ")", "self", ".", "indentlevel", "+=", "1" ]
Open an XML tag
[ "Open", "an", "XML", "tag" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L279-L289
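start_tag and _write together let you stream a document that never exists in memory as one tree. A sketch; end_tag is not shown in this file and is assumed to mirror start_tag by closing the element and decreasing indentlevel:

import io
from openquake.baselib.node import Node, StreamingXMLWriter

out = io.BytesIO()
writer = StreamingXMLWriter(out, indent=2)
writer.start_tag('catalog', {'version': '1'})
for i in range(3):
    writer.serialize(Node('event', {'id': str(i)}))  # one element at a time
writer.end_tag('catalog')  # assumption: closes the tag opened above
print(out.getvalue().decode('utf-8'))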
354
gem/oq-engine
openquake/baselib/node.py
Node.getnodes
def getnodes(self, name): "Return the direct subnodes with name 'name'" for node in self.nodes: if striptag(node.tag) == name: yield node
python
def getnodes(self, name): "Return the direct subnodes with name 'name'" for node in self.nodes: if striptag(node.tag) == name: yield node
[ "def", "getnodes", "(", "self", ",", "name", ")", ":", "for", "node", "in", "self", ".", "nodes", ":", "if", "striptag", "(", "node", ".", "tag", ")", "==", "name", ":", "yield", "node" ]
Return the direct subnodes with name 'name'
[ "Return", "the", "direct", "subnodes", "with", "name", "name" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L458-L462
355
gem/oq-engine
openquake/baselib/node.py
Node.append
def append(self, node): "Append a new subnode" if not isinstance(node, self.__class__): raise TypeError('Expected Node instance, got %r' % node) self.nodes.append(node)
python
def append(self, node): "Append a new subnode" if not isinstance(node, self.__class__): raise TypeError('Expected Node instance, got %r' % node) self.nodes.append(node)
[ "def", "append", "(", "self", ",", "node", ")", ":", "if", "not", "isinstance", "(", "node", ",", "self", ".", "__class__", ")", ":", "raise", "TypeError", "(", "'Expected Node instance, got %r'", "%", "node", ")", "self", ".", "nodes", ".", "append", "(", "node", ")" ]
Append a new subnode
[ "Append", "a", "new", "subnode" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L464-L468
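append and getnodes are enough to build and query a small tree by hand, mirroring what SourceModelParser.write does earlier in this file:

from openquake.baselib.node import Node

root = Node('sourceModel', {'name': 'demo'})
root.append(Node('sourceGroup', {'tectonicRegion': 'Active Shallow Crust'}))
root.append(Node('sourceGroup', {'tectonicRegion': 'Stable Continental'}))
for grp in root.getnodes('sourceGroup'):
    print(grp.attrib['tectonicRegion'])
# root.append('oops') would raise TypeError('Expected Node instance, ...')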
356
gem/oq-engine
openquake/baselib/node.py
ValidatingXmlParser.parse_bytes
def parse_bytes(self, bytestr, isfinal=True): """ Parse a byte string. If the string is very large, split it in chuncks and parse each chunk with isfinal=False, then parse an empty chunk with isfinal=True. """ with self._context(): self.filename = None self.p.Parse(bytestr, isfinal) return self._root
python
def parse_bytes(self, bytestr, isfinal=True): """ Parse a byte string. If the string is very large, split it in chuncks and parse each chunk with isfinal=False, then parse an empty chunk with isfinal=True. """ with self._context(): self.filename = None self.p.Parse(bytestr, isfinal) return self._root
[ "def", "parse_bytes", "(", "self", ",", "bytestr", ",", "isfinal", "=", "True", ")", ":", "with", "self", ".", "_context", "(", ")", ":", "self", ".", "filename", "=", "None", "self", ".", "p", ".", "Parse", "(", "bytestr", ",", "isfinal", ")", "return", "self", ".", "_root" ]
Parse a byte string. If the string is very large, split it into chunks and parse each chunk with isfinal=False, then parse an empty chunk with isfinal=True.
[ "Parse", "a", "byte", "string", ".", "If", "the", "string", "is", "very", "large", "split", "it", "into", "chunks", "and", "parse", "each", "chunk", "with", "isfinal", "=", "False", "then", "parse", "an", "empty", "chunk", "with", "isfinal", "=", "True", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L815-L824
357
gem/oq-engine
openquake/baselib/node.py
ValidatingXmlParser.parse_file
def parse_file(self, file_or_fname): """ Parse a file or a filename """ with self._context(): if hasattr(file_or_fname, 'read'): self.filename = getattr( file_or_fname, 'name', file_or_fname.__class__.__name__) self.p.ParseFile(file_or_fname) else: self.filename = file_or_fname with open(file_or_fname, 'rb') as f: self.p.ParseFile(f) return self._root
python
def parse_file(self, file_or_fname): """ Parse a file or a filename """ with self._context(): if hasattr(file_or_fname, 'read'): self.filename = getattr( file_or_fname, 'name', file_or_fname.__class__.__name__) self.p.ParseFile(file_or_fname) else: self.filename = file_or_fname with open(file_or_fname, 'rb') as f: self.p.ParseFile(f) return self._root
[ "def", "parse_file", "(", "self", ",", "file_or_fname", ")", ":", "with", "self", ".", "_context", "(", ")", ":", "if", "hasattr", "(", "file_or_fname", ",", "'read'", ")", ":", "self", ".", "filename", "=", "getattr", "(", "file_or_fname", ",", "'name'", ",", "file_or_fname", ".", "__class__", ".", "__name__", ")", "self", ".", "p", ".", "ParseFile", "(", "file_or_fname", ")", "else", ":", "self", ".", "filename", "=", "file_or_fname", "with", "open", "(", "file_or_fname", ",", "'rb'", ")", "as", "f", ":", "self", ".", "p", ".", "ParseFile", "(", "f", ")", "return", "self", ".", "_root" ]
Parse a file or a filename
[ "Parse", "a", "file", "or", "a", "filename" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/node.py#L826-L839
358
gem/oq-engine
openquake/hmtk/plotting/seismicity/completeness/cumulative_rate_analysis.py
SimpleCumulativeRate._get_magnitudes_from_spacing
def _get_magnitudes_from_spacing(self, magnitudes, delta_m): '''If a single magnitude spacing is input then create the bins :param numpy.ndarray magnitudes: Vector of earthquake magnitudes :param float delta_m: Magnitude bin width :returns: Vector of magnitude bin edges (numpy.ndarray) ''' min_mag = np.min(magnitudes) max_mag = np.max(magnitudes) if (max_mag - min_mag) < delta_m: raise ValueError('Bin width greater than magnitude range!') mag_bins = np.arange(np.floor(min_mag), np.ceil(max_mag), delta_m) # Check to see if there are magnitudes in lower and upper bins is_mag = np.logical_and(mag_bins - max_mag < delta_m, min_mag - mag_bins < delta_m) mag_bins = mag_bins[is_mag] return mag_bins
python
def _get_magnitudes_from_spacing(self, magnitudes, delta_m): '''If a single magnitude spacing is input then create the bins :param numpy.ndarray magnitudes: Vector of earthquake magnitudes :param float delta_m: Magnitude bin width :returns: Vector of magnitude bin edges (numpy.ndarray) ''' min_mag = np.min(magnitudes) max_mag = np.max(magnitudes) if (max_mag - min_mag) < delta_m: raise ValueError('Bin width greater than magnitude range!') mag_bins = np.arange(np.floor(min_mag), np.ceil(max_mag), delta_m) # Check to see if there are magnitudes in lower and upper bins is_mag = np.logical_and(mag_bins - max_mag < delta_m, min_mag - mag_bins < delta_m) mag_bins = mag_bins[is_mag] return mag_bins
[ "def", "_get_magnitudes_from_spacing", "(", "self", ",", "magnitudes", ",", "delta_m", ")", ":", "min_mag", "=", "np", ".", "min", "(", "magnitudes", ")", "max_mag", "=", "np", ".", "max", "(", "magnitudes", ")", "if", "(", "max_mag", "-", "min_mag", ")", "<", "delta_m", ":", "raise", "ValueError", "(", "'Bin width greater than magnitude range!'", ")", "mag_bins", "=", "np", ".", "arange", "(", "np", ".", "floor", "(", "min_mag", ")", ",", "np", ".", "ceil", "(", "max_mag", ")", ",", "delta_m", ")", "# Check to see if there are magnitudes in lower and upper bins", "is_mag", "=", "np", ".", "logical_and", "(", "mag_bins", "-", "max_mag", "<", "delta_m", ",", "min_mag", "-", "mag_bins", "<", "delta_m", ")", "mag_bins", "=", "mag_bins", "[", "is_mag", "]", "return", "mag_bins" ]
If a single magnitude spacing is input then create the bins :param numpy.ndarray magnitudes: Vector of earthquake magnitudes :param float delta_m: Magnitude bin width :returns: Vector of magnitude bin edges (numpy.ndarray)
[ "If", "a", "single", "magnitude", "spacing", "is", "input", "then", "create", "the", "bins" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/completeness/cumulative_rate_analysis.py#L132-L152
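The binning logic is plain numpy, so it can be checked in isolation; a standalone re-derivation with made-up magnitudes:

import numpy as np

magnitudes = np.array([4.2, 4.7, 5.3, 6.1, 6.8])
delta_m = 0.5
mag_bins = np.arange(np.floor(magnitudes.min()),
                     np.ceil(magnitudes.max()), delta_m)
# keep only bins lying within delta_m of the observed magnitude range
keep = np.logical_and(mag_bins - magnitudes.max() < delta_m,
                      magnitudes.min() - mag_bins < delta_m)
print(mag_bins[keep])  # [4.  4.5 5.  5.5 6.  6.5]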
359
gem/oq-engine
openquake/hmtk/seismicity/catalogue.py
_merge_data
def _merge_data(dat1, dat2): """ Merge two data dictionaries containing catalogue data :parameter dictionary dat1: Catalogue data dictionary :parameter dictionary dat2: Catalogue data dictionary :returns: A catalogue data dictionary containing the information originally included in dat1 and dat2 """ cnt = 0 for key in dat1: flg1 = len(dat1[key]) > 0 flg2 = len(dat2[key]) > 0 if flg1 != flg2: cnt += 1 if cnt: raise Warning('Cannot merge catalogues with different' + ' attributes') return None else: for key in dat1: if isinstance(dat1[key], np.ndarray): dat1[key] = np.concatenate((dat1[key], dat2[key]), axis=0) elif isinstance(dat1[key], list): dat1[key] += dat2[key] else: raise ValueError('Unknown type') return dat1
python
def _merge_data(dat1, dat2): """ Merge two data dictionaries containing catalogue data :parameter dictionary dat1: Catalogue data dictionary :parameter dictionary dat2: Catalogue data dictionary :returns: A catalogue data dictionary containing the information originally included in dat1 and dat2 """ cnt = 0 for key in dat1: flg1 = len(dat1[key]) > 0 flg2 = len(dat2[key]) > 0 if flg1 != flg2: cnt += 1 if cnt: raise Warning('Cannot merge catalogues with different' + ' attributes') return None else: for key in dat1: if isinstance(dat1[key], np.ndarray): dat1[key] = np.concatenate((dat1[key], dat2[key]), axis=0) elif isinstance(dat1[key], list): dat1[key] += dat2[key] else: raise ValueError('Unknown type') return dat1
[ "def", "_merge_data", "(", "dat1", ",", "dat2", ")", ":", "cnt", "=", "0", "for", "key", "in", "dat1", ":", "flg1", "=", "len", "(", "dat1", "[", "key", "]", ")", ">", "0", "flg2", "=", "len", "(", "dat2", "[", "key", "]", ")", ">", "0", "if", "flg1", "!=", "flg2", ":", "cnt", "+=", "1", "if", "cnt", ":", "raise", "Warning", "(", "'Cannot merge catalogues with different'", "+", "' attributes'", ")", "return", "None", "else", ":", "for", "key", "in", "dat1", ":", "if", "isinstance", "(", "dat1", "[", "key", "]", ",", "np", ".", "ndarray", ")", ":", "dat1", "[", "key", "]", "=", "np", ".", "concatenate", "(", "(", "dat1", "[", "key", "]", ",", "dat2", "[", "key", "]", ")", ",", "axis", "=", "0", ")", "elif", "isinstance", "(", "dat1", "[", "key", "]", ",", "list", ")", ":", "dat1", "[", "key", "]", "+=", "dat2", "[", "key", "]", "else", ":", "raise", "ValueError", "(", "'Unknown type'", ")", "return", "dat1" ]
Merge two data dictionaries containing catalogue data :parameter dictionary dat1: Catalogue data dictionary :parameter dictionary dat2: Catalogue data dictionary :returns: A catalogue data dictionary containing the information originally included in dat1 and dat2
[ "Merge", "two", "data", "dictionaries", "containing", "catalogue", "data" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L566-L600
360
gem/oq-engine
openquake/hmtk/seismicity/catalogue.py
Catalogue._get_row_str
def _get_row_str(self, i): """ Returns a string representation of the key information in a row """ row_data = ["{:s}".format(self.data['eventID'][i]), "{:g}".format(self.data['year'][i]), "{:g}".format(self.data['month'][i]), "{:g}".format(self.data['day'][i]), "{:g}".format(self.data['hour'][i]), "{:g}".format(self.data['minute'][i]), "{:.1f}".format(self.data['second'][i]), "{:.3f}".format(self.data['longitude'][i]), "{:.3f}".format(self.data['latitude'][i]), "{:.1f}".format(self.data['depth'][i]), "{:.1f}".format(self.data['magnitude'][i])] return " ".join(row_data)
python
def _get_row_str(self, i): """ Returns a string representation of the key information in a row """ row_data = ["{:s}".format(self.data['eventID'][i]), "{:g}".format(self.data['year'][i]), "{:g}".format(self.data['month'][i]), "{:g}".format(self.data['day'][i]), "{:g}".format(self.data['hour'][i]), "{:g}".format(self.data['minute'][i]), "{:.1f}".format(self.data['second'][i]), "{:.3f}".format(self.data['longitude'][i]), "{:.3f}".format(self.data['latitude'][i]), "{:.1f}".format(self.data['depth'][i]), "{:.1f}".format(self.data['magnitude'][i])] return " ".join(row_data)
[ "def", "_get_row_str", "(", "self", ",", "i", ")", ":", "row_data", "=", "[", "\"{:s}\"", ".", "format", "(", "self", ".", "data", "[", "'eventID'", "]", "[", "i", "]", ")", ",", "\"{:g}\"", ".", "format", "(", "self", ".", "data", "[", "'year'", "]", "[", "i", "]", ")", ",", "\"{:g}\"", ".", "format", "(", "self", ".", "data", "[", "'month'", "]", "[", "i", "]", ")", ",", "\"{:g}\"", ".", "format", "(", "self", ".", "data", "[", "'day'", "]", "[", "i", "]", ")", ",", "\"{:g}\"", ".", "format", "(", "self", ".", "data", "[", "'hour'", "]", "[", "i", "]", ")", ",", "\"{:g}\"", ".", "format", "(", "self", ".", "data", "[", "'minute'", "]", "[", "i", "]", ")", ",", "\"{:.1f}\"", ".", "format", "(", "self", ".", "data", "[", "'second'", "]", "[", "i", "]", ")", ",", "\"{:.3f}\"", ".", "format", "(", "self", ".", "data", "[", "'longitude'", "]", "[", "i", "]", ")", ",", "\"{:.3f}\"", ".", "format", "(", "self", ".", "data", "[", "'latitude'", "]", "[", "i", "]", ")", ",", "\"{:.1f}\"", ".", "format", "(", "self", ".", "data", "[", "'depth'", "]", "[", "i", "]", ")", ",", "\"{:.1f}\"", ".", "format", "(", "self", ".", "data", "[", "'magnitude'", "]", "[", "i", "]", ")", "]", "return", "\" \"", ".", "join", "(", "row_data", ")" ]
Returns a string representation of the key information in a row
[ "Returns", "a", "string", "representation", "of", "the", "key", "information", "in", "a", "row" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L138-L153
361
gem/oq-engine
openquake/hmtk/seismicity/catalogue.py
Catalogue.load_to_array
def load_to_array(self, keys): """ This loads the data contained in the catalogue into a numpy array. The method works only for float data :param keys: A list of keys to be uploaded into the array :type list: """ # Preallocate the numpy array data = np.empty((len(self.data[keys[0]]), len(keys))) for i in range(0, len(self.data[keys[0]])): for j, key in enumerate(keys): data[i, j] = self.data[key][i] return data
python
def load_to_array(self, keys): """ This loads the data contained in the catalogue into a numpy array. The method works only for float data :param keys: A list of keys to be uploaded into the array :type list: """ # Preallocate the numpy array data = np.empty((len(self.data[keys[0]]), len(keys))) for i in range(0, len(self.data[keys[0]])): for j, key in enumerate(keys): data[i, j] = self.data[key][i] return data
[ "def", "load_to_array", "(", "self", ",", "keys", ")", ":", "# Preallocate the numpy array", "data", "=", "np", ".", "empty", "(", "(", "len", "(", "self", ".", "data", "[", "keys", "[", "0", "]", "]", ")", ",", "len", "(", "keys", ")", ")", ")", "for", "i", "in", "range", "(", "0", ",", "len", "(", "self", ".", "data", "[", "keys", "[", "0", "]", "]", ")", ")", ":", "for", "j", ",", "key", "in", "enumerate", "(", "keys", ")", ":", "data", "[", "i", ",", "j", "]", "=", "self", ".", "data", "[", "key", "]", "[", "i", "]", "return", "data" ]
This loads the data contained in the catalogue into a numpy array. The method works only for float data :param keys: A list of keys to be uploaded into the array :type list:
[ "This", "loads", "the", "data", "contained", "in", "the", "catalogue", "into", "a", "numpy", "array", ".", "The", "method", "works", "only", "for", "float", "data" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L223-L237
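The element-wise double loop is equivalent to stacking the requested columns; a standalone numpy sketch with made-up catalogue data:

import numpy as np

data = {'longitude': np.array([30.1, 30.4]),
        'latitude': np.array([40.2, 40.9]),
        'magnitude': np.array([5.1, 6.3])}
keys = ['longitude', 'latitude', 'magnitude']
arr = np.column_stack([data[k] for k in keys])  # same result as the loop
print(arr.shape)  # (2, 3)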
362
gem/oq-engine
openquake/hmtk/seismicity/catalogue.py
Catalogue.load_from_array
def load_from_array(self, keys, data_array): """ This loads the data contained in an array into the catalogue object :param keys: A list of keys explaining the content of the columns in the array :type list: """ if len(keys) != np.shape(data_array)[1]: raise ValueError('Key list does not match shape of array!') for i, key in enumerate(keys): if key in self.INT_ATTRIBUTE_LIST: self.data[key] = data_array[:, i].astype(int) else: self.data[key] = data_array[:, i] if key not in self.TOTAL_ATTRIBUTE_LIST: print('Key %s not a recognised catalogue attribute' % key) self.update_end_year()
python
def load_from_array(self, keys, data_array): """ This loads the data contained in an array into the catalogue object :param keys: A list of keys explaining the content of the columns in the array :type list: """ if len(keys) != np.shape(data_array)[1]: raise ValueError('Key list does not match shape of array!') for i, key in enumerate(keys): if key in self.INT_ATTRIBUTE_LIST: self.data[key] = data_array[:, i].astype(int) else: self.data[key] = data_array[:, i] if key not in self.TOTAL_ATTRIBUTE_LIST: print('Key %s not a recognised catalogue attribute' % key) self.update_end_year()
[ "def", "load_from_array", "(", "self", ",", "keys", ",", "data_array", ")", ":", "if", "len", "(", "keys", ")", "!=", "np", ".", "shape", "(", "data_array", ")", "[", "1", "]", ":", "raise", "ValueError", "(", "'Key list does not match shape of array!'", ")", "for", "i", ",", "key", "in", "enumerate", "(", "keys", ")", ":", "if", "key", "in", "self", ".", "INT_ATTRIBUTE_LIST", ":", "self", ".", "data", "[", "key", "]", "=", "data_array", "[", ":", ",", "i", "]", ".", "astype", "(", "int", ")", "else", ":", "self", ".", "data", "[", "key", "]", "=", "data_array", "[", ":", ",", "i", "]", "if", "key", "not", "in", "self", ".", "TOTAL_ATTRIBUTE_LIST", ":", "print", "(", "'Key %s not a recognised catalogue attribute'", "%", "key", ")", "self", ".", "update_end_year", "(", ")" ]
This loads the data contained in an array into the catalogue object :param keys: A list of keys explaining the content of the columns in the array :type list:
[ "This", "loads", "the", "data", "contained", "in", "an", "array", "into", "the", "catalogue", "object" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L239-L259
363
gem/oq-engine
openquake/hmtk/seismicity/catalogue.py
Catalogue.catalogue_mt_filter
def catalogue_mt_filter(self, mt_table, flag=None): """ Filter the catalogue using a magnitude-time table. The table has two columns and n-rows. :param nump.ndarray mt_table: Magnitude time table with n-rows where column 1 is year and column 2 is magnitude """ if flag is None: # No flag defined, therefore all events are initially valid flag = np.ones(self.get_number_events(), dtype=bool) for comp_val in mt_table: id0 = np.logical_and(self.data['year'].astype(float) < comp_val[0], self.data['magnitude'] < comp_val[1]) print(id0) flag[id0] = False if not np.all(flag): self.purge_catalogue(flag)
python
def catalogue_mt_filter(self, mt_table, flag=None): """ Filter the catalogue using a magnitude-time table. The table has two columns and n-rows. :param nump.ndarray mt_table: Magnitude time table with n-rows where column 1 is year and column 2 is magnitude """ if flag is None: # No flag defined, therefore all events are initially valid flag = np.ones(self.get_number_events(), dtype=bool) for comp_val in mt_table: id0 = np.logical_and(self.data['year'].astype(float) < comp_val[0], self.data['magnitude'] < comp_val[1]) print(id0) flag[id0] = False if not np.all(flag): self.purge_catalogue(flag)
[ "def", "catalogue_mt_filter", "(", "self", ",", "mt_table", ",", "flag", "=", "None", ")", ":", "if", "flag", "is", "None", ":", "# No flag defined, therefore all events are initially valid", "flag", "=", "np", ".", "ones", "(", "self", ".", "get_number_events", "(", ")", ",", "dtype", "=", "bool", ")", "for", "comp_val", "in", "mt_table", ":", "id0", "=", "np", ".", "logical_and", "(", "self", ".", "data", "[", "'year'", "]", ".", "astype", "(", "float", ")", "<", "comp_val", "[", "0", "]", ",", "self", ".", "data", "[", "'magnitude'", "]", "<", "comp_val", "[", "1", "]", ")", "print", "(", "id0", ")", "flag", "[", "id0", "]", "=", "False", "if", "not", "np", ".", "all", "(", "flag", ")", ":", "self", ".", "purge_catalogue", "(", "flag", ")" ]
Filter the catalogue using a magnitude-time table. The table has two columns and n-rows. :param numpy.ndarray mt_table: Magnitude time table with n-rows where column 1 is year and column 2 is magnitude
[ "Filter", "the", "catalogue", "using", "a", "magnitude", "-", "time", "table", ".", "The", "table", "has", "two", "columns", "and", "n", "-", "rows", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L282-L302
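The core test is a per-row logical_and over the whole catalogue; a standalone sketch with a toy two-row completeness table:

import numpy as np

year = np.array([1910., 1965., 1990., 2005.])
magnitude = np.array([5.0, 6.2, 4.4, 4.1])
# each row says: before this year, events below this magnitude are incomplete
mt_table = np.array([[1960., 6.0],
                     [1990., 4.5]])
flag = np.ones(year.size, dtype=bool)  # start with all events valid
for comp_year, comp_mag in mt_table:
    flag[np.logical_and(year < comp_year, magnitude < comp_mag)] = False
print(flag)  # [False  True  True  True]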
364
gem/oq-engine
openquake/hmtk/seismicity/catalogue.py
Catalogue.get_bounding_box
def get_bounding_box(self): """ Returns the bounding box of the catalogue :returns: (West, East, South, North) """ return (np.min(self.data["longitude"]), np.max(self.data["longitude"]), np.min(self.data["latitude"]), np.max(self.data["latitude"]))
python
def get_bounding_box(self): """ Returns the bounding box of the catalogue :returns: (West, East, South, North) """ return (np.min(self.data["longitude"]), np.max(self.data["longitude"]), np.min(self.data["latitude"]), np.max(self.data["latitude"]))
[ "def", "get_bounding_box", "(", "self", ")", ":", "return", "(", "np", ".", "min", "(", "self", ".", "data", "[", "\"longitude\"", "]", ")", ",", "np", ".", "max", "(", "self", ".", "data", "[", "\"longitude\"", "]", ")", ",", "np", ".", "min", "(", "self", ".", "data", "[", "\"latitude\"", "]", ")", ",", "np", ".", "max", "(", "self", ".", "data", "[", "\"latitude\"", "]", ")", ")" ]
Returns the bounding box of the catalogue :returns: (West, East, South, North)
[ "Returns", "the", "bounding", "box", "of", "the", "catalogue" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L304-L313
365
gem/oq-engine
openquake/hmtk/seismicity/catalogue.py
Catalogue.get_decimal_time
def get_decimal_time(self): ''' Returns the time of the catalogue as a decimal ''' return decimal_time(self.data['year'], self.data['month'], self.data['day'], self.data['hour'], self.data['minute'], self.data['second'])
python
def get_decimal_time(self): ''' Returns the time of the catalogue as a decimal ''' return decimal_time(self.data['year'], self.data['month'], self.data['day'], self.data['hour'], self.data['minute'], self.data['second'])
[ "def", "get_decimal_time", "(", "self", ")", ":", "return", "decimal_time", "(", "self", ".", "data", "[", "'year'", "]", ",", "self", ".", "data", "[", "'month'", "]", ",", "self", ".", "data", "[", "'day'", "]", ",", "self", ".", "data", "[", "'hour'", "]", ",", "self", ".", "data", "[", "'minute'", "]", ",", "self", ".", "data", "[", "'second'", "]", ")" ]
Returns the time of the catalogue as a decimal
[ "Returns", "the", "time", "of", "the", "catalogue", "as", "a", "decimal" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L326-L335
366
gem/oq-engine
openquake/hmtk/seismicity/catalogue.py
Catalogue.sort_catalogue_chronologically
def sort_catalogue_chronologically(self): ''' Sorts the catalogue into chronological order ''' dec_time = self.get_decimal_time() idx = np.argsort(dec_time) if np.all((idx[1:] - idx[:-1]) > 0.): # Catalogue was already in chronological order return self.select_catalogue_events(idx)
python
def sort_catalogue_chronologically(self): ''' Sorts the catalogue into chronological order ''' dec_time = self.get_decimal_time() idx = np.argsort(dec_time) if np.all((idx[1:] - idx[:-1]) > 0.): # Catalogue was already in chronological order return self.select_catalogue_events(idx)
[ "def", "sort_catalogue_chronologically", "(", "self", ")", ":", "dec_time", "=", "self", ".", "get_decimal_time", "(", ")", "idx", "=", "np", ".", "argsort", "(", "dec_time", ")", "if", "np", ".", "all", "(", "(", "idx", "[", "1", ":", "]", "-", "idx", "[", ":", "-", "1", "]", ")", ">", "0.", ")", ":", "# Catalogue was already in chronological order", "return", "self", ".", "select_catalogue_events", "(", "idx", ")" ]
Sorts the catalogue into chronological order
[ "Sorts", "the", "catalogue", "into", "chronological", "order" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L353-L362
367
gem/oq-engine
openquake/hmtk/seismicity/catalogue.py
Catalogue.purge_catalogue
def purge_catalogue(self, flag_vector): ''' Purges present catalogue with invalid events defined by flag_vector :param numpy.ndarray flag_vector: Boolean vector showing if events are selected (True) or not (False) ''' id0 = np.where(flag_vector)[0] self.select_catalogue_events(id0) self.get_number_events()
python
def purge_catalogue(self, flag_vector): ''' Purges present catalogue with invalid events defined by flag_vector :param numpy.ndarray flag_vector: Boolean vector showing if events are selected (True) or not (False) ''' id0 = np.where(flag_vector)[0] self.select_catalogue_events(id0) self.get_number_events()
[ "def", "purge_catalogue", "(", "self", ",", "flag_vector", ")", ":", "id0", "=", "np", ".", "where", "(", "flag_vector", ")", "[", "0", "]", "self", ".", "select_catalogue_events", "(", "id0", ")", "self", ".", "get_number_events", "(", ")" ]
Purges the present catalogue of invalid events, as defined by flag_vector :param numpy.ndarray flag_vector: Boolean vector showing if events are selected (True) or not (False)
[ "Purges", "the", "present", "catalogue", "of", "invalid", "events", "as", "defined", "by", "flag_vector" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L364-L374
368
gem/oq-engine
openquake/hmtk/seismicity/catalogue.py
Catalogue.select_catalogue_events
def select_catalogue_events(self, id0): ''' Orders the events in the catalogue according to an indexing vector. :param np.ndarray id0: Pointer array indicating the locations of selected events ''' for key in self.data: if isinstance( self.data[key], np.ndarray) and len(self.data[key]) > 0: # Dictionary element is numpy array - use fancy (positional) indexing self.data[key] = self.data[key][id0] elif isinstance( self.data[key], list) and len(self.data[key]) > 0: # Dictionary element is list self.data[key] = [self.data[key][iloc] for iloc in id0] else: continue
python
def select_catalogue_events(self, id0): ''' Orders the events in the catalogue according to an indexing vector. :param np.ndarray id0: Pointer array indicating the locations of selected events ''' for key in self.data: if isinstance( self.data[key], np.ndarray) and len(self.data[key]) > 0: # Dictionary element is numpy array - use fancy (positional) indexing self.data[key] = self.data[key][id0] elif isinstance( self.data[key], list) and len(self.data[key]) > 0: # Dictionary element is list self.data[key] = [self.data[key][iloc] for iloc in id0] else: continue
[ "def", "select_catalogue_events", "(", "self", ",", "id0", ")", ":", "for", "key", "in", "self", ".", "data", ":", "if", "isinstance", "(", "self", ".", "data", "[", "key", "]", ",", "np", ".", "ndarray", ")", "and", "len", "(", "self", ".", "data", "[", "key", "]", ")", ">", "0", ":", "# Dictionary element is numpy array - use logical indexing", "self", ".", "data", "[", "key", "]", "=", "self", ".", "data", "[", "key", "]", "[", "id0", "]", "elif", "isinstance", "(", "self", ".", "data", "[", "key", "]", ",", "list", ")", "and", "len", "(", "self", ".", "data", "[", "key", "]", ")", ">", "0", ":", "# Dictionary element is list", "self", ".", "data", "[", "key", "]", "=", "[", "self", ".", "data", "[", "key", "]", "[", "iloc", "]", "for", "iloc", "in", "id0", "]", "else", ":", "continue" ]
Orders the events in the catalogue according to an indexing vector. :param np.ndarray id0: Pointer array indicating the locations of selected events
[ "Orders", "the", "events", "in", "the", "catalogue", "according", "to", "an", "indexing", "vector", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L376-L393
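The loop above applies one positional index array to every column of the catalogue, whether it is stored as a numpy array or as a plain list; a self-contained sketch with illustrative keys (not the real catalogue schema):

import numpy as np

data = {'magnitude': np.array([5.1, 4.3, 6.0]),
        'Agency': ['A', 'B', 'C'],
        'comment': []}                          # empty entries are left alone
id0 = np.array([2, 0])                          # keep events 2 and 0, in that order
for key in data:
    if isinstance(data[key], np.ndarray) and len(data[key]) > 0:
        data[key] = data[key][id0]              # numpy fancy indexing
    elif isinstance(data[key], list) and len(data[key]) > 0:
        data[key] = [data[key][i] for i in id0]
print(data['magnitude'], data['Agency'])        # [6.  5.1] ['C', 'A']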
369
gem/oq-engine
openquake/hmtk/seismicity/catalogue.py
Catalogue.get_depth_distribution
def get_depth_distribution(self, depth_bins, normalisation=False, bootstrap=None): ''' Gets the depth distribution of the earthquake catalogue to return a single histogram. Depths may be normalised. If uncertainties are found in the catalogue the distribution may be bootstrap sampled :param numpy.ndarray depth_bins: Bin edges for the depths :param bool normalisation: Choose to normalise the results such that the total contributions sum to 1.0 (True) or not (False) :param int bootstrap: Number of bootstrap samples :returns: Histogram of depth values ''' if len(self.data['depth']) == 0: # If depth information is missing raise ValueError('Depths missing in catalogue') if len(self.data['depthError']) == 0: self.data['depthError'] = np.zeros(self.get_number_events(), dtype=float) return bootstrap_histogram_1D(self.data['depth'], depth_bins, self.data['depthError'], normalisation=normalisation, number_bootstraps=bootstrap, boundaries=(0., None))
python
def get_depth_distribution(self, depth_bins, normalisation=False, bootstrap=None): ''' Gets the depth distribution of the earthquake catalogue to return a single histogram. Depths may be normalised. If uncertainties are found in the catalogue the distribution may be bootstrap sampled :param numpy.ndarray depth_bins: Bin edges for the depths :param bool normalisation: Choose to normalise the results such that the total contributions sum to 1.0 (True) or not (False) :param int bootstrap: Number of bootstrap samples :returns: Histogram of depth values ''' if len(self.data['depth']) == 0: # If depth information is missing raise ValueError('Depths missing in catalogue') if len(self.data['depthError']) == 0: self.data['depthError'] = np.zeros(self.get_number_events(), dtype=float) return bootstrap_histogram_1D(self.data['depth'], depth_bins, self.data['depthError'], normalisation=normalisation, number_bootstraps=bootstrap, boundaries=(0., None))
[ "def", "get_depth_distribution", "(", "self", ",", "depth_bins", ",", "normalisation", "=", "False", ",", "bootstrap", "=", "None", ")", ":", "if", "len", "(", "self", ".", "data", "[", "'depth'", "]", ")", "==", "0", ":", "# If depth information is missing", "raise", "ValueError", "(", "'Depths missing in catalogue'", ")", "if", "len", "(", "self", ".", "data", "[", "'depthError'", "]", ")", "==", "0", ":", "self", ".", "data", "[", "'depthError'", "]", "=", "np", ".", "zeros", "(", "self", ".", "get_number_events", "(", ")", ",", "dtype", "=", "float", ")", "return", "bootstrap_histogram_1D", "(", "self", ".", "data", "[", "'depth'", "]", ",", "depth_bins", ",", "self", ".", "data", "[", "'depthError'", "]", ",", "normalisation", "=", "normalisation", ",", "number_bootstraps", "=", "bootstrap", ",", "boundaries", "=", "(", "0.", ",", "None", ")", ")" ]
Gets the depth distribution of the earthquake catalogue to return a single histogram. Depths may be normalised. If uncertainties are found in the catalogue the distribution may be bootstrap sampled :param numpy.ndarray depth_bins: Bin edges for the depths :param bool normalisation: Choose to normalise the results such that the total contributions sum to 1.0 (True) or not (False) :param int bootstrap: Number of bootstrap samples :returns: Histogram of depth values
[ "Gets", "the", "depth", "distribution", "of", "the", "earthquake", "catalogue", "to", "return", "a", "single", "histogram", ".", "Depths", "may", "be", "normalised", ".", "If", "uncertainties", "are", "found", "in", "the", "catalogue", "the", "distrbution", "may", "be", "bootstrap", "sampled" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L395-L429
370
gem/oq-engine
openquake/hmtk/seismicity/catalogue.py
Catalogue.get_depth_pmf
def get_depth_pmf(self, depth_bins, default_depth=5.0, bootstrap=None): """ Returns the depth distribution of the catalogue as a probability mass function """ if len(self.data['depth']) == 0: # If depth information is missing return PMF([(1.0, default_depth)]) # Get the depth distribution depth_hist = self.get_depth_distribution(depth_bins, normalisation=True, bootstrap=bootstrap) # If the histogram does not sum to 1.0 then remove the difference # from the lowest bin depth_hist = np.around(depth_hist, 3) while depth_hist.sum() - 1.0: depth_hist[-1] -= depth_hist.sum() - 1.0 depth_hist = np.around(depth_hist, 3) pmf_list = [] for iloc, prob in enumerate(depth_hist): pmf_list.append((prob, (depth_bins[iloc] + depth_bins[iloc + 1]) / 2.0)) return PMF(pmf_list)
python
def get_depth_pmf(self, depth_bins, default_depth=5.0, bootstrap=None): """ Returns the depth distribution of the catalogue as a probability mass function """ if len(self.data['depth']) == 0: # If depth information is missing return PMF([(1.0, default_depth)]) # Get the depth distribution depth_hist = self.get_depth_distribution(depth_bins, normalisation=True, bootstrap=bootstrap) # If the histogram does not sum to 1.0 then remove the difference # from the lowest bin depth_hist = np.around(depth_hist, 3) while depth_hist.sum() - 1.0: depth_hist[-1] -= depth_hist.sum() - 1.0 depth_hist = np.around(depth_hist, 3) pmf_list = [] for iloc, prob in enumerate(depth_hist): pmf_list.append((prob, (depth_bins[iloc] + depth_bins[iloc + 1]) / 2.0)) return PMF(pmf_list)
[ "def", "get_depth_pmf", "(", "self", ",", "depth_bins", ",", "default_depth", "=", "5.0", ",", "bootstrap", "=", "None", ")", ":", "if", "len", "(", "self", ".", "data", "[", "'depth'", "]", ")", "==", "0", ":", "# If depth information is missing", "return", "PMF", "(", "[", "(", "1.0", ",", "default_depth", ")", "]", ")", "# Get the depth distribution", "depth_hist", "=", "self", ".", "get_depth_distribution", "(", "depth_bins", ",", "normalisation", "=", "True", ",", "bootstrap", "=", "bootstrap", ")", "# If the histogram does not sum to 1.0 then remove the difference", "# from the lowest bin", "depth_hist", "=", "np", ".", "around", "(", "depth_hist", ",", "3", ")", "while", "depth_hist", ".", "sum", "(", ")", "-", "1.0", ":", "depth_hist", "[", "-", "1", "]", "-=", "depth_hist", ".", "sum", "(", ")", "-", "1.0", "depth_hist", "=", "np", ".", "around", "(", "depth_hist", ",", "3", ")", "pmf_list", "=", "[", "]", "for", "iloc", ",", "prob", "in", "enumerate", "(", "depth_hist", ")", ":", "pmf_list", ".", "append", "(", "(", "prob", ",", "(", "depth_bins", "[", "iloc", "]", "+", "depth_bins", "[", "iloc", "+", "1", "]", ")", "/", "2.0", ")", ")", "return", "PMF", "(", "pmf_list", ")" ]
Returns the depth distribution of the catalogue as a probability mass function
[ "Returns", "the", "depth", "distribution", "of", "the", "catalogue", "as", "a", "probability", "mass", "function" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L431-L454
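The rebalancing loop above deserves a note: after rounding to three decimals the histogram may miss 1.0 by a small residual, which is folded into the final bin, and termination relies on the re-rounding making the sum land exactly on 1.0. A toy run (toy numbers) shows one pass through the loop:

import numpy as np

depth_hist = np.around(np.array([0.333, 0.333, 0.333]), 3)   # sums to 0.999
while depth_hist.sum() - 1.0:
    depth_hist[-1] -= depth_hist.sum() - 1.0
    depth_hist = np.around(depth_hist, 3)
print(depth_hist)                                # [0.333 0.333 0.334]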
371
gem/oq-engine
openquake/hmtk/seismicity/catalogue.py
Catalogue.get_magnitude_depth_distribution
def get_magnitude_depth_distribution(self, magnitude_bins, depth_bins, normalisation=False, bootstrap=None): ''' Returns a 2-D magnitude-depth histogram for the catalogue :param numpy.ndarray magnitude_bins: Bin edges for the magnitudes :param numpy.ndarray depth_bins: Bin edges for the depths :param bool normalisation: Choose to normalise the results such that the total contributions sum to 1.0 (True) or not (False) :param int bootstrap: Number of bootstrap samples :returns: 2D histogram of events in magnitude-depth bins ''' if len(self.data['depth']) == 0: # If depth information is missing raise ValueError('Depths missing in catalogue') if len(self.data['depthError']) == 0: self.data['depthError'] = np.zeros(self.get_number_events(), dtype=float) if len(self.data['sigmaMagnitude']) == 0: self.data['sigmaMagnitude'] = np.zeros(self.get_number_events(), dtype=float) return bootstrap_histogram_2D(self.data['magnitude'], self.data['depth'], magnitude_bins, depth_bins, boundaries=[(0., None), (None, None)], xsigma=self.data['sigmaMagnitude'], ysigma=self.data['depthError'], normalisation=normalisation, number_bootstraps=bootstrap)
python
def get_magnitude_depth_distribution(self, magnitude_bins, depth_bins, normalisation=False, bootstrap=None): ''' Returns a 2-D magnitude-depth histogram for the catalogue :param numpy.ndarray magnitude_bins: Bin edges for the magnitudes :param numpy.ndarray depth_bins: Bin edges for the depths :param bool normalisation: Choose to normalise the results such that the total contributions sum to 1.0 (True) or not (False) :param int bootstrap: Number of bootstrap samples :returns: 2D histogram of events in magnitude-depth bins ''' if len(self.data['depth']) == 0: # If depth information is missing raise ValueError('Depths missing in catalogue') if len(self.data['depthError']) == 0: self.data['depthError'] = np.zeros(self.get_number_events(), dtype=float) if len(self.data['sigmaMagnitude']) == 0: self.data['sigmaMagnitude'] = np.zeros(self.get_number_events(), dtype=float) return bootstrap_histogram_2D(self.data['magnitude'], self.data['depth'], magnitude_bins, depth_bins, boundaries=[(0., None), (None, None)], xsigma=self.data['sigmaMagnitude'], ysigma=self.data['depthError'], normalisation=normalisation, number_bootstraps=bootstrap)
[ "def", "get_magnitude_depth_distribution", "(", "self", ",", "magnitude_bins", ",", "depth_bins", ",", "normalisation", "=", "False", ",", "bootstrap", "=", "None", ")", ":", "if", "len", "(", "self", ".", "data", "[", "'depth'", "]", ")", "==", "0", ":", "# If depth information is missing", "raise", "ValueError", "(", "'Depths missing in catalogue'", ")", "if", "len", "(", "self", ".", "data", "[", "'depthError'", "]", ")", "==", "0", ":", "self", ".", "data", "[", "'depthError'", "]", "=", "np", ".", "zeros", "(", "self", ".", "get_number_events", "(", ")", ",", "dtype", "=", "float", ")", "if", "len", "(", "self", ".", "data", "[", "'sigmaMagnitude'", "]", ")", "==", "0", ":", "self", ".", "data", "[", "'sigmaMagnitude'", "]", "=", "np", ".", "zeros", "(", "self", ".", "get_number_events", "(", ")", ",", "dtype", "=", "float", ")", "return", "bootstrap_histogram_2D", "(", "self", ".", "data", "[", "'magnitude'", "]", ",", "self", ".", "data", "[", "'depth'", "]", ",", "magnitude_bins", ",", "depth_bins", ",", "boundaries", "=", "[", "(", "0.", ",", "None", ")", ",", "(", "None", ",", "None", ")", "]", ",", "xsigma", "=", "self", ".", "data", "[", "'sigmaMagnitude'", "]", ",", "ysigma", "=", "self", ".", "data", "[", "'depthError'", "]", ",", "normalisation", "=", "normalisation", ",", "number_bootstraps", "=", "bootstrap", ")" ]
Returns a 2-D magnitude-depth histogram for the catalogue :param numpy.ndarray magnitude_bins: Bin edges for the magnitudes :param numpy.ndarray depth_bins: Bin edges for the depths :param bool normalisation: Choose to normalise the results such that the total contributions sum to 1.0 (True) or not (False) :param int bootstrap: Number of bootstrap samples :returns: 2D histogram of events in magnitude-depth bins
[ "Returns", "a", "2", "-", "D", "magnitude", "-", "depth", "histogram", "for", "the", "catalogue" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L456-L497
372
gem/oq-engine
openquake/hmtk/seismicity/catalogue.py
Catalogue.get_magnitude_time_distribution
def get_magnitude_time_distribution(self, magnitude_bins, time_bins, normalisation=False, bootstrap=None): ''' Returns a 2-D histogram indicating the number of earthquakes in a set of time-magnitude bins. Time is in decimal years! :param numpy.ndarray magnitude_bins: Bin edges for the magnitudes :param numpy.ndarray time_bins: Bin edges for the times :param bool normalisation: Choose to normalise the results such that the total contributions sum to 1.0 (True) or not (False) :param int bootstrap: Number of bootstrap samples :returns: 2D histogram of events in magnitude-year bins ''' return bootstrap_histogram_2D( self.get_decimal_time(), self.data['magnitude'], time_bins, magnitude_bins, xsigma=np.zeros(self.get_number_events()), ysigma=self.data['sigmaMagnitude'], normalisation=normalisation, number_bootstraps=bootstrap)
python
def get_magnitude_time_distribution(self, magnitude_bins, time_bins, normalisation=False, bootstrap=None): ''' Returns a 2-D histogram indicating the number of earthquakes in a set of time-magnitude bins. Time is in decimal years! :param numpy.ndarray magnitude_bins: Bin edges for the magnitudes :param numpy.ndarray time_bins: Bin edges for the times :param bool normalisation: Choose to normalise the results such that the total contributions sum to 1.0 (True) or not (False) :param int bootstrap: Number of bootstrap samples :returns: 2D histogram of events in magnitude-year bins ''' return bootstrap_histogram_2D( self.get_decimal_time(), self.data['magnitude'], time_bins, magnitude_bins, xsigma=np.zeros(self.get_number_events()), ysigma=self.data['sigmaMagnitude'], normalisation=normalisation, number_bootstraps=bootstrap)
[ "def", "get_magnitude_time_distribution", "(", "self", ",", "magnitude_bins", ",", "time_bins", ",", "normalisation", "=", "False", ",", "bootstrap", "=", "None", ")", ":", "return", "bootstrap_histogram_2D", "(", "self", ".", "get_decimal_time", "(", ")", ",", "self", ".", "data", "[", "'magnitude'", "]", ",", "time_bins", ",", "magnitude_bins", ",", "xsigma", "=", "np", ".", "zeros", "(", "self", ".", "get_number_events", "(", ")", ")", ",", "ysigma", "=", "self", ".", "data", "[", "'sigmaMagnitude'", "]", ",", "normalisation", "=", "normalisation", ",", "number_bootstraps", "=", "bootstrap", ")" ]
Returns a 2-D histogram indicating the number of earthquakes in a set of time-magnitude bins. Time is in decimal years! :param numpy.ndarray magnitude_bins: Bin edges for the magnitudes :param numpy.ndarray time_bins: Bin edges for the times :param bool normalisation: Choose to normalise the results such that the total contributions sum to 1.0 (True) or not (False) :param int bootstrap: Number of bootstrap samples :returns: 2D histogram of events in magnitude-year bins
[ "Returns", "a", "2", "-", "D", "histogram", "indicating", "the", "number", "of", "earthquakes", "in", "a", "set", "of", "time", "-", "magnitude", "bins", ".", "Time", "is", "in", "decimal", "years!" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L499-L529
373
gem/oq-engine
openquake/hmtk/seismicity/catalogue.py
Catalogue.concatenate
def concatenate(self, catalogue): """ This method attaches one catalogue to the current one :parameter catalogue: An instance of :class:`hmtk.seismicity.catalogue.Catalogue` """ atts = getattr(self, 'data') attn = getattr(catalogue, 'data') data = _merge_data(atts, attn) if data is not None: setattr(self, 'data', data) for attrib in vars(self): atts = getattr(self, attrib) attn = getattr(catalogue, attrib) if attrib == 'end_year': setattr(self, attrib, max(atts, attn)) elif attrib == 'start_year': setattr(self, attrib, min(atts, attn)) elif attrib == 'data': pass elif attrib == 'number_earthquakes': setattr(self, attrib, atts + attn) elif attrib == 'processes': if atts != attn: raise ValueError('The catalogues cannot be merged' + ' since they have' + ' a different processing history') else: raise ValueError('unknown attribute: %s' % attrib) self.sort_catalogue_chronologically()
python
def concatenate(self, catalogue): """ This method attaches one catalogue to the current one :parameter catalogue: An instance of :class:`hmtk.seismicity.catalogue.Catalogue` """ atts = getattr(self, 'data') attn = getattr(catalogue, 'data') data = _merge_data(atts, attn) if data is not None: setattr(self, 'data', data) for attrib in vars(self): atts = getattr(self, attrib) attn = getattr(catalogue, attrib) if attrib == 'end_year': setattr(self, attrib, max(atts, attn)) elif attrib == 'start_year': setattr(self, attrib, min(atts, attn)) elif attrib == 'data': pass elif attrib == 'number_earthquakes': setattr(self, attrib, atts + attn) elif attrib == 'processes': if atts != attn: raise ValueError('The catalogues cannot be merged' + ' since they have' + ' a different processing history') else: raise ValueError('unknown attribute: %s' % attrib) self.sort_catalogue_chronologically()
[ "def", "concatenate", "(", "self", ",", "catalogue", ")", ":", "atts", "=", "getattr", "(", "self", ",", "'data'", ")", "attn", "=", "getattr", "(", "catalogue", ",", "'data'", ")", "data", "=", "_merge_data", "(", "atts", ",", "attn", ")", "if", "data", "is", "not", "None", ":", "setattr", "(", "self", ",", "'data'", ",", "data", ")", "for", "attrib", "in", "vars", "(", "self", ")", ":", "atts", "=", "getattr", "(", "self", ",", "attrib", ")", "attn", "=", "getattr", "(", "catalogue", ",", "attrib", ")", "if", "attrib", "is", "'end_year'", ":", "setattr", "(", "self", ",", "attrib", ",", "max", "(", "atts", ",", "attn", ")", ")", "elif", "attrib", "is", "'start_year'", ":", "setattr", "(", "self", ",", "attrib", ",", "min", "(", "atts", ",", "attn", ")", ")", "elif", "attrib", "is", "'data'", ":", "pass", "elif", "attrib", "is", "'number_earthquakes'", ":", "setattr", "(", "self", ",", "attrib", ",", "atts", "+", "attn", ")", "elif", "attrib", "is", "'processes'", ":", "if", "atts", "!=", "attn", ":", "raise", "ValueError", "(", "'The catalogues cannot be merged'", "+", "' since the they have'", "+", "' a different processing history'", ")", "else", ":", "raise", "ValueError", "(", "'unknown attribute: %s'", "%", "attrib", ")", "self", ".", "sort_catalogue_chronologically", "(", ")" ]
This method attaches one catalogue to the current one :parameter catalogue: An instance of :class:`hmtk.seismicity.catalogue.Catalogue`
[ "This", "method", "attaches", "one", "catalogue", "to", "the", "current", "one" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/catalogue.py#L531-L563
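The attribute-merge rules in the record above reduce to: take the union of the time span, sum the event counts, and refuse to merge catalogues with different processing histories. A toy illustration with plain dicts (not real Catalogue instances):

cat_a = {'start_year': 1900, 'end_year': 1980, 'processes': ['decluster']}
cat_b = {'start_year': 1950, 'end_year': 2010, 'processes': ['decluster']}
assert cat_a['processes'] == cat_b['processes'], 'different processing history'
merged = {'start_year': min(cat_a['start_year'], cat_b['start_year']),
          'end_year': max(cat_a['end_year'], cat_b['end_year'])}
print(merged)                  # {'start_year': 1900, 'end_year': 2010}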
374
gem/oq-engine
openquake/engine/engine.py
expose_outputs
def expose_outputs(dstore, owner=getpass.getuser(), status='complete'): """ Build a correspondence between the outputs in the datastore and the ones in the database. :param dstore: datastore """ oq = dstore['oqparam'] exportable = set(ekey[0] for ekey in export.export) calcmode = oq.calculation_mode dskeys = set(dstore) & exportable # exportable datastore keys dskeys.add('fullreport') rlzs = dstore['csm_info'].rlzs if len(rlzs) > 1: dskeys.add('realizations') if len(dstore['csm_info/sg_data']) > 1: # export sourcegroups.csv dskeys.add('sourcegroups') hdf5 = dstore.hdf5 if 'hcurves-stats' in hdf5 or 'hcurves-rlzs' in hdf5: if oq.hazard_stats() or oq.individual_curves or len(rlzs) == 1: dskeys.add('hcurves') if oq.uniform_hazard_spectra: dskeys.add('uhs') # export them if oq.hazard_maps: dskeys.add('hmaps') # export them if 'avg_losses-stats' in dstore or ( 'avg_losses-rlzs' in dstore and len(rlzs)): dskeys.add('avg_losses-stats') if 'curves-rlzs' in dstore and len(rlzs) == 1: dskeys.add('loss_curves-rlzs') if 'curves-stats' in dstore and len(rlzs) > 1: dskeys.add('loss_curves-stats') if oq.conditional_loss_poes: # expose loss_maps outputs if 'loss_curves-stats' in dstore: dskeys.add('loss_maps-stats') if 'all_loss_ratios' in dskeys: dskeys.remove('all_loss_ratios') # export only specific IDs if 'ruptures' in dskeys and 'scenario' in calcmode: exportable.remove('ruptures') # do not export, as requested by Vitor if 'rup_loss_table' in dskeys: # keep it hidden for the moment dskeys.remove('rup_loss_table') if 'hmaps' in dskeys and not oq.hazard_maps: dskeys.remove('hmaps') # do not export the hazard maps if logs.dbcmd('get_job', dstore.calc_id) is None: # the calculation has not been imported in the db yet logs.dbcmd('import_job', dstore.calc_id, oq.calculation_mode, oq.description + ' [parent]', owner, status, oq.hazard_calculation_id, dstore.datadir) keysize = [] for key in sorted(dskeys & exportable): try: size_mb = dstore.get_attr(key, 'nbytes') / MB except (KeyError, AttributeError): size_mb = None keysize.append((key, size_mb)) ds_size = os.path.getsize(dstore.filename) / MB logs.dbcmd('create_outputs', dstore.calc_id, keysize, ds_size)
python
def expose_outputs(dstore, owner=getpass.getuser(), status='complete'): """ Build a correspondence between the outputs in the datastore and the ones in the database. :param dstore: datastore """ oq = dstore['oqparam'] exportable = set(ekey[0] for ekey in export.export) calcmode = oq.calculation_mode dskeys = set(dstore) & exportable # exportable datastore keys dskeys.add('fullreport') rlzs = dstore['csm_info'].rlzs if len(rlzs) > 1: dskeys.add('realizations') if len(dstore['csm_info/sg_data']) > 1: # export sourcegroups.csv dskeys.add('sourcegroups') hdf5 = dstore.hdf5 if 'hcurves-stats' in hdf5 or 'hcurves-rlzs' in hdf5: if oq.hazard_stats() or oq.individual_curves or len(rlzs) == 1: dskeys.add('hcurves') if oq.uniform_hazard_spectra: dskeys.add('uhs') # export them if oq.hazard_maps: dskeys.add('hmaps') # export them if 'avg_losses-stats' in dstore or ( 'avg_losses-rlzs' in dstore and len(rlzs)): dskeys.add('avg_losses-stats') if 'curves-rlzs' in dstore and len(rlzs) == 1: dskeys.add('loss_curves-rlzs') if 'curves-stats' in dstore and len(rlzs) > 1: dskeys.add('loss_curves-stats') if oq.conditional_loss_poes: # expose loss_maps outputs if 'loss_curves-stats' in dstore: dskeys.add('loss_maps-stats') if 'all_loss_ratios' in dskeys: dskeys.remove('all_loss_ratios') # export only specific IDs if 'ruptures' in dskeys and 'scenario' in calcmode: exportable.remove('ruptures') # do not export, as requested by Vitor if 'rup_loss_table' in dskeys: # keep it hidden for the moment dskeys.remove('rup_loss_table') if 'hmaps' in dskeys and not oq.hazard_maps: dskeys.remove('hmaps') # do not export the hazard maps if logs.dbcmd('get_job', dstore.calc_id) is None: # the calculation has not been imported in the db yet logs.dbcmd('import_job', dstore.calc_id, oq.calculation_mode, oq.description + ' [parent]', owner, status, oq.hazard_calculation_id, dstore.datadir) keysize = [] for key in sorted(dskeys & exportable): try: size_mb = dstore.get_attr(key, 'nbytes') / MB except (KeyError, AttributeError): size_mb = None keysize.append((key, size_mb)) ds_size = os.path.getsize(dstore.filename) / MB logs.dbcmd('create_outputs', dstore.calc_id, keysize, ds_size)
[ "def", "expose_outputs", "(", "dstore", ",", "owner", "=", "getpass", ".", "getuser", "(", ")", ",", "status", "=", "'complete'", ")", ":", "oq", "=", "dstore", "[", "'oqparam'", "]", "exportable", "=", "set", "(", "ekey", "[", "0", "]", "for", "ekey", "in", "export", ".", "export", ")", "calcmode", "=", "oq", ".", "calculation_mode", "dskeys", "=", "set", "(", "dstore", ")", "&", "exportable", "# exportable datastore keys", "dskeys", ".", "add", "(", "'fullreport'", ")", "rlzs", "=", "dstore", "[", "'csm_info'", "]", ".", "rlzs", "if", "len", "(", "rlzs", ")", ">", "1", ":", "dskeys", ".", "add", "(", "'realizations'", ")", "if", "len", "(", "dstore", "[", "'csm_info/sg_data'", "]", ")", ">", "1", ":", "# export sourcegroups.csv", "dskeys", ".", "add", "(", "'sourcegroups'", ")", "hdf5", "=", "dstore", ".", "hdf5", "if", "'hcurves-stats'", "in", "hdf5", "or", "'hcurves-rlzs'", "in", "hdf5", ":", "if", "oq", ".", "hazard_stats", "(", ")", "or", "oq", ".", "individual_curves", "or", "len", "(", "rlzs", ")", "==", "1", ":", "dskeys", ".", "add", "(", "'hcurves'", ")", "if", "oq", ".", "uniform_hazard_spectra", ":", "dskeys", ".", "add", "(", "'uhs'", ")", "# export them", "if", "oq", ".", "hazard_maps", ":", "dskeys", ".", "add", "(", "'hmaps'", ")", "# export them", "if", "'avg_losses-stats'", "in", "dstore", "or", "(", "'avg_losses-rlzs'", "in", "dstore", "and", "len", "(", "rlzs", ")", ")", ":", "dskeys", ".", "add", "(", "'avg_losses-stats'", ")", "if", "'curves-rlzs'", "in", "dstore", "and", "len", "(", "rlzs", ")", "==", "1", ":", "dskeys", ".", "add", "(", "'loss_curves-rlzs'", ")", "if", "'curves-stats'", "in", "dstore", "and", "len", "(", "rlzs", ")", ">", "1", ":", "dskeys", ".", "add", "(", "'loss_curves-stats'", ")", "if", "oq", ".", "conditional_loss_poes", ":", "# expose loss_maps outputs", "if", "'loss_curves-stats'", "in", "dstore", ":", "dskeys", ".", "add", "(", "'loss_maps-stats'", ")", "if", "'all_loss_ratios'", "in", "dskeys", ":", "dskeys", ".", "remove", "(", "'all_loss_ratios'", ")", "# export only specific IDs", "if", "'ruptures'", "in", "dskeys", "and", "'scenario'", "in", "calcmode", ":", "exportable", ".", "remove", "(", "'ruptures'", ")", "# do not export, as requested by Vitor", "if", "'rup_loss_table'", "in", "dskeys", ":", "# keep it hidden for the moment", "dskeys", ".", "remove", "(", "'rup_loss_table'", ")", "if", "'hmaps'", "in", "dskeys", "and", "not", "oq", ".", "hazard_maps", ":", "dskeys", ".", "remove", "(", "'hmaps'", ")", "# do not export the hazard maps", "if", "logs", ".", "dbcmd", "(", "'get_job'", ",", "dstore", ".", "calc_id", ")", "is", "None", ":", "# the calculation has not been imported in the db yet", "logs", ".", "dbcmd", "(", "'import_job'", ",", "dstore", ".", "calc_id", ",", "oq", ".", "calculation_mode", ",", "oq", ".", "description", "+", "' [parent]'", ",", "owner", ",", "status", ",", "oq", ".", "hazard_calculation_id", ",", "dstore", ".", "datadir", ")", "keysize", "=", "[", "]", "for", "key", "in", "sorted", "(", "dskeys", "&", "exportable", ")", ":", "try", ":", "size_mb", "=", "dstore", ".", "get_attr", "(", "key", ",", "'nbytes'", ")", "/", "MB", "except", "(", "KeyError", ",", "AttributeError", ")", ":", "size_mb", "=", "None", "keysize", ".", "append", "(", "(", "key", ",", "size_mb", ")", ")", "ds_size", "=", "os", ".", "path", ".", "getsize", "(", "dstore", ".", "filename", ")", "/", "MB", "logs", ".", "dbcmd", "(", "'create_outputs'", ",", "dstore", ".", "calc_id", ",", "keysize", ",", "ds_size", ")" ]
Build a correspondence between the outputs in the datastore and the ones in the database. :param dstore: datastore
[ "Build", "a", "correspondence", "between", "the", "outputs", "in", "the", "datastore", "and", "the", "ones", "in", "the", "database", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/engine/engine.py#L119-L175
375
gem/oq-engine
openquake/engine/engine.py
raiseMasterKilled
def raiseMasterKilled(signum, _stack): """ When a SIGTERM is received, raise the MasterKilled exception with an appropriate error message. :param int signum: the number of the received signal :param _stack: the current frame object, ignored """ # Disable further CTRL-C to allow tasks revocation when Celery is used if OQ_DISTRIBUTE.startswith('celery'): signal.signal(signal.SIGINT, inhibitSigInt) msg = 'Received a signal %d' % signum if signum in (signal.SIGTERM, signal.SIGINT): msg = 'The openquake master process was killed manually' # kill the calculation only if os.getppid() != _PPID, i.e. the controlling # terminal died; in the workers, do nothing # NB: there is no SIGHUP on Windows if hasattr(signal, 'SIGHUP'): if signum == signal.SIGHUP: if os.getppid() == _PPID: return else: msg = 'The openquake master lost its controlling terminal' raise MasterKilled(msg)
python
def raiseMasterKilled(signum, _stack): """ When a SIGTERM is received, raise the MasterKilled exception with an appropriate error message. :param int signum: the number of the received signal :param _stack: the current frame object, ignored """ # Disable further CTRL-C to allow tasks revocation when Celery is used if OQ_DISTRIBUTE.startswith('celery'): signal.signal(signal.SIGINT, inhibitSigInt) msg = 'Received a signal %d' % signum if signum in (signal.SIGTERM, signal.SIGINT): msg = 'The openquake master process was killed manually' # kill the calculation only if os.getppid() != _PPID, i.e. the controlling # terminal died; in the workers, do nothing # NB: there is no SIGHUP on Windows if hasattr(signal, 'SIGHUP'): if signum == signal.SIGHUP: if os.getppid() == _PPID: return else: msg = 'The openquake master lost its controlling terminal' raise MasterKilled(msg)
[ "def", "raiseMasterKilled", "(", "signum", ",", "_stack", ")", ":", "# Disable further CTRL-C to allow tasks revocation when Celery is used", "if", "OQ_DISTRIBUTE", ".", "startswith", "(", "'celery'", ")", ":", "signal", ".", "signal", "(", "signal", ".", "SIGINT", ",", "inhibitSigInt", ")", "msg", "=", "'Received a signal %d'", "%", "signum", "if", "signum", "in", "(", "signal", ".", "SIGTERM", ",", "signal", ".", "SIGINT", ")", ":", "msg", "=", "'The openquake master process was killed manually'", "# kill the calculation only if os.getppid() != _PPID, i.e. the controlling", "# terminal died; in the workers, do nothing", "# NB: there is no SIGHUP on Windows", "if", "hasattr", "(", "signal", ",", "'SIGHUP'", ")", ":", "if", "signum", "==", "signal", ".", "SIGHUP", ":", "if", "os", ".", "getppid", "(", ")", "==", "_PPID", ":", "return", "else", ":", "msg", "=", "'The openquake master lost its controlling terminal'", "raise", "MasterKilled", "(", "msg", ")" ]
When a SIGTERM is received, raise the MasterKilled exception with an appropriate error message. :param int signum: the number of the received signal :param _stack: the current frame object, ignored
[ "When", "a", "SIGTERM", "is", "received", "raise", "the", "MasterKilled", "exception", "with", "an", "appropriate", "error", "message", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/engine/engine.py#L186-L212
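A handler like the one above only does its job once it is installed; a minimal sketch of the registration step (the wiring here is an assumption, not copied from the engine):

import signal

class MasterKilled(Exception):
    '''Raised when the master process receives a kill signal'''

def handler(signum, _stack):
    raise MasterKilled('Received a signal %d' % signum)

# Install the same handler for the signals the docstring mentions
signal.signal(signal.SIGTERM, handler)
signal.signal(signal.SIGINT, handler)
if hasattr(signal, 'SIGHUP'):       # there is no SIGHUP on Windows
    signal.signal(signal.SIGHUP, handler)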
376
gem/oq-engine
openquake/engine/engine.py
job_from_file
def job_from_file(job_ini, job_id, username, **kw): """ Create a full job profile from a job config file. :param job_ini: Path to a job.ini file :param job_id: ID of the created job :param username: The user who will own this job profile and all results :param kw: Extra parameters including `calculation_mode` and `exposure_file` :returns: an oqparam instance """ hc_id = kw.get('hazard_calculation_id') try: oq = readinput.get_oqparam(job_ini, hc_id=hc_id) except Exception: logs.dbcmd('finish', job_id, 'failed') raise if 'calculation_mode' in kw: oq.calculation_mode = kw.pop('calculation_mode') if 'description' in kw: oq.description = kw.pop('description') if 'exposure_file' in kw: # hack used in commands.engine fnames = kw.pop('exposure_file').split() if fnames: oq.inputs['exposure'] = fnames elif 'exposure' in oq.inputs: del oq.inputs['exposure'] logs.dbcmd('update_job', job_id, dict(calculation_mode=oq.calculation_mode, description=oq.description, user_name=username, hazard_calculation_id=hc_id)) return oq
python
def job_from_file(job_ini, job_id, username, **kw): """ Create a full job profile from a job config file. :param job_ini: Path to a job.ini file :param job_id: ID of the created job :param username: The user who will own this job profile and all results :param kw: Extra parameters including `calculation_mode` and `exposure_file` :returns: an oqparam instance """ hc_id = kw.get('hazard_calculation_id') try: oq = readinput.get_oqparam(job_ini, hc_id=hc_id) except Exception: logs.dbcmd('finish', job_id, 'failed') raise if 'calculation_mode' in kw: oq.calculation_mode = kw.pop('calculation_mode') if 'description' in kw: oq.description = kw.pop('description') if 'exposure_file' in kw: # hack used in commands.engine fnames = kw.pop('exposure_file').split() if fnames: oq.inputs['exposure'] = fnames elif 'exposure' in oq.inputs: del oq.inputs['exposure'] logs.dbcmd('update_job', job_id, dict(calculation_mode=oq.calculation_mode, description=oq.description, user_name=username, hazard_calculation_id=hc_id)) return oq
[ "def", "job_from_file", "(", "job_ini", ",", "job_id", ",", "username", ",", "*", "*", "kw", ")", ":", "hc_id", "=", "kw", ".", "get", "(", "'hazard_calculation_id'", ")", "try", ":", "oq", "=", "readinput", ".", "get_oqparam", "(", "job_ini", ",", "hc_id", "=", "hc_id", ")", "except", "Exception", ":", "logs", ".", "dbcmd", "(", "'finish'", ",", "job_id", ",", "'failed'", ")", "raise", "if", "'calculation_mode'", "in", "kw", ":", "oq", ".", "calculation_mode", "=", "kw", ".", "pop", "(", "'calculation_mode'", ")", "if", "'description'", "in", "kw", ":", "oq", ".", "description", "=", "kw", ".", "pop", "(", "'description'", ")", "if", "'exposure_file'", "in", "kw", ":", "# hack used in commands.engine", "fnames", "=", "kw", ".", "pop", "(", "'exposure_file'", ")", ".", "split", "(", ")", "if", "fnames", ":", "oq", ".", "inputs", "[", "'exposure'", "]", "=", "fnames", "elif", "'exposure'", "in", "oq", ".", "inputs", ":", "del", "oq", ".", "inputs", "[", "'exposure'", "]", "logs", ".", "dbcmd", "(", "'update_job'", ",", "job_id", ",", "dict", "(", "calculation_mode", "=", "oq", ".", "calculation_mode", ",", "description", "=", "oq", ".", "description", ",", "user_name", "=", "username", ",", "hazard_calculation_id", "=", "hc_id", ")", ")", "return", "oq" ]
Create a full job profile from a job config file. :param job_ini: Path to a job.ini file :param job_id: ID of the created job :param username: The user who will own this job profile and all results :param kw: Extra parameters including `calculation_mode` and `exposure_file` :returns: an oqparam instance
[ "Create", "a", "full", "job", "profile", "from", "a", "job", "config", "file", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/engine/engine.py#L230-L266
377
gem/oq-engine
openquake/engine/engine.py
check_obsolete_version
def check_obsolete_version(calculation_mode='WebUI'): """ Check if there is a newer version of the engine. :param calculation_mode: - the calculation mode when called from the engine - an empty string when called from the WebUI :returns: - a message if the running version of the engine is obsolete - the empty string if the engine is up to date - None if the check could not be performed (i.e. github is down) """ if os.environ.get('JENKINS_URL') or os.environ.get('TRAVIS'): # avoid flooding our API server with requests from CI systems return headers = {'User-Agent': 'OpenQuake Engine %s;%s;%s;%s' % (__version__, calculation_mode, platform.platform(), config.distribution.oq_distribute)} try: req = Request(OQ_API + '/engine/latest', headers=headers) # NB: a timeout < 1 does not work data = urlopen(req, timeout=1).read() # bytes tag_name = json.loads(decode(data))['tag_name'] current = version_triple(__version__) latest = version_triple(tag_name) except Exception: # page not available or wrong version tag return if current < latest: return ('Version %s of the engine is available, but you are ' 'still using version %s' % (tag_name, __version__)) else: return ''
python
def check_obsolete_version(calculation_mode='WebUI'): """ Check if there is a newer version of the engine. :param calculation_mode: - the calculation mode when called from the engine - an empty string when called from the WebUI :returns: - a message if the running version of the engine is obsolete - the empty string if the engine is up to date - None if the check could not be performed (i.e. github is down) """ if os.environ.get('JENKINS_URL') or os.environ.get('TRAVIS'): # avoid flooding our API server with requests from CI systems return headers = {'User-Agent': 'OpenQuake Engine %s;%s;%s;%s' % (__version__, calculation_mode, platform.platform(), config.distribution.oq_distribute)} try: req = Request(OQ_API + '/engine/latest', headers=headers) # NB: a timeout < 1 does not work data = urlopen(req, timeout=1).read() # bytes tag_name = json.loads(decode(data))['tag_name'] current = version_triple(__version__) latest = version_triple(tag_name) except Exception: # page not available or wrong version tag return if current < latest: return ('Version %s of the engine is available, but you are ' 'still using version %s' % (tag_name, __version__)) else: return ''
[ "def", "check_obsolete_version", "(", "calculation_mode", "=", "'WebUI'", ")", ":", "if", "os", ".", "environ", ".", "get", "(", "'JENKINS_URL'", ")", "or", "os", ".", "environ", ".", "get", "(", "'TRAVIS'", ")", ":", "# avoid flooding our API server with requests from CI systems", "return", "headers", "=", "{", "'User-Agent'", ":", "'OpenQuake Engine %s;%s;%s;%s'", "%", "(", "__version__", ",", "calculation_mode", ",", "platform", ".", "platform", "(", ")", ",", "config", ".", "distribution", ".", "oq_distribute", ")", "}", "try", ":", "req", "=", "Request", "(", "OQ_API", "+", "'/engine/latest'", ",", "headers", "=", "headers", ")", "# NB: a timeout < 1 does not work", "data", "=", "urlopen", "(", "req", ",", "timeout", "=", "1", ")", ".", "read", "(", ")", "# bytes", "tag_name", "=", "json", ".", "loads", "(", "decode", "(", "data", ")", ")", "[", "'tag_name'", "]", "current", "=", "version_triple", "(", "__version__", ")", "latest", "=", "version_triple", "(", "tag_name", ")", "except", "Exception", ":", "# page not available or wrong version tag", "return", "if", "current", "<", "latest", ":", "return", "(", "'Version %s of the engine is available, but you are '", "'still using version %s'", "%", "(", "tag_name", ",", "__version__", ")", ")", "else", ":", "return", "''" ]
Check if there is a newer version of the engine. :param calculation_mode: - the calculation mode when called from the engine - an empty string when called from the WebUI :returns: - a message if the running version of the engine is obsolete - the empty string if the engine is up to date - None if the check could not be performed (i.e. github is down)
[ "Check", "if", "there", "is", "a", "newer", "version", "of", "the", "engine", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/engine/engine.py#L384-L416
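The comparison hinges on version_triple, which is not shown in this record; a plausible equivalent (an assumption, the real helper may parse tags differently) makes the point that tuple comparison avoids the string-ordering trap where '3.10.0' would sort before '3.2.1':

def version_triple(tag):
    # 'v3.11.2' or '3.11.2' -> (3, 11, 2)
    return tuple(int(x) for x in tag.lstrip('v').split('.')[:3])

assert '3.10.0' < '3.2.1'                                  # lexicographic: wrong
assert version_triple('3.10.0') > version_triple('3.2.1')  # numeric: right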
378
gem/oq-engine
openquake/baselib/python3compat.py
encode
def encode(val): """ Encode a string assuming the encoding is UTF-8. :param val: a unicode or bytes object :returns: bytes """ if isinstance(val, (list, tuple)): # encode a list or tuple of strings return [encode(v) for v in val] elif isinstance(val, str): return val.encode('utf-8') else: # assume it was an already encoded object return val
python
def encode(val): """ Encode a string assuming the encoding is UTF-8. :param val: a unicode or bytes object :returns: bytes """ if isinstance(val, (list, tuple)): # encode a list or tuple of strings return [encode(v) for v in val] elif isinstance(val, str): return val.encode('utf-8') else: # assume it was an already encoded object return val
[ "def", "encode", "(", "val", ")", ":", "if", "isinstance", "(", "val", ",", "(", "list", ",", "tuple", ")", ")", ":", "# encode a list or tuple of strings", "return", "[", "encode", "(", "v", ")", "for", "v", "in", "val", "]", "elif", "isinstance", "(", "val", ",", "str", ")", ":", "return", "val", ".", "encode", "(", "'utf-8'", ")", "else", ":", "# assume it was an already encoded object", "return", "val" ]
Encode a string assuming the encoding is UTF-8. :param val: a unicode or bytes object :returns: bytes
[ "Encode", "a", "string", "assuming", "the", "encoding", "is", "UTF", "-", "8", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/python3compat.py#L28-L41
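Expected behaviour of the function above on the three input shapes it handles, assuming the module is importable from an installed engine:

from openquake.baselib.python3compat import encode

print(encode('città'))           # b'citt\xc3\xa0' -- str is UTF-8 encoded
print(encode(['a', 'b']))        # [b'a', b'b'] -- sequences are encoded element-wise
print(encode(b'bytes'))          # b'bytes' -- already-encoded input passes through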
379
gem/oq-engine
openquake/baselib/python3compat.py
raise_
def raise_(tp, value=None, tb=None): """ A function that matches the Python 2.x ``raise`` statement. This allows re-raising exceptions with the cls value and traceback on Python 2 and 3. """ if value is not None and isinstance(tp, Exception): raise TypeError("instance exception may not have a separate value") if value is not None: exc = tp(value) else: exc = tp if exc.__traceback__ is not tb: raise exc.with_traceback(tb) raise exc
python
def raise_(tp, value=None, tb=None): """ A function that matches the Python 2.x ``raise`` statement. This allows re-raising exceptions with the cls value and traceback on Python 2 and 3. """ if value is not None and isinstance(tp, Exception): raise TypeError("instance exception may not have a separate value") if value is not None: exc = tp(value) else: exc = tp if exc.__traceback__ is not tb: raise exc.with_traceback(tb) raise exc
[ "def", "raise_", "(", "tp", ",", "value", "=", "None", ",", "tb", "=", "None", ")", ":", "if", "value", "is", "not", "None", "and", "isinstance", "(", "tp", ",", "Exception", ")", ":", "raise", "TypeError", "(", "\"instance exception may not have a separate value\"", ")", "if", "value", "is", "not", "None", ":", "exc", "=", "tp", "(", "value", ")", "else", ":", "exc", "=", "tp", "if", "exc", ".", "__traceback__", "is", "not", "tb", ":", "raise", "exc", ".", "with_traceback", "(", "tb", ")", "raise", "exc" ]
A function that matches the Python 2.x ``raise`` statement. This allows re-raising exceptions with the cls value and traceback on Python 2 and 3.
[ "A", "function", "that", "matches", "the", "Python", "2", ".", "x", "raise", "statement", ".", "This", "allows", "re", "-", "raising", "exceptions", "with", "the", "cls", "value", "and", "traceback", "on", "Python", "2", "and", "3", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/python3compat.py#L70-L84
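A usage sketch: re-raise a caught exception as a different type while preserving the original traceback (note the snippet deliberately ends by raising):

import sys
from openquake.baselib.python3compat import raise_

try:
    1 / 0
except ZeroDivisionError:
    # sys.exc_info() yields the (class, value, traceback) triple
    etype, value, tb = sys.exc_info()
    raise_(RuntimeError, 'wrapped: %s' % value, tb)   # traceback points at 1 / 0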
380
gem/oq-engine
openquake/commands/plot_pyro.py
plot_pyro
def plot_pyro(calc_id=-1): """ Plot the pyroclastic cloud and the assets """ # NB: matplotlib is imported inside since it is a costly import import matplotlib.pyplot as p dstore = util.read(calc_id) sitecol = dstore['sitecol'] asset_risk = dstore['asset_risk'].value pyro, = numpy.where(dstore['multi_peril']['PYRO'] == 1) lons = sitecol.lons[pyro] lats = sitecol.lats[pyro] p.scatter(lons, lats, marker='o', color='red') building_pyro, = numpy.where(asset_risk['building-PYRO'] == 1) lons = sitecol.lons[building_pyro] lats = sitecol.lats[building_pyro] p.scatter(lons, lats, marker='.', color='green') p.show()
python
def plot_pyro(calc_id=-1): """ Plot the pyroclastic cloud and the assets """ # NB: matplotlib is imported inside since it is a costly import import matplotlib.pyplot as p dstore = util.read(calc_id) sitecol = dstore['sitecol'] asset_risk = dstore['asset_risk'].value pyro, = numpy.where(dstore['multi_peril']['PYRO'] == 1) lons = sitecol.lons[pyro] lats = sitecol.lats[pyro] p.scatter(lons, lats, marker='o', color='red') building_pyro, = numpy.where(asset_risk['building-PYRO'] == 1) lons = sitecol.lons[building_pyro] lats = sitecol.lats[building_pyro] p.scatter(lons, lats, marker='.', color='green') p.show()
[ "def", "plot_pyro", "(", "calc_id", "=", "-", "1", ")", ":", "# NB: matplotlib is imported inside since it is a costly import", "import", "matplotlib", ".", "pyplot", "as", "p", "dstore", "=", "util", ".", "read", "(", "calc_id", ")", "sitecol", "=", "dstore", "[", "'sitecol'", "]", "asset_risk", "=", "dstore", "[", "'asset_risk'", "]", ".", "value", "pyro", ",", "=", "numpy", ".", "where", "(", "dstore", "[", "'multi_peril'", "]", "[", "'PYRO'", "]", "==", "1", ")", "lons", "=", "sitecol", ".", "lons", "[", "pyro", "]", "lats", "=", "sitecol", ".", "lats", "[", "pyro", "]", "p", ".", "scatter", "(", "lons", ",", "lats", ",", "marker", "=", "'o'", ",", "color", "=", "'red'", ")", "building_pyro", ",", "=", "numpy", ".", "where", "(", "asset_risk", "[", "'building-PYRO'", "]", "==", "1", ")", "lons", "=", "sitecol", ".", "lons", "[", "building_pyro", "]", "lats", "=", "sitecol", ".", "lats", "[", "building_pyro", "]", "p", ".", "scatter", "(", "lons", ",", "lats", ",", "marker", "=", "'.'", ",", "color", "=", "'green'", ")", "p", ".", "show", "(", ")" ]
Plot the pyroclastic cloud and the assets
[ "Plot", "the", "pyroclastic", "cloud", "and", "the", "assets" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/plot_pyro.py#L24-L42
381
gem/oq-engine
openquake/hazardlib/geo/polygon.py
get_resampled_coordinates
def get_resampled_coordinates(lons, lats): """ Resample polygon line segments and return the coordinates of the new vertices. This limits distortions when projecting a polygon onto a spherical surface. Parameters define longitudes and latitudes of a point collection in the form of lists or numpy arrays. :return: A tuple of two numpy arrays: longitudes and latitudes of resampled vertices. """ num_coords = len(lons) assert num_coords == len(lats) lons1 = numpy.array(lons) lats1 = numpy.array(lats) lons2 = numpy.concatenate((lons1[1:], lons1[:1])) lats2 = numpy.concatenate((lats1[1:], lats1[:1])) distances = geodetic.geodetic_distance(lons1, lats1, lons2, lats2) resampled_lons = [lons[0]] resampled_lats = [lats[0]] for i in range(num_coords): next_point = (i + 1) % num_coords lon1, lat1 = lons[i], lats[i] lon2, lat2 = lons[next_point], lats[next_point] distance = distances[i] num_points = int(distance / UPSAMPLING_STEP_KM) + 1 if num_points >= 2: # We need to increase the resolution of this arc by adding new # points. new_lons, new_lats, _ = geodetic.npoints_between( lon1, lat1, 0, lon2, lat2, 0, num_points) resampled_lons.extend(new_lons[1:]) resampled_lats.extend(new_lats[1:]) else: resampled_lons.append(lon2) resampled_lats.append(lat2) # NB: we cut off the last point because it repeats the first one return numpy.array(resampled_lons[:-1]), numpy.array(resampled_lats[:-1])
python
def get_resampled_coordinates(lons, lats): """ Resample polygon line segments and return the coordinates of the new vertices. This limits distortions when projecting a polygon onto a spherical surface. Parameters define longitudes and latitudes of a point collection in the form of lists or numpy arrays. :return: A tuple of two numpy arrays: longitudes and latitudes of resampled vertices. """ num_coords = len(lons) assert num_coords == len(lats) lons1 = numpy.array(lons) lats1 = numpy.array(lats) lons2 = numpy.concatenate((lons1[1:], lons1[:1])) lats2 = numpy.concatenate((lats1[1:], lats1[:1])) distances = geodetic.geodetic_distance(lons1, lats1, lons2, lats2) resampled_lons = [lons[0]] resampled_lats = [lats[0]] for i in range(num_coords): next_point = (i + 1) % num_coords lon1, lat1 = lons[i], lats[i] lon2, lat2 = lons[next_point], lats[next_point] distance = distances[i] num_points = int(distance / UPSAMPLING_STEP_KM) + 1 if num_points >= 2: # We need to increase the resolution of this arc by adding new # points. new_lons, new_lats, _ = geodetic.npoints_between( lon1, lat1, 0, lon2, lat2, 0, num_points) resampled_lons.extend(new_lons[1:]) resampled_lats.extend(new_lats[1:]) else: resampled_lons.append(lon2) resampled_lats.append(lat2) # NB: we cut off the last point because it repeats the first one return numpy.array(resampled_lons[:-1]), numpy.array(resampled_lats[:-1])
[ "def", "get_resampled_coordinates", "(", "lons", ",", "lats", ")", ":", "num_coords", "=", "len", "(", "lons", ")", "assert", "num_coords", "==", "len", "(", "lats", ")", "lons1", "=", "numpy", ".", "array", "(", "lons", ")", "lats1", "=", "numpy", ".", "array", "(", "lats", ")", "lons2", "=", "numpy", ".", "concatenate", "(", "(", "lons1", "[", "1", ":", "]", ",", "lons1", "[", ":", "1", "]", ")", ")", "lats2", "=", "numpy", ".", "concatenate", "(", "(", "lats1", "[", "1", ":", "]", ",", "lats1", "[", ":", "1", "]", ")", ")", "distances", "=", "geodetic", ".", "geodetic_distance", "(", "lons1", ",", "lats1", ",", "lons2", ",", "lats2", ")", "resampled_lons", "=", "[", "lons", "[", "0", "]", "]", "resampled_lats", "=", "[", "lats", "[", "0", "]", "]", "for", "i", "in", "range", "(", "num_coords", ")", ":", "next_point", "=", "(", "i", "+", "1", ")", "%", "num_coords", "lon1", ",", "lat1", "=", "lons", "[", "i", "]", ",", "lats", "[", "i", "]", "lon2", ",", "lat2", "=", "lons", "[", "next_point", "]", ",", "lats", "[", "next_point", "]", "distance", "=", "distances", "[", "i", "]", "num_points", "=", "int", "(", "distance", "/", "UPSAMPLING_STEP_KM", ")", "+", "1", "if", "num_points", ">=", "2", ":", "# We need to increase the resolution of this arc by adding new", "# points.", "new_lons", ",", "new_lats", ",", "_", "=", "geodetic", ".", "npoints_between", "(", "lon1", ",", "lat1", ",", "0", ",", "lon2", ",", "lat2", ",", "0", ",", "num_points", ")", "resampled_lons", ".", "extend", "(", "new_lons", "[", "1", ":", "]", ")", "resampled_lats", ".", "extend", "(", "new_lats", "[", "1", ":", "]", ")", "else", ":", "resampled_lons", ".", "append", "(", "lon2", ")", "resampled_lats", ".", "append", "(", "lat2", ")", "# NB: we cut off the last point because it repeats the first one", "return", "numpy", ".", "array", "(", "resampled_lons", "[", ":", "-", "1", "]", ")", ",", "numpy", ".", "array", "(", "resampled_lats", "[", ":", "-", "1", "]", ")" ]
Resample polygon line segments and return the coordinates of the new vertices. This limits distortions when projecting a polygon onto a spherical surface. Parameters define longitudes and latitudes of a point collection in the form of lists or numpy arrays. :return: A tuple of two numpy arrays: longitudes and latitudes of resampled vertices.
[ "Resample", "polygon", "line", "segments", "and", "return", "the", "coordinates", "of", "the", "new", "vertices", ".", "This", "limits", "distortions", "when", "projecting", "a", "polygon", "onto", "a", "spherical", "surface", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/polygon.py#L249-L291
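The resampling density is governed by the distance / UPSAMPLING_STEP_KM ratio; a short sketch of how the point count grows with arc length (the 100 km step is an assumption about the module constant, not copied from the source):

UPSAMPLING_STEP_KM = 100.                    # assumed value of the module constant
for distance in (50., 150., 520.):
    num_points = int(distance / UPSAMPLING_STEP_KM) + 1
    print(distance, num_points)              # 50.0 1 / 150.0 2 / 520.0 6
# num_points < 2 means the segment is short enough to be kept as-is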
382
gem/oq-engine
openquake/hazardlib/geo/surface/gridded.py
GriddedSurface.get_middle_point
def get_middle_point(self): """ Compute coordinates of surface middle point. The actual definition of ``middle point`` depends on the type of surface geometry. :return: instance of :class:`openquake.hazardlib.geo.point.Point` representing surface middle point. """ lons = self.mesh.lons.squeeze() lats = self.mesh.lats.squeeze() depths = self.mesh.depths.squeeze() lon_bar = lons.mean() lat_bar = lats.mean() idx = np.argmin((lons - lon_bar)**2 + (lats - lat_bar)**2) return Point(lons[idx], lats[idx], depths[idx])
python
def get_middle_point(self): """ Compute coordinates of surface middle point. The actual definition of ``middle point`` depends on the type of surface geometry. :return: instance of :class:`openquake.hazardlib.geo.point.Point` representing surface middle point. """ lons = self.mesh.lons.squeeze() lats = self.mesh.lats.squeeze() depths = self.mesh.depths.squeeze() lon_bar = lons.mean() lat_bar = lats.mean() idx = np.argmin((lons - lon_bar)**2 + (lats - lat_bar)**2) return Point(lons[idx], lats[idx], depths[idx])
[ "def", "get_middle_point", "(", "self", ")", ":", "lons", "=", "self", ".", "mesh", ".", "lons", ".", "squeeze", "(", ")", "lats", "=", "self", ".", "mesh", ".", "lats", ".", "squeeze", "(", ")", "depths", "=", "self", ".", "mesh", ".", "depths", ".", "squeeze", "(", ")", "lon_bar", "=", "lons", ".", "mean", "(", ")", "lat_bar", "=", "lats", ".", "mean", "(", ")", "idx", "=", "np", ".", "argmin", "(", "(", "lons", "-", "lon_bar", ")", "**", "2", "+", "(", "lats", "-", "lat_bar", ")", "**", "2", ")", "return", "Point", "(", "lons", "[", "idx", "]", ",", "lats", "[", "idx", "]", ",", "depths", "[", "idx", "]", ")" ]
Compute coordinates of surface middle point. The actual definition of ``middle point`` depends on the type of surface geometry. :return: instance of :class:`openquake.hazardlib.geo.point.Point` representing surface middle point.
[ "Compute", "coordinates", "of", "surface", "middle", "point", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/gridded.py#L164-L181
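In other words, the middle point is the existing mesh node nearest to the centroid in a simple planar lon/lat metric; a toy mesh makes the selection explicit:

import numpy as np

lons = np.array([0.0, 1.0, 2.0, 3.0])
lats = np.array([0.0, 0.5, 1.0, 1.5])
idx = np.argmin((lons - lons.mean())**2 + (lats - lats.mean())**2)
print(lons[idx], lats[idx])      # 1.0 0.5 -- ties resolve to the first minimum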
383
gem/oq-engine
openquake/hazardlib/mfd/base.py
BaseMFD.modify
def modify(self, modification, parameters): """ Apply a single modification to the MFD parameters. Reflects the modification method and calls it passing ``parameters`` as keyword arguments. See also :attr:`MODIFICATIONS`. Modifications can be applied one on top of another. The logic of stacking modifications is up to a specific MFD implementation. :param modification: String name representing the type of modification. :param parameters: Dictionary of parameters needed for modification. :raises ValueError: If ``modification`` is missing from :attr:`MODIFICATIONS`. """ if modification not in self.MODIFICATIONS: raise ValueError('Modification %s is not supported by %s' % (modification, type(self).__name__)) meth = getattr(self, 'modify_%s' % modification) meth(**parameters) self.check_constraints()
python
def modify(self, modification, parameters): """ Apply a single modification to the MFD parameters. Reflects the modification method and calls it passing ``parameters`` as keyword arguments. See also :attr:`MODIFICATIONS`. Modifications can be applied one on top of another. The logic of stacking modifications is up to a specific MFD implementation. :param modification: String name representing the type of modification. :param parameters: Dictionary of parameters needed for modification. :raises ValueError: If ``modification`` is missing from :attr:`MODIFICATIONS`. """ if modification not in self.MODIFICATIONS: raise ValueError('Modification %s is not supported by %s' % (modification, type(self).__name__)) meth = getattr(self, 'modify_%s' % modification) meth(**parameters) self.check_constraints()
[ "def", "modify", "(", "self", ",", "modification", ",", "parameters", ")", ":", "if", "modification", "not", "in", "self", ".", "MODIFICATIONS", ":", "raise", "ValueError", "(", "'Modification %s is not supported by %s'", "%", "(", "modification", ",", "type", "(", "self", ")", ".", "__name__", ")", ")", "meth", "=", "getattr", "(", "self", ",", "'modify_%s'", "%", "modification", ")", "meth", "(", "*", "*", "parameters", ")", "self", ".", "check_constraints", "(", ")" ]
Apply a single modification to the MFD parameters. Reflects the modification method and calls it passing ``parameters`` as keyword arguments. See also :attr:`MODIFICATIONS`. Modifications can be applied one on top of another. The logic of stacking modifications is up to a specific MFD implementation. :param modification: String name representing the type of modification. :param parameters: Dictionary of parameters needed for modification. :raises ValueError: If ``modification`` is missing from :attr:`MODIFICATIONS`.
[ "Apply", "a", "single", "modification", "to", "an", "MFD", "parameters", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/mfd/base.py#L34-L56
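A self-contained sketch of the same dispatch pattern outside hazardlib: ``modify`` resolves ``modify_<name>`` by reflection and re-validates afterwards. The ``ToyMFD`` class and its values are hypothetical, not part of the library:

class ToyMFD:
    # hypothetical stand-in mirroring BaseMFD's reflection-based dispatch
    MODIFICATIONS = {'set_ab'}

    def __init__(self, a_val, b_val):
        self.a_val, self.b_val = a_val, b_val

    def modify_set_ab(self, a_val, b_val):
        self.a_val, self.b_val = a_val, b_val

    def check_constraints(self):
        assert self.b_val > 0, 'b-value must be positive'

    def modify(self, modification, parameters):
        if modification not in self.MODIFICATIONS:
            raise ValueError('Modification %s is not supported by %s' %
                             (modification, type(self).__name__))
        getattr(self, 'modify_%s' % modification)(**parameters)
        self.check_constraints()

mfd = ToyMFD(a_val=4.0, b_val=1.0)
mfd.modify('set_ab', {'a_val': 4.5, 'b_val': 0.9})  # ok, constraints pass
# mfd.modify('set_ab', {'a_val': 4.5, 'b_val': -1}) would raise AssertionError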
384
gem/oq-engine
openquake/hazardlib/gsim/travasarou_2003.py
TravasarouEtAl2003._get_stddevs
def _get_stddevs(self, rup, arias, stddev_types, sites):
    """
    Return standard deviations as defined in table 1, p. 200.
    """
    stddevs = []
    # Magnitude dependent inter-event term (Eq. 13)
    if rup.mag < 4.7:
        tau = 0.611
    elif rup.mag > 7.6:
        tau = 0.475
    else:
        tau = 0.611 - 0.047 * (rup.mag - 4.7)
    # Retrieve site-class dependent sigma
    sigma1, sigma2 = self._get_intra_event_sigmas(sites)
    sigma = np.copy(sigma1)
    # Implements the nonlinear intra-event sigma (Eq. 14)
    idx = arias >= 0.125
    sigma[idx] = sigma2[idx]
    idx = np.logical_and(arias > 0.013, arias < 0.125)
    sigma[idx] = sigma1[idx] - 0.106 * (np.log(arias[idx]) -
                                        np.log(0.0132))
    sigma_total = np.sqrt(tau ** 2. + sigma ** 2.)

    for stddev_type in stddev_types:
        assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
        if stddev_type == const.StdDev.TOTAL:
            stddevs.append(sigma_total)
        elif stddev_type == const.StdDev.INTRA_EVENT:
            stddevs.append(sigma)
        elif stddev_type == const.StdDev.INTER_EVENT:
            stddevs.append(tau * np.ones_like(sites.vs30))
    return stddevs
python
def _get_stddevs(self, rup, arias, stddev_types, sites):
    """
    Return standard deviations as defined in table 1, p. 200.
    """
    stddevs = []
    # Magnitude dependent inter-event term (Eq. 13)
    if rup.mag < 4.7:
        tau = 0.611
    elif rup.mag > 7.6:
        tau = 0.475
    else:
        tau = 0.611 - 0.047 * (rup.mag - 4.7)
    # Retrieve site-class dependent sigma
    sigma1, sigma2 = self._get_intra_event_sigmas(sites)
    sigma = np.copy(sigma1)
    # Implements the nonlinear intra-event sigma (Eq. 14)
    idx = arias >= 0.125
    sigma[idx] = sigma2[idx]
    idx = np.logical_and(arias > 0.013, arias < 0.125)
    sigma[idx] = sigma1[idx] - 0.106 * (np.log(arias[idx]) -
                                        np.log(0.0132))
    sigma_total = np.sqrt(tau ** 2. + sigma ** 2.)

    for stddev_type in stddev_types:
        assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
        if stddev_type == const.StdDev.TOTAL:
            stddevs.append(sigma_total)
        elif stddev_type == const.StdDev.INTRA_EVENT:
            stddevs.append(sigma)
        elif stddev_type == const.StdDev.INTER_EVENT:
            stddevs.append(tau * np.ones_like(sites.vs30))
    return stddevs
[ "def", "_get_stddevs", "(", "self", ",", "rup", ",", "arias", ",", "stddev_types", ",", "sites", ")", ":", "stddevs", "=", "[", "]", "# Magnitude dependent inter-event term (Eq. 13)", "if", "rup", ".", "mag", "<", "4.7", ":", "tau", "=", "0.611", "elif", "rup", ".", "mag", ">", "7.6", ":", "tau", "=", "0.475", "else", ":", "tau", "=", "0.611", "-", "0.047", "*", "(", "rup", ".", "mag", "-", "4.7", ")", "# Retrieve site-class dependent sigma", "sigma1", ",", "sigma2", "=", "self", ".", "_get_intra_event_sigmas", "(", "sites", ")", "sigma", "=", "np", ".", "copy", "(", "sigma1", ")", "# Implements the nonlinear intra-event sigma (Eq. 14)", "idx", "=", "arias", ">=", "0.125", "sigma", "[", "idx", "]", "=", "sigma2", "[", "idx", "]", "idx", "=", "np", ".", "logical_and", "(", "arias", ">", "0.013", ",", "arias", "<", "0.125", ")", "sigma", "[", "idx", "]", "=", "sigma1", "[", "idx", "]", "-", "0.106", "*", "(", "np", ".", "log", "(", "arias", "[", "idx", "]", ")", "-", "np", ".", "log", "(", "0.0132", ")", ")", "sigma_total", "=", "np", ".", "sqrt", "(", "tau", "**", "2.", "+", "sigma", "**", "2.", ")", "for", "stddev_type", "in", "stddev_types", ":", "assert", "stddev_type", "in", "self", ".", "DEFINED_FOR_STANDARD_DEVIATION_TYPES", "if", "stddev_type", "==", "const", ".", "StdDev", ".", "TOTAL", ":", "stddevs", ".", "append", "(", "sigma_total", ")", "elif", "stddev_type", "==", "const", ".", "StdDev", ".", "INTRA_EVENT", ":", "stddevs", ".", "append", "(", "sigma", ")", "elif", "stddev_type", "==", "const", ".", "StdDev", ".", "INTER_EVENT", ":", "stddevs", ".", "append", "(", "tau", "*", "np", ".", "ones_like", "(", "sites", ".", "vs30", ")", ")", "return", "stddevs" ]
Return standard deviations as defined in table 1, p. 200.
[ "Return", "standard", "deviations", "as", "defined", "in", "table", "1", "p", ".", "200", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/travasarou_2003.py#L95-L128
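A standalone numpy sketch of the Eq. 13/14 logic implemented above, with hypothetical sigma and Arias-intensity values; it shows the magnitude-dependent tau and the three-branch nonlinear sigma:

import numpy as np

mag = 6.0
# inter-event term, Eq. 13: constant outside [4.7, 7.6], linear inside
if mag < 4.7:
    tau = 0.611
elif mag > 7.6:
    tau = 0.475
else:
    tau = 0.611 - 0.047 * (mag - 4.7)  # 0.5499 for mag = 6.0

# hypothetical site sigmas and predicted Arias intensities
sigma1 = np.array([1.18, 1.18, 1.18])
sigma2 = np.array([0.94, 0.94, 0.94])
arias = np.array([0.005, 0.05, 0.5])

sigma = sigma1.copy()
sigma[arias >= 0.125] = sigma2[arias >= 0.125]       # high-motion branch
mid = (arias > 0.013) & (arias < 0.125)              # transition branch
sigma[mid] = sigma1[mid] - 0.106 * (np.log(arias[mid]) - np.log(0.0132))
sigma_total = np.sqrt(tau ** 2 + sigma ** 2)
print(sigma, sigma_total)  # sigma ~ [1.18 1.039 0.94]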
385
gem/oq-engine
openquake/hazardlib/gsim/travasarou_2003.py
TravasarouEtAl2003._get_intra_event_sigmas
def _get_intra_event_sigmas(self, sites):
    """
    The intra-event term is nonlinear and dependent on both the site
    class and the expected ground motion. In this case the sigma
    coefficients are determined from the site class as described
    below Eq. 14
    """
    sigma1 = 1.18 * np.ones_like(sites.vs30)
    sigma2 = 0.94 * np.ones_like(sites.vs30)
    idx1 = np.logical_and(sites.vs30 >= 360.0, sites.vs30 < 760.0)
    idx2 = sites.vs30 < 360.0
    sigma1[idx1] = 1.17
    sigma2[idx1] = 0.93
    sigma1[idx2] = 0.96
    sigma2[idx2] = 0.73
    return sigma1, sigma2
python
def _get_intra_event_sigmas(self, sites):
    """
    The intra-event term is nonlinear and dependent on both the site
    class and the expected ground motion. In this case the sigma
    coefficients are determined from the site class as described
    below Eq. 14
    """
    sigma1 = 1.18 * np.ones_like(sites.vs30)
    sigma2 = 0.94 * np.ones_like(sites.vs30)
    idx1 = np.logical_and(sites.vs30 >= 360.0, sites.vs30 < 760.0)
    idx2 = sites.vs30 < 360.0
    sigma1[idx1] = 1.17
    sigma2[idx1] = 0.93
    sigma1[idx2] = 0.96
    sigma2[idx2] = 0.73
    return sigma1, sigma2
[ "def", "_get_intra_event_sigmas", "(", "self", ",", "sites", ")", ":", "sigma1", "=", "1.18", "*", "np", ".", "ones_like", "(", "sites", ".", "vs30", ")", "sigma2", "=", "0.94", "*", "np", ".", "ones_like", "(", "sites", ".", "vs30", ")", "idx1", "=", "np", ".", "logical_and", "(", "sites", ".", "vs30", ">=", "360.0", ",", "sites", ".", "vs30", "<", "760.0", ")", "idx2", "=", "sites", ".", "vs30", "<", "360.0", "sigma1", "[", "idx1", "]", "=", "1.17", "sigma2", "[", "idx1", "]", "=", "0.93", "sigma1", "[", "idx2", "]", "=", "0.96", "sigma2", "[", "idx2", "]", "=", "0.73", "return", "sigma1", ",", "sigma2" ]
The intra-event term is nonlinear and dependent on both the site
class and the expected ground motion. In this case the sigma
coefficients are determined from the site class as described
below Eq. 14
[ "The", "intra", "-", "event", "term", "nonlinear", "and", "dependent", "on", "both", "the", "site", "class", "and", "the", "expected", "ground", "motion", ".", "In", "this", "case", "the", "sigma", "coefficients", "are", "determined", "from", "the", "site", "class", "as", "described", "below", "Eq", ".", "14" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/travasarou_2003.py#L130-L145
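A worked instance of the site-class branching above, with hypothetical vs30 values covering the three classes:

import numpy as np

vs30 = np.array([200.0, 500.0, 800.0])  # soft soil, stiff soil, rock
sigma1 = 1.18 * np.ones_like(vs30)
sigma2 = 0.94 * np.ones_like(vs30)
stiff = (vs30 >= 360.0) & (vs30 < 760.0)
soft = vs30 < 360.0
sigma1[stiff], sigma2[stiff] = 1.17, 0.93
sigma1[soft], sigma2[soft] = 0.96, 0.73
print(sigma1)  # [0.96 1.17 1.18]
print(sigma2)  # [0.73 0.93 0.94]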
386
gem/oq-engine
openquake/hazardlib/gsim/boore_2014.py
BooreEtAl2014._get_pga_on_rock
def _get_pga_on_rock(self, C, rup, dists):
    """
    Returns the median PGA on rock, which is a sum of the magnitude
    and distance scaling
    """
    return np.exp(self._get_magnitude_scaling_term(C, rup) +
                  self._get_path_scaling(C, dists, rup.mag))
python
def _get_pga_on_rock(self, C, rup, dists):
    """
    Returns the median PGA on rock, which is a sum of the magnitude
    and distance scaling
    """
    return np.exp(self._get_magnitude_scaling_term(C, rup) +
                  self._get_path_scaling(C, dists, rup.mag))
[ "def", "_get_pga_on_rock", "(", "self", ",", "C", ",", "rup", ",", "dists", ")", ":", "return", "np", ".", "exp", "(", "self", ".", "_get_magnitude_scaling_term", "(", "C", ",", "rup", ")", "+", "self", ".", "_get_path_scaling", "(", "C", ",", "dists", ",", "rup", ".", "mag", ")", ")" ]
Returns the median PGA on rock, which is a sum of the magnitude and distance scaling
[ "Returns", "the", "median", "PGA", "on", "rock", "which", "is", "a", "sum", "of", "the", "magnitude", "and", "distance", "scaling" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/boore_2014.py#L103-L109
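The two terms are in natural-log space, so the median is the exponential of their sum. A tiny sketch with hypothetical term values:

import numpy as np

# hypothetical log-space terms: F_M (magnitude) and F_P (path, per site)
f_mag = 0.85
f_path = np.array([-1.2, -2.4])
median_pga_rock = np.exp(f_mag + f_path)
print(median_pga_rock)  # ~[0.705 0.212]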
387
gem/oq-engine
openquake/hazardlib/mfd/multi_mfd.py
MultiMFD.modify
def modify(self, modification, parameters):
    """
    Apply a modification to the underlying point sources, with the
    same parameters for all sources
    """
    for src in self:
        src.modify(modification, parameters)
python
def modify(self, modification, parameters):
    """
    Apply a modification to the underlying point sources, with the
    same parameters for all sources
    """
    for src in self:
        src.modify(modification, parameters)
[ "def", "modify", "(", "self", ",", "modification", ",", "parameters", ")", ":", "for", "src", "in", "self", ":", "src", ".", "modify", "(", "modification", ",", "parameters", ")" ]
Apply a modification to the underlying point sources, with the same parameters for all sources
[ "Apply", "a", "modification", "to", "the", "underlying", "point", "sources", "with", "the", "same", "parameters", "for", "all", "sources" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/mfd/multi_mfd.py#L143-L149
388
gem/oq-engine
openquake/hazardlib/gsim/geomatrix_1993.py
Geomatrix1993SSlabNSHMP2008._compute_mean
def _compute_mean(self, C, mag, ztor, rrup):
    """
    Compute mean value as in ``subroutine getGeom`` in
    ``hazgridXnga2.f``
    """
    gc0 = 0.2418
    ci = 0.3846
    gch = 0.00607
    g4 = 1.7818
    ge = 0.554
    gm = 1.414

    mean = (
        gc0 + ci + ztor * gch + C['gc1'] +
        gm * mag + C['gc2'] * (10 - mag) ** 3 +
        C['gc3'] * np.log(rrup + g4 * np.exp(ge * mag))
    )

    return mean
python
def _compute_mean(self, C, mag, ztor, rrup):
    """
    Compute mean value as in ``subroutine getGeom`` in
    ``hazgridXnga2.f``
    """
    gc0 = 0.2418
    ci = 0.3846
    gch = 0.00607
    g4 = 1.7818
    ge = 0.554
    gm = 1.414

    mean = (
        gc0 + ci + ztor * gch + C['gc1'] +
        gm * mag + C['gc2'] * (10 - mag) ** 3 +
        C['gc3'] * np.log(rrup + g4 * np.exp(ge * mag))
    )

    return mean
[ "def", "_compute_mean", "(", "self", ",", "C", ",", "mag", ",", "ztor", ",", "rrup", ")", ":", "gc0", "=", "0.2418", "ci", "=", "0.3846", "gch", "=", "0.00607", "g4", "=", "1.7818", "ge", "=", "0.554", "gm", "=", "1.414", "mean", "=", "(", "gc0", "+", "ci", "+", "ztor", "*", "gch", "+", "C", "[", "'gc1'", "]", "+", "gm", "*", "mag", "+", "C", "[", "'gc2'", "]", "*", "(", "10", "-", "mag", ")", "**", "3", "+", "C", "[", "'gc3'", "]", "*", "np", ".", "log", "(", "rrup", "+", "g4", "*", "np", ".", "exp", "(", "ge", "*", "mag", ")", ")", ")", "return", "mean" ]
Compute mean value as in ``subroutine getGeom`` in ``hazgridXnga2.f``
[ "Compute", "mean", "value", "as", "in", "subroutine", "getGeom", "in", "hazgridXnga2", ".", "f" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/geomatrix_1993.py#L92-L109
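A worked numeric evaluation of the expression above. The gc1-gc3 coefficients below are hypothetical placeholders, not the published values from the COEFFS table:

import numpy as np

mag, ztor, rrup = 7.0, 40.0, np.array([50.0, 100.0])
gc0, ci, gch, g4, ge, gm = 0.2418, 0.3846, 0.00607, 1.7818, 0.554, 1.414
C = {'gc1': 0.0, 'gc2': 0.0, 'gc3': -2.556}  # hypothetical coefficients

mean = (gc0 + ci + ztor * gch + C['gc1'] + gm * mag +
        C['gc2'] * (10 - mag) ** 3 +
        C['gc3'] * np.log(rrup + g4 * np.exp(ge * mag)))
print(np.exp(mean))  # the mean is in log space, so exponentiate for the median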
389
gem/oq-engine
openquake/commands/abort.py
abort
def abort(job_id):
    """
    Abort the given job
    """
    job = logs.dbcmd('get_job', job_id)  # job_id can be negative
    if job is None:
        print('There is no job %d' % job_id)
        return
    elif job.status not in ('executing', 'running'):
        print('Job %d is %s' % (job.id, job.status))
        return
    name = 'oq-job-%d' % job.id
    for p in psutil.process_iter():
        if p.name() == name:
            try:
                os.kill(p.pid, signal.SIGTERM)
                logs.dbcmd('set_status', job.id, 'aborted')
                print('Job %d aborted' % job.id)
            except Exception as exc:
                print(exc)
            break
    else:  # no break
        # set job as failed if it is set as 'executing' or 'running' in the db
        # but the corresponding process is not running anymore
        logs.dbcmd('set_status', job.id, 'failed')
        print('Unable to find a process for job %d,'
              ' setting it as failed' % job.id)
python
def abort(job_id):
    """
    Abort the given job
    """
    job = logs.dbcmd('get_job', job_id)  # job_id can be negative
    if job is None:
        print('There is no job %d' % job_id)
        return
    elif job.status not in ('executing', 'running'):
        print('Job %d is %s' % (job.id, job.status))
        return
    name = 'oq-job-%d' % job.id
    for p in psutil.process_iter():
        if p.name() == name:
            try:
                os.kill(p.pid, signal.SIGTERM)
                logs.dbcmd('set_status', job.id, 'aborted')
                print('Job %d aborted' % job.id)
            except Exception as exc:
                print(exc)
            break
    else:  # no break
        # set job as failed if it is set as 'executing' or 'running' in the db
        # but the corresponding process is not running anymore
        logs.dbcmd('set_status', job.id, 'failed')
        print('Unable to find a process for job %d,'
              ' setting it as failed' % job.id)
[ "def", "abort", "(", "job_id", ")", ":", "job", "=", "logs", ".", "dbcmd", "(", "'get_job'", ",", "job_id", ")", "# job_id can be negative", "if", "job", "is", "None", ":", "print", "(", "'There is no job %d'", "%", "job_id", ")", "return", "elif", "job", ".", "status", "not", "in", "(", "'executing'", ",", "'running'", ")", ":", "print", "(", "'Job %d is %s'", "%", "(", "job", ".", "id", ",", "job", ".", "status", ")", ")", "return", "name", "=", "'oq-job-%d'", "%", "job", ".", "id", "for", "p", "in", "psutil", ".", "process_iter", "(", ")", ":", "if", "p", ".", "name", "(", ")", "==", "name", ":", "try", ":", "os", ".", "kill", "(", "p", ".", "pid", ",", "signal", ".", "SIGTERM", ")", "logs", ".", "dbcmd", "(", "'set_status'", ",", "job", ".", "id", ",", "'aborted'", ")", "print", "(", "'Job %d aborted'", "%", "job", ".", "id", ")", "except", "Exception", "as", "exc", ":", "print", "(", "exc", ")", "break", "else", ":", "# no break", "# set job as failed if it is set as 'executing' or 'running' in the db", "# but the corresponding process is not running anymore", "logs", ".", "dbcmd", "(", "'set_status'", ",", "job", ".", "id", ",", "'failed'", ")", "print", "(", "'Unable to find a process for job %d,'", "' setting it as failed'", "%", "job", ".", "id", ")" ]
Abort the given job
[ "Abort", "the", "given", "job" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/abort.py#L27-L53
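The core pattern here, scanning for a process by name and terminating it, works standalone with psutil. A minimal sketch; the job name is hypothetical and the database bookkeeping is omitted:

import os
import signal
import psutil

def kill_by_name(name):
    # terminate the first process whose name matches, as abort() does
    for p in psutil.process_iter():
        if p.name() == name:
            os.kill(p.pid, signal.SIGTERM)
            return p.pid
    return None  # the for/else branch in abort() covers this case

print(kill_by_name('oq-job-12345'))  # hypothetical job process name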
390
gem/oq-engine
openquake/baselib/sap.py
compose
def compose(scripts, name='main', description=None, prog=None,
            version=None):
    """
    Collects together different scripts and builds a single
    script dispatching to the subparsers depending on
    the first argument, i.e. the name of the subparser to invoke.

    :param scripts: a list of script instances
    :param name: the name of the composed parser
    :param description: description of the composed parser
    :param prog: name of the script printed in the usage message
    :param version: version of the script printed with --version
    """
    assert len(scripts) >= 1, scripts
    parentparser = argparse.ArgumentParser(
        description=description, add_help=False)
    parentparser.add_argument(
        '--version', '-v', action='version', version=version)
    subparsers = parentparser.add_subparsers(
        help='available subcommands; use %s help <subcmd>' % prog,
        prog=prog)

    def gethelp(cmd=None):
        if cmd is None:
            print(parentparser.format_help())
            return
        subp = subparsers._name_parser_map.get(cmd)
        if subp is None:
            print('No help for unknown command %r' % cmd)
        else:
            print(subp.format_help())
    help_script = Script(gethelp, 'help', help=False)
    progname = '%s ' % prog if prog else ''
    help_script.arg('cmd', progname + 'subcommand')
    for s in list(scripts) + [help_script]:
        subp = subparsers.add_parser(s.name, description=s.description)
        for args, kw in s.all_arguments:
            subp.add_argument(*args, **kw)
        subp.set_defaults(_func=s.func)

    def main(**kw):
        try:
            func = kw.pop('_func')
        except KeyError:
            parentparser.print_usage()
        else:
            return func(**kw)
    main.__name__ = name
    return Script(main, name, parentparser)
python
def compose(scripts, name='main', description=None, prog=None,
            version=None):
    """
    Collects together different scripts and builds a single
    script dispatching to the subparsers depending on
    the first argument, i.e. the name of the subparser to invoke.

    :param scripts: a list of script instances
    :param name: the name of the composed parser
    :param description: description of the composed parser
    :param prog: name of the script printed in the usage message
    :param version: version of the script printed with --version
    """
    assert len(scripts) >= 1, scripts
    parentparser = argparse.ArgumentParser(
        description=description, add_help=False)
    parentparser.add_argument(
        '--version', '-v', action='version', version=version)
    subparsers = parentparser.add_subparsers(
        help='available subcommands; use %s help <subcmd>' % prog,
        prog=prog)

    def gethelp(cmd=None):
        if cmd is None:
            print(parentparser.format_help())
            return
        subp = subparsers._name_parser_map.get(cmd)
        if subp is None:
            print('No help for unknown command %r' % cmd)
        else:
            print(subp.format_help())
    help_script = Script(gethelp, 'help', help=False)
    progname = '%s ' % prog if prog else ''
    help_script.arg('cmd', progname + 'subcommand')
    for s in list(scripts) + [help_script]:
        subp = subparsers.add_parser(s.name, description=s.description)
        for args, kw in s.all_arguments:
            subp.add_argument(*args, **kw)
        subp.set_defaults(_func=s.func)

    def main(**kw):
        try:
            func = kw.pop('_func')
        except KeyError:
            parentparser.print_usage()
        else:
            return func(**kw)
    main.__name__ = name
    return Script(main, name, parentparser)
[ "def", "compose", "(", "scripts", ",", "name", "=", "'main'", ",", "description", "=", "None", ",", "prog", "=", "None", ",", "version", "=", "None", ")", ":", "assert", "len", "(", "scripts", ")", ">=", "1", ",", "scripts", "parentparser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "description", ",", "add_help", "=", "False", ")", "parentparser", ".", "add_argument", "(", "'--version'", ",", "'-v'", ",", "action", "=", "'version'", ",", "version", "=", "version", ")", "subparsers", "=", "parentparser", ".", "add_subparsers", "(", "help", "=", "'available subcommands; use %s help <subcmd>'", "%", "prog", ",", "prog", "=", "prog", ")", "def", "gethelp", "(", "cmd", "=", "None", ")", ":", "if", "cmd", "is", "None", ":", "print", "(", "parentparser", ".", "format_help", "(", ")", ")", "return", "subp", "=", "subparsers", ".", "_name_parser_map", ".", "get", "(", "cmd", ")", "if", "subp", "is", "None", ":", "print", "(", "'No help for unknown command %r'", "%", "cmd", ")", "else", ":", "print", "(", "subp", ".", "format_help", "(", ")", ")", "help_script", "=", "Script", "(", "gethelp", ",", "'help'", ",", "help", "=", "False", ")", "progname", "=", "'%s '", "%", "prog", "if", "prog", "else", "''", "help_script", ".", "arg", "(", "'cmd'", ",", "progname", "+", "'subcommand'", ")", "for", "s", "in", "list", "(", "scripts", ")", "+", "[", "help_script", "]", ":", "subp", "=", "subparsers", ".", "add_parser", "(", "s", ".", "name", ",", "description", "=", "s", ".", "description", ")", "for", "args", ",", "kw", "in", "s", ".", "all_arguments", ":", "subp", ".", "add_argument", "(", "*", "args", ",", "*", "*", "kw", ")", "subp", ".", "set_defaults", "(", "_func", "=", "s", ".", "func", ")", "def", "main", "(", "*", "*", "kw", ")", ":", "try", ":", "func", "=", "kw", ".", "pop", "(", "'_func'", ")", "except", "KeyError", ":", "parentparser", ".", "print_usage", "(", ")", "else", ":", "return", "func", "(", "*", "*", "kw", ")", "main", ".", "__name__", "=", "name", "return", "Script", "(", "main", ",", "name", ",", "parentparser", ")" ]
Collects together different scripts and builds a single
script dispatching to the subparsers depending on
the first argument, i.e. the name of the subparser to invoke.

:param scripts: a list of script instances
:param name: the name of the composed parser
:param description: description of the composed parser
:param prog: name of the script printed in the usage message
:param version: version of the script printed with --version
[ "Collects", "together", "different", "scripts", "and", "builds", "a", "single", "script", "dispatching", "to", "the", "subparsers", "depending", "on", "the", "first", "argument", "i", ".", "e", ".", "the", "name", "of", "the", "subparser", "to", "invoke", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/sap.py#L205-L253
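A minimal usage sketch, assuming ``Script`` can wrap a plain function directly, as the ``help`` subcommand inside ``compose`` does; the ``greet``/``farewell`` commands are hypothetical:

from openquake.baselib import sap

def greet(name):
    print('hello %s' % name)
greet = sap.Script(greet)
greet.arg('name', 'person to greet')

def farewell(name):
    print('bye %s' % name)
farewell = sap.Script(farewell)
farewell.arg('name', 'person to say goodbye to')

main = sap.compose([greet, farewell], prog='greeter', version='1.0')
main.callfunc(['greet', 'World'])    # dispatches to greet -> hello World
main.callfunc(['help', 'farewell'])  # the built-in help subcommand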
391
gem/oq-engine
openquake/baselib/sap.py
Script._add
def _add(self, name, *args, **kw):
    """
    Add an argument to the underlying parser and grow the list
    .all_arguments and the list .names
    """
    argname = list(self.argdict)[self._argno]
    if argname != name:
        raise NameError(
            'Setting argument %s, but it should be %s' % (name, argname))
    self._group.add_argument(*args, **kw)
    self.all_arguments.append((args, kw))
    self.names.append(name)
    self._argno += 1
python
def _add(self, name, *args, **kw):
    """
    Add an argument to the underlying parser and grow the list
    .all_arguments and the list .names
    """
    argname = list(self.argdict)[self._argno]
    if argname != name:
        raise NameError(
            'Setting argument %s, but it should be %s' % (name, argname))
    self._group.add_argument(*args, **kw)
    self.all_arguments.append((args, kw))
    self.names.append(name)
    self._argno += 1
[ "def", "_add", "(", "self", ",", "name", ",", "*", "args", ",", "*", "*", "kw", ")", ":", "argname", "=", "list", "(", "self", ".", "argdict", ")", "[", "self", ".", "_argno", "]", "if", "argname", "!=", "name", ":", "raise", "NameError", "(", "'Setting argument %s, but it should be %s'", "%", "(", "name", ",", "argname", ")", ")", "self", ".", "_group", ".", "add_argument", "(", "*", "args", ",", "*", "*", "kw", ")", "self", ".", "all_arguments", ".", "append", "(", "(", "args", ",", "kw", ")", ")", "self", ".", "names", ".", "append", "(", "name", ")", "self", ".", "_argno", "+=", "1" ]
Add an argument to the underlying parser and grow the list
.all_arguments and the list .names
[ "Add", "an", "argument", "to", "the", "underlying", "parser", "and", "grow", "the", "list", ".", "all_arguments", "and", "the", "set", ".", "names" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/sap.py#L116-L128
392
gem/oq-engine
openquake/baselib/sap.py
Script.arg
def arg(self, name, help, type=None, choices=None, metavar=None,
        nargs=None):
    """Describe a positional argument"""
    kw = dict(help=help, type=type, choices=choices, metavar=metavar,
              nargs=nargs)
    default = self.argdict[name]
    if default is not NODEFAULT:
        kw['nargs'] = nargs or '?'
        kw['default'] = default
        kw['help'] = kw['help'] + ' [default: %s]' % repr(default)
    self._add(name, name, **kw)
python
def arg(self, name, help, type=None, choices=None, metavar=None,
        nargs=None):
    """Describe a positional argument"""
    kw = dict(help=help, type=type, choices=choices, metavar=metavar,
              nargs=nargs)
    default = self.argdict[name]
    if default is not NODEFAULT:
        kw['nargs'] = nargs or '?'
        kw['default'] = default
        kw['help'] = kw['help'] + ' [default: %s]' % repr(default)
    self._add(name, name, **kw)
[ "def", "arg", "(", "self", ",", "name", ",", "help", ",", "type", "=", "None", ",", "choices", "=", "None", ",", "metavar", "=", "None", ",", "nargs", "=", "None", ")", ":", "kw", "=", "dict", "(", "help", "=", "help", ",", "type", "=", "type", ",", "choices", "=", "choices", ",", "metavar", "=", "metavar", ",", "nargs", "=", "nargs", ")", "default", "=", "self", ".", "argdict", "[", "name", "]", "if", "default", "is", "not", "NODEFAULT", ":", "kw", "[", "'nargs'", "]", "=", "nargs", "or", "'?'", "kw", "[", "'default'", "]", "=", "default", "kw", "[", "'help'", "]", "=", "kw", "[", "'help'", "]", "+", "' [default: %s]'", "%", "repr", "(", "default", ")", "self", ".", "_add", "(", "name", ",", "name", ",", "*", "*", "kw", ")" ]
Describe a positional argument
[ "Describe", "a", "positional", "argument" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/sap.py#L130-L140
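A sketch of how a Python default value turns a positional argument into an optional one (``nargs='?'`` plus an annotated help string), assuming the ``Script``-wrapping pattern shown for ``compose``; the ``count`` command is hypothetical:

from openquake.baselib import sap

def count(fname, limit=10):
    print(fname, limit)

count = sap.Script(count)
count.arg('fname', 'file to read')
count.arg('limit', 'max number of lines', type=int)  # help gets '[default: 10]'
count.callfunc(['some.txt'])        # -> some.txt 10  (default kept)
count.callfunc(['some.txt', '20'])  # -> some.txt 20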
393
gem/oq-engine
openquake/baselib/sap.py
Script.opt
def opt(self, name, help, abbrev=None,
        type=None, choices=None, metavar=None, nargs=None):
    """Describe an option"""
    kw = dict(help=help, type=type, choices=choices, metavar=metavar,
              nargs=nargs)
    default = self.argdict[name]
    if default is not NODEFAULT:
        kw['default'] = default
        kw['metavar'] = metavar or str_choices(choices) or str(default)
    abbrev = abbrev or '-' + name[0]
    abbrevs = set(args[0] for args, kw in self.all_arguments)
    longname = '--' + name.replace('_', '-')
    if abbrev == '-h' or abbrev in abbrevs:
        # avoid conflicts with predefined abbreviations
        self._add(name, longname, **kw)
    else:
        self._add(name, abbrev, longname, **kw)
python
def opt(self, name, help, abbrev=None,
        type=None, choices=None, metavar=None, nargs=None):
    """Describe an option"""
    kw = dict(help=help, type=type, choices=choices, metavar=metavar,
              nargs=nargs)
    default = self.argdict[name]
    if default is not NODEFAULT:
        kw['default'] = default
        kw['metavar'] = metavar or str_choices(choices) or str(default)
    abbrev = abbrev or '-' + name[0]
    abbrevs = set(args[0] for args, kw in self.all_arguments)
    longname = '--' + name.replace('_', '-')
    if abbrev == '-h' or abbrev in abbrevs:
        # avoid conflicts with predefined abbreviations
        self._add(name, longname, **kw)
    else:
        self._add(name, abbrev, longname, **kw)
[ "def", "opt", "(", "self", ",", "name", ",", "help", ",", "abbrev", "=", "None", ",", "type", "=", "None", ",", "choices", "=", "None", ",", "metavar", "=", "None", ",", "nargs", "=", "None", ")", ":", "kw", "=", "dict", "(", "help", "=", "help", ",", "type", "=", "type", ",", "choices", "=", "choices", ",", "metavar", "=", "metavar", ",", "nargs", "=", "nargs", ")", "default", "=", "self", ".", "argdict", "[", "name", "]", "if", "default", "is", "not", "NODEFAULT", ":", "kw", "[", "'default'", "]", "=", "default", "kw", "[", "'metavar'", "]", "=", "metavar", "or", "str_choices", "(", "choices", ")", "or", "str", "(", "default", ")", "abbrev", "=", "abbrev", "or", "'-'", "+", "name", "[", "0", "]", "abbrevs", "=", "set", "(", "args", "[", "0", "]", "for", "args", ",", "kw", "in", "self", ".", "all_arguments", ")", "longname", "=", "'--'", "+", "name", ".", "replace", "(", "'_'", ",", "'-'", ")", "if", "abbrev", "==", "'-h'", "or", "abbrev", "in", "abbrevs", ":", "# avoid conflicts with predefined abbreviations", "self", ".", "_add", "(", "name", ",", "longname", ",", "*", "*", "kw", ")", "else", ":", "self", ".", "_add", "(", "name", ",", "abbrev", ",", "longname", ",", "*", "*", "kw", ")" ]
Describe an option
[ "Describe", "an", "option" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/sap.py#L142-L158
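A sketch of the abbreviation logic: the short form defaults to the first letter, and anything clashing with ``-h`` (or an already-used abbreviation) falls back to the long name only. Same ``Script``-wrapping assumption as above; the ``export`` command is hypothetical:

from openquake.baselib import sap

def export(output, overwrite=False, host='localhost'):
    print(output, overwrite, host)

export = sap.Script(export)
export.arg('output', 'output file')
export.flg('overwrite', 'replace an existing output file')  # -o/--overwrite
export.opt('host', 'database host')  # first letter is 'h', clashes with -h,
                                     # so only --host is registered
export.callfunc(['out.csv', '-o', '--host', 'db1'])  # -> out.csv True db1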
394
gem/oq-engine
openquake/baselib/sap.py
Script.flg
def flg(self, name, help, abbrev=None):
    """Describe a flag"""
    abbrev = abbrev or '-' + name[0]
    longname = '--' + name.replace('_', '-')
    self._add(name, abbrev, longname, action='store_true', help=help)
python
def flg(self, name, help, abbrev=None):
    """Describe a flag"""
    abbrev = abbrev or '-' + name[0]
    longname = '--' + name.replace('_', '-')
    self._add(name, abbrev, longname, action='store_true', help=help)
[ "def", "flg", "(", "self", ",", "name", ",", "help", ",", "abbrev", "=", "None", ")", ":", "abbrev", "=", "abbrev", "or", "'-'", "+", "name", "[", "0", "]", "longname", "=", "'--'", "+", "name", ".", "replace", "(", "'_'", ",", "'-'", ")", "self", ".", "_add", "(", "name", ",", "abbrev", ",", "longname", ",", "action", "=", "'store_true'", ",", "help", "=", "help", ")" ]
Describe a flag
[ "Describe", "a", "flag" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/sap.py#L160-L164
395
gem/oq-engine
openquake/baselib/sap.py
Script.check_arguments
def check_arguments(self):
    """Make sure all arguments have a specification"""
    for name, default in self.argdict.items():
        if name not in self.names and default is NODEFAULT:
            raise NameError('Missing argparse specification for %r' % name)
python
def check_arguments(self):
    """Make sure all arguments have a specification"""
    for name, default in self.argdict.items():
        if name not in self.names and default is NODEFAULT:
            raise NameError('Missing argparse specification for %r' % name)
[ "def", "check_arguments", "(", "self", ")", ":", "for", "name", ",", "default", "in", "self", ".", "argdict", ".", "items", "(", ")", ":", "if", "name", "not", "in", "self", ".", "names", "and", "default", "is", "NODEFAULT", ":", "raise", "NameError", "(", "'Missing argparse specification for %r'", "%", "name", ")" ]
Make sure all arguments have a specification
[ "Make", "sure", "all", "arguments", "have", "a", "specification" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/sap.py#L166-L170
396
gem/oq-engine
openquake/baselib/sap.py
Script.callfunc
def callfunc(self, argv=None):
    """
    Parse the argv list and extract a dictionary of arguments which is
    then passed to the function underlying the script.
    """
    if not self.checked:
        self.check_arguments()
        self.checked = True
    namespace = self.parentparser.parse_args(argv or sys.argv[1:])
    return self.func(**vars(namespace))
python
def callfunc(self, argv=None):
    """
    Parse the argv list and extract a dictionary of arguments which is
    then passed to the function underlying the script.
    """
    if not self.checked:
        self.check_arguments()
        self.checked = True
    namespace = self.parentparser.parse_args(argv or sys.argv[1:])
    return self.func(**vars(namespace))
[ "def", "callfunc", "(", "self", ",", "argv", "=", "None", ")", ":", "if", "not", "self", ".", "checked", ":", "self", ".", "check_arguments", "(", ")", "self", ".", "checked", "=", "True", "namespace", "=", "self", ".", "parentparser", ".", "parse_args", "(", "argv", "or", "sys", ".", "argv", "[", "1", ":", "]", ")", "return", "self", ".", "func", "(", "*", "*", "vars", "(", "namespace", ")", ")" ]
Parse the argv list and extract a dictionary of arguments which is then passed to the function underlying the script.
[ "Parse", "the", "argv", "list", "and", "extract", "a", "dictionary", "of", "arguments", "which", "is", "then", "passed", "to", "the", "function", "underlying", "the", "script", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/sap.py#L172-L181
397
gem/oq-engine
openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py
Type1RecurrenceModel.incremental_value
def incremental_value(self, slip_moment, mmax, mag_value, bbar, dbar):
    """
    Returns the incremental rate of earthquakes with M = mag_value
    """
    delta_m = mmax - mag_value
    dirac_term = np.zeros_like(mag_value)
    dirac_term[np.fabs(delta_m) < 1.0E-12] = 1.0
    a_1 = self._get_a1(bbar, dbar, slip_moment, mmax)
    return a_1 * (bbar * np.exp(bbar * delta_m) * (delta_m > 0.0)) +\
        a_1 * dirac_term
python
def incremental_value(self, slip_moment, mmax, mag_value, bbar, dbar):
    """
    Returns the incremental rate of earthquakes with M = mag_value
    """
    delta_m = mmax - mag_value
    dirac_term = np.zeros_like(mag_value)
    dirac_term[np.fabs(delta_m) < 1.0E-12] = 1.0
    a_1 = self._get_a1(bbar, dbar, slip_moment, mmax)
    return a_1 * (bbar * np.exp(bbar * delta_m) * (delta_m > 0.0)) +\
        a_1 * dirac_term
[ "def", "incremental_value", "(", "self", ",", "slip_moment", ",", "mmax", ",", "mag_value", ",", "bbar", ",", "dbar", ")", ":", "delta_m", "=", "mmax", "-", "mag_value", "dirac_term", "=", "np", ".", "zeros_like", "(", "mag_value", ")", "dirac_term", "[", "np", ".", "fabs", "(", "delta_m", ")", "<", "1.0E-12", "]", "=", "1.0", "a_1", "=", "self", ".", "_get_a1", "(", "bbar", ",", "dbar", ",", "slip_moment", ",", "mmax", ")", "return", "a_1", "*", "(", "bbar", "*", "np", ".", "exp", "(", "bbar", "*", "delta_m", ")", "*", "(", "delta_m", ">", "0.0", ")", ")", "+", "a_1", "*", "dirac_term" ]
Returns the incremental rate of earthquakes with M = mag_value
[ "Returns", "the", "incremental", "rate", "of", "earthquakes", "with", "M", "=", "mag_value" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py#L128-L137
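The masking trick above encodes a Dirac spike at Mmax by flagging the array cell where delta_m is numerically zero. A standalone numpy sketch; a_1 is a hypothetical constant standing in for the value computed by _get_a1:

import numpy as np

mmax, bbar = 7.5, 1.0 * np.log(10.0)  # bbar = b * ln(10)
mags = np.arange(5.0, 7.6, 0.5)       # last bin hits mmax exactly
delta_m = mmax - mags

dirac_term = np.zeros_like(mags)
dirac_term[np.fabs(delta_m) < 1.0E-12] = 1.0  # spike only where M == mmax

a_1 = 1e-4  # hypothetical normalising constant
rates = a_1 * bbar * np.exp(bbar * delta_m) * (delta_m > 0.0) \
    + a_1 * dirac_term
print(rates)  # exponential taper plus a finite spike in the last bin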
398
gem/oq-engine
openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py
Type2RecurrenceModel._get_a2
def _get_a2(bbar, dbar, slip_moment, mmax):
    """
    Returns the A2 value defined in II.4 of Table 2
    """
    return ((dbar - bbar) / bbar) * (slip_moment / _scale_moment(mmax))
python
def _get_a2(bbar, dbar, slip_moment, mmax):
    """
    Returns the A2 value defined in II.4 of Table 2
    """
    return ((dbar - bbar) / bbar) * (slip_moment / _scale_moment(mmax))
[ "def", "_get_a2", "(", "bbar", ",", "dbar", ",", "slip_moment", ",", "mmax", ")", ":", "return", "(", "(", "dbar", "-", "bbar", ")", "/", "bbar", ")", "*", "(", "slip_moment", "/", "_scale_moment", "(", "mmax", ")", ")" ]
Returns the A2 value defined in II.4 of Table 2
[ "Returns", "the", "A2", "value", "defined", "in", "II", ".", "4", "of", "Table", "2" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py#L166-L170
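A worked evaluation of the A2 formula. The moment scaling below assumes the Hanks & Kanamori (1979) relation; the real _scale_moment helper may differ in constants or units, and the input values are hypothetical:

import numpy as np

def scale_moment(mag):
    # assumption: Hanks & Kanamori (1979) seismic moment in N-m
    return 10.0 ** (9.05 + 1.5 * mag)

bbar = 1.0 * np.log(10.0)   # b = 1
dbar = 1.5 * np.log(10.0)   # d = 1.5, from moment-magnitude scaling
slip_moment = 1.0e17        # hypothetical annual moment rate, N-m/yr
mmax = 7.5

a_2 = ((dbar - bbar) / bbar) * (slip_moment / scale_moment(mmax))
print(a_2)  # ~2.5e-4, the Type 2 normalising constant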
399
gem/oq-engine
openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py
Type3RecurrenceModel.incremental_value
def incremental_value(self, slip_moment, mmax, mag_value, bbar, dbar):
    """
    Returns the incremental rate with Mmax = mag_value
    """
    delta_m = mmax - mag_value
    a_3 = self._get_a3(bbar, dbar, slip_moment, mmax)
    return a_3 * bbar * (np.exp(bbar * delta_m) - 1.0) * (delta_m > 0.0)
python
def incremental_value(self, slip_moment, mmax, mag_value, bbar, dbar):
    """
    Returns the incremental rate with Mmax = mag_value
    """
    delta_m = mmax - mag_value
    a_3 = self._get_a3(bbar, dbar, slip_moment, mmax)
    return a_3 * bbar * (np.exp(bbar * delta_m) - 1.0) * (delta_m > 0.0)
[ "def", "incremental_value", "(", "self", ",", "slip_moment", ",", "mmax", ",", "mag_value", ",", "bbar", ",", "dbar", ")", ":", "delta_m", "=", "mmax", "-", "mag_value", "a_3", "=", "self", ".", "_get_a3", "(", "bbar", ",", "dbar", ",", "slip_moment", ",", "mmax", ")", "return", "a_3", "*", "bbar", "*", "(", "np", ".", "exp", "(", "bbar", "*", "delta_m", ")", "-", "1.0", ")", "*", "(", "delta_m", ">", "0.0", ")" ]
Returns the incremental rate with Mmax = mag_value
[ "Returns", "the", "incremental", "rate", "with", "Mmax", "=", "Mag_value" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py#L215-L221