id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
sequence
docstring
stringlengths
3
17.3k
docstring_tokens
sequence
sha
stringlengths
40
40
url
stringlengths
87
242
400
gem/oq-engine
openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py
AndersonLucoArbitrary.get_mmax
def get_mmax(self, mfd_conf, msr, rake, area): ''' Gets the mmax for the fault - reading directly from the config file or using the msr otherwise :param dict mfd_config: Configuration file (see setUp for paramters) :param msr: Instance of :class:`nhlib.scalerel` :param float rake: Rake of the fault (in range -180 to 180) :param float area: Area of the fault surface (km^2) ''' if mfd_conf['Maximum_Magnitude']: self.mmax = mfd_conf['Maximum_Magnitude'] else: self.mmax = msr.get_median_mag(area, rake) if ('Maximum_Magnitude_Uncertainty' in mfd_conf and mfd_conf['Maximum_Magnitude_Uncertainty']): self.mmax_sigma = mfd_conf['Maximum_Magnitude_Uncertainty'] else: self.mmax_sigma = msr.get_std_dev_mag(rake)
python
def get_mmax(self, mfd_conf, msr, rake, area):
    '''
    Gets the mmax for the fault - reading directly from the config file
    or using the msr otherwise

    :param dict mfd_conf:
        Configuration file (see setUp for parameters)
    :param msr:
        Instance of :class:`nhlib.scalerel`
    :param float rake:
        Rake of the fault (in range -180 to 180)
    :param float area:
        Area of the fault surface (km^2)
    '''
    configured_mmax = mfd_conf['Maximum_Magnitude']
    if configured_mmax:
        self.mmax = configured_mmax
    else:
        # No explicit value configured: derive mmax from the scaling relation
        self.mmax = msr.get_median_mag(area, rake)

    # Uncertainty: use the configured value when present and truthy,
    # otherwise fall back to the scaling relation's standard deviation
    configured_sigma = mfd_conf.get('Maximum_Magnitude_Uncertainty')
    if configured_sigma:
        self.mmax_sigma = configured_sigma
    else:
        self.mmax_sigma = msr.get_std_dev_mag(rake)
[ "def", "get_mmax", "(", "self", ",", "mfd_conf", ",", "msr", ",", "rake", ",", "area", ")", ":", "if", "mfd_conf", "[", "'Maximum_Magnitude'", "]", ":", "self", ".", "mmax", "=", "mfd_conf", "[", "'Maximum_Magnitude'", "]", "else", ":", "self", ".", "mmax", "=", "msr", ".", "get_median_mag", "(", "area", ",", "rake", ")", "if", "(", "'Maximum_Magnitude_Uncertainty'", "in", "mfd_conf", "and", "mfd_conf", "[", "'Maximum_Magnitude_Uncertainty'", "]", ")", ":", "self", ".", "mmax_sigma", "=", "mfd_conf", "[", "'Maximum_Magnitude_Uncertainty'", "]", "else", ":", "self", ".", "mmax_sigma", "=", "msr", ".", "get_std_dev_mag", "(", "rake", ")" ]
Gets the mmax for the fault - reading directly from the config file or using the msr otherwise :param dict mfd_config: Configuration file (see setUp for paramters) :param msr: Instance of :class:`nhlib.scalerel` :param float rake: Rake of the fault (in range -180 to 180) :param float area: Area of the fault surface (km^2)
[ "Gets", "the", "mmax", "for", "the", "fault", "-", "reading", "directly", "from", "the", "config", "file", "or", "using", "the", "msr", "otherwise" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/anderson_luco_arbitrary.py#L290-L316
401
gem/oq-engine
openquake/hazardlib/gsim/lin_2009.py
Lin2009._get_magnitude_term
def _get_magnitude_term(self, C, mag): """ Returns the magnitude scaling term. """ lny = C['C1'] + (C['C3'] * ((8.5 - mag) ** 2.)) if mag > 6.3: return lny + (-C['H'] * C['C5']) * (mag - 6.3) else: return lny + C['C2'] * (mag - 6.3)
python
def _get_magnitude_term(self, C, mag): """ Returns the magnitude scaling term. """ lny = C['C1'] + (C['C3'] * ((8.5 - mag) ** 2.)) if mag > 6.3: return lny + (-C['H'] * C['C5']) * (mag - 6.3) else: return lny + C['C2'] * (mag - 6.3)
[ "def", "_get_magnitude_term", "(", "self", ",", "C", ",", "mag", ")", ":", "lny", "=", "C", "[", "'C1'", "]", "+", "(", "C", "[", "'C3'", "]", "*", "(", "(", "8.5", "-", "mag", ")", "**", "2.", ")", ")", "if", "mag", ">", "6.3", ":", "return", "lny", "+", "(", "-", "C", "[", "'H'", "]", "*", "C", "[", "'C5'", "]", ")", "*", "(", "mag", "-", "6.3", ")", "else", ":", "return", "lny", "+", "C", "[", "'C2'", "]", "*", "(", "mag", "-", "6.3", ")" ]
Returns the magnitude scaling term.
[ "Returns", "the", "magnitude", "scaling", "term", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/lin_2009.py#L83-L91
402
gem/oq-engine
openquake/hazardlib/gsim/lin_2009.py
Lin2009._get_style_of_faulting_term
def _get_style_of_faulting_term(self, C, rake): """ Returns the style of faulting factor """ f_n, f_r = self._get_fault_type_dummy_variables(rake) return C['C6'] * f_n + C['C7'] * f_r
python
def _get_style_of_faulting_term(self, C, rake): """ Returns the style of faulting factor """ f_n, f_r = self._get_fault_type_dummy_variables(rake) return C['C6'] * f_n + C['C7'] * f_r
[ "def", "_get_style_of_faulting_term", "(", "self", ",", "C", ",", "rake", ")", ":", "f_n", ",", "f_r", "=", "self", ".", "_get_fault_type_dummy_variables", "(", "rake", ")", "return", "C", "[", "'C6'", "]", "*", "f_n", "+", "C", "[", "'C7'", "]", "*", "f_r" ]
Returns the style of faulting factor
[ "Returns", "the", "style", "of", "faulting", "factor" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/lin_2009.py#L100-L105
403
gem/oq-engine
openquake/hazardlib/gsim/lin_2009.py
Lin2009._get_stddevs
def _get_stddevs(self, C, stddev_types, nsites): """ Compute total standard deviation, see table 4.2, page 50. """ stddevs = [] for stddev_type in stddev_types: assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const.StdDev.TOTAL: stddevs.append(C['sigma'] + np.zeros(nsites, dtype=float)) return stddevs
python
def _get_stddevs(self, C, stddev_types, nsites): """ Compute total standard deviation, see table 4.2, page 50. """ stddevs = [] for stddev_type in stddev_types: assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const.StdDev.TOTAL: stddevs.append(C['sigma'] + np.zeros(nsites, dtype=float)) return stddevs
[ "def", "_get_stddevs", "(", "self", ",", "C", ",", "stddev_types", ",", "nsites", ")", ":", "stddevs", "=", "[", "]", "for", "stddev_type", "in", "stddev_types", ":", "assert", "stddev_type", "in", "self", ".", "DEFINED_FOR_STANDARD_DEVIATION_TYPES", "if", "stddev_type", "==", "const", ".", "StdDev", ".", "TOTAL", ":", "stddevs", ".", "append", "(", "C", "[", "'sigma'", "]", "+", "np", ".", "zeros", "(", "nsites", ",", "dtype", "=", "float", ")", ")", "return", "stddevs" ]
Compute total standard deviation, see table 4.2, page 50.
[ "Compute", "total", "standard", "deviation", "see", "table", "4", ".", "2", "page", "50", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/lin_2009.py#L128-L137
404
gem/oq-engine
openquake/hazardlib/calc/disagg.py
lon_lat_bins
def lon_lat_bins(bb, coord_bin_width): """ Define bin edges for disaggregation histograms. Given bins data as provided by :func:`collect_bin_data`, this function finds edges of histograms, taking into account maximum and minimum values of magnitude, distance and coordinates as well as requested sizes/numbers of bins. """ west, south, east, north = bb west = numpy.floor(west / coord_bin_width) * coord_bin_width east = numpy.ceil(east / coord_bin_width) * coord_bin_width lon_extent = get_longitudinal_extent(west, east) lon_bins, _, _ = npoints_between( west, 0, 0, east, 0, 0, numpy.round(lon_extent / coord_bin_width + 1)) lat_bins = coord_bin_width * numpy.arange( int(numpy.floor(south / coord_bin_width)), int(numpy.ceil(north / coord_bin_width) + 1)) return lon_bins, lat_bins
python
def lon_lat_bins(bb, coord_bin_width):
    """
    Define bin edges for disaggregation histograms.

    Given bins data as provided by :func:`collect_bin_data`, this function
    finds edges of histograms, taking into account maximum and minimum values
    of magnitude, distance and coordinates as well as requested sizes/numbers
    of bins.
    """
    west, south, east, north = bb
    # Snap the western/eastern edges outward onto the bin grid
    west = numpy.floor(west / coord_bin_width) * coord_bin_width
    east = numpy.ceil(east / coord_bin_width) * coord_bin_width
    lon_extent = get_longitudinal_extent(west, east)
    n_lon_edges = numpy.round(lon_extent / coord_bin_width + 1)
    # npoints_between handles the international date line for longitudes
    lon_bins, _, _ = npoints_between(west, 0, 0, east, 0, 0, n_lon_edges)
    south_index = int(numpy.floor(south / coord_bin_width))
    north_index = int(numpy.ceil(north / coord_bin_width) + 1)
    lat_bins = coord_bin_width * numpy.arange(south_index, north_index)
    return lon_bins, lat_bins
[ "def", "lon_lat_bins", "(", "bb", ",", "coord_bin_width", ")", ":", "west", ",", "south", ",", "east", ",", "north", "=", "bb", "west", "=", "numpy", ".", "floor", "(", "west", "/", "coord_bin_width", ")", "*", "coord_bin_width", "east", "=", "numpy", ".", "ceil", "(", "east", "/", "coord_bin_width", ")", "*", "coord_bin_width", "lon_extent", "=", "get_longitudinal_extent", "(", "west", ",", "east", ")", "lon_bins", ",", "_", ",", "_", "=", "npoints_between", "(", "west", ",", "0", ",", "0", ",", "east", ",", "0", ",", "0", ",", "numpy", ".", "round", "(", "lon_extent", "/", "coord_bin_width", "+", "1", ")", ")", "lat_bins", "=", "coord_bin_width", "*", "numpy", ".", "arange", "(", "int", "(", "numpy", ".", "floor", "(", "south", "/", "coord_bin_width", ")", ")", ",", "int", "(", "numpy", ".", "ceil", "(", "north", "/", "coord_bin_width", ")", "+", "1", ")", ")", "return", "lon_bins", ",", "lat_bins" ]
Define bin edges for disaggregation histograms. Given bins data as provided by :func:`collect_bin_data`, this function finds edges of histograms, taking into account maximum and minimum values of magnitude, distance and coordinates as well as requested sizes/numbers of bins.
[ "Define", "bin", "edges", "for", "disaggregation", "histograms", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/disagg.py#L92-L111
405
gem/oq-engine
openquake/hazardlib/calc/disagg.py
_digitize_lons
def _digitize_lons(lons, lon_bins): """ Return indices of the bins to which each value in lons belongs. Takes into account the case in which longitude values cross the international date line. :parameter lons: An instance of `numpy.ndarray`. :parameter lons_bins: An instance of `numpy.ndarray`. """ if cross_idl(lon_bins[0], lon_bins[-1]): idx = numpy.zeros_like(lons, dtype=numpy.int) for i_lon in range(len(lon_bins) - 1): extents = get_longitudinal_extent(lons, lon_bins[i_lon + 1]) lon_idx = extents > 0 if i_lon != 0: extents = get_longitudinal_extent(lon_bins[i_lon], lons) lon_idx &= extents >= 0 idx[lon_idx] = i_lon return numpy.array(idx) else: return numpy.digitize(lons, lon_bins) - 1
python
def _digitize_lons(lons, lon_bins):
    """
    Return indices of the bins to which each value in lons belongs.
    Takes into account the case in which longitude values cross the
    international date line.

    :parameter lons:
        An instance of `numpy.ndarray`.
    :parameter lon_bins:
        An instance of `numpy.ndarray`.
    """
    if cross_idl(lon_bins[0], lon_bins[-1]):
        # `numpy.int` was deprecated in NumPy 1.20 and removed in 1.24;
        # the builtin `int` is the documented replacement (same dtype).
        idx = numpy.zeros_like(lons, dtype=int)
        for i_lon in range(len(lon_bins) - 1):
            # A longitude belongs to bin i_lon when it lies west of the
            # bin's right edge ...
            extents = get_longitudinal_extent(lons, lon_bins[i_lon + 1])
            lon_idx = extents > 0
            if i_lon != 0:
                # ... and east of (or on) the bin's left edge
                extents = get_longitudinal_extent(lon_bins[i_lon], lons)
                lon_idx &= extents >= 0
            idx[lon_idx] = i_lon
        return numpy.array(idx)
    else:
        # No date-line crossing: plain histogram digitization
        return numpy.digitize(lons, lon_bins) - 1
[ "def", "_digitize_lons", "(", "lons", ",", "lon_bins", ")", ":", "if", "cross_idl", "(", "lon_bins", "[", "0", "]", ",", "lon_bins", "[", "-", "1", "]", ")", ":", "idx", "=", "numpy", ".", "zeros_like", "(", "lons", ",", "dtype", "=", "numpy", ".", "int", ")", "for", "i_lon", "in", "range", "(", "len", "(", "lon_bins", ")", "-", "1", ")", ":", "extents", "=", "get_longitudinal_extent", "(", "lons", ",", "lon_bins", "[", "i_lon", "+", "1", "]", ")", "lon_idx", "=", "extents", ">", "0", "if", "i_lon", "!=", "0", ":", "extents", "=", "get_longitudinal_extent", "(", "lon_bins", "[", "i_lon", "]", ",", "lons", ")", "lon_idx", "&=", "extents", ">=", "0", "idx", "[", "lon_idx", "]", "=", "i_lon", "return", "numpy", ".", "array", "(", "idx", ")", "else", ":", "return", "numpy", ".", "digitize", "(", "lons", ",", "lon_bins", ")", "-", "1" ]
Return indices of the bins to which each value in lons belongs. Takes into account the case in which longitude values cross the international date line. :parameter lons: An instance of `numpy.ndarray`. :parameter lons_bins: An instance of `numpy.ndarray`.
[ "Return", "indices", "of", "the", "bins", "to", "which", "each", "value", "in", "lons", "belongs", ".", "Takes", "into", "account", "the", "case", "in", "which", "longitude", "values", "cross", "the", "international", "date", "line", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/disagg.py#L188-L210
406
gem/oq-engine
openquake/hazardlib/calc/disagg.py
mag_pmf
def mag_pmf(matrix): """ Fold full disaggregation matrix to magnitude PMF. :returns: 1d array, a histogram representing magnitude PMF. """ nmags, ndists, nlons, nlats, neps = matrix.shape mag_pmf = numpy.zeros(nmags) for i in range(nmags): mag_pmf[i] = numpy.prod( [1. - matrix[i, j, k, l, m] for j in range(ndists) for k in range(nlons) for l in range(nlats) for m in range(neps)]) return 1. - mag_pmf
python
def mag_pmf(matrix):
    """
    Fold full disaggregation matrix to magnitude PMF.

    :param matrix:
        A 5d array of shape (nmags, ndists, nlons, nlats, neps)
        containing probabilities.
    :returns:
        1d array, a histogram representing magnitude PMF.
    """
    # P(mag bin i) = 1 - prod(1 - p) over all the remaining axes.
    # Vectorized with a reshape + axis-prod instead of a quadruple
    # Python loop building an intermediate list: same values, O(1)
    # Python-level work per magnitude bin.
    nmags = matrix.shape[0]
    return 1. - numpy.prod(1. - matrix.reshape(nmags, -1), axis=1)
[ "def", "mag_pmf", "(", "matrix", ")", ":", "nmags", ",", "ndists", ",", "nlons", ",", "nlats", ",", "neps", "=", "matrix", ".", "shape", "mag_pmf", "=", "numpy", ".", "zeros", "(", "nmags", ")", "for", "i", "in", "range", "(", "nmags", ")", ":", "mag_pmf", "[", "i", "]", "=", "numpy", ".", "prod", "(", "[", "1.", "-", "matrix", "[", "i", ",", "j", ",", "k", ",", "l", ",", "m", "]", "for", "j", "in", "range", "(", "ndists", ")", "for", "k", "in", "range", "(", "nlons", ")", "for", "l", "in", "range", "(", "nlats", ")", "for", "m", "in", "range", "(", "neps", ")", "]", ")", "return", "1.", "-", "mag_pmf" ]
Fold full disaggregation matrix to magnitude PMF. :returns: 1d array, a histogram representing magnitude PMF.
[ "Fold", "full", "disaggregation", "matrix", "to", "magnitude", "PMF", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/disagg.py#L338-L354
407
gem/oq-engine
openquake/hazardlib/calc/disagg.py
trt_pmf
def trt_pmf(matrices): """ Fold full disaggregation matrix to tectonic region type PMF. :param matrices: a matrix with T submatrices :returns: an array of T probabilities one per each tectonic region type """ ntrts, nmags, ndists, nlons, nlats, neps = matrices.shape pmf = numpy.zeros(ntrts) for t in range(ntrts): pmf[t] = 1. - numpy.prod( [1. - matrices[t, i, j, k, l, m] for i in range(nmags) for j in range(ndists) for k in range(nlons) for l in range(nlats) for m in range(neps)]) return pmf
python
def trt_pmf(matrices):
    """
    Fold full disaggregation matrix to tectonic region type PMF.

    :param matrices:
        a matrix with T submatrices of shape
        (nmags, ndists, nlons, nlats, neps)
    :returns:
        an array of T probabilities one per each tectonic region type
    """
    # P(trt t) = 1 - prod(1 - p) over all the remaining axes.
    # Vectorized with a reshape + axis-prod instead of a quintuple
    # Python loop: identical values, far less Python-level work.
    ntrts = matrices.shape[0]
    return 1. - numpy.prod(1. - matrices.reshape(ntrts, -1), axis=1)
[ "def", "trt_pmf", "(", "matrices", ")", ":", "ntrts", ",", "nmags", ",", "ndists", ",", "nlons", ",", "nlats", ",", "neps", "=", "matrices", ".", "shape", "pmf", "=", "numpy", ".", "zeros", "(", "ntrts", ")", "for", "t", "in", "range", "(", "ntrts", ")", ":", "pmf", "[", "t", "]", "=", "1.", "-", "numpy", ".", "prod", "(", "[", "1.", "-", "matrices", "[", "t", ",", "i", ",", "j", ",", "k", ",", "l", ",", "m", "]", "for", "i", "in", "range", "(", "nmags", ")", "for", "j", "in", "range", "(", "ndists", ")", "for", "k", "in", "range", "(", "nlons", ")", "for", "l", "in", "range", "(", "nlats", ")", "for", "m", "in", "range", "(", "neps", ")", "]", ")", "return", "pmf" ]
Fold full disaggregation matrix to tectonic region type PMF. :param matrices: a matrix with T submatrices :returns: an array of T probabilities one per each tectonic region type
[ "Fold", "full", "disaggregation", "matrix", "to", "tectonic", "region", "type", "PMF", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/disagg.py#L376-L395
408
gem/oq-engine
openquake/commands/db.py
db
def db(cmd, args=()): """ Run a database command """ if cmd not in commands: okcmds = '\n'.join( '%s %s' % (name, repr(' '.join(args)) if args else '') for name, args in sorted(commands.items())) print('Invalid command "%s": choose one from\n%s' % (cmd, okcmds)) elif len(args) != len(commands[cmd]): print('Wrong number of arguments, expected %s, got %s' % ( commands[cmd], args)) else: dbserver.ensure_on() res = logs.dbcmd(cmd, *convert(args)) if hasattr(res, '_fields') and res.__class__.__name__ != 'Row': print(rst_table(res)) else: print(res)
python
def db(cmd, args=()):
    """
    Run a database command
    """
    # Unknown command: print the usage of all known commands and bail out
    if cmd not in commands:
        usage = '\n'.join(
            '%s %s' % (name, repr(' '.join(cargs)) if cargs else '')
            for name, cargs in sorted(commands.items()))
        print('Invalid command "%s": choose one from\n%s' % (cmd, usage))
        return
    # Arity check against the command's declared argument list
    if len(args) != len(commands[cmd]):
        print('Wrong number of arguments, expected %s, got %s' % (
            commands[cmd], args))
        return
    dbserver.ensure_on()
    res = logs.dbcmd(cmd, *convert(args))
    # Namedtuple-like results (except Row) are rendered as an RST table
    if hasattr(res, '_fields') and res.__class__.__name__ != 'Row':
        print(rst_table(res))
    else:
        print(res)
[ "def", "db", "(", "cmd", ",", "args", "=", "(", ")", ")", ":", "if", "cmd", "not", "in", "commands", ":", "okcmds", "=", "'\\n'", ".", "join", "(", "'%s %s'", "%", "(", "name", ",", "repr", "(", "' '", ".", "join", "(", "args", ")", ")", "if", "args", "else", "''", ")", "for", "name", ",", "args", "in", "sorted", "(", "commands", ".", "items", "(", ")", ")", ")", "print", "(", "'Invalid command \"%s\": choose one from\\n%s'", "%", "(", "cmd", ",", "okcmds", ")", ")", "elif", "len", "(", "args", ")", "!=", "len", "(", "commands", "[", "cmd", "]", ")", ":", "print", "(", "'Wrong number of arguments, expected %s, got %s'", "%", "(", "commands", "[", "cmd", "]", ",", "args", ")", ")", "else", ":", "dbserver", ".", "ensure_on", "(", ")", "res", "=", "logs", ".", "dbcmd", "(", "cmd", ",", "*", "convert", "(", "args", ")", ")", "if", "hasattr", "(", "res", ",", "'_fields'", ")", "and", "res", ".", "__class__", ".", "__name__", "!=", "'Row'", ":", "print", "(", "rst_table", "(", "res", ")", ")", "else", ":", "print", "(", "res", ")" ]
Run a database command
[ "Run", "a", "database", "command" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/db.py#L47-L65
409
gem/oq-engine
openquake/hazardlib/stats.py
mean_curve
def mean_curve(values, weights=None): """ Compute the mean by using numpy.average on the first axis. """ if weights is None: weights = [1. / len(values)] * len(values) if not isinstance(values, numpy.ndarray): values = numpy.array(values) return numpy.average(values, axis=0, weights=weights)
python
def mean_curve(values, weights=None):
    """
    Compute the mean by using numpy.average on the first axis.
    """
    # Uniform weights when none are given
    if weights is None:
        weights = [1. / len(values)] * len(values)
    arr = values if isinstance(values, numpy.ndarray) else numpy.array(values)
    return numpy.average(arr, axis=0, weights=weights)
[ "def", "mean_curve", "(", "values", ",", "weights", "=", "None", ")", ":", "if", "weights", "is", "None", ":", "weights", "=", "[", "1.", "/", "len", "(", "values", ")", "]", "*", "len", "(", "values", ")", "if", "not", "isinstance", "(", "values", ",", "numpy", ".", "ndarray", ")", ":", "values", "=", "numpy", ".", "array", "(", "values", ")", "return", "numpy", ".", "average", "(", "values", ",", "axis", "=", "0", ",", "weights", "=", "weights", ")" ]
Compute the mean by using numpy.average on the first axis.
[ "Compute", "the", "mean", "by", "using", "numpy", ".", "average", "on", "the", "first", "axis", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/stats.py#L25-L33
410
gem/oq-engine
openquake/hazardlib/stats.py
quantile_curve
def quantile_curve(quantile, curves, weights=None): """ Compute the weighted quantile aggregate of a set of curves. :param quantile: Quantile value to calculate. Should be in the range [0.0, 1.0]. :param curves: Array of R PoEs (possibly arrays) :param weights: Array-like of weights, 1 for each input curve, or None :returns: A numpy array representing the quantile aggregate """ if not isinstance(curves, numpy.ndarray): curves = numpy.array(curves) R = len(curves) if weights is None: weights = numpy.ones(R) / R else: weights = numpy.array(weights) assert len(weights) == R, (len(weights), R) result = numpy.zeros(curves.shape[1:]) for idx, _ in numpy.ndenumerate(result): data = numpy.array([a[idx] for a in curves]) sorted_idxs = numpy.argsort(data) sorted_weights = weights[sorted_idxs] sorted_data = data[sorted_idxs] cum_weights = numpy.cumsum(sorted_weights) # get the quantile from the interpolated CDF result[idx] = numpy.interp(quantile, cum_weights, sorted_data) return result
python
def quantile_curve(quantile, curves, weights=None):
    """
    Compute the weighted quantile aggregate of a set of curves.

    :param quantile:
        Quantile value to calculate. Should be in the range [0.0, 1.0].
    :param curves:
        Array of R PoEs (possibly arrays)
    :param weights:
        Array-like of weights, 1 for each input curve, or None
    :returns:
        A numpy array representing the quantile aggregate
    """
    if not isinstance(curves, numpy.ndarray):
        curves = numpy.array(curves)
    num_curves = len(curves)
    if weights is None:
        # Uniform weighting over the realizations
        weights = numpy.ones(num_curves) / num_curves
    else:
        weights = numpy.array(weights)
        assert len(weights) == num_curves, (len(weights), num_curves)
    result = numpy.zeros(curves.shape[1:])
    for idx, _ in numpy.ndenumerate(result):
        data = numpy.array([curve[idx] for curve in curves])
        order = data.argsort()
        cdf = weights[order].cumsum()
        # Interpolate the quantile on the empirical weighted CDF
        result[idx] = numpy.interp(quantile, cdf, data[order])
    return result
[ "def", "quantile_curve", "(", "quantile", ",", "curves", ",", "weights", "=", "None", ")", ":", "if", "not", "isinstance", "(", "curves", ",", "numpy", ".", "ndarray", ")", ":", "curves", "=", "numpy", ".", "array", "(", "curves", ")", "R", "=", "len", "(", "curves", ")", "if", "weights", "is", "None", ":", "weights", "=", "numpy", ".", "ones", "(", "R", ")", "/", "R", "else", ":", "weights", "=", "numpy", ".", "array", "(", "weights", ")", "assert", "len", "(", "weights", ")", "==", "R", ",", "(", "len", "(", "weights", ")", ",", "R", ")", "result", "=", "numpy", ".", "zeros", "(", "curves", ".", "shape", "[", "1", ":", "]", ")", "for", "idx", ",", "_", "in", "numpy", ".", "ndenumerate", "(", "result", ")", ":", "data", "=", "numpy", ".", "array", "(", "[", "a", "[", "idx", "]", "for", "a", "in", "curves", "]", ")", "sorted_idxs", "=", "numpy", ".", "argsort", "(", "data", ")", "sorted_weights", "=", "weights", "[", "sorted_idxs", "]", "sorted_data", "=", "data", "[", "sorted_idxs", "]", "cum_weights", "=", "numpy", ".", "cumsum", "(", "sorted_weights", ")", "# get the quantile from the interpolated CDF", "result", "[", "idx", "]", "=", "numpy", ".", "interp", "(", "quantile", ",", "cum_weights", ",", "sorted_data", ")", "return", "result" ]
Compute the weighted quantile aggregate of a set of curves. :param quantile: Quantile value to calculate. Should be in the range [0.0, 1.0]. :param curves: Array of R PoEs (possibly arrays) :param weights: Array-like of weights, 1 for each input curve, or None :returns: A numpy array representing the quantile aggregate
[ "Compute", "the", "weighted", "quantile", "aggregate", "of", "a", "set", "of", "curves", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/stats.py#L44-L74
411
gem/oq-engine
openquake/hazardlib/gsim/toro_2002.py
ToroEtAl2002._compute_mean
def _compute_mean(self, C, mag, rjb): """ Compute mean value according to equation 3, page 46. """ mean = (C['c1'] + self._compute_term1(C, mag) + self._compute_term2(C, mag, rjb)) return mean
python
def _compute_mean(self, C, mag, rjb): """ Compute mean value according to equation 3, page 46. """ mean = (C['c1'] + self._compute_term1(C, mag) + self._compute_term2(C, mag, rjb)) return mean
[ "def", "_compute_mean", "(", "self", ",", "C", ",", "mag", ",", "rjb", ")", ":", "mean", "=", "(", "C", "[", "'c1'", "]", "+", "self", ".", "_compute_term1", "(", "C", ",", "mag", ")", "+", "self", ".", "_compute_term2", "(", "C", ",", "mag", ",", "rjb", ")", ")", "return", "mean" ]
Compute mean value according to equation 3, page 46.
[ "Compute", "mean", "value", "according", "to", "equation", "3", "page", "46", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/toro_2002.py#L121-L128
412
gem/oq-engine
openquake/hazardlib/gsim/toro_2002.py
ToroEtAl2002._compute_stddevs
def _compute_stddevs(self, C, mag, rjb, imt, stddev_types): """ Compute total standard deviation, equations 5 and 6, page 48. """ # aleatory uncertainty sigma_ale_m = np.interp(mag, [5.0, 5.5, 8.0], [C['m50'], C['m55'], C['m80']]) sigma_ale_rjb = np.interp(rjb, [5.0, 20.0], [C['r5'], C['r20']]) sigma_ale = np.sqrt(sigma_ale_m ** 2 + sigma_ale_rjb ** 2) # epistemic uncertainty if imt.period < 1: sigma_epi = 0.36 + 0.07 * (mag - 6) else: sigma_epi = 0.34 + 0.06 * (mag - 6) sigma_total = np.sqrt(sigma_ale ** 2 + sigma_epi ** 2) stddevs = [] for _ in stddev_types: stddevs.append(sigma_total) return stddevs
python
def _compute_stddevs(self, C, mag, rjb, imt, stddev_types): """ Compute total standard deviation, equations 5 and 6, page 48. """ # aleatory uncertainty sigma_ale_m = np.interp(mag, [5.0, 5.5, 8.0], [C['m50'], C['m55'], C['m80']]) sigma_ale_rjb = np.interp(rjb, [5.0, 20.0], [C['r5'], C['r20']]) sigma_ale = np.sqrt(sigma_ale_m ** 2 + sigma_ale_rjb ** 2) # epistemic uncertainty if imt.period < 1: sigma_epi = 0.36 + 0.07 * (mag - 6) else: sigma_epi = 0.34 + 0.06 * (mag - 6) sigma_total = np.sqrt(sigma_ale ** 2 + sigma_epi ** 2) stddevs = [] for _ in stddev_types: stddevs.append(sigma_total) return stddevs
[ "def", "_compute_stddevs", "(", "self", ",", "C", ",", "mag", ",", "rjb", ",", "imt", ",", "stddev_types", ")", ":", "# aleatory uncertainty", "sigma_ale_m", "=", "np", ".", "interp", "(", "mag", ",", "[", "5.0", ",", "5.5", ",", "8.0", "]", ",", "[", "C", "[", "'m50'", "]", ",", "C", "[", "'m55'", "]", ",", "C", "[", "'m80'", "]", "]", ")", "sigma_ale_rjb", "=", "np", ".", "interp", "(", "rjb", ",", "[", "5.0", ",", "20.0", "]", ",", "[", "C", "[", "'r5'", "]", ",", "C", "[", "'r20'", "]", "]", ")", "sigma_ale", "=", "np", ".", "sqrt", "(", "sigma_ale_m", "**", "2", "+", "sigma_ale_rjb", "**", "2", ")", "# epistemic uncertainty", "if", "imt", ".", "period", "<", "1", ":", "sigma_epi", "=", "0.36", "+", "0.07", "*", "(", "mag", "-", "6", ")", "else", ":", "sigma_epi", "=", "0.34", "+", "0.06", "*", "(", "mag", "-", "6", ")", "sigma_total", "=", "np", ".", "sqrt", "(", "sigma_ale", "**", "2", "+", "sigma_epi", "**", "2", ")", "stddevs", "=", "[", "]", "for", "_", "in", "stddev_types", ":", "stddevs", ".", "append", "(", "sigma_total", ")", "return", "stddevs" ]
Compute total standard deviation, equations 5 and 6, page 48.
[ "Compute", "total", "standard", "deviation", "equations", "5", "and", "6", "page", "48", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/toro_2002.py#L130-L152
413
gem/oq-engine
openquake/hazardlib/gsim/mgmpe/nrcan15_site_term.py
NRCan15SiteTerm.BA08_AB06
def BA08_AB06(self, vs30, imt, pgar): """ Computes amplification factor similarly to what is done in the 2015 version of the Canada building code. An initial version of this code was kindly provided by Michal Kolaj - Geological Survey of Canada :param vs30: Can be either a scalar or a :class:`~numpy.ndarray` instance :param imt: The intensity measure type :param pgar: The value of hazard on rock (vs30=760). Can be either a scalar or a :class:`~numpy.ndarray` instance. Unit of measure is fractions of gravity acceleration. :return: A scalar or a :class:`~numpy.ndarray` instance with the amplification factor. """ fa = np.ones_like(vs30) if np.isscalar(vs30): vs30 = np.array([vs30]) if np.isscalar(pgar): pgar = np.array([pgar]) # # Fixing vs30 for hard rock to 1999 m/s. Beyond this threshold the # motion will not be deamplified further vs = copy.copy(vs30) vs[vs >= 2000] = 1999. # # Computing motion on rock idx = np.where(vs30 > 760) if np.size(idx) > 0: """ # This is the original implementation - Since this code is # experimental we keep it for possible further developments # For values of Vs30 greater than 760 a linear interpolation is # used between the gm factor at 2000 m/s and 760 m/s C2 = self.COEFFS_AB06r[imt] fa[idx] = 10**(np.interp(np.log10(vs[idx]), np.log10([760.0, 2000.0]), np.log10([1.0, C2['c']]))) """ C = self.COEFFS_BA08[imt] nl = BooreAtkinson2008()._get_site_amplification_non_linear( vs[idx], pgar[idx], C) lin = BooreAtkinson2008()._get_site_amplification_linear( vs[idx], C) tmp = np.exp(nl+lin) fa[idx] = tmp # # For values of Vs30 lower than 760 the amplification is computed # using the site term of Boore and Atkinson (2008) idx = np.where(vs < 760.) if np.size(idx) > 0: C = self.COEFFS_BA08[imt] nl = BooreAtkinson2008()._get_site_amplification_non_linear( vs[idx], pgar[idx], C) lin = BooreAtkinson2008()._get_site_amplification_linear( vs[idx], C) fa[idx] = np.exp(nl+lin) return fa
python
def BA08_AB06(self, vs30, imt, pgar): """ Computes amplification factor similarly to what is done in the 2015 version of the Canada building code. An initial version of this code was kindly provided by Michal Kolaj - Geological Survey of Canada :param vs30: Can be either a scalar or a :class:`~numpy.ndarray` instance :param imt: The intensity measure type :param pgar: The value of hazard on rock (vs30=760). Can be either a scalar or a :class:`~numpy.ndarray` instance. Unit of measure is fractions of gravity acceleration. :return: A scalar or a :class:`~numpy.ndarray` instance with the amplification factor. """ fa = np.ones_like(vs30) if np.isscalar(vs30): vs30 = np.array([vs30]) if np.isscalar(pgar): pgar = np.array([pgar]) # # Fixing vs30 for hard rock to 1999 m/s. Beyond this threshold the # motion will not be deamplified further vs = copy.copy(vs30) vs[vs >= 2000] = 1999. # # Computing motion on rock idx = np.where(vs30 > 760) if np.size(idx) > 0: """ # This is the original implementation - Since this code is # experimental we keep it for possible further developments # For values of Vs30 greater than 760 a linear interpolation is # used between the gm factor at 2000 m/s and 760 m/s C2 = self.COEFFS_AB06r[imt] fa[idx] = 10**(np.interp(np.log10(vs[idx]), np.log10([760.0, 2000.0]), np.log10([1.0, C2['c']]))) """ C = self.COEFFS_BA08[imt] nl = BooreAtkinson2008()._get_site_amplification_non_linear( vs[idx], pgar[idx], C) lin = BooreAtkinson2008()._get_site_amplification_linear( vs[idx], C) tmp = np.exp(nl+lin) fa[idx] = tmp # # For values of Vs30 lower than 760 the amplification is computed # using the site term of Boore and Atkinson (2008) idx = np.where(vs < 760.) if np.size(idx) > 0: C = self.COEFFS_BA08[imt] nl = BooreAtkinson2008()._get_site_amplification_non_linear( vs[idx], pgar[idx], C) lin = BooreAtkinson2008()._get_site_amplification_linear( vs[idx], C) fa[idx] = np.exp(nl+lin) return fa
[ "def", "BA08_AB06", "(", "self", ",", "vs30", ",", "imt", ",", "pgar", ")", ":", "fa", "=", "np", ".", "ones_like", "(", "vs30", ")", "if", "np", ".", "isscalar", "(", "vs30", ")", ":", "vs30", "=", "np", ".", "array", "(", "[", "vs30", "]", ")", "if", "np", ".", "isscalar", "(", "pgar", ")", ":", "pgar", "=", "np", ".", "array", "(", "[", "pgar", "]", ")", "#", "# Fixing vs30 for hard rock to 1999 m/s. Beyond this threshold the", "# motion will not be deamplified further", "vs", "=", "copy", ".", "copy", "(", "vs30", ")", "vs", "[", "vs", ">=", "2000", "]", "=", "1999.", "#", "# Computing motion on rock", "idx", "=", "np", ".", "where", "(", "vs30", ">", "760", ")", "if", "np", ".", "size", "(", "idx", ")", ">", "0", ":", "\"\"\"\n # This is the original implementation - Since this code is\n # experimental we keep it for possible further developments\n # For values of Vs30 greater than 760 a linear interpolation is\n # used between the gm factor at 2000 m/s and 760 m/s\n C2 = self.COEFFS_AB06r[imt]\n fa[idx] = 10**(np.interp(np.log10(vs[idx]),\n np.log10([760.0, 2000.0]),\n np.log10([1.0, C2['c']])))\n \"\"\"", "C", "=", "self", ".", "COEFFS_BA08", "[", "imt", "]", "nl", "=", "BooreAtkinson2008", "(", ")", ".", "_get_site_amplification_non_linear", "(", "vs", "[", "idx", "]", ",", "pgar", "[", "idx", "]", ",", "C", ")", "lin", "=", "BooreAtkinson2008", "(", ")", ".", "_get_site_amplification_linear", "(", "vs", "[", "idx", "]", ",", "C", ")", "tmp", "=", "np", ".", "exp", "(", "nl", "+", "lin", ")", "fa", "[", "idx", "]", "=", "tmp", "#", "# For values of Vs30 lower than 760 the amplification is computed", "# using the site term of Boore and Atkinson (2008)", "idx", "=", "np", ".", "where", "(", "vs", "<", "760.", ")", "if", "np", ".", "size", "(", "idx", ")", ">", "0", ":", "C", "=", "self", ".", "COEFFS_BA08", "[", "imt", "]", "nl", "=", "BooreAtkinson2008", "(", ")", ".", "_get_site_amplification_non_linear", "(", "vs", "[", "idx", "]", ",", "pgar", "[", 
"idx", "]", ",", "C", ")", "lin", "=", "BooreAtkinson2008", "(", ")", ".", "_get_site_amplification_linear", "(", "vs", "[", "idx", "]", ",", "C", ")", "fa", "[", "idx", "]", "=", "np", ".", "exp", "(", "nl", "+", "lin", ")", "return", "fa" ]
Computes amplification factor similarly to what is done in the 2015 version of the Canada building code. An initial version of this code was kindly provided by Michal Kolaj - Geological Survey of Canada :param vs30: Can be either a scalar or a :class:`~numpy.ndarray` instance :param imt: The intensity measure type :param pgar: The value of hazard on rock (vs30=760). Can be either a scalar or a :class:`~numpy.ndarray` instance. Unit of measure is fractions of gravity acceleration. :return: A scalar or a :class:`~numpy.ndarray` instance with the amplification factor.
[ "Computes", "amplification", "factor", "similarly", "to", "what", "is", "done", "in", "the", "2015", "version", "of", "the", "Canada", "building", "code", ".", "An", "initial", "version", "of", "this", "code", "was", "kindly", "provided", "by", "Michal", "Kolaj", "-", "Geological", "Survey", "of", "Canada" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/mgmpe/nrcan15_site_term.py#L89-L149
414
gem/oq-engine
openquake/calculators/scenario_damage.py
scenario_damage
def scenario_damage(riskinputs, riskmodel, param, monitor): """ Core function for a damage computation. :param riskinputs: :class:`openquake.risklib.riskinput.RiskInput` objects :param riskmodel: a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance :param monitor: :class:`openquake.baselib.performance.Monitor` instance :param param: dictionary of extra parameters :returns: a dictionary {'d_asset': [(l, r, a, mean-stddev), ...], 'd_event': damage array of shape R, L, E, D, 'c_asset': [(l, r, a, mean-stddev), ...], 'c_event': damage array of shape R, L, E} `d_asset` and `d_tag` are related to the damage distributions whereas `c_asset` and `c_tag` are the consequence distributions. If there is no consequence model `c_asset` is an empty list and `c_tag` is a zero-valued array. """ L = len(riskmodel.loss_types) D = len(riskmodel.damage_states) E = param['number_of_ground_motion_fields'] R = riskinputs[0].hazard_getter.num_rlzs result = dict(d_asset=[], d_event=numpy.zeros((E, R, L, D), F64), c_asset=[], c_event=numpy.zeros((E, R, L), F64)) for ri in riskinputs: for out in riskmodel.gen_outputs(ri, monitor): r = out.rlzi for l, loss_type in enumerate(riskmodel.loss_types): for asset, fractions in zip(ri.assets, out[loss_type]): dmg = fractions[:, :D] * asset['number'] # shape (E, D) result['d_event'][:, r, l] += dmg result['d_asset'].append( (l, r, asset['ordinal'], scientific.mean_std(dmg))) if riskmodel.consequences: csq = fractions[:, D] * asset['value-' + loss_type] result['c_asset'].append( (l, r, asset['ordinal'], scientific.mean_std(csq))) result['c_event'][:, r, l] += csq return result
python
def scenario_damage(riskinputs, riskmodel, param, monitor): """ Core function for a damage computation. :param riskinputs: :class:`openquake.risklib.riskinput.RiskInput` objects :param riskmodel: a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance :param monitor: :class:`openquake.baselib.performance.Monitor` instance :param param: dictionary of extra parameters :returns: a dictionary {'d_asset': [(l, r, a, mean-stddev), ...], 'd_event': damage array of shape R, L, E, D, 'c_asset': [(l, r, a, mean-stddev), ...], 'c_event': damage array of shape R, L, E} `d_asset` and `d_tag` are related to the damage distributions whereas `c_asset` and `c_tag` are the consequence distributions. If there is no consequence model `c_asset` is an empty list and `c_tag` is a zero-valued array. """ L = len(riskmodel.loss_types) D = len(riskmodel.damage_states) E = param['number_of_ground_motion_fields'] R = riskinputs[0].hazard_getter.num_rlzs result = dict(d_asset=[], d_event=numpy.zeros((E, R, L, D), F64), c_asset=[], c_event=numpy.zeros((E, R, L), F64)) for ri in riskinputs: for out in riskmodel.gen_outputs(ri, monitor): r = out.rlzi for l, loss_type in enumerate(riskmodel.loss_types): for asset, fractions in zip(ri.assets, out[loss_type]): dmg = fractions[:, :D] * asset['number'] # shape (E, D) result['d_event'][:, r, l] += dmg result['d_asset'].append( (l, r, asset['ordinal'], scientific.mean_std(dmg))) if riskmodel.consequences: csq = fractions[:, D] * asset['value-' + loss_type] result['c_asset'].append( (l, r, asset['ordinal'], scientific.mean_std(csq))) result['c_event'][:, r, l] += csq return result
[ "def", "scenario_damage", "(", "riskinputs", ",", "riskmodel", ",", "param", ",", "monitor", ")", ":", "L", "=", "len", "(", "riskmodel", ".", "loss_types", ")", "D", "=", "len", "(", "riskmodel", ".", "damage_states", ")", "E", "=", "param", "[", "'number_of_ground_motion_fields'", "]", "R", "=", "riskinputs", "[", "0", "]", ".", "hazard_getter", ".", "num_rlzs", "result", "=", "dict", "(", "d_asset", "=", "[", "]", ",", "d_event", "=", "numpy", ".", "zeros", "(", "(", "E", ",", "R", ",", "L", ",", "D", ")", ",", "F64", ")", ",", "c_asset", "=", "[", "]", ",", "c_event", "=", "numpy", ".", "zeros", "(", "(", "E", ",", "R", ",", "L", ")", ",", "F64", ")", ")", "for", "ri", "in", "riskinputs", ":", "for", "out", "in", "riskmodel", ".", "gen_outputs", "(", "ri", ",", "monitor", ")", ":", "r", "=", "out", ".", "rlzi", "for", "l", ",", "loss_type", "in", "enumerate", "(", "riskmodel", ".", "loss_types", ")", ":", "for", "asset", ",", "fractions", "in", "zip", "(", "ri", ".", "assets", ",", "out", "[", "loss_type", "]", ")", ":", "dmg", "=", "fractions", "[", ":", ",", ":", "D", "]", "*", "asset", "[", "'number'", "]", "# shape (E, D)", "result", "[", "'d_event'", "]", "[", ":", ",", "r", ",", "l", "]", "+=", "dmg", "result", "[", "'d_asset'", "]", ".", "append", "(", "(", "l", ",", "r", ",", "asset", "[", "'ordinal'", "]", ",", "scientific", ".", "mean_std", "(", "dmg", ")", ")", ")", "if", "riskmodel", ".", "consequences", ":", "csq", "=", "fractions", "[", ":", ",", "D", "]", "*", "asset", "[", "'value-'", "+", "loss_type", "]", "result", "[", "'c_asset'", "]", ".", "append", "(", "(", "l", ",", "r", ",", "asset", "[", "'ordinal'", "]", ",", "scientific", ".", "mean_std", "(", "csq", ")", ")", ")", "result", "[", "'c_event'", "]", "[", ":", ",", "r", ",", "l", "]", "+=", "csq", "return", "result" ]
Core function for a damage computation. :param riskinputs: :class:`openquake.risklib.riskinput.RiskInput` objects :param riskmodel: a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance :param monitor: :class:`openquake.baselib.performance.Monitor` instance :param param: dictionary of extra parameters :returns: a dictionary {'d_asset': [(l, r, a, mean-stddev), ...], 'd_event': damage array of shape R, L, E, D, 'c_asset': [(l, r, a, mean-stddev), ...], 'c_event': damage array of shape R, L, E} `d_asset` and `d_tag` are related to the damage distributions whereas `c_asset` and `c_tag` are the consequence distributions. If there is no consequence model `c_asset` is an empty list and `c_tag` is a zero-valued array.
[ "Core", "function", "for", "a", "damage", "computation", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/scenario_damage.py#L28-L71
415
gem/oq-engine
openquake/calculators/views.py
form
def form(value): """ Format numbers in a nice way. >>> form(0) '0' >>> form(0.0) '0.0' >>> form(0.0001) '1.000E-04' >>> form(1003.4) '1,003' >>> form(103.4) '103' >>> form(9.3) '9.30000' >>> form(-1.2) '-1.2' """ if isinstance(value, FLOAT + INT): if value <= 0: return str(value) elif value < .001: return '%.3E' % value elif value < 10 and isinstance(value, FLOAT): return '%.5f' % value elif value > 1000: return '{:,d}'.format(int(round(value))) elif numpy.isnan(value): return 'NaN' else: # in the range 10-1000 return str(int(value)) elif isinstance(value, bytes): return decode(value) elif isinstance(value, str): return value elif isinstance(value, numpy.object_): return str(value) elif hasattr(value, '__len__') and len(value) > 1: return ' '.join(map(form, value)) return str(value)
python
def form(value): """ Format numbers in a nice way. >>> form(0) '0' >>> form(0.0) '0.0' >>> form(0.0001) '1.000E-04' >>> form(1003.4) '1,003' >>> form(103.4) '103' >>> form(9.3) '9.30000' >>> form(-1.2) '-1.2' """ if isinstance(value, FLOAT + INT): if value <= 0: return str(value) elif value < .001: return '%.3E' % value elif value < 10 and isinstance(value, FLOAT): return '%.5f' % value elif value > 1000: return '{:,d}'.format(int(round(value))) elif numpy.isnan(value): return 'NaN' else: # in the range 10-1000 return str(int(value)) elif isinstance(value, bytes): return decode(value) elif isinstance(value, str): return value elif isinstance(value, numpy.object_): return str(value) elif hasattr(value, '__len__') and len(value) > 1: return ' '.join(map(form, value)) return str(value)
[ "def", "form", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "FLOAT", "+", "INT", ")", ":", "if", "value", "<=", "0", ":", "return", "str", "(", "value", ")", "elif", "value", "<", ".001", ":", "return", "'%.3E'", "%", "value", "elif", "value", "<", "10", "and", "isinstance", "(", "value", ",", "FLOAT", ")", ":", "return", "'%.5f'", "%", "value", "elif", "value", ">", "1000", ":", "return", "'{:,d}'", ".", "format", "(", "int", "(", "round", "(", "value", ")", ")", ")", "elif", "numpy", ".", "isnan", "(", "value", ")", ":", "return", "'NaN'", "else", ":", "# in the range 10-1000", "return", "str", "(", "int", "(", "value", ")", ")", "elif", "isinstance", "(", "value", ",", "bytes", ")", ":", "return", "decode", "(", "value", ")", "elif", "isinstance", "(", "value", ",", "str", ")", ":", "return", "value", "elif", "isinstance", "(", "value", ",", "numpy", ".", "object_", ")", ":", "return", "str", "(", "value", ")", "elif", "hasattr", "(", "value", ",", "'__len__'", ")", "and", "len", "(", "value", ")", ">", "1", ":", "return", "' '", ".", "join", "(", "map", "(", "form", ",", "value", ")", ")", "return", "str", "(", "value", ")" ]
Format numbers in a nice way. >>> form(0) '0' >>> form(0.0) '0.0' >>> form(0.0001) '1.000E-04' >>> form(1003.4) '1,003' >>> form(103.4) '103' >>> form(9.3) '9.30000' >>> form(-1.2) '-1.2'
[ "Format", "numbers", "in", "a", "nice", "way", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L51-L91
416
gem/oq-engine
openquake/calculators/views.py
sum_tbl
def sum_tbl(tbl, kfield, vfields): """ Aggregate a composite array and compute the totals on a given key. >>> dt = numpy.dtype([('name', (bytes, 10)), ('value', int)]) >>> tbl = numpy.array([('a', 1), ('a', 2), ('b', 3)], dt) >>> sum_tbl(tbl, 'name', ['value'])['value'] array([3, 3]) """ pairs = [(n, tbl.dtype[n]) for n in [kfield] + vfields] dt = numpy.dtype(pairs + [('counts', int)]) def sum_all(group): vals = numpy.zeros(1, dt)[0] for rec in group: for vfield in vfields: vals[vfield] += rec[vfield] vals['counts'] += 1 vals[kfield] = rec[kfield] return vals rows = groupby(tbl, operator.itemgetter(kfield), sum_all).values() array = numpy.zeros(len(rows), dt) for i, row in enumerate(rows): for j, name in enumerate(dt.names): array[i][name] = row[j] return array
python
def sum_tbl(tbl, kfield, vfields): """ Aggregate a composite array and compute the totals on a given key. >>> dt = numpy.dtype([('name', (bytes, 10)), ('value', int)]) >>> tbl = numpy.array([('a', 1), ('a', 2), ('b', 3)], dt) >>> sum_tbl(tbl, 'name', ['value'])['value'] array([3, 3]) """ pairs = [(n, tbl.dtype[n]) for n in [kfield] + vfields] dt = numpy.dtype(pairs + [('counts', int)]) def sum_all(group): vals = numpy.zeros(1, dt)[0] for rec in group: for vfield in vfields: vals[vfield] += rec[vfield] vals['counts'] += 1 vals[kfield] = rec[kfield] return vals rows = groupby(tbl, operator.itemgetter(kfield), sum_all).values() array = numpy.zeros(len(rows), dt) for i, row in enumerate(rows): for j, name in enumerate(dt.names): array[i][name] = row[j] return array
[ "def", "sum_tbl", "(", "tbl", ",", "kfield", ",", "vfields", ")", ":", "pairs", "=", "[", "(", "n", ",", "tbl", ".", "dtype", "[", "n", "]", ")", "for", "n", "in", "[", "kfield", "]", "+", "vfields", "]", "dt", "=", "numpy", ".", "dtype", "(", "pairs", "+", "[", "(", "'counts'", ",", "int", ")", "]", ")", "def", "sum_all", "(", "group", ")", ":", "vals", "=", "numpy", ".", "zeros", "(", "1", ",", "dt", ")", "[", "0", "]", "for", "rec", "in", "group", ":", "for", "vfield", "in", "vfields", ":", "vals", "[", "vfield", "]", "+=", "rec", "[", "vfield", "]", "vals", "[", "'counts'", "]", "+=", "1", "vals", "[", "kfield", "]", "=", "rec", "[", "kfield", "]", "return", "vals", "rows", "=", "groupby", "(", "tbl", ",", "operator", ".", "itemgetter", "(", "kfield", ")", ",", "sum_all", ")", ".", "values", "(", ")", "array", "=", "numpy", ".", "zeros", "(", "len", "(", "rows", ")", ",", "dt", ")", "for", "i", ",", "row", "in", "enumerate", "(", "rows", ")", ":", "for", "j", ",", "name", "in", "enumerate", "(", "dt", ".", "names", ")", ":", "array", "[", "i", "]", "[", "name", "]", "=", "row", "[", "j", "]", "return", "array" ]
Aggregate a composite array and compute the totals on a given key. >>> dt = numpy.dtype([('name', (bytes, 10)), ('value', int)]) >>> tbl = numpy.array([('a', 1), ('a', 2), ('b', 3)], dt) >>> sum_tbl(tbl, 'name', ['value'])['value'] array([3, 3])
[ "Aggregate", "a", "composite", "array", "and", "compute", "the", "totals", "on", "a", "given", "key", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L145-L170
417
gem/oq-engine
openquake/calculators/views.py
view_slow_sources
def view_slow_sources(token, dstore, maxrows=20): """ Returns the slowest sources """ info = dstore['source_info'].value info.sort(order='calc_time') return rst_table(info[::-1][:maxrows])
python
def view_slow_sources(token, dstore, maxrows=20): """ Returns the slowest sources """ info = dstore['source_info'].value info.sort(order='calc_time') return rst_table(info[::-1][:maxrows])
[ "def", "view_slow_sources", "(", "token", ",", "dstore", ",", "maxrows", "=", "20", ")", ":", "info", "=", "dstore", "[", "'source_info'", "]", ".", "value", "info", ".", "sort", "(", "order", "=", "'calc_time'", ")", "return", "rst_table", "(", "info", "[", ":", ":", "-", "1", "]", "[", ":", "maxrows", "]", ")" ]
Returns the slowest sources
[ "Returns", "the", "slowest", "sources" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L183-L189
418
gem/oq-engine
openquake/calculators/views.py
view_contents
def view_contents(token, dstore): """ Returns the size of the contents of the datastore and its total size """ try: desc = dstore['oqparam'].description except KeyError: desc = '' data = sorted((dstore.getsize(key), key) for key in dstore) rows = [(key, humansize(nbytes)) for nbytes, key in data] total = '\n%s : %s' % ( dstore.filename, humansize(os.path.getsize(dstore.filename))) return rst_table(rows, header=(desc, '')) + total
python
def view_contents(token, dstore): """ Returns the size of the contents of the datastore and its total size """ try: desc = dstore['oqparam'].description except KeyError: desc = '' data = sorted((dstore.getsize(key), key) for key in dstore) rows = [(key, humansize(nbytes)) for nbytes, key in data] total = '\n%s : %s' % ( dstore.filename, humansize(os.path.getsize(dstore.filename))) return rst_table(rows, header=(desc, '')) + total
[ "def", "view_contents", "(", "token", ",", "dstore", ")", ":", "try", ":", "desc", "=", "dstore", "[", "'oqparam'", "]", ".", "description", "except", "KeyError", ":", "desc", "=", "''", "data", "=", "sorted", "(", "(", "dstore", ".", "getsize", "(", "key", ")", ",", "key", ")", "for", "key", "in", "dstore", ")", "rows", "=", "[", "(", "key", ",", "humansize", "(", "nbytes", ")", ")", "for", "nbytes", ",", "key", "in", "data", "]", "total", "=", "'\\n%s : %s'", "%", "(", "dstore", ".", "filename", ",", "humansize", "(", "os", ".", "path", ".", "getsize", "(", "dstore", ".", "filename", ")", ")", ")", "return", "rst_table", "(", "rows", ",", "header", "=", "(", "desc", ",", "''", ")", ")", "+", "total" ]
Returns the size of the contents of the datastore and its total size
[ "Returns", "the", "size", "of", "the", "contents", "of", "the", "datastore", "and", "its", "total", "size" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L193-L205
419
gem/oq-engine
openquake/calculators/views.py
view_job_info
def view_job_info(token, dstore): """ Determine the amount of data transferred from the controller node to the workers and back in a classical calculation. """ data = [['task', 'sent', 'received']] for task in dstore['task_info']: dset = dstore['task_info/' + task] if 'argnames' in dset.attrs: argnames = dset.attrs['argnames'].split() totsent = dset.attrs['sent'] sent = ['%s=%s' % (a, humansize(s)) for s, a in sorted(zip(totsent, argnames), reverse=True)] recv = dset['received'].sum() data.append((task, ' '.join(sent), humansize(recv))) return rst_table(data)
python
def view_job_info(token, dstore): """ Determine the amount of data transferred from the controller node to the workers and back in a classical calculation. """ data = [['task', 'sent', 'received']] for task in dstore['task_info']: dset = dstore['task_info/' + task] if 'argnames' in dset.attrs: argnames = dset.attrs['argnames'].split() totsent = dset.attrs['sent'] sent = ['%s=%s' % (a, humansize(s)) for s, a in sorted(zip(totsent, argnames), reverse=True)] recv = dset['received'].sum() data.append((task, ' '.join(sent), humansize(recv))) return rst_table(data)
[ "def", "view_job_info", "(", "token", ",", "dstore", ")", ":", "data", "=", "[", "[", "'task'", ",", "'sent'", ",", "'received'", "]", "]", "for", "task", "in", "dstore", "[", "'task_info'", "]", ":", "dset", "=", "dstore", "[", "'task_info/'", "+", "task", "]", "if", "'argnames'", "in", "dset", ".", "attrs", ":", "argnames", "=", "dset", ".", "attrs", "[", "'argnames'", "]", ".", "split", "(", ")", "totsent", "=", "dset", ".", "attrs", "[", "'sent'", "]", "sent", "=", "[", "'%s=%s'", "%", "(", "a", ",", "humansize", "(", "s", ")", ")", "for", "s", ",", "a", "in", "sorted", "(", "zip", "(", "totsent", ",", "argnames", ")", ",", "reverse", "=", "True", ")", "]", "recv", "=", "dset", "[", "'received'", "]", ".", "sum", "(", ")", "data", ".", "append", "(", "(", "task", ",", "' '", ".", "join", "(", "sent", ")", ",", "humansize", "(", "recv", ")", ")", ")", "return", "rst_table", "(", "data", ")" ]
Determine the amount of data transferred from the controller node to the workers and back in a classical calculation.
[ "Determine", "the", "amount", "of", "data", "transferred", "from", "the", "controller", "node", "to", "the", "workers", "and", "back", "in", "a", "classical", "calculation", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L309-L324
420
gem/oq-engine
openquake/calculators/views.py
avglosses_data_transfer
def avglosses_data_transfer(token, dstore): """ Determine the amount of average losses transferred from the workers to the controller node in a risk calculation. """ oq = dstore['oqparam'] N = len(dstore['assetcol']) R = dstore['csm_info'].get_num_rlzs() L = len(dstore.get_attr('risk_model', 'loss_types')) ct = oq.concurrent_tasks size_bytes = N * R * L * 8 * ct # 8 byte floats return ( '%d asset(s) x %d realization(s) x %d loss type(s) losses x ' '8 bytes x %d tasks = %s' % (N, R, L, ct, humansize(size_bytes)))
python
def avglosses_data_transfer(token, dstore): """ Determine the amount of average losses transferred from the workers to the controller node in a risk calculation. """ oq = dstore['oqparam'] N = len(dstore['assetcol']) R = dstore['csm_info'].get_num_rlzs() L = len(dstore.get_attr('risk_model', 'loss_types')) ct = oq.concurrent_tasks size_bytes = N * R * L * 8 * ct # 8 byte floats return ( '%d asset(s) x %d realization(s) x %d loss type(s) losses x ' '8 bytes x %d tasks = %s' % (N, R, L, ct, humansize(size_bytes)))
[ "def", "avglosses_data_transfer", "(", "token", ",", "dstore", ")", ":", "oq", "=", "dstore", "[", "'oqparam'", "]", "N", "=", "len", "(", "dstore", "[", "'assetcol'", "]", ")", "R", "=", "dstore", "[", "'csm_info'", "]", ".", "get_num_rlzs", "(", ")", "L", "=", "len", "(", "dstore", ".", "get_attr", "(", "'risk_model'", ",", "'loss_types'", ")", ")", "ct", "=", "oq", ".", "concurrent_tasks", "size_bytes", "=", "N", "*", "R", "*", "L", "*", "8", "*", "ct", "# 8 byte floats", "return", "(", "'%d asset(s) x %d realization(s) x %d loss type(s) losses x '", "'8 bytes x %d tasks = %s'", "%", "(", "N", ",", "R", ",", "L", ",", "ct", ",", "humansize", "(", "size_bytes", ")", ")", ")" ]
Determine the amount of average losses transferred from the workers to the controller node in a risk calculation.
[ "Determine", "the", "amount", "of", "average", "losses", "transferred", "from", "the", "workers", "to", "the", "controller", "node", "in", "a", "risk", "calculation", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L328-L341
421
gem/oq-engine
openquake/calculators/views.py
ebr_data_transfer
def ebr_data_transfer(token, dstore): """ Display the data transferred in an event based risk calculation """ attrs = dstore['losses_by_event'].attrs sent = humansize(attrs['sent']) received = humansize(attrs['tot_received']) return 'Event Based Risk: sent %s, received %s' % (sent, received)
python
def ebr_data_transfer(token, dstore): """ Display the data transferred in an event based risk calculation """ attrs = dstore['losses_by_event'].attrs sent = humansize(attrs['sent']) received = humansize(attrs['tot_received']) return 'Event Based Risk: sent %s, received %s' % (sent, received)
[ "def", "ebr_data_transfer", "(", "token", ",", "dstore", ")", ":", "attrs", "=", "dstore", "[", "'losses_by_event'", "]", ".", "attrs", "sent", "=", "humansize", "(", "attrs", "[", "'sent'", "]", ")", "received", "=", "humansize", "(", "attrs", "[", "'tot_received'", "]", ")", "return", "'Event Based Risk: sent %s, received %s'", "%", "(", "sent", ",", "received", ")" ]
Display the data transferred in an event based risk calculation
[ "Display", "the", "data", "transferred", "in", "an", "event", "based", "risk", "calculation" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L345-L352
422
gem/oq-engine
openquake/calculators/views.py
view_totlosses
def view_totlosses(token, dstore): """ This is a debugging view. You can use it to check that the total losses, i.e. the losses obtained by summing the average losses on all assets are indeed equal to the aggregate losses. This is a sanity check for the correctness of the implementation. """ oq = dstore['oqparam'] tot_losses = dstore['losses_by_asset']['mean'].sum(axis=0) return rst_table(tot_losses.view(oq.loss_dt()), fmt='%.6E')
python
def view_totlosses(token, dstore): """ This is a debugging view. You can use it to check that the total losses, i.e. the losses obtained by summing the average losses on all assets are indeed equal to the aggregate losses. This is a sanity check for the correctness of the implementation. """ oq = dstore['oqparam'] tot_losses = dstore['losses_by_asset']['mean'].sum(axis=0) return rst_table(tot_losses.view(oq.loss_dt()), fmt='%.6E')
[ "def", "view_totlosses", "(", "token", ",", "dstore", ")", ":", "oq", "=", "dstore", "[", "'oqparam'", "]", "tot_losses", "=", "dstore", "[", "'losses_by_asset'", "]", "[", "'mean'", "]", ".", "sum", "(", "axis", "=", "0", ")", "return", "rst_table", "(", "tot_losses", ".", "view", "(", "oq", ".", "loss_dt", "(", ")", ")", ",", "fmt", "=", "'%.6E'", ")" ]
This is a debugging view. You can use it to check that the total losses, i.e. the losses obtained by summing the average losses on all assets are indeed equal to the aggregate losses. This is a sanity check for the correctness of the implementation.
[ "This", "is", "a", "debugging", "view", ".", "You", "can", "use", "it", "to", "check", "that", "the", "total", "losses", "i", ".", "e", ".", "the", "losses", "obtained", "by", "summing", "the", "average", "losses", "on", "all", "assets", "are", "indeed", "equal", "to", "the", "aggregate", "losses", ".", "This", "is", "a", "sanity", "check", "for", "the", "correctness", "of", "the", "implementation", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L357-L366
423
gem/oq-engine
openquake/calculators/views.py
view_portfolio_losses
def view_portfolio_losses(token, dstore): """ The losses for the full portfolio, for each realization and loss type, extracted from the event loss table. """ oq = dstore['oqparam'] loss_dt = oq.loss_dt() data = portfolio_loss(dstore).view(loss_dt)[:, 0] rlzids = [str(r) for r in range(len(data))] array = util.compose_arrays(numpy.array(rlzids), data, 'rlz') # this is very sensitive to rounding errors, so I am using a low precision return rst_table(array, fmt='%.5E')
python
def view_portfolio_losses(token, dstore): """ The losses for the full portfolio, for each realization and loss type, extracted from the event loss table. """ oq = dstore['oqparam'] loss_dt = oq.loss_dt() data = portfolio_loss(dstore).view(loss_dt)[:, 0] rlzids = [str(r) for r in range(len(data))] array = util.compose_arrays(numpy.array(rlzids), data, 'rlz') # this is very sensitive to rounding errors, so I am using a low precision return rst_table(array, fmt='%.5E')
[ "def", "view_portfolio_losses", "(", "token", ",", "dstore", ")", ":", "oq", "=", "dstore", "[", "'oqparam'", "]", "loss_dt", "=", "oq", ".", "loss_dt", "(", ")", "data", "=", "portfolio_loss", "(", "dstore", ")", ".", "view", "(", "loss_dt", ")", "[", ":", ",", "0", "]", "rlzids", "=", "[", "str", "(", "r", ")", "for", "r", "in", "range", "(", "len", "(", "data", ")", ")", "]", "array", "=", "util", ".", "compose_arrays", "(", "numpy", ".", "array", "(", "rlzids", ")", ",", "data", ",", "'rlz'", ")", "# this is very sensitive to rounding errors, so I am using a low precision", "return", "rst_table", "(", "array", ",", "fmt", "=", "'%.5E'", ")" ]
The losses for the full portfolio, for each realization and loss type, extracted from the event loss table.
[ "The", "losses", "for", "the", "full", "portfolio", "for", "each", "realization", "and", "loss", "type", "extracted", "from", "the", "event", "loss", "table", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L382-L393
424
gem/oq-engine
openquake/calculators/views.py
view_portfolio_loss
def view_portfolio_loss(token, dstore): """ The mean and stddev loss for the full portfolio for each loss type, extracted from the event loss table, averaged over the realizations """ data = portfolio_loss(dstore) # shape (R, L) loss_types = list(dstore['oqparam'].loss_dt().names) header = ['portfolio_loss'] + loss_types mean = ['mean'] + [row.mean() for row in data.T] stddev = ['stddev'] + [row.std(ddof=1) for row in data.T] return rst_table([mean, stddev], header)
python
def view_portfolio_loss(token, dstore): """ The mean and stddev loss for the full portfolio for each loss type, extracted from the event loss table, averaged over the realizations """ data = portfolio_loss(dstore) # shape (R, L) loss_types = list(dstore['oqparam'].loss_dt().names) header = ['portfolio_loss'] + loss_types mean = ['mean'] + [row.mean() for row in data.T] stddev = ['stddev'] + [row.std(ddof=1) for row in data.T] return rst_table([mean, stddev], header)
[ "def", "view_portfolio_loss", "(", "token", ",", "dstore", ")", ":", "data", "=", "portfolio_loss", "(", "dstore", ")", "# shape (R, L)", "loss_types", "=", "list", "(", "dstore", "[", "'oqparam'", "]", ".", "loss_dt", "(", ")", ".", "names", ")", "header", "=", "[", "'portfolio_loss'", "]", "+", "loss_types", "mean", "=", "[", "'mean'", "]", "+", "[", "row", ".", "mean", "(", ")", "for", "row", "in", "data", ".", "T", "]", "stddev", "=", "[", "'stddev'", "]", "+", "[", "row", ".", "std", "(", "ddof", "=", "1", ")", "for", "row", "in", "data", ".", "T", "]", "return", "rst_table", "(", "[", "mean", ",", "stddev", "]", ",", "header", ")" ]
The mean and stddev loss for the full portfolio for each loss type, extracted from the event loss table, averaged over the realizations
[ "The", "mean", "and", "stddev", "loss", "for", "the", "full", "portfolio", "for", "each", "loss", "type", "extracted", "from", "the", "event", "loss", "table", "averaged", "over", "the", "realizations" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L397-L407
425
gem/oq-engine
openquake/calculators/views.py
view_exposure_info
def view_exposure_info(token, dstore): """ Display info about the exposure model """ assetcol = dstore['assetcol/array'][:] taxonomies = sorted(set(dstore['assetcol'].taxonomies)) cc = dstore['assetcol/cost_calculator'] ra_flag = ['relative', 'absolute'] data = [('#assets', len(assetcol)), ('#taxonomies', len(taxonomies)), ('deductibile', ra_flag[int(cc.deduct_abs)]), ('insurance_limit', ra_flag[int(cc.limit_abs)]), ] return rst_table(data) + '\n\n' + view_assets_by_site(token, dstore)
python
def view_exposure_info(token, dstore): """ Display info about the exposure model """ assetcol = dstore['assetcol/array'][:] taxonomies = sorted(set(dstore['assetcol'].taxonomies)) cc = dstore['assetcol/cost_calculator'] ra_flag = ['relative', 'absolute'] data = [('#assets', len(assetcol)), ('#taxonomies', len(taxonomies)), ('deductibile', ra_flag[int(cc.deduct_abs)]), ('insurance_limit', ra_flag[int(cc.limit_abs)]), ] return rst_table(data) + '\n\n' + view_assets_by_site(token, dstore)
[ "def", "view_exposure_info", "(", "token", ",", "dstore", ")", ":", "assetcol", "=", "dstore", "[", "'assetcol/array'", "]", "[", ":", "]", "taxonomies", "=", "sorted", "(", "set", "(", "dstore", "[", "'assetcol'", "]", ".", "taxonomies", ")", ")", "cc", "=", "dstore", "[", "'assetcol/cost_calculator'", "]", "ra_flag", "=", "[", "'relative'", ",", "'absolute'", "]", "data", "=", "[", "(", "'#assets'", ",", "len", "(", "assetcol", ")", ")", ",", "(", "'#taxonomies'", ",", "len", "(", "taxonomies", ")", ")", ",", "(", "'deductibile'", ",", "ra_flag", "[", "int", "(", "cc", ".", "deduct_abs", ")", "]", ")", ",", "(", "'insurance_limit'", ",", "ra_flag", "[", "int", "(", "cc", ".", "limit_abs", ")", "]", ")", ",", "]", "return", "rst_table", "(", "data", ")", "+", "'\\n\\n'", "+", "view_assets_by_site", "(", "token", ",", "dstore", ")" ]
Display info about the exposure model
[ "Display", "info", "about", "the", "exposure", "model" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L431-L444
426
gem/oq-engine
openquake/calculators/views.py
view_fullreport
def view_fullreport(token, dstore): """ Display an .rst report about the computation """ # avoid circular imports from openquake.calculators.reportwriter import ReportWriter return ReportWriter(dstore).make_report()
python
def view_fullreport(token, dstore): """ Display an .rst report about the computation """ # avoid circular imports from openquake.calculators.reportwriter import ReportWriter return ReportWriter(dstore).make_report()
[ "def", "view_fullreport", "(", "token", ",", "dstore", ")", ":", "# avoid circular imports", "from", "openquake", ".", "calculators", ".", "reportwriter", "import", "ReportWriter", "return", "ReportWriter", "(", "dstore", ")", ".", "make_report", "(", ")" ]
Display an .rst report about the computation
[ "Display", "an", ".", "rst", "report", "about", "the", "computation" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L461-L467
427
gem/oq-engine
openquake/calculators/views.py
performance_view
def performance_view(dstore): """ Returns the performance view as a numpy array. """ data = sorted(dstore['performance_data'], key=operator.itemgetter(0)) out = [] for operation, group in itertools.groupby(data, operator.itemgetter(0)): counts = 0 time = 0 mem = 0 for _operation, time_sec, memory_mb, counts_ in group: counts += counts_ time += time_sec mem = max(mem, memory_mb) out.append((operation, time, mem, counts)) out.sort(key=operator.itemgetter(1), reverse=True) # sort by time return numpy.array(out, perf_dt)
python
def performance_view(dstore): """ Returns the performance view as a numpy array. """ data = sorted(dstore['performance_data'], key=operator.itemgetter(0)) out = [] for operation, group in itertools.groupby(data, operator.itemgetter(0)): counts = 0 time = 0 mem = 0 for _operation, time_sec, memory_mb, counts_ in group: counts += counts_ time += time_sec mem = max(mem, memory_mb) out.append((operation, time, mem, counts)) out.sort(key=operator.itemgetter(1), reverse=True) # sort by time return numpy.array(out, perf_dt)
[ "def", "performance_view", "(", "dstore", ")", ":", "data", "=", "sorted", "(", "dstore", "[", "'performance_data'", "]", ",", "key", "=", "operator", ".", "itemgetter", "(", "0", ")", ")", "out", "=", "[", "]", "for", "operation", ",", "group", "in", "itertools", ".", "groupby", "(", "data", ",", "operator", ".", "itemgetter", "(", "0", ")", ")", ":", "counts", "=", "0", "time", "=", "0", "mem", "=", "0", "for", "_operation", ",", "time_sec", ",", "memory_mb", ",", "counts_", "in", "group", ":", "counts", "+=", "counts_", "time", "+=", "time_sec", "mem", "=", "max", "(", "mem", ",", "memory_mb", ")", "out", ".", "append", "(", "(", "operation", ",", "time", ",", "mem", ",", "counts", ")", ")", "out", ".", "sort", "(", "key", "=", "operator", ".", "itemgetter", "(", "1", ")", ",", "reverse", "=", "True", ")", "# sort by time", "return", "numpy", ".", "array", "(", "out", ",", "perf_dt", ")" ]
Returns the performance view as a numpy array.
[ "Returns", "the", "performance", "view", "as", "a", "numpy", "array", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L470-L486
428
gem/oq-engine
openquake/calculators/views.py
stats
def stats(name, array, *extras): """ Returns statistics from an array of numbers. :param name: a descriptive string :returns: (name, mean, std, min, max, len) """ std = numpy.nan if len(array) == 1 else numpy.std(array, ddof=1) return (name, numpy.mean(array), std, numpy.min(array), numpy.max(array), len(array)) + extras
python
def stats(name, array, *extras): """ Returns statistics from an array of numbers. :param name: a descriptive string :returns: (name, mean, std, min, max, len) """ std = numpy.nan if len(array) == 1 else numpy.std(array, ddof=1) return (name, numpy.mean(array), std, numpy.min(array), numpy.max(array), len(array)) + extras
[ "def", "stats", "(", "name", ",", "array", ",", "*", "extras", ")", ":", "std", "=", "numpy", ".", "nan", "if", "len", "(", "array", ")", "==", "1", "else", "numpy", ".", "std", "(", "array", ",", "ddof", "=", "1", ")", "return", "(", "name", ",", "numpy", ".", "mean", "(", "array", ")", ",", "std", ",", "numpy", ".", "min", "(", "array", ")", ",", "numpy", ".", "max", "(", "array", ")", ",", "len", "(", "array", ")", ")", "+", "extras" ]
Returns statistics from an array of numbers. :param name: a descriptive string :returns: (name, mean, std, min, max, len)
[ "Returns", "statistics", "from", "an", "array", "of", "numbers", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L497-L506
429
gem/oq-engine
openquake/calculators/views.py
view_num_units
def view_num_units(token, dstore): """ Display the number of units by taxonomy """ taxo = dstore['assetcol/tagcol/taxonomy'].value counts = collections.Counter() for asset in dstore['assetcol']: counts[taxo[asset['taxonomy']]] += asset['number'] data = sorted(counts.items()) data.append(('*ALL*', sum(d[1] for d in data))) return rst_table(data, header=['taxonomy', 'num_units'])
python
def view_num_units(token, dstore): """ Display the number of units by taxonomy """ taxo = dstore['assetcol/tagcol/taxonomy'].value counts = collections.Counter() for asset in dstore['assetcol']: counts[taxo[asset['taxonomy']]] += asset['number'] data = sorted(counts.items()) data.append(('*ALL*', sum(d[1] for d in data))) return rst_table(data, header=['taxonomy', 'num_units'])
[ "def", "view_num_units", "(", "token", ",", "dstore", ")", ":", "taxo", "=", "dstore", "[", "'assetcol/tagcol/taxonomy'", "]", ".", "value", "counts", "=", "collections", ".", "Counter", "(", ")", "for", "asset", "in", "dstore", "[", "'assetcol'", "]", ":", "counts", "[", "taxo", "[", "asset", "[", "'taxonomy'", "]", "]", "]", "+=", "asset", "[", "'number'", "]", "data", "=", "sorted", "(", "counts", ".", "items", "(", ")", ")", "data", ".", "append", "(", "(", "'*ALL*'", ",", "sum", "(", "d", "[", "1", "]", "for", "d", "in", "data", ")", ")", ")", "return", "rst_table", "(", "data", ",", "header", "=", "[", "'taxonomy'", ",", "'num_units'", "]", ")" ]
Display the number of units by taxonomy
[ "Display", "the", "number", "of", "units", "by", "taxonomy" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L510-L520
430
gem/oq-engine
openquake/calculators/views.py
view_assets_by_site
def view_assets_by_site(token, dstore): """ Display statistical information about the distribution of the assets """ taxonomies = dstore['assetcol/tagcol/taxonomy'].value assets_by_site = dstore['assetcol'].assets_by_site() data = ['taxonomy mean stddev min max num_sites num_assets'.split()] num_assets = AccumDict() for assets in assets_by_site: num_assets += {k: [len(v)] for k, v in group_array( assets, 'taxonomy').items()} for taxo in sorted(num_assets): val = numpy.array(num_assets[taxo]) data.append(stats(taxonomies[taxo], val, val.sum())) if len(num_assets) > 1: # more than one taxonomy, add a summary n_assets = numpy.array([len(assets) for assets in assets_by_site]) data.append(stats('*ALL*', n_assets, n_assets.sum())) return rst_table(data)
python
def view_assets_by_site(token, dstore): """ Display statistical information about the distribution of the assets """ taxonomies = dstore['assetcol/tagcol/taxonomy'].value assets_by_site = dstore['assetcol'].assets_by_site() data = ['taxonomy mean stddev min max num_sites num_assets'.split()] num_assets = AccumDict() for assets in assets_by_site: num_assets += {k: [len(v)] for k, v in group_array( assets, 'taxonomy').items()} for taxo in sorted(num_assets): val = numpy.array(num_assets[taxo]) data.append(stats(taxonomies[taxo], val, val.sum())) if len(num_assets) > 1: # more than one taxonomy, add a summary n_assets = numpy.array([len(assets) for assets in assets_by_site]) data.append(stats('*ALL*', n_assets, n_assets.sum())) return rst_table(data)
[ "def", "view_assets_by_site", "(", "token", ",", "dstore", ")", ":", "taxonomies", "=", "dstore", "[", "'assetcol/tagcol/taxonomy'", "]", ".", "value", "assets_by_site", "=", "dstore", "[", "'assetcol'", "]", ".", "assets_by_site", "(", ")", "data", "=", "[", "'taxonomy mean stddev min max num_sites num_assets'", ".", "split", "(", ")", "]", "num_assets", "=", "AccumDict", "(", ")", "for", "assets", "in", "assets_by_site", ":", "num_assets", "+=", "{", "k", ":", "[", "len", "(", "v", ")", "]", "for", "k", ",", "v", "in", "group_array", "(", "assets", ",", "'taxonomy'", ")", ".", "items", "(", ")", "}", "for", "taxo", "in", "sorted", "(", "num_assets", ")", ":", "val", "=", "numpy", ".", "array", "(", "num_assets", "[", "taxo", "]", ")", "data", ".", "append", "(", "stats", "(", "taxonomies", "[", "taxo", "]", ",", "val", ",", "val", ".", "sum", "(", ")", ")", ")", "if", "len", "(", "num_assets", ")", ">", "1", ":", "# more than one taxonomy, add a summary", "n_assets", "=", "numpy", ".", "array", "(", "[", "len", "(", "assets", ")", "for", "assets", "in", "assets_by_site", "]", ")", "data", ".", "append", "(", "stats", "(", "'*ALL*'", ",", "n_assets", ",", "n_assets", ".", "sum", "(", ")", ")", ")", "return", "rst_table", "(", "data", ")" ]
Display statistical information about the distribution of the assets
[ "Display", "statistical", "information", "about", "the", "distribution", "of", "the", "assets" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L524-L541
431
gem/oq-engine
openquake/calculators/views.py
view_required_params_per_trt
def view_required_params_per_trt(token, dstore): """ Display the parameters needed by each tectonic region type """ csm_info = dstore['csm_info'] tbl = [] for grp_id, trt in sorted(csm_info.grp_by("trt").items()): gsims = csm_info.gsim_lt.get_gsims(trt) maker = ContextMaker(trt, gsims) distances = sorted(maker.REQUIRES_DISTANCES) siteparams = sorted(maker.REQUIRES_SITES_PARAMETERS) ruptparams = sorted(maker.REQUIRES_RUPTURE_PARAMETERS) tbl.append((grp_id, ' '.join(map(repr, map(repr, gsims))), distances, siteparams, ruptparams)) return rst_table( tbl, header='grp_id gsims distances siteparams ruptparams'.split(), fmt=scientificformat)
python
def view_required_params_per_trt(token, dstore): """ Display the parameters needed by each tectonic region type """ csm_info = dstore['csm_info'] tbl = [] for grp_id, trt in sorted(csm_info.grp_by("trt").items()): gsims = csm_info.gsim_lt.get_gsims(trt) maker = ContextMaker(trt, gsims) distances = sorted(maker.REQUIRES_DISTANCES) siteparams = sorted(maker.REQUIRES_SITES_PARAMETERS) ruptparams = sorted(maker.REQUIRES_RUPTURE_PARAMETERS) tbl.append((grp_id, ' '.join(map(repr, map(repr, gsims))), distances, siteparams, ruptparams)) return rst_table( tbl, header='grp_id gsims distances siteparams ruptparams'.split(), fmt=scientificformat)
[ "def", "view_required_params_per_trt", "(", "token", ",", "dstore", ")", ":", "csm_info", "=", "dstore", "[", "'csm_info'", "]", "tbl", "=", "[", "]", "for", "grp_id", ",", "trt", "in", "sorted", "(", "csm_info", ".", "grp_by", "(", "\"trt\"", ")", ".", "items", "(", ")", ")", ":", "gsims", "=", "csm_info", ".", "gsim_lt", ".", "get_gsims", "(", "trt", ")", "maker", "=", "ContextMaker", "(", "trt", ",", "gsims", ")", "distances", "=", "sorted", "(", "maker", ".", "REQUIRES_DISTANCES", ")", "siteparams", "=", "sorted", "(", "maker", ".", "REQUIRES_SITES_PARAMETERS", ")", "ruptparams", "=", "sorted", "(", "maker", ".", "REQUIRES_RUPTURE_PARAMETERS", ")", "tbl", ".", "append", "(", "(", "grp_id", ",", "' '", ".", "join", "(", "map", "(", "repr", ",", "map", "(", "repr", ",", "gsims", ")", ")", ")", ",", "distances", ",", "siteparams", ",", "ruptparams", ")", ")", "return", "rst_table", "(", "tbl", ",", "header", "=", "'grp_id gsims distances siteparams ruptparams'", ".", "split", "(", ")", ",", "fmt", "=", "scientificformat", ")" ]
Display the parameters needed by each tectonic region type
[ "Display", "the", "parameters", "needed", "by", "each", "tectonic", "region", "type" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L545-L561
432
gem/oq-engine
openquake/calculators/views.py
view_global_hcurves
def view_global_hcurves(token, dstore): """ Display the global hazard curves for the calculation. They are used for debugging purposes when comparing the results of two calculations. They are the mean over the sites of the mean hazard curves. """ oq = dstore['oqparam'] nsites = len(dstore['sitecol']) rlzs_assoc = dstore['csm_info'].get_rlzs_assoc() mean = getters.PmapGetter(dstore, rlzs_assoc).get_mean() array = calc.convert_to_array(mean, nsites, oq.imtls) res = numpy.zeros(1, array.dtype) for name in array.dtype.names: res[name] = array[name].mean() return rst_table(res)
python
def view_global_hcurves(token, dstore): """ Display the global hazard curves for the calculation. They are used for debugging purposes when comparing the results of two calculations. They are the mean over the sites of the mean hazard curves. """ oq = dstore['oqparam'] nsites = len(dstore['sitecol']) rlzs_assoc = dstore['csm_info'].get_rlzs_assoc() mean = getters.PmapGetter(dstore, rlzs_assoc).get_mean() array = calc.convert_to_array(mean, nsites, oq.imtls) res = numpy.zeros(1, array.dtype) for name in array.dtype.names: res[name] = array[name].mean() return rst_table(res)
[ "def", "view_global_hcurves", "(", "token", ",", "dstore", ")", ":", "oq", "=", "dstore", "[", "'oqparam'", "]", "nsites", "=", "len", "(", "dstore", "[", "'sitecol'", "]", ")", "rlzs_assoc", "=", "dstore", "[", "'csm_info'", "]", ".", "get_rlzs_assoc", "(", ")", "mean", "=", "getters", ".", "PmapGetter", "(", "dstore", ",", "rlzs_assoc", ")", ".", "get_mean", "(", ")", "array", "=", "calc", ".", "convert_to_array", "(", "mean", ",", "nsites", ",", "oq", ".", "imtls", ")", "res", "=", "numpy", ".", "zeros", "(", "1", ",", "array", ".", "dtype", ")", "for", "name", "in", "array", ".", "dtype", ".", "names", ":", "res", "[", "name", "]", "=", "array", "[", "name", "]", ".", "mean", "(", ")", "return", "rst_table", "(", "res", ")" ]
Display the global hazard curves for the calculation. They are used for debugging purposes when comparing the results of two calculations. They are the mean over the sites of the mean hazard curves.
[ "Display", "the", "global", "hazard", "curves", "for", "the", "calculation", ".", "They", "are", "used", "for", "debugging", "purposes", "when", "comparing", "the", "results", "of", "two", "calculations", ".", "They", "are", "the", "mean", "over", "the", "sites", "of", "the", "mean", "hazard", "curves", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L671-L686
433
gem/oq-engine
openquake/calculators/views.py
view_dupl_sources_time
def view_dupl_sources_time(token, dstore): """ Display the time spent computing duplicated sources """ info = dstore['source_info'] items = sorted(group_array(info.value, 'source_id').items()) tbl = [] tot_time = 0 for source_id, records in items: if len(records) > 1: # dupl calc_time = records['calc_time'].sum() tot_time += calc_time + records['split_time'].sum() tbl.append((source_id, calc_time, len(records))) if tbl and info.attrs.get('has_dupl_sources'): tot = info['calc_time'].sum() + info['split_time'].sum() percent = tot_time / tot * 100 m = '\nTotal time in duplicated sources: %d/%d (%d%%)' % ( tot_time, tot, percent) return rst_table(tbl, ['source_id', 'calc_time', 'num_dupl']) + m else: return 'There are no duplicated sources'
python
def view_dupl_sources_time(token, dstore): """ Display the time spent computing duplicated sources """ info = dstore['source_info'] items = sorted(group_array(info.value, 'source_id').items()) tbl = [] tot_time = 0 for source_id, records in items: if len(records) > 1: # dupl calc_time = records['calc_time'].sum() tot_time += calc_time + records['split_time'].sum() tbl.append((source_id, calc_time, len(records))) if tbl and info.attrs.get('has_dupl_sources'): tot = info['calc_time'].sum() + info['split_time'].sum() percent = tot_time / tot * 100 m = '\nTotal time in duplicated sources: %d/%d (%d%%)' % ( tot_time, tot, percent) return rst_table(tbl, ['source_id', 'calc_time', 'num_dupl']) + m else: return 'There are no duplicated sources'
[ "def", "view_dupl_sources_time", "(", "token", ",", "dstore", ")", ":", "info", "=", "dstore", "[", "'source_info'", "]", "items", "=", "sorted", "(", "group_array", "(", "info", ".", "value", ",", "'source_id'", ")", ".", "items", "(", ")", ")", "tbl", "=", "[", "]", "tot_time", "=", "0", "for", "source_id", ",", "records", "in", "items", ":", "if", "len", "(", "records", ")", ">", "1", ":", "# dupl", "calc_time", "=", "records", "[", "'calc_time'", "]", ".", "sum", "(", ")", "tot_time", "+=", "calc_time", "+", "records", "[", "'split_time'", "]", ".", "sum", "(", ")", "tbl", ".", "append", "(", "(", "source_id", ",", "calc_time", ",", "len", "(", "records", ")", ")", ")", "if", "tbl", "and", "info", ".", "attrs", ".", "get", "(", "'has_dupl_sources'", ")", ":", "tot", "=", "info", "[", "'calc_time'", "]", ".", "sum", "(", ")", "+", "info", "[", "'split_time'", "]", ".", "sum", "(", ")", "percent", "=", "tot_time", "/", "tot", "*", "100", "m", "=", "'\\nTotal time in duplicated sources: %d/%d (%d%%)'", "%", "(", "tot_time", ",", "tot", ",", "percent", ")", "return", "rst_table", "(", "tbl", ",", "[", "'source_id'", ",", "'calc_time'", ",", "'num_dupl'", "]", ")", "+", "m", "else", ":", "return", "'There are no duplicated sources'" ]
Display the time spent computing duplicated sources
[ "Display", "the", "time", "spent", "computing", "duplicated", "sources" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L690-L710
434
gem/oq-engine
openquake/calculators/views.py
view_global_poes
def view_global_poes(token, dstore): """ Display global probabilities averaged on all sites and all GMPEs """ tbl = [] imtls = dstore['oqparam'].imtls header = ['grp_id'] + [str(poe) for poe in imtls.array] for grp in sorted(dstore['poes']): poes = dstore['poes/' + grp] nsites = len(poes) site_avg = sum(poes[sid].array for sid in poes) / nsites gsim_avg = site_avg.sum(axis=1) / poes.shape_z tbl.append([grp] + list(gsim_avg)) return rst_table(tbl, header=header)
python
def view_global_poes(token, dstore): """ Display global probabilities averaged on all sites and all GMPEs """ tbl = [] imtls = dstore['oqparam'].imtls header = ['grp_id'] + [str(poe) for poe in imtls.array] for grp in sorted(dstore['poes']): poes = dstore['poes/' + grp] nsites = len(poes) site_avg = sum(poes[sid].array for sid in poes) / nsites gsim_avg = site_avg.sum(axis=1) / poes.shape_z tbl.append([grp] + list(gsim_avg)) return rst_table(tbl, header=header)
[ "def", "view_global_poes", "(", "token", ",", "dstore", ")", ":", "tbl", "=", "[", "]", "imtls", "=", "dstore", "[", "'oqparam'", "]", ".", "imtls", "header", "=", "[", "'grp_id'", "]", "+", "[", "str", "(", "poe", ")", "for", "poe", "in", "imtls", ".", "array", "]", "for", "grp", "in", "sorted", "(", "dstore", "[", "'poes'", "]", ")", ":", "poes", "=", "dstore", "[", "'poes/'", "+", "grp", "]", "nsites", "=", "len", "(", "poes", ")", "site_avg", "=", "sum", "(", "poes", "[", "sid", "]", ".", "array", "for", "sid", "in", "poes", ")", "/", "nsites", "gsim_avg", "=", "site_avg", ".", "sum", "(", "axis", "=", "1", ")", "/", "poes", ".", "shape_z", "tbl", ".", "append", "(", "[", "grp", "]", "+", "list", "(", "gsim_avg", ")", ")", "return", "rst_table", "(", "tbl", ",", "header", "=", "header", ")" ]
Display global probabilities averaged on all sites and all GMPEs
[ "Display", "global", "probabilities", "averaged", "on", "all", "sites", "and", "all", "GMPEs" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L714-L727
435
gem/oq-engine
openquake/calculators/views.py
view_global_hmaps
def view_global_hmaps(token, dstore): """ Display the global hazard maps for the calculation. They are used for debugging purposes when comparing the results of two calculations. They are the mean over the sites of the mean hazard maps. """ oq = dstore['oqparam'] dt = numpy.dtype([('%s-%s' % (imt, poe), F32) for imt in oq.imtls for poe in oq.poes]) array = dstore['hmaps/mean'].value.view(dt)[:, 0] res = numpy.zeros(1, array.dtype) for name in array.dtype.names: res[name] = array[name].mean() return rst_table(res)
python
def view_global_hmaps(token, dstore): """ Display the global hazard maps for the calculation. They are used for debugging purposes when comparing the results of two calculations. They are the mean over the sites of the mean hazard maps. """ oq = dstore['oqparam'] dt = numpy.dtype([('%s-%s' % (imt, poe), F32) for imt in oq.imtls for poe in oq.poes]) array = dstore['hmaps/mean'].value.view(dt)[:, 0] res = numpy.zeros(1, array.dtype) for name in array.dtype.names: res[name] = array[name].mean() return rst_table(res)
[ "def", "view_global_hmaps", "(", "token", ",", "dstore", ")", ":", "oq", "=", "dstore", "[", "'oqparam'", "]", "dt", "=", "numpy", ".", "dtype", "(", "[", "(", "'%s-%s'", "%", "(", "imt", ",", "poe", ")", ",", "F32", ")", "for", "imt", "in", "oq", ".", "imtls", "for", "poe", "in", "oq", ".", "poes", "]", ")", "array", "=", "dstore", "[", "'hmaps/mean'", "]", ".", "value", ".", "view", "(", "dt", ")", "[", ":", ",", "0", "]", "res", "=", "numpy", ".", "zeros", "(", "1", ",", "array", ".", "dtype", ")", "for", "name", "in", "array", ".", "dtype", ".", "names", ":", "res", "[", "name", "]", "=", "array", "[", "name", "]", ".", "mean", "(", ")", "return", "rst_table", "(", "res", ")" ]
Display the global hazard maps for the calculation. They are used for debugging purposes when comparing the results of two calculations. They are the mean over the sites of the mean hazard maps.
[ "Display", "the", "global", "hazard", "maps", "for", "the", "calculation", ".", "They", "are", "used", "for", "debugging", "purposes", "when", "comparing", "the", "results", "of", "two", "calculations", ".", "They", "are", "the", "mean", "over", "the", "sites", "of", "the", "mean", "hazard", "maps", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L731-L745
436
gem/oq-engine
openquake/calculators/views.py
view_global_gmfs
def view_global_gmfs(token, dstore): """ Display GMFs averaged on everything for debugging purposes """ imtls = dstore['oqparam'].imtls row = dstore['gmf_data/data']['gmv'].mean(axis=0) return rst_table([row], header=imtls)
python
def view_global_gmfs(token, dstore): """ Display GMFs averaged on everything for debugging purposes """ imtls = dstore['oqparam'].imtls row = dstore['gmf_data/data']['gmv'].mean(axis=0) return rst_table([row], header=imtls)
[ "def", "view_global_gmfs", "(", "token", ",", "dstore", ")", ":", "imtls", "=", "dstore", "[", "'oqparam'", "]", ".", "imtls", "row", "=", "dstore", "[", "'gmf_data/data'", "]", "[", "'gmv'", "]", ".", "mean", "(", "axis", "=", "0", ")", "return", "rst_table", "(", "[", "row", "]", ",", "header", "=", "imtls", ")" ]
Display GMFs averaged on everything for debugging purposes
[ "Display", "GMFs", "averaged", "on", "everything", "for", "debugging", "purposes" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L749-L755
437
gem/oq-engine
openquake/calculators/views.py
view_mean_disagg
def view_mean_disagg(token, dstore): """ Display mean quantities for the disaggregation. Useful for checking differences between two calculations. """ tbl = [] for key, dset in sorted(dstore['disagg'].items()): vals = [ds.value.mean() for k, ds in sorted(dset.items())] tbl.append([key] + vals) header = ['key'] + sorted(dset) return rst_table(sorted(tbl), header=header)
python
def view_mean_disagg(token, dstore): """ Display mean quantities for the disaggregation. Useful for checking differences between two calculations. """ tbl = [] for key, dset in sorted(dstore['disagg'].items()): vals = [ds.value.mean() for k, ds in sorted(dset.items())] tbl.append([key] + vals) header = ['key'] + sorted(dset) return rst_table(sorted(tbl), header=header)
[ "def", "view_mean_disagg", "(", "token", ",", "dstore", ")", ":", "tbl", "=", "[", "]", "for", "key", ",", "dset", "in", "sorted", "(", "dstore", "[", "'disagg'", "]", ".", "items", "(", ")", ")", ":", "vals", "=", "[", "ds", ".", "value", ".", "mean", "(", ")", "for", "k", ",", "ds", "in", "sorted", "(", "dset", ".", "items", "(", ")", ")", "]", "tbl", ".", "append", "(", "[", "key", "]", "+", "vals", ")", "header", "=", "[", "'key'", "]", "+", "sorted", "(", "dset", ")", "return", "rst_table", "(", "sorted", "(", "tbl", ")", ",", "header", "=", "header", ")" ]
Display mean quantities for the disaggregation. Useful for checking differences between two calculations.
[ "Display", "mean", "quantities", "for", "the", "disaggregation", ".", "Useful", "for", "checking", "differences", "between", "two", "calculations", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L759-L769
438
gem/oq-engine
openquake/calculators/views.py
view_elt
def view_elt(token, dstore): """ Display the event loss table averaged by event """ oq = dstore['oqparam'] R = len(dstore['csm_info'].rlzs) dic = group_array(dstore['losses_by_event'].value, 'rlzi') header = oq.loss_dt().names tbl = [] for rlzi in range(R): if rlzi in dic: tbl.append(dic[rlzi]['loss'].mean(axis=0)) else: tbl.append([0.] * len(header)) return rst_table(tbl, header)
python
def view_elt(token, dstore): """ Display the event loss table averaged by event """ oq = dstore['oqparam'] R = len(dstore['csm_info'].rlzs) dic = group_array(dstore['losses_by_event'].value, 'rlzi') header = oq.loss_dt().names tbl = [] for rlzi in range(R): if rlzi in dic: tbl.append(dic[rlzi]['loss'].mean(axis=0)) else: tbl.append([0.] * len(header)) return rst_table(tbl, header)
[ "def", "view_elt", "(", "token", ",", "dstore", ")", ":", "oq", "=", "dstore", "[", "'oqparam'", "]", "R", "=", "len", "(", "dstore", "[", "'csm_info'", "]", ".", "rlzs", ")", "dic", "=", "group_array", "(", "dstore", "[", "'losses_by_event'", "]", ".", "value", ",", "'rlzi'", ")", "header", "=", "oq", ".", "loss_dt", "(", ")", ".", "names", "tbl", "=", "[", "]", "for", "rlzi", "in", "range", "(", "R", ")", ":", "if", "rlzi", "in", "dic", ":", "tbl", ".", "append", "(", "dic", "[", "rlzi", "]", "[", "'loss'", "]", ".", "mean", "(", "axis", "=", "0", ")", ")", "else", ":", "tbl", ".", "append", "(", "[", "0.", "]", "*", "len", "(", "header", ")", ")", "return", "rst_table", "(", "tbl", ",", "header", ")" ]
Display the event loss table averaged by event
[ "Display", "the", "event", "loss", "table", "averaged", "by", "event" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L773-L787
439
gem/oq-engine
openquake/calculators/views.py
view_pmap
def view_pmap(token, dstore): """ Display the mean ProbabilityMap associated to a given source group name """ grp = token.split(':')[1] # called as pmap:grp pmap = {} rlzs_assoc = dstore['csm_info'].get_rlzs_assoc() pgetter = getters.PmapGetter(dstore, rlzs_assoc) pmap = pgetter.get_mean(grp) return str(pmap)
python
def view_pmap(token, dstore): """ Display the mean ProbabilityMap associated to a given source group name """ grp = token.split(':')[1] # called as pmap:grp pmap = {} rlzs_assoc = dstore['csm_info'].get_rlzs_assoc() pgetter = getters.PmapGetter(dstore, rlzs_assoc) pmap = pgetter.get_mean(grp) return str(pmap)
[ "def", "view_pmap", "(", "token", ",", "dstore", ")", ":", "grp", "=", "token", ".", "split", "(", "':'", ")", "[", "1", "]", "# called as pmap:grp", "pmap", "=", "{", "}", "rlzs_assoc", "=", "dstore", "[", "'csm_info'", "]", ".", "get_rlzs_assoc", "(", ")", "pgetter", "=", "getters", ".", "PmapGetter", "(", "dstore", ",", "rlzs_assoc", ")", "pmap", "=", "pgetter", ".", "get_mean", "(", "grp", ")", "return", "str", "(", "pmap", ")" ]
Display the mean ProbabilityMap associated to a given source group name
[ "Display", "the", "mean", "ProbabilityMap", "associated", "to", "a", "given", "source", "group", "name" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L791-L800
440
gem/oq-engine
openquake/calculators/views.py
view_act_ruptures_by_src
def view_act_ruptures_by_src(token, dstore): """ Display the actual number of ruptures by source in event based calculations """ data = dstore['ruptures'].value[['srcidx', 'serial']] counts = sorted(countby(data, 'srcidx').items(), key=operator.itemgetter(1), reverse=True) src_info = dstore['source_info'].value[['grp_id', 'source_id']] table = [['src_id', 'grp_id', 'act_ruptures']] for srcidx, act_ruptures in counts: src = src_info[srcidx] table.append([src['source_id'], src['grp_id'], act_ruptures]) return rst_table(table)
python
def view_act_ruptures_by_src(token, dstore): """ Display the actual number of ruptures by source in event based calculations """ data = dstore['ruptures'].value[['srcidx', 'serial']] counts = sorted(countby(data, 'srcidx').items(), key=operator.itemgetter(1), reverse=True) src_info = dstore['source_info'].value[['grp_id', 'source_id']] table = [['src_id', 'grp_id', 'act_ruptures']] for srcidx, act_ruptures in counts: src = src_info[srcidx] table.append([src['source_id'], src['grp_id'], act_ruptures]) return rst_table(table)
[ "def", "view_act_ruptures_by_src", "(", "token", ",", "dstore", ")", ":", "data", "=", "dstore", "[", "'ruptures'", "]", ".", "value", "[", "[", "'srcidx'", ",", "'serial'", "]", "]", "counts", "=", "sorted", "(", "countby", "(", "data", ",", "'srcidx'", ")", ".", "items", "(", ")", ",", "key", "=", "operator", ".", "itemgetter", "(", "1", ")", ",", "reverse", "=", "True", ")", "src_info", "=", "dstore", "[", "'source_info'", "]", ".", "value", "[", "[", "'grp_id'", ",", "'source_id'", "]", "]", "table", "=", "[", "[", "'src_id'", ",", "'grp_id'", ",", "'act_ruptures'", "]", "]", "for", "srcidx", ",", "act_ruptures", "in", "counts", ":", "src", "=", "src_info", "[", "srcidx", "]", "table", ".", "append", "(", "[", "src", "[", "'source_id'", "]", ",", "src", "[", "'grp_id'", "]", ",", "act_ruptures", "]", ")", "return", "rst_table", "(", "table", ")" ]
Display the actual number of ruptures by source in event based calculations
[ "Display", "the", "actual", "number", "of", "ruptures", "by", "source", "in", "event", "based", "calculations" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L804-L816
441
gem/oq-engine
openquake/calculators/views.py
view_dupl_sources
def view_dupl_sources(token, dstore): """ Show the sources with the same ID and the truly duplicated sources """ fields = ['source_id', 'code', 'gidx1', 'gidx2', 'num_ruptures'] dic = group_array(dstore['source_info'].value[fields], 'source_id') sameid = [] dupl = [] for source_id, group in dic.items(): if len(group) > 1: # same ID sources sources = [] for rec in group: geom = dstore['source_geom'][rec['gidx1']:rec['gidx2']] src = Source(source_id, rec['code'], geom, rec['num_ruptures']) sources.append(src) if all_equal(sources): dupl.append(source_id) sameid.append(source_id) if not dupl: return '' msg = str(dupl) + '\n' msg += ('Found %d source(s) with the same ID and %d true duplicate(s)' % (len(sameid), len(dupl))) fakedupl = set(sameid) - set(dupl) if fakedupl: msg += '\nHere is a fake duplicate: %s' % fakedupl.pop() return msg
python
def view_dupl_sources(token, dstore): """ Show the sources with the same ID and the truly duplicated sources """ fields = ['source_id', 'code', 'gidx1', 'gidx2', 'num_ruptures'] dic = group_array(dstore['source_info'].value[fields], 'source_id') sameid = [] dupl = [] for source_id, group in dic.items(): if len(group) > 1: # same ID sources sources = [] for rec in group: geom = dstore['source_geom'][rec['gidx1']:rec['gidx2']] src = Source(source_id, rec['code'], geom, rec['num_ruptures']) sources.append(src) if all_equal(sources): dupl.append(source_id) sameid.append(source_id) if not dupl: return '' msg = str(dupl) + '\n' msg += ('Found %d source(s) with the same ID and %d true duplicate(s)' % (len(sameid), len(dupl))) fakedupl = set(sameid) - set(dupl) if fakedupl: msg += '\nHere is a fake duplicate: %s' % fakedupl.pop() return msg
[ "def", "view_dupl_sources", "(", "token", ",", "dstore", ")", ":", "fields", "=", "[", "'source_id'", ",", "'code'", ",", "'gidx1'", ",", "'gidx2'", ",", "'num_ruptures'", "]", "dic", "=", "group_array", "(", "dstore", "[", "'source_info'", "]", ".", "value", "[", "fields", "]", ",", "'source_id'", ")", "sameid", "=", "[", "]", "dupl", "=", "[", "]", "for", "source_id", ",", "group", "in", "dic", ".", "items", "(", ")", ":", "if", "len", "(", "group", ")", ">", "1", ":", "# same ID sources", "sources", "=", "[", "]", "for", "rec", "in", "group", ":", "geom", "=", "dstore", "[", "'source_geom'", "]", "[", "rec", "[", "'gidx1'", "]", ":", "rec", "[", "'gidx2'", "]", "]", "src", "=", "Source", "(", "source_id", ",", "rec", "[", "'code'", "]", ",", "geom", ",", "rec", "[", "'num_ruptures'", "]", ")", "sources", ".", "append", "(", "src", ")", "if", "all_equal", "(", "sources", ")", ":", "dupl", ".", "append", "(", "source_id", ")", "sameid", ".", "append", "(", "source_id", ")", "if", "not", "dupl", ":", "return", "''", "msg", "=", "str", "(", "dupl", ")", "+", "'\\n'", "msg", "+=", "(", "'Found %d source(s) with the same ID and %d true duplicate(s)'", "%", "(", "len", "(", "sameid", ")", ",", "len", "(", "dupl", ")", ")", ")", "fakedupl", "=", "set", "(", "sameid", ")", "-", "set", "(", "dupl", ")", "if", "fakedupl", ":", "msg", "+=", "'\\nHere is a fake duplicate: %s'", "%", "fakedupl", ".", "pop", "(", ")", "return", "msg" ]
Show the sources with the same ID and the truly duplicated sources
[ "Show", "the", "sources", "with", "the", "same", "ID", "and", "the", "truly", "duplicated", "sources" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L838-L864
442
gem/oq-engine
openquake/calculators/views.py
view_extreme_groups
def view_extreme_groups(token, dstore): """ Show the source groups contributing the most to the highest IML """ data = dstore['disagg_by_grp'].value data.sort(order='extreme_poe') return rst_table(data[::-1])
python
def view_extreme_groups(token, dstore): """ Show the source groups contributing the most to the highest IML """ data = dstore['disagg_by_grp'].value data.sort(order='extreme_poe') return rst_table(data[::-1])
[ "def", "view_extreme_groups", "(", "token", ",", "dstore", ")", ":", "data", "=", "dstore", "[", "'disagg_by_grp'", "]", ".", "value", "data", ".", "sort", "(", "order", "=", "'extreme_poe'", ")", "return", "rst_table", "(", "data", "[", ":", ":", "-", "1", "]", ")" ]
Show the source groups contributing the most to the highest IML
[ "Show", "the", "source", "groups", "contributing", "the", "most", "to", "the", "highest", "IML" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/views.py#L868-L874
443
gem/oq-engine
openquake/commonlib/oqzip.py
zip_all
def zip_all(directory): """ Zip source models and exposures recursively """ zips = [] for cwd, dirs, files in os.walk(directory): if 'ssmLT.xml' in files: zips.append(zip_source_model(os.path.join(cwd, 'ssmLT.xml'))) for f in files: if f.endswith('.xml') and 'exposure' in f.lower(): zips.append(zip_exposure(os.path.join(cwd, f))) total = sum(os.path.getsize(z) for z in zips) logging.info('Generated %s of zipped data', general.humansize(total))
python
def zip_all(directory): """ Zip source models and exposures recursively """ zips = [] for cwd, dirs, files in os.walk(directory): if 'ssmLT.xml' in files: zips.append(zip_source_model(os.path.join(cwd, 'ssmLT.xml'))) for f in files: if f.endswith('.xml') and 'exposure' in f.lower(): zips.append(zip_exposure(os.path.join(cwd, f))) total = sum(os.path.getsize(z) for z in zips) logging.info('Generated %s of zipped data', general.humansize(total))
[ "def", "zip_all", "(", "directory", ")", ":", "zips", "=", "[", "]", "for", "cwd", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "directory", ")", ":", "if", "'ssmLT.xml'", "in", "files", ":", "zips", ".", "append", "(", "zip_source_model", "(", "os", ".", "path", ".", "join", "(", "cwd", ",", "'ssmLT.xml'", ")", ")", ")", "for", "f", "in", "files", ":", "if", "f", ".", "endswith", "(", "'.xml'", ")", "and", "'exposure'", "in", "f", ".", "lower", "(", ")", ":", "zips", ".", "append", "(", "zip_exposure", "(", "os", ".", "path", ".", "join", "(", "cwd", ",", "f", ")", ")", ")", "total", "=", "sum", "(", "os", ".", "path", ".", "getsize", "(", "z", ")", "for", "z", "in", "zips", ")", "logging", ".", "info", "(", "'Generated %s of zipped data'", ",", "general", ".", "humansize", "(", "total", ")", ")" ]
Zip source models and exposures recursively
[ "Zip", "source", "models", "and", "exposures", "recursively" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqzip.py#L27-L39
444
gem/oq-engine
openquake/commonlib/oqzip.py
zip_source_model
def zip_source_model(ssmLT, archive_zip='', log=logging.info): """ Zip the source model files starting from the smmLT.xml file """ basedir = os.path.dirname(ssmLT) if os.path.basename(ssmLT) != 'ssmLT.xml': orig = ssmLT ssmLT = os.path.join(basedir, 'ssmLT.xml') with open(ssmLT, 'wb') as f: f.write(open(orig, 'rb').read()) archive_zip = archive_zip or os.path.join(basedir, 'ssmLT.zip') if os.path.exists(archive_zip): sys.exit('%s exists already' % archive_zip) oq = mock.Mock(inputs={'source_model_logic_tree': ssmLT}) checksum = readinput.get_checksum32(oq) checkfile = os.path.join(os.path.dirname(ssmLT), 'CHECKSUM.txt') with open(checkfile, 'w') as f: f.write(str(checksum)) files = logictree.collect_info(ssmLT).smpaths + [ os.path.abspath(ssmLT), os.path.abspath(checkfile)] general.zipfiles(files, archive_zip, log=log, cleanup=True) return archive_zip
python
def zip_source_model(ssmLT, archive_zip='', log=logging.info): """ Zip the source model files starting from the smmLT.xml file """ basedir = os.path.dirname(ssmLT) if os.path.basename(ssmLT) != 'ssmLT.xml': orig = ssmLT ssmLT = os.path.join(basedir, 'ssmLT.xml') with open(ssmLT, 'wb') as f: f.write(open(orig, 'rb').read()) archive_zip = archive_zip or os.path.join(basedir, 'ssmLT.zip') if os.path.exists(archive_zip): sys.exit('%s exists already' % archive_zip) oq = mock.Mock(inputs={'source_model_logic_tree': ssmLT}) checksum = readinput.get_checksum32(oq) checkfile = os.path.join(os.path.dirname(ssmLT), 'CHECKSUM.txt') with open(checkfile, 'w') as f: f.write(str(checksum)) files = logictree.collect_info(ssmLT).smpaths + [ os.path.abspath(ssmLT), os.path.abspath(checkfile)] general.zipfiles(files, archive_zip, log=log, cleanup=True) return archive_zip
[ "def", "zip_source_model", "(", "ssmLT", ",", "archive_zip", "=", "''", ",", "log", "=", "logging", ".", "info", ")", ":", "basedir", "=", "os", ".", "path", ".", "dirname", "(", "ssmLT", ")", "if", "os", ".", "path", ".", "basename", "(", "ssmLT", ")", "!=", "'ssmLT.xml'", ":", "orig", "=", "ssmLT", "ssmLT", "=", "os", ".", "path", ".", "join", "(", "basedir", ",", "'ssmLT.xml'", ")", "with", "open", "(", "ssmLT", ",", "'wb'", ")", "as", "f", ":", "f", ".", "write", "(", "open", "(", "orig", ",", "'rb'", ")", ".", "read", "(", ")", ")", "archive_zip", "=", "archive_zip", "or", "os", ".", "path", ".", "join", "(", "basedir", ",", "'ssmLT.zip'", ")", "if", "os", ".", "path", ".", "exists", "(", "archive_zip", ")", ":", "sys", ".", "exit", "(", "'%s exists already'", "%", "archive_zip", ")", "oq", "=", "mock", ".", "Mock", "(", "inputs", "=", "{", "'source_model_logic_tree'", ":", "ssmLT", "}", ")", "checksum", "=", "readinput", ".", "get_checksum32", "(", "oq", ")", "checkfile", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "ssmLT", ")", ",", "'CHECKSUM.txt'", ")", "with", "open", "(", "checkfile", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "str", "(", "checksum", ")", ")", "files", "=", "logictree", ".", "collect_info", "(", "ssmLT", ")", ".", "smpaths", "+", "[", "os", ".", "path", ".", "abspath", "(", "ssmLT", ")", ",", "os", ".", "path", ".", "abspath", "(", "checkfile", ")", "]", "general", ".", "zipfiles", "(", "files", ",", "archive_zip", ",", "log", "=", "log", ",", "cleanup", "=", "True", ")", "return", "archive_zip" ]
Zip the source model files starting from the smmLT.xml file
[ "Zip", "the", "source", "model", "files", "starting", "from", "the", "smmLT", ".", "xml", "file" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqzip.py#L42-L64
445
gem/oq-engine
openquake/commonlib/oqzip.py
zip_job
def zip_job(job_ini, archive_zip='', risk_ini='', oq=None, log=logging.info): """ Zip the given job.ini file into the given archive, together with all related files. """ if not os.path.exists(job_ini): sys.exit('%s does not exist' % job_ini) archive_zip = archive_zip or 'job.zip' if isinstance(archive_zip, str): # actually it should be path-like if not archive_zip.endswith('.zip'): sys.exit('%s does not end with .zip' % archive_zip) if os.path.exists(archive_zip): sys.exit('%s exists already' % archive_zip) # do not validate to avoid permissions error on the export_dir oq = oq or readinput.get_oqparam(job_ini, validate=False) if risk_ini: risk_ini = os.path.normpath(os.path.abspath(risk_ini)) risk_inputs = readinput.get_params([risk_ini])['inputs'] del risk_inputs['job_ini'] oq.inputs.update(risk_inputs) files = readinput.get_input_files(oq) if risk_ini: files = [risk_ini] + files return general.zipfiles(files, archive_zip, log=log)
python
def zip_job(job_ini, archive_zip='', risk_ini='', oq=None, log=logging.info): """ Zip the given job.ini file into the given archive, together with all related files. """ if not os.path.exists(job_ini): sys.exit('%s does not exist' % job_ini) archive_zip = archive_zip or 'job.zip' if isinstance(archive_zip, str): # actually it should be path-like if not archive_zip.endswith('.zip'): sys.exit('%s does not end with .zip' % archive_zip) if os.path.exists(archive_zip): sys.exit('%s exists already' % archive_zip) # do not validate to avoid permissions error on the export_dir oq = oq or readinput.get_oqparam(job_ini, validate=False) if risk_ini: risk_ini = os.path.normpath(os.path.abspath(risk_ini)) risk_inputs = readinput.get_params([risk_ini])['inputs'] del risk_inputs['job_ini'] oq.inputs.update(risk_inputs) files = readinput.get_input_files(oq) if risk_ini: files = [risk_ini] + files return general.zipfiles(files, archive_zip, log=log)
[ "def", "zip_job", "(", "job_ini", ",", "archive_zip", "=", "''", ",", "risk_ini", "=", "''", ",", "oq", "=", "None", ",", "log", "=", "logging", ".", "info", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "job_ini", ")", ":", "sys", ".", "exit", "(", "'%s does not exist'", "%", "job_ini", ")", "archive_zip", "=", "archive_zip", "or", "'job.zip'", "if", "isinstance", "(", "archive_zip", ",", "str", ")", ":", "# actually it should be path-like", "if", "not", "archive_zip", ".", "endswith", "(", "'.zip'", ")", ":", "sys", ".", "exit", "(", "'%s does not end with .zip'", "%", "archive_zip", ")", "if", "os", ".", "path", ".", "exists", "(", "archive_zip", ")", ":", "sys", ".", "exit", "(", "'%s exists already'", "%", "archive_zip", ")", "# do not validate to avoid permissions error on the export_dir", "oq", "=", "oq", "or", "readinput", ".", "get_oqparam", "(", "job_ini", ",", "validate", "=", "False", ")", "if", "risk_ini", ":", "risk_ini", "=", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "abspath", "(", "risk_ini", ")", ")", "risk_inputs", "=", "readinput", ".", "get_params", "(", "[", "risk_ini", "]", ")", "[", "'inputs'", "]", "del", "risk_inputs", "[", "'job_ini'", "]", "oq", ".", "inputs", ".", "update", "(", "risk_inputs", ")", "files", "=", "readinput", ".", "get_input_files", "(", "oq", ")", "if", "risk_ini", ":", "files", "=", "[", "risk_ini", "]", "+", "files", "return", "general", ".", "zipfiles", "(", "files", ",", "archive_zip", ",", "log", "=", "log", ")" ]
Zip the given job.ini file into the given archive, together with all related files.
[ "Zip", "the", "given", "job", ".", "ini", "file", "into", "the", "given", "archive", "together", "with", "all", "related", "files", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqzip.py#L80-L103
446
gem/oq-engine
openquake/calculators/reportwriter.py
build_report
def build_report(job_ini, output_dir=None): """ Write a `report.csv` file with information about the calculation without running it :param job_ini: full pathname of the job.ini file :param output_dir: the directory where the report is written (default the input directory) """ calc_id = logs.init() oq = readinput.get_oqparam(job_ini) if oq.calculation_mode == 'classical': oq.calculation_mode = 'preclassical' oq.ground_motion_fields = False output_dir = output_dir or os.path.dirname(job_ini) from openquake.calculators import base # ugly calc = base.calculators(oq, calc_id) calc.save_params() # needed to save oqparam # some taken is care so that the real calculation is not run: # the goal is to extract information about the source management only calc.pre_execute() if oq.calculation_mode == 'preclassical': calc.execute() rw = ReportWriter(calc.datastore) rw.make_report() report = (os.path.join(output_dir, 'report.rst') if output_dir else calc.datastore.export_path('report.rst')) try: rw.save(report) except IOError as exc: # permission error sys.stderr.write(str(exc) + '\n') readinput.exposure = None # ugly hack return report
python
def build_report(job_ini, output_dir=None): """ Write a `report.csv` file with information about the calculation without running it :param job_ini: full pathname of the job.ini file :param output_dir: the directory where the report is written (default the input directory) """ calc_id = logs.init() oq = readinput.get_oqparam(job_ini) if oq.calculation_mode == 'classical': oq.calculation_mode = 'preclassical' oq.ground_motion_fields = False output_dir = output_dir or os.path.dirname(job_ini) from openquake.calculators import base # ugly calc = base.calculators(oq, calc_id) calc.save_params() # needed to save oqparam # some taken is care so that the real calculation is not run: # the goal is to extract information about the source management only calc.pre_execute() if oq.calculation_mode == 'preclassical': calc.execute() rw = ReportWriter(calc.datastore) rw.make_report() report = (os.path.join(output_dir, 'report.rst') if output_dir else calc.datastore.export_path('report.rst')) try: rw.save(report) except IOError as exc: # permission error sys.stderr.write(str(exc) + '\n') readinput.exposure = None # ugly hack return report
[ "def", "build_report", "(", "job_ini", ",", "output_dir", "=", "None", ")", ":", "calc_id", "=", "logs", ".", "init", "(", ")", "oq", "=", "readinput", ".", "get_oqparam", "(", "job_ini", ")", "if", "oq", ".", "calculation_mode", "==", "'classical'", ":", "oq", ".", "calculation_mode", "=", "'preclassical'", "oq", ".", "ground_motion_fields", "=", "False", "output_dir", "=", "output_dir", "or", "os", ".", "path", ".", "dirname", "(", "job_ini", ")", "from", "openquake", ".", "calculators", "import", "base", "# ugly", "calc", "=", "base", ".", "calculators", "(", "oq", ",", "calc_id", ")", "calc", ".", "save_params", "(", ")", "# needed to save oqparam", "# some taken is care so that the real calculation is not run:", "# the goal is to extract information about the source management only", "calc", ".", "pre_execute", "(", ")", "if", "oq", ".", "calculation_mode", "==", "'preclassical'", ":", "calc", ".", "execute", "(", ")", "rw", "=", "ReportWriter", "(", "calc", ".", "datastore", ")", "rw", ".", "make_report", "(", ")", "report", "=", "(", "os", ".", "path", ".", "join", "(", "output_dir", ",", "'report.rst'", ")", "if", "output_dir", "else", "calc", ".", "datastore", ".", "export_path", "(", "'report.rst'", ")", ")", "try", ":", "rw", ".", "save", "(", "report", ")", "except", "IOError", "as", "exc", ":", "# permission error", "sys", ".", "stderr", ".", "write", "(", "str", "(", "exc", ")", "+", "'\\n'", ")", "readinput", ".", "exposure", "=", "None", "# ugly hack", "return", "report" ]
Write a `report.csv` file with information about the calculation without running it :param job_ini: full pathname of the job.ini file :param output_dir: the directory where the report is written (default the input directory)
[ "Write", "a", "report", ".", "csv", "file", "with", "information", "about", "the", "calculation", "without", "running", "it" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/reportwriter.py#L125-L159
447
gem/oq-engine
openquake/calculators/reportwriter.py
ReportWriter.add
def add(self, name, obj=None): """Add the view named `name` to the report text""" if obj: text = '\n::\n\n' + indent(str(obj)) else: text = views.view(name, self.dstore) if text: title = self.title[name] line = '-' * len(title) self.text += '\n'.join(['\n\n' + title, line, text])
python
def add(self, name, obj=None): """Add the view named `name` to the report text""" if obj: text = '\n::\n\n' + indent(str(obj)) else: text = views.view(name, self.dstore) if text: title = self.title[name] line = '-' * len(title) self.text += '\n'.join(['\n\n' + title, line, text])
[ "def", "add", "(", "self", ",", "name", ",", "obj", "=", "None", ")", ":", "if", "obj", ":", "text", "=", "'\\n::\\n\\n'", "+", "indent", "(", "str", "(", "obj", ")", ")", "else", ":", "text", "=", "views", ".", "view", "(", "name", ",", "self", ".", "dstore", ")", "if", "text", ":", "title", "=", "self", ".", "title", "[", "name", "]", "line", "=", "'-'", "*", "len", "(", "title", ")", "self", ".", "text", "+=", "'\\n'", ".", "join", "(", "[", "'\\n\\n'", "+", "title", ",", "line", ",", "text", "]", ")" ]
Add the view named `name` to the report text
[ "Add", "the", "view", "named", "name", "to", "the", "report", "text" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/reportwriter.py#L74-L83
448
gem/oq-engine
openquake/calculators/reportwriter.py
ReportWriter.make_report
def make_report(self): """Build the report and return a restructed text string""" oq, ds = self.oq, self.dstore for name in ('params', 'inputs'): self.add(name) if 'csm_info' in ds: self.add('csm_info') if ds['csm_info'].source_models[0].name != 'scenario': # required_params_per_trt makes no sense for GMFs from file self.add('required_params_per_trt') self.add('rlzs_assoc', ds['csm_info'].get_rlzs_assoc()) if 'csm_info' in ds: self.add('ruptures_per_trt') if 'rup_data' in ds: self.add('ruptures_events') if oq.calculation_mode in ('event_based_risk',): self.add('avglosses_data_transfer') if 'exposure' in oq.inputs: self.add('exposure_info') if 'source_info' in ds: self.add('slow_sources') self.add('times_by_source_class') self.add('dupl_sources') if 'task_info' in ds: self.add('task_info') tasks = set(ds['task_info']) if 'classical' in tasks: self.add('task_hazard:0') self.add('task_hazard:-1') self.add('job_info') if 'performance_data' in ds: self.add('performance') return self.text
python
def make_report(self): """Build the report and return a restructed text string""" oq, ds = self.oq, self.dstore for name in ('params', 'inputs'): self.add(name) if 'csm_info' in ds: self.add('csm_info') if ds['csm_info'].source_models[0].name != 'scenario': # required_params_per_trt makes no sense for GMFs from file self.add('required_params_per_trt') self.add('rlzs_assoc', ds['csm_info'].get_rlzs_assoc()) if 'csm_info' in ds: self.add('ruptures_per_trt') if 'rup_data' in ds: self.add('ruptures_events') if oq.calculation_mode in ('event_based_risk',): self.add('avglosses_data_transfer') if 'exposure' in oq.inputs: self.add('exposure_info') if 'source_info' in ds: self.add('slow_sources') self.add('times_by_source_class') self.add('dupl_sources') if 'task_info' in ds: self.add('task_info') tasks = set(ds['task_info']) if 'classical' in tasks: self.add('task_hazard:0') self.add('task_hazard:-1') self.add('job_info') if 'performance_data' in ds: self.add('performance') return self.text
[ "def", "make_report", "(", "self", ")", ":", "oq", ",", "ds", "=", "self", ".", "oq", ",", "self", ".", "dstore", "for", "name", "in", "(", "'params'", ",", "'inputs'", ")", ":", "self", ".", "add", "(", "name", ")", "if", "'csm_info'", "in", "ds", ":", "self", ".", "add", "(", "'csm_info'", ")", "if", "ds", "[", "'csm_info'", "]", ".", "source_models", "[", "0", "]", ".", "name", "!=", "'scenario'", ":", "# required_params_per_trt makes no sense for GMFs from file", "self", ".", "add", "(", "'required_params_per_trt'", ")", "self", ".", "add", "(", "'rlzs_assoc'", ",", "ds", "[", "'csm_info'", "]", ".", "get_rlzs_assoc", "(", ")", ")", "if", "'csm_info'", "in", "ds", ":", "self", ".", "add", "(", "'ruptures_per_trt'", ")", "if", "'rup_data'", "in", "ds", ":", "self", ".", "add", "(", "'ruptures_events'", ")", "if", "oq", ".", "calculation_mode", "in", "(", "'event_based_risk'", ",", ")", ":", "self", ".", "add", "(", "'avglosses_data_transfer'", ")", "if", "'exposure'", "in", "oq", ".", "inputs", ":", "self", ".", "add", "(", "'exposure_info'", ")", "if", "'source_info'", "in", "ds", ":", "self", ".", "add", "(", "'slow_sources'", ")", "self", ".", "add", "(", "'times_by_source_class'", ")", "self", ".", "add", "(", "'dupl_sources'", ")", "if", "'task_info'", "in", "ds", ":", "self", ".", "add", "(", "'task_info'", ")", "tasks", "=", "set", "(", "ds", "[", "'task_info'", "]", ")", "if", "'classical'", "in", "tasks", ":", "self", ".", "add", "(", "'task_hazard:0'", ")", "self", ".", "add", "(", "'task_hazard:-1'", ")", "self", ".", "add", "(", "'job_info'", ")", "if", "'performance_data'", "in", "ds", ":", "self", ".", "add", "(", "'performance'", ")", "return", "self", ".", "text" ]
Build the report and return a restructed text string
[ "Build", "the", "report", "and", "return", "a", "restructed", "text", "string" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/reportwriter.py#L85-L117
449
gem/oq-engine
openquake/calculators/reportwriter.py
ReportWriter.save
def save(self, fname): """Save the report""" with open(fname, 'wb') as f: f.write(encode(self.text))
python
def save(self, fname): """Save the report""" with open(fname, 'wb') as f: f.write(encode(self.text))
[ "def", "save", "(", "self", ",", "fname", ")", ":", "with", "open", "(", "fname", ",", "'wb'", ")", "as", "f", ":", "f", ".", "write", "(", "encode", "(", "self", ".", "text", ")", ")" ]
Save the report
[ "Save", "the", "report" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/reportwriter.py#L119-L122
450
gem/oq-engine
openquake/hazardlib/gsim/frankel_1996.py
FrankelEtAl1996MblgAB1987NSHMP2008._compute_mean
def _compute_mean(self, imt, mag, rhypo): """ Compute mean value from lookup table. Lookup table defines log10(IMT) (in g) for combinations of Mw and log10(rhypo) values. ``mag`` is therefore converted from Mblg to Mw using Atkinson and Boore 1987 conversion equation. Mean value is finally converted from base 10 to base e. """ mag = np.zeros_like(rhypo) + self._convert_magnitude(mag) # to avoid run time warning in case rhypo is zero set minimum distance # to 10, which is anyhow the minimum distance allowed by the tables rhypo[rhypo < 10] = 10 rhypo = np.log10(rhypo) # create lookup table and interpolate it at magnitude/distance values table = RectBivariateSpline( self.MAGS, self.DISTS, self.IMTS_TABLES[imt].T ) mean = table.ev(mag, rhypo) # convert mean from base 10 to base e return mean * np.log(10)
python
def _compute_mean(self, imt, mag, rhypo): """ Compute mean value from lookup table. Lookup table defines log10(IMT) (in g) for combinations of Mw and log10(rhypo) values. ``mag`` is therefore converted from Mblg to Mw using Atkinson and Boore 1987 conversion equation. Mean value is finally converted from base 10 to base e. """ mag = np.zeros_like(rhypo) + self._convert_magnitude(mag) # to avoid run time warning in case rhypo is zero set minimum distance # to 10, which is anyhow the minimum distance allowed by the tables rhypo[rhypo < 10] = 10 rhypo = np.log10(rhypo) # create lookup table and interpolate it at magnitude/distance values table = RectBivariateSpline( self.MAGS, self.DISTS, self.IMTS_TABLES[imt].T ) mean = table.ev(mag, rhypo) # convert mean from base 10 to base e return mean * np.log(10)
[ "def", "_compute_mean", "(", "self", ",", "imt", ",", "mag", ",", "rhypo", ")", ":", "mag", "=", "np", ".", "zeros_like", "(", "rhypo", ")", "+", "self", ".", "_convert_magnitude", "(", "mag", ")", "# to avoid run time warning in case rhypo is zero set minimum distance", "# to 10, which is anyhow the minimum distance allowed by the tables", "rhypo", "[", "rhypo", "<", "10", "]", "=", "10", "rhypo", "=", "np", ".", "log10", "(", "rhypo", ")", "# create lookup table and interpolate it at magnitude/distance values", "table", "=", "RectBivariateSpline", "(", "self", ".", "MAGS", ",", "self", ".", "DISTS", ",", "self", ".", "IMTS_TABLES", "[", "imt", "]", ".", "T", ")", "mean", "=", "table", ".", "ev", "(", "mag", ",", "rhypo", ")", "# convert mean from base 10 to base e", "return", "mean", "*", "np", ".", "log", "(", "10", ")" ]
Compute mean value from lookup table. Lookup table defines log10(IMT) (in g) for combinations of Mw and log10(rhypo) values. ``mag`` is therefore converted from Mblg to Mw using Atkinson and Boore 1987 conversion equation. Mean value is finally converted from base 10 to base e.
[ "Compute", "mean", "value", "from", "lookup", "table", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/frankel_1996.py#L129-L152
451
gem/oq-engine
openquake/hmtk/plotting/seismicity/occurrence/recurrence_plot.py
_get_recurrence_model
def _get_recurrence_model(input_model): """ Returns the annual and cumulative recurrence rates predicted by the recurrence model """ if not isinstance(input_model, (TruncatedGRMFD, EvenlyDiscretizedMFD, YoungsCoppersmith1985MFD)): raise ValueError('Recurrence model not recognised') # Get model annual occurrence rates annual_rates = input_model.get_annual_occurrence_rates() annual_rates = np.array([[val[0], val[1]] for val in annual_rates]) # Get cumulative rates cumulative_rates = np.array([np.sum(annual_rates[iloc:, 1]) for iloc in range(0, len(annual_rates), 1)]) return annual_rates, cumulative_rates
python
def _get_recurrence_model(input_model): """ Returns the annual and cumulative recurrence rates predicted by the recurrence model """ if not isinstance(input_model, (TruncatedGRMFD, EvenlyDiscretizedMFD, YoungsCoppersmith1985MFD)): raise ValueError('Recurrence model not recognised') # Get model annual occurrence rates annual_rates = input_model.get_annual_occurrence_rates() annual_rates = np.array([[val[0], val[1]] for val in annual_rates]) # Get cumulative rates cumulative_rates = np.array([np.sum(annual_rates[iloc:, 1]) for iloc in range(0, len(annual_rates), 1)]) return annual_rates, cumulative_rates
[ "def", "_get_recurrence_model", "(", "input_model", ")", ":", "if", "not", "isinstance", "(", "input_model", ",", "(", "TruncatedGRMFD", ",", "EvenlyDiscretizedMFD", ",", "YoungsCoppersmith1985MFD", ")", ")", ":", "raise", "ValueError", "(", "'Recurrence model not recognised'", ")", "# Get model annual occurrence rates", "annual_rates", "=", "input_model", ".", "get_annual_occurrence_rates", "(", ")", "annual_rates", "=", "np", ".", "array", "(", "[", "[", "val", "[", "0", "]", ",", "val", "[", "1", "]", "]", "for", "val", "in", "annual_rates", "]", ")", "# Get cumulative rates", "cumulative_rates", "=", "np", ".", "array", "(", "[", "np", ".", "sum", "(", "annual_rates", "[", "iloc", ":", ",", "1", "]", ")", "for", "iloc", "in", "range", "(", "0", ",", "len", "(", "annual_rates", ")", ",", "1", ")", "]", ")", "return", "annual_rates", ",", "cumulative_rates" ]
Returns the annual and cumulative recurrence rates predicted by the recurrence model
[ "Returns", "the", "annual", "and", "cumulative", "recurrence", "rates", "predicted", "by", "the", "recurrence", "model" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/occurrence/recurrence_plot.py#L62-L77
452
gem/oq-engine
openquake/hmtk/plotting/seismicity/occurrence/recurrence_plot.py
_check_completeness_table
def _check_completeness_table(completeness, catalogue): """ Generates the completeness table according to different instances """ if isinstance(completeness, np.ndarray) and np.shape(completeness)[1] == 2: return completeness elif isinstance(completeness, float): return np.array([[float(np.min(catalogue.data['year'])), completeness]]) elif completeness is None: return np.array([[float(np.min(catalogue.data['year'])), np.min(catalogue.data['magnitude'])]]) else: raise ValueError('Completeness representation not recognised')
python
def _check_completeness_table(completeness, catalogue): """ Generates the completeness table according to different instances """ if isinstance(completeness, np.ndarray) and np.shape(completeness)[1] == 2: return completeness elif isinstance(completeness, float): return np.array([[float(np.min(catalogue.data['year'])), completeness]]) elif completeness is None: return np.array([[float(np.min(catalogue.data['year'])), np.min(catalogue.data['magnitude'])]]) else: raise ValueError('Completeness representation not recognised')
[ "def", "_check_completeness_table", "(", "completeness", ",", "catalogue", ")", ":", "if", "isinstance", "(", "completeness", ",", "np", ".", "ndarray", ")", "and", "np", ".", "shape", "(", "completeness", ")", "[", "1", "]", "==", "2", ":", "return", "completeness", "elif", "isinstance", "(", "completeness", ",", "float", ")", ":", "return", "np", ".", "array", "(", "[", "[", "float", "(", "np", ".", "min", "(", "catalogue", ".", "data", "[", "'year'", "]", ")", ")", ",", "completeness", "]", "]", ")", "elif", "completeness", "is", "None", ":", "return", "np", ".", "array", "(", "[", "[", "float", "(", "np", ".", "min", "(", "catalogue", ".", "data", "[", "'year'", "]", ")", ")", ",", "np", ".", "min", "(", "catalogue", ".", "data", "[", "'magnitude'", "]", ")", "]", "]", ")", "else", ":", "raise", "ValueError", "(", "'Completeness representation not recognised'", ")" ]
Generates the completeness table according to different instances
[ "Generates", "the", "completeness", "table", "according", "to", "different", "instances" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/occurrence/recurrence_plot.py#L80-L93
453
gem/oq-engine
openquake/hmtk/plotting/seismicity/occurrence/recurrence_plot.py
plot_recurrence_model
def plot_recurrence_model( input_model, catalogue, completeness, dmag=0.1, filename=None, figure_size=(8, 6), filetype='png', dpi=300, ax=None): """ Plot a calculated recurrence model over an observed catalogue, adjusted for time-varying completeness """ annual_rates, cumulative_rates = _get_recurrence_model(input_model) # Get observed annual recurrence if not catalogue.end_year: catalogue.update_end_year() cent_mag, t_per, n_obs = get_completeness_counts(catalogue, completeness, dmag) obs_rates = n_obs / t_per cum_obs_rates = np.array([np.sum(obs_rates[i:]) for i in range(len(obs_rates))]) if ax is None: fig, ax = plt.subplots(figsize=figure_size) else: fig = ax.get_figure() ax.semilogy(cent_mag, obs_rates, 'bo') ax.semilogy(annual_rates[:, 0], annual_rates[:, 1], 'b-') ax.semilogy(cent_mag, cum_obs_rates, 'rs') ax.semilogy(annual_rates[:, 0], cumulative_rates, 'r-') ax.grid(which='both') ax.set_xlabel('Magnitude') ax.set_ylabel('Annual Rate') ax.legend(['Observed Incremental Rate', 'Model Incremental Rate', 'Observed Cumulative Rate', 'Model Cumulative Rate']) ax.tick_params(labelsize=12) _save_image(fig, filename, filetype, dpi)
python
def plot_recurrence_model( input_model, catalogue, completeness, dmag=0.1, filename=None, figure_size=(8, 6), filetype='png', dpi=300, ax=None): """ Plot a calculated recurrence model over an observed catalogue, adjusted for time-varying completeness """ annual_rates, cumulative_rates = _get_recurrence_model(input_model) # Get observed annual recurrence if not catalogue.end_year: catalogue.update_end_year() cent_mag, t_per, n_obs = get_completeness_counts(catalogue, completeness, dmag) obs_rates = n_obs / t_per cum_obs_rates = np.array([np.sum(obs_rates[i:]) for i in range(len(obs_rates))]) if ax is None: fig, ax = plt.subplots(figsize=figure_size) else: fig = ax.get_figure() ax.semilogy(cent_mag, obs_rates, 'bo') ax.semilogy(annual_rates[:, 0], annual_rates[:, 1], 'b-') ax.semilogy(cent_mag, cum_obs_rates, 'rs') ax.semilogy(annual_rates[:, 0], cumulative_rates, 'r-') ax.grid(which='both') ax.set_xlabel('Magnitude') ax.set_ylabel('Annual Rate') ax.legend(['Observed Incremental Rate', 'Model Incremental Rate', 'Observed Cumulative Rate', 'Model Cumulative Rate']) ax.tick_params(labelsize=12) _save_image(fig, filename, filetype, dpi)
[ "def", "plot_recurrence_model", "(", "input_model", ",", "catalogue", ",", "completeness", ",", "dmag", "=", "0.1", ",", "filename", "=", "None", ",", "figure_size", "=", "(", "8", ",", "6", ")", ",", "filetype", "=", "'png'", ",", "dpi", "=", "300", ",", "ax", "=", "None", ")", ":", "annual_rates", ",", "cumulative_rates", "=", "_get_recurrence_model", "(", "input_model", ")", "# Get observed annual recurrence", "if", "not", "catalogue", ".", "end_year", ":", "catalogue", ".", "update_end_year", "(", ")", "cent_mag", ",", "t_per", ",", "n_obs", "=", "get_completeness_counts", "(", "catalogue", ",", "completeness", ",", "dmag", ")", "obs_rates", "=", "n_obs", "/", "t_per", "cum_obs_rates", "=", "np", ".", "array", "(", "[", "np", ".", "sum", "(", "obs_rates", "[", "i", ":", "]", ")", "for", "i", "in", "range", "(", "len", "(", "obs_rates", ")", ")", "]", ")", "if", "ax", "is", "None", ":", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", "figsize", "=", "figure_size", ")", "else", ":", "fig", "=", "ax", ".", "get_figure", "(", ")", "ax", ".", "semilogy", "(", "cent_mag", ",", "obs_rates", ",", "'bo'", ")", "ax", ".", "semilogy", "(", "annual_rates", "[", ":", ",", "0", "]", ",", "annual_rates", "[", ":", ",", "1", "]", ",", "'b-'", ")", "ax", ".", "semilogy", "(", "cent_mag", ",", "cum_obs_rates", ",", "'rs'", ")", "ax", ".", "semilogy", "(", "annual_rates", "[", ":", ",", "0", "]", ",", "cumulative_rates", ",", "'r-'", ")", "ax", ".", "grid", "(", "which", "=", "'both'", ")", "ax", ".", "set_xlabel", "(", "'Magnitude'", ")", "ax", ".", "set_ylabel", "(", "'Annual Rate'", ")", "ax", ".", "legend", "(", "[", "'Observed Incremental Rate'", ",", "'Model Incremental Rate'", ",", "'Observed Cumulative Rate'", ",", "'Model Cumulative Rate'", "]", ")", "ax", ".", "tick_params", "(", "labelsize", "=", "12", ")", "_save_image", "(", "fig", ",", "filename", ",", "filetype", ",", "dpi", ")" ]
Plot a calculated recurrence model over an observed catalogue, adjusted for time-varying completeness
[ "Plot", "a", "calculated", "recurrence", "model", "over", "an", "observed", "catalogue", "adjusted", "for", "time", "-", "varying", "completeness" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/occurrence/recurrence_plot.py#L96-L132
454
gem/oq-engine
openquake/hmtk/plotting/seismicity/occurrence/recurrence_plot.py
plot_trunc_gr_model
def plot_trunc_gr_model( aval, bval, min_mag, max_mag, dmag, catalogue=None, completeness=None, filename=None, figure_size=(8, 6), filetype='png', dpi=300, ax=None): """ Plots a Gutenberg-Richter model """ input_model = TruncatedGRMFD(min_mag, max_mag, dmag, aval, bval) if not catalogue: # Plot only the modelled recurrence annual_rates, cumulative_rates = _get_recurrence_model(input_model) if ax is None: fig, ax = plt.subplots(figsize=figure_size) else: fig = ax.get_figure() ax.semilogy(annual_rates[:, 0], annual_rates[:, 1], 'b-') ax.semilogy(annual_rates[:, 0], cumulative_rates, 'r-') ax.xlabel('Magnitude') ax.set_ylabel('Annual Rate') ax.set_legend(['Incremental Rate', 'Cumulative Rate']) _save_image(fig, filename, filetype, dpi) else: completeness = _check_completeness_table(completeness, catalogue) plot_recurrence_model( input_model, catalogue, completeness, dmag, filename=filename, figure_size=figure_size, filetype=filetype, dpi=dpi, ax=ax)
python
def plot_trunc_gr_model( aval, bval, min_mag, max_mag, dmag, catalogue=None, completeness=None, filename=None, figure_size=(8, 6), filetype='png', dpi=300, ax=None): """ Plots a Gutenberg-Richter model """ input_model = TruncatedGRMFD(min_mag, max_mag, dmag, aval, bval) if not catalogue: # Plot only the modelled recurrence annual_rates, cumulative_rates = _get_recurrence_model(input_model) if ax is None: fig, ax = plt.subplots(figsize=figure_size) else: fig = ax.get_figure() ax.semilogy(annual_rates[:, 0], annual_rates[:, 1], 'b-') ax.semilogy(annual_rates[:, 0], cumulative_rates, 'r-') ax.xlabel('Magnitude') ax.set_ylabel('Annual Rate') ax.set_legend(['Incremental Rate', 'Cumulative Rate']) _save_image(fig, filename, filetype, dpi) else: completeness = _check_completeness_table(completeness, catalogue) plot_recurrence_model( input_model, catalogue, completeness, dmag, filename=filename, figure_size=figure_size, filetype=filetype, dpi=dpi, ax=ax)
[ "def", "plot_trunc_gr_model", "(", "aval", ",", "bval", ",", "min_mag", ",", "max_mag", ",", "dmag", ",", "catalogue", "=", "None", ",", "completeness", "=", "None", ",", "filename", "=", "None", ",", "figure_size", "=", "(", "8", ",", "6", ")", ",", "filetype", "=", "'png'", ",", "dpi", "=", "300", ",", "ax", "=", "None", ")", ":", "input_model", "=", "TruncatedGRMFD", "(", "min_mag", ",", "max_mag", ",", "dmag", ",", "aval", ",", "bval", ")", "if", "not", "catalogue", ":", "# Plot only the modelled recurrence", "annual_rates", ",", "cumulative_rates", "=", "_get_recurrence_model", "(", "input_model", ")", "if", "ax", "is", "None", ":", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", "figsize", "=", "figure_size", ")", "else", ":", "fig", "=", "ax", ".", "get_figure", "(", ")", "ax", ".", "semilogy", "(", "annual_rates", "[", ":", ",", "0", "]", ",", "annual_rates", "[", ":", ",", "1", "]", ",", "'b-'", ")", "ax", ".", "semilogy", "(", "annual_rates", "[", ":", ",", "0", "]", ",", "cumulative_rates", ",", "'r-'", ")", "ax", ".", "xlabel", "(", "'Magnitude'", ")", "ax", ".", "set_ylabel", "(", "'Annual Rate'", ")", "ax", ".", "set_legend", "(", "[", "'Incremental Rate'", ",", "'Cumulative Rate'", "]", ")", "_save_image", "(", "fig", ",", "filename", ",", "filetype", ",", "dpi", ")", "else", ":", "completeness", "=", "_check_completeness_table", "(", "completeness", ",", "catalogue", ")", "plot_recurrence_model", "(", "input_model", ",", "catalogue", ",", "completeness", ",", "dmag", ",", "filename", "=", "filename", ",", "figure_size", "=", "figure_size", ",", "filetype", "=", "filetype", ",", "dpi", "=", "dpi", ",", "ax", "=", "ax", ")" ]
Plots a Gutenberg-Richter model
[ "Plots", "a", "Gutenberg", "-", "Richter", "model" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/occurrence/recurrence_plot.py#L135-L163
455
gem/oq-engine
openquake/hazardlib/nrml.py
read
def read(source, chatty=True, stop=None): """ Convert a NRML file into a validated Node object. Keeps the entire tree in memory. :param source: a file name or file object open for reading """ vparser = ValidatingXmlParser(validators, stop) nrml = vparser.parse_file(source) if striptag(nrml.tag) != 'nrml': raise ValueError('%s: expected a node of kind nrml, got %s' % (source, nrml.tag)) # extract the XML namespace URL ('http://openquake.org/xmlns/nrml/0.5') xmlns = nrml.tag.split('}')[0][1:] if xmlns != NRML05 and chatty: # for the moment NRML04 is still supported, so we hide the warning logging.debug('%s is at an outdated version: %s', source, xmlns) nrml['xmlns'] = xmlns nrml['xmlns:gml'] = GML_NAMESPACE return nrml
python
def read(source, chatty=True, stop=None): """ Convert a NRML file into a validated Node object. Keeps the entire tree in memory. :param source: a file name or file object open for reading """ vparser = ValidatingXmlParser(validators, stop) nrml = vparser.parse_file(source) if striptag(nrml.tag) != 'nrml': raise ValueError('%s: expected a node of kind nrml, got %s' % (source, nrml.tag)) # extract the XML namespace URL ('http://openquake.org/xmlns/nrml/0.5') xmlns = nrml.tag.split('}')[0][1:] if xmlns != NRML05 and chatty: # for the moment NRML04 is still supported, so we hide the warning logging.debug('%s is at an outdated version: %s', source, xmlns) nrml['xmlns'] = xmlns nrml['xmlns:gml'] = GML_NAMESPACE return nrml
[ "def", "read", "(", "source", ",", "chatty", "=", "True", ",", "stop", "=", "None", ")", ":", "vparser", "=", "ValidatingXmlParser", "(", "validators", ",", "stop", ")", "nrml", "=", "vparser", ".", "parse_file", "(", "source", ")", "if", "striptag", "(", "nrml", ".", "tag", ")", "!=", "'nrml'", ":", "raise", "ValueError", "(", "'%s: expected a node of kind nrml, got %s'", "%", "(", "source", ",", "nrml", ".", "tag", ")", ")", "# extract the XML namespace URL ('http://openquake.org/xmlns/nrml/0.5')", "xmlns", "=", "nrml", ".", "tag", ".", "split", "(", "'}'", ")", "[", "0", "]", "[", "1", ":", "]", "if", "xmlns", "!=", "NRML05", "and", "chatty", ":", "# for the moment NRML04 is still supported, so we hide the warning", "logging", ".", "debug", "(", "'%s is at an outdated version: %s'", ",", "source", ",", "xmlns", ")", "nrml", "[", "'xmlns'", "]", "=", "xmlns", "nrml", "[", "'xmlns:gml'", "]", "=", "GML_NAMESPACE", "return", "nrml" ]
Convert a NRML file into a validated Node object. Keeps the entire tree in memory. :param source: a file name or file object open for reading
[ "Convert", "a", "NRML", "file", "into", "a", "validated", "Node", "object", ".", "Keeps", "the", "entire", "tree", "in", "memory", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/nrml.py#L329-L349
456
gem/oq-engine
openquake/hazardlib/nrml.py
write
def write(nodes, output=sys.stdout, fmt='%.7E', gml=True, xmlns=None): """ Convert nodes into a NRML file. output must be a file object open in write mode. If you want to perform a consistency check, open it in read-write mode, then it will be read after creation and validated. :params nodes: an iterable over Node objects :params output: a file-like object in write or read-write mode :param fmt: format used for writing the floats (default '%.7E') :param gml: add the http://www.opengis.net/gml namespace :param xmlns: NRML namespace like http://openquake.org/xmlns/nrml/0.4 """ root = Node('nrml', nodes=nodes) namespaces = {xmlns or NRML05: ''} if gml: namespaces[GML_NAMESPACE] = 'gml:' with floatformat(fmt): node_to_xml(root, output, namespaces) if hasattr(output, 'mode') and '+' in output.mode: # read-write mode output.seek(0) read(output)
python
def write(nodes, output=sys.stdout, fmt='%.7E', gml=True, xmlns=None): """ Convert nodes into a NRML file. output must be a file object open in write mode. If you want to perform a consistency check, open it in read-write mode, then it will be read after creation and validated. :params nodes: an iterable over Node objects :params output: a file-like object in write or read-write mode :param fmt: format used for writing the floats (default '%.7E') :param gml: add the http://www.opengis.net/gml namespace :param xmlns: NRML namespace like http://openquake.org/xmlns/nrml/0.4 """ root = Node('nrml', nodes=nodes) namespaces = {xmlns or NRML05: ''} if gml: namespaces[GML_NAMESPACE] = 'gml:' with floatformat(fmt): node_to_xml(root, output, namespaces) if hasattr(output, 'mode') and '+' in output.mode: # read-write mode output.seek(0) read(output)
[ "def", "write", "(", "nodes", ",", "output", "=", "sys", ".", "stdout", ",", "fmt", "=", "'%.7E'", ",", "gml", "=", "True", ",", "xmlns", "=", "None", ")", ":", "root", "=", "Node", "(", "'nrml'", ",", "nodes", "=", "nodes", ")", "namespaces", "=", "{", "xmlns", "or", "NRML05", ":", "''", "}", "if", "gml", ":", "namespaces", "[", "GML_NAMESPACE", "]", "=", "'gml:'", "with", "floatformat", "(", "fmt", ")", ":", "node_to_xml", "(", "root", ",", "output", ",", "namespaces", ")", "if", "hasattr", "(", "output", ",", "'mode'", ")", "and", "'+'", "in", "output", ".", "mode", ":", "# read-write mode", "output", ".", "seek", "(", "0", ")", "read", "(", "output", ")" ]
Convert nodes into a NRML file. output must be a file object open in write mode. If you want to perform a consistency check, open it in read-write mode, then it will be read after creation and validated. :params nodes: an iterable over Node objects :params output: a file-like object in write or read-write mode :param fmt: format used for writing the floats (default '%.7E') :param gml: add the http://www.opengis.net/gml namespace :param xmlns: NRML namespace like http://openquake.org/xmlns/nrml/0.4
[ "Convert", "nodes", "into", "a", "NRML", "file", ".", "output", "must", "be", "a", "file", "object", "open", "in", "write", "mode", ".", "If", "you", "want", "to", "perform", "a", "consistency", "check", "open", "it", "in", "read", "-", "write", "mode", "then", "it", "will", "be", "read", "after", "creation", "and", "validated", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/nrml.py#L352-L373
457
gem/oq-engine
openquake/hazardlib/nrml.py
to_string
def to_string(node): """ Convert a node into a string in NRML format """ with io.BytesIO() as f: write([node], f) return f.getvalue().decode('utf-8')
python
def to_string(node): """ Convert a node into a string in NRML format """ with io.BytesIO() as f: write([node], f) return f.getvalue().decode('utf-8')
[ "def", "to_string", "(", "node", ")", ":", "with", "io", ".", "BytesIO", "(", ")", "as", "f", ":", "write", "(", "[", "node", "]", ",", "f", ")", "return", "f", ".", "getvalue", "(", ")", ".", "decode", "(", "'utf-8'", ")" ]
Convert a node into a string in NRML format
[ "Convert", "a", "node", "into", "a", "string", "in", "NRML", "format" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/nrml.py#L376-L382
458
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014.get_mean_values
def get_mean_values(self, C, sites, rup, dists, a1100): """ Returns the mean values for a specific IMT """ if isinstance(a1100, np.ndarray): # Site model defined temp_vs30 = sites.vs30 temp_z2pt5 = sites.z2pt5 else: # Default site and basin model temp_vs30 = 1100.0 * np.ones(len(sites.vs30)) temp_z2pt5 = self._select_basin_model(1100.0) *\ np.ones_like(temp_vs30) return (self._get_magnitude_term(C, rup.mag) + self._get_geometric_attenuation_term(C, rup.mag, dists.rrup) + self._get_style_of_faulting_term(C, rup) + self._get_hanging_wall_term(C, rup, dists) + self._get_shallow_site_response_term(C, temp_vs30, a1100) + self._get_basin_response_term(C, temp_z2pt5) + self._get_hypocentral_depth_term(C, rup) + self._get_fault_dip_term(C, rup) + self._get_anelastic_attenuation_term(C, dists.rrup))
python
def get_mean_values(self, C, sites, rup, dists, a1100): """ Returns the mean values for a specific IMT """ if isinstance(a1100, np.ndarray): # Site model defined temp_vs30 = sites.vs30 temp_z2pt5 = sites.z2pt5 else: # Default site and basin model temp_vs30 = 1100.0 * np.ones(len(sites.vs30)) temp_z2pt5 = self._select_basin_model(1100.0) *\ np.ones_like(temp_vs30) return (self._get_magnitude_term(C, rup.mag) + self._get_geometric_attenuation_term(C, rup.mag, dists.rrup) + self._get_style_of_faulting_term(C, rup) + self._get_hanging_wall_term(C, rup, dists) + self._get_shallow_site_response_term(C, temp_vs30, a1100) + self._get_basin_response_term(C, temp_z2pt5) + self._get_hypocentral_depth_term(C, rup) + self._get_fault_dip_term(C, rup) + self._get_anelastic_attenuation_term(C, dists.rrup))
[ "def", "get_mean_values", "(", "self", ",", "C", ",", "sites", ",", "rup", ",", "dists", ",", "a1100", ")", ":", "if", "isinstance", "(", "a1100", ",", "np", ".", "ndarray", ")", ":", "# Site model defined", "temp_vs30", "=", "sites", ".", "vs30", "temp_z2pt5", "=", "sites", ".", "z2pt5", "else", ":", "# Default site and basin model", "temp_vs30", "=", "1100.0", "*", "np", ".", "ones", "(", "len", "(", "sites", ".", "vs30", ")", ")", "temp_z2pt5", "=", "self", ".", "_select_basin_model", "(", "1100.0", ")", "*", "np", ".", "ones_like", "(", "temp_vs30", ")", "return", "(", "self", ".", "_get_magnitude_term", "(", "C", ",", "rup", ".", "mag", ")", "+", "self", ".", "_get_geometric_attenuation_term", "(", "C", ",", "rup", ".", "mag", ",", "dists", ".", "rrup", ")", "+", "self", ".", "_get_style_of_faulting_term", "(", "C", ",", "rup", ")", "+", "self", ".", "_get_hanging_wall_term", "(", "C", ",", "rup", ",", "dists", ")", "+", "self", ".", "_get_shallow_site_response_term", "(", "C", ",", "temp_vs30", ",", "a1100", ")", "+", "self", ".", "_get_basin_response_term", "(", "C", ",", "temp_z2pt5", ")", "+", "self", ".", "_get_hypocentral_depth_term", "(", "C", ",", "rup", ")", "+", "self", ".", "_get_fault_dip_term", "(", "C", ",", "rup", ")", "+", "self", ".", "_get_anelastic_attenuation_term", "(", "C", ",", "dists", ".", "rrup", ")", ")" ]
Returns the mean values for a specific IMT
[ "Returns", "the", "mean", "values", "for", "a", "specific", "IMT" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L122-L144
459
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_magnitude_term
def _get_magnitude_term(self, C, mag): """ Returns the magnitude scaling term defined in equation 2 """ f_mag = C["c0"] + C["c1"] * mag if (mag > 4.5) and (mag <= 5.5): return f_mag + (C["c2"] * (mag - 4.5)) elif (mag > 5.5) and (mag <= 6.5): return f_mag + (C["c2"] * (mag - 4.5)) + (C["c3"] * (mag - 5.5)) elif mag > 6.5: return f_mag + (C["c2"] * (mag - 4.5)) + (C["c3"] * (mag - 5.5)) +\ (C["c4"] * (mag - 6.5)) else: return f_mag
python
def _get_magnitude_term(self, C, mag): """ Returns the magnitude scaling term defined in equation 2 """ f_mag = C["c0"] + C["c1"] * mag if (mag > 4.5) and (mag <= 5.5): return f_mag + (C["c2"] * (mag - 4.5)) elif (mag > 5.5) and (mag <= 6.5): return f_mag + (C["c2"] * (mag - 4.5)) + (C["c3"] * (mag - 5.5)) elif mag > 6.5: return f_mag + (C["c2"] * (mag - 4.5)) + (C["c3"] * (mag - 5.5)) +\ (C["c4"] * (mag - 6.5)) else: return f_mag
[ "def", "_get_magnitude_term", "(", "self", ",", "C", ",", "mag", ")", ":", "f_mag", "=", "C", "[", "\"c0\"", "]", "+", "C", "[", "\"c1\"", "]", "*", "mag", "if", "(", "mag", ">", "4.5", ")", "and", "(", "mag", "<=", "5.5", ")", ":", "return", "f_mag", "+", "(", "C", "[", "\"c2\"", "]", "*", "(", "mag", "-", "4.5", ")", ")", "elif", "(", "mag", ">", "5.5", ")", "and", "(", "mag", "<=", "6.5", ")", ":", "return", "f_mag", "+", "(", "C", "[", "\"c2\"", "]", "*", "(", "mag", "-", "4.5", ")", ")", "+", "(", "C", "[", "\"c3\"", "]", "*", "(", "mag", "-", "5.5", ")", ")", "elif", "mag", ">", "6.5", ":", "return", "f_mag", "+", "(", "C", "[", "\"c2\"", "]", "*", "(", "mag", "-", "4.5", ")", ")", "+", "(", "C", "[", "\"c3\"", "]", "*", "(", "mag", "-", "5.5", ")", ")", "+", "(", "C", "[", "\"c4\"", "]", "*", "(", "mag", "-", "6.5", ")", ")", "else", ":", "return", "f_mag" ]
Returns the magnitude scaling term defined in equation 2
[ "Returns", "the", "magnitude", "scaling", "term", "defined", "in", "equation", "2" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L146-L159
460
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_geometric_attenuation_term
def _get_geometric_attenuation_term(self, C, mag, rrup): """ Returns the geometric attenuation term defined in equation 3 """ return (C["c5"] + C["c6"] * mag) * np.log(np.sqrt((rrup ** 2.) + (C["c7"] ** 2.)))
python
def _get_geometric_attenuation_term(self, C, mag, rrup): """ Returns the geometric attenuation term defined in equation 3 """ return (C["c5"] + C["c6"] * mag) * np.log(np.sqrt((rrup ** 2.) + (C["c7"] ** 2.)))
[ "def", "_get_geometric_attenuation_term", "(", "self", ",", "C", ",", "mag", ",", "rrup", ")", ":", "return", "(", "C", "[", "\"c5\"", "]", "+", "C", "[", "\"c6\"", "]", "*", "mag", ")", "*", "np", ".", "log", "(", "np", ".", "sqrt", "(", "(", "rrup", "**", "2.", ")", "+", "(", "C", "[", "\"c7\"", "]", "**", "2.", ")", ")", ")" ]
Returns the geometric attenuation term defined in equation 3
[ "Returns", "the", "geometric", "attenuation", "term", "defined", "in", "equation", "3" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L161-L166
461
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_style_of_faulting_term
def _get_style_of_faulting_term(self, C, rup): """ Returns the style-of-faulting scaling term defined in equations 4 to 6 """ if (rup.rake > 30.0) and (rup.rake < 150.): frv = 1.0 fnm = 0.0 elif (rup.rake > -150.0) and (rup.rake < -30.0): fnm = 1.0 frv = 0.0 else: fnm = 0.0 frv = 0.0 fflt_f = (self.CONSTS["c8"] * frv) + (C["c9"] * fnm) if rup.mag <= 4.5: fflt_m = 0.0 elif rup.mag > 5.5: fflt_m = 1.0 else: fflt_m = rup.mag - 4.5 return fflt_f * fflt_m
python
def _get_style_of_faulting_term(self, C, rup): """ Returns the style-of-faulting scaling term defined in equations 4 to 6 """ if (rup.rake > 30.0) and (rup.rake < 150.): frv = 1.0 fnm = 0.0 elif (rup.rake > -150.0) and (rup.rake < -30.0): fnm = 1.0 frv = 0.0 else: fnm = 0.0 frv = 0.0 fflt_f = (self.CONSTS["c8"] * frv) + (C["c9"] * fnm) if rup.mag <= 4.5: fflt_m = 0.0 elif rup.mag > 5.5: fflt_m = 1.0 else: fflt_m = rup.mag - 4.5 return fflt_f * fflt_m
[ "def", "_get_style_of_faulting_term", "(", "self", ",", "C", ",", "rup", ")", ":", "if", "(", "rup", ".", "rake", ">", "30.0", ")", "and", "(", "rup", ".", "rake", "<", "150.", ")", ":", "frv", "=", "1.0", "fnm", "=", "0.0", "elif", "(", "rup", ".", "rake", ">", "-", "150.0", ")", "and", "(", "rup", ".", "rake", "<", "-", "30.0", ")", ":", "fnm", "=", "1.0", "frv", "=", "0.0", "else", ":", "fnm", "=", "0.0", "frv", "=", "0.0", "fflt_f", "=", "(", "self", ".", "CONSTS", "[", "\"c8\"", "]", "*", "frv", ")", "+", "(", "C", "[", "\"c9\"", "]", "*", "fnm", ")", "if", "rup", ".", "mag", "<=", "4.5", ":", "fflt_m", "=", "0.0", "elif", "rup", ".", "mag", ">", "5.5", ":", "fflt_m", "=", "1.0", "else", ":", "fflt_m", "=", "rup", ".", "mag", "-", "4.5", "return", "fflt_f", "*", "fflt_m" ]
Returns the style-of-faulting scaling term defined in equations 4 to 6
[ "Returns", "the", "style", "-", "of", "-", "faulting", "scaling", "term", "defined", "in", "equations", "4", "to", "6" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L168-L189
462
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_hanging_wall_term
def _get_hanging_wall_term(self, C, rup, dists): """ Returns the hanging wall scaling term defined in equations 7 to 16 """ return (C["c10"] * self._get_hanging_wall_coeffs_rx(C, rup, dists.rx) * self._get_hanging_wall_coeffs_rrup(dists) * self._get_hanging_wall_coeffs_mag(C, rup.mag) * self._get_hanging_wall_coeffs_ztor(rup.ztor) * self._get_hanging_wall_coeffs_dip(rup.dip))
python
def _get_hanging_wall_term(self, C, rup, dists): """ Returns the hanging wall scaling term defined in equations 7 to 16 """ return (C["c10"] * self._get_hanging_wall_coeffs_rx(C, rup, dists.rx) * self._get_hanging_wall_coeffs_rrup(dists) * self._get_hanging_wall_coeffs_mag(C, rup.mag) * self._get_hanging_wall_coeffs_ztor(rup.ztor) * self._get_hanging_wall_coeffs_dip(rup.dip))
[ "def", "_get_hanging_wall_term", "(", "self", ",", "C", ",", "rup", ",", "dists", ")", ":", "return", "(", "C", "[", "\"c10\"", "]", "*", "self", ".", "_get_hanging_wall_coeffs_rx", "(", "C", ",", "rup", ",", "dists", ".", "rx", ")", "*", "self", ".", "_get_hanging_wall_coeffs_rrup", "(", "dists", ")", "*", "self", ".", "_get_hanging_wall_coeffs_mag", "(", "C", ",", "rup", ".", "mag", ")", "*", "self", ".", "_get_hanging_wall_coeffs_ztor", "(", "rup", ".", "ztor", ")", "*", "self", ".", "_get_hanging_wall_coeffs_dip", "(", "rup", ".", "dip", ")", ")" ]
Returns the hanging wall scaling term defined in equations 7 to 16
[ "Returns", "the", "hanging", "wall", "scaling", "term", "defined", "in", "equations", "7", "to", "16" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L191-L200
463
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_hanging_wall_coeffs_rx
def _get_hanging_wall_coeffs_rx(self, C, rup, r_x): """ Returns the hanging wall r-x caling term defined in equation 7 to 12 """ # Define coefficients R1 and R2 r_1 = rup.width * cos(radians(rup.dip)) r_2 = 62.0 * rup.mag - 350.0 fhngrx = np.zeros(len(r_x)) # Case when 0 <= Rx <= R1 idx = np.logical_and(r_x >= 0., r_x < r_1) fhngrx[idx] = self._get_f1rx(C, r_x[idx], r_1) # Case when Rx > R1 idx = r_x >= r_1 f2rx = self._get_f2rx(C, r_x[idx], r_1, r_2) f2rx[f2rx < 0.0] = 0.0 fhngrx[idx] = f2rx return fhngrx
python
def _get_hanging_wall_coeffs_rx(self, C, rup, r_x): """ Returns the hanging wall r-x caling term defined in equation 7 to 12 """ # Define coefficients R1 and R2 r_1 = rup.width * cos(radians(rup.dip)) r_2 = 62.0 * rup.mag - 350.0 fhngrx = np.zeros(len(r_x)) # Case when 0 <= Rx <= R1 idx = np.logical_and(r_x >= 0., r_x < r_1) fhngrx[idx] = self._get_f1rx(C, r_x[idx], r_1) # Case when Rx > R1 idx = r_x >= r_1 f2rx = self._get_f2rx(C, r_x[idx], r_1, r_2) f2rx[f2rx < 0.0] = 0.0 fhngrx[idx] = f2rx return fhngrx
[ "def", "_get_hanging_wall_coeffs_rx", "(", "self", ",", "C", ",", "rup", ",", "r_x", ")", ":", "# Define coefficients R1 and R2", "r_1", "=", "rup", ".", "width", "*", "cos", "(", "radians", "(", "rup", ".", "dip", ")", ")", "r_2", "=", "62.0", "*", "rup", ".", "mag", "-", "350.0", "fhngrx", "=", "np", ".", "zeros", "(", "len", "(", "r_x", ")", ")", "# Case when 0 <= Rx <= R1", "idx", "=", "np", ".", "logical_and", "(", "r_x", ">=", "0.", ",", "r_x", "<", "r_1", ")", "fhngrx", "[", "idx", "]", "=", "self", ".", "_get_f1rx", "(", "C", ",", "r_x", "[", "idx", "]", ",", "r_1", ")", "# Case when Rx > R1", "idx", "=", "r_x", ">=", "r_1", "f2rx", "=", "self", ".", "_get_f2rx", "(", "C", ",", "r_x", "[", "idx", "]", ",", "r_1", ",", "r_2", ")", "f2rx", "[", "f2rx", "<", "0.0", "]", "=", "0.0", "fhngrx", "[", "idx", "]", "=", "f2rx", "return", "fhngrx" ]
Returns the hanging wall r-x caling term defined in equation 7 to 12
[ "Returns", "the", "hanging", "wall", "r", "-", "x", "caling", "term", "defined", "in", "equation", "7", "to", "12" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L202-L218
464
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_f1rx
def _get_f1rx(self, C, r_x, r_1): """ Defines the f1 scaling coefficient defined in equation 9 """ rxr1 = r_x / r_1 return C["h1"] + (C["h2"] * rxr1) + (C["h3"] * (rxr1 ** 2.))
python
def _get_f1rx(self, C, r_x, r_1): """ Defines the f1 scaling coefficient defined in equation 9 """ rxr1 = r_x / r_1 return C["h1"] + (C["h2"] * rxr1) + (C["h3"] * (rxr1 ** 2.))
[ "def", "_get_f1rx", "(", "self", ",", "C", ",", "r_x", ",", "r_1", ")", ":", "rxr1", "=", "r_x", "/", "r_1", "return", "C", "[", "\"h1\"", "]", "+", "(", "C", "[", "\"h2\"", "]", "*", "rxr1", ")", "+", "(", "C", "[", "\"h3\"", "]", "*", "(", "rxr1", "**", "2.", ")", ")" ]
Defines the f1 scaling coefficient defined in equation 9
[ "Defines", "the", "f1", "scaling", "coefficient", "defined", "in", "equation", "9" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L220-L225
465
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_f2rx
def _get_f2rx(self, C, r_x, r_1, r_2): """ Defines the f2 scaling coefficient defined in equation 10 """ drx = (r_x - r_1) / (r_2 - r_1) return self.CONSTS["h4"] + (C["h5"] * drx) + (C["h6"] * (drx ** 2.))
python
def _get_f2rx(self, C, r_x, r_1, r_2): """ Defines the f2 scaling coefficient defined in equation 10 """ drx = (r_x - r_1) / (r_2 - r_1) return self.CONSTS["h4"] + (C["h5"] * drx) + (C["h6"] * (drx ** 2.))
[ "def", "_get_f2rx", "(", "self", ",", "C", ",", "r_x", ",", "r_1", ",", "r_2", ")", ":", "drx", "=", "(", "r_x", "-", "r_1", ")", "/", "(", "r_2", "-", "r_1", ")", "return", "self", ".", "CONSTS", "[", "\"h4\"", "]", "+", "(", "C", "[", "\"h5\"", "]", "*", "drx", ")", "+", "(", "C", "[", "\"h6\"", "]", "*", "(", "drx", "**", "2.", ")", ")" ]
Defines the f2 scaling coefficient defined in equation 10
[ "Defines", "the", "f2", "scaling", "coefficient", "defined", "in", "equation", "10" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L227-L232
466
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_hanging_wall_coeffs_rrup
def _get_hanging_wall_coeffs_rrup(self, dists): """ Returns the hanging wall rrup term defined in equation 13 """ fhngrrup = np.ones(len(dists.rrup)) idx = dists.rrup > 0.0 fhngrrup[idx] = (dists.rrup[idx] - dists.rjb[idx]) / dists.rrup[idx] return fhngrrup
python
def _get_hanging_wall_coeffs_rrup(self, dists): """ Returns the hanging wall rrup term defined in equation 13 """ fhngrrup = np.ones(len(dists.rrup)) idx = dists.rrup > 0.0 fhngrrup[idx] = (dists.rrup[idx] - dists.rjb[idx]) / dists.rrup[idx] return fhngrrup
[ "def", "_get_hanging_wall_coeffs_rrup", "(", "self", ",", "dists", ")", ":", "fhngrrup", "=", "np", ".", "ones", "(", "len", "(", "dists", ".", "rrup", ")", ")", "idx", "=", "dists", ".", "rrup", ">", "0.0", "fhngrrup", "[", "idx", "]", "=", "(", "dists", ".", "rrup", "[", "idx", "]", "-", "dists", ".", "rjb", "[", "idx", "]", ")", "/", "dists", ".", "rrup", "[", "idx", "]", "return", "fhngrrup" ]
Returns the hanging wall rrup term defined in equation 13
[ "Returns", "the", "hanging", "wall", "rrup", "term", "defined", "in", "equation", "13" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L234-L241
467
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_hanging_wall_coeffs_mag
def _get_hanging_wall_coeffs_mag(self, C, mag): """ Returns the hanging wall magnitude term defined in equation 14 """ if mag < 5.5: return 0.0 elif mag > 6.5: return 1.0 + C["a2"] * (mag - 6.5) else: return (mag - 5.5) * (1.0 + C["a2"] * (mag - 6.5))
python
def _get_hanging_wall_coeffs_mag(self, C, mag): """ Returns the hanging wall magnitude term defined in equation 14 """ if mag < 5.5: return 0.0 elif mag > 6.5: return 1.0 + C["a2"] * (mag - 6.5) else: return (mag - 5.5) * (1.0 + C["a2"] * (mag - 6.5))
[ "def", "_get_hanging_wall_coeffs_mag", "(", "self", ",", "C", ",", "mag", ")", ":", "if", "mag", "<", "5.5", ":", "return", "0.0", "elif", "mag", ">", "6.5", ":", "return", "1.0", "+", "C", "[", "\"a2\"", "]", "*", "(", "mag", "-", "6.5", ")", "else", ":", "return", "(", "mag", "-", "5.5", ")", "*", "(", "1.0", "+", "C", "[", "\"a2\"", "]", "*", "(", "mag", "-", "6.5", ")", ")" ]
Returns the hanging wall magnitude term defined in equation 14
[ "Returns", "the", "hanging", "wall", "magnitude", "term", "defined", "in", "equation", "14" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L243-L252
468
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_hypocentral_depth_term
def _get_hypocentral_depth_term(self, C, rup): """ Returns the hypocentral depth scaling term defined in equations 21 - 23 """ if rup.hypo_depth <= 7.0: fhyp_h = 0.0 elif rup.hypo_depth > 20.0: fhyp_h = 13.0 else: fhyp_h = rup.hypo_depth - 7.0 if rup.mag <= 5.5: fhyp_m = C["c17"] elif rup.mag > 6.5: fhyp_m = C["c18"] else: fhyp_m = C["c17"] + ((C["c18"] - C["c17"]) * (rup.mag - 5.5)) return fhyp_h * fhyp_m
python
def _get_hypocentral_depth_term(self, C, rup): """ Returns the hypocentral depth scaling term defined in equations 21 - 23 """ if rup.hypo_depth <= 7.0: fhyp_h = 0.0 elif rup.hypo_depth > 20.0: fhyp_h = 13.0 else: fhyp_h = rup.hypo_depth - 7.0 if rup.mag <= 5.5: fhyp_m = C["c17"] elif rup.mag > 6.5: fhyp_m = C["c18"] else: fhyp_m = C["c17"] + ((C["c18"] - C["c17"]) * (rup.mag - 5.5)) return fhyp_h * fhyp_m
[ "def", "_get_hypocentral_depth_term", "(", "self", ",", "C", ",", "rup", ")", ":", "if", "rup", ".", "hypo_depth", "<=", "7.0", ":", "fhyp_h", "=", "0.0", "elif", "rup", ".", "hypo_depth", ">", "20.0", ":", "fhyp_h", "=", "13.0", "else", ":", "fhyp_h", "=", "rup", ".", "hypo_depth", "-", "7.0", "if", "rup", ".", "mag", "<=", "5.5", ":", "fhyp_m", "=", "C", "[", "\"c17\"", "]", "elif", "rup", ".", "mag", ">", "6.5", ":", "fhyp_m", "=", "C", "[", "\"c18\"", "]", "else", ":", "fhyp_m", "=", "C", "[", "\"c17\"", "]", "+", "(", "(", "C", "[", "\"c18\"", "]", "-", "C", "[", "\"c17\"", "]", ")", "*", "(", "rup", ".", "mag", "-", "5.5", ")", ")", "return", "fhyp_h", "*", "fhyp_m" ]
Returns the hypocentral depth scaling term defined in equations 21 - 23
[ "Returns", "the", "hypocentral", "depth", "scaling", "term", "defined", "in", "equations", "21", "-", "23" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L269-L286
469
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_fault_dip_term
def _get_fault_dip_term(self, C, rup): """ Returns the fault dip term, defined in equation 24 """ if rup.mag < 4.5: return C["c19"] * rup.dip elif rup.mag > 5.5: return 0.0 else: return C["c19"] * (5.5 - rup.mag) * rup.dip
python
def _get_fault_dip_term(self, C, rup): """ Returns the fault dip term, defined in equation 24 """ if rup.mag < 4.5: return C["c19"] * rup.dip elif rup.mag > 5.5: return 0.0 else: return C["c19"] * (5.5 - rup.mag) * rup.dip
[ "def", "_get_fault_dip_term", "(", "self", ",", "C", ",", "rup", ")", ":", "if", "rup", ".", "mag", "<", "4.5", ":", "return", "C", "[", "\"c19\"", "]", "*", "rup", ".", "dip", "elif", "rup", ".", "mag", ">", "5.5", ":", "return", "0.0", "else", ":", "return", "C", "[", "\"c19\"", "]", "*", "(", "5.5", "-", "rup", ".", "mag", ")", "*", "rup", ".", "dip" ]
Returns the fault dip term, defined in equation 24
[ "Returns", "the", "fault", "dip", "term", "defined", "in", "equation", "24" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L288-L297
470
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_anelastic_attenuation_term
def _get_anelastic_attenuation_term(self, C, rrup): """ Returns the anelastic attenuation term defined in equation 25 """ f_atn = np.zeros(len(rrup)) idx = rrup >= 80.0 f_atn[idx] = (C["c20"] + C["Dc20"]) * (rrup[idx] - 80.0) return f_atn
python
def _get_anelastic_attenuation_term(self, C, rrup): """ Returns the anelastic attenuation term defined in equation 25 """ f_atn = np.zeros(len(rrup)) idx = rrup >= 80.0 f_atn[idx] = (C["c20"] + C["Dc20"]) * (rrup[idx] - 80.0) return f_atn
[ "def", "_get_anelastic_attenuation_term", "(", "self", ",", "C", ",", "rrup", ")", ":", "f_atn", "=", "np", ".", "zeros", "(", "len", "(", "rrup", ")", ")", "idx", "=", "rrup", ">=", "80.0", "f_atn", "[", "idx", "]", "=", "(", "C", "[", "\"c20\"", "]", "+", "C", "[", "\"Dc20\"", "]", ")", "*", "(", "rrup", "[", "idx", "]", "-", "80.0", ")", "return", "f_atn" ]
Returns the anelastic attenuation term defined in equation 25
[ "Returns", "the", "anelastic", "attenuation", "term", "defined", "in", "equation", "25" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L299-L306
471
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_basin_response_term
def _get_basin_response_term(self, C, z2pt5): """ Returns the basin response term defined in equation 20 """ f_sed = np.zeros(len(z2pt5)) idx = z2pt5 < 1.0 f_sed[idx] = (C["c14"] + C["c15"] * float(self.CONSTS["SJ"])) *\ (z2pt5[idx] - 1.0) idx = z2pt5 > 3.0 f_sed[idx] = C["c16"] * C["k3"] * exp(-0.75) *\ (1.0 - np.exp(-0.25 * (z2pt5[idx] - 3.0))) return f_sed
python
def _get_basin_response_term(self, C, z2pt5): """ Returns the basin response term defined in equation 20 """ f_sed = np.zeros(len(z2pt5)) idx = z2pt5 < 1.0 f_sed[idx] = (C["c14"] + C["c15"] * float(self.CONSTS["SJ"])) *\ (z2pt5[idx] - 1.0) idx = z2pt5 > 3.0 f_sed[idx] = C["c16"] * C["k3"] * exp(-0.75) *\ (1.0 - np.exp(-0.25 * (z2pt5[idx] - 3.0))) return f_sed
[ "def", "_get_basin_response_term", "(", "self", ",", "C", ",", "z2pt5", ")", ":", "f_sed", "=", "np", ".", "zeros", "(", "len", "(", "z2pt5", ")", ")", "idx", "=", "z2pt5", "<", "1.0", "f_sed", "[", "idx", "]", "=", "(", "C", "[", "\"c14\"", "]", "+", "C", "[", "\"c15\"", "]", "*", "float", "(", "self", ".", "CONSTS", "[", "\"SJ\"", "]", ")", ")", "*", "(", "z2pt5", "[", "idx", "]", "-", "1.0", ")", "idx", "=", "z2pt5", ">", "3.0", "f_sed", "[", "idx", "]", "=", "C", "[", "\"c16\"", "]", "*", "C", "[", "\"k3\"", "]", "*", "exp", "(", "-", "0.75", ")", "*", "(", "1.0", "-", "np", ".", "exp", "(", "-", "0.25", "*", "(", "z2pt5", "[", "idx", "]", "-", "3.0", ")", ")", ")", "return", "f_sed" ]
Returns the basin response term defined in equation 20
[ "Returns", "the", "basin", "response", "term", "defined", "in", "equation", "20" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L321-L332
472
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_shallow_site_response_term
def _get_shallow_site_response_term(self, C, vs30, pga_rock): """ Returns the shallow site response term defined in equations 17, 18 and 19 """ vs_mod = vs30 / C["k1"] # Get linear global site response term f_site_g = C["c11"] * np.log(vs_mod) idx = vs30 > C["k1"] f_site_g[idx] = f_site_g[idx] + (C["k2"] * self.CONSTS["n"] * np.log(vs_mod[idx])) # Get nonlinear site response term idx = np.logical_not(idx) if np.any(idx): f_site_g[idx] = f_site_g[idx] + C["k2"] * ( np.log(pga_rock[idx] + self.CONSTS["c"] * (vs_mod[idx] ** self.CONSTS["n"])) - np.log(pga_rock[idx] + self.CONSTS["c"]) ) # For Japan sites (SJ = 1) further scaling is needed (equation 19) if self.CONSTS["SJ"]: fsite_j = np.log(vs_mod) idx = vs30 > 200.0 if np.any(idx): fsite_j[idx] = (C["c13"] + C["k2"] * self.CONSTS["n"]) *\ fsite_j[idx] idx = np.logical_not(idx) if np.any(idx): fsite_j[idx] = (C["c12"] + C["k2"] * self.CONSTS["n"]) *\ (fsite_j[idx] - np.log(200.0 / C["k1"])) return f_site_g + fsite_j else: return f_site_g
python
def _get_shallow_site_response_term(self, C, vs30, pga_rock): """ Returns the shallow site response term defined in equations 17, 18 and 19 """ vs_mod = vs30 / C["k1"] # Get linear global site response term f_site_g = C["c11"] * np.log(vs_mod) idx = vs30 > C["k1"] f_site_g[idx] = f_site_g[idx] + (C["k2"] * self.CONSTS["n"] * np.log(vs_mod[idx])) # Get nonlinear site response term idx = np.logical_not(idx) if np.any(idx): f_site_g[idx] = f_site_g[idx] + C["k2"] * ( np.log(pga_rock[idx] + self.CONSTS["c"] * (vs_mod[idx] ** self.CONSTS["n"])) - np.log(pga_rock[idx] + self.CONSTS["c"]) ) # For Japan sites (SJ = 1) further scaling is needed (equation 19) if self.CONSTS["SJ"]: fsite_j = np.log(vs_mod) idx = vs30 > 200.0 if np.any(idx): fsite_j[idx] = (C["c13"] + C["k2"] * self.CONSTS["n"]) *\ fsite_j[idx] idx = np.logical_not(idx) if np.any(idx): fsite_j[idx] = (C["c12"] + C["k2"] * self.CONSTS["n"]) *\ (fsite_j[idx] - np.log(200.0 / C["k1"])) return f_site_g + fsite_j else: return f_site_g
[ "def", "_get_shallow_site_response_term", "(", "self", ",", "C", ",", "vs30", ",", "pga_rock", ")", ":", "vs_mod", "=", "vs30", "/", "C", "[", "\"k1\"", "]", "# Get linear global site response term", "f_site_g", "=", "C", "[", "\"c11\"", "]", "*", "np", ".", "log", "(", "vs_mod", ")", "idx", "=", "vs30", ">", "C", "[", "\"k1\"", "]", "f_site_g", "[", "idx", "]", "=", "f_site_g", "[", "idx", "]", "+", "(", "C", "[", "\"k2\"", "]", "*", "self", ".", "CONSTS", "[", "\"n\"", "]", "*", "np", ".", "log", "(", "vs_mod", "[", "idx", "]", ")", ")", "# Get nonlinear site response term", "idx", "=", "np", ".", "logical_not", "(", "idx", ")", "if", "np", ".", "any", "(", "idx", ")", ":", "f_site_g", "[", "idx", "]", "=", "f_site_g", "[", "idx", "]", "+", "C", "[", "\"k2\"", "]", "*", "(", "np", ".", "log", "(", "pga_rock", "[", "idx", "]", "+", "self", ".", "CONSTS", "[", "\"c\"", "]", "*", "(", "vs_mod", "[", "idx", "]", "**", "self", ".", "CONSTS", "[", "\"n\"", "]", ")", ")", "-", "np", ".", "log", "(", "pga_rock", "[", "idx", "]", "+", "self", ".", "CONSTS", "[", "\"c\"", "]", ")", ")", "# For Japan sites (SJ = 1) further scaling is needed (equation 19)", "if", "self", ".", "CONSTS", "[", "\"SJ\"", "]", ":", "fsite_j", "=", "np", ".", "log", "(", "vs_mod", ")", "idx", "=", "vs30", ">", "200.0", "if", "np", ".", "any", "(", "idx", ")", ":", "fsite_j", "[", "idx", "]", "=", "(", "C", "[", "\"c13\"", "]", "+", "C", "[", "\"k2\"", "]", "*", "self", ".", "CONSTS", "[", "\"n\"", "]", ")", "*", "fsite_j", "[", "idx", "]", "idx", "=", "np", ".", "logical_not", "(", "idx", ")", "if", "np", ".", "any", "(", "idx", ")", ":", "fsite_j", "[", "idx", "]", "=", "(", "C", "[", "\"c12\"", "]", "+", "C", "[", "\"k2\"", "]", "*", "self", ".", "CONSTS", "[", "\"n\"", "]", ")", "*", "(", "fsite_j", "[", "idx", "]", "-", "np", ".", "log", "(", "200.0", "/", "C", "[", "\"k1\"", "]", ")", ")", "return", "f_site_g", "+", "fsite_j", "else", ":", "return", "f_site_g" ]
Returns the shallow site response term defined in equations 17, 18 and 19
[ "Returns", "the", "shallow", "site", "response", "term", "defined", "in", "equations", "17", "18", "and", "19" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L334-L369
473
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_stddevs
def _get_stddevs(self, C, C_PGA, rup, sites, pga1100, stddev_types): """ Returns the inter- and intra-event and total standard deviations """ # Get stddevs for PGA on basement rock tau_lnpga_b, phi_lnpga_b = self._get_stddevs_pga(C_PGA, rup) num_sites = len(sites.vs30) # Get tau_lny on the basement rock tau_lnyb = self._get_taulny(C, rup.mag) # Get phi_lny on the basement rock phi_lnyb = np.sqrt(self._get_philny(C, rup.mag) ** 2. - self.CONSTS["philnAF"] ** 2.) # Get site scaling term alpha = self._get_alpha(C, sites.vs30, pga1100) # Evaluate tau according to equation 29 tau = np.sqrt( (tau_lnyb ** 2.) + ((alpha ** 2.) * (tau_lnpga_b ** 2.)) + (2.0 * alpha * C["rholny"] * tau_lnyb * tau_lnpga_b)) # Evaluate phi according to equation 30 phi = np.sqrt( (phi_lnyb ** 2.) + (self.CONSTS["philnAF"] ** 2.) + ((alpha ** 2.) * (phi_lnpga_b ** 2.)) + (2.0 * alpha * C["rholny"] * phi_lnyb * phi_lnpga_b)) stddevs = [] for stddev_type in stddev_types: assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const.StdDev.TOTAL: stddevs.append(np.sqrt((tau ** 2.) + (phi ** 2.)) + np.zeros(num_sites)) elif stddev_type == const.StdDev.INTRA_EVENT: stddevs.append(phi + np.zeros(num_sites)) elif stddev_type == const.StdDev.INTER_EVENT: stddevs.append(tau + np.zeros(num_sites)) return stddevs
python
def _get_stddevs(self, C, C_PGA, rup, sites, pga1100, stddev_types): """ Returns the inter- and intra-event and total standard deviations """ # Get stddevs for PGA on basement rock tau_lnpga_b, phi_lnpga_b = self._get_stddevs_pga(C_PGA, rup) num_sites = len(sites.vs30) # Get tau_lny on the basement rock tau_lnyb = self._get_taulny(C, rup.mag) # Get phi_lny on the basement rock phi_lnyb = np.sqrt(self._get_philny(C, rup.mag) ** 2. - self.CONSTS["philnAF"] ** 2.) # Get site scaling term alpha = self._get_alpha(C, sites.vs30, pga1100) # Evaluate tau according to equation 29 tau = np.sqrt( (tau_lnyb ** 2.) + ((alpha ** 2.) * (tau_lnpga_b ** 2.)) + (2.0 * alpha * C["rholny"] * tau_lnyb * tau_lnpga_b)) # Evaluate phi according to equation 30 phi = np.sqrt( (phi_lnyb ** 2.) + (self.CONSTS["philnAF"] ** 2.) + ((alpha ** 2.) * (phi_lnpga_b ** 2.)) + (2.0 * alpha * C["rholny"] * phi_lnyb * phi_lnpga_b)) stddevs = [] for stddev_type in stddev_types: assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const.StdDev.TOTAL: stddevs.append(np.sqrt((tau ** 2.) + (phi ** 2.)) + np.zeros(num_sites)) elif stddev_type == const.StdDev.INTRA_EVENT: stddevs.append(phi + np.zeros(num_sites)) elif stddev_type == const.StdDev.INTER_EVENT: stddevs.append(tau + np.zeros(num_sites)) return stddevs
[ "def", "_get_stddevs", "(", "self", ",", "C", ",", "C_PGA", ",", "rup", ",", "sites", ",", "pga1100", ",", "stddev_types", ")", ":", "# Get stddevs for PGA on basement rock", "tau_lnpga_b", ",", "phi_lnpga_b", "=", "self", ".", "_get_stddevs_pga", "(", "C_PGA", ",", "rup", ")", "num_sites", "=", "len", "(", "sites", ".", "vs30", ")", "# Get tau_lny on the basement rock", "tau_lnyb", "=", "self", ".", "_get_taulny", "(", "C", ",", "rup", ".", "mag", ")", "# Get phi_lny on the basement rock", "phi_lnyb", "=", "np", ".", "sqrt", "(", "self", ".", "_get_philny", "(", "C", ",", "rup", ".", "mag", ")", "**", "2.", "-", "self", ".", "CONSTS", "[", "\"philnAF\"", "]", "**", "2.", ")", "# Get site scaling term", "alpha", "=", "self", ".", "_get_alpha", "(", "C", ",", "sites", ".", "vs30", ",", "pga1100", ")", "# Evaluate tau according to equation 29", "tau", "=", "np", ".", "sqrt", "(", "(", "tau_lnyb", "**", "2.", ")", "+", "(", "(", "alpha", "**", "2.", ")", "*", "(", "tau_lnpga_b", "**", "2.", ")", ")", "+", "(", "2.0", "*", "alpha", "*", "C", "[", "\"rholny\"", "]", "*", "tau_lnyb", "*", "tau_lnpga_b", ")", ")", "# Evaluate phi according to equation 30", "phi", "=", "np", ".", "sqrt", "(", "(", "phi_lnyb", "**", "2.", ")", "+", "(", "self", ".", "CONSTS", "[", "\"philnAF\"", "]", "**", "2.", ")", "+", "(", "(", "alpha", "**", "2.", ")", "*", "(", "phi_lnpga_b", "**", "2.", ")", ")", "+", "(", "2.0", "*", "alpha", "*", "C", "[", "\"rholny\"", "]", "*", "phi_lnyb", "*", "phi_lnpga_b", ")", ")", "stddevs", "=", "[", "]", "for", "stddev_type", "in", "stddev_types", ":", "assert", "stddev_type", "in", "self", ".", "DEFINED_FOR_STANDARD_DEVIATION_TYPES", "if", "stddev_type", "==", "const", ".", "StdDev", ".", "TOTAL", ":", "stddevs", ".", "append", "(", "np", ".", "sqrt", "(", "(", "tau", "**", "2.", ")", "+", "(", "phi", "**", "2.", ")", ")", "+", "np", ".", "zeros", "(", "num_sites", ")", ")", "elif", "stddev_type", "==", "const", ".", "StdDev", ".", "INTRA_EVENT", ":", 
"stddevs", ".", "append", "(", "phi", "+", "np", ".", "zeros", "(", "num_sites", ")", ")", "elif", "stddev_type", "==", "const", ".", "StdDev", ".", "INTER_EVENT", ":", "stddevs", ".", "append", "(", "tau", "+", "np", ".", "zeros", "(", "num_sites", ")", ")", "return", "stddevs" ]
Returns the inter- and intra-event and total standard deviations
[ "Returns", "the", "inter", "-", "and", "intra", "-", "event", "and", "total", "standard", "deviations" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L371-L407
474
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_stddevs_pga
def _get_stddevs_pga(self, C, rup): """ Returns the inter- and intra-event coefficients for PGA """ tau_lnpga_b = self._get_taulny(C, rup.mag) phi_lnpga_b = np.sqrt(self._get_philny(C, rup.mag) ** 2. - self.CONSTS["philnAF"] ** 2.) return tau_lnpga_b, phi_lnpga_b
python
def _get_stddevs_pga(self, C, rup): """ Returns the inter- and intra-event coefficients for PGA """ tau_lnpga_b = self._get_taulny(C, rup.mag) phi_lnpga_b = np.sqrt(self._get_philny(C, rup.mag) ** 2. - self.CONSTS["philnAF"] ** 2.) return tau_lnpga_b, phi_lnpga_b
[ "def", "_get_stddevs_pga", "(", "self", ",", "C", ",", "rup", ")", ":", "tau_lnpga_b", "=", "self", ".", "_get_taulny", "(", "C", ",", "rup", ".", "mag", ")", "phi_lnpga_b", "=", "np", ".", "sqrt", "(", "self", ".", "_get_philny", "(", "C", ",", "rup", ".", "mag", ")", "**", "2.", "-", "self", ".", "CONSTS", "[", "\"philnAF\"", "]", "**", "2.", ")", "return", "tau_lnpga_b", ",", "phi_lnpga_b" ]
Returns the inter- and intra-event coefficients for PGA
[ "Returns", "the", "inter", "-", "and", "intra", "-", "event", "coefficients", "for", "PGA" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L409-L416
475
gem/oq-engine
openquake/hazardlib/gsim/campbell_bozorgnia_2014.py
CampbellBozorgnia2014._get_alpha
def _get_alpha(self, C, vs30, pga_rock): """ Returns the alpha, the linearised functional relationship between the site amplification and the PGA on rock. Equation 31. """ alpha = np.zeros(len(pga_rock)) idx = vs30 < C["k1"] if np.any(idx): af1 = pga_rock[idx] +\ self.CONSTS["c"] * ((vs30[idx] / C["k1"]) ** self.CONSTS["n"]) af2 = pga_rock[idx] + self.CONSTS["c"] alpha[idx] = C["k2"] * pga_rock[idx] * ((1.0 / af1) - (1.0 / af2)) return alpha
python
def _get_alpha(self, C, vs30, pga_rock): """ Returns the alpha, the linearised functional relationship between the site amplification and the PGA on rock. Equation 31. """ alpha = np.zeros(len(pga_rock)) idx = vs30 < C["k1"] if np.any(idx): af1 = pga_rock[idx] +\ self.CONSTS["c"] * ((vs30[idx] / C["k1"]) ** self.CONSTS["n"]) af2 = pga_rock[idx] + self.CONSTS["c"] alpha[idx] = C["k2"] * pga_rock[idx] * ((1.0 / af1) - (1.0 / af2)) return alpha
[ "def", "_get_alpha", "(", "self", ",", "C", ",", "vs30", ",", "pga_rock", ")", ":", "alpha", "=", "np", ".", "zeros", "(", "len", "(", "pga_rock", ")", ")", "idx", "=", "vs30", "<", "C", "[", "\"k1\"", "]", "if", "np", ".", "any", "(", "idx", ")", ":", "af1", "=", "pga_rock", "[", "idx", "]", "+", "self", ".", "CONSTS", "[", "\"c\"", "]", "*", "(", "(", "vs30", "[", "idx", "]", "/", "C", "[", "\"k1\"", "]", ")", "**", "self", ".", "CONSTS", "[", "\"n\"", "]", ")", "af2", "=", "pga_rock", "[", "idx", "]", "+", "self", ".", "CONSTS", "[", "\"c\"", "]", "alpha", "[", "idx", "]", "=", "C", "[", "\"k2\"", "]", "*", "pga_rock", "[", "idx", "]", "*", "(", "(", "1.0", "/", "af1", ")", "-", "(", "1.0", "/", "af2", ")", ")", "return", "alpha" ]
Returns the alpha, the linearised functional relationship between the site amplification and the PGA on rock. Equation 31.
[ "Returns", "the", "alpha", "the", "linearised", "functional", "relationship", "between", "the", "site", "amplification", "and", "the", "PGA", "on", "rock", ".", "Equation", "31", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/campbell_bozorgnia_2014.py#L442-L454
476
gem/oq-engine
openquake/hmtk/seismicity/utils.py
decimal_time
def decimal_time(year, month, day, hour, minute, second): """ Returns the full time as a decimal value :param year: Year of events (integer numpy.ndarray) :param month: Month of events (integer numpy.ndarray) :param day: Days of event (integer numpy.ndarray) :param hour: Hour of event (integer numpy.ndarray) :param minute: Minute of event (integer numpy.ndarray) :param second: Second of event (float numpy.ndarray) :returns decimal_time: Decimal representation of the time (as numpy.ndarray) """ tmo = np.ones_like(year, dtype=int) tda = np.ones_like(year, dtype=int) tho = np.zeros_like(year, dtype=int) tmi = np.zeros_like(year, dtype=int) tse = np.zeros_like(year, dtype=float) # # Checking inputs if any(month < 1) or any(month > 12): raise ValueError('Month must be in [1, 12]') if any(day < 1) or any(day > 31): raise ValueError('Day must be in [1, 31]') if any(hour < 0) or any(hour > 24): raise ValueError('Hour must be in [0, 24]') if any(minute < 0) or any(minute > 60): raise ValueError('Minute must be in [0, 60]') if any(second < 0) or any(second > 60): raise ValueError('Second must be in [0, 60]') # # Initialising values if any(month): tmo = month if any(day): tda = day if any(hour): tho = hour if any(minute): tmi = minute if any(second): tse = second # # Computing decimal tmonth = tmo - 1 day_count = MARKER_NORMAL[tmonth] + tda - 1 id_leap = leap_check(year) leap_loc = np.where(id_leap)[0] day_count[leap_loc] = MARKER_LEAP[tmonth[leap_loc]] + tda[leap_loc] - 1 year_secs = ((day_count.astype(float) * SECONDS_PER_DAY) + tse + (60. * tmi.astype(float)) + (3600. * tho.astype(float))) dtime = year.astype(float) + (year_secs / (365. * 24. * 3600.)) dtime[leap_loc] = year[leap_loc].astype(float) + \ (year_secs[leap_loc] / (366. * 24. * 3600.)) return dtime
python
def decimal_time(year, month, day, hour, minute, second): """ Returns the full time as a decimal value :param year: Year of events (integer numpy.ndarray) :param month: Month of events (integer numpy.ndarray) :param day: Days of event (integer numpy.ndarray) :param hour: Hour of event (integer numpy.ndarray) :param minute: Minute of event (integer numpy.ndarray) :param second: Second of event (float numpy.ndarray) :returns decimal_time: Decimal representation of the time (as numpy.ndarray) """ tmo = np.ones_like(year, dtype=int) tda = np.ones_like(year, dtype=int) tho = np.zeros_like(year, dtype=int) tmi = np.zeros_like(year, dtype=int) tse = np.zeros_like(year, dtype=float) # # Checking inputs if any(month < 1) or any(month > 12): raise ValueError('Month must be in [1, 12]') if any(day < 1) or any(day > 31): raise ValueError('Day must be in [1, 31]') if any(hour < 0) or any(hour > 24): raise ValueError('Hour must be in [0, 24]') if any(minute < 0) or any(minute > 60): raise ValueError('Minute must be in [0, 60]') if any(second < 0) or any(second > 60): raise ValueError('Second must be in [0, 60]') # # Initialising values if any(month): tmo = month if any(day): tda = day if any(hour): tho = hour if any(minute): tmi = minute if any(second): tse = second # # Computing decimal tmonth = tmo - 1 day_count = MARKER_NORMAL[tmonth] + tda - 1 id_leap = leap_check(year) leap_loc = np.where(id_leap)[0] day_count[leap_loc] = MARKER_LEAP[tmonth[leap_loc]] + tda[leap_loc] - 1 year_secs = ((day_count.astype(float) * SECONDS_PER_DAY) + tse + (60. * tmi.astype(float)) + (3600. * tho.astype(float))) dtime = year.astype(float) + (year_secs / (365. * 24. * 3600.)) dtime[leap_loc] = year[leap_loc].astype(float) + \ (year_secs[leap_loc] / (366. * 24. * 3600.)) return dtime
[ "def", "decimal_time", "(", "year", ",", "month", ",", "day", ",", "hour", ",", "minute", ",", "second", ")", ":", "tmo", "=", "np", ".", "ones_like", "(", "year", ",", "dtype", "=", "int", ")", "tda", "=", "np", ".", "ones_like", "(", "year", ",", "dtype", "=", "int", ")", "tho", "=", "np", ".", "zeros_like", "(", "year", ",", "dtype", "=", "int", ")", "tmi", "=", "np", ".", "zeros_like", "(", "year", ",", "dtype", "=", "int", ")", "tse", "=", "np", ".", "zeros_like", "(", "year", ",", "dtype", "=", "float", ")", "#", "# Checking inputs", "if", "any", "(", "month", "<", "1", ")", "or", "any", "(", "month", ">", "12", ")", ":", "raise", "ValueError", "(", "'Month must be in [1, 12]'", ")", "if", "any", "(", "day", "<", "1", ")", "or", "any", "(", "day", ">", "31", ")", ":", "raise", "ValueError", "(", "'Day must be in [1, 31]'", ")", "if", "any", "(", "hour", "<", "0", ")", "or", "any", "(", "hour", ">", "24", ")", ":", "raise", "ValueError", "(", "'Hour must be in [0, 24]'", ")", "if", "any", "(", "minute", "<", "0", ")", "or", "any", "(", "minute", ">", "60", ")", ":", "raise", "ValueError", "(", "'Minute must be in [0, 60]'", ")", "if", "any", "(", "second", "<", "0", ")", "or", "any", "(", "second", ">", "60", ")", ":", "raise", "ValueError", "(", "'Second must be in [0, 60]'", ")", "#", "# Initialising values", "if", "any", "(", "month", ")", ":", "tmo", "=", "month", "if", "any", "(", "day", ")", ":", "tda", "=", "day", "if", "any", "(", "hour", ")", ":", "tho", "=", "hour", "if", "any", "(", "minute", ")", ":", "tmi", "=", "minute", "if", "any", "(", "second", ")", ":", "tse", "=", "second", "#", "# Computing decimal", "tmonth", "=", "tmo", "-", "1", "day_count", "=", "MARKER_NORMAL", "[", "tmonth", "]", "+", "tda", "-", "1", "id_leap", "=", "leap_check", "(", "year", ")", "leap_loc", "=", "np", ".", "where", "(", "id_leap", ")", "[", "0", "]", "day_count", "[", "leap_loc", "]", "=", "MARKER_LEAP", "[", "tmonth", "[", "leap_loc", "]", "]", "+", "tda", "[", 
"leap_loc", "]", "-", "1", "year_secs", "=", "(", "(", "day_count", ".", "astype", "(", "float", ")", "*", "SECONDS_PER_DAY", ")", "+", "tse", "+", "(", "60.", "*", "tmi", ".", "astype", "(", "float", ")", ")", "+", "(", "3600.", "*", "tho", ".", "astype", "(", "float", ")", ")", ")", "dtime", "=", "year", ".", "astype", "(", "float", ")", "+", "(", "year_secs", "/", "(", "365.", "*", "24.", "*", "3600.", ")", ")", "dtime", "[", "leap_loc", "]", "=", "year", "[", "leap_loc", "]", ".", "astype", "(", "float", ")", "+", "(", "year_secs", "[", "leap_loc", "]", "/", "(", "366.", "*", "24.", "*", "3600.", ")", ")", "return", "dtime" ]
Returns the full time as a decimal value :param year: Year of events (integer numpy.ndarray) :param month: Month of events (integer numpy.ndarray) :param day: Days of event (integer numpy.ndarray) :param hour: Hour of event (integer numpy.ndarray) :param minute: Minute of event (integer numpy.ndarray) :param second: Second of event (float numpy.ndarray) :returns decimal_time: Decimal representation of the time (as numpy.ndarray)
[ "Returns", "the", "full", "time", "as", "a", "decimal", "value" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/utils.py#L137-L197
477
gem/oq-engine
openquake/hmtk/seismicity/utils.py
haversine
def haversine(lon1, lat1, lon2, lat2, radians=False, earth_rad=6371.227): """ Allows to calculate geographical distance using the haversine formula. :param lon1: longitude of the first set of locations :type lon1: numpy.ndarray :param lat1: latitude of the frist set of locations :type lat1: numpy.ndarray :param lon2: longitude of the second set of locations :type lon2: numpy.float64 :param lat2: latitude of the second set of locations :type lat2: numpy.float64 :keyword radians: states if locations are given in terms of radians :type radians: bool :keyword earth_rad: radius of the earth in km :type earth_rad: float :returns: geographical distance in km :rtype: numpy.ndarray """ if not radians: cfact = np.pi / 180. lon1 = cfact * lon1 lat1 = cfact * lat1 lon2 = cfact * lon2 lat2 = cfact * lat2 # Number of locations in each set of points if not np.shape(lon1): nlocs1 = 1 lon1 = np.array([lon1]) lat1 = np.array([lat1]) else: nlocs1 = np.max(np.shape(lon1)) if not np.shape(lon2): nlocs2 = 1 lon2 = np.array([lon2]) lat2 = np.array([lat2]) else: nlocs2 = np.max(np.shape(lon2)) # Pre-allocate array distance = np.zeros((nlocs1, nlocs2)) i = 0 while i < nlocs2: # Perform distance calculation dlat = lat1 - lat2[i] dlon = lon1 - lon2[i] aval = (np.sin(dlat / 2.) ** 2.) + (np.cos(lat1) * np.cos(lat2[i]) * (np.sin(dlon / 2.) ** 2.)) distance[:, i] = (2. * earth_rad * np.arctan2(np.sqrt(aval), np.sqrt(1 - aval))).T i += 1 return distance
python
def haversine(lon1, lat1, lon2, lat2, radians=False, earth_rad=6371.227): """ Allows to calculate geographical distance using the haversine formula. :param lon1: longitude of the first set of locations :type lon1: numpy.ndarray :param lat1: latitude of the frist set of locations :type lat1: numpy.ndarray :param lon2: longitude of the second set of locations :type lon2: numpy.float64 :param lat2: latitude of the second set of locations :type lat2: numpy.float64 :keyword radians: states if locations are given in terms of radians :type radians: bool :keyword earth_rad: radius of the earth in km :type earth_rad: float :returns: geographical distance in km :rtype: numpy.ndarray """ if not radians: cfact = np.pi / 180. lon1 = cfact * lon1 lat1 = cfact * lat1 lon2 = cfact * lon2 lat2 = cfact * lat2 # Number of locations in each set of points if not np.shape(lon1): nlocs1 = 1 lon1 = np.array([lon1]) lat1 = np.array([lat1]) else: nlocs1 = np.max(np.shape(lon1)) if not np.shape(lon2): nlocs2 = 1 lon2 = np.array([lon2]) lat2 = np.array([lat2]) else: nlocs2 = np.max(np.shape(lon2)) # Pre-allocate array distance = np.zeros((nlocs1, nlocs2)) i = 0 while i < nlocs2: # Perform distance calculation dlat = lat1 - lat2[i] dlon = lon1 - lon2[i] aval = (np.sin(dlat / 2.) ** 2.) + (np.cos(lat1) * np.cos(lat2[i]) * (np.sin(dlon / 2.) ** 2.)) distance[:, i] = (2. * earth_rad * np.arctan2(np.sqrt(aval), np.sqrt(1 - aval))).T i += 1 return distance
[ "def", "haversine", "(", "lon1", ",", "lat1", ",", "lon2", ",", "lat2", ",", "radians", "=", "False", ",", "earth_rad", "=", "6371.227", ")", ":", "if", "not", "radians", ":", "cfact", "=", "np", ".", "pi", "/", "180.", "lon1", "=", "cfact", "*", "lon1", "lat1", "=", "cfact", "*", "lat1", "lon2", "=", "cfact", "*", "lon2", "lat2", "=", "cfact", "*", "lat2", "# Number of locations in each set of points", "if", "not", "np", ".", "shape", "(", "lon1", ")", ":", "nlocs1", "=", "1", "lon1", "=", "np", ".", "array", "(", "[", "lon1", "]", ")", "lat1", "=", "np", ".", "array", "(", "[", "lat1", "]", ")", "else", ":", "nlocs1", "=", "np", ".", "max", "(", "np", ".", "shape", "(", "lon1", ")", ")", "if", "not", "np", ".", "shape", "(", "lon2", ")", ":", "nlocs2", "=", "1", "lon2", "=", "np", ".", "array", "(", "[", "lon2", "]", ")", "lat2", "=", "np", ".", "array", "(", "[", "lat2", "]", ")", "else", ":", "nlocs2", "=", "np", ".", "max", "(", "np", ".", "shape", "(", "lon2", ")", ")", "# Pre-allocate array", "distance", "=", "np", ".", "zeros", "(", "(", "nlocs1", ",", "nlocs2", ")", ")", "i", "=", "0", "while", "i", "<", "nlocs2", ":", "# Perform distance calculation", "dlat", "=", "lat1", "-", "lat2", "[", "i", "]", "dlon", "=", "lon1", "-", "lon2", "[", "i", "]", "aval", "=", "(", "np", ".", "sin", "(", "dlat", "/", "2.", ")", "**", "2.", ")", "+", "(", "np", ".", "cos", "(", "lat1", ")", "*", "np", ".", "cos", "(", "lat2", "[", "i", "]", ")", "*", "(", "np", ".", "sin", "(", "dlon", "/", "2.", ")", "**", "2.", ")", ")", "distance", "[", ":", ",", "i", "]", "=", "(", "2.", "*", "earth_rad", "*", "np", ".", "arctan2", "(", "np", ".", "sqrt", "(", "aval", ")", ",", "np", ".", "sqrt", "(", "1", "-", "aval", ")", ")", ")", ".", "T", "i", "+=", "1", "return", "distance" ]
Allows to calculate geographical distance using the haversine formula. :param lon1: longitude of the first set of locations :type lon1: numpy.ndarray :param lat1: latitude of the frist set of locations :type lat1: numpy.ndarray :param lon2: longitude of the second set of locations :type lon2: numpy.float64 :param lat2: latitude of the second set of locations :type lat2: numpy.float64 :keyword radians: states if locations are given in terms of radians :type radians: bool :keyword earth_rad: radius of the earth in km :type earth_rad: float :returns: geographical distance in km :rtype: numpy.ndarray
[ "Allows", "to", "calculate", "geographical", "distance", "using", "the", "haversine", "formula", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/utils.py#L200-L252
478
gem/oq-engine
openquake/hmtk/seismicity/utils.py
greg2julian
def greg2julian(year, month, day, hour, minute, second): """ Function to convert a date from Gregorian to Julian format :param year: Year of events (integer numpy.ndarray) :param month: Month of events (integer numpy.ndarray) :param day: Days of event (integer numpy.ndarray) :param hour: Hour of event (integer numpy.ndarray) :param minute: Minute of event (integer numpy.ndarray) :param second: Second of event (float numpy.ndarray) :returns julian_time: Julian representation of the time (as float numpy.ndarray) """ year = year.astype(float) month = month.astype(float) day = day.astype(float) timeut = hour.astype(float) + (minute.astype(float) / 60.0) + \ (second / 3600.0) julian_time = ((367.0 * year) - np.floor( 7.0 * (year + np.floor((month + 9.0) / 12.0)) / 4.0) - np.floor(3.0 * (np.floor((year + (month - 9.0) / 7.0) / 100.0) + 1.0) / 4.0) + np.floor((275.0 * month) / 9.0) + day + 1721028.5 + (timeut / 24.0)) return julian_time
python
def greg2julian(year, month, day, hour, minute, second): """ Function to convert a date from Gregorian to Julian format :param year: Year of events (integer numpy.ndarray) :param month: Month of events (integer numpy.ndarray) :param day: Days of event (integer numpy.ndarray) :param hour: Hour of event (integer numpy.ndarray) :param minute: Minute of event (integer numpy.ndarray) :param second: Second of event (float numpy.ndarray) :returns julian_time: Julian representation of the time (as float numpy.ndarray) """ year = year.astype(float) month = month.astype(float) day = day.astype(float) timeut = hour.astype(float) + (minute.astype(float) / 60.0) + \ (second / 3600.0) julian_time = ((367.0 * year) - np.floor( 7.0 * (year + np.floor((month + 9.0) / 12.0)) / 4.0) - np.floor(3.0 * (np.floor((year + (month - 9.0) / 7.0) / 100.0) + 1.0) / 4.0) + np.floor((275.0 * month) / 9.0) + day + 1721028.5 + (timeut / 24.0)) return julian_time
[ "def", "greg2julian", "(", "year", ",", "month", ",", "day", ",", "hour", ",", "minute", ",", "second", ")", ":", "year", "=", "year", ".", "astype", "(", "float", ")", "month", "=", "month", ".", "astype", "(", "float", ")", "day", "=", "day", ".", "astype", "(", "float", ")", "timeut", "=", "hour", ".", "astype", "(", "float", ")", "+", "(", "minute", ".", "astype", "(", "float", ")", "/", "60.0", ")", "+", "(", "second", "/", "3600.0", ")", "julian_time", "=", "(", "(", "367.0", "*", "year", ")", "-", "np", ".", "floor", "(", "7.0", "*", "(", "year", "+", "np", ".", "floor", "(", "(", "month", "+", "9.0", ")", "/", "12.0", ")", ")", "/", "4.0", ")", "-", "np", ".", "floor", "(", "3.0", "*", "(", "np", ".", "floor", "(", "(", "year", "+", "(", "month", "-", "9.0", ")", "/", "7.0", ")", "/", "100.0", ")", "+", "1.0", ")", "/", "4.0", ")", "+", "np", ".", "floor", "(", "(", "275.0", "*", "month", ")", "/", "9.0", ")", "+", "day", "+", "1721028.5", "+", "(", "timeut", "/", "24.0", ")", ")", "return", "julian_time" ]
Function to convert a date from Gregorian to Julian format :param year: Year of events (integer numpy.ndarray) :param month: Month of events (integer numpy.ndarray) :param day: Days of event (integer numpy.ndarray) :param hour: Hour of event (integer numpy.ndarray) :param minute: Minute of event (integer numpy.ndarray) :param second: Second of event (float numpy.ndarray) :returns julian_time: Julian representation of the time (as float numpy.ndarray)
[ "Function", "to", "convert", "a", "date", "from", "Gregorian", "to", "Julian", "format" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/utils.py#L255-L289
479
gem/oq-engine
openquake/hmtk/seismicity/utils.py
sample_truncated_gaussian_vector
def sample_truncated_gaussian_vector(data, uncertainties, bounds=None): ''' Samples a Gaussian distribution subject to boundaries on the data :param numpy.ndarray data: Vector of N data values :param numpy.ndarray uncertainties: Vector of N data uncertainties :param int number_bootstraps: Number of bootstrap samples :param tuple bounds: (Lower, Upper) bound of data space ''' nvals = len(data) if bounds: # if bounds[0] or (fabs(bounds[0]) < PRECISION): if bounds[0] is not None: lower_bound = (bounds[0] - data) / uncertainties else: lower_bound = -np.inf * np.ones_like(data) # if bounds[1] or (fabs(bounds[1]) < PRECISION): if bounds[1] is not None: upper_bound = (bounds[1] - data) / uncertainties else: upper_bound = np.inf * np.ones_like(data) sample = hmtk_truncnorm.rvs(lower_bound, upper_bound, size=nvals) else: sample = np.random.normal(0., 1., nvals) return data + uncertainties * sample
python
def sample_truncated_gaussian_vector(data, uncertainties, bounds=None): ''' Samples a Gaussian distribution subject to boundaries on the data :param numpy.ndarray data: Vector of N data values :param numpy.ndarray uncertainties: Vector of N data uncertainties :param int number_bootstraps: Number of bootstrap samples :param tuple bounds: (Lower, Upper) bound of data space ''' nvals = len(data) if bounds: # if bounds[0] or (fabs(bounds[0]) < PRECISION): if bounds[0] is not None: lower_bound = (bounds[0] - data) / uncertainties else: lower_bound = -np.inf * np.ones_like(data) # if bounds[1] or (fabs(bounds[1]) < PRECISION): if bounds[1] is not None: upper_bound = (bounds[1] - data) / uncertainties else: upper_bound = np.inf * np.ones_like(data) sample = hmtk_truncnorm.rvs(lower_bound, upper_bound, size=nvals) else: sample = np.random.normal(0., 1., nvals) return data + uncertainties * sample
[ "def", "sample_truncated_gaussian_vector", "(", "data", ",", "uncertainties", ",", "bounds", "=", "None", ")", ":", "nvals", "=", "len", "(", "data", ")", "if", "bounds", ":", "# if bounds[0] or (fabs(bounds[0]) < PRECISION):", "if", "bounds", "[", "0", "]", "is", "not", "None", ":", "lower_bound", "=", "(", "bounds", "[", "0", "]", "-", "data", ")", "/", "uncertainties", "else", ":", "lower_bound", "=", "-", "np", ".", "inf", "*", "np", ".", "ones_like", "(", "data", ")", "# if bounds[1] or (fabs(bounds[1]) < PRECISION):", "if", "bounds", "[", "1", "]", "is", "not", "None", ":", "upper_bound", "=", "(", "bounds", "[", "1", "]", "-", "data", ")", "/", "uncertainties", "else", ":", "upper_bound", "=", "np", ".", "inf", "*", "np", ".", "ones_like", "(", "data", ")", "sample", "=", "hmtk_truncnorm", ".", "rvs", "(", "lower_bound", ",", "upper_bound", ",", "size", "=", "nvals", ")", "else", ":", "sample", "=", "np", ".", "random", ".", "normal", "(", "0.", ",", "1.", ",", "nvals", ")", "return", "data", "+", "uncertainties", "*", "sample" ]
Samples a Gaussian distribution subject to boundaries on the data :param numpy.ndarray data: Vector of N data values :param numpy.ndarray uncertainties: Vector of N data uncertainties :param int number_bootstraps: Number of bootstrap samples :param tuple bounds: (Lower, Upper) bound of data space
[ "Samples", "a", "Gaussian", "distribution", "subject", "to", "boundaries", "on", "the", "data" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/utils.py#L333-L363
480
gem/oq-engine
openquake/hmtk/seismicity/utils.py
hmtk_histogram_2D
def hmtk_histogram_2D(xvalues, yvalues, bins, x_offset=1.0E-10, y_offset=1.0E-10): """ See the explanation for the 1D case - now applied to 2D. :param numpy.ndarray xvalues: Values of x-data :param numpy.ndarray yvalues: Values of y-data :param tuple bins: Tuple containing bin intervals for x-data and y-data (as numpy arrays) :param float x_offset: Small amount to offset the x-bins for floating point precision :param float y_offset: Small amount to offset the y-bins for floating point precision :returns: Count in each bin (as float) """ xbins, ybins = (bins[0] - x_offset, bins[1] - y_offset) n_x = len(xbins) - 1 n_y = len(ybins) - 1 counter = np.zeros([n_y, n_x], dtype=float) for j in range(n_y): y_idx = np.logical_and(yvalues >= ybins[j], yvalues < ybins[j + 1]) x_vals = xvalues[y_idx] for i in range(n_x): idx = np.logical_and(x_vals >= xbins[i], x_vals < xbins[i + 1]) counter[j, i] += float(np.sum(idx)) return counter.T
python
def hmtk_histogram_2D(xvalues, yvalues, bins, x_offset=1.0E-10, y_offset=1.0E-10): """ See the explanation for the 1D case - now applied to 2D. :param numpy.ndarray xvalues: Values of x-data :param numpy.ndarray yvalues: Values of y-data :param tuple bins: Tuple containing bin intervals for x-data and y-data (as numpy arrays) :param float x_offset: Small amount to offset the x-bins for floating point precision :param float y_offset: Small amount to offset the y-bins for floating point precision :returns: Count in each bin (as float) """ xbins, ybins = (bins[0] - x_offset, bins[1] - y_offset) n_x = len(xbins) - 1 n_y = len(ybins) - 1 counter = np.zeros([n_y, n_x], dtype=float) for j in range(n_y): y_idx = np.logical_and(yvalues >= ybins[j], yvalues < ybins[j + 1]) x_vals = xvalues[y_idx] for i in range(n_x): idx = np.logical_and(x_vals >= xbins[i], x_vals < xbins[i + 1]) counter[j, i] += float(np.sum(idx)) return counter.T
[ "def", "hmtk_histogram_2D", "(", "xvalues", ",", "yvalues", ",", "bins", ",", "x_offset", "=", "1.0E-10", ",", "y_offset", "=", "1.0E-10", ")", ":", "xbins", ",", "ybins", "=", "(", "bins", "[", "0", "]", "-", "x_offset", ",", "bins", "[", "1", "]", "-", "y_offset", ")", "n_x", "=", "len", "(", "xbins", ")", "-", "1", "n_y", "=", "len", "(", "ybins", ")", "-", "1", "counter", "=", "np", ".", "zeros", "(", "[", "n_y", ",", "n_x", "]", ",", "dtype", "=", "float", ")", "for", "j", "in", "range", "(", "n_y", ")", ":", "y_idx", "=", "np", ".", "logical_and", "(", "yvalues", ">=", "ybins", "[", "j", "]", ",", "yvalues", "<", "ybins", "[", "j", "+", "1", "]", ")", "x_vals", "=", "xvalues", "[", "y_idx", "]", "for", "i", "in", "range", "(", "n_x", ")", ":", "idx", "=", "np", ".", "logical_and", "(", "x_vals", ">=", "xbins", "[", "i", "]", ",", "x_vals", "<", "xbins", "[", "i", "+", "1", "]", ")", "counter", "[", "j", ",", "i", "]", "+=", "float", "(", "np", ".", "sum", "(", "idx", ")", ")", "return", "counter", ".", "T" ]
See the explanation for the 1D case - now applied to 2D. :param numpy.ndarray xvalues: Values of x-data :param numpy.ndarray yvalues: Values of y-data :param tuple bins: Tuple containing bin intervals for x-data and y-data (as numpy arrays) :param float x_offset: Small amount to offset the x-bins for floating point precision :param float y_offset: Small amount to offset the y-bins for floating point precision :returns: Count in each bin (as float)
[ "See", "the", "explanation", "for", "the", "1D", "case", "-", "now", "applied", "to", "2D", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/utils.py#L404-L432
481
gem/oq-engine
openquake/hmtk/seismicity/utils.py
bootstrap_histogram_1D
def bootstrap_histogram_1D( values, intervals, uncertainties=None, normalisation=False, number_bootstraps=None, boundaries=None): ''' Bootstrap samples a set of vectors :param numpy.ndarray values: The data values :param numpy.ndarray intervals: The bin edges :param numpy.ndarray uncertainties: The standard deviations of each observation :param bool normalisation: If True then returns the histogram as a density function :param int number_bootstraps: Number of bootstraps :param tuple boundaries: (Lower, Upper) bounds on the data :param returns: 1-D histogram of data ''' if not number_bootstraps or np.all(np.fabs(uncertainties < PRECISION)): # No bootstraps or all uncertaintes are zero - return ordinary # histogram #output = np.histogram(values, intervals)[0] output = hmtk_histogram_1D(values, intervals) if normalisation: output = output / float(np.sum(output)) else: output = output return output else: temp_hist = np.zeros([len(intervals) - 1, number_bootstraps], dtype=float) for iloc in range(0, number_bootstraps): sample = sample_truncated_gaussian_vector(values, uncertainties, boundaries) #output = np.histogram(sample, intervals)[0] output = hmtk_histogram_1D(sample, intervals) temp_hist[:, iloc] = output output = np.sum(temp_hist, axis=1) if normalisation: output = output / float(np.sum(output)) else: output = output / float(number_bootstraps) return output
python
def bootstrap_histogram_1D( values, intervals, uncertainties=None, normalisation=False, number_bootstraps=None, boundaries=None): ''' Bootstrap samples a set of vectors :param numpy.ndarray values: The data values :param numpy.ndarray intervals: The bin edges :param numpy.ndarray uncertainties: The standard deviations of each observation :param bool normalisation: If True then returns the histogram as a density function :param int number_bootstraps: Number of bootstraps :param tuple boundaries: (Lower, Upper) bounds on the data :param returns: 1-D histogram of data ''' if not number_bootstraps or np.all(np.fabs(uncertainties < PRECISION)): # No bootstraps or all uncertaintes are zero - return ordinary # histogram #output = np.histogram(values, intervals)[0] output = hmtk_histogram_1D(values, intervals) if normalisation: output = output / float(np.sum(output)) else: output = output return output else: temp_hist = np.zeros([len(intervals) - 1, number_bootstraps], dtype=float) for iloc in range(0, number_bootstraps): sample = sample_truncated_gaussian_vector(values, uncertainties, boundaries) #output = np.histogram(sample, intervals)[0] output = hmtk_histogram_1D(sample, intervals) temp_hist[:, iloc] = output output = np.sum(temp_hist, axis=1) if normalisation: output = output / float(np.sum(output)) else: output = output / float(number_bootstraps) return output
[ "def", "bootstrap_histogram_1D", "(", "values", ",", "intervals", ",", "uncertainties", "=", "None", ",", "normalisation", "=", "False", ",", "number_bootstraps", "=", "None", ",", "boundaries", "=", "None", ")", ":", "if", "not", "number_bootstraps", "or", "np", ".", "all", "(", "np", ".", "fabs", "(", "uncertainties", "<", "PRECISION", ")", ")", ":", "# No bootstraps or all uncertaintes are zero - return ordinary", "# histogram", "#output = np.histogram(values, intervals)[0]", "output", "=", "hmtk_histogram_1D", "(", "values", ",", "intervals", ")", "if", "normalisation", ":", "output", "=", "output", "/", "float", "(", "np", ".", "sum", "(", "output", ")", ")", "else", ":", "output", "=", "output", "return", "output", "else", ":", "temp_hist", "=", "np", ".", "zeros", "(", "[", "len", "(", "intervals", ")", "-", "1", ",", "number_bootstraps", "]", ",", "dtype", "=", "float", ")", "for", "iloc", "in", "range", "(", "0", ",", "number_bootstraps", ")", ":", "sample", "=", "sample_truncated_gaussian_vector", "(", "values", ",", "uncertainties", ",", "boundaries", ")", "#output = np.histogram(sample, intervals)[0]", "output", "=", "hmtk_histogram_1D", "(", "sample", ",", "intervals", ")", "temp_hist", "[", ":", ",", "iloc", "]", "=", "output", "output", "=", "np", ".", "sum", "(", "temp_hist", ",", "axis", "=", "1", ")", "if", "normalisation", ":", "output", "=", "output", "/", "float", "(", "np", ".", "sum", "(", "output", ")", ")", "else", ":", "output", "=", "output", "/", "float", "(", "number_bootstraps", ")", "return", "output" ]
Bootstrap samples a set of vectors :param numpy.ndarray values: The data values :param numpy.ndarray intervals: The bin edges :param numpy.ndarray uncertainties: The standard deviations of each observation :param bool normalisation: If True then returns the histogram as a density function :param int number_bootstraps: Number of bootstraps :param tuple boundaries: (Lower, Upper) bounds on the data :param returns: 1-D histogram of data
[ "Bootstrap", "samples", "a", "set", "of", "vectors" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/utils.py#L435-L483
482
gem/oq-engine
openquake/hmtk/seismicity/utils.py
bootstrap_histogram_2D
def bootstrap_histogram_2D( xvalues, yvalues, xbins, ybins, boundaries=[None, None], xsigma=None, ysigma=None, normalisation=False, number_bootstraps=None): ''' Calculates a 2D histogram of data, allowing for normalisation and bootstrap sampling :param numpy.ndarray xvalues: Data values of the first variable :param numpy.ndarray yvalues: Data values of the second variable :param numpy.ndarray xbins: Bin edges for the first variable :param numpy.ndarray ybins: Bin edges for the second variable :param list boundaries: List of (Lower, Upper) tuples corresponding to the bounds of the two data sets :param numpy.ndarray xsigma: Error values (standard deviatons) on first variable :param numpy.ndarray ysigma: Error values (standard deviatons) on second variable :param bool normalisation: If True then returns the histogram as a density function :param int number_bootstraps: Number of bootstraps :param returns: 2-D histogram of data ''' if (xsigma is None and ysigma is None) or not number_bootstraps: # No sampling - return simple 2-D histrogram #output = np.histogram2d(xvalues, yvalues, bins=[xbins, ybins])[0] output = hmtk_histogram_2D(xvalues, yvalues, bins=(xbins, ybins)) if normalisation: output = output / float(np.sum(output)) return output else: if xsigma is None: xsigma = np.zeros(len(xvalues), dtype=float) if ysigma is None: ysigma = np.zeros(len(yvalues), dtype=float) temp_hist = np.zeros( [len(xbins) - 1, len(ybins) - 1, number_bootstraps], dtype=float) for iloc in range(0, number_bootstraps): xsample = sample_truncated_gaussian_vector(xvalues, xsigma, boundaries[0]) ysample = sample_truncated_gaussian_vector(yvalues, ysigma, boundaries[0]) # temp_hist[:, :, iloc] = np.histogram2d(xsample, # ysample, # bins=[xbins, ybins])[0] temp_hist[:, :, iloc] = hmtk_histogram_2D(xsample, ysample, bins=(xbins, ybins)) if normalisation: output = np.sum(temp_hist, axis=2) output = output / np.sum(output) else: output = np.sum(temp_hist, axis=2) / float(number_bootstraps) return 
output
python
def bootstrap_histogram_2D( xvalues, yvalues, xbins, ybins, boundaries=[None, None], xsigma=None, ysigma=None, normalisation=False, number_bootstraps=None): ''' Calculates a 2D histogram of data, allowing for normalisation and bootstrap sampling :param numpy.ndarray xvalues: Data values of the first variable :param numpy.ndarray yvalues: Data values of the second variable :param numpy.ndarray xbins: Bin edges for the first variable :param numpy.ndarray ybins: Bin edges for the second variable :param list boundaries: List of (Lower, Upper) tuples corresponding to the bounds of the two data sets :param numpy.ndarray xsigma: Error values (standard deviatons) on first variable :param numpy.ndarray ysigma: Error values (standard deviatons) on second variable :param bool normalisation: If True then returns the histogram as a density function :param int number_bootstraps: Number of bootstraps :param returns: 2-D histogram of data ''' if (xsigma is None and ysigma is None) or not number_bootstraps: # No sampling - return simple 2-D histrogram #output = np.histogram2d(xvalues, yvalues, bins=[xbins, ybins])[0] output = hmtk_histogram_2D(xvalues, yvalues, bins=(xbins, ybins)) if normalisation: output = output / float(np.sum(output)) return output else: if xsigma is None: xsigma = np.zeros(len(xvalues), dtype=float) if ysigma is None: ysigma = np.zeros(len(yvalues), dtype=float) temp_hist = np.zeros( [len(xbins) - 1, len(ybins) - 1, number_bootstraps], dtype=float) for iloc in range(0, number_bootstraps): xsample = sample_truncated_gaussian_vector(xvalues, xsigma, boundaries[0]) ysample = sample_truncated_gaussian_vector(yvalues, ysigma, boundaries[0]) # temp_hist[:, :, iloc] = np.histogram2d(xsample, # ysample, # bins=[xbins, ybins])[0] temp_hist[:, :, iloc] = hmtk_histogram_2D(xsample, ysample, bins=(xbins, ybins)) if normalisation: output = np.sum(temp_hist, axis=2) output = output / np.sum(output) else: output = np.sum(temp_hist, axis=2) / float(number_bootstraps) return 
output
[ "def", "bootstrap_histogram_2D", "(", "xvalues", ",", "yvalues", ",", "xbins", ",", "ybins", ",", "boundaries", "=", "[", "None", ",", "None", "]", ",", "xsigma", "=", "None", ",", "ysigma", "=", "None", ",", "normalisation", "=", "False", ",", "number_bootstraps", "=", "None", ")", ":", "if", "(", "xsigma", "is", "None", "and", "ysigma", "is", "None", ")", "or", "not", "number_bootstraps", ":", "# No sampling - return simple 2-D histrogram", "#output = np.histogram2d(xvalues, yvalues, bins=[xbins, ybins])[0]", "output", "=", "hmtk_histogram_2D", "(", "xvalues", ",", "yvalues", ",", "bins", "=", "(", "xbins", ",", "ybins", ")", ")", "if", "normalisation", ":", "output", "=", "output", "/", "float", "(", "np", ".", "sum", "(", "output", ")", ")", "return", "output", "else", ":", "if", "xsigma", "is", "None", ":", "xsigma", "=", "np", ".", "zeros", "(", "len", "(", "xvalues", ")", ",", "dtype", "=", "float", ")", "if", "ysigma", "is", "None", ":", "ysigma", "=", "np", ".", "zeros", "(", "len", "(", "yvalues", ")", ",", "dtype", "=", "float", ")", "temp_hist", "=", "np", ".", "zeros", "(", "[", "len", "(", "xbins", ")", "-", "1", ",", "len", "(", "ybins", ")", "-", "1", ",", "number_bootstraps", "]", ",", "dtype", "=", "float", ")", "for", "iloc", "in", "range", "(", "0", ",", "number_bootstraps", ")", ":", "xsample", "=", "sample_truncated_gaussian_vector", "(", "xvalues", ",", "xsigma", ",", "boundaries", "[", "0", "]", ")", "ysample", "=", "sample_truncated_gaussian_vector", "(", "yvalues", ",", "ysigma", ",", "boundaries", "[", "0", "]", ")", "# temp_hist[:, :, iloc] = np.histogram2d(xsample,", "# ysample,", "# bins=[xbins, ybins])[0]", "temp_hist", "[", ":", ",", ":", ",", "iloc", "]", "=", "hmtk_histogram_2D", "(", "xsample", ",", "ysample", ",", "bins", "=", "(", "xbins", ",", "ybins", ")", ")", "if", "normalisation", ":", "output", "=", "np", ".", "sum", "(", "temp_hist", ",", "axis", "=", "2", ")", "output", "=", "output", "/", "np", ".", "sum", "(", 
"output", ")", "else", ":", "output", "=", "np", ".", "sum", "(", "temp_hist", ",", "axis", "=", "2", ")", "/", "float", "(", "number_bootstraps", ")", "return", "output" ]
Calculates a 2D histogram of data, allowing for normalisation and bootstrap sampling :param numpy.ndarray xvalues: Data values of the first variable :param numpy.ndarray yvalues: Data values of the second variable :param numpy.ndarray xbins: Bin edges for the first variable :param numpy.ndarray ybins: Bin edges for the second variable :param list boundaries: List of (Lower, Upper) tuples corresponding to the bounds of the two data sets :param numpy.ndarray xsigma: Error values (standard deviatons) on first variable :param numpy.ndarray ysigma: Error values (standard deviatons) on second variable :param bool normalisation: If True then returns the histogram as a density function :param int number_bootstraps: Number of bootstraps :param returns: 2-D histogram of data
[ "Calculates", "a", "2D", "histogram", "of", "data", "allowing", "for", "normalisation", "and", "bootstrap", "sampling" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/utils.py#L486-L558
483
gem/oq-engine
openquake/hmtk/seismicity/utils.py
area_of_polygon
def area_of_polygon(polygon): """ Returns the area of an OpenQuake polygon in square kilometres """ lon0 = np.mean(polygon.lons) lat0 = np.mean(polygon.lats) # Transform to lamber equal area projection x, y = lonlat_to_laea(polygon.lons, polygon.lats, lon0, lat0) # Build shapely polygons poly = geometry.Polygon(zip(x, y)) return poly.area
python
def area_of_polygon(polygon): """ Returns the area of an OpenQuake polygon in square kilometres """ lon0 = np.mean(polygon.lons) lat0 = np.mean(polygon.lats) # Transform to lamber equal area projection x, y = lonlat_to_laea(polygon.lons, polygon.lats, lon0, lat0) # Build shapely polygons poly = geometry.Polygon(zip(x, y)) return poly.area
[ "def", "area_of_polygon", "(", "polygon", ")", ":", "lon0", "=", "np", ".", "mean", "(", "polygon", ".", "lons", ")", "lat0", "=", "np", ".", "mean", "(", "polygon", ".", "lats", ")", "# Transform to lamber equal area projection", "x", ",", "y", "=", "lonlat_to_laea", "(", "polygon", ".", "lons", ",", "polygon", ".", "lats", ",", "lon0", ",", "lat0", ")", "# Build shapely polygons", "poly", "=", "geometry", ".", "Polygon", "(", "zip", "(", "x", ",", "y", ")", ")", "return", "poly", ".", "area" ]
Returns the area of an OpenQuake polygon in square kilometres
[ "Returns", "the", "area", "of", "an", "OpenQuake", "polygon", "in", "square", "kilometres" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/utils.py#L628-L638
484
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.lti
def lti(self): """ Dictionary extended_loss_type -> extended_loss_type index """ return {lt: i for i, (lt, dt) in enumerate(self.loss_dt_list())}
python
def lti(self): """ Dictionary extended_loss_type -> extended_loss_type index """ return {lt: i for i, (lt, dt) in enumerate(self.loss_dt_list())}
[ "def", "lti", "(", "self", ")", ":", "return", "{", "lt", ":", "i", "for", "i", ",", "(", "lt", ",", "dt", ")", "in", "enumerate", "(", "self", ".", "loss_dt_list", "(", ")", ")", "}" ]
Dictionary extended_loss_type -> extended_loss_type index
[ "Dictionary", "extended_loss_type", "-", ">", "extended_loss_type", "index" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L468-L472
485
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.loss_maps_dt
def loss_maps_dt(self, dtype=F32): """ Return a composite data type for loss maps """ ltypes = self.loss_dt(dtype).names lst = [('poe-%s' % poe, dtype) for poe in self.conditional_loss_poes] return numpy.dtype([(lt, lst) for lt in ltypes])
python
def loss_maps_dt(self, dtype=F32): """ Return a composite data type for loss maps """ ltypes = self.loss_dt(dtype).names lst = [('poe-%s' % poe, dtype) for poe in self.conditional_loss_poes] return numpy.dtype([(lt, lst) for lt in ltypes])
[ "def", "loss_maps_dt", "(", "self", ",", "dtype", "=", "F32", ")", ":", "ltypes", "=", "self", ".", "loss_dt", "(", "dtype", ")", ".", "names", "lst", "=", "[", "(", "'poe-%s'", "%", "poe", ",", "dtype", ")", "for", "poe", "in", "self", ".", "conditional_loss_poes", "]", "return", "numpy", ".", "dtype", "(", "[", "(", "lt", ",", "lst", ")", "for", "lt", "in", "ltypes", "]", ")" ]
Return a composite data type for loss maps
[ "Return", "a", "composite", "data", "type", "for", "loss", "maps" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L488-L494
486
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.gmf_data_dt
def gmf_data_dt(self): """ Return a composite data type for the GMFs """ return numpy.dtype( [('rlzi', U16), ('sid', U32), ('eid', U64), ('gmv', (F32, (len(self.imtls),)))])
python
def gmf_data_dt(self): """ Return a composite data type for the GMFs """ return numpy.dtype( [('rlzi', U16), ('sid', U32), ('eid', U64), ('gmv', (F32, (len(self.imtls),)))])
[ "def", "gmf_data_dt", "(", "self", ")", ":", "return", "numpy", ".", "dtype", "(", "[", "(", "'rlzi'", ",", "U16", ")", ",", "(", "'sid'", ",", "U32", ")", ",", "(", "'eid'", ",", "U64", ")", ",", "(", "'gmv'", ",", "(", "F32", ",", "(", "len", "(", "self", ".", "imtls", ")", ",", ")", ")", ")", "]", ")" ]
Return a composite data type for the GMFs
[ "Return", "a", "composite", "data", "type", "for", "the", "GMFs" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L496-L502
487
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.no_imls
def no_imls(self): """ Return True if there are no intensity measure levels """ return all(numpy.isnan(ls).any() for ls in self.imtls.values())
python
def no_imls(self): """ Return True if there are no intensity measure levels """ return all(numpy.isnan(ls).any() for ls in self.imtls.values())
[ "def", "no_imls", "(", "self", ")", ":", "return", "all", "(", "numpy", ".", "isnan", "(", "ls", ")", ".", "any", "(", ")", "for", "ls", "in", "self", ".", "imtls", ".", "values", "(", ")", ")" ]
Return True if there are no intensity measure levels
[ "Return", "True", "if", "there", "are", "no", "intensity", "measure", "levels" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L504-L508
488
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.get_kinds
def get_kinds(self, kind, R): """ Yield 'rlz-000', 'rlz-001', ...', 'mean', 'quantile-0.1', ... """ stats = self.hazard_stats() if kind == 'stats': yield from stats return elif kind == 'rlzs': for r in range(R): yield 'rlz-%d' % r return elif kind: yield kind return # default: yield stats (and realizations if required) if R > 1 and self.individual_curves or not stats: for r in range(R): yield 'rlz-%03d' % r yield from stats
python
def get_kinds(self, kind, R): """ Yield 'rlz-000', 'rlz-001', ...', 'mean', 'quantile-0.1', ... """ stats = self.hazard_stats() if kind == 'stats': yield from stats return elif kind == 'rlzs': for r in range(R): yield 'rlz-%d' % r return elif kind: yield kind return # default: yield stats (and realizations if required) if R > 1 and self.individual_curves or not stats: for r in range(R): yield 'rlz-%03d' % r yield from stats
[ "def", "get_kinds", "(", "self", ",", "kind", ",", "R", ")", ":", "stats", "=", "self", ".", "hazard_stats", "(", ")", "if", "kind", "==", "'stats'", ":", "yield", "from", "stats", "return", "elif", "kind", "==", "'rlzs'", ":", "for", "r", "in", "range", "(", "R", ")", ":", "yield", "'rlz-%d'", "%", "r", "return", "elif", "kind", ":", "yield", "kind", "return", "# default: yield stats (and realizations if required)", "if", "R", ">", "1", "and", "self", ".", "individual_curves", "or", "not", "stats", ":", "for", "r", "in", "range", "(", "R", ")", ":", "yield", "'rlz-%03d'", "%", "r", "yield", "from", "stats" ]
Yield 'rlz-000', 'rlz-001', ...', 'mean', 'quantile-0.1', ...
[ "Yield", "rlz", "-", "000", "rlz", "-", "001", "...", "mean", "quantile", "-", "0", ".", "1", "..." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L523-L542
489
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.hazard_stats
def hazard_stats(self): """ Return a list of item with the statistical functions defined for the hazard calculation """ names = [] # name of statistical functions funcs = [] # statistical functions of kind func(values, weights) if self.mean_hazard_curves: names.append('mean') funcs.append(stats.mean_curve) if self.std_hazard_curves: names.append('std') funcs.append(stats.std_curve) for q in self.quantiles: names.append('quantile-%s' % q) funcs.append(functools.partial(stats.quantile_curve, q)) if self.max_hazard_curves: names.append('max') funcs.append(stats.max_curve) return dict(zip(names, funcs))
python
def hazard_stats(self): """ Return a list of item with the statistical functions defined for the hazard calculation """ names = [] # name of statistical functions funcs = [] # statistical functions of kind func(values, weights) if self.mean_hazard_curves: names.append('mean') funcs.append(stats.mean_curve) if self.std_hazard_curves: names.append('std') funcs.append(stats.std_curve) for q in self.quantiles: names.append('quantile-%s' % q) funcs.append(functools.partial(stats.quantile_curve, q)) if self.max_hazard_curves: names.append('max') funcs.append(stats.max_curve) return dict(zip(names, funcs))
[ "def", "hazard_stats", "(", "self", ")", ":", "names", "=", "[", "]", "# name of statistical functions", "funcs", "=", "[", "]", "# statistical functions of kind func(values, weights)", "if", "self", ".", "mean_hazard_curves", ":", "names", ".", "append", "(", "'mean'", ")", "funcs", ".", "append", "(", "stats", ".", "mean_curve", ")", "if", "self", ".", "std_hazard_curves", ":", "names", ".", "append", "(", "'std'", ")", "funcs", ".", "append", "(", "stats", ".", "std_curve", ")", "for", "q", "in", "self", ".", "quantiles", ":", "names", ".", "append", "(", "'quantile-%s'", "%", "q", ")", "funcs", ".", "append", "(", "functools", ".", "partial", "(", "stats", ".", "quantile_curve", ",", "q", ")", ")", "if", "self", ".", "max_hazard_curves", ":", "names", ".", "append", "(", "'max'", ")", "funcs", ".", "append", "(", "stats", ".", "max_curve", ")", "return", "dict", "(", "zip", "(", "names", ",", "funcs", ")", ")" ]
Return a list of item with the statistical functions defined for the hazard calculation
[ "Return", "a", "list", "of", "item", "with", "the", "statistical", "functions", "defined", "for", "the", "hazard", "calculation" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L544-L563
490
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.is_valid_geometry
def is_valid_geometry(self): """ It is possible to infer the geometry only if exactly one of sites, sites_csv, hazard_curves_csv, gmfs_csv, region is set. You did set more than one, or nothing. """ has_sites = (self.sites is not None or 'sites' in self.inputs or 'site_model' in self.inputs) if not has_sites and not self.ground_motion_fields: # when generating only the ruptures you do not need the sites return True if ('gmfs' in self.inputs and not has_sites and not self.inputs['gmfs'].endswith('.xml')): raise ValueError('Missing sites or sites_csv in the .ini file') elif ('risk' in self.calculation_mode or 'damage' in self.calculation_mode or 'bcr' in self.calculation_mode): return True # no check on the sites for risk flags = dict( sites=bool(self.sites), sites_csv=self.inputs.get('sites', 0), hazard_curves_csv=self.inputs.get('hazard_curves', 0), gmfs_csv=self.inputs.get('gmfs', 0), region=bool(self.region and self.region_grid_spacing)) # NB: below we check that all the flags # are mutually exclusive return sum(bool(v) for v in flags.values()) == 1 or self.inputs.get( 'exposure') or self.inputs.get('site_model')
python
def is_valid_geometry(self): """ It is possible to infer the geometry only if exactly one of sites, sites_csv, hazard_curves_csv, gmfs_csv, region is set. You did set more than one, or nothing. """ has_sites = (self.sites is not None or 'sites' in self.inputs or 'site_model' in self.inputs) if not has_sites and not self.ground_motion_fields: # when generating only the ruptures you do not need the sites return True if ('gmfs' in self.inputs and not has_sites and not self.inputs['gmfs'].endswith('.xml')): raise ValueError('Missing sites or sites_csv in the .ini file') elif ('risk' in self.calculation_mode or 'damage' in self.calculation_mode or 'bcr' in self.calculation_mode): return True # no check on the sites for risk flags = dict( sites=bool(self.sites), sites_csv=self.inputs.get('sites', 0), hazard_curves_csv=self.inputs.get('hazard_curves', 0), gmfs_csv=self.inputs.get('gmfs', 0), region=bool(self.region and self.region_grid_spacing)) # NB: below we check that all the flags # are mutually exclusive return sum(bool(v) for v in flags.values()) == 1 or self.inputs.get( 'exposure') or self.inputs.get('site_model')
[ "def", "is_valid_geometry", "(", "self", ")", ":", "has_sites", "=", "(", "self", ".", "sites", "is", "not", "None", "or", "'sites'", "in", "self", ".", "inputs", "or", "'site_model'", "in", "self", ".", "inputs", ")", "if", "not", "has_sites", "and", "not", "self", ".", "ground_motion_fields", ":", "# when generating only the ruptures you do not need the sites", "return", "True", "if", "(", "'gmfs'", "in", "self", ".", "inputs", "and", "not", "has_sites", "and", "not", "self", ".", "inputs", "[", "'gmfs'", "]", ".", "endswith", "(", "'.xml'", ")", ")", ":", "raise", "ValueError", "(", "'Missing sites or sites_csv in the .ini file'", ")", "elif", "(", "'risk'", "in", "self", ".", "calculation_mode", "or", "'damage'", "in", "self", ".", "calculation_mode", "or", "'bcr'", "in", "self", ".", "calculation_mode", ")", ":", "return", "True", "# no check on the sites for risk", "flags", "=", "dict", "(", "sites", "=", "bool", "(", "self", ".", "sites", ")", ",", "sites_csv", "=", "self", ".", "inputs", ".", "get", "(", "'sites'", ",", "0", ")", ",", "hazard_curves_csv", "=", "self", ".", "inputs", ".", "get", "(", "'hazard_curves'", ",", "0", ")", ",", "gmfs_csv", "=", "self", ".", "inputs", ".", "get", "(", "'gmfs'", ",", "0", ")", ",", "region", "=", "bool", "(", "self", ".", "region", "and", "self", ".", "region_grid_spacing", ")", ")", "# NB: below we check that all the flags", "# are mutually exclusive", "return", "sum", "(", "bool", "(", "v", ")", "for", "v", "in", "flags", ".", "values", "(", ")", ")", "==", "1", "or", "self", ".", "inputs", ".", "get", "(", "'exposure'", ")", "or", "self", ".", "inputs", ".", "get", "(", "'site_model'", ")" ]
It is possible to infer the geometry only if exactly one of sites, sites_csv, hazard_curves_csv, gmfs_csv, region is set. You did set more than one, or nothing.
[ "It", "is", "possible", "to", "infer", "the", "geometry", "only", "if", "exactly", "one", "of", "sites", "sites_csv", "hazard_curves_csv", "gmfs_csv", "region", "is", "set", ".", "You", "did", "set", "more", "than", "one", "or", "nothing", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L595-L622
491
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.is_valid_intensity_measure_types
def is_valid_intensity_measure_types(self): """ If the IMTs and levels are extracted from the risk models, they must not be set directly. Moreover, if `intensity_measure_types_and_levels` is set directly, `intensity_measure_types` must not be set. """ if self.ground_motion_correlation_model: for imt in self.imtls: if not (imt.startswith('SA') or imt == 'PGA'): raise ValueError( 'Correlation model %s does not accept IMT=%s' % ( self.ground_motion_correlation_model, imt)) if self.risk_files: # IMTLs extracted from the risk files return (self.intensity_measure_types is None and self.intensity_measure_types_and_levels is None) elif not hasattr(self, 'hazard_imtls') and not hasattr( self, 'risk_imtls'): return False return True
python
def is_valid_intensity_measure_types(self): """ If the IMTs and levels are extracted from the risk models, they must not be set directly. Moreover, if `intensity_measure_types_and_levels` is set directly, `intensity_measure_types` must not be set. """ if self.ground_motion_correlation_model: for imt in self.imtls: if not (imt.startswith('SA') or imt == 'PGA'): raise ValueError( 'Correlation model %s does not accept IMT=%s' % ( self.ground_motion_correlation_model, imt)) if self.risk_files: # IMTLs extracted from the risk files return (self.intensity_measure_types is None and self.intensity_measure_types_and_levels is None) elif not hasattr(self, 'hazard_imtls') and not hasattr( self, 'risk_imtls'): return False return True
[ "def", "is_valid_intensity_measure_types", "(", "self", ")", ":", "if", "self", ".", "ground_motion_correlation_model", ":", "for", "imt", "in", "self", ".", "imtls", ":", "if", "not", "(", "imt", ".", "startswith", "(", "'SA'", ")", "or", "imt", "==", "'PGA'", ")", ":", "raise", "ValueError", "(", "'Correlation model %s does not accept IMT=%s'", "%", "(", "self", ".", "ground_motion_correlation_model", ",", "imt", ")", ")", "if", "self", ".", "risk_files", ":", "# IMTLs extracted from the risk files", "return", "(", "self", ".", "intensity_measure_types", "is", "None", "and", "self", ".", "intensity_measure_types_and_levels", "is", "None", ")", "elif", "not", "hasattr", "(", "self", ",", "'hazard_imtls'", ")", "and", "not", "hasattr", "(", "self", ",", "'risk_imtls'", ")", ":", "return", "False", "return", "True" ]
If the IMTs and levels are extracted from the risk models, they must not be set directly. Moreover, if `intensity_measure_types_and_levels` is set directly, `intensity_measure_types` must not be set.
[ "If", "the", "IMTs", "and", "levels", "are", "extracted", "from", "the", "risk", "models", "they", "must", "not", "be", "set", "directly", ".", "Moreover", "if", "intensity_measure_types_and_levels", "is", "set", "directly", "intensity_measure_types", "must", "not", "be", "set", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L660-L679
492
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.is_valid_intensity_measure_levels
def is_valid_intensity_measure_levels(self): """ In order to compute hazard curves, `intensity_measure_types_and_levels` must be set or extracted from the risk models. """ invalid = self.no_imls() and not self.risk_files and ( self.hazard_curves_from_gmfs or self.calculation_mode in ('classical', 'disaggregation')) return not invalid
python
def is_valid_intensity_measure_levels(self): """ In order to compute hazard curves, `intensity_measure_types_and_levels` must be set or extracted from the risk models. """ invalid = self.no_imls() and not self.risk_files and ( self.hazard_curves_from_gmfs or self.calculation_mode in ('classical', 'disaggregation')) return not invalid
[ "def", "is_valid_intensity_measure_levels", "(", "self", ")", ":", "invalid", "=", "self", ".", "no_imls", "(", ")", "and", "not", "self", ".", "risk_files", "and", "(", "self", ".", "hazard_curves_from_gmfs", "or", "self", ".", "calculation_mode", "in", "(", "'classical'", ",", "'disaggregation'", ")", ")", "return", "not", "invalid" ]
In order to compute hazard curves, `intensity_measure_types_and_levels` must be set or extracted from the risk models.
[ "In", "order", "to", "compute", "hazard", "curves", "intensity_measure_types_and_levels", "must", "be", "set", "or", "extracted", "from", "the", "risk", "models", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L681-L689
493
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.is_valid_sites
def is_valid_sites(self): """ The sites are overdetermined """ if 'site_model' in self.inputs and 'sites' in self.inputs: return False elif 'site_model' in self.inputs and self.sites: return False elif 'sites' in self.inputs and self.sites: return False elif self.sites and self.region and self.region_grid_spacing: return False else: return True
python
def is_valid_sites(self): """ The sites are overdetermined """ if 'site_model' in self.inputs and 'sites' in self.inputs: return False elif 'site_model' in self.inputs and self.sites: return False elif 'sites' in self.inputs and self.sites: return False elif self.sites and self.region and self.region_grid_spacing: return False else: return True
[ "def", "is_valid_sites", "(", "self", ")", ":", "if", "'site_model'", "in", "self", ".", "inputs", "and", "'sites'", "in", "self", ".", "inputs", ":", "return", "False", "elif", "'site_model'", "in", "self", ".", "inputs", "and", "self", ".", "sites", ":", "return", "False", "elif", "'sites'", "in", "self", ".", "inputs", "and", "self", ".", "sites", ":", "return", "False", "elif", "self", ".", "sites", "and", "self", ".", "region", "and", "self", ".", "region_grid_spacing", ":", "return", "False", "else", ":", "return", "True" ]
The sites are overdetermined
[ "The", "sites", "are", "overdetermined" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L733-L746
494
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.is_valid_complex_fault_mesh_spacing
def is_valid_complex_fault_mesh_spacing(self): """ The `complex_fault_mesh_spacing` parameter can be None only if `rupture_mesh_spacing` is set. In that case it is identified with it. """ rms = getattr(self, 'rupture_mesh_spacing', None) if rms and not getattr(self, 'complex_fault_mesh_spacing', None): self.complex_fault_mesh_spacing = self.rupture_mesh_spacing return True
python
def is_valid_complex_fault_mesh_spacing(self): """ The `complex_fault_mesh_spacing` parameter can be None only if `rupture_mesh_spacing` is set. In that case it is identified with it. """ rms = getattr(self, 'rupture_mesh_spacing', None) if rms and not getattr(self, 'complex_fault_mesh_spacing', None): self.complex_fault_mesh_spacing = self.rupture_mesh_spacing return True
[ "def", "is_valid_complex_fault_mesh_spacing", "(", "self", ")", ":", "rms", "=", "getattr", "(", "self", ",", "'rupture_mesh_spacing'", ",", "None", ")", "if", "rms", "and", "not", "getattr", "(", "self", ",", "'complex_fault_mesh_spacing'", ",", "None", ")", ":", "self", ".", "complex_fault_mesh_spacing", "=", "self", ".", "rupture_mesh_spacing", "return", "True" ]
The `complex_fault_mesh_spacing` parameter can be None only if `rupture_mesh_spacing` is set. In that case it is identified with it.
[ "The", "complex_fault_mesh_spacing", "parameter", "can", "be", "None", "only", "if", "rupture_mesh_spacing", "is", "set", ".", "In", "that", "case", "it", "is", "identified", "with", "it", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L748-L756
495
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.is_valid_optimize_same_id_sources
def is_valid_optimize_same_id_sources(self): """ The `optimize_same_id_sources` can be true only in the classical calculators. """ if (self.optimize_same_id_sources and 'classical' in self.calculation_mode or 'disagg' in self.calculation_mode): return True elif self.optimize_same_id_sources: return False else: return True
python
def is_valid_optimize_same_id_sources(self): """ The `optimize_same_id_sources` can be true only in the classical calculators. """ if (self.optimize_same_id_sources and 'classical' in self.calculation_mode or 'disagg' in self.calculation_mode): return True elif self.optimize_same_id_sources: return False else: return True
[ "def", "is_valid_optimize_same_id_sources", "(", "self", ")", ":", "if", "(", "self", ".", "optimize_same_id_sources", "and", "'classical'", "in", "self", ".", "calculation_mode", "or", "'disagg'", "in", "self", ".", "calculation_mode", ")", ":", "return", "True", "elif", "self", ".", "optimize_same_id_sources", ":", "return", "False", "else", ":", "return", "True" ]
The `optimize_same_id_sources` can be true only in the classical calculators.
[ "The", "optimize_same_id_sources", "can", "be", "true", "only", "in", "the", "classical", "calculators", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L758-L770
496
gem/oq-engine
openquake/commonlib/oqvalidation.py
OqParam.check_missing
def check_missing(self, param, action): """ Make sure the given parameter is missing in the job.ini file """ assert action in ('debug', 'info', 'warn', 'error'), action if self.inputs.get(param): msg = '%s_file in %s is ignored in %s' % ( param, self.inputs['job_ini'], self.calculation_mode) if action == 'error': raise InvalidFile(msg) else: getattr(logging, action)(msg)
python
def check_missing(self, param, action): """ Make sure the given parameter is missing in the job.ini file """ assert action in ('debug', 'info', 'warn', 'error'), action if self.inputs.get(param): msg = '%s_file in %s is ignored in %s' % ( param, self.inputs['job_ini'], self.calculation_mode) if action == 'error': raise InvalidFile(msg) else: getattr(logging, action)(msg)
[ "def", "check_missing", "(", "self", ",", "param", ",", "action", ")", ":", "assert", "action", "in", "(", "'debug'", ",", "'info'", ",", "'warn'", ",", "'error'", ")", ",", "action", "if", "self", ".", "inputs", ".", "get", "(", "param", ")", ":", "msg", "=", "'%s_file in %s is ignored in %s'", "%", "(", "param", ",", "self", ".", "inputs", "[", "'job_ini'", "]", ",", "self", ".", "calculation_mode", ")", "if", "action", "==", "'error'", ":", "raise", "InvalidFile", "(", "msg", ")", "else", ":", "getattr", "(", "logging", ",", "action", ")", "(", "msg", ")" ]
Make sure the given parameter is missing in the job.ini file
[ "Make", "sure", "the", "given", "parameter", "is", "missing", "in", "the", "job", ".", "ini", "file" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/oqvalidation.py#L795-L806
497
gem/oq-engine
openquake/hazardlib/sourceconverter.py
get_set_num_ruptures
def get_set_num_ruptures(src): """ Extract the number of ruptures and set it """ if not src.num_ruptures: t0 = time.time() src.num_ruptures = src.count_ruptures() dt = time.time() - t0 clsname = src.__class__.__name__ if dt > 10: if 'Area' in clsname: logging.warning( '%s.count_ruptures took %d seconds, perhaps the ' 'area discretization is too small', src, dt) elif 'ComplexFault' in clsname: logging.warning( '%s.count_ruptures took %d seconds, perhaps the ' 'complex_fault_mesh_spacing is too small', src, dt) elif 'SimpleFault' in clsname: logging.warning( '%s.count_ruptures took %d seconds, perhaps the ' 'rupture_mesh_spacing is too small', src, dt) else: # multiPointSource logging.warning('count_ruptures %s took %d seconds', src, dt) return src.num_ruptures
python
def get_set_num_ruptures(src): """ Extract the number of ruptures and set it """ if not src.num_ruptures: t0 = time.time() src.num_ruptures = src.count_ruptures() dt = time.time() - t0 clsname = src.__class__.__name__ if dt > 10: if 'Area' in clsname: logging.warning( '%s.count_ruptures took %d seconds, perhaps the ' 'area discretization is too small', src, dt) elif 'ComplexFault' in clsname: logging.warning( '%s.count_ruptures took %d seconds, perhaps the ' 'complex_fault_mesh_spacing is too small', src, dt) elif 'SimpleFault' in clsname: logging.warning( '%s.count_ruptures took %d seconds, perhaps the ' 'rupture_mesh_spacing is too small', src, dt) else: # multiPointSource logging.warning('count_ruptures %s took %d seconds', src, dt) return src.num_ruptures
[ "def", "get_set_num_ruptures", "(", "src", ")", ":", "if", "not", "src", ".", "num_ruptures", ":", "t0", "=", "time", ".", "time", "(", ")", "src", ".", "num_ruptures", "=", "src", ".", "count_ruptures", "(", ")", "dt", "=", "time", ".", "time", "(", ")", "-", "t0", "clsname", "=", "src", ".", "__class__", ".", "__name__", "if", "dt", ">", "10", ":", "if", "'Area'", "in", "clsname", ":", "logging", ".", "warning", "(", "'%s.count_ruptures took %d seconds, perhaps the '", "'area discretization is too small'", ",", "src", ",", "dt", ")", "elif", "'ComplexFault'", "in", "clsname", ":", "logging", ".", "warning", "(", "'%s.count_ruptures took %d seconds, perhaps the '", "'complex_fault_mesh_spacing is too small'", ",", "src", ",", "dt", ")", "elif", "'SimpleFault'", "in", "clsname", ":", "logging", ".", "warning", "(", "'%s.count_ruptures took %d seconds, perhaps the '", "'rupture_mesh_spacing is too small'", ",", "src", ",", "dt", ")", "else", ":", "# multiPointSource", "logging", ".", "warning", "(", "'count_ruptures %s took %d seconds'", ",", "src", ",", "dt", ")", "return", "src", ".", "num_ruptures" ]
Extract the number of ruptures and set it
[ "Extract", "the", "number", "of", "ruptures", "and", "set", "it" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L238-L263
498
gem/oq-engine
openquake/hazardlib/sourceconverter.py
mfds2multimfd
def mfds2multimfd(mfds): """ Convert a list of MFD nodes into a single MultiMFD node """ _, kind = mfds[0].tag.split('}') node = Node('multiMFD', dict(kind=kind, size=len(mfds))) lengths = None for field in mfd.multi_mfd.ASSOC[kind][1:]: alias = mfd.multi_mfd.ALIAS.get(field, field) if field in ('magnitudes', 'occurRates'): data = [~getattr(m, field) for m in mfds] lengths = [len(d) for d in data] data = sum(data, []) # list of lists else: try: data = [m[alias] for m in mfds] except KeyError: if alias == 'binWidth': # missing bindWidth in GR MDFs is ok continue else: raise node.append(Node(field, text=collapse(data))) if lengths: # this is the last field if present node.append(Node('lengths', text=collapse(lengths))) return node
python
def mfds2multimfd(mfds): """ Convert a list of MFD nodes into a single MultiMFD node """ _, kind = mfds[0].tag.split('}') node = Node('multiMFD', dict(kind=kind, size=len(mfds))) lengths = None for field in mfd.multi_mfd.ASSOC[kind][1:]: alias = mfd.multi_mfd.ALIAS.get(field, field) if field in ('magnitudes', 'occurRates'): data = [~getattr(m, field) for m in mfds] lengths = [len(d) for d in data] data = sum(data, []) # list of lists else: try: data = [m[alias] for m in mfds] except KeyError: if alias == 'binWidth': # missing bindWidth in GR MDFs is ok continue else: raise node.append(Node(field, text=collapse(data))) if lengths: # this is the last field if present node.append(Node('lengths', text=collapse(lengths))) return node
[ "def", "mfds2multimfd", "(", "mfds", ")", ":", "_", ",", "kind", "=", "mfds", "[", "0", "]", ".", "tag", ".", "split", "(", "'}'", ")", "node", "=", "Node", "(", "'multiMFD'", ",", "dict", "(", "kind", "=", "kind", ",", "size", "=", "len", "(", "mfds", ")", ")", ")", "lengths", "=", "None", "for", "field", "in", "mfd", ".", "multi_mfd", ".", "ASSOC", "[", "kind", "]", "[", "1", ":", "]", ":", "alias", "=", "mfd", ".", "multi_mfd", ".", "ALIAS", ".", "get", "(", "field", ",", "field", ")", "if", "field", "in", "(", "'magnitudes'", ",", "'occurRates'", ")", ":", "data", "=", "[", "~", "getattr", "(", "m", ",", "field", ")", "for", "m", "in", "mfds", "]", "lengths", "=", "[", "len", "(", "d", ")", "for", "d", "in", "data", "]", "data", "=", "sum", "(", "data", ",", "[", "]", ")", "# list of lists", "else", ":", "try", ":", "data", "=", "[", "m", "[", "alias", "]", "for", "m", "in", "mfds", "]", "except", "KeyError", ":", "if", "alias", "==", "'binWidth'", ":", "# missing bindWidth in GR MDFs is ok", "continue", "else", ":", "raise", "node", ".", "append", "(", "Node", "(", "field", ",", "text", "=", "collapse", "(", "data", ")", ")", ")", "if", "lengths", ":", "# this is the last field if present", "node", ".", "append", "(", "Node", "(", "'lengths'", ",", "text", "=", "collapse", "(", "lengths", ")", ")", ")", "return", "node" ]
Convert a list of MFD nodes into a single MultiMFD node
[ "Convert", "a", "list", "of", "MFD", "nodes", "into", "a", "single", "MultiMFD", "node" ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L918-L943
499
gem/oq-engine
openquake/hazardlib/sourceconverter.py
SourceGroup.update
def update(self, src): """ Update the attributes sources, min_mag, max_mag according to the given source. :param src: an instance of :class: `openquake.hazardlib.source.base.BaseSeismicSource` """ assert src.tectonic_region_type == self.trt, ( src.tectonic_region_type, self.trt) if not src.min_mag: # if not set already src.min_mag = self.min_mag.get(self.trt) or self.min_mag['default'] # checking mutex ruptures if (not isinstance(src, NonParametricSeismicSource) and self.rup_interdep == 'mutex'): msg = "Mutually exclusive ruptures can only be " msg += "modelled using non-parametric sources" raise ValueError(msg) nr = get_set_num_ruptures(src) if nr == 0: # the minimum_magnitude filters all ruptures return self.tot_ruptures += nr self.sources.append(src) _, max_mag = src.get_min_max_mag() prev_max_mag = self.max_mag if prev_max_mag is None or max_mag > prev_max_mag: self.max_mag = max_mag
python
def update(self, src): """ Update the attributes sources, min_mag, max_mag according to the given source. :param src: an instance of :class: `openquake.hazardlib.source.base.BaseSeismicSource` """ assert src.tectonic_region_type == self.trt, ( src.tectonic_region_type, self.trt) if not src.min_mag: # if not set already src.min_mag = self.min_mag.get(self.trt) or self.min_mag['default'] # checking mutex ruptures if (not isinstance(src, NonParametricSeismicSource) and self.rup_interdep == 'mutex'): msg = "Mutually exclusive ruptures can only be " msg += "modelled using non-parametric sources" raise ValueError(msg) nr = get_set_num_ruptures(src) if nr == 0: # the minimum_magnitude filters all ruptures return self.tot_ruptures += nr self.sources.append(src) _, max_mag = src.get_min_max_mag() prev_max_mag = self.max_mag if prev_max_mag is None or max_mag > prev_max_mag: self.max_mag = max_mag
[ "def", "update", "(", "self", ",", "src", ")", ":", "assert", "src", ".", "tectonic_region_type", "==", "self", ".", "trt", ",", "(", "src", ".", "tectonic_region_type", ",", "self", ".", "trt", ")", "if", "not", "src", ".", "min_mag", ":", "# if not set already", "src", ".", "min_mag", "=", "self", ".", "min_mag", ".", "get", "(", "self", ".", "trt", ")", "or", "self", ".", "min_mag", "[", "'default'", "]", "# checking mutex ruptures", "if", "(", "not", "isinstance", "(", "src", ",", "NonParametricSeismicSource", ")", "and", "self", ".", "rup_interdep", "==", "'mutex'", ")", ":", "msg", "=", "\"Mutually exclusive ruptures can only be \"", "msg", "+=", "\"modelled using non-parametric sources\"", "raise", "ValueError", "(", "msg", ")", "nr", "=", "get_set_num_ruptures", "(", "src", ")", "if", "nr", "==", "0", ":", "# the minimum_magnitude filters all ruptures", "return", "self", ".", "tot_ruptures", "+=", "nr", "self", ".", "sources", ".", "append", "(", "src", ")", "_", ",", "max_mag", "=", "src", ".", "get_min_max_mag", "(", ")", "prev_max_mag", "=", "self", ".", "max_mag", "if", "prev_max_mag", "is", "None", "or", "max_mag", ">", "prev_max_mag", ":", "self", ".", "max_mag", "=", "max_mag" ]
Update the attributes sources, min_mag, max_mag according to the given source. :param src: an instance of :class: `openquake.hazardlib.source.base.BaseSeismicSource`
[ "Update", "the", "attributes", "sources", "min_mag", "max_mag", "according", "to", "the", "given", "source", "." ]
8294553a0b8aba33fd96437a35065d03547d0040
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourceconverter.py#L159-L187