gem/oq-engine
openquake/hazardlib/gsim/bindi_2017.py
BindiEtAl2017Rjb._get_distance_scaling
def _get_distance_scaling(self, C, dists, mag):
    """
    Implements the distance scaling function F(M, R) presented in
    equations 2 and 3. In the case of Joyner-Boore distance, the
    fixed-depth term h is required
    """
    r_h = self._get_rh(C, dists)
    return (C["c1"] + C["c2"] * (mag - self.CONSTANTS["mref"])) *\
        np.log(r_h / self.CONSTANTS["rref"]) +\
        C["c3"] * (r_h - self.CONSTANTS["rref"])
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/bindi_2017.py#L110-L119
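For orientation, a minimal standalone sketch of the same functional form F(M, R), assuming _get_rh adds a fixed pseudo-depth h in quadrature to the Joyner-Boore distance; the coefficient values here are illustrative placeholders, not the published Bindi et al. (2017) coefficients, which live in the model's per-IMT coefficient table:

import numpy as np

def distance_scaling(rjb, mag, c1=-1.4, c2=0.2, c3=-0.002, h=6.0,
                     mref=5.5, rref=1.0):
    # hypothetical coefficients, for illustration only
    r_h = np.sqrt(rjb ** 2 + h ** 2)   # fixed-depth effective distance
    return (c1 + c2 * (mag - mref)) * np.log(r_h / rref) + \
        c3 * (r_h - rref)

print(distance_scaling(np.array([5.0, 20.0, 100.0]), mag=6.5))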
gem/oq-engine
openquake/hazardlib/gsim/munson_thurber_1997.py
MunsonThurber1997.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    """
    See :meth:`superclass method
    <.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
    for spec of input and result values.
    """
    # Distance term
    R = np.sqrt(dists.rjb ** 2 + 11.29 ** 2)
    # Magnitude term
    M = rup.mag - 6
    # Site term only distinguishes between lava and ash;
    # since ash sites have Vs30 in the range 60-200m/s,
    # we use this upper value as class separator
    S = np.zeros(R.shape)
    S[sites.vs30 <= 200] = 1
    # Mean ground motion (log10)
    mean = (0.518 + 0.387 * M - np.log10(R) - 0.00256 * R + 0.335 * S)
    # Converting to natural log
    mean /= np.log10(np.e)
    # Check for standard deviation type
    assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
               for stddev_type in stddev_types)
    # Constant (total) standard deviation
    stddevs = [0.237 / np.log10(np.e) + np.zeros(R.shape)]
    return mean, stddevs
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/munson_thurber_1997.py#L67-L99
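A quick sanity check on the base-10 to natural-log conversion above: dividing a log10 value by np.log10(np.e) is the same as multiplying it by ln(10), so the predicted ground motion itself is unchanged. A minimal sketch:

import numpy as np

mean_log10 = 0.75                         # hypothetical log10 prediction
mean_ln = mean_log10 / np.log10(np.e)     # the conversion used above
assert np.isclose(mean_ln, mean_log10 * np.log(10))
assert np.isclose(np.exp(mean_ln), 10.0 ** mean_log10)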
gem/oq-engine
openquake/hazardlib/gsim/munson_thurber_1997.py
MunsonThurber1997Hawaii.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    """
    See :meth:`superclass method
    <.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
    for spec of input and result values.
    """
    # assign constant
    log10e = np.log10(np.e)
    # Distance term
    R = np.sqrt(dists.rjb ** 2 + 11.29 ** 2)
    # Magnitude term
    M = rup.mag - 6
    # Site term only distinguishes between lava and ash;
    # since ash sites have Vs30 in the range 60-200m/s,
    # we use this upper value as class separator
    S = np.zeros(R.shape)
    S[sites.vs30 <= 200] = 1
    # Mean ground motion (natural log)
    # call super
    mean, stddevs = super().get_mean_and_stddevs(sites, rup, dists, imt,
                                                 stddev_types)
    if rup.mag > 7. and rup.mag <= 7.7:
        mean = (0.171 * (1 - M)) / log10e + mean
    elif rup.mag > 7.7:
        mean = (0.1512 + 0.387 * (1 - M)) / log10e + mean
    # define natural log of SA 0.3 sec and 0.2 sec
    if isinstance(imt, SA):
        if imt.period == 0.3:
            mean = np.log(2.2) + mean
        if imt.period == 0.2:
            mean = np.log(2.5) + mean
    return mean, stddevs
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/munson_thurber_1997.py#L111-L151
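The large-magnitude branches above can be restated as a standalone adjustment in natural-log units; a sketch using the constants from the code (M is magnitude minus 6):

import numpy as np

def large_mag_adjustment(mag):
    log10e = np.log10(np.e)
    M = mag - 6
    if 7.0 < mag <= 7.7:
        return (0.171 * (1 - M)) / log10e
    elif mag > 7.7:
        return (0.1512 + 0.387 * (1 - M)) / log10e
    return 0.0    # no adjustment at or below magnitude 7

for mag in (6.5, 7.4, 8.0):
    print(mag, large_mag_adjustment(mag))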
gem/oq-engine
openquake/hazardlib/gsim/skarlatoudis_2013.py
SkarlatoudisEtAlSSlab2013.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    """
    See :meth:`superclass method
    <.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
    for spec of input and result values.
    """
    # extracting dictionary of coefficients specific to required
    # intensity measure type.
    C = self.COEFFS[imt]
    imean = (self._compute_magnitude(rup, C) +
             self._compute_distance(rup, dists, C) +
             self._get_site_amplification(sites, C) +
             self._compute_forearc_backarc_term(C, sites, dists, rup))
    istddevs = self._get_stddevs(C, stddev_types,
                                 num_sites=len(sites.vs30))
    # Convert units to g,
    # but only for PGA and SA (not PGV):
    if imt.name in "SA PGA":
        mean = np.log((10.0 ** (imean - 2.0)) / g)
    else:
        # PGV:
        mean = np.log(10.0 ** imean)
    # Return stddevs in terms of natural log scaling
    stddevs = np.log(10.0 ** np.array(istddevs))
    # mean_LogNaturale = np.log((10 ** mean) * 1e-2 / g)
    return mean, stddevs
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/skarlatoudis_2013.py#L76-L105
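The unit handling above implies the regression predicts log10 of acceleration in cm/s²: subtracting 2 in the exponent converts to m/s², and the division by g (presumably scipy.constants.g, an assumption about the module's imports) expresses the mean in units of g. A minimal sketch of the same conversion:

import numpy as np
from scipy.constants import g    # 9.80665 m/s^2

imean = 2.5                      # hypothetical log10(acceleration in cm/s^2)
mean = np.log((10.0 ** (imean - 2.0)) / g)
print(np.exp(mean))              # the same acceleration, now in g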
gem/oq-engine
openquake/hazardlib/gsim/skarlatoudis_2013.py
SkarlatoudisEtAlSSlab2013._compute_distance
def _compute_distance(self, rup, dists, C):
    """
    Equation 3, page 1960: ``c31 * logR + c32 * (R - Rref)``
    """
    rref = 1.0
    c31 = -1.7
    return (c31 * np.log10(dists.rhypo) +
            C['c32'] * (dists.rhypo - rref))
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/skarlatoudis_2013.py#L122-L130
gem/oq-engine
openquake/hazardlib/gsim/skarlatoudis_2013.py
SkarlatoudisEtAlSSlab2013._compute_magnitude
def _compute_magnitude(self, rup, C):
    """
    Equation 3, page 1960: ``c1 + c2 * (M - 5.5)``
    """
    m_h = 5.5
    return C['c1'] + (C['c2'] * (rup.mag - m_h))
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/skarlatoudis_2013.py#L132-L139
gem/oq-engine
openquake/hazardlib/gsim/skarlatoudis_2013.py
SkarlatoudisEtAlSSlab2013._get_site_amplification
def _get_site_amplification(self, sites, C):
    """
    Compute the fourth term of equation 3: the functional form Fs in
    Eq. (1) represents the site amplification and is given by
    FS = c61 * S + c62 * SS, where c61 and c62 are the coefficients to
    be determined through the regression analysis, while S and SS are
    dummy variables used to denote NEHRP site categories C and D
    respectively. Coefficients for categories A and B are set to zero.
    """
    S, SS = self._get_site_type_dummy_variables(sites)
    return (C['c61'] * S) + (C['c62'] * SS)
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/skarlatoudis_2013.py#L141-L153
gem/oq-engine
openquake/hazardlib/gsim/skarlatoudis_2013.py
SkarlatoudisEtAlSSlab2013._get_site_type_dummy_variables
def _get_site_type_dummy_variables(self, sites):
    """
    Get site type dummy variables for three different site classes,
    based on the shear wave velocity in the uppermost 30 m (Vs30),
    according to NEHRP:
    class A-B: Vs30 > 760 m/s
    class C:   Vs30 = 360 - 760 m/s
    class D:   Vs30 < 360 m/s
    """
    S = np.zeros(len(sites.vs30))
    SS = np.zeros(len(sites.vs30))
    # Class D; Vs30 < 360 m/s.
    idx = (sites.vs30 < 360.0)
    SS[idx] = 1.0
    # Class C; 360 m/s <= Vs30 < 760 m/s. (NEHRP)
    idx = (sites.vs30 >= 360.0) & (sites.vs30 < 760)
    S[idx] = 1.0
    return S, SS
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/skarlatoudis_2013.py#L155-L175
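A quick usage sketch of the dummy-variable scheme with a hypothetical vs30 array, thresholds exactly as in the code (SS flags sites below 360 m/s, S flags the 360-760 m/s band):

import numpy as np

vs30 = np.array([150.0, 300.0, 500.0, 800.0])
S = np.zeros(len(vs30))
SS = np.zeros(len(vs30))
SS[vs30 < 360.0] = 1.0
S[(vs30 >= 360.0) & (vs30 < 760)] = 1.0
print(S)    # [0. 0. 1. 0.]
print(SS)   # [1. 1. 0. 0.]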
gem/oq-engine
openquake/hazardlib/gsim/skarlatoudis_2013.py
SkarlatoudisEtAlSSlab2013._compute_forearc_backarc_term
def _compute_forearc_backarc_term(self, C, sites, dists, rup):
    """
    Compute the forearc/backarc term of equation 3
    """
    # flag 1 (205 <= R < 335)
    flag1 = np.zeros(len(dists.rhypo))
    ind1 = np.logical_and((dists.rhypo < 335), (dists.rhypo >= 205))
    flag1[ind1] = 1.0
    # flag 2 (R >= 335)
    flag2 = np.zeros(len(dists.rhypo))
    ind2 = (dists.rhypo >= 335)
    flag2[ind2] = 1.0
    # flag 3 (140 <= R < 240)
    flag3 = np.zeros(len(dists.rhypo))
    ind3 = np.logical_and((dists.rhypo < 240), (dists.rhypo >= 140))
    flag3[ind3] = 1.0
    # flag 4 (R >= 240)
    flag4 = np.zeros(len(dists.rhypo))
    ind4 = (dists.rhypo >= 240)
    flag4[ind4] = 1.0
    A = flag1 * ((205 - dists.rhypo) / 150) + flag2
    B = flag3 * ((140 - dists.rhypo) / 100) + flag4
    if rup.hypo_depth < 80:
        FHR = A
    else:
        FHR = B
    H0 = 100
    # Heaviside function
    if rup.hypo_depth >= H0:
        H = 1
    else:
        H = 0
    # ARC = 0 for back-arc - ARC = 1 for forearc
    ARC = np.zeros(len(sites.backarc))
    idxarc = (sites.backarc == 1)
    ARC[idxarc] = 1.0
    return ((C['c41'] * (1 - ARC) * H) + (C['c42'] * (1 - ARC) * H * FHR) +
            (C['c51'] * ARC * H) + (C['c52'] * ARC * H * FHR))
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/skarlatoudis_2013.py#L177-L219
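The flag arrays implement a piecewise term in hypocentral distance; a small sketch evaluating the shallow-event term A at sample distances, with bounds and slope exactly as in the code:

import numpy as np

rhypo = np.array([100.0, 205.0, 270.0, 335.0, 400.0])
flag1 = ((rhypo >= 205) & (rhypo < 335)).astype(float)
flag2 = (rhypo >= 335).astype(float)
A = flag1 * ((205 - rhypo) / 150) + flag2
print(A)    # 0., 0., -0.433..., 1., 1.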
gem/oq-engine
openquake/hazardlib/gsim/gmpe_table.py
AmplificationTable._build_data
def _build_data(self, amplification_group):
    """
    Creates the numpy array tables from the hdf5 tables
    """
    # Determine shape of the tables
    n_levels = len(amplification_group)
    # Checks the first group in the amplification group and returns the
    # shape of the SA array - implicitly assumes the SA array in all
    # amplification groups is the same shape
    level = next(iter(amplification_group))
    n_d, n_p, n_m = amplification_group[level]["IMLs/SA"].shape
    assert n_d == len(self.distances), (n_d, len(self.distances))
    assert n_m == len(self.magnitudes), (n_m, len(self.magnitudes))
    # Instantiate the arrays with ones
    self.mean = {"SA": numpy.ones([n_d, n_p, n_m, n_levels]),
                 "PGA": numpy.ones([n_d, 1, n_m, n_levels]),
                 "PGV": numpy.ones([n_d, 1, n_m, n_levels])}
    self.sigma = {}
    for stddev_type in [const.StdDev.TOTAL, const.StdDev.INTER_EVENT,
                        const.StdDev.INTRA_EVENT]:
        level = next(iter(amplification_group))
        if stddev_type in amplification_group[level]:
            self.sigma[stddev_type] = deepcopy(self.mean)
    for iloc, (level, amp_model) in enumerate(amplification_group.items()):
        if "SA" in amp_model["IMLs"]:
            if iloc == 0:
                self.periods = amp_model["IMLs/T"][:]
            else:
                assert numpy.allclose(self.periods, amp_model["IMLs/T"][:])
        for imt in ["SA", "PGA", "PGV"]:
            if imt in amp_model["IMLs"]:
                self.mean[imt][:, :, :, self.argidx[iloc]] = \
                    amp_model["IMLs/" + imt][:]
                for stddev_type in self.sigma:
                    self.sigma[stddev_type][imt][
                        :, :, :, self.argidx[iloc]] = \
                        amp_model["/".join([stddev_type, imt])][:]
    self.shape = (n_d, n_p, n_m, n_levels)
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L112-L150
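A minimal sketch of an HDF5 layout that satisfies the dataset names read above, written with h5py; the group name "Level_1" and all sizes are hypothetical, and a real table would also need the matching Mw and Distances vectors:

import h5py
import numpy

n_d, n_p, n_m = 10, 5, 4    # distances, periods, magnitudes
with h5py.File("amplification_demo.hdf5", "w") as fle:
    grp = fle.create_group("Amplification/Level_1")
    grp.create_dataset("IMLs/SA", data=numpy.ones([n_d, n_p, n_m]))
    grp.create_dataset("IMLs/T", data=numpy.geomspace(0.05, 2.0, n_p))
    grp.create_dataset("IMLs/PGA", data=numpy.ones([n_d, 1, n_m]))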
gem/oq-engine
openquake/hazardlib/gsim/gmpe_table.py
AmplificationTable.get_amplification_factors
def get_amplification_factors(self, imt, sctx, rctx, dists, stddev_types):
    """
    Returns the amplification factors for the given rupture and site
    conditions.

    :param imt:
        Intensity measure type as an instance of the
        :class:`openquake.hazardlib.imt`
    :param sctx:
        SiteCollection instance
    :param rctx:
        Rupture instance
    :param dists:
        Source to site distances (km)
    :param stddev_types:
        List of required standard deviation types
    :returns:
        * mean_amp - Amplification factors applied to the median ground
          motion
        * sigma_amps - List of modification factors applied to the
          standard deviations of ground motion
    """
    dist_level_table = self.get_mean_table(imt, rctx)
    sigma_tables = self.get_sigma_tables(imt, rctx, stddev_types)
    mean_interpolator = interp1d(self.values,
                                 numpy.log10(dist_level_table),
                                 axis=1)
    sigma_interpolators = [interp1d(self.values, sigma_table, axis=1)
                           for sigma_table in sigma_tables]
    if self.element == "Rupture":
        mean_amp = 10.0 ** mean_interpolator(
            getattr(rctx, self.parameter))[0] * numpy.ones_like(dists)
        sigma_amps = []
        for sig_interpolator in sigma_interpolators:
            sigma_amps.append(sig_interpolator(
                getattr(rctx, self.parameter))[0] * numpy.ones_like(dists))
    else:
        mean_amp = 10.0 ** mean_interpolator(
            getattr(sctx, self.parameter))[0, :]
        sigma_amps = []
        for sig_interpolator in sigma_interpolators:
            sigma_amps.append(sig_interpolator(
                getattr(sctx, self.parameter))[0, :] *
                numpy.ones_like(dists))
    return mean_amp, sigma_amps
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L158-L202
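The heavy lifting above is done by scipy's interp1d with an axis argument, which builds one interpolator over a whole table and evaluates every row at once; a minimal sketch with hypothetical amplification levels:

import numpy
from scipy.interpolate import interp1d

values = numpy.array([200.0, 400.0, 800.0])        # e.g. three vs30 levels
table = numpy.array([[0.8, 1.0, 1.3]] * 5)         # [n_dists, n_levels]
ipl = interp1d(values, numpy.log10(table), axis=1)
amp = 10.0 ** ipl(300.0)                           # one factor per distance
print(amp.shape)                                   # (5,)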
gem/oq-engine
openquake/hazardlib/gsim/gmpe_table.py
AmplificationTable.get_mean_table
def get_mean_table(self, imt, rctx):
    """
    Returns amplification factors for the mean, given the rupture and
    intensity measure type.

    :returns:
        amplification table as an array of [Number Distances,
        Number Levels]
    """
    # Levels by Distances
    if imt.name in 'PGA PGV':
        interpolator = interp1d(self.magnitudes,
                                numpy.log10(self.mean[imt.name]),
                                axis=2)
        output_table = 10.0 ** (
            interpolator(rctx.mag).reshape(self.shape[0], self.shape[3]))
    else:
        # For spectral accelerations - need two step process
        # Interpolate period - log-log space
        interpolator = interp1d(numpy.log10(self.periods),
                                numpy.log10(self.mean["SA"]),
                                axis=1)
        period_table = interpolator(numpy.log10(imt.period))
        # Interpolate magnitude - linear-log space
        mag_interpolator = interp1d(self.magnitudes, period_table, axis=1)
        output_table = 10.0 ** mag_interpolator(rctx.mag)
    return output_table
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L204-L229
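A sketch of the two-step SA lookup above with small hypothetical tables: interpolate the period in log-log space first, then the magnitude in linear-log space, and only then undo the log10:

import numpy
from scipy.interpolate import interp1d

periods = numpy.array([0.1, 0.2, 0.5, 1.0])
mags = numpy.array([5.0, 6.0, 7.0])
mean_sa = numpy.full((8, 4, 3, 2), 2.0)   # [n_d, n_p, n_m, n_levels]

ipl = interp1d(numpy.log10(periods), numpy.log10(mean_sa), axis=1)
period_table = ipl(numpy.log10(0.3))      # -> [n_d, n_m, n_levels]
mag_ipl = interp1d(mags, period_table, axis=1)
output = 10.0 ** mag_ipl(6.4)             # -> [n_d, n_levels]
print(output.shape)                       # (8, 2)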
gem/oq-engine
openquake/hazardlib/gsim/gmpe_table.py
AmplificationTable.get_sigma_tables
def get_sigma_tables(self, imt, rctx, stddev_types):
    """
    Returns modification factors for the standard deviations, given the
    rupture and intensity measure type.

    :returns:
        List of standard deviation modification tables, each as an
        array of [Number Distances, Number Levels]
    """
    output_tables = []
    for stddev_type in stddev_types:
        # For PGA and PGV only needs to apply magnitude interpolation
        if imt.name in 'PGA PGV':
            interpolator = interp1d(self.magnitudes,
                                    self.sigma[stddev_type][imt.name],
                                    axis=2)
            output_tables.append(
                interpolator(rctx.mag).reshape(self.shape[0],
                                               self.shape[3]))
        else:
            # For spectral accelerations - need two step process
            # Interpolate period
            interpolator = interp1d(numpy.log10(self.periods),
                                    self.sigma[stddev_type]["SA"],
                                    axis=1)
            period_table = interpolator(numpy.log10(imt.period))
            mag_interpolator = interp1d(self.magnitudes, period_table,
                                        axis=1)
            output_tables.append(mag_interpolator(rctx.mag))
    return output_tables
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L231-L263
gem/oq-engine
openquake/hazardlib/gsim/gmpe_table.py
GMPETable.init
def init(self, fle=None):
    """
    Executes the preprocessing steps at the instantiation stage to read
    in the tables from hdf5 and hold them in memory.
    """
    if fle is None:
        fname = self.kwargs.get('gmpe_table', self.GMPE_TABLE)
        if fname is None:
            raise ValueError('You forgot to set GMPETable.GMPE_TABLE!')
        elif os.path.isabs(fname):
            self.GMPE_TABLE = fname
        else:
            # NB: (hackish) GMPE_DIR must be set externally
            self.GMPE_TABLE = os.path.abspath(
                os.path.join(self.GMPE_DIR, fname))
        fle = h5py.File(self.GMPE_TABLE, "r")
    try:
        # this is the format inside the datastore
        self.distance_type = fle["distance_type"].value
    except KeyError:
        # this is the original format outside the datastore
        self.distance_type = decode(fle["Distances"].attrs["metric"])
    self.REQUIRES_DISTANCES = set([self.distance_type])
    # Load in magnitude
    self.m_w = fle["Mw"][:]
    # Load in distances
    self.distances = fle["Distances"][:]
    # Load intensity measure types and levels
    self.imls = hdf_arrays_to_dict(fle["IMLs"])
    self.DEFINED_FOR_INTENSITY_MEASURE_TYPES = set(self._supported_imts())
    if "SA" in self.imls and "T" not in self.imls:
        raise ValueError("Spectral Acceleration must be accompanied by "
                         "periods")
    # Get the standard deviations
    self._setup_standard_deviations(fle)
    if "Amplification" in fle:
        self._setup_amplification(fle)
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L306-L342
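A hedged usage sketch: assuming the constructor forwards keyword arguments so that init() sees them through self.kwargs (the table path here is hypothetical), a table-driven GSIM can be instantiated like this:

from openquake.hazardlib.gsim.gmpe_table import GMPETable

gsim = GMPETable(gmpe_table="/path/to/my_model_table.hdf5")
print(gsim.REQUIRES_DISTANCES)                  # e.g. {'rhypo'}
print(gsim.DEFINED_FOR_INTENSITY_MEASURE_TYPES)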
gem/oq-engine
openquake/hazardlib/gsim/gmpe_table.py
GMPETable._setup_standard_deviations
def _setup_standard_deviations(self, fle):
    """
    Reads the standard deviation tables from hdf5 and stores them in
    memory

    :param fle:
        HDF5 Tables as instance of :class:`h5py.File`
    """
    # Load in total standard deviation
    self.stddevs = {}
    self.stddevs[const.StdDev.TOTAL] = hdf_arrays_to_dict(fle["Total"])
    # If other standard deviations
    self.DEFINED_FOR_STANDARD_DEVIATION_TYPES = set(
        self.DEFINED_FOR_STANDARD_DEVIATION_TYPES)
    for stddev_type in [const.StdDev.INTER_EVENT,
                        const.StdDev.INTRA_EVENT]:
        if stddev_type in fle:
            self.stddevs[stddev_type] = hdf_arrays_to_dict(
                fle[stddev_type])
            self.DEFINED_FOR_STANDARD_DEVIATION_TYPES.add(stddev_type)
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L344-L362
gem/oq-engine
openquake/hazardlib/gsim/gmpe_table.py
GMPETable._setup_amplification
def _setup_amplification(self, fle):
    """
    If amplification data is specified then reads into memory and
    updates the required rupture and site parameters
    """
    self.amplification = AmplificationTable(fle["Amplification"],
                                            self.m_w,
                                            self.distances)
    if self.amplification.element == "Sites":
        self.REQUIRES_SITES_PARAMETERS = set(
            [self.amplification.parameter])
    elif self.amplification.element == "Rupture":
        # set the site and rupture parameters on the instance
        self.REQUIRES_SITES_PARAMETERS = set()
        self.REQUIRES_RUPTURE_PARAMETERS = (
            self.REQUIRES_RUPTURE_PARAMETERS |
            {self.amplification.parameter})
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L364-L380
gem/oq-engine
openquake/hazardlib/gsim/gmpe_table.py
GMPETable._supported_imts
def _supported_imts(self):
    """
    Updates the list of supported IMTs from the tables
    """
    imt_list = []
    for key in self.imls:
        if "SA" in key:
            imt_list.append(imt_module.SA)
        elif key == "T":
            continue
        else:
            try:
                factory = getattr(imt_module, key)
            except Exception:
                continue
            imt_list.append(factory)
    return imt_list
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L382-L398
gem/oq-engine
openquake/hazardlib/gsim/gmpe_table.py
GMPETable.get_mean_and_stddevs
def get_mean_and_stddevs(self, sctx, rctx, dctx, imt, stddev_types):
    """
    Returns the mean and standard deviations
    """
    # Return Distance Tables
    imls = self._return_tables(rctx.mag, imt, "IMLs")
    # Get distance vector for the given magnitude
    idx = numpy.searchsorted(self.m_w, rctx.mag)
    dists = self.distances[:, 0, idx - 1]
    # Get mean and standard deviations
    mean = self._get_mean(imls, dctx, dists)
    stddevs = self._get_stddevs(dists, rctx.mag, dctx, imt, stddev_types)
    if self.amplification:
        # Apply amplification
        mean_amp, sigma_amp = self.amplification.get_amplification_factors(
            imt, sctx, rctx, getattr(dctx, self.distance_type),
            stddev_types)
        mean = numpy.log(mean) + numpy.log(mean_amp)
        for iloc in range(len(stddev_types)):
            stddevs[iloc] *= sigma_amp[iloc]
        return mean, stddevs
    else:
        return numpy.log(mean), stddevs
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L400-L425
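The distance vector for the rupture magnitude is selected with numpy.searchsorted; a small sketch of that indexing rule (idx - 1 picks the table bin at or below the magnitude):

import numpy

m_w = numpy.array([5.0, 5.5, 6.0, 6.5])
idx = numpy.searchsorted(m_w, 5.7)    # -> 2, first entry >= 5.7
print(m_w[idx - 1])                   # 5.5, the bin used for the lookup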
gem/oq-engine
openquake/hazardlib/gsim/gmpe_table.py
GMPETable._get_mean
def _get_mean(self, data, dctx, dists):
    """
    Returns the mean intensity measure level from the tables

    :param data:
        The intensity measure level vector for the given magnitude
        and IMT
    :param dctx:
        Distance context providing the distance values of the sites
    :param dists:
        The distance vector for the given magnitude and IMT
    """
    # For values outside of the interpolation range use -999. to ensure
    # the value is identifiable and outside of potential real values
    interpolator_mean = interp1d(dists, data,
                                 bounds_error=False,
                                 fill_value=-999.)
    mean = interpolator_mean(getattr(dctx, self.distance_type))
    # For those distances less than or equal to the shortest distance
    # extrapolate the shortest distance value
    mean[getattr(dctx, self.distance_type) < (dists[0] + 1.0E-3)] = data[0]
    # For those distances significantly greater than the furthest
    # distance set to 1E-20.
    mean[getattr(dctx, self.distance_type) > (dists[-1] + 1.0E-3)] = 1E-20
    # Distances that fall within the 0.001 km margin beyond the final
    # distance take the value at the furthest distance
    mean[mean < -1.] = data[-1]
    return mean
python
def _get_mean(self, data, dctx, dists): interpolator_mean = interp1d(dists, data, bounds_error=False, fill_value=-999.) mean = interpolator_mean(getattr(dctx, self.distance_type)) mean[getattr(dctx, self.distance_type) < (dists[0] + 1.0E-3)] = data[0] mean[getattr(dctx, self.distance_type) > (dists[-1] + 1.0E-3)] = 1E-20 mean[mean < -1.] = data[-1] return mean
[ "def", "_get_mean", "(", "self", ",", "data", ",", "dctx", ",", "dists", ")", ":", "# For values outside of the interpolation range use -999. to ensure", "# value is identifiable and outside of potential real values", "interpolator_mean", "=", "interp1d", "(", "dists", ",", "data", ",", "bounds_error", "=", "False", ",", "fill_value", "=", "-", "999.", ")", "mean", "=", "interpolator_mean", "(", "getattr", "(", "dctx", ",", "self", ".", "distance_type", ")", ")", "# For those distances less than or equal to the shortest distance", "# extrapolate the shortest distance value", "mean", "[", "getattr", "(", "dctx", ",", "self", ".", "distance_type", ")", "<", "(", "dists", "[", "0", "]", "+", "1.0E-3", ")", "]", "=", "data", "[", "0", "]", "# For those distances significantly greater than the furthest distance", "# set to 1E-20.", "mean", "[", "getattr", "(", "dctx", ",", "self", ".", "distance_type", ")", ">", "(", "dists", "[", "-", "1", "]", "+", "1.0E-3", ")", "]", "=", "1E-20", "# If any distance is between the final distance and a margin of 0.001", "# km then assign to smallest distance", "mean", "[", "mean", "<", "-", "1.", "]", "=", "data", "[", "-", "1", "]", "return", "mean" ]
Returns the mean intensity measure level from the tables :param data: The intensity measure level vector for the given magnitude and IMT :param dctx: The distance context for the given magnitude and IMT :param dists: The distance vector for the given magnitude and IMT
[ "Returns", "the", "mean", "intensity", "measure", "level", "from", "the", "tables", ":", "param", "data", ":", "The", "intensity", "measure", "level", "vector", "for", "the", "given", "magnitude", "and", "IMT", ":", "param", "key", ":", "The", "distance", "type", ":", "param", "distances", ":", "The", "distance", "vector", "for", "the", "given", "magnitude", "and", "IMT" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L427-L452
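The clamping logic in _get_mean above is compact enough to obscure its intent, so here is a minimal standalone sketch of the same pattern. All table values and query distances below are invented for illustration; only the -999. sentinel, the 1.0E-3 margin and the 1E-20 floor mirror the original.

import numpy
from scipy.interpolate import interp1d

# Hypothetical table: IMLs tabulated at five rupture distances (km)
dists = numpy.array([1., 5., 10., 50., 100.])
data = numpy.array([0.8, 0.5, 0.3, 0.05, 0.01])
query = numpy.array([0.5, 7., 100.0005, 200.])

interp = interp1d(dists, data, bounds_error=False, fill_value=-999.)
mean = interp(query)
mean[query < (dists[0] + 1.0E-3)] = data[0]   # clamp below the table
mean[query > (dists[-1] + 1.0E-3)] = 1E-20    # far beyond the table
mean[mean < -1.] = data[-1]                   # the 1 m margin past the last point
print(mean)  # [0.8, 0.42, 0.01, 1e-20]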
gem/oq-engine
openquake/hazardlib/gsim/gmpe_table.py
GMPETable._get_stddevs
def _get_stddevs(self, dists, mag, dctx, imt, stddev_types): """ Returns the total standard deviation of the intensity measure level from the tables. :param dists: The distance vector for the given magnitude and IMT :param mag: The rupture magnitude :param dctx: The distance context :param imt: Intensity measure type :param stddev_types: List of standard deviation types """ stddevs = [] for stddev_type in stddev_types: if stddev_type not in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES: raise ValueError("Standard Deviation type %s not supported" % stddev_type) sigma = self._return_tables(mag, imt, stddev_type) interpolator_std = interp1d(dists, sigma, bounds_error=False) stddev = interpolator_std(getattr(dctx, self.distance_type)) stddev[getattr(dctx, self.distance_type) < dists[0]] = sigma[0] stddev[getattr(dctx, self.distance_type) > dists[-1]] = sigma[-1] stddevs.append(stddev) return stddevs
python
def _get_stddevs(self, dists, mag, dctx, imt, stddev_types): stddevs = [] for stddev_type in stddev_types: if stddev_type not in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES: raise ValueError("Standard Deviation type %s not supported" % stddev_type) sigma = self._return_tables(mag, imt, stddev_type) interpolator_std = interp1d(dists, sigma, bounds_error=False) stddev = interpolator_std(getattr(dctx, self.distance_type)) stddev[getattr(dctx, self.distance_type) < dists[0]] = sigma[0] stddev[getattr(dctx, self.distance_type) > dists[-1]] = sigma[-1] stddevs.append(stddev) return stddevs
[ "def", "_get_stddevs", "(", "self", ",", "dists", ",", "mag", ",", "dctx", ",", "imt", ",", "stddev_types", ")", ":", "stddevs", "=", "[", "]", "for", "stddev_type", "in", "stddev_types", ":", "if", "stddev_type", "not", "in", "self", ".", "DEFINED_FOR_STANDARD_DEVIATION_TYPES", ":", "raise", "ValueError", "(", "\"Standard Deviation type %s not supported\"", "%", "stddev_type", ")", "sigma", "=", "self", ".", "_return_tables", "(", "mag", ",", "imt", ",", "stddev_type", ")", "interpolator_std", "=", "interp1d", "(", "dists", ",", "sigma", ",", "bounds_error", "=", "False", ")", "stddev", "=", "interpolator_std", "(", "getattr", "(", "dctx", ",", "self", ".", "distance_type", ")", ")", "stddev", "[", "getattr", "(", "dctx", ",", "self", ".", "distance_type", ")", "<", "dists", "[", "0", "]", "]", "=", "sigma", "[", "0", "]", "stddev", "[", "getattr", "(", "dctx", ",", "self", ".", "distance_type", ")", ">", "dists", "[", "-", "1", "]", "]", "=", "sigma", "[", "-", "1", "]", "stddevs", ".", "append", "(", "stddev", ")", "return", "stddevs" ]
Returns the total standard deviation of the intensity measure level from the tables. :param dists: The distance vector for the given magnitude and IMT :param mag: The rupture magnitude :param dctx: The distance context :param imt: Intensity measure type :param stddev_types: List of standard deviation types
[ "Returns", "the", "total", "standard", "deviation", "of", "the", "intensity", "measure", "level", "from", "the", "tables", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L454-L480
gem/oq-engine
openquake/hazardlib/gsim/gmpe_table.py
GMPETable._return_tables
def _return_tables(self, mag, imt, val_type): """ Returns the vector of ground motions or standard deviations corresponding to the specific magnitude and intensity measure type. :param val_type: String indicating the type of data {"IMLs", "Total", "Inter" etc} """ if imt.name in 'PGA PGV': # Get scalar imt if val_type == "IMLs": iml_table = self.imls[imt.name][:] else: iml_table = self.stddevs[val_type][imt.name][:] n_d, n_s, n_m = iml_table.shape iml_table = iml_table.reshape([n_d, n_m]) else: if val_type == "IMLs": periods = self.imls["T"][:] iml_table = self.imls["SA"][:] else: periods = self.stddevs[val_type]["T"][:] iml_table = self.stddevs[val_type]["SA"][:] low_period = round(periods[0], 7) high_period = round(periods[-1], 7) if (round(imt.period, 7) < low_period) or ( round(imt.period, 7) > high_period): raise ValueError("Spectral period %.3f outside of valid range " "(%.3f to %.3f)" % (imt.period, periods[0], periods[-1])) # Apply log-log interpolation for spectral period interpolator = interp1d(numpy.log10(periods), numpy.log10(iml_table), axis=1) iml_table = 10. ** interpolator(numpy.log10(imt.period)) return self.apply_magnitude_interpolation(mag, iml_table)
python
def _return_tables(self, mag, imt, val_type): if imt.name in 'PGA PGV': if val_type == "IMLs": iml_table = self.imls[imt.name][:] else: iml_table = self.stddevs[val_type][imt.name][:] n_d, n_s, n_m = iml_table.shape iml_table = iml_table.reshape([n_d, n_m]) else: if val_type == "IMLs": periods = self.imls["T"][:] iml_table = self.imls["SA"][:] else: periods = self.stddevs[val_type]["T"][:] iml_table = self.stddevs[val_type]["SA"][:] low_period = round(periods[0], 7) high_period = round(periods[-1], 7) if (round(imt.period, 7) < low_period) or ( round(imt.period, 7) > high_period): raise ValueError("Spectral period %.3f outside of valid range " "(%.3f to %.3f)" % (imt.period, periods[0], periods[-1])) interpolator = interp1d(numpy.log10(periods), numpy.log10(iml_table), axis=1) iml_table = 10. ** interpolator(numpy.log10(imt.period)) return self.apply_magnitude_interpolation(mag, iml_table)
[ "def", "_return_tables", "(", "self", ",", "mag", ",", "imt", ",", "val_type", ")", ":", "if", "imt", ".", "name", "in", "'PGA PGV'", ":", "# Get scalar imt", "if", "val_type", "==", "\"IMLs\"", ":", "iml_table", "=", "self", ".", "imls", "[", "imt", ".", "name", "]", "[", ":", "]", "else", ":", "iml_table", "=", "self", ".", "stddevs", "[", "val_type", "]", "[", "imt", ".", "name", "]", "[", ":", "]", "n_d", ",", "n_s", ",", "n_m", "=", "iml_table", ".", "shape", "iml_table", "=", "iml_table", ".", "reshape", "(", "[", "n_d", ",", "n_m", "]", ")", "else", ":", "if", "val_type", "==", "\"IMLs\"", ":", "periods", "=", "self", ".", "imls", "[", "\"T\"", "]", "[", ":", "]", "iml_table", "=", "self", ".", "imls", "[", "\"SA\"", "]", "[", ":", "]", "else", ":", "periods", "=", "self", ".", "stddevs", "[", "val_type", "]", "[", "\"T\"", "]", "[", ":", "]", "iml_table", "=", "self", ".", "stddevs", "[", "val_type", "]", "[", "\"SA\"", "]", "[", ":", "]", "low_period", "=", "round", "(", "periods", "[", "0", "]", ",", "7", ")", "high_period", "=", "round", "(", "periods", "[", "-", "1", "]", ",", "7", ")", "if", "(", "round", "(", "imt", ".", "period", ",", "7", ")", "<", "low_period", ")", "or", "(", "round", "(", "imt", ".", "period", ",", "7", ")", ">", "high_period", ")", ":", "raise", "ValueError", "(", "\"Spectral period %.3f outside of valid range \"", "\"(%.3f to %.3f)\"", "%", "(", "imt", ".", "period", ",", "periods", "[", "0", "]", ",", "periods", "[", "-", "1", "]", ")", ")", "# Apply log-log interpolation for spectral period", "interpolator", "=", "interp1d", "(", "numpy", ".", "log10", "(", "periods", ")", ",", "numpy", ".", "log10", "(", "iml_table", ")", ",", "axis", "=", "1", ")", "iml_table", "=", "10.", "**", "interpolator", "(", "numpy", ".", "log10", "(", "imt", ".", "period", ")", ")", "return", "self", ".", "apply_magnitude_interpolation", "(", "mag", ",", "iml_table", ")" ]
Returns the vector of ground motions or standard deviations corresponding to the specific magnitude and intensity measure type. :param val_type: String indicating the type of data {"IMLs", "Total", "Inter" etc}
[ "Returns", "the", "vector", "of", "ground", "motions", "or", "standard", "deviations", "corresponding", "to", "the", "specific", "magnitude", "and", "intensity", "measure", "type", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L482-L518
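The log-log period interpolation in _return_tables rests on the assumption that spectral acceleration is approximately linear in log10(SA) versus log10(T) between tabulated periods. A toy check with invented periods and a two-row IML table (e.g. two distances):

import numpy
from scipy.interpolate import interp1d

periods = numpy.array([0.1, 0.2, 0.5, 1.0, 2.0])       # tabulated T (s)
iml_table = numpy.array([[0.5, 0.6, 0.4, 0.2, 0.1],    # hypothetical SA values,
                         [0.7, 0.9, 0.6, 0.3, 0.15]])  # one row per distance

interpolator = interp1d(numpy.log10(periods), numpy.log10(iml_table), axis=1)
print(10. ** interpolator(numpy.log10(0.3)))           # SA at T = 0.3 s, per row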
gem/oq-engine
openquake/hazardlib/gsim/gmpe_table.py
GMPETable.apply_magnitude_interpolation
def apply_magnitude_interpolation(self, mag, iml_table): """ Interpolates the tables to the required magnitude level :param float mag: Magnitude :param iml_table: Intensity measure level table """ # do not allow "mag" to exceed maximum table magnitude if mag > self.m_w[-1]: mag = self.m_w[-1] # Get magnitude values if mag < self.m_w[0] or mag > self.m_w[-1]: raise ValueError("Magnitude %.2f outside of supported range " "(%.2f to %.2f)" % (mag, self.m_w[0], self.m_w[-1])) # It is assumed that log10 of the spectral acceleration scales # linearly (or approximately linearly) with magnitude m_interpolator = interp1d(self.m_w, numpy.log10(iml_table), axis=1) return 10.0 ** m_interpolator(mag)
python
def apply_magnitude_interpolation(self, mag, iml_table): if mag > self.m_w[-1]: mag = self.m_w[-1] if mag < self.m_w[0] or mag > self.m_w[-1]: raise ValueError("Magnitude %.2f outside of supported range " "(%.2f to %.2f)" % (mag, self.m_w[0], self.m_w[-1])) m_interpolator = interp1d(self.m_w, numpy.log10(iml_table), axis=1) return 10.0 ** m_interpolator(mag)
[ "def", "apply_magnitude_interpolation", "(", "self", ",", "mag", ",", "iml_table", ")", ":", "# do not allow \"mag\" to exceed maximum table magnitude", "if", "mag", ">", "self", ".", "m_w", "[", "-", "1", "]", ":", "mag", "=", "self", ".", "m_w", "[", "-", "1", "]", "# Get magnitude values", "if", "mag", "<", "self", ".", "m_w", "[", "0", "]", "or", "mag", ">", "self", ".", "m_w", "[", "-", "1", "]", ":", "raise", "ValueError", "(", "\"Magnitude %.2f outside of supported range \"", "\"(%.2f to %.2f)\"", "%", "(", "mag", ",", "self", ".", "m_w", "[", "0", "]", ",", "self", ".", "m_w", "[", "-", "1", "]", ")", ")", "# It is assumed that log10 of the spectral acceleration scales", "# linearly (or approximately linearly) with magnitude", "m_interpolator", "=", "interp1d", "(", "self", ".", "m_w", ",", "numpy", ".", "log10", "(", "iml_table", ")", ",", "axis", "=", "1", ")", "return", "10.0", "**", "m_interpolator", "(", "mag", ")" ]
Interpolates the tables to the required magnitude level :param float mag: Magnitude :param iml_table: Intensity measure level table
[ "Interpolates", "the", "tables", "to", "the", "required", "magnitude", "level" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L520-L542
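apply_magnitude_interpolation makes the analogous assumption in magnitude: log10(IML) is taken as linear in M between table levels, so an interpolated value is a geometric mean of its neighbours. A one-row toy example (all numbers invented):

import numpy
from scipy.interpolate import interp1d

m_w = numpy.array([5.0, 6.0, 7.0])
iml_table = numpy.array([[0.01, 0.1, 1.0]])   # hypothetical IMLs per magnitude

m_interpolator = interp1d(m_w, numpy.log10(iml_table), axis=1)
print(10.0 ** m_interpolator(6.5))            # ~0.316 = sqrt(0.1 * 1.0)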
gem/oq-engine
openquake/hazardlib/gsim/sadigh_1997.py
SadighEtAl1997.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. """ assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types) # GMPE differentiates strike-slip, reverse and normal ruptures, # but combines normal and strike-slip into one category. See page 180. is_reverse = (45 <= rup.rake <= 135) stddevs = [numpy.zeros_like(sites.vs30) for _ in stddev_types] means = numpy.zeros_like(sites.vs30) [rocks_i] = (sites.vs30 > self.ROCK_VS30).nonzero() if len(rocks_i): rrup = dists.rrup.take(rocks_i) mean_rock = self._get_mean_rock(rup.mag, rup.rake, rrup, is_reverse, imt) means.put(rocks_i, mean_rock) for stddev_arr in stddevs: stddev_rock = self._get_stddev_rock(rup.mag, imt) stddev_arr.put(rocks_i, stddev_rock) [soils_i] = (sites.vs30 <= self.ROCK_VS30).nonzero() if len(soils_i): rrup = dists.rrup.take(soils_i) mean_soil = self._get_mean_deep_soil(rup.mag, rup.rake, rrup, is_reverse, imt) means.put(soils_i, mean_soil) for stddev_arr in stddevs: stddev_soil = self._get_stddev_deep_soil(rup.mag, imt) stddev_arr.put(soils_i, stddev_soil) return means, stddevs
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types) is_reverse = (45 <= rup.rake <= 135) stddevs = [numpy.zeros_like(sites.vs30) for _ in stddev_types] means = numpy.zeros_like(sites.vs30) [rocks_i] = (sites.vs30 > self.ROCK_VS30).nonzero() if len(rocks_i): rrup = dists.rrup.take(rocks_i) mean_rock = self._get_mean_rock(rup.mag, rup.rake, rrup, is_reverse, imt) means.put(rocks_i, mean_rock) for stddev_arr in stddevs: stddev_rock = self._get_stddev_rock(rup.mag, imt) stddev_arr.put(rocks_i, stddev_rock) [soils_i] = (sites.vs30 <= self.ROCK_VS30).nonzero() if len(soils_i): rrup = dists.rrup.take(soils_i) mean_soil = self._get_mean_deep_soil(rup.mag, rup.rake, rrup, is_reverse, imt) means.put(soils_i, mean_soil) for stddev_arr in stddevs: stddev_soil = self._get_stddev_deep_soil(rup.mag, imt) stddev_arr.put(soils_i, stddev_soil) return means, stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "assert", "all", "(", "stddev_type", "in", "self", ".", "DEFINED_FOR_STANDARD_DEVIATION_TYPES", "for", "stddev_type", "in", "stddev_types", ")", "# GMPE differentiates strike-slip, reverse and normal ruptures,", "# but combines normal and strike-slip into one category. See page 180.", "is_reverse", "=", "(", "45", "<=", "rup", ".", "rake", "<=", "135", ")", "stddevs", "=", "[", "numpy", ".", "zeros_like", "(", "sites", ".", "vs30", ")", "for", "_", "in", "stddev_types", "]", "means", "=", "numpy", ".", "zeros_like", "(", "sites", ".", "vs30", ")", "[", "rocks_i", "]", "=", "(", "sites", ".", "vs30", ">", "self", ".", "ROCK_VS30", ")", ".", "nonzero", "(", ")", "if", "len", "(", "rocks_i", ")", ":", "rrup", "=", "dists", ".", "rrup", ".", "take", "(", "rocks_i", ")", "mean_rock", "=", "self", ".", "_get_mean_rock", "(", "rup", ".", "mag", ",", "rup", ".", "rake", ",", "rrup", ",", "is_reverse", ",", "imt", ")", "means", ".", "put", "(", "rocks_i", ",", "mean_rock", ")", "for", "stddev_arr", "in", "stddevs", ":", "stddev_rock", "=", "self", ".", "_get_stddev_rock", "(", "rup", ".", "mag", ",", "imt", ")", "stddev_arr", ".", "put", "(", "rocks_i", ",", "stddev_rock", ")", "[", "soils_i", "]", "=", "(", "sites", ".", "vs30", "<=", "self", ".", "ROCK_VS30", ")", ".", "nonzero", "(", ")", "if", "len", "(", "soils_i", ")", ":", "rrup", "=", "dists", ".", "rrup", ".", "take", "(", "soils_i", ")", "mean_soil", "=", "self", ".", "_get_mean_deep_soil", "(", "rup", ".", "mag", ",", "rup", ".", "rake", ",", "rrup", ",", "is_reverse", ",", "imt", ")", "means", ".", "put", "(", "soils_i", ",", "mean_soil", ")", "for", "stddev_arr", "in", "stddevs", ":", "stddev_soil", "=", "self", ".", "_get_stddev_deep_soil", "(", "rup", ".", "mag", ",", "imt", ")", "stddev_arr", ".", "put", "(", "soils_i", ",", "stddev_soil", ")", "return", "means", ",", "stddevs" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sadigh_1997.py#L76-L112
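The rock/soil branching above uses the nonzero/take/put idiom to evaluate each site class with its own equation and scatter the results back into one array. A stripped-down sketch of that pattern; the 750 m/s threshold and the two formulas are placeholders, not the Sadigh coefficients:

import numpy

vs30 = numpy.array([800., 300., 1200., 150.])
rrup = numpy.array([10., 20., 30., 40.])
means = numpy.zeros_like(vs30)

[rocks_i] = (vs30 > 750.).nonzero()                       # "rock" site indices
means.put(rocks_i, 2.0 * numpy.log(rrup.take(rocks_i)))   # placeholder model A
[soils_i] = (vs30 <= 750.).nonzero()
means.put(soils_i, 1.5 * numpy.log(rrup.take(soils_i)))   # placeholder model B
print(means)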
gem/oq-engine
openquake/hazardlib/gsim/sadigh_1997.py
SadighEtAl1997._get_mean_deep_soil
def _get_mean_deep_soil(self, mag, rake, rrup, is_reverse, imt): """ Calculate and return the mean intensity for deep soil sites. Implements an equation from table 4. """ if mag <= self.NEAR_FIELD_SATURATION_MAG: c4 = self.COEFFS_SOIL_IMT_INDEPENDENT['c4lowmag'] c5 = self.COEFFS_SOIL_IMT_INDEPENDENT['c5lowmag'] else: c4 = self.COEFFS_SOIL_IMT_INDEPENDENT['c4himag'] c5 = self.COEFFS_SOIL_IMT_INDEPENDENT['c5himag'] c2 = self.COEFFS_SOIL_IMT_INDEPENDENT['c2'] c3 = self.COEFFS_SOIL_IMT_INDEPENDENT['c3'] C = self.COEFFS_SOIL[imt] if is_reverse: c1 = self.COEFFS_SOIL_IMT_INDEPENDENT['c1r'] c6 = C['c6r'] else: c1 = self.COEFFS_SOIL_IMT_INDEPENDENT['c1ss'] c6 = C['c6ss'] # clip mag if greater than 8.5. This is to avoid # ValueError: negative number cannot be raised to a fractional power mag = 8.5 if mag > 8.5 else mag return (c1 + c2 * mag + c6 + C['c7'] * ((8.5 - mag) ** 2.5) - c3 * numpy.log(rrup + c4 * numpy.exp(c5 * mag)))
python
def _get_mean_deep_soil(self, mag, rake, rrup, is_reverse, imt): if mag <= self.NEAR_FIELD_SATURATION_MAG: c4 = self.COEFFS_SOIL_IMT_INDEPENDENT['c4lowmag'] c5 = self.COEFFS_SOIL_IMT_INDEPENDENT['c5lowmag'] else: c4 = self.COEFFS_SOIL_IMT_INDEPENDENT['c4himag'] c5 = self.COEFFS_SOIL_IMT_INDEPENDENT['c5himag'] c2 = self.COEFFS_SOIL_IMT_INDEPENDENT['c2'] c3 = self.COEFFS_SOIL_IMT_INDEPENDENT['c3'] C = self.COEFFS_SOIL[imt] if is_reverse: c1 = self.COEFFS_SOIL_IMT_INDEPENDENT['c1r'] c6 = C['c6r'] else: c1 = self.COEFFS_SOIL_IMT_INDEPENDENT['c1ss'] c6 = C['c6ss'] mag = 8.5 if mag > 8.5 else mag return (c1 + c2 * mag + c6 + C['c7'] * ((8.5 - mag) ** 2.5) - c3 * numpy.log(rrup + c4 * numpy.exp(c5 * mag)))
[ "def", "_get_mean_deep_soil", "(", "self", ",", "mag", ",", "rake", ",", "rrup", ",", "is_reverse", ",", "imt", ")", ":", "if", "mag", "<=", "self", ".", "NEAR_FIELD_SATURATION_MAG", ":", "c4", "=", "self", ".", "COEFFS_SOIL_IMT_INDEPENDENT", "[", "'c4lowmag'", "]", "c5", "=", "self", ".", "COEFFS_SOIL_IMT_INDEPENDENT", "[", "'c5lowmag'", "]", "else", ":", "c4", "=", "self", ".", "COEFFS_SOIL_IMT_INDEPENDENT", "[", "'c4himag'", "]", "c5", "=", "self", ".", "COEFFS_SOIL_IMT_INDEPENDENT", "[", "'c5himag'", "]", "c2", "=", "self", ".", "COEFFS_SOIL_IMT_INDEPENDENT", "[", "'c2'", "]", "c3", "=", "self", ".", "COEFFS_SOIL_IMT_INDEPENDENT", "[", "'c3'", "]", "C", "=", "self", ".", "COEFFS_SOIL", "[", "imt", "]", "if", "is_reverse", ":", "c1", "=", "self", ".", "COEFFS_SOIL_IMT_INDEPENDENT", "[", "'c1r'", "]", "c6", "=", "C", "[", "'c6r'", "]", "else", ":", "c1", "=", "self", ".", "COEFFS_SOIL_IMT_INDEPENDENT", "[", "'c1ss'", "]", "c6", "=", "C", "[", "'c6ss'", "]", "# clip mag if greater than 8.5. This is to avoid", "# ValueError: negative number cannot be raised to a fractional power", "mag", "=", "8.5", "if", "mag", ">", "8.5", "else", "mag", "return", "(", "c1", "+", "c2", "*", "mag", "+", "c6", "+", "C", "[", "'c7'", "]", "*", "(", "(", "8.5", "-", "mag", ")", "**", "2.5", ")", "-", "c3", "*", "numpy", ".", "log", "(", "rrup", "+", "c4", "*", "numpy", ".", "exp", "(", "c5", "*", "mag", ")", ")", ")" ]
Calculate and return the mean intensity for deep soil sites. Implements an equation from table 4.
[ "Calculate", "and", "return", "the", "mean", "intensity", "for", "deep", "soil", "sites", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sadigh_1997.py#L114-L139
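The clip at magnitude 8.5 protects the (8.5 - mag) ** 2.5 term. The original comment cites the ValueError raised by math.pow (and by ** in Python 2) for a negative base with a fractional exponent; numpy scalars return nan instead. A quick demonstration:

import math
import numpy

mag = 8.7
try:
    math.pow(8.5 - mag, 2.5)          # negative base, fractional exponent
except ValueError as exc:
    print("math.pow:", exc)           # math domain error
print("numpy:", numpy.float64(8.5 - mag) ** 2.5)  # nan, with a RuntimeWarning
mag = 8.5 if mag > 8.5 else mag       # the clip used in the GMPE
print("clipped:", (8.5 - mag) ** 2.5)             # 0.0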
gem/oq-engine
openquake/hazardlib/gsim/sadigh_1997.py
SadighEtAl1997._get_mean_rock
def _get_mean_rock(self, mag, _rake, rrup, is_reverse, imt): """ Calculate and return the mean intensity for rock sites. Implements an equation from table 2. """ if mag <= self.NEAR_FIELD_SATURATION_MAG: C = self.COEFFS_ROCK_LOWMAG[imt] else: C = self.COEFFS_ROCK_HIMAG[imt] # clip mag if greater than 8.5. This is to avoid # ValueError: negative number cannot be raised to a fractional power mag = 8.5 if mag > 8.5 else mag mean = ( C['c1'] + C['c2'] * mag + C['c3'] * ((8.5 - mag) ** 2.5) + C['c4'] * numpy.log(rrup + numpy.exp(C['c5'] + C['c6'] * mag)) + C['c7'] * numpy.log(rrup + 2) ) if is_reverse: # footnote in table 2 says that for reverse ruptures # the mean amplitude value should be multiplied by 1.2 mean += 0.1823215567939546 # == log(1.2) return mean
python
def _get_mean_rock(self, mag, _rake, rrup, is_reverse, imt): if mag <= self.NEAR_FIELD_SATURATION_MAG: C = self.COEFFS_ROCK_LOWMAG[imt] else: C = self.COEFFS_ROCK_HIMAG[imt] mag = 8.5 if mag > 8.5 else mag mean = ( C['c1'] + C['c2'] * mag + C['c3'] * ((8.5 - mag) ** 2.5) + C['c4'] * numpy.log(rrup + numpy.exp(C['c5'] + C['c6'] * mag)) + C['c7'] * numpy.log(rrup + 2) ) if is_reverse: mean += 0.1823215567939546 return mean
[ "def", "_get_mean_rock", "(", "self", ",", "mag", ",", "_rake", ",", "rrup", ",", "is_reverse", ",", "imt", ")", ":", "if", "mag", "<=", "self", ".", "NEAR_FIELD_SATURATION_MAG", ":", "C", "=", "self", ".", "COEFFS_ROCK_LOWMAG", "[", "imt", "]", "else", ":", "C", "=", "self", ".", "COEFFS_ROCK_HIMAG", "[", "imt", "]", "# clip mag if greater than 8.5. This is to avoid", "# ValueError: negative number cannot be raised to a fractional power", "mag", "=", "8.5", "if", "mag", ">", "8.5", "else", "mag", "mean", "=", "(", "C", "[", "'c1'", "]", "+", "C", "[", "'c2'", "]", "*", "mag", "+", "C", "[", "'c3'", "]", "*", "(", "(", "8.5", "-", "mag", ")", "**", "2.5", ")", "+", "C", "[", "'c4'", "]", "*", "numpy", ".", "log", "(", "rrup", "+", "numpy", ".", "exp", "(", "C", "[", "'c5'", "]", "+", "C", "[", "'c6'", "]", "*", "mag", ")", ")", "+", "C", "[", "'c7'", "]", "*", "numpy", ".", "log", "(", "rrup", "+", "2", ")", ")", "if", "is_reverse", ":", "# footnote in table 2 says that for reverse ruptures", "# the mean amplitude value should be multiplied by 1.2", "mean", "+=", "0.1823215567939546", "# == log(1.2)", "return", "mean" ]
Calculate and return the mean intensity for rock sites. Implements an equation from table 2.
[ "Calculate", "and", "return", "the", "mean", "intensity", "for", "rock", "sites", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sadigh_1997.py#L141-L163
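The magic constant 0.1823215567939546 in _get_mean_rock is ln(1.2): since the mean is carried in natural-log units, the footnote's "multiply the amplitude by 1.2" for reverse events becomes an additive shift. A one-line verification:

import numpy

print(numpy.log(1.2))  # 0.1823215567939546
# Adding log(1.2) in log space scales the amplitude by exactly 1.2
print(numpy.exp(3.0 + numpy.log(1.2)) / numpy.exp(3.0))  # 1.2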
gem/oq-engine
openquake/hazardlib/gsim/sadigh_1997.py
SadighEtAl1997._get_stddev_rock
def _get_stddev_rock(self, mag, imt): """ Calculate and return total standard deviation for rock sites. Implements formulae from table 3. """ C = self.COEFFS_ROCK_STDDERR[imt] if mag > C['maxmag']: return C['maxsigma'] else: return C['sigma0'] + C['magfactor'] * mag
python
def _get_stddev_rock(self, mag, imt): C = self.COEFFS_ROCK_STDDERR[imt] if mag > C['maxmag']: return C['maxsigma'] else: return C['sigma0'] + C['magfactor'] * mag
[ "def", "_get_stddev_rock", "(", "self", ",", "mag", ",", "imt", ")", ":", "C", "=", "self", ".", "COEFFS_ROCK_STDDERR", "[", "imt", "]", "if", "mag", ">", "C", "[", "'maxmag'", "]", ":", "return", "C", "[", "'maxsigma'", "]", "else", ":", "return", "C", "[", "'sigma0'", "]", "+", "C", "[", "'magfactor'", "]", "*", "mag" ]
Calculate and return total standard deviation for rock sites. Implements formulae from table 3.
[ "Calculate", "and", "return", "total", "standard", "deviation", "for", "rock", "sites", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sadigh_1997.py#L165-L175
gem/oq-engine
openquake/hazardlib/gsim/sadigh_1997.py
SadighEtAl1997._get_stddev_deep_soil
def _get_stddev_deep_soil(self, mag, imt): """ Calculate and return total standard deviation for deep soil sites. Implements formulae from the last column of table 4. """ # footnote from table 4 says that stderr for magnitudes over 7 # is equal to one of magnitude 7. if mag > 7: mag = 7 C = self.COEFFS_SOIL[imt] return C['sigma0'] + C['magfactor'] * mag
python
def _get_stddev_deep_soil(self, mag, imt): if mag > 7: mag = 7 C = self.COEFFS_SOIL[imt] return C['sigma0'] + C['magfactor'] * mag
[ "def", "_get_stddev_deep_soil", "(", "self", ",", "mag", ",", "imt", ")", ":", "# footnote from table 4 says that stderr for magnitudes over 7", "# is equal to one of magnitude 7.", "if", "mag", ">", "7", ":", "mag", "=", "7", "C", "=", "self", ".", "COEFFS_SOIL", "[", "imt", "]", "return", "C", "[", "'sigma0'", "]", "+", "C", "[", "'magfactor'", "]", "*", "mag" ]
Calculate and return total standard deviation for deep soil sites. Implements formulae from the last column of table 4.
[ "Calculate", "and", "return", "total", "standard", "deviation", "for", "deep", "soil", "sites", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sadigh_1997.py#L177-L188
gem/oq-engine
openquake/commands/zip.py
zip
def zip(what, archive_zip='', risk_file=''): """ Zip into an archive one or two job.ini files with all related files """ if os.path.isdir(what): oqzip.zip_all(what) elif what.endswith('.xml') and '<logicTree' in open(what).read(512): # hack to see if the NRML file is of kind logicTree oqzip.zip_source_model(what, archive_zip) elif what.endswith('.xml') and '<exposureModel' in open(what).read(512): # hack to see if the NRML file is of kind exposureModel oqzip.zip_exposure(what, archive_zip) elif what.endswith('.ini'): # a job.ini oqzip.zip_job(what, archive_zip, risk_file) else: sys.exit('Cannot zip %s' % what)
python
def zip(what, archive_zip='', risk_file=''): if os.path.isdir(what): oqzip.zip_all(what) elif what.endswith('.xml') and '<logicTree' in open(what).read(512): oqzip.zip_source_model(what, archive_zip) elif what.endswith('.xml') and '<exposureModel' in open(what).read(512): oqzip.zip_exposure(what, archive_zip) elif what.endswith('.ini'): oqzip.zip_job(what, archive_zip, risk_file) else: sys.exit('Cannot zip %s' % what)
[ "def", "zip", "(", "what", ",", "archive_zip", "=", "''", ",", "risk_file", "=", "''", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "what", ")", ":", "oqzip", ".", "zip_all", "(", "what", ")", "elif", "what", ".", "endswith", "(", "'.xml'", ")", "and", "'<logicTree'", "in", "open", "(", "what", ")", ".", "read", "(", "512", ")", ":", "# hack to see if the NRML file is of kind logicTree", "oqzip", ".", "zip_source_model", "(", "what", ",", "archive_zip", ")", "elif", "what", ".", "endswith", "(", "'.xml'", ")", "and", "'<exposureModel'", "in", "open", "(", "what", ")", ".", "read", "(", "512", ")", ":", "# hack to see if the NRML file is of kind exposureModel", "oqzip", ".", "zip_exposure", "(", "what", ",", "archive_zip", ")", "elif", "what", ".", "endswith", "(", "'.ini'", ")", ":", "# a job.ini", "oqzip", ".", "zip_job", "(", "what", ",", "archive_zip", ",", "risk_file", ")", "else", ":", "sys", ".", "exit", "(", "'Cannot zip %s'", "%", "what", ")" ]
Zip into an archive one or two job.ini files with all related files
[ "Zip", "into", "an", "archive", "one", "or", "two", "job", ".", "ini", "files", "with", "all", "related", "files" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/zip.py#L25-L40
gem/oq-engine
openquake/commands/reduce.py
reduce
def reduce(fname, reduction_factor): """ Produce a submodel from `fname` by sampling the nodes randomly. Supports source models, site models and exposure models. As a special case, it is also able to reduce .csv files by sampling the lines. This is a debugging utility to reduce large computations to small ones. """ if fname.endswith('.csv'): with open(fname) as f: line = f.readline() # read the first line if csv.Sniffer().has_header(line): header = line all_lines = f.readlines() else: header = None f.seek(0) all_lines = f.readlines() lines = general.random_filter(all_lines, reduction_factor) shutil.copy(fname, fname + '.bak') print('Copied the original file in %s.bak' % fname) _save_csv(fname, lines, header) print('Extracted %d lines out of %d' % (len(lines), len(all_lines))) return elif fname.endswith('.npy'): array = numpy.load(fname) shutil.copy(fname, fname + '.bak') print('Copied the original file in %s.bak' % fname) arr = numpy.array(general.random_filter(array, reduction_factor)) numpy.save(fname, arr) print('Extracted %d rows out of %d' % (len(arr), len(array))) return node = nrml.read(fname) model = node[0] if model.tag.endswith('exposureModel'): total = len(model.assets) model.assets.nodes = general.random_filter( model.assets, reduction_factor) num_nodes = len(model.assets) elif model.tag.endswith('siteModel'): total = len(model) model.nodes = general.random_filter(model, reduction_factor) num_nodes = len(model) elif model.tag.endswith('sourceModel'): reduce_source_model(fname, reduction_factor) return elif model.tag.endswith('logicTree'): for smpath in logictree.collect_info(fname).smpaths: reduce_source_model(smpath, reduction_factor) return else: raise RuntimeError('Unknown model tag: %s' % model.tag) save_bak(fname, node, num_nodes, total)
python
def reduce(fname, reduction_factor): if fname.endswith('.csv'): with open(fname) as f: line = f.readline() if csv.Sniffer().has_header(line): header = line all_lines = f.readlines() else: header = None f.seek(0) all_lines = f.readlines() lines = general.random_filter(all_lines, reduction_factor) shutil.copy(fname, fname + '.bak') print('Copied the original file in %s.bak' % fname) _save_csv(fname, lines, header) print('Extracted %d lines out of %d' % (len(lines), len(all_lines))) return elif fname.endswith('.npy'): array = numpy.load(fname) shutil.copy(fname, fname + '.bak') print('Copied the original file in %s.bak' % fname) arr = numpy.array(general.random_filter(array, reduction_factor)) numpy.save(fname, arr) print('Extracted %d rows out of %d' % (len(arr), len(array))) return node = nrml.read(fname) model = node[0] if model.tag.endswith('exposureModel'): total = len(model.assets) model.assets.nodes = general.random_filter( model.assets, reduction_factor) num_nodes = len(model.assets) elif model.tag.endswith('siteModel'): total = len(model) model.nodes = general.random_filter(model, reduction_factor) num_nodes = len(model) elif model.tag.endswith('sourceModel'): reduce_source_model(fname, reduction_factor) return elif model.tag.endswith('logicTree'): for smpath in logictree.collect_info(fname).smpaths: reduce_source_model(smpath, reduction_factor) return else: raise RuntimeError('Unknown model tag: %s' % model.tag) save_bak(fname, node, num_nodes, total)
[ "def", "reduce", "(", "fname", ",", "reduction_factor", ")", ":", "if", "fname", ".", "endswith", "(", "'.csv'", ")", ":", "with", "open", "(", "fname", ")", "as", "f", ":", "line", "=", "f", ".", "readline", "(", ")", "# read the first line", "if", "csv", ".", "Sniffer", "(", ")", ".", "has_header", "(", "line", ")", ":", "header", "=", "line", "all_lines", "=", "f", ".", "readlines", "(", ")", "else", ":", "header", "=", "None", "f", ".", "seek", "(", "0", ")", "all_lines", "=", "f", ".", "readlines", "(", ")", "lines", "=", "general", ".", "random_filter", "(", "all_lines", ",", "reduction_factor", ")", "shutil", ".", "copy", "(", "fname", ",", "fname", "+", "'.bak'", ")", "print", "(", "'Copied the original file in %s.bak'", "%", "fname", ")", "_save_csv", "(", "fname", ",", "lines", ",", "header", ")", "print", "(", "'Extracted %d lines out of %d'", "%", "(", "len", "(", "lines", ")", ",", "len", "(", "all_lines", ")", ")", ")", "return", "elif", "fname", ".", "endswith", "(", "'.npy'", ")", ":", "array", "=", "numpy", ".", "load", "(", "fname", ")", "shutil", ".", "copy", "(", "fname", ",", "fname", "+", "'.bak'", ")", "print", "(", "'Copied the original file in %s.bak'", "%", "fname", ")", "arr", "=", "numpy", ".", "array", "(", "general", ".", "random_filter", "(", "array", ",", "reduction_factor", ")", ")", "numpy", ".", "save", "(", "fname", ",", "arr", ")", "print", "(", "'Extracted %d rows out of %d'", "%", "(", "len", "(", "arr", ")", ",", "len", "(", "array", ")", ")", ")", "return", "node", "=", "nrml", ".", "read", "(", "fname", ")", "model", "=", "node", "[", "0", "]", "if", "model", ".", "tag", ".", "endswith", "(", "'exposureModel'", ")", ":", "total", "=", "len", "(", "model", ".", "assets", ")", "model", ".", "assets", ".", "nodes", "=", "general", ".", "random_filter", "(", "model", ".", "assets", ",", "reduction_factor", ")", "num_nodes", "=", "len", "(", "model", ".", "assets", ")", "elif", "model", ".", "tag", ".", "endswith", "(", "'siteModel'", ")", ":", "total", "=", "len", "(", "model", ")", "model", ".", "nodes", "=", "general", ".", "random_filter", "(", "model", ",", "reduction_factor", ")", "num_nodes", "=", "len", "(", "model", ")", "elif", "model", ".", "tag", ".", "endswith", "(", "'sourceModel'", ")", ":", "reduce_source_model", "(", "fname", ",", "reduction_factor", ")", "return", "elif", "model", ".", "tag", ".", "endswith", "(", "'logicTree'", ")", ":", "for", "smpath", "in", "logictree", ".", "collect_info", "(", "fname", ")", ".", "smpaths", ":", "reduce_source_model", "(", "smpath", ",", "reduction_factor", ")", "return", "else", ":", "raise", "RuntimeError", "(", "'Unknown model tag: %s'", "%", "model", ".", "tag", ")", "save_bak", "(", "fname", ",", "node", ",", "num_nodes", ",", "total", ")" ]
Produce a submodel from `fname` by sampling the nodes randomly. Supports source models, site models and exposure models. As a special case, it is also able to reduce .csv files by sampling the lines. This is a debugging utility to reduce large computations to small ones.
[ "Produce", "a", "submodel", "from", "fname", "by", "sampling", "the", "nodes", "randomly", ".", "Supports", "source", "models", "site", "models", "and", "exposure", "models", ".", "As", "a", "special", "case", "it", "is", "also", "able", "to", "reduce", ".", "csv", "files", "by", "sampling", "the", "lines", ".", "This", "is", "a", "debugging", "utility", "to", "reduce", "large", "computations", "to", "small", "ones", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/reduce.py#L60-L111
gem/oq-engine
openquake/hmtk/faults/mfd/youngs_coppersmith.py
YoungsCoppersmithExponential.get_mfd
def get_mfd(self, slip, area, shear_modulus=30.0): ''' Calculates activity rate on the fault :param float slip: Slip rate in mm/yr :param area: Area of the fault (km ** 2.) :param float shear_modulus: Shear modulus of the fault (GPa) :returns: * Minimum Magnitude (float) * Bin width (float) * Occurrence Rates (numpy.ndarray) ''' # Working in Nm so convert: shear_modulus - GPa -> Nm # area - km ** 2. -> m ** 2. # slip - mm/yr -> m/yr moment_rate = (shear_modulus * 1.E9) * (area * 1.E6) * (slip / 1000.) moment_mag = _scale_moment(self.mmax, in_nm=True) beta = self.b_value * log(10.) mag = np.arange(self.mmin - (self.bin_width / 2.), self.mmax + self.bin_width, self.bin_width) if self.b_value > 1.5: print('b-value larger than 1.5 will produce invalid results in ' 'Anderson & Luco models') self.occurrence_rate = np.nan * np.ones(len(mag) - 1) return self.mmin, self.bin_width, self.occurrence_rate self.occurrence_rate = np.zeros(len(mag) - 1, dtype=float) for ival in range(0, len(mag) - 1): self.occurrence_rate[ival] = ( self.cumulative_value(mag[ival], moment_rate, beta, moment_mag) - self.cumulative_value( mag[ival + 1], moment_rate, beta, moment_mag)) return self.mmin, self.bin_width, self.occurrence_rate
python
def get_mfd(self, slip, area, shear_modulus=30.0): moment_rate = (shear_modulus * 1.E9) * (area * 1.E6) * (slip / 1000.) moment_mag = _scale_moment(self.mmax, in_nm=True) beta = self.b_value * log(10.) mag = np.arange(self.mmin - (self.bin_width / 2.), self.mmax + self.bin_width, self.bin_width) if self.b_value > 1.5: print('b-value larger than 1.5 will produce invalid results in ' 'Anderson & Luco models') self.occurrence_rate = np.nan * np.ones(len(mag) - 1) return self.mmin, self.bin_width, self.occurrence_rate self.occurrence_rate = np.zeros(len(mag) - 1, dtype=float) for ival in range(0, len(mag) - 1): self.occurrence_rate[ival] = ( self.cumulative_value(mag[ival], moment_rate, beta, moment_mag) - self.cumulative_value( mag[ival + 1], moment_rate, beta, moment_mag)) return self.mmin, self.bin_width, self.occurrence_rate
[ "def", "get_mfd", "(", "self", ",", "slip", ",", "area", ",", "shear_modulus", "=", "30.0", ")", ":", "# Working in Nm so convert: shear_modulus - GPa -> Nm", "# area - km ** 2. -> m ** 2.", "# slip - mm/yr -> m/yr", "moment_rate", "=", "(", "shear_modulus", "*", "1.E9", ")", "*", "(", "area", "*", "1.E6", ")", "*", "(", "slip", "/", "1000.", ")", "moment_mag", "=", "_scale_moment", "(", "self", ".", "mmax", ",", "in_nm", "=", "True", ")", "beta", "=", "self", ".", "b_value", "*", "log", "(", "10.", ")", "mag", "=", "np", ".", "arange", "(", "self", ".", "mmin", "-", "(", "self", ".", "bin_width", "/", "2.", ")", ",", "self", ".", "mmax", "+", "self", ".", "bin_width", ",", "self", ".", "bin_width", ")", "if", "self", ".", "b_value", ">", "1.5", ":", "print", "(", "'b-value larger than 1.5 will produce invalid results in '", "'Anderson & Luco models'", ")", "self", ".", "occurrence_rate", "=", "np", ".", "nan", "*", "np", ".", "ones", "(", "len", "(", "mag", ")", "-", "1", ")", "return", "self", ".", "mmin", ",", "self", ".", "bin_width", ",", "self", ".", "occurrence_rate", "self", ".", "occurrence_rate", "=", "np", ".", "zeros", "(", "len", "(", "mag", ")", "-", "1", ",", "dtype", "=", "float", ")", "for", "ival", "in", "range", "(", "0", ",", "len", "(", "mag", ")", "-", "1", ")", ":", "self", ".", "occurrence_rate", "[", "ival", "]", "=", "(", "self", ".", "cumulative_value", "(", "mag", "[", "ival", "]", ",", "moment_rate", ",", "beta", ",", "moment_mag", ")", "-", "self", ".", "cumulative_value", "(", "mag", "[", "ival", "+", "1", "]", ",", "moment_rate", ",", "beta", ",", "moment_mag", ")", ")", "return", "self", ".", "mmin", ",", "self", ".", "bin_width", ",", "self", ".", "occurrence_rate" ]
Calculates activity rate on the fault :param float slip: Slip rate in mm/yr :param area: Area of the fault (km ** 2.) :param float shear_modulus: Shear modulus of the fault (GPa) :returns: * Minimum Magnitude (float) * Bin width (float) * Occurrence Rates (numpy.ndarray)
[ "Calculates", "activity", "rate", "on", "the", "fault" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/youngs_coppersmith.py#L155-L195
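The single-line unit conversion at the top of get_mfd packs three changes of units: GPa to Pa, km^2 to m^2, and mm/yr to m/yr, yielding a moment rate in N-m per year. Spelled out with hypothetical fault parameters:

shear_modulus = 30.0   # GPa (typical crustal value, assumed)
area = 250.0           # km ** 2, hypothetical fault area
slip = 5.0             # mm/yr, hypothetical slip rate

moment_rate = (shear_modulus * 1.E9) * (area * 1.E6) * (slip / 1000.)
print("%.3e N-m/yr" % moment_rate)   # 3.750e+16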
gem/oq-engine
openquake/hmtk/faults/mfd/youngs_coppersmith.py
YoungsCoppersmithExponential.cumulative_value
def cumulative_value(self, mag_val, moment_rate, beta, moment_mag): ''' Calculates the cumulative rate of events with M > m0 using equation 11 of Youngs & Coppersmith (1985) :param float mag_val: Magnitude :param float moment_rate: Moment rate on fault (in Nm) from slip rate :param float beta: Exponent (b log(10)) :param float moment_mag: Moment of the upper bound magnitude ''' exponent = exp(-beta * (self.mmax - mag_val)) return (moment_rate * (D_VALUE - self.b_value) * (1. - exponent)) /\ (self.b_value * moment_mag * exponent)
python
def cumulative_value(self, mag_val, moment_rate, beta, moment_mag): exponent = exp(-beta * (self.mmax - mag_val)) return (moment_rate * (D_VALUE - self.b_value) * (1. - exponent)) /\ (self.b_value * moment_mag * exponent)
[ "def", "cumulative_value", "(", "self", ",", "mag_val", ",", "moment_rate", ",", "beta", ",", "moment_mag", ")", ":", "exponent", "=", "exp", "(", "-", "beta", "*", "(", "self", ".", "mmax", "-", "mag_val", ")", ")", "return", "(", "moment_rate", "*", "(", "D_VALUE", "-", "self", ".", "b_value", ")", "*", "(", "1.", "-", "exponent", ")", ")", "/", "(", "self", ".", "b_value", "*", "moment_mag", "*", "exponent", ")" ]
Calculates the cumulative rate of events with M > m0 using equation 11 of Youngs & Coppersmith (1985) :param float mag_val: Magnitude :param float moment_rate: Moment rate on fault (in Nm) from slip rate :param float beta: Exponent (b log(10)) :param float moment_mag: Moment of the upper bound magnitude
[ "Calculates", "the", "cumulative", "rate", "of", "events", "with", "M", ">", "m0", "using", "equation", "11", "of", "Youngs", "&", "Coppersmith", "(", "1985", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/youngs_coppersmith.py#L197-L216
gem/oq-engine
openquake/hmtk/faults/mfd/youngs_coppersmith.py
YoungsCoppersmithCharacteristic.get_mfd
def get_mfd(self, slip, area, shear_modulus=30.0): ''' Calculates activity rate on the fault :param float slip: Slip rate in mm/yr :param area: Area of the fault (km ** 2.) :param float shear_modulus: Shear modulus of the fault (GPa) :returns: * Minimum Magnitude (float) * Bin width (float) * Occurrence Rates (numpy.ndarray) Behavioural Notes: To use the openquake.hazardlib implementation the magnitudes returned will be the mid_point of the bins and not the original edge points. The minimum magnitude is updated to reflect this! ''' # Calculate moment rate in N-m / year moment_rate = (shear_modulus * 1.E9) * (area * 1.E6) * (slip / 1000.) # Get Youngs & Coppersmith rate from # youngs_coppersmith.YoungsCoppersmith1985MFD.from_total_moment_rate self.model = YoungsCoppersmith1985MFD.from_total_moment_rate( self.mmin - (self.bin_width / 2.), self.b_value, self.mmax - 0.25, moment_rate, self.bin_width) temp_data = self.model.get_annual_occurrence_rates() self.occurrence_rate = np.array([value[1] for value in temp_data]) self.mmin = np.min(np.array([value[0] for value in temp_data])) return self.mmin, self.bin_width, self.occurrence_rate
python
def get_mfd(self, slip, area, shear_modulus=30.0): moment_rate = (shear_modulus * 1.E9) * (area * 1.E6) * (slip / 1000.) self.model = YoungsCoppersmith1985MFD.from_total_moment_rate( self.mmin - (self.bin_width / 2.), self.b_value, self.mmax - 0.25, moment_rate, self.bin_width) temp_data = self.model.get_annual_occurrence_rates() self.occurrence_rate = np.array([value[1] for value in temp_data]) self.mmin = np.min(np.array([value[0] for value in temp_data])) return self.mmin, self.bin_width, self.occurrence_rate
[ "def", "get_mfd", "(", "self", ",", "slip", ",", "area", ",", "shear_modulus", "=", "30.0", ")", ":", "# Calculate moment rate in N-m / year", "moment_rate", "=", "(", "shear_modulus", "*", "1.E9", ")", "*", "(", "area", "*", "1.E6", ")", "*", "(", "slip", "/", "1000.", ")", "# Get Youngs & Coppersmith rate from", "# youngs_coppersmith.YoungsCoppersmith1985MFD.from_total_moment_rate", "self", ".", "model", "=", "YoungsCoppersmith1985MFD", ".", "from_total_moment_rate", "(", "self", ".", "mmin", "-", "(", "self", ".", "bin_width", "/", "2.", ")", ",", "self", ".", "b_value", ",", "self", ".", "mmax", "-", "0.25", ",", "moment_rate", ",", "self", ".", "bin_width", ")", "temp_data", "=", "self", ".", "model", ".", "get_annual_occurrence_rates", "(", ")", "self", ".", "occurrence_rate", "=", "np", ".", "array", "(", "[", "value", "[", "1", "]", "for", "value", "in", "temp_data", "]", ")", "self", ".", "mmin", "=", "np", ".", "min", "(", "np", ".", "array", "(", "[", "value", "[", "0", "]", "for", "value", "in", "temp_data", "]", ")", ")", "return", "self", ".", "mmin", ",", "self", ".", "bin_width", ",", "self", ".", "occurrence_rate" ]
Calculates activity rate on the fault :param float slip: Slip rate in mm/yr :param area: Area of the fault (km ** 2.) :param float shear_modulus: Shear modulus of the fault (GPa) :returns: * Minimum Magnitude (float) * Bin width (float) * Occurrence Rates (numpy.ndarray) Behavioural Notes: To use the openquake.hazardlib implementation the magnitudes returned will be the mid_point of the bins and not the original edge points. The minimum magnitude is updated to reflect this!
[ "Calculates", "activity", "rate", "on", "the", "fault" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/faults/mfd/youngs_coppersmith.py#L309-L344
gem/oq-engine
openquake/hazardlib/gsim/can15/sinter.py
SInterCan15Mid.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. """ mean = self._get_mean(sites, rup, dists, imt, stddev_types) stddevs = [np.ones(len(dists.rrup))*get_sigma(imt)] return mean, stddevs
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): mean = self._get_mean(sites, rup, dists, imt, stddev_types) stddevs = [np.ones(len(dists.rrup))*get_sigma(imt)] return mean, stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "mean", "=", "self", ".", "_get_mean", "(", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", "stddevs", "=", "[", "np", ".", "ones", "(", "len", "(", "dists", ".", "rrup", ")", ")", "*", "get_sigma", "(", "imt", ")", "]", "return", "mean", ",", "stddevs" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/can15/sinter.py#L48-L56
gem/oq-engine
openquake/hazardlib/gsim/can15/sinter.py
SInterCan15Mid._get_mean
def _get_mean(self, sites, rup, dists, imt, stddev_types): """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. """ # Zhao et al. 2006 - Vs30 + Rrup mean_zh06, stds1 = super().get_mean_and_stddevs(sites, rup, dists, imt, stddev_types) # # Atkinson and Macias (2009) - Rrup gmpe = AtkinsonMacias2009() mean_am09, stds2 = gmpe.get_mean_and_stddevs(sites, rup, dists, imt, stddev_types) # # Abrahamson et al. (2015) - Rrup + vs30 + backarc gmpe = AbrahamsonEtAl2015SInter() mean_ab15, stds3 = gmpe.get_mean_and_stddevs(sites, rup, dists, imt, stddev_types) # # Ghofrani and Atkinson (2014) - Rrup + vs30 gmpe = GhofraniAtkinson2014() mean_ga14, stds4 = gmpe.get_mean_and_stddevs(sites, rup, dists, imt, stddev_types) # Computing adjusted mean and stds cff = self.SITE_COEFFS[imt] mean_adj = (np.log(np.exp(mean_zh06)*cff['mf'])*0.1 + mean_am09*0.5 + mean_ab15*0.2 + np.log(np.exp(mean_ga14)*cff['mf'])*0.2) return mean_adj
python
def _get_mean(self, sites, rup, dists, imt, stddev_types): mean_zh06, stds1 = super().get_mean_and_stddevs(sites, rup, dists, imt, stddev_types) gmpe = AtkinsonMacias2009() mean_am09, stds2 = gmpe.get_mean_and_stddevs(sites, rup, dists, imt, stddev_types) gmpe = AbrahamsonEtAl2015SInter() mean_ab15, stds3 = gmpe.get_mean_and_stddevs(sites, rup, dists, imt, stddev_types) gmpe = GhofraniAtkinson2014() mean_ga14, stds4 = gmpe.get_mean_and_stddevs(sites, rup, dists, imt, stddev_types) cff = self.SITE_COEFFS[imt] mean_adj = (np.log(np.exp(mean_zh06)*cff['mf'])*0.1 + mean_am09*0.5 + mean_ab15*0.2 + np.log(np.exp(mean_ga14)*cff['mf'])*0.2) return mean_adj
[ "def", "_get_mean", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "# Zhao et al. 2006 - Vs30 + Rrup", "mean_zh06", ",", "stds1", "=", "super", "(", ")", ".", "get_mean_and_stddevs", "(", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", "#", "# Atkinson and Macias (2009) - Rrup", "gmpe", "=", "AtkinsonMacias2009", "(", ")", "mean_am09", ",", "stds2", "=", "gmpe", ".", "get_mean_and_stddevs", "(", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", "#", "# Abrahamson et al. (2015) - Rrup + vs30 + backarc", "gmpe", "=", "AbrahamsonEtAl2015SInter", "(", ")", "mean_ab15", ",", "stds3", "=", "gmpe", ".", "get_mean_and_stddevs", "(", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", "#", "# Ghofrani and Atkinson (2014) - Rrup + vs30", "gmpe", "=", "GhofraniAtkinson2014", "(", ")", "mean_ga14", ",", "stds4", "=", "gmpe", ".", "get_mean_and_stddevs", "(", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", "# Computing adjusted mean and stds", "cff", "=", "self", ".", "SITE_COEFFS", "[", "imt", "]", "mean_adj", "=", "(", "np", ".", "log", "(", "np", ".", "exp", "(", "mean_zh06", ")", "*", "cff", "[", "'mf'", "]", ")", "*", "0.1", "+", "mean_am09", "*", "0.5", "+", "mean_ab15", "*", "0.2", "+", "np", ".", "log", "(", "np", ".", "exp", "(", "mean_ga14", ")", "*", "cff", "[", "'mf'", "]", ")", "*", "0.2", ")", "return", "mean_adj" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/can15/sinter.py#L58-L87
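The blend in _get_mean weights four models 0.1/0.5/0.2/0.2 in natural-log space, with the site multiplier mf folded into two of them before weighting; note that np.log(np.exp(m) * mf) is just m + log(mf). The arithmetic in isolation, with invented log-means and coefficient:

import numpy as np

# Hypothetical natural-log means from the four models at one site
mean_zh06, mean_am09 = np.array([-1.0]), np.array([-1.2])
mean_ab15, mean_ga14 = np.array([-0.9]), np.array([-1.1])
mf = 1.1  # hypothetical site coefficient for Zhao06 and GA14

mean_adj = (np.log(np.exp(mean_zh06) * mf) * 0.1 +
            mean_am09 * 0.5 +
            mean_ab15 * 0.2 +
            np.log(np.exp(mean_ga14) * mf) * 0.2)
print(mean_adj)  # weighted mean, still in natural-log units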
gem/oq-engine
openquake/hazardlib/geo/surface/base.py
_find_turning_points
def _find_turning_points(mesh, tol=1.0): """ Identifies the turning points in a rectangular mesh based on the deviation in the azimuth between successive points on the upper edge. A turning point is flagged if the change in azimuth is greater than the specified tolerance (in degrees) :param mesh: Mesh for downsampling as instance of :class: openquake.hazardlib.geo.mesh.RectangularMesh :param float tol: Maximum difference in azimuth (decimal degrees) between successive points to identify a turning point :returns: Column indices of turning points (as numpy array) """ assert isinstance(mesh, RectangularMesh) azimuths = geodetic.azimuth(mesh.lons[0, :-1], mesh.lats[0, :-1], mesh.lons[0, 1:], mesh.lats[0, 1:]) naz = len(azimuths) azim = azimuths[0] # Retain initial point idx = [0] for i in range(1, naz): if numpy.fabs(azimuths[i] - azim) > tol: idx.append(i) azim = azimuths[i] # Add on last point - if not already in the set if idx[-1] != mesh.lons.shape[1] - 1: idx.append(mesh.lons.shape[1] - 1) return numpy.array(idx)
python
def _find_turning_points(mesh, tol=1.0): assert isinstance(mesh, RectangularMesh) azimuths = geodetic.azimuth(mesh.lons[0, :-1], mesh.lats[0, :-1], mesh.lons[0, 1:], mesh.lats[0, 1:]) naz = len(azimuths) azim = azimuths[0] idx = [0] for i in range(1, naz): if numpy.fabs(azimuths[i] - azim) > tol: idx.append(i) azim = azimuths[i] if idx[-1] != mesh.lons.shape[1] - 1: idx.append(mesh.lons.shape[1] - 1) return numpy.array(idx)
[ "def", "_find_turning_points", "(", "mesh", ",", "tol", "=", "1.0", ")", ":", "assert", "isinstance", "(", "mesh", ",", "RectangularMesh", ")", "azimuths", "=", "geodetic", ".", "azimuth", "(", "mesh", ".", "lons", "[", "0", ",", ":", "-", "1", "]", ",", "mesh", ".", "lats", "[", "0", ",", ":", "-", "1", "]", ",", "mesh", ".", "lons", "[", "0", ",", "1", ":", "]", ",", "mesh", ".", "lats", "[", "0", ",", "1", ":", "]", ")", "naz", "=", "len", "(", "azimuths", ")", "azim", "=", "azimuths", "[", "0", "]", "# Retain initial point", "idx", "=", "[", "0", "]", "for", "i", "in", "range", "(", "1", ",", "naz", ")", ":", "if", "numpy", ".", "fabs", "(", "azimuths", "[", "i", "]", "-", "azim", ")", ">", "tol", ":", "idx", ".", "append", "(", "i", ")", "azim", "=", "azimuths", "[", "i", "]", "# Add on last point - if not already in the set", "if", "idx", "[", "-", "1", "]", "!=", "mesh", ".", "lons", ".", "shape", "[", "1", "]", "-", "1", ":", "idx", ".", "append", "(", "mesh", ".", "lons", ".", "shape", "[", "1", "]", "-", "1", ")", "return", "numpy", ".", "array", "(", "idx", ")" ]
Identifies the turning points in a rectangular mesh based on the deviation in the azimuth between successive points on the upper edge. A turning point is flagged if the change in azimuth is greater than the specified tolerance (in degrees) :param mesh: Mesh for downsampling as instance of :class: openquake.hazardlib.geo.mesh.RectangularMesh :param float tol: Maximum difference in azimuth (decimal degrees) between successive points to identify a turning point :returns: Column indices of turning points (as numpy array)
[ "Identifies", "the", "turning", "points", "in", "a", "rectangular", "mesh", "based", "on", "the", "deviation", "in", "the", "azimuth", "between", "successive", "points", "on", "the", "upper", "edge", ".", "A", "turning", "point", "is", "flagged", "if", "the", "change", "in", "azimuth", "change", "is", "greater", "than", "the", "specified", "tolerance", "(", "in", "degrees", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L29-L61
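A small usage sketch for the turning-point logic, assuming openquake.hazardlib is importable: a one-row mesh whose trace runs due east and then turns due north. Only the first point, the corner and the last point survive downsampling.

import numpy
from openquake.hazardlib.geo.mesh import RectangularMesh
from openquake.hazardlib.geo.surface.base import downsample_trace

# Trace heads east for three points, then turns north at (0.2, 0.0)
lons = numpy.array([[0.0, 0.1, 0.2, 0.2, 0.2]])
lats = numpy.array([[0.0, 0.0, 0.0, 0.1, 0.2]])
mesh = RectangularMesh(lons, lats, depths=None)

print(downsample_trace(mesh, tol=1.0))
# [[0.  0. ], [0.2 0. ], [0.2 0.2]] -> first point, corner, last point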
gem/oq-engine
openquake/hazardlib/geo/surface/base.py
downsample_mesh
def downsample_mesh(mesh, tol=1.0): """ Returns a mesh sampled at a lower resolution - if the difference in azimuth is larger than the specified tolerance a turn is assumed :returns: Downsampled mesh as instance of :class: openquake.hazardlib.geo.mesh.RectangularMesh """ idx = _find_turning_points(mesh, tol) if mesh.depths is not None: return RectangularMesh(lons=mesh.lons[:, idx], lats=mesh.lats[:, idx], depths=mesh.depths[:, idx]) else: return RectangularMesh(lons=mesh.lons[:, idx], lats=mesh.lats[:, idx])
python
def downsample_mesh(mesh, tol=1.0): idx = _find_turning_points(mesh, tol) if mesh.depths is not None: return RectangularMesh(lons=mesh.lons[:, idx], lats=mesh.lats[:, idx], depths=mesh.depths[:, idx]) else: return RectangularMesh(lons=mesh.lons[:, idx], lats=mesh.lats[:, idx])
[ "def", "downsample_mesh", "(", "mesh", ",", "tol", "=", "1.0", ")", ":", "idx", "=", "_find_turning_points", "(", "mesh", ",", "tol", ")", "if", "mesh", ".", "depths", "is", "not", "None", ":", "return", "RectangularMesh", "(", "lons", "=", "mesh", ".", "lons", "[", ":", ",", "idx", "]", ",", "lats", "=", "mesh", ".", "lats", "[", ":", ",", "idx", "]", ",", "depths", "=", "mesh", ".", "depths", "[", ":", ",", "idx", "]", ")", "else", ":", "return", "RectangularMesh", "(", "lons", "=", "mesh", ".", "lons", "[", ":", ",", "idx", "]", ",", "lats", "=", "mesh", ".", "lats", "[", ":", ",", "idx", "]", ")" ]
Returns a mesh sampled at a lower resolution - if the difference in azimuth is larger than the specified tolerance a turn is assumed :returns: Downsampled mesh as instance of :class: openquake.hazardlib.geo.mesh.RectangularMesh
[ "Returns", "a", "mesh", "sampled", "at", "a", "lower", "resolution", "-", "if", "the", "difference", "in", "azimuth", "is", "larger", "than", "the", "specified", "tolerance", "a", "turn", "is", "assumed" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L64-L80
gem/oq-engine
openquake/hazardlib/geo/surface/base.py
downsample_trace
def downsample_trace(mesh, tol=1.0): """ Downsamples the upper edge of a fault within a rectangular mesh, retaining node points only if changes in direction on the order of tol are found :returns: Downsampled edge as a numpy array of [long, lat, depth] """ idx = _find_turning_points(mesh, tol) if mesh.depths is not None: return numpy.column_stack([mesh.lons[0, idx], mesh.lats[0, idx], mesh.depths[0, idx]]) else: return numpy.column_stack([mesh.lons[0, idx], mesh.lats[0, idx]])
python
def downsample_trace(mesh, tol=1.0): idx = _find_turning_points(mesh, tol) if mesh.depths is not None: return numpy.column_stack([mesh.lons[0, idx], mesh.lats[0, idx], mesh.depths[0, idx]]) else: return numpy.column_stack([mesh.lons[0, idx], mesh.lats[0, idx]])
[ "def", "downsample_trace", "(", "mesh", ",", "tol", "=", "1.0", ")", ":", "idx", "=", "_find_turning_points", "(", "mesh", ",", "tol", ")", "if", "mesh", ".", "depths", "is", "not", "None", ":", "return", "numpy", ".", "column_stack", "(", "[", "mesh", ".", "lons", "[", "0", ",", "idx", "]", ",", "mesh", ".", "lats", "[", "0", ",", "idx", "]", ",", "mesh", ".", "depths", "[", "0", ",", "idx", "]", "]", ")", "else", ":", "return", "numpy", ".", "column_stack", "(", "[", "mesh", ".", "lons", "[", "0", ",", "idx", "]", ",", "mesh", ".", "lats", "[", "0", ",", "idx", "]", "]", ")" ]
Downsamples the upper edge of a fault within a rectangular mesh, retaining node points only if changes in direction on the order of tol are found :returns: Downsampled edge as a numpy array of [long, lat, depth]
[ "Downsamples", "the", "upper", "edge", "of", "a", "fault", "within", "a", "rectangular", "mesh", "retaining", "node", "points", "only", "if", "changes", "in", "direction", "on", "the", "order", "of", "tol", "are", "found" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L83-L97
gem/oq-engine
openquake/hazardlib/geo/surface/base.py
BaseSurface.get_ry0_distance
def get_ry0_distance(self, mesh): """ Compute the minimum distance between each point of a mesh and the great circle arcs perpendicular to the average strike direction of the fault trace and passing through the end-points of the trace. :param mesh: :class:`~openquake.hazardlib.geo.mesh.Mesh` of points to calculate Ry0-distance to. :returns: Numpy array of distances in km. """ # This computes ry0 by using an average strike direction top_edge = self.mesh[0:1] mean_strike = self.get_strike() dst1 = geodetic.distance_to_arc(top_edge.lons[0, 0], top_edge.lats[0, 0], (mean_strike + 90.) % 360, mesh.lons, mesh.lats) dst2 = geodetic.distance_to_arc(top_edge.lons[0, -1], top_edge.lats[0, -1], (mean_strike + 90.) % 360, mesh.lons, mesh.lats) # Find the points on the rupture # Get the shortest distance from the two lines idx = numpy.sign(dst1) == numpy.sign(dst2) dst = numpy.zeros_like(dst1) dst[idx] = numpy.fmin(numpy.abs(dst1[idx]), numpy.abs(dst2[idx])) return dst
python
def get_ry0_distance(self, mesh): top_edge = self.mesh[0:1] mean_strike = self.get_strike() dst1 = geodetic.distance_to_arc(top_edge.lons[0, 0], top_edge.lats[0, 0], (mean_strike + 90.) % 360, mesh.lons, mesh.lats) dst2 = geodetic.distance_to_arc(top_edge.lons[0, -1], top_edge.lats[0, -1], (mean_strike + 90.) % 360, mesh.lons, mesh.lats) idx = numpy.sign(dst1) == numpy.sign(dst2) dst = numpy.zeros_like(dst1) dst[idx] = numpy.fmin(numpy.abs(dst1[idx]), numpy.abs(dst2[idx])) return dst
[ "def", "get_ry0_distance", "(", "self", ",", "mesh", ")", ":", "# This computes ry0 by using an average strike direction", "top_edge", "=", "self", ".", "mesh", "[", "0", ":", "1", "]", "mean_strike", "=", "self", ".", "get_strike", "(", ")", "dst1", "=", "geodetic", ".", "distance_to_arc", "(", "top_edge", ".", "lons", "[", "0", ",", "0", "]", ",", "top_edge", ".", "lats", "[", "0", ",", "0", "]", ",", "(", "mean_strike", "+", "90.", ")", "%", "360", ",", "mesh", ".", "lons", ",", "mesh", ".", "lats", ")", "dst2", "=", "geodetic", ".", "distance_to_arc", "(", "top_edge", ".", "lons", "[", "0", ",", "-", "1", "]", ",", "top_edge", ".", "lats", "[", "0", ",", "-", "1", "]", ",", "(", "mean_strike", "+", "90.", ")", "%", "360", ",", "mesh", ".", "lons", ",", "mesh", ".", "lats", ")", "# Find the points on the rupture", "# Get the shortest distance from the two lines", "idx", "=", "numpy", ".", "sign", "(", "dst1", ")", "==", "numpy", ".", "sign", "(", "dst2", ")", "dst", "=", "numpy", ".", "zeros_like", "(", "dst1", ")", "dst", "[", "idx", "]", "=", "numpy", ".", "fmin", "(", "numpy", ".", "abs", "(", "dst1", "[", "idx", "]", ")", ",", "numpy", ".", "abs", "(", "dst2", "[", "idx", "]", ")", ")", "return", "dst" ]
Compute the minimum distance between each point of a mesh and the great circle arcs perpendicular to the average strike direction of the fault trace and passing through the end-points of the trace. :param mesh: :class:`~openquake.hazardlib.geo.mesh.Mesh` of points to calculate Ry0-distance to. :returns: Numpy array of distances in km.
[ "Compute", "the", "minimum", "distance", "between", "each", "point", "of", "a", "mesh", "and", "the", "great", "circle", "arcs", "perpendicular", "to", "the", "average", "strike", "direction", "of", "the", "fault", "trace", "and", "passing", "through", "the", "end", "-", "points", "of", "the", "trace", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L148-L180
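A toy check of the sign logic in `get_ry0_distance`: a site lying between the two end-point arcs gets opposite-sign distances and hence Ry0 = 0, while a site beyond an end point takes the smaller absolute distance (numbers made up, no geodetic calls involved):

import numpy as np

# Signed distances (km) of four sites from the two end-point arcs.
dst1 = np.array([-3.0, 2.0, 5.0, -1.0])
dst2 = np.array([4.0, 9.0, 1.0, -6.0])

idx = np.sign(dst1) == np.sign(dst2)   # True only for sites beyond an end point
dst = np.zeros_like(dst1)              # sites between the arcs keep Ry0 = 0
dst[idx] = np.fmin(np.abs(dst1[idx]), np.abs(dst2[idx]))
print(dst)  # [0. 2. 1. 1.]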
gem/oq-engine
openquake/hazardlib/geo/surface/base.py
BaseSurface.get_rx_distance
def get_rx_distance(self, mesh): """ Compute distance between each point of mesh and surface's great circle arc. Distance is measured perpendicular to the rupture strike, from the surface projection of the updip edge of the rupture, with the down dip direction being positive (this distance is usually called ``Rx``). In other words, is the horizontal distance to top edge of rupture measured perpendicular to the strike. Values on the hanging wall are positive, values on the footwall are negative. :param mesh: :class:`~openquake.hazardlib.geo.mesh.Mesh` of points to calculate Rx-distance to. :returns: Numpy array of distances in km. """ top_edge = self.mesh[0:1] dists = [] if top_edge.lons.shape[1] < 3: i = 0 p1 = Point( top_edge.lons[0, i], top_edge.lats[0, i], top_edge.depths[0, i] ) p2 = Point( top_edge.lons[0, i + 1], top_edge.lats[0, i + 1], top_edge.depths[0, i + 1] ) azimuth = p1.azimuth(p2) dists.append( geodetic.distance_to_arc( p1.longitude, p1.latitude, azimuth, mesh.lons, mesh.lats ) ) else: for i in range(top_edge.lons.shape[1] - 1): p1 = Point( top_edge.lons[0, i], top_edge.lats[0, i], top_edge.depths[0, i] ) p2 = Point( top_edge.lons[0, i + 1], top_edge.lats[0, i + 1], top_edge.depths[0, i + 1] ) # Swapping if i == 0: pt = p1 p1 = p2 p2 = pt # Computing azimuth and distance if i == 0 or i == top_edge.lons.shape[1] - 2: azimuth = p1.azimuth(p2) tmp = geodetic.distance_to_semi_arc(p1.longitude, p1.latitude, azimuth, mesh.lons, mesh.lats) else: tmp = geodetic.min_distance_to_segment( numpy.array([p1.longitude, p2.longitude]), numpy.array([p1.latitude, p2.latitude]), mesh.lons, mesh.lats) # Correcting the sign of the distance if i == 0: tmp *= -1 dists.append(tmp) # Computing distances dists = numpy.array(dists) iii = abs(dists).argmin(axis=0) dst = dists[iii, list(range(dists.shape[1]))] return dst
python
def get_rx_distance(self, mesh): top_edge = self.mesh[0:1] dists = [] if top_edge.lons.shape[1] < 3: i = 0 p1 = Point( top_edge.lons[0, i], top_edge.lats[0, i], top_edge.depths[0, i] ) p2 = Point( top_edge.lons[0, i + 1], top_edge.lats[0, i + 1], top_edge.depths[0, i + 1] ) azimuth = p1.azimuth(p2) dists.append( geodetic.distance_to_arc( p1.longitude, p1.latitude, azimuth, mesh.lons, mesh.lats ) ) else: for i in range(top_edge.lons.shape[1] - 1): p1 = Point( top_edge.lons[0, i], top_edge.lats[0, i], top_edge.depths[0, i] ) p2 = Point( top_edge.lons[0, i + 1], top_edge.lats[0, i + 1], top_edge.depths[0, i + 1] ) if i == 0: pt = p1 p1 = p2 p2 = pt if i == 0 or i == top_edge.lons.shape[1] - 2: azimuth = p1.azimuth(p2) tmp = geodetic.distance_to_semi_arc(p1.longitude, p1.latitude, azimuth, mesh.lons, mesh.lats) else: tmp = geodetic.min_distance_to_segment( numpy.array([p1.longitude, p2.longitude]), numpy.array([p1.latitude, p2.latitude]), mesh.lons, mesh.lats) if i == 0: tmp *= -1 dists.append(tmp) dists = numpy.array(dists) iii = abs(dists).argmin(axis=0) dst = dists[iii, list(range(dists.shape[1]))] return dst
[ "def", "get_rx_distance", "(", "self", ",", "mesh", ")", ":", "top_edge", "=", "self", ".", "mesh", "[", "0", ":", "1", "]", "dists", "=", "[", "]", "if", "top_edge", ".", "lons", ".", "shape", "[", "1", "]", "<", "3", ":", "i", "=", "0", "p1", "=", "Point", "(", "top_edge", ".", "lons", "[", "0", ",", "i", "]", ",", "top_edge", ".", "lats", "[", "0", ",", "i", "]", ",", "top_edge", ".", "depths", "[", "0", ",", "i", "]", ")", "p2", "=", "Point", "(", "top_edge", ".", "lons", "[", "0", ",", "i", "+", "1", "]", ",", "top_edge", ".", "lats", "[", "0", ",", "i", "+", "1", "]", ",", "top_edge", ".", "depths", "[", "0", ",", "i", "+", "1", "]", ")", "azimuth", "=", "p1", ".", "azimuth", "(", "p2", ")", "dists", ".", "append", "(", "geodetic", ".", "distance_to_arc", "(", "p1", ".", "longitude", ",", "p1", ".", "latitude", ",", "azimuth", ",", "mesh", ".", "lons", ",", "mesh", ".", "lats", ")", ")", "else", ":", "for", "i", "in", "range", "(", "top_edge", ".", "lons", ".", "shape", "[", "1", "]", "-", "1", ")", ":", "p1", "=", "Point", "(", "top_edge", ".", "lons", "[", "0", ",", "i", "]", ",", "top_edge", ".", "lats", "[", "0", ",", "i", "]", ",", "top_edge", ".", "depths", "[", "0", ",", "i", "]", ")", "p2", "=", "Point", "(", "top_edge", ".", "lons", "[", "0", ",", "i", "+", "1", "]", ",", "top_edge", ".", "lats", "[", "0", ",", "i", "+", "1", "]", ",", "top_edge", ".", "depths", "[", "0", ",", "i", "+", "1", "]", ")", "# Swapping", "if", "i", "==", "0", ":", "pt", "=", "p1", "p1", "=", "p2", "p2", "=", "pt", "# Computing azimuth and distance", "if", "i", "==", "0", "or", "i", "==", "top_edge", ".", "lons", ".", "shape", "[", "1", "]", "-", "2", ":", "azimuth", "=", "p1", ".", "azimuth", "(", "p2", ")", "tmp", "=", "geodetic", ".", "distance_to_semi_arc", "(", "p1", ".", "longitude", ",", "p1", ".", "latitude", ",", "azimuth", ",", "mesh", ".", "lons", ",", "mesh", ".", "lats", ")", "else", ":", "tmp", "=", "geodetic", ".", "min_distance_to_segment", "(", "numpy", ".", "array", "(", "[", "p1", ".", "longitude", ",", "p2", ".", "longitude", "]", ")", ",", "numpy", ".", "array", "(", "[", "p1", ".", "latitude", ",", "p2", ".", "latitude", "]", ")", ",", "mesh", ".", "lons", ",", "mesh", ".", "lats", ")", "# Correcting the sign of the distance", "if", "i", "==", "0", ":", "tmp", "*=", "-", "1", "dists", ".", "append", "(", "tmp", ")", "# Computing distances", "dists", "=", "numpy", ".", "array", "(", "dists", ")", "iii", "=", "abs", "(", "dists", ")", ".", "argmin", "(", "axis", "=", "0", ")", "dst", "=", "dists", "[", "iii", ",", "list", "(", "range", "(", "dists", ".", "shape", "[", "1", "]", ")", ")", "]", "return", "dst" ]
Compute distance between each point of mesh and surface's great circle arc. Distance is measured perpendicular to the rupture strike, from the surface projection of the updip edge of the rupture, with the down dip direction being positive (this distance is usually called ``Rx``). In other words, it is the horizontal distance to the top edge of the rupture, measured perpendicular to the strike. Values on the hanging wall are positive, values on the footwall are negative. :param mesh: :class:`~openquake.hazardlib.geo.mesh.Mesh` of points to calculate Rx-distance to. :returns: Numpy array of distances in km.
[ "Compute", "distance", "between", "each", "point", "of", "mesh", "and", "surface", "s", "great", "circle", "arc", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L182-L266
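The last step of `get_rx_distance` — one signed distance per top-edge segment, keep the smallest-magnitude one per site — reduces to the numpy idiom below (toy data; the geodetic distance functions are not reproduced):

import numpy as np

# dists[i, j]: signed distance of site j from segment i of the top edge.
dists = np.array([[5.0, -2.0, 7.0],
                  [-1.0, 4.0, 3.0]])

iii = np.abs(dists).argmin(axis=0)           # index of the closest segment per site
dst = dists[iii, np.arange(dists.shape[1])]  # keep its signed value
print(dst)  # [-1. -2.  3.]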
gem/oq-engine
openquake/hazardlib/geo/surface/base.py
BaseSurface.get_top_edge_depth
def get_top_edge_depth(self): """ Return minimum depth of surface's top edge. :returns: Float value, the vertical distance between the earth surface and the shallowest point in surface's top edge in km. """ top_edge = self.mesh[0:1] if top_edge.depths is None: return 0 else: return numpy.min(top_edge.depths)
python
def get_top_edge_depth(self): top_edge = self.mesh[0:1] if top_edge.depths is None: return 0 else: return numpy.min(top_edge.depths)
[ "def", "get_top_edge_depth", "(", "self", ")", ":", "top_edge", "=", "self", ".", "mesh", "[", "0", ":", "1", "]", "if", "top_edge", ".", "depths", "is", "None", ":", "return", "0", "else", ":", "return", "numpy", ".", "min", "(", "top_edge", ".", "depths", ")" ]
Return minimum depth of surface's top edge. :returns: Float value, the vertical distance between the earth surface and the shallowest point in surface's top edge in km.
[ "Return", "minimum", "depth", "of", "surface", "s", "top", "edge", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L268-L280
gem/oq-engine
openquake/hazardlib/geo/surface/base.py
BaseSurface.get_area
def get_area(self): """ Compute area as the sum of the mesh cells area values. """ mesh = self.mesh _, _, _, area = mesh.get_cell_dimensions() return numpy.sum(area)
python
def get_area(self): mesh = self.mesh _, _, _, area = mesh.get_cell_dimensions() return numpy.sum(area)
[ "def", "get_area", "(", "self", ")", ":", "mesh", "=", "self", ".", "mesh", "_", ",", "_", ",", "_", ",", "area", "=", "mesh", ".", "get_cell_dimensions", "(", ")", "return", "numpy", ".", "sum", "(", "area", ")" ]
Compute area as the sum of the mesh cells area values.
[ "Compute", "area", "as", "the", "sum", "of", "the", "mesh", "cells", "area", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L290-L297
gem/oq-engine
openquake/hazardlib/geo/surface/base.py
BaseSurface.get_bounding_box
def get_bounding_box(self): """ Compute surface bounding box from surface mesh representation. That is extract longitudes and latitudes of mesh points and calls: :meth:`openquake.hazardlib.geo.utils.get_spherical_bounding_box` :return: A tuple of four items. These items represent western, eastern, northern and southern borders of the bounding box respectively. Values are floats in decimal degrees. """ mesh = self.mesh return utils.get_spherical_bounding_box(mesh.lons, mesh.lats)
python
def get_bounding_box(self): mesh = self.mesh return utils.get_spherical_bounding_box(mesh.lons, mesh.lats)
[ "def", "get_bounding_box", "(", "self", ")", ":", "mesh", "=", "self", ".", "mesh", "return", "utils", ".", "get_spherical_bounding_box", "(", "mesh", ".", "lons", ",", "mesh", ".", "lats", ")" ]
Compute surface bounding box from surface mesh representation. That is, it extracts longitudes and latitudes of mesh points and calls: :meth:`openquake.hazardlib.geo.utils.get_spherical_bounding_box` :return: A tuple of four items. These items represent western, eastern, northern and southern borders of the bounding box respectively. Values are floats in decimal degrees.
[ "Compute", "surface", "bounding", "box", "from", "surface", "mesh", "representation", ".", "That", "is", "extract", "longitudes", "and", "latitudes", "of", "mesh", "points", "and", "calls", ":", ":", "meth", ":", "openquake", ".", "hazardlib", ".", "geo", ".", "utils", ".", "get_spherical_bounding_box" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L299-L311
gem/oq-engine
openquake/hazardlib/geo/surface/base.py
BaseSurface.get_surface_boundaries
def get_surface_boundaries(self): """ Returns the boundaries in the same format as a multiplanar surface, with two one-element lists of lons and lats """ mesh = self.mesh lons = numpy.concatenate((mesh.lons[0, :], mesh.lons[1:, -1], mesh.lons[-1, :-1][::-1], mesh.lons[:-1, 0][::-1])) lats = numpy.concatenate((mesh.lats[0, :], mesh.lats[1:, -1], mesh.lats[-1, :-1][::-1], mesh.lats[:-1, 0][::-1])) return [lons], [lats]
python
def get_surface_boundaries(self): mesh = self.mesh lons = numpy.concatenate((mesh.lons[0, :], mesh.lons[1:, -1], mesh.lons[-1, :-1][::-1], mesh.lons[:-1, 0][::-1])) lats = numpy.concatenate((mesh.lats[0, :], mesh.lats[1:, -1], mesh.lats[-1, :-1][::-1], mesh.lats[:-1, 0][::-1])) return [lons], [lats]
[ "def", "get_surface_boundaries", "(", "self", ")", ":", "mesh", "=", "self", ".", "mesh", "lons", "=", "numpy", ".", "concatenate", "(", "(", "mesh", ".", "lons", "[", "0", ",", ":", "]", ",", "mesh", ".", "lons", "[", "1", ":", ",", "-", "1", "]", ",", "mesh", ".", "lons", "[", "-", "1", ",", ":", "-", "1", "]", "[", ":", ":", "-", "1", "]", ",", "mesh", ".", "lons", "[", ":", "-", "1", ",", "0", "]", "[", ":", ":", "-", "1", "]", ")", ")", "lats", "=", "numpy", ".", "concatenate", "(", "(", "mesh", ".", "lats", "[", "0", ",", ":", "]", ",", "mesh", ".", "lats", "[", "1", ":", ",", "-", "1", "]", ",", "mesh", ".", "lats", "[", "-", "1", ",", ":", "-", "1", "]", "[", ":", ":", "-", "1", "]", ",", "mesh", ".", "lats", "[", ":", "-", "1", ",", "0", "]", "[", ":", ":", "-", "1", "]", ")", ")", "return", "[", "lons", "]", ",", "[", "lats", "]" ]
Returns the boundaries in the same format as a multiplanar surface, with two one-element lists of lons and lats
[ "Returns", "the", "boundaries", "in", "the", "same", "format", "as", "a", "multiplanar", "surface", "with", "two", "one", "-", "element", "lists", "of", "lons", "and", "lats" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L326-L340
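The concatenation in `get_surface_boundaries` walks the grid perimeter clockwise: top row, right column, bottom row reversed, left column reversed, ending back at the starting corner. A standalone check of that walk on a 3x4 grid of plain indices (assumed shapes only, no mesh object):

import numpy as np

grid = np.arange(12).reshape(3, 4)               # stand-in for mesh.lons or mesh.lats
perimeter = np.concatenate((grid[0, :],          # top edge, left to right
                            grid[1:, -1],        # right edge, downwards
                            grid[-1, :-1][::-1], # bottom edge, right to left
                            grid[:-1, 0][::-1])) # left edge, back up to the start
print(perimeter)  # [ 0  1  2  3  7 11 10  9  8  4  0]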
gem/oq-engine
openquake/hazardlib/geo/surface/base.py
BaseSurface.get_resampled_top_edge
def get_resampled_top_edge(self, angle_var=0.1): """ This methods computes a simplified representation of a fault top edge by removing the points that are not describing a change of direction, provided a certain tolerance angle. :param float angle_var: Number representing the maximum deviation (in degrees) admitted without the creation of a new segment :returns: A :class:`~openquake.hazardlib.geo.line.Line` representing the rupture surface's top edge. """ mesh = self.mesh top_edge = [Point(mesh.lons[0][0], mesh.lats[0][0], mesh.depths[0][0])] for i in range(len(mesh.triangulate()[1][0]) - 1): v1 = numpy.asarray(mesh.triangulate()[1][0][i]) v2 = numpy.asarray(mesh.triangulate()[1][0][i + 1]) cosang = numpy.dot(v1, v2) sinang = numpy.linalg.norm(numpy.cross(v1, v2)) angle = math.degrees(numpy.arctan2(sinang, cosang)) if abs(angle) > angle_var: top_edge.append(Point(mesh.lons[0][i + 1], mesh.lats[0][i + 1], mesh.depths[0][i + 1])) top_edge.append(Point(mesh.lons[0][-1], mesh.lats[0][-1], mesh.depths[0][-1])) line_top_edge = Line(top_edge) return line_top_edge
python
def get_resampled_top_edge(self, angle_var=0.1): mesh = self.mesh top_edge = [Point(mesh.lons[0][0], mesh.lats[0][0], mesh.depths[0][0])] for i in range(len(mesh.triangulate()[1][0]) - 1): v1 = numpy.asarray(mesh.triangulate()[1][0][i]) v2 = numpy.asarray(mesh.triangulate()[1][0][i + 1]) cosang = numpy.dot(v1, v2) sinang = numpy.linalg.norm(numpy.cross(v1, v2)) angle = math.degrees(numpy.arctan2(sinang, cosang)) if abs(angle) > angle_var: top_edge.append(Point(mesh.lons[0][i + 1], mesh.lats[0][i + 1], mesh.depths[0][i + 1])) top_edge.append(Point(mesh.lons[0][-1], mesh.lats[0][-1], mesh.depths[0][-1])) line_top_edge = Line(top_edge) return line_top_edge
[ "def", "get_resampled_top_edge", "(", "self", ",", "angle_var", "=", "0.1", ")", ":", "mesh", "=", "self", ".", "mesh", "top_edge", "=", "[", "Point", "(", "mesh", ".", "lons", "[", "0", "]", "[", "0", "]", ",", "mesh", ".", "lats", "[", "0", "]", "[", "0", "]", ",", "mesh", ".", "depths", "[", "0", "]", "[", "0", "]", ")", "]", "for", "i", "in", "range", "(", "len", "(", "mesh", ".", "triangulate", "(", ")", "[", "1", "]", "[", "0", "]", ")", "-", "1", ")", ":", "v1", "=", "numpy", ".", "asarray", "(", "mesh", ".", "triangulate", "(", ")", "[", "1", "]", "[", "0", "]", "[", "i", "]", ")", "v2", "=", "numpy", ".", "asarray", "(", "mesh", ".", "triangulate", "(", ")", "[", "1", "]", "[", "0", "]", "[", "i", "+", "1", "]", ")", "cosang", "=", "numpy", ".", "dot", "(", "v1", ",", "v2", ")", "sinang", "=", "numpy", ".", "linalg", ".", "norm", "(", "numpy", ".", "cross", "(", "v1", ",", "v2", ")", ")", "angle", "=", "math", ".", "degrees", "(", "numpy", ".", "arctan2", "(", "sinang", ",", "cosang", ")", ")", "if", "abs", "(", "angle", ")", ">", "angle_var", ":", "top_edge", ".", "append", "(", "Point", "(", "mesh", ".", "lons", "[", "0", "]", "[", "i", "+", "1", "]", ",", "mesh", ".", "lats", "[", "0", "]", "[", "i", "+", "1", "]", ",", "mesh", ".", "depths", "[", "0", "]", "[", "i", "+", "1", "]", ")", ")", "top_edge", ".", "append", "(", "Point", "(", "mesh", ".", "lons", "[", "0", "]", "[", "-", "1", "]", ",", "mesh", ".", "lats", "[", "0", "]", "[", "-", "1", "]", ",", "mesh", ".", "depths", "[", "0", "]", "[", "-", "1", "]", ")", ")", "line_top_edge", "=", "Line", "(", "top_edge", ")", "return", "line_top_edge" ]
This method computes a simplified representation of a fault top edge by removing the points that are not describing a change of direction, provided a certain tolerance angle. :param float angle_var: Number representing the maximum deviation (in degrees) admitted without the creation of a new segment :returns: A :class:`~openquake.hazardlib.geo.line.Line` representing the rupture surface's top edge.
[ "This", "method", "computes", "a", "simplified", "representation", "of", "a", "fault", "top", "edge", "by", "removing", "the", "points", "that", "are", "not", "describing", "a", "change", "of", "direction", "provided", "a", "certain", "tolerance", "angle", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L342-L375
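The direction-change test inside `get_resampled_top_edge` is the standard dot/cross formulation of the angle between two vectors; `atan2(|v1 x v2|, v1 . v2)` is numerically better behaved than `acos` of the dot product. A self-contained sketch on plain 3-D vectors (no mesh triangulation involved):

import math
import numpy as np

def angle_between(v1, v2):
    cosang = np.dot(v1, v2)                    # proportional to cos(angle)
    sinang = np.linalg.norm(np.cross(v1, v2))  # proportional to sin(angle)
    return math.degrees(np.arctan2(sinang, cosang))

print(angle_between([1.0, 0.0, 0.0], [1.0, 0.0, 0.0]))  # 0.0
print(angle_between([1.0, 0.0, 0.0], [1.0, 1.0, 0.0]))  # 45.0 (approximately)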
gem/oq-engine
openquake/hazardlib/geo/surface/base.py
BaseSurface.get_hypo_location
def get_hypo_location(self, mesh_spacing, hypo_loc=None): """ The method determines the location of the hypocentre within the rupture :param mesh: :class:`~openquake.hazardlib.geo.mesh.Mesh` of points :param mesh_spacing: The desired distance between two adjacent points in source's ruptures' mesh, in km. Mainly this parameter allows to balance the trade-off between time needed to compute the distance between the rupture surface and a site and the precision of that computation. :param hypo_loc: Hypocentre location as fraction of rupture plane, as a tuple of (Along Strike, Down Dip), e.g. a hypocentre located in the centroid of the rupture would be input as (0.5, 0.5), whereas a hypocentre located in a position 3/4 along the length, and 1/4 of the way down dip of the rupture plane would be entered as (0.75, 0.25). :returns: Hypocentre location as instance of :class:`~openquake.hazardlib.geo.point.Point` """ mesh = self.mesh centroid = mesh.get_middle_point() if hypo_loc is None: return centroid total_len_y = (len(mesh.depths) - 1) * mesh_spacing y_distance = hypo_loc[1] * total_len_y y_node = int(numpy.round(y_distance / mesh_spacing)) total_len_x = (len(mesh.lons[y_node]) - 1) * mesh_spacing x_distance = hypo_loc[0] * total_len_x x_node = int(numpy.round(x_distance / mesh_spacing)) hypocentre = Point(mesh.lons[y_node][x_node], mesh.lats[y_node][x_node], mesh.depths[y_node][x_node]) return hypocentre
python
def get_hypo_location(self, mesh_spacing, hypo_loc=None): mesh = self.mesh centroid = mesh.get_middle_point() if hypo_loc is None: return centroid total_len_y = (len(mesh.depths) - 1) * mesh_spacing y_distance = hypo_loc[1] * total_len_y y_node = int(numpy.round(y_distance / mesh_spacing)) total_len_x = (len(mesh.lons[y_node]) - 1) * mesh_spacing x_distance = hypo_loc[0] * total_len_x x_node = int(numpy.round(x_distance / mesh_spacing)) hypocentre = Point(mesh.lons[y_node][x_node], mesh.lats[y_node][x_node], mesh.depths[y_node][x_node]) return hypocentre
[ "def", "get_hypo_location", "(", "self", ",", "mesh_spacing", ",", "hypo_loc", "=", "None", ")", ":", "mesh", "=", "self", ".", "mesh", "centroid", "=", "mesh", ".", "get_middle_point", "(", ")", "if", "hypo_loc", "is", "None", ":", "return", "centroid", "total_len_y", "=", "(", "len", "(", "mesh", ".", "depths", ")", "-", "1", ")", "*", "mesh_spacing", "y_distance", "=", "hypo_loc", "[", "1", "]", "*", "total_len_y", "y_node", "=", "int", "(", "numpy", ".", "round", "(", "y_distance", "/", "mesh_spacing", ")", ")", "total_len_x", "=", "(", "len", "(", "mesh", ".", "lons", "[", "y_node", "]", ")", "-", "1", ")", "*", "mesh_spacing", "x_distance", "=", "hypo_loc", "[", "0", "]", "*", "total_len_x", "x_node", "=", "int", "(", "numpy", ".", "round", "(", "x_distance", "/", "mesh_spacing", ")", ")", "hypocentre", "=", "Point", "(", "mesh", ".", "lons", "[", "y_node", "]", "[", "x_node", "]", ",", "mesh", ".", "lats", "[", "y_node", "]", "[", "x_node", "]", ",", "mesh", ".", "depths", "[", "y_node", "]", "[", "x_node", "]", ")", "return", "hypocentre" ]
The method determines the location of the hypocentre within the rupture :param mesh_spacing: The desired distance between two adjacent points in source's ruptures' mesh, in km. Mainly this parameter allows to balance the trade-off between time needed to compute the distance between the rupture surface and a site and the precision of that computation. :param hypo_loc: Hypocentre location as fraction of rupture plane, as a tuple of (Along Strike, Down Dip), e.g. a hypocentre located in the centroid of the rupture would be input as (0.5, 0.5), whereas a hypocentre located in a position 3/4 along the length, and 1/4 of the way down dip of the rupture plane would be entered as (0.75, 0.25). :returns: Hypocentre location as instance of :class:`~openquake.hazardlib.geo.point.Point`
[ "The", "method", "determines", "the", "location", "of", "the", "hypocentre", "within", "the", "rupture" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L377-L414
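The index arithmetic in `get_hypo_location` maps a fractional (along-strike, down-dip) position to the nearest mesh node. A toy version with bare row/column counts (names invented for the sketch; no mesh object):

import numpy as np

def hypo_node(n_rows, n_cols, mesh_spacing, hypo_loc):
    # Convert the down-dip and along-strike fractions into node indices.
    total_len_y = (n_rows - 1) * mesh_spacing
    y_node = int(np.round(hypo_loc[1] * total_len_y / mesh_spacing))
    total_len_x = (n_cols - 1) * mesh_spacing
    x_node = int(np.round(hypo_loc[0] * total_len_x / mesh_spacing))
    return y_node, x_node

print(hypo_node(5, 9, 2.0, (0.75, 0.25)))  # (1, 6): 1/4 down dip, 3/4 along strike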
gem/oq-engine
openquake/hazardlib/geo/surface/base.py
BaseSurface.get_azimuth
def get_azimuth(self, mesh): """ This method computes the azimuth of a set of points in a :class:`openquake.hazardlib.geo.mesh` instance. The reference used for the calculation of azimuth is the middle point and the strike of the rupture. The value of azimuth computed corresponds to the angle measured in a clockwise direction from the strike of the rupture. :parameter mesh: An instance of :class:`openquake.hazardlib.geo.mesh` :return: An instance of `numpy.ndarray` """ # Get info about the rupture strike = self.get_strike() hypocenter = self.get_middle_point() # This is the azimuth from the north of each point Vs. the middle of # the rupture azim = geodetic.azimuth(hypocenter.longitude, hypocenter.latitude, mesh.lons, mesh.lats) # Compute the azimuth from the fault strike rel_azi = (azim - strike) % 360 return rel_azi
python
def get_azimuth(self, mesh): strike = self.get_strike() hypocenter = self.get_middle_point() azim = geodetic.azimuth(hypocenter.longitude, hypocenter.latitude, mesh.lons, mesh.lats) rel_azi = (azim - strike) % 360 return rel_azi
[ "def", "get_azimuth", "(", "self", ",", "mesh", ")", ":", "# Get info about the rupture", "strike", "=", "self", ".", "get_strike", "(", ")", "hypocenter", "=", "self", ".", "get_middle_point", "(", ")", "# This is the azimuth from the north of each point Vs. the middle of", "# the rupture", "azim", "=", "geodetic", ".", "azimuth", "(", "hypocenter", ".", "longitude", ",", "hypocenter", ".", "latitude", ",", "mesh", ".", "lons", ",", "mesh", ".", "lats", ")", "# Compute the azimuth from the fault strike", "rel_azi", "=", "(", "azim", "-", "strike", ")", "%", "360", "return", "rel_azi" ]
This method computes the azimuth of a set of points in a :class:`openquake.hazardlib.geo.mesh` instance. The reference used for the calculation of azimuth is the middle point and the strike of the rupture. The value of azimuth computed corresponds to the angle measured in a clockwise direction from the strike of the rupture. :parameter mesh: An instance of :class:`openquake.hazardlib.geo.mesh` :return: An instance of `numpy.ndarray`
[ "This", "method", "computes", "the", "azimuth", "of", "a", "set", "of", "points", "in", "a", ":", "class", ":", "openquake", ".", "hazardlib", ".", "geo", ".", "mesh", "instance", ".", "The", "reference", "used", "for", "the", "calculation", "of", "azimuth", "is", "the", "middle", "point", "and", "the", "strike", "of", "the", "rupture", ".", "The", "value", "of", "azimuth", "computed", "corresponds", "to", "the", "angle", "measured", "in", "a", "clockwise", "direction", "from", "the", "strike", "of", "the", "rupture", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L416-L438
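The only subtlety in `get_azimuth` is wrapping the strike-relative angle into [0, 360). A scalar-level check (flat numbers, no geodesy):

import numpy as np

strike = 350.0
azim = np.array([10.0, 350.0, 170.0])   # azimuths from the rupture middle point
print((azim - strike) % 360)            # [ 20.   0. 180.]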
gem/oq-engine
openquake/calculators/export/risk.py
export_avg_losses
def export_avg_losses(ekey, dstore): """ :param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object """ dskey = ekey[0] oq = dstore['oqparam'] dt = oq.loss_dt() name, value, tags = _get_data(dstore, dskey, oq.hazard_stats().items()) writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) assets = get_assets(dstore) for tag, values in zip(tags, value.transpose(1, 0, 2)): dest = dstore.build_fname(name, tag, 'csv') array = numpy.zeros(len(values), dt) for l, lt in enumerate(dt.names): array[lt] = values[:, l] writer.save(compose_arrays(assets, array), dest) return writer.getsaved()
python
def export_avg_losses(ekey, dstore): dskey = ekey[0] oq = dstore['oqparam'] dt = oq.loss_dt() name, value, tags = _get_data(dstore, dskey, oq.hazard_stats().items()) writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) assets = get_assets(dstore) for tag, values in zip(tags, value.transpose(1, 0, 2)): dest = dstore.build_fname(name, tag, 'csv') array = numpy.zeros(len(values), dt) for l, lt in enumerate(dt.names): array[lt] = values[:, l] writer.save(compose_arrays(assets, array), dest) return writer.getsaved()
[ "def", "export_avg_losses", "(", "ekey", ",", "dstore", ")", ":", "dskey", "=", "ekey", "[", "0", "]", "oq", "=", "dstore", "[", "'oqparam'", "]", "dt", "=", "oq", ".", "loss_dt", "(", ")", "name", ",", "value", ",", "tags", "=", "_get_data", "(", "dstore", ",", "dskey", ",", "oq", ".", "hazard_stats", "(", ")", ".", "items", "(", ")", ")", "writer", "=", "writers", ".", "CsvWriter", "(", "fmt", "=", "writers", ".", "FIVEDIGITS", ")", "assets", "=", "get_assets", "(", "dstore", ")", "for", "tag", ",", "values", "in", "zip", "(", "tags", ",", "value", ".", "transpose", "(", "1", ",", "0", ",", "2", ")", ")", ":", "dest", "=", "dstore", ".", "build_fname", "(", "name", ",", "tag", ",", "'csv'", ")", "array", "=", "numpy", ".", "zeros", "(", "len", "(", "values", ")", ",", "dt", ")", "for", "l", ",", "lt", "in", "enumerate", "(", "dt", ".", "names", ")", ":", "array", "[", "lt", "]", "=", "values", "[", ":", ",", "l", "]", "writer", ".", "save", "(", "compose_arrays", "(", "assets", ",", "array", ")", ",", "dest", ")", "return", "writer", ".", "getsaved", "(", ")" ]
:param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object
[ ":", "param", "ekey", ":", "export", "key", "i", ".", "e", ".", "a", "pair", "(", "datastore", "key", "fmt", ")", ":", "param", "dstore", ":", "datastore", "object" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/risk.py#L131-L148
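The core of `export_avg_losses` is turning a plain (assets, loss_types) float array into a structured array with one named field per loss type. A hedged standalone version of that step (the loss-type names are invented for the example):

import numpy as np

values = np.array([[10.0, 3.0],
                   [20.0, 5.0]])        # shape (assets, loss types)
dt = np.dtype([('structural', np.float64), ('contents', np.float64)])

array = np.zeros(len(values), dt)
for l, lt in enumerate(dt.names):       # copy each column into its named field
    array[lt] = values[:, l]
print(array)  # [(10., 3.) (20., 5.)]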
gem/oq-engine
openquake/calculators/export/risk.py
export_agg_losses
def export_agg_losses(ekey, dstore): """ :param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object """ dskey = ekey[0] oq = dstore['oqparam'] dt = oq.loss_dt() name, value, tags = _get_data(dstore, dskey, oq.hazard_stats().items()) writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) expvalue = dstore['exposed_value'].value # shape (T1, T2, ..., L) tagcol = dstore['assetcol/tagcol'] tagnames = tuple(dstore['oqparam'].aggregate_by) header = ('loss_type',) + tagnames + ( 'loss_value', 'exposed_value', 'loss_ratio') for r, tag in enumerate(tags): rows = [] for multi_idx, loss in numpy.ndenumerate(value[:, r]): l, *tagidxs = multi_idx evalue = expvalue[tuple(tagidxs) + (l,)] row = tagcol.get_tagvalues(tagnames, tagidxs) + ( loss, evalue, loss / evalue) rows.append((dt.names[l],) + row) dest = dstore.build_fname(name, tag, 'csv') writer.save(rows, dest, header) return writer.getsaved()
python
def export_agg_losses(ekey, dstore): dskey = ekey[0] oq = dstore['oqparam'] dt = oq.loss_dt() name, value, tags = _get_data(dstore, dskey, oq.hazard_stats().items()) writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) expvalue = dstore['exposed_value'].value tagcol = dstore['assetcol/tagcol'] tagnames = tuple(dstore['oqparam'].aggregate_by) header = ('loss_type',) + tagnames + ( 'loss_value', 'exposed_value', 'loss_ratio') for r, tag in enumerate(tags): rows = [] for multi_idx, loss in numpy.ndenumerate(value[:, r]): l, *tagidxs = multi_idx evalue = expvalue[tuple(tagidxs) + (l,)] row = tagcol.get_tagvalues(tagnames, tagidxs) + ( loss, evalue, loss / evalue) rows.append((dt.names[l],) + row) dest = dstore.build_fname(name, tag, 'csv') writer.save(rows, dest, header) return writer.getsaved()
[ "def", "export_agg_losses", "(", "ekey", ",", "dstore", ")", ":", "dskey", "=", "ekey", "[", "0", "]", "oq", "=", "dstore", "[", "'oqparam'", "]", "dt", "=", "oq", ".", "loss_dt", "(", ")", "name", ",", "value", ",", "tags", "=", "_get_data", "(", "dstore", ",", "dskey", ",", "oq", ".", "hazard_stats", "(", ")", ".", "items", "(", ")", ")", "writer", "=", "writers", ".", "CsvWriter", "(", "fmt", "=", "writers", ".", "FIVEDIGITS", ")", "expvalue", "=", "dstore", "[", "'exposed_value'", "]", ".", "value", "# shape (T1, T2, ..., L)", "tagcol", "=", "dstore", "[", "'assetcol/tagcol'", "]", "tagnames", "=", "tuple", "(", "dstore", "[", "'oqparam'", "]", ".", "aggregate_by", ")", "header", "=", "(", "'loss_type'", ",", ")", "+", "tagnames", "+", "(", "'loss_value'", ",", "'exposed_value'", ",", "'loss_ratio'", ")", "for", "r", ",", "tag", "in", "enumerate", "(", "tags", ")", ":", "rows", "=", "[", "]", "for", "multi_idx", ",", "loss", "in", "numpy", ".", "ndenumerate", "(", "value", "[", ":", ",", "r", "]", ")", ":", "l", ",", "", "*", "tagidxs", "=", "multi_idx", "evalue", "=", "expvalue", "[", "tuple", "(", "tagidxs", ")", "+", "(", "l", ",", ")", "]", "row", "=", "tagcol", ".", "get_tagvalues", "(", "tagnames", ",", "tagidxs", ")", "+", "(", "loss", ",", "evalue", ",", "loss", "/", "evalue", ")", "rows", ".", "append", "(", "(", "dt", ".", "names", "[", "l", "]", ",", ")", "+", "row", ")", "dest", "=", "dstore", ".", "build_fname", "(", "name", ",", "tag", ",", "'csv'", ")", "writer", ".", "save", "(", "rows", ",", "dest", ",", "header", ")", "return", "writer", ".", "getsaved", "(", ")" ]
:param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object
[ ":", "param", "ekey", ":", "export", "key", "i", ".", "e", ".", "a", "pair", "(", "datastore", "key", "fmt", ")", ":", "param", "dstore", ":", "datastore", "object" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/risk.py#L153-L178
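`export_agg_losses` unrolls a multi-dimensional (loss_type, tag1, tag2, ...) array into flat CSV rows with `numpy.ndenumerate`; a sketch of that unrolling with made-up tag axes:

import numpy as np

# value[l, t1, t2]: aggregate loss for loss type l, taxonomy t1, region t2.
value = np.arange(8.0).reshape(2, 2, 2)
loss_types = ['structural', 'contents']

for multi_idx, loss in np.ndenumerate(value):
    l, *tagidxs = multi_idx              # first axis is the loss type
    print(loss_types[l], tagidxs, loss)  # one CSV-like row per cell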
gem/oq-engine
openquake/calculators/export/risk.py
export_avg_losses_ebrisk
def export_avg_losses_ebrisk(ekey, dstore): """ :param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object """ name = ekey[0] oq = dstore['oqparam'] dt = oq.loss_dt() value = dstore[name].value # shape (A, L) writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) assets = get_assets(dstore) dest = dstore.build_fname(name, 'mean', 'csv') array = numpy.zeros(len(value), dt) for l, lt in enumerate(dt.names): array[lt] = value[:, l] writer.save(compose_arrays(assets, array), dest) return writer.getsaved()
python
def export_avg_losses_ebrisk(ekey, dstore): name = ekey[0] oq = dstore['oqparam'] dt = oq.loss_dt() value = dstore[name].value writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) assets = get_assets(dstore) dest = dstore.build_fname(name, 'mean', 'csv') array = numpy.zeros(len(value), dt) for l, lt in enumerate(dt.names): array[lt] = value[:, l] writer.save(compose_arrays(assets, array), dest) return writer.getsaved()
[ "def", "export_avg_losses_ebrisk", "(", "ekey", ",", "dstore", ")", ":", "name", "=", "ekey", "[", "0", "]", "oq", "=", "dstore", "[", "'oqparam'", "]", "dt", "=", "oq", ".", "loss_dt", "(", ")", "value", "=", "dstore", "[", "name", "]", ".", "value", "# shape (A, L)", "writer", "=", "writers", ".", "CsvWriter", "(", "fmt", "=", "writers", ".", "FIVEDIGITS", ")", "assets", "=", "get_assets", "(", "dstore", ")", "dest", "=", "dstore", ".", "build_fname", "(", "name", ",", "'mean'", ",", "'csv'", ")", "array", "=", "numpy", ".", "zeros", "(", "len", "(", "value", ")", ",", "dt", ")", "for", "l", ",", "lt", "in", "enumerate", "(", "dt", ".", "names", ")", ":", "array", "[", "lt", "]", "=", "value", "[", ":", ",", "l", "]", "writer", ".", "save", "(", "compose_arrays", "(", "assets", ",", "array", ")", ",", "dest", ")", "return", "writer", ".", "getsaved", "(", ")" ]
:param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object
[ ":", "param", "ekey", ":", "export", "key", "i", ".", "e", ".", "a", "pair", "(", "datastore", "key", "fmt", ")", ":", "param", "dstore", ":", "datastore", "object" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/risk.py#L183-L199
gem/oq-engine
openquake/calculators/export/risk.py
export_losses_by_asset
def export_losses_by_asset(ekey, dstore): """ :param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object """ loss_dt = dstore['oqparam'].loss_dt(stat_dt) losses_by_asset = dstore[ekey[0]].value rlzs = dstore['csm_info'].get_rlzs_assoc().realizations assets = get_assets(dstore) writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) for rlz in rlzs: losses = losses_by_asset[:, rlz.ordinal] dest = dstore.build_fname('losses_by_asset', rlz, 'csv') data = compose_arrays(assets, losses.copy().view(loss_dt)[:, 0]) writer.save(data, dest) return writer.getsaved()
python
def export_losses_by_asset(ekey, dstore): loss_dt = dstore['oqparam'].loss_dt(stat_dt) losses_by_asset = dstore[ekey[0]].value rlzs = dstore['csm_info'].get_rlzs_assoc().realizations assets = get_assets(dstore) writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) for rlz in rlzs: losses = losses_by_asset[:, rlz.ordinal] dest = dstore.build_fname('losses_by_asset', rlz, 'csv') data = compose_arrays(assets, losses.copy().view(loss_dt)[:, 0]) writer.save(data, dest) return writer.getsaved()
[ "def", "export_losses_by_asset", "(", "ekey", ",", "dstore", ")", ":", "loss_dt", "=", "dstore", "[", "'oqparam'", "]", ".", "loss_dt", "(", "stat_dt", ")", "losses_by_asset", "=", "dstore", "[", "ekey", "[", "0", "]", "]", ".", "value", "rlzs", "=", "dstore", "[", "'csm_info'", "]", ".", "get_rlzs_assoc", "(", ")", ".", "realizations", "assets", "=", "get_assets", "(", "dstore", ")", "writer", "=", "writers", ".", "CsvWriter", "(", "fmt", "=", "writers", ".", "FIVEDIGITS", ")", "for", "rlz", "in", "rlzs", ":", "losses", "=", "losses_by_asset", "[", ":", ",", "rlz", ".", "ordinal", "]", "dest", "=", "dstore", ".", "build_fname", "(", "'losses_by_asset'", ",", "rlz", ",", "'csv'", ")", "data", "=", "compose_arrays", "(", "assets", ",", "losses", ".", "copy", "(", ")", ".", "view", "(", "loss_dt", ")", "[", ":", ",", "0", "]", ")", "writer", ".", "save", "(", "data", ",", "dest", ")", "return", "writer", ".", "getsaved", "(", ")" ]
:param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object
[ ":", "param", "ekey", ":", "export", "key", "i", ".", "e", ".", "a", "pair", "(", "datastore", "key", "fmt", ")", ":", "param", "dstore", ":", "datastore", "object" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/risk.py#L204-L219
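The `losses.copy().view(loss_dt)[:, 0]` idiom above reinterprets each row of L floats as a single record of the structured loss dtype; `view` keeps the underlying buffer, so the result has shape (assets, 1), hence the trailing `[:, 0]`. A minimal demonstration with two invented loss types:

import numpy as np

losses = np.array([[1.0, 2.0],
                   [3.0, 4.0]])          # shape (assets, loss types)
loss_dt = np.dtype([('structural', np.float64), ('contents', np.float64)])

records = losses.copy().view(loss_dt)[:, 0]
print(records)               # [(1., 2.) (3., 4.)]
print(records['contents'])   # [2. 4.]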
gem/oq-engine
openquake/calculators/export/risk.py
export_losses_by_event
def export_losses_by_event(ekey, dstore): """ :param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object """ oq = dstore['oqparam'] writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) dest = dstore.build_fname('losses_by_event', '', 'csv') if oq.calculation_mode.startswith('scenario'): dtlist = [('eid', U64)] + oq.loss_dt_list() arr = dstore['losses_by_event'].value[['eid', 'loss']] writer.save(arr.copy().view(dtlist), dest) elif oq.calculation_mode == 'ebrisk': tagcol = dstore['assetcol/tagcol'] lbe = dstore['losses_by_event'].value lbe.sort(order='eid') dic = dict(tagnames=['event_id', 'loss_type'] + oq.aggregate_by) for tagname in oq.aggregate_by: dic[tagname] = getattr(tagcol, tagname) dic['event_id'] = ['?'] + list(lbe['eid']) dic['loss_type'] = ('?',) + oq.loss_dt().names aw = hdf5.ArrayWrapper(lbe['loss'], dic) # shape (E, L, T...) writer.save(aw.to_table(), dest) else: dtlist = [('event_id', U64), ('rup_id', U32), ('year', U32)] + \ oq.loss_dt_list() eids = dstore['losses_by_event']['eid'] year_of = year_dict(dstore['events']['eid'], oq.investigation_time, oq.ses_seed) arr = numpy.zeros(len(dstore['losses_by_event']), dtlist) arr['event_id'] = eids arr['rup_id'] = arr['event_id'] / TWO32 arr['year'] = [year_of[eid] for eid in eids] loss = dstore['losses_by_event']['loss'].T # shape (L, E) for losses, loss_type in zip(loss, oq.loss_dt().names): arr[loss_type] = losses writer.save(arr, dest) return writer.getsaved()
python
def export_losses_by_event(ekey, dstore): oq = dstore['oqparam'] writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) dest = dstore.build_fname('losses_by_event', '', 'csv') if oq.calculation_mode.startswith('scenario'): dtlist = [('eid', U64)] + oq.loss_dt_list() arr = dstore['losses_by_event'].value[['eid', 'loss']] writer.save(arr.copy().view(dtlist), dest) elif oq.calculation_mode == 'ebrisk': tagcol = dstore['assetcol/tagcol'] lbe = dstore['losses_by_event'].value lbe.sort(order='eid') dic = dict(tagnames=['event_id', 'loss_type'] + oq.aggregate_by) for tagname in oq.aggregate_by: dic[tagname] = getattr(tagcol, tagname) dic['event_id'] = ['?'] + list(lbe['eid']) dic['loss_type'] = ('?',) + oq.loss_dt().names aw = hdf5.ArrayWrapper(lbe['loss'], dic) writer.save(aw.to_table(), dest) else: dtlist = [('event_id', U64), ('rup_id', U32), ('year', U32)] + \ oq.loss_dt_list() eids = dstore['losses_by_event']['eid'] year_of = year_dict(dstore['events']['eid'], oq.investigation_time, oq.ses_seed) arr = numpy.zeros(len(dstore['losses_by_event']), dtlist) arr['event_id'] = eids arr['rup_id'] = arr['event_id'] / TWO32 arr['year'] = [year_of[eid] for eid in eids] loss = dstore['losses_by_event']['loss'].T for losses, loss_type in zip(loss, oq.loss_dt().names): arr[loss_type] = losses writer.save(arr, dest) return writer.getsaved()
[ "def", "export_losses_by_event", "(", "ekey", ",", "dstore", ")", ":", "oq", "=", "dstore", "[", "'oqparam'", "]", "writer", "=", "writers", ".", "CsvWriter", "(", "fmt", "=", "writers", ".", "FIVEDIGITS", ")", "dest", "=", "dstore", ".", "build_fname", "(", "'losses_by_event'", ",", "''", ",", "'csv'", ")", "if", "oq", ".", "calculation_mode", ".", "startswith", "(", "'scenario'", ")", ":", "dtlist", "=", "[", "(", "'eid'", ",", "U64", ")", "]", "+", "oq", ".", "loss_dt_list", "(", ")", "arr", "=", "dstore", "[", "'losses_by_event'", "]", ".", "value", "[", "[", "'eid'", ",", "'loss'", "]", "]", "writer", ".", "save", "(", "arr", ".", "copy", "(", ")", ".", "view", "(", "dtlist", ")", ",", "dest", ")", "elif", "oq", ".", "calculation_mode", "==", "'ebrisk'", ":", "tagcol", "=", "dstore", "[", "'assetcol/tagcol'", "]", "lbe", "=", "dstore", "[", "'losses_by_event'", "]", ".", "value", "lbe", ".", "sort", "(", "order", "=", "'eid'", ")", "dic", "=", "dict", "(", "tagnames", "=", "[", "'event_id'", ",", "'loss_type'", "]", "+", "oq", ".", "aggregate_by", ")", "for", "tagname", "in", "oq", ".", "aggregate_by", ":", "dic", "[", "tagname", "]", "=", "getattr", "(", "tagcol", ",", "tagname", ")", "dic", "[", "'event_id'", "]", "=", "[", "'?'", "]", "+", "list", "(", "lbe", "[", "'eid'", "]", ")", "dic", "[", "'loss_type'", "]", "=", "(", "'?'", ",", ")", "+", "oq", ".", "loss_dt", "(", ")", ".", "names", "aw", "=", "hdf5", ".", "ArrayWrapper", "(", "lbe", "[", "'loss'", "]", ",", "dic", ")", "# shape (E, L, T...)", "writer", ".", "save", "(", "aw", ".", "to_table", "(", ")", ",", "dest", ")", "else", ":", "dtlist", "=", "[", "(", "'event_id'", ",", "U64", ")", ",", "(", "'rup_id'", ",", "U32", ")", ",", "(", "'year'", ",", "U32", ")", "]", "+", "oq", ".", "loss_dt_list", "(", ")", "eids", "=", "dstore", "[", "'losses_by_event'", "]", "[", "'eid'", "]", "year_of", "=", "year_dict", "(", "dstore", "[", "'events'", "]", "[", "'eid'", "]", ",", "oq", ".", "investigation_time", ",", "oq", ".", "ses_seed", ")", "arr", "=", "numpy", ".", "zeros", "(", "len", "(", "dstore", "[", "'losses_by_event'", "]", ")", ",", "dtlist", ")", "arr", "[", "'event_id'", "]", "=", "eids", "arr", "[", "'rup_id'", "]", "=", "arr", "[", "'event_id'", "]", "/", "TWO32", "arr", "[", "'year'", "]", "=", "[", "year_of", "[", "eid", "]", "for", "eid", "in", "eids", "]", "loss", "=", "dstore", "[", "'losses_by_event'", "]", "[", "'loss'", "]", ".", "T", "# shape (L, E)", "for", "losses", ",", "loss_type", "in", "zip", "(", "loss", ",", "oq", ".", "loss_dt", "(", ")", ".", "names", ")", ":", "arr", "[", "loss_type", "]", "=", "losses", "writer", ".", "save", "(", "arr", ",", "dest", ")", "return", "writer", ".", "getsaved", "(", ")" ]
:param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object
[ ":", "param", "ekey", ":", "export", "key", "i", ".", "e", ".", "a", "pair", "(", "datastore", "key", "fmt", ")", ":", "param", "dstore", ":", "datastore", "object" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/risk.py#L224-L261
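Both `export_losses_by_event` above and `export_agg_losses_ebr` below recover the rupture id from a 64-bit event id by integer-dividing by `TWO32 = 2**32`, i.e. the rupture serial sits in the high 32 bits of the eid (what exactly the low 32 bits encode is not shown in this extract). A self-contained round trip of that packing:

TWO32 = 2 ** 32
eid = 7 * TWO32 + 3   # rupture serial 7 packed into the high 32 bits
print(eid // TWO32)   # 7 -> the rupture id
print(eid % TWO32)    # 3 -> the low 32 bits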
gem/oq-engine
openquake/calculators/export/risk.py
export_losses_by_asset_npz
def export_losses_by_asset_npz(ekey, dstore): """ :param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object """ fname = dstore.export_path('%s.%s' % ekey) savez(fname, **dict(extract(dstore, 'losses_by_asset'))) return [fname]
python
def export_losses_by_asset_npz(ekey, dstore): fname = dstore.export_path('%s.%s' % ekey) savez(fname, **dict(extract(dstore, 'losses_by_asset'))) return [fname]
[ "def", "export_losses_by_asset_npz", "(", "ekey", ",", "dstore", ")", ":", "fname", "=", "dstore", ".", "export_path", "(", "'%s.%s'", "%", "ekey", ")", "savez", "(", "fname", ",", "*", "*", "dict", "(", "extract", "(", "dstore", ",", "'losses_by_asset'", ")", ")", ")", "return", "[", "fname", "]" ]
:param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object
[ ":", "param", "ekey", ":", "export", "key", "i", ".", "e", ".", "a", "pair", "(", "datastore", "key", "fmt", ")", ":", "param", "dstore", ":", "datastore", "object" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/risk.py#L265-L272
gem/oq-engine
openquake/calculators/export/risk.py
export_maxloss_ruptures
def export_maxloss_ruptures(ekey, dstore): """ :param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object """ oq = dstore['oqparam'] mesh = get_mesh(dstore['sitecol']) rlzs_by_gsim = dstore['csm_info'].get_rlzs_by_gsim_grp() num_ses = oq.ses_per_logic_tree_path fnames = [] for loss_type in oq.loss_dt().names: ebr = getters.get_maxloss_rupture(dstore, loss_type) root = hazard_writers.rupture_to_element( ebr.export(mesh, rlzs_by_gsim[ebr.grp_id], num_ses)) dest = dstore.export_path('rupture-%s.xml' % loss_type) with open(dest, 'wb') as fh: nrml.write(list(root), fh) fnames.append(dest) return fnames
python
def export_maxloss_ruptures(ekey, dstore): oq = dstore['oqparam'] mesh = get_mesh(dstore['sitecol']) rlzs_by_gsim = dstore['csm_info'].get_rlzs_by_gsim_grp() num_ses = oq.ses_per_logic_tree_path fnames = [] for loss_type in oq.loss_dt().names: ebr = getters.get_maxloss_rupture(dstore, loss_type) root = hazard_writers.rupture_to_element( ebr.export(mesh, rlzs_by_gsim[ebr.grp_id], num_ses)) dest = dstore.export_path('rupture-%s.xml' % loss_type) with open(dest, 'wb') as fh: nrml.write(list(root), fh) fnames.append(dest) return fnames
[ "def", "export_maxloss_ruptures", "(", "ekey", ",", "dstore", ")", ":", "oq", "=", "dstore", "[", "'oqparam'", "]", "mesh", "=", "get_mesh", "(", "dstore", "[", "'sitecol'", "]", ")", "rlzs_by_gsim", "=", "dstore", "[", "'csm_info'", "]", ".", "get_rlzs_by_gsim_grp", "(", ")", "num_ses", "=", "oq", ".", "ses_per_logic_tree_path", "fnames", "=", "[", "]", "for", "loss_type", "in", "oq", ".", "loss_dt", "(", ")", ".", "names", ":", "ebr", "=", "getters", ".", "get_maxloss_rupture", "(", "dstore", ",", "loss_type", ")", "root", "=", "hazard_writers", ".", "rupture_to_element", "(", "ebr", ".", "export", "(", "mesh", ",", "rlzs_by_gsim", "[", "ebr", ".", "grp_id", "]", ",", "num_ses", ")", ")", "dest", "=", "dstore", ".", "export_path", "(", "'rupture-%s.xml'", "%", "loss_type", ")", "with", "open", "(", "dest", ",", "'wb'", ")", "as", "fh", ":", "nrml", ".", "write", "(", "list", "(", "root", ")", ",", "fh", ")", "fnames", ".", "append", "(", "dest", ")", "return", "fnames" ]
:param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object
[ ":", "param", "ekey", ":", "export", "key", "i", ".", "e", ".", "a", "pair", "(", "datastore", "key", "fmt", ")", ":", "param", "dstore", ":", "datastore", "object" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/risk.py#L286-L304
gem/oq-engine
openquake/calculators/export/risk.py
export_agg_losses_ebr
def export_agg_losses_ebr(ekey, dstore): """ :param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object """ if 'ruptures' not in dstore: logging.warning('There are no ruptures in the datastore') return [] name, ext = export.keyfunc(ekey) agg_losses = dstore['losses_by_event'] has_rup_data = 'ruptures' in dstore extra_list = [('magnitude', F32), ('centroid_lon', F32), ('centroid_lat', F32), ('centroid_depth', F32)] if has_rup_data else [] oq = dstore['oqparam'] lti = oq.lti dtlist = ([('event_id', U64), ('rup_id', U32), ('year', U32)] + extra_list + oq.loss_dt_list()) elt_dt = numpy.dtype(dtlist) elt = numpy.zeros(len(agg_losses), elt_dt) writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) events = dstore['events'].value events_by_rupid = collections.defaultdict(list) for event in events: rupid = event['eid'] // TWO32 events_by_rupid[rupid].append(event) year_of = year_dict(events['eid'], oq.investigation_time, oq.ses_seed) rup_data = {} event_by_eid = {} # eid -> event # populate rup_data and event_by_eid # TODO: avoid reading the events twice for rgetter in getters.gen_rupture_getters(dstore): ruptures = rgetter.get_ruptures() for ebr in ruptures: for event in events_by_rupid[ebr.serial]: event_by_eid[event['eid']] = event if has_rup_data: rup_data.update(get_rup_data(ruptures)) for r, row in enumerate(agg_losses): rec = elt[r] event = event_by_eid[row['eid']] rec['event_id'] = eid = event['eid'] rec['year'] = year_of[eid] if rup_data: rec['rup_id'] = rup_id = event['eid'] // TWO32 (rec['magnitude'], rec['centroid_lon'], rec['centroid_lat'], rec['centroid_depth']) = rup_data[rup_id] for lt, i in lti.items(): rec[lt] = row['loss'][i] elt.sort(order=['year', 'event_id']) dest = dstore.build_fname('elt', '', 'csv') writer.save(elt, dest) return writer.getsaved()
python
def export_agg_losses_ebr(ekey, dstore): if 'ruptures' not in dstore: logging.warning('There are no ruptures in the datastore') return [] name, ext = export.keyfunc(ekey) agg_losses = dstore['losses_by_event'] has_rup_data = 'ruptures' in dstore extra_list = [('magnitude', F32), ('centroid_lon', F32), ('centroid_lat', F32), ('centroid_depth', F32)] if has_rup_data else [] oq = dstore['oqparam'] lti = oq.lti dtlist = ([('event_id', U64), ('rup_id', U32), ('year', U32)] + extra_list + oq.loss_dt_list()) elt_dt = numpy.dtype(dtlist) elt = numpy.zeros(len(agg_losses), elt_dt) writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) events = dstore['events'].value events_by_rupid = collections.defaultdict(list) for event in events: rupid = event['eid'] // TWO32 events_by_rupid[rupid].append(event) year_of = year_dict(events['eid'], oq.investigation_time, oq.ses_seed) rup_data = {} event_by_eid = {} for rgetter in getters.gen_rupture_getters(dstore): ruptures = rgetter.get_ruptures() for ebr in ruptures: for event in events_by_rupid[ebr.serial]: event_by_eid[event['eid']] = event if has_rup_data: rup_data.update(get_rup_data(ruptures)) for r, row in enumerate(agg_losses): rec = elt[r] event = event_by_eid[row['eid']] rec['event_id'] = eid = event['eid'] rec['year'] = year_of[eid] if rup_data: rec['rup_id'] = rup_id = event['eid'] // TWO32 (rec['magnitude'], rec['centroid_lon'], rec['centroid_lat'], rec['centroid_depth']) = rup_data[rup_id] for lt, i in lti.items(): rec[lt] = row['loss'][i] elt.sort(order=['year', 'event_id']) dest = dstore.build_fname('elt', '', 'csv') writer.save(elt, dest) return writer.getsaved()
[ "def", "export_agg_losses_ebr", "(", "ekey", ",", "dstore", ")", ":", "if", "'ruptures'", "not", "in", "dstore", ":", "logging", ".", "warning", "(", "'There are no ruptures in the datastore'", ")", "return", "[", "]", "name", ",", "ext", "=", "export", ".", "keyfunc", "(", "ekey", ")", "agg_losses", "=", "dstore", "[", "'losses_by_event'", "]", "has_rup_data", "=", "'ruptures'", "in", "dstore", "extra_list", "=", "[", "(", "'magnitude'", ",", "F32", ")", ",", "(", "'centroid_lon'", ",", "F32", ")", ",", "(", "'centroid_lat'", ",", "F32", ")", ",", "(", "'centroid_depth'", ",", "F32", ")", "]", "if", "has_rup_data", "else", "[", "]", "oq", "=", "dstore", "[", "'oqparam'", "]", "lti", "=", "oq", ".", "lti", "dtlist", "=", "(", "[", "(", "'event_id'", ",", "U64", ")", ",", "(", "'rup_id'", ",", "U32", ")", ",", "(", "'year'", ",", "U32", ")", "]", "+", "extra_list", "+", "oq", ".", "loss_dt_list", "(", ")", ")", "elt_dt", "=", "numpy", ".", "dtype", "(", "dtlist", ")", "elt", "=", "numpy", ".", "zeros", "(", "len", "(", "agg_losses", ")", ",", "elt_dt", ")", "writer", "=", "writers", ".", "CsvWriter", "(", "fmt", "=", "writers", ".", "FIVEDIGITS", ")", "events", "=", "dstore", "[", "'events'", "]", ".", "value", "events_by_rupid", "=", "collections", ".", "defaultdict", "(", "list", ")", "for", "event", "in", "events", ":", "rupid", "=", "event", "[", "'eid'", "]", "//", "TWO32", "events_by_rupid", "[", "rupid", "]", ".", "append", "(", "event", ")", "year_of", "=", "year_dict", "(", "events", "[", "'eid'", "]", ",", "oq", ".", "investigation_time", ",", "oq", ".", "ses_seed", ")", "rup_data", "=", "{", "}", "event_by_eid", "=", "{", "}", "# eid -> event", "# populate rup_data and event_by_eid", "# TODO: avoid reading the events twice", "for", "rgetter", "in", "getters", ".", "gen_rupture_getters", "(", "dstore", ")", ":", "ruptures", "=", "rgetter", ".", "get_ruptures", "(", ")", "for", "ebr", "in", "ruptures", ":", "for", "event", "in", "events_by_rupid", "[", "ebr", ".", "serial", "]", ":", "event_by_eid", "[", "event", "[", "'eid'", "]", "]", "=", "event", "if", "has_rup_data", ":", "rup_data", ".", "update", "(", "get_rup_data", "(", "ruptures", ")", ")", "for", "r", ",", "row", "in", "enumerate", "(", "agg_losses", ")", ":", "rec", "=", "elt", "[", "r", "]", "event", "=", "event_by_eid", "[", "row", "[", "'eid'", "]", "]", "rec", "[", "'event_id'", "]", "=", "eid", "=", "event", "[", "'eid'", "]", "rec", "[", "'year'", "]", "=", "year_of", "[", "eid", "]", "if", "rup_data", ":", "rec", "[", "'rup_id'", "]", "=", "rup_id", "=", "event", "[", "'eid'", "]", "//", "TWO32", "(", "rec", "[", "'magnitude'", "]", ",", "rec", "[", "'centroid_lon'", "]", ",", "rec", "[", "'centroid_lat'", "]", ",", "rec", "[", "'centroid_depth'", "]", ")", "=", "rup_data", "[", "rup_id", "]", "for", "lt", ",", "i", "in", "lti", ".", "items", "(", ")", ":", "rec", "[", "lt", "]", "=", "row", "[", "'loss'", "]", "[", "i", "]", "elt", ".", "sort", "(", "order", "=", "[", "'year'", ",", "'event_id'", "]", ")", "dest", "=", "dstore", ".", "build_fname", "(", "'elt'", ",", "''", ",", "'csv'", ")", "writer", ".", "save", "(", "elt", ",", "dest", ")", "return", "writer", ".", "getsaved", "(", ")" ]
:param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object
[ ":", "param", "ekey", ":", "export", "key", "i", ".", "e", ".", "a", "pair", "(", "datastore", "key", "fmt", ")", ":", "param", "dstore", ":", "datastore", "object" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/risk.py#L316-L369
gem/oq-engine
openquake/calculators/export/risk.py
export_dmg_by_event
def export_dmg_by_event(ekey, dstore): """ :param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object """ damage_dt = build_damage_dt(dstore, mean_std=False) dt_list = [('event_id', numpy.uint64), ('rlzi', numpy.uint16)] + [ (f, damage_dt.fields[f][0]) for f in damage_dt.names] all_losses = dstore[ekey[0]].value # shape (E, R, LI) events_by_rlz = group_array(dstore['events'], 'rlz') rlzs = dstore['csm_info'].get_rlzs_assoc().realizations writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) fname = dstore.build_fname('dmg_by_event', '', 'csv') writer.save(numpy.zeros(0, dt_list), fname) with open(fname, 'ab') as dest: for rlz in rlzs: data = all_losses[:, rlz.ordinal].copy().view(damage_dt) # shape E arr = numpy.zeros(len(data), dt_list) arr['event_id'] = events_by_rlz[rlz.ordinal]['eid'] arr['rlzi'] = rlz.ordinal for field in damage_dt.names: arr[field] = data[field].squeeze() writer.save_block(arr, dest) return [fname]
python
def export_dmg_by_event(ekey, dstore): damage_dt = build_damage_dt(dstore, mean_std=False) dt_list = [('event_id', numpy.uint64), ('rlzi', numpy.uint16)] + [ (f, damage_dt.fields[f][0]) for f in damage_dt.names] all_losses = dstore[ekey[0]].value events_by_rlz = group_array(dstore['events'], 'rlz') rlzs = dstore['csm_info'].get_rlzs_assoc().realizations writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) fname = dstore.build_fname('dmg_by_event', '', 'csv') writer.save(numpy.zeros(0, dt_list), fname) with open(fname, 'ab') as dest: for rlz in rlzs: data = all_losses[:, rlz.ordinal].copy().view(damage_dt) arr = numpy.zeros(len(data), dt_list) arr['event_id'] = events_by_rlz[rlz.ordinal]['eid'] arr['rlzi'] = rlz.ordinal for field in damage_dt.names: arr[field] = data[field].squeeze() writer.save_block(arr, dest) return [fname]
[ "def", "export_dmg_by_event", "(", "ekey", ",", "dstore", ")", ":", "damage_dt", "=", "build_damage_dt", "(", "dstore", ",", "mean_std", "=", "False", ")", "dt_list", "=", "[", "(", "'event_id'", ",", "numpy", ".", "uint64", ")", ",", "(", "'rlzi'", ",", "numpy", ".", "uint16", ")", "]", "+", "[", "(", "f", ",", "damage_dt", ".", "fields", "[", "f", "]", "[", "0", "]", ")", "for", "f", "in", "damage_dt", ".", "names", "]", "all_losses", "=", "dstore", "[", "ekey", "[", "0", "]", "]", ".", "value", "# shape (E, R, LI)", "events_by_rlz", "=", "group_array", "(", "dstore", "[", "'events'", "]", ",", "'rlz'", ")", "rlzs", "=", "dstore", "[", "'csm_info'", "]", ".", "get_rlzs_assoc", "(", ")", ".", "realizations", "writer", "=", "writers", ".", "CsvWriter", "(", "fmt", "=", "writers", ".", "FIVEDIGITS", ")", "fname", "=", "dstore", ".", "build_fname", "(", "'dmg_by_event'", ",", "''", ",", "'csv'", ")", "writer", ".", "save", "(", "numpy", ".", "zeros", "(", "0", ",", "dt_list", ")", ",", "fname", ")", "with", "open", "(", "fname", ",", "'ab'", ")", "as", "dest", ":", "for", "rlz", "in", "rlzs", ":", "data", "=", "all_losses", "[", ":", ",", "rlz", ".", "ordinal", "]", ".", "copy", "(", ")", ".", "view", "(", "damage_dt", ")", "# shape E", "arr", "=", "numpy", ".", "zeros", "(", "len", "(", "data", ")", ",", "dt_list", ")", "arr", "[", "'event_id'", "]", "=", "events_by_rlz", "[", "rlz", ".", "ordinal", "]", "[", "'eid'", "]", "arr", "[", "'rlzi'", "]", "=", "rlz", ".", "ordinal", "for", "field", "in", "damage_dt", ".", "names", ":", "arr", "[", "field", "]", "=", "data", "[", "field", "]", ".", "squeeze", "(", ")", "writer", ".", "save_block", "(", "arr", ",", "dest", ")", "return", "[", "fname", "]" ]
:param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object
[ ":", "param", "ekey", ":", "export", "key", "i", ".", "e", ".", "a", "pair", "(", "datastore", "key", "fmt", ")", ":", "param", "dstore", ":", "datastore", "object" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/risk.py#L479-L502
gem/oq-engine
openquake/calculators/export/risk.py
get_loss_maps
def get_loss_maps(dstore, kind): """ :param dstore: a DataStore instance :param kind: 'rlzs' or 'stats' """ oq = dstore['oqparam'] name = 'loss_maps-%s' % kind if name in dstore: # event_based risk return _to_loss_maps(dstore[name].value, oq.loss_maps_dt()) name = 'loss_curves-%s' % kind if name in dstore: # classical_risk # the loss maps are built on the fly from the loss curves loss_curves = dstore[name] loss_maps = scientific.broadcast( scientific.loss_maps, loss_curves, oq.conditional_loss_poes) return loss_maps raise KeyError('loss_maps/loss_curves missing in %s' % dstore)
python
def get_loss_maps(dstore, kind): oq = dstore['oqparam'] name = 'loss_maps-%s' % kind if name in dstore: return _to_loss_maps(dstore[name].value, oq.loss_maps_dt()) name = 'loss_curves-%s' % kind if name in dstore: loss_curves = dstore[name] loss_maps = scientific.broadcast( scientific.loss_maps, loss_curves, oq.conditional_loss_poes) return loss_maps raise KeyError('loss_maps/loss_curves missing in %s' % dstore)
[ "def", "get_loss_maps", "(", "dstore", ",", "kind", ")", ":", "oq", "=", "dstore", "[", "'oqparam'", "]", "name", "=", "'loss_maps-%s'", "%", "kind", "if", "name", "in", "dstore", ":", "# event_based risk", "return", "_to_loss_maps", "(", "dstore", "[", "name", "]", ".", "value", ",", "oq", ".", "loss_maps_dt", "(", ")", ")", "name", "=", "'loss_curves-%s'", "%", "kind", "if", "name", "in", "dstore", ":", "# classical_risk", "# the loss maps are built on the fly from the loss curves", "loss_curves", "=", "dstore", "[", "name", "]", "loss_maps", "=", "scientific", ".", "broadcast", "(", "scientific", ".", "loss_maps", ",", "loss_curves", ",", "oq", ".", "conditional_loss_poes", ")", "return", "loss_maps", "raise", "KeyError", "(", "'loss_maps/loss_curves missing in %s'", "%", "dstore", ")" ]
:param dstore: a DataStore instance :param kind: 'rlzs' or 'stats'
[ ":", "param", "dstore", ":", "a", "DataStore", "instance", ":", "param", "kind", ":", "rlzs", "or", "stats" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/risk.py#L526-L542
gem/oq-engine
openquake/calculators/export/risk.py
get_paths
def get_paths(rlz): """ :param rlz: a logic tree realization (composite or simple) :returns: a dict {'source_model_tree_path': string, 'gsim_tree_path': string} """ dic = {} if hasattr(rlz, 'sm_lt_path'): # composite realization dic['source_model_tree_path'] = '_'.join(rlz.sm_lt_path) dic['gsim_tree_path'] = '_'.join(rlz.gsim_lt_path) else: # simple GSIM realization dic['source_model_tree_path'] = '' dic['gsim_tree_path'] = '_'.join(rlz.lt_path) return dic
python
def get_paths(rlz): dic = {} if hasattr(rlz, 'sm_lt_path'): dic['source_model_tree_path'] = '_'.join(rlz.sm_lt_path) dic['gsim_tree_path'] = '_'.join(rlz.gsim_lt_path) else: dic['source_model_tree_path'] = '' dic['gsim_tree_path'] = '_'.join(rlz.lt_path) return dic
[ "def", "get_paths", "(", "rlz", ")", ":", "dic", "=", "{", "}", "if", "hasattr", "(", "rlz", ",", "'sm_lt_path'", ")", ":", "# composite realization", "dic", "[", "'source_model_tree_path'", "]", "=", "'_'", ".", "join", "(", "rlz", ".", "sm_lt_path", ")", "dic", "[", "'gsim_tree_path'", "]", "=", "'_'", ".", "join", "(", "rlz", ".", "gsim_lt_path", ")", "else", ":", "# simple GSIM realization", "dic", "[", "'source_model_tree_path'", "]", "=", "''", "dic", "[", "'gsim_tree_path'", "]", "=", "'_'", ".", "join", "(", "rlz", ".", "lt_path", ")", "return", "dic" ]
:param rlz: a logic tree realization (composite or simple) :returns: a dict {'source_model_tree_path': string, 'gsim_tree_path': string}
[ ":", "param", "rlz", ":", "a", "logic", "tree", "realization", "(", "composite", "or", "simple", ")", ":", "returns", ":", "a", "dict", "{", "source_model_tree_path", ":", "string", "gsim_tree_path", ":", "string", "}" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/risk.py#L573-L587
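As a quick illustration of the two branches in get_paths, here is a minimal usage sketch; the SimpleNamespace objects are hypothetical stand-ins for real logic-tree realizations, which carry the same attributes:

from types import SimpleNamespace

composite = SimpleNamespace(sm_lt_path=('b1', 'b2'), gsim_lt_path=('g1',))
simple = SimpleNamespace(lt_path=('g1', 'g2'))

get_paths(composite)  # {'source_model_tree_path': 'b1_b2', 'gsim_tree_path': 'g1'}
get_paths(simple)     # {'source_model_tree_path': '', 'gsim_tree_path': 'g1_g2'}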
gem/oq-engine
openquake/calculators/export/risk.py
export_by_tag_csv
def export_by_tag_csv(ekey, dstore): """ :param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object """ token, tag = ekey[0].split('/') data = extract(dstore, token + '/' + tag) fnames = [] writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) for stat, arr in data: tup = (ekey[0].replace('/', '-'), stat, ekey[1]) path = '%s-%s.%s' % tup fname = dstore.export_path(path) writer.save(arr, fname) fnames.append(fname) return fnames
python
def export_by_tag_csv(ekey, dstore): token, tag = ekey[0].split('/') data = extract(dstore, token + '/' + tag) fnames = [] writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) for stat, arr in data: tup = (ekey[0].replace('/', '-'), stat, ekey[1]) path = '%s-%s.%s' % tup fname = dstore.export_path(path) writer.save(arr, fname) fnames.append(fname) return fnames
[ "def", "export_by_tag_csv", "(", "ekey", ",", "dstore", ")", ":", "token", ",", "tag", "=", "ekey", "[", "0", "]", ".", "split", "(", "'/'", ")", "data", "=", "extract", "(", "dstore", ",", "token", "+", "'/'", "+", "tag", ")", "fnames", "=", "[", "]", "writer", "=", "writers", ".", "CsvWriter", "(", "fmt", "=", "writers", ".", "FIVEDIGITS", ")", "for", "stat", ",", "arr", "in", "data", ":", "tup", "=", "(", "ekey", "[", "0", "]", ".", "replace", "(", "'/'", ",", "'-'", ")", ",", "stat", ",", "ekey", "[", "1", "]", ")", "path", "=", "'%s-%s.%s'", "%", "tup", "fname", "=", "dstore", ".", "export_path", "(", "path", ")", "writer", ".", "save", "(", "arr", ",", "fname", ")", "fnames", ".", "append", "(", "fname", ")", "return", "fnames" ]
:param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object
[ ":", "param", "ekey", ":", "export", "key", "i", ".", "e", ".", "a", "pair", "(", "datastore", "key", "fmt", ")", ":", "param", "dstore", ":", "datastore", "object" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/risk.py#L611-L626
gem/oq-engine
openquake/calculators/export/risk.py
export_aggregate_by_csv
def export_aggregate_by_csv(ekey, dstore): """ :param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object """ token, what = ekey[0].split('/', 1) aw = extract(dstore, 'aggregate/' + what) fnames = [] writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) path = '%s.%s' % (sanitize(ekey[0]), ekey[1]) fname = dstore.export_path(path) writer.save(aw.to_table(), fname) fnames.append(fname) return fnames
python
def export_aggregate_by_csv(ekey, dstore): token, what = ekey[0].split('/', 1) aw = extract(dstore, 'aggregate/' + what) fnames = [] writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) path = '%s.%s' % (sanitize(ekey[0]), ekey[1]) fname = dstore.export_path(path) writer.save(aw.to_table(), fname) fnames.append(fname) return fnames
[ "def", "export_aggregate_by_csv", "(", "ekey", ",", "dstore", ")", ":", "token", ",", "what", "=", "ekey", "[", "0", "]", ".", "split", "(", "'/'", ",", "1", ")", "aw", "=", "extract", "(", "dstore", ",", "'aggregate/'", "+", "what", ")", "fnames", "=", "[", "]", "writer", "=", "writers", ".", "CsvWriter", "(", "fmt", "=", "writers", ".", "FIVEDIGITS", ")", "path", "=", "'%s.%s'", "%", "(", "sanitize", "(", "ekey", "[", "0", "]", ")", ",", "ekey", "[", "1", "]", ")", "fname", "=", "dstore", ".", "export_path", "(", "path", ")", "writer", ".", "save", "(", "aw", ".", "to_table", "(", ")", ",", "fname", ")", "fnames", ".", "append", "(", "fname", ")", "return", "fnames" ]
:param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object
[ ":", "param", "ekey", ":", "export", "key", "i", ".", "e", ".", "a", "pair", "(", "datastore", "key", "fmt", ")", ":", "param", "dstore", ":", "datastore", "object" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/risk.py#L630-L643
gem/oq-engine
openquake/calculators/export/risk.py
export_asset_risk_csv
def export_asset_risk_csv(ekey, dstore): """ :param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object """ writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) path = '%s.%s' % (sanitize(ekey[0]), ekey[1]) fname = dstore.export_path(path) md = extract(dstore, 'exposure_metadata') tostr = {'taxonomy': md.taxonomy} for tagname in md.tagnames: tostr[tagname] = getattr(md, tagname) arr = extract(dstore, 'asset_risk').array arefs = dstore['assetcol/asset_refs'].value rows = [] lossnames = sorted(name for name in arr.dtype.names if 'loss' in name) perilnames = sorted(name for name in arr.dtype.names if name.upper() == name) expnames = [name for name in arr.dtype.names if name not in md.tagnames and 'loss' not in name and name not in perilnames and name not in 'lon lat'] colnames = (['asset_ref'] + sorted(md.tagnames) + ['lon', 'lat'] + expnames + perilnames + lossnames) # sanity check assert len(colnames) == len(arr.dtype.names) + 1 for aref, rec in zip(arefs, arr): row = [aref] for name in colnames[1:]: value = rec[name] try: row.append('"%s"' % tostr[name][value]) except KeyError: row.append(value) rows.append(row) writer.save(rows, fname, colnames) return [fname]
python
def export_asset_risk_csv(ekey, dstore): writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) path = '%s.%s' % (sanitize(ekey[0]), ekey[1]) fname = dstore.export_path(path) md = extract(dstore, 'exposure_metadata') tostr = {'taxonomy': md.taxonomy} for tagname in md.tagnames: tostr[tagname] = getattr(md, tagname) arr = extract(dstore, 'asset_risk').array arefs = dstore['assetcol/asset_refs'].value rows = [] lossnames = sorted(name for name in arr.dtype.names if 'loss' in name) perilnames = sorted(name for name in arr.dtype.names if name.upper() == name) expnames = [name for name in arr.dtype.names if name not in md.tagnames and 'loss' not in name and name not in perilnames and name not in 'lon lat'] colnames = (['asset_ref'] + sorted(md.tagnames) + ['lon', 'lat'] + expnames + perilnames + lossnames) assert len(colnames) == len(arr.dtype.names) + 1 for aref, rec in zip(arefs, arr): row = [aref] for name in colnames[1:]: value = rec[name] try: row.append('"%s"' % tostr[name][value]) except KeyError: row.append(value) rows.append(row) writer.save(rows, fname, colnames) return [fname]
[ "def", "export_asset_risk_csv", "(", "ekey", ",", "dstore", ")", ":", "writer", "=", "writers", ".", "CsvWriter", "(", "fmt", "=", "writers", ".", "FIVEDIGITS", ")", "path", "=", "'%s.%s'", "%", "(", "sanitize", "(", "ekey", "[", "0", "]", ")", ",", "ekey", "[", "1", "]", ")", "fname", "=", "dstore", ".", "export_path", "(", "path", ")", "md", "=", "extract", "(", "dstore", ",", "'exposure_metadata'", ")", "tostr", "=", "{", "'taxonomy'", ":", "md", ".", "taxonomy", "}", "for", "tagname", "in", "md", ".", "tagnames", ":", "tostr", "[", "tagname", "]", "=", "getattr", "(", "md", ",", "tagname", ")", "arr", "=", "extract", "(", "dstore", ",", "'asset_risk'", ")", ".", "array", "arefs", "=", "dstore", "[", "'assetcol/asset_refs'", "]", ".", "value", "rows", "=", "[", "]", "lossnames", "=", "sorted", "(", "name", "for", "name", "in", "arr", ".", "dtype", ".", "names", "if", "'loss'", "in", "name", ")", "perilnames", "=", "sorted", "(", "name", "for", "name", "in", "arr", ".", "dtype", ".", "names", "if", "name", ".", "upper", "(", ")", "==", "name", ")", "expnames", "=", "[", "name", "for", "name", "in", "arr", ".", "dtype", ".", "names", "if", "name", "not", "in", "md", ".", "tagnames", "and", "'loss'", "not", "in", "name", "and", "name", "not", "in", "perilnames", "and", "name", "not", "in", "'lon lat'", "]", "colnames", "=", "(", "[", "'asset_ref'", "]", "+", "sorted", "(", "md", ".", "tagnames", ")", "+", "[", "'lon'", ",", "'lat'", "]", "+", "expnames", "+", "perilnames", "+", "lossnames", ")", "# sanity check", "assert", "len", "(", "colnames", ")", "==", "len", "(", "arr", ".", "dtype", ".", "names", ")", "+", "1", "for", "aref", ",", "rec", "in", "zip", "(", "arefs", ",", "arr", ")", ":", "row", "=", "[", "aref", "]", "for", "name", "in", "colnames", "[", "1", ":", "]", ":", "value", "=", "rec", "[", "name", "]", "try", ":", "row", ".", "append", "(", "'\"%s\"'", "%", "tostr", "[", "name", "]", "[", "value", "]", ")", "except", "KeyError", ":", "row", ".", "append", "(", "value", ")", "rows", ".", "append", "(", "row", ")", "writer", ".", "save", "(", "rows", ",", "fname", ",", "colnames", ")", "return", "[", "fname", "]" ]
:param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object
[ ":", "param", "ekey", ":", "export", "key", "i", ".", "e", ".", "a", "pair", "(", "datastore", "key", "fmt", ")", ":", "param", "dstore", ":", "datastore", "object" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/risk.py#L647-L682
gem/oq-engine
openquake/calculators/export/risk.py
export_agg_risk_csv
def export_agg_risk_csv(ekey, dstore): """ :param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object """ writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) path = '%s.%s' % (sanitize(ekey[0]), ekey[1]) fname = dstore.export_path(path) writer.save(dstore['agg_risk'].value, fname) return [fname]
python
def export_agg_risk_csv(ekey, dstore): writer = writers.CsvWriter(fmt=writers.FIVEDIGITS) path = '%s.%s' % (sanitize(ekey[0]), ekey[1]) fname = dstore.export_path(path) writer.save(dstore['agg_risk'].value, fname) return [fname]
[ "def", "export_agg_risk_csv", "(", "ekey", ",", "dstore", ")", ":", "writer", "=", "writers", ".", "CsvWriter", "(", "fmt", "=", "writers", ".", "FIVEDIGITS", ")", "path", "=", "'%s.%s'", "%", "(", "sanitize", "(", "ekey", "[", "0", "]", ")", ",", "ekey", "[", "1", "]", ")", "fname", "=", "dstore", ".", "export_path", "(", "path", ")", "writer", ".", "save", "(", "dstore", "[", "'agg_risk'", "]", ".", "value", ",", "fname", ")", "return", "[", "fname", "]" ]
:param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object
[ ":", "param", "ekey", ":", "export", "key", "i", ".", "e", ".", "a", "pair", "(", "datastore", "key", "fmt", ")", ":", "param", "dstore", ":", "datastore", "object" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/risk.py#L686-L695
gem/oq-engine
openquake/engine/tools/viewlog.py
viewlog
def viewlog(calc_id, host='localhost', port=8000): """ Extract the log of the given calculation ID from the WebUI """ base_url = 'http://%s:%s/v1/calc/' % (host, port) start = 0 psize = 10 # page size try: while True: url = base_url + '%d/log/%d:%d' % (calc_id, start, start + psize) rows = json.load(urlopen(url)) for row in rows: print(' '.join(row)) start += len(rows) time.sleep(1) except: pass
python
def viewlog(calc_id, host='localhost', port=8000): base_url = 'http://%s:%s/v1/calc/' % (host, port) start = 0 psize = 10 try: while True: url = base_url + '%d/log/%d:%d' % (calc_id, start, start + psize) rows = json.load(urlopen(url)) for row in rows: print(' '.join(row)) start += len(rows) time.sleep(1) except: pass
[ "def", "viewlog", "(", "calc_id", ",", "host", "=", "'localhost'", ",", "port", "=", "8000", ")", ":", "base_url", "=", "'http://%s:%s/v1/calc/'", "%", "(", "host", ",", "port", ")", "start", "=", "0", "psize", "=", "10", "# page size", "try", ":", "while", "True", ":", "url", "=", "base_url", "+", "'%d/log/%d:%d'", "%", "(", "calc_id", ",", "start", ",", "start", "+", "psize", ")", "rows", "=", "json", ".", "load", "(", "urlopen", "(", "url", ")", ")", "for", "row", "in", "rows", ":", "print", "(", "' '", ".", "join", "(", "row", ")", ")", "start", "+=", "len", "(", "rows", ")", "time", ".", "sleep", "(", "1", ")", "except", ":", "pass" ]
Extract the log of the given calculation ID from the WebUI
[ "Extract", "the", "log", "of", "the", "given", "calculation", "ID", "from", "the", "WebUI" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/engine/tools/viewlog.py#L33-L49
gem/oq-engine
openquake/hazardlib/gsim/megawati_2003.py
MegawatiEtAl2003.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. """ # Check that the GSIM supports the standard deviations requested assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types) coe = self.COEFFS[imt] mean = (self._get_magnitude_scaling(coe, rup.mag) + self._get_distance_scaling(coe, dists.rhypo) + self._get_azimuth_correction(coe, dists.azimuth)) # Convert to g if imt.name in "SA PGA": mean = np.log(np.exp(mean) / (100.0 * g)) # Compute std stddevs = self._compute_std(coe, stddev_types, dists.azimuth.shape) return mean, stddevs
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types) coe = self.COEFFS[imt] mean = (self._get_magnitude_scaling(coe, rup.mag) + self._get_distance_scaling(coe, dists.rhypo) + self._get_azimuth_correction(coe, dists.azimuth)) if imt.name in "SA PGA": mean = np.log(np.exp(mean) / (100.0 * g)) stddevs = self._compute_std(coe, stddev_types, dists.azimuth.shape) return mean, stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "# Check that the GSIM supports the standard deviations requested", "assert", "all", "(", "stddev_type", "in", "self", ".", "DEFINED_FOR_STANDARD_DEVIATION_TYPES", "for", "stddev_type", "in", "stddev_types", ")", "coe", "=", "self", ".", "COEFFS", "[", "imt", "]", "mean", "=", "(", "self", ".", "_get_magnitude_scaling", "(", "coe", ",", "rup", ".", "mag", ")", "+", "self", ".", "_get_distance_scaling", "(", "coe", ",", "dists", ".", "rhypo", ")", "+", "self", ".", "_get_azimuth_correction", "(", "coe", ",", "dists", ".", "azimuth", ")", ")", "# Convert to g", "if", "imt", ".", "name", "in", "\"SA PGA\"", ":", "mean", "=", "np", ".", "log", "(", "np", ".", "exp", "(", "mean", ")", "/", "(", "100.0", "*", "g", ")", ")", "# Compute std", "stddevs", "=", "self", ".", "_compute_std", "(", "coe", ",", "stddev_types", ",", "dists", ".", "azimuth", ".", "shape", ")", "return", "mean", ",", "stddevs" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/megawati_2003.py#L68-L86
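The conversion step deserves a note: the code divides exp(mean) by 100*g, which re-expresses a median given in cm/s^2 as a fraction of g. A hedged numeric sketch, assuming g is scipy.constants.g as in the GSIM modules:

import numpy as np
from scipy.constants import g  # 9.80665 m/s^2

ln_mean_cms2 = np.log(50.0)  # hypothetical median of 50 cm/s^2
ln_mean_g = np.log(np.exp(ln_mean_cms2) / (100.0 * g))  # ln(0.051) ~ -2.98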
gem/oq-engine
openquake/hazardlib/gsim/megawati_2003.py
MegawatiEtAl2003._get_distance_scaling
def _get_distance_scaling(self, coe, rhypo): """ Returns the distance scaling term """ return coe["a3"] * np.log(rhypo) + coe["a4"] * rhypo
python
def _get_distance_scaling(self, coe, rhypo): return coe["a3"] * np.log(rhypo) + coe["a4"] * rhypo
[ "def", "_get_distance_scaling", "(", "self", ",", "coe", ",", "rhypo", ")", ":", "return", "coe", "[", "\"a3\"", "]", "*", "np", ".", "log", "(", "rhypo", ")", "+", "coe", "[", "\"a4\"", "]", "*", "rhypo" ]
Returns the distance scaling term
[ "Returns", "the", "distance", "scaling", "term" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/megawati_2003.py#L94-L98
gem/oq-engine
openquake/hazardlib/gsim/megawati_2003.py
MegawatiEtAl2003._get_azimuth_correction
def _get_azimuth_correction(self, coe, azimuth): """ This is the azimuth correction defined in the functional form (see equation 3 at page 2256) """ term1 = abs(np.cos(np.radians(2.*azimuth))) term2 = abs(np.sin(np.radians(2.*azimuth)))*coe['a5'] return np.log(np.max(np.hstack((term1, term2))))
python
def _get_azimuth_correction(self, coe, azimuth): term1 = abs(np.cos(np.radians(2.*azimuth))) term2 = abs(np.sin(np.radians(2.*azimuth)))*coe['a5'] return np.log(np.max(np.hstack((term1, term2))))
[ "def", "_get_azimuth_correction", "(", "self", ",", "coe", ",", "azimuth", ")", ":", "term1", "=", "abs", "(", "np", ".", "cos", "(", "np", ".", "radians", "(", "2.", "*", "azimuth", ")", ")", ")", "term2", "=", "abs", "(", "np", ".", "sin", "(", "np", ".", "radians", "(", "2.", "*", "azimuth", ")", ")", ")", "*", "coe", "[", "'a5'", "]", "return", "np", ".", "log", "(", "np", ".", "max", "(", "np", ".", "hstack", "(", "(", "term1", ",", "term2", ")", ")", ")", ")" ]
This is the azimuth correction defined in the functional form (see equation 3 at page 2256)
[ "This", "is", "the", "azimuth", "correction", "defined", "in", "the", "functional", "form", "(", "see", "equation", "3", "at", "page", "2256", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/megawati_2003.py#L100-L107
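Written out, the correction implemented above is (theta the azimuth, a5 a model coefficient):

F_{az}(\theta) = \ln \max\left( \left|\cos 2\theta\right|,\; a_5 \left|\sin 2\theta\right| \right)

Note that np.max over the hstack of the two terms reduces everything to a single scalar, so for an array of azimuths the implementation returns one value rather than one per site.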
gem/oq-engine
openquake/baselib/parallel.py
get_pickled_sizes
def get_pickled_sizes(obj): """ Return the pickled sizes of an object and its direct attributes, ordered by decreasing size. Here is an example: >> total_size, partial_sizes = get_pickled_sizes(Monitor('')) >> total_size 345 >> partial_sizes [('_procs', 214), ('exc', 4), ('mem', 4), ('start_time', 4), ('_start_time', 4), ('duration', 4)] Notice that the sizes depend on the operating system and the machine. """ sizes = [] attrs = getattr(obj, '__dict__', {}) for name, value in attrs.items(): sizes.append((name, len(Pickled(value)))) return len(Pickled(obj)), sorted( sizes, key=lambda pair: pair[1], reverse=True)
python
def get_pickled_sizes(obj): sizes = [] attrs = getattr(obj, '__dict__', {}) for name, value in attrs.items(): sizes.append((name, len(Pickled(value)))) return len(Pickled(obj)), sorted( sizes, key=lambda pair: pair[1], reverse=True)
[ "def", "get_pickled_sizes", "(", "obj", ")", ":", "sizes", "=", "[", "]", "attrs", "=", "getattr", "(", "obj", ",", "'__dict__'", ",", "{", "}", ")", "for", "name", ",", "value", "in", "attrs", ".", "items", "(", ")", ":", "sizes", ".", "append", "(", "(", "name", ",", "len", "(", "Pickled", "(", "value", ")", ")", ")", ")", "return", "len", "(", "Pickled", "(", "obj", ")", ")", ",", "sorted", "(", "sizes", ",", "key", "=", "lambda", "pair", ":", "pair", "[", "1", "]", ",", "reverse", "=", "True", ")" ]
Return the pickled sizes of an object and its direct attributes, ordered by decreasing size. Here is an example: >> total_size, partial_sizes = get_pickled_sizes(Monitor('')) >> total_size 345 >> partial_sizes [('_procs', 214), ('exc', 4), ('mem', 4), ('start_time', 4), ('_start_time', 4), ('duration', 4)] Notice that the sizes depend on the operating system and the machine.
[ "Return", "the", "pickled", "sizes", "of", "an", "object", "and", "its", "direct", "attributes", "ordered", "by", "decreasing", "size", ".", "Here", "is", "an", "example", ":" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L272-L291
gem/oq-engine
openquake/baselib/parallel.py
pickle_sequence
def pickle_sequence(objects): """ Convert an iterable of objects into a list of pickled objects. If the iterable contains copies, the pickling will be done only once. If the iterable contains objects already pickled, they will not be pickled again. :param objects: a sequence of objects to pickle """ cache = {} out = [] for obj in objects: obj_id = id(obj) if obj_id not in cache: if isinstance(obj, Pickled): # already pickled cache[obj_id] = obj else: # pickle the object cache[obj_id] = Pickled(obj) out.append(cache[obj_id]) return out
python
def pickle_sequence(objects): cache = {} out = [] for obj in objects: obj_id = id(obj) if obj_id not in cache: if isinstance(obj, Pickled): cache[obj_id] = obj else: cache[obj_id] = Pickled(obj) out.append(cache[obj_id]) return out
[ "def", "pickle_sequence", "(", "objects", ")", ":", "cache", "=", "{", "}", "out", "=", "[", "]", "for", "obj", "in", "objects", ":", "obj_id", "=", "id", "(", "obj", ")", "if", "obj_id", "not", "in", "cache", ":", "if", "isinstance", "(", "obj", ",", "Pickled", ")", ":", "# already pickled", "cache", "[", "obj_id", "]", "=", "obj", "else", ":", "# pickle the object", "cache", "[", "obj_id", "]", "=", "Pickled", "(", "obj", ")", "out", ".", "append", "(", "cache", "[", "obj_id", "]", ")", "return", "out" ]
Convert an iterable of objects into a list of pickled objects. If the iterable contains copies, the pickling will be done only once. If the iterable contains objects already pickled, they will not be pickled again. :param objects: a sequence of objects to pickle
[ "Convert", "an", "iterable", "of", "objects", "into", "a", "list", "of", "pickled", "objects", ".", "If", "the", "iterable", "contains", "copies", "the", "pickling", "will", "be", "done", "only", "once", ".", "If", "the", "iterable", "contains", "objects", "already", "pickled", "they", "will", "not", "be", "pickled", "again", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L294-L313
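A small behavior sketch for the cache in pickle_sequence: identical objects (same id()) map to one Pickled instance, so duplicates cost a single serialization:

big = list(range(10000))
pickled = pickle_sequence([big, big, 'x'])
assert pickled[0] is pickled[1]  # cached by id(), pickled only once
assert all(isinstance(p, Pickled) for p in pickled)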
gem/oq-engine
openquake/baselib/parallel.py
check_mem_usage
def check_mem_usage(soft_percent=None, hard_percent=None): """ Display a warning if we are running out of memory """ soft_percent = soft_percent or config.memory.soft_mem_limit hard_percent = hard_percent or config.memory.hard_mem_limit used_mem_percent = psutil.virtual_memory().percent if used_mem_percent > hard_percent: raise MemoryError('Using more memory than allowed by configuration ' '(Used: %d%% / Allowed: %d%%)! Shutting down.' % (used_mem_percent, hard_percent)) elif used_mem_percent > soft_percent: msg = 'Using over %d%% of the memory in %s!' return msg % (used_mem_percent, socket.gethostname())
python
def check_mem_usage(soft_percent=None, hard_percent=None): soft_percent = soft_percent or config.memory.soft_mem_limit hard_percent = hard_percent or config.memory.hard_mem_limit used_mem_percent = psutil.virtual_memory().percent if used_mem_percent > hard_percent: raise MemoryError('Using more memory than allowed by configuration ' '(Used: %d%% / Allowed: %d%%)! Shutting down.' % (used_mem_percent, hard_percent)) elif used_mem_percent > soft_percent: msg = 'Using over %d%% of the memory in %s!' return msg % (used_mem_percent, socket.gethostname())
[ "def", "check_mem_usage", "(", "soft_percent", "=", "None", ",", "hard_percent", "=", "None", ")", ":", "soft_percent", "=", "soft_percent", "or", "config", ".", "memory", ".", "soft_mem_limit", "hard_percent", "=", "hard_percent", "or", "config", ".", "memory", ".", "hard_mem_limit", "used_mem_percent", "=", "psutil", ".", "virtual_memory", "(", ")", ".", "percent", "if", "used_mem_percent", ">", "hard_percent", ":", "raise", "MemoryError", "(", "'Using more memory than allowed by configuration '", "'(Used: %d%% / Allowed: %d%%)! Shutting down.'", "%", "(", "used_mem_percent", ",", "hard_percent", ")", ")", "elif", "used_mem_percent", ">", "soft_percent", ":", "msg", "=", "'Using over %d%% of the memory in %s!'", "return", "msg", "%", "(", "used_mem_percent", ",", "socket", ".", "gethostname", "(", ")", ")" ]
Display a warning if we are running out of memory
[ "Display", "a", "warning", "if", "we", "are", "running", "out", "of", "memory" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L370-L383
gem/oq-engine
openquake/baselib/parallel.py
safely_call
def safely_call(func, args, task_no=0, mon=dummy_mon): """ Call the given function with the given arguments safely, i.e. by trapping the exceptions. Return a pair (result, exc_type) where exc_type is None if no exceptions occur, otherwise it is the exception class and the result is a string containing error message and traceback. :param func: the function to call :param args: the arguments :param task_no: the task number :param mon: a monitor """ isgenfunc = inspect.isgeneratorfunction(func) if hasattr(args[0], 'unpickle'): # args is a list of Pickled objects args = [a.unpickle() for a in args] if mon is dummy_mon: # in the DbServer assert not isgenfunc, func return Result.new(func, args, mon) mon = mon.new(operation='total ' + func.__name__, measuremem=True) mon.weight = getattr(args[0], 'weight', 1.) # used in task_info mon.task_no = task_no if mon.inject: args += (mon,) with Socket(mon.backurl, zmq.PUSH, 'connect') as zsocket: msg = check_mem_usage() # warn if too much memory is used if msg: zsocket.send(Result(None, mon, msg=msg)) if inspect.isgeneratorfunction(func): gfunc = func else: def gfunc(*args): yield func(*args) gobj = gfunc(*args) for count in itertools.count(): res = Result.new(next, (gobj,), mon, count=count) # StopIteration -> TASK_ENDED try: zsocket.send(res) except Exception: # like OverflowError _etype, exc, tb = sys.exc_info() err = Result(exc, mon, ''.join(traceback.format_tb(tb)), count=count) zsocket.send(err) mon.duration = 0 mon.counts = 0 mon.children.clear() if res.msg == 'TASK_ENDED': break
python
def safely_call(func, args, task_no=0, mon=dummy_mon): isgenfunc = inspect.isgeneratorfunction(func) if hasattr(args[0], 'unpickle'): args = [a.unpickle() for a in args] if mon is dummy_mon: assert not isgenfunc, func return Result.new(func, args, mon) mon = mon.new(operation='total ' + func.__name__, measuremem=True) mon.weight = getattr(args[0], 'weight', 1.) mon.task_no = task_no if mon.inject: args += (mon,) with Socket(mon.backurl, zmq.PUSH, 'connect') as zsocket: msg = check_mem_usage() if msg: zsocket.send(Result(None, mon, msg=msg)) if inspect.isgeneratorfunction(func): gfunc = func else: def gfunc(*args): yield func(*args) gobj = gfunc(*args) for count in itertools.count(): res = Result.new(next, (gobj,), mon, count=count) try: zsocket.send(res) except Exception: _etype, exc, tb = sys.exc_info() err = Result(exc, mon, ''.join(traceback.format_tb(tb)), count=count) zsocket.send(err) mon.duration = 0 mon.counts = 0 mon.children.clear() if res.msg == 'TASK_ENDED': break
[ "def", "safely_call", "(", "func", ",", "args", ",", "task_no", "=", "0", ",", "mon", "=", "dummy_mon", ")", ":", "isgenfunc", "=", "inspect", ".", "isgeneratorfunction", "(", "func", ")", "if", "hasattr", "(", "args", "[", "0", "]", ",", "'unpickle'", ")", ":", "# args is a list of Pickled objects", "args", "=", "[", "a", ".", "unpickle", "(", ")", "for", "a", "in", "args", "]", "if", "mon", "is", "dummy_mon", ":", "# in the DbServer", "assert", "not", "isgenfunc", ",", "func", "return", "Result", ".", "new", "(", "func", ",", "args", ",", "mon", ")", "mon", "=", "mon", ".", "new", "(", "operation", "=", "'total '", "+", "func", ".", "__name__", ",", "measuremem", "=", "True", ")", "mon", ".", "weight", "=", "getattr", "(", "args", "[", "0", "]", ",", "'weight'", ",", "1.", ")", "# used in task_info", "mon", ".", "task_no", "=", "task_no", "if", "mon", ".", "inject", ":", "args", "+=", "(", "mon", ",", ")", "with", "Socket", "(", "mon", ".", "backurl", ",", "zmq", ".", "PUSH", ",", "'connect'", ")", "as", "zsocket", ":", "msg", "=", "check_mem_usage", "(", ")", "# warn if too much memory is used", "if", "msg", ":", "zsocket", ".", "send", "(", "Result", "(", "None", ",", "mon", ",", "msg", "=", "msg", ")", ")", "if", "inspect", ".", "isgeneratorfunction", "(", "func", ")", ":", "gfunc", "=", "func", "else", ":", "def", "gfunc", "(", "*", "args", ")", ":", "yield", "func", "(", "*", "args", ")", "gobj", "=", "gfunc", "(", "*", "args", ")", "for", "count", "in", "itertools", ".", "count", "(", ")", ":", "res", "=", "Result", ".", "new", "(", "next", ",", "(", "gobj", ",", ")", ",", "mon", ",", "count", "=", "count", ")", "# StopIteration -> TASK_ENDED", "try", ":", "zsocket", ".", "send", "(", "res", ")", "except", "Exception", ":", "# like OverflowError", "_etype", ",", "exc", ",", "tb", "=", "sys", ".", "exc_info", "(", ")", "err", "=", "Result", "(", "exc", ",", "mon", ",", "''", ".", "join", "(", "traceback", ".", "format_tb", "(", "tb", ")", ")", ",", "count", "=", "count", ")", "zsocket", ".", "send", "(", "err", ")", "mon", ".", "duration", "=", "0", "mon", ".", "counts", "=", "0", "mon", ".", "children", ".", "clear", "(", ")", "if", "res", ".", "msg", "==", "'TASK_ENDED'", ":", "break" ]
Call the given function with the given arguments safely, i.e. by trapping the exceptions. Return a pair (result, exc_type) where exc_type is None if no exceptions occur, otherwise it is the exception class and the result is a string containing error message and traceback. :param func: the function to call :param args: the arguments :param task_no: the task number :param mon: a monitor
[ "Call", "the", "given", "function", "with", "the", "given", "arguments", "safely", "i", ".", "e", ".", "by", "trapping", "the", "exceptions", ".", "Return", "a", "pair", "(", "result", "exc_type", ")", "where", "exc_type", "is", "None", "if", "no", "exceptions", "occur", "otherwise", "it", "is", "the", "exception", "class", "and", "the", "result", "is", "a", "string", "containing", "error", "message", "and", "traceback", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L390-L440
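The least obvious part of safely_call is the generator normalization: a plain function is wrapped into a one-shot generator so that both kinds of task can be drained by the same send loop, with StopIteration mapped to the TASK_ENDED message. A reduced sketch of just that pattern (not the engine code itself):

import inspect

def as_generator(func):
    if inspect.isgeneratorfunction(func):
        return func
    def gfunc(*args):
        yield func(*args)  # one result, then StopIteration
    return gfunc

gen = as_generator(lambda x: x + 1)(41)
assert next(gen) == 42  # calling next(gen) again would raise StopIteration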
gem/oq-engine
openquake/baselib/parallel.py
save_task_info
def save_task_info(self, res, mem_gb=0): """ :param self: an object with attributes .hdf5, .argnames, .sent :param res: a :class:`Result` object :param mem_gb: memory consumption at the saving time (optional) """ mon = res.mon name = mon.operation[6:] # strip 'total ' if self.hdf5: mon.hdf5 = self.hdf5 # needed for the flush below t = (mon.task_no, mon.weight, mon.duration, len(res.pik), mem_gb) data = numpy.array([t], task_info_dt) hdf5.extend3(self.hdf5.filename, 'task_info/' + name, data, argnames=self.argnames, sent=self.sent) mon.flush()
python
def save_task_info(self, res, mem_gb=0): mon = res.mon name = mon.operation[6:] if self.hdf5: mon.hdf5 = self.hdf5 t = (mon.task_no, mon.weight, mon.duration, len(res.pik), mem_gb) data = numpy.array([t], task_info_dt) hdf5.extend3(self.hdf5.filename, 'task_info/' + name, data, argnames=self.argnames, sent=self.sent) mon.flush()
[ "def", "save_task_info", "(", "self", ",", "res", ",", "mem_gb", "=", "0", ")", ":", "mon", "=", "res", ".", "mon", "name", "=", "mon", ".", "operation", "[", "6", ":", "]", "# strip 'total '", "if", "self", ".", "hdf5", ":", "mon", ".", "hdf5", "=", "self", ".", "hdf5", "# needed for the flush below", "t", "=", "(", "mon", ".", "task_no", ",", "mon", ".", "weight", ",", "mon", ".", "duration", ",", "len", "(", "res", ".", "pik", ")", ",", "mem_gb", ")", "data", "=", "numpy", ".", "array", "(", "[", "t", "]", ",", "task_info_dt", ")", "hdf5", ".", "extend3", "(", "self", ".", "hdf5", ".", "filename", ",", "'task_info/'", "+", "name", ",", "data", ",", "argnames", "=", "self", ".", "argnames", ",", "sent", "=", "self", ".", "sent", ")", "mon", ".", "flush", "(", ")" ]
:param self: an object with attributes .hdf5, .argnames, .sent :param res: a :class:`Result` object :param mem_gb: memory consumption at the saving time (optional)
[ ":", "param", "self", ":", "an", "object", "with", "attributes", ".", "hdf5", ".", "argnames", ".", "sent", ":", "parent", "res", ":", "a", ":", "class", ":", "Result", "object", ":", "param", "mem_gb", ":", "memory", "consumption", "at", "the", "saving", "time", "(", "optional", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L550-L564
gem/oq-engine
openquake/baselib/parallel.py
init_workers
def init_workers(): """Waiting function, used to wake up the process pool""" setproctitle('oq-worker') # unregister raiseMasterKilled in oq-workers to avoid deadlock # since processes are terminated via pool.terminate() signal.signal(signal.SIGTERM, signal.SIG_DFL) # prctl is still useful (on Linux) to terminate all spawned processes # when master is killed via SIGKILL try: import prctl except ImportError: pass else: # if the parent dies, the children die prctl.set_pdeathsig(signal.SIGKILL)
python
def init_workers(): setproctitle('oq-worker') signal.signal(signal.SIGTERM, signal.SIG_DFL) try: import prctl except ImportError: pass else: prctl.set_pdeathsig(signal.SIGKILL)
[ "def", "init_workers", "(", ")", ":", "setproctitle", "(", "'oq-worker'", ")", "# unregister raiseMasterKilled in oq-workers to avoid deadlock", "# since processes are terminated via pool.terminate()", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "signal", ".", "SIG_DFL", ")", "# prctl is still useful (on Linux) to terminate all spawned processes", "# when master is killed via SIGKILL", "try", ":", "import", "prctl", "except", "ImportError", ":", "pass", "else", ":", "# if the parent dies, the children die", "prctl", ".", "set_pdeathsig", "(", "signal", ".", "SIGKILL", ")" ]
Waiting function, used to wake up the process pool
[ "Waiting", "function", "used", "to", "wake", "up", "the", "process", "pool" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L567-L581
gem/oq-engine
openquake/baselib/parallel.py
sequential_apply
def sequential_apply(task, args, concurrent_tasks=cpu_count * 3, weight=lambda item: 1, key=lambda item: 'Unspecified'): """ Apply sequentially task to args by splitting args[0] in blocks """ chunks = split_in_blocks(args[0], concurrent_tasks or 1, weight, key) task_args = [(ch,) + args[1:] for ch in chunks] return itertools.starmap(task, task_args)
python
def sequential_apply(task, args, concurrent_tasks=cpu_count * 3, weight=lambda item: 1, key=lambda item: 'Unspecified'): chunks = split_in_blocks(args[0], concurrent_tasks or 1, weight, key) task_args = [(ch,) + args[1:] for ch in chunks] return itertools.starmap(task, task_args)
[ "def", "sequential_apply", "(", "task", ",", "args", ",", "concurrent_tasks", "=", "cpu_count", "*", "3", ",", "weight", "=", "lambda", "item", ":", "1", ",", "key", "=", "lambda", "item", ":", "'Unspecified'", ")", ":", "chunks", "=", "split_in_blocks", "(", "args", "[", "0", "]", ",", "concurrent_tasks", "or", "1", ",", "weight", ",", "key", ")", "task_args", "=", "[", "(", "ch", ",", ")", "+", "args", "[", "1", ":", "]", "for", "ch", "in", "chunks", "]", "return", "itertools", ".", "starmap", "(", "task", ",", "task_args", ")" ]
Apply sequentially task to args by splitting args[0] in blocks
[ "Apply", "sequentially", "task", "to", "args", "by", "splitting", "args", "[", "0", "]", "in", "blocks" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L787-L794
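A hypothetical usage sketch for sequential_apply: the first element of args is split into blocks and the task is applied to each block in the main process, which is handy for debugging the parallel code path:

def total(chunk, factor):
    return sum(chunk) * factor

results = list(sequential_apply(total, (range(100), 2), concurrent_tasks=4))
assert sum(results) == sum(range(100)) * 2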
gem/oq-engine
openquake/baselib/parallel.py
Result.get
def get(self): """ Returns the underlying value or raise the underlying exception """ val = self.pik.unpickle() if self.tb_str: etype = val.__class__ msg = '\n%s%s: %s' % (self.tb_str, etype.__name__, val) if issubclass(etype, KeyError): raise RuntimeError(msg) # nicer message else: raise etype(msg) return val
python
def get(self): val = self.pik.unpickle() if self.tb_str: etype = val.__class__ msg = '\n%s%s: %s' % (self.tb_str, etype.__name__, val) if issubclass(etype, KeyError): raise RuntimeError(msg) else: raise etype(msg) return val
[ "def", "get", "(", "self", ")", ":", "val", "=", "self", ".", "pik", ".", "unpickle", "(", ")", "if", "self", ".", "tb_str", ":", "etype", "=", "val", ".", "__class__", "msg", "=", "'\\n%s%s: %s'", "%", "(", "self", ".", "tb_str", ",", "etype", ".", "__name__", ",", "val", ")", "if", "issubclass", "(", "etype", ",", "KeyError", ")", ":", "raise", "RuntimeError", "(", "msg", ")", "# nicer message", "else", ":", "raise", "etype", "(", "msg", ")", "return", "val" ]
Returns the underlying value or raise the underlying exception
[ "Returns", "the", "underlying", "value", "or", "raise", "the", "underlying", "exception" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L337-L349
gem/oq-engine
openquake/baselib/parallel.py
Result.new
def new(cls, func, args, mon, count=0): """ :returns: a new Result instance """ try: with mon: val = func(*args) except StopIteration: res = Result(None, mon, msg='TASK_ENDED') except Exception: _etype, exc, tb = sys.exc_info() res = Result(exc, mon, ''.join(traceback.format_tb(tb)), count=count) else: res = Result(val, mon, count=count) return res
python
def new(cls, func, args, mon, count=0): try: with mon: val = func(*args) except StopIteration: res = Result(None, mon, msg='TASK_ENDED') except Exception: _etype, exc, tb = sys.exc_info() res = Result(exc, mon, ''.join(traceback.format_tb(tb)), count=count) else: res = Result(val, mon, count=count) return res
[ "def", "new", "(", "cls", ",", "func", ",", "args", ",", "mon", ",", "count", "=", "0", ")", ":", "try", ":", "with", "mon", ":", "val", "=", "func", "(", "*", "args", ")", "except", "StopIteration", ":", "res", "=", "Result", "(", "None", ",", "mon", ",", "msg", "=", "'TASK_ENDED'", ")", "except", "Exception", ":", "_etype", ",", "exc", ",", "tb", "=", "sys", ".", "exc_info", "(", ")", "res", "=", "Result", "(", "exc", ",", "mon", ",", "''", ".", "join", "(", "traceback", ".", "format_tb", "(", "tb", ")", ")", ",", "count", "=", "count", ")", "else", ":", "res", "=", "Result", "(", "val", ",", "mon", ",", "count", "=", "count", ")", "return", "res" ]
:returns: a new Result instance
[ ":", "returns", ":", "a", "new", "Result", "instance" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L352-L367
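A behavior sketch for Result.new, assuming dummy_mon (the module-level no-op monitor) supports the context-manager protocol as the real Monitor does, and that the Result constructor pickles the value into .pik as Result.get implies:

ok = Result.new(lambda x: x * 2, (21,), dummy_mon)
assert ok.get() == 42  # the value round-trips through pickling

bad = Result.new(lambda: 1 / 0, (), dummy_mon)
# bad.get() would re-raise ZeroDivisionError, prefixed by the remote traceback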
gem/oq-engine
openquake/baselib/parallel.py
IterResult.sum
def sum(cls, iresults): """ Sum the data transfer information of a set of results """ res = object.__new__(cls) res.received = [] res.sent = 0 for iresult in iresults: res.received.extend(iresult.received) res.sent += iresult.sent name = iresult.name.split('#', 1)[0] if hasattr(res, 'name'): assert res.name.split('#', 1)[0] == name, (res.name, name) else: res.name = iresult.name.split('#')[0] return res
python
def sum(cls, iresults): res = object.__new__(cls) res.received = [] res.sent = 0 for iresult in iresults: res.received.extend(iresult.received) res.sent += iresult.sent name = iresult.name.split('#', 1)[0] if hasattr(res, 'name'): assert res.name.split('#', 1)[0] == name, (res.name, name) else: res.name = iresult.name.split('#')[0] return res
[ "def", "sum", "(", "cls", ",", "iresults", ")", ":", "res", "=", "object", ".", "__new__", "(", "cls", ")", "res", ".", "received", "=", "[", "]", "res", ".", "sent", "=", "0", "for", "iresult", "in", "iresults", ":", "res", ".", "received", ".", "extend", "(", "iresult", ".", "received", ")", "res", ".", "sent", "+=", "iresult", ".", "sent", "name", "=", "iresult", ".", "name", ".", "split", "(", "'#'", ",", "1", ")", "[", "0", "]", "if", "hasattr", "(", "res", ",", "'name'", ")", ":", "assert", "res", ".", "name", ".", "split", "(", "'#'", ",", "1", ")", "[", "0", "]", "==", "name", ",", "(", "res", ".", "name", ",", "name", ")", "else", ":", "res", ".", "name", "=", "iresult", ".", "name", ".", "split", "(", "'#'", ")", "[", "0", "]", "return", "res" ]
Sum the data transfer information of a set of results
[ "Sum", "the", "data", "transfer", "information", "of", "a", "set", "of", "results" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L532-L547
gem/oq-engine
openquake/baselib/parallel.py
Starmap.apply
def apply(cls, task, args, concurrent_tasks=cpu_count * 3, maxweight=None, weight=lambda item: 1, key=lambda item: 'Unspecified', distribute=None, progress=logging.info): r""" Apply a task to a tuple of the form (sequence, \*other_args) by first splitting the sequence in chunks, according to the weight of the elements and possibly to a key (see :func: `openquake.baselib.general.split_in_blocks`). :param task: a task to run in parallel :param args: the arguments to be passed to the task function :param concurrent_tasks: hint about how many tasks to generate :param maxweight: if not None, used to split the tasks :param weight: function to extract the weight of an item in arg0 :param key: function to extract the kind of an item in arg0 :param distribute: if not given, inferred from OQ_DISTRIBUTE :param progress: logging function to use (default logging.info) :returns: an :class:`IterResult` object """ arg0 = args[0] # this is assumed to be a sequence mon = args[-1] args = args[1:-1] if maxweight: # block_splitter is lazy task_args = ((blk,) + args for blk in block_splitter( arg0, maxweight, weight, key)) else: # split_in_blocks is eager task_args = [(blk,) + args for blk in split_in_blocks( arg0, concurrent_tasks or 1, weight, key)] return cls(task, task_args, mon, distribute, progress).submit_all()
python
def apply(cls, task, args, concurrent_tasks=cpu_count * 3, maxweight=None, weight=lambda item: 1, key=lambda item: 'Unspecified', distribute=None, progress=logging.info): arg0 = args[0] mon = args[-1] args = args[1:-1] if maxweight: task_args = ((blk,) + args for blk in block_splitter( arg0, maxweight, weight, key)) else: task_args = [(blk,) + args for blk in split_in_blocks( arg0, concurrent_tasks or 1, weight, key)] return cls(task, task_args, mon, distribute, progress).submit_all()
[ "def", "apply", "(", "cls", ",", "task", ",", "args", ",", "concurrent_tasks", "=", "cpu_count", "*", "3", ",", "maxweight", "=", "None", ",", "weight", "=", "lambda", "item", ":", "1", ",", "key", "=", "lambda", "item", ":", "'Unspecified'", ",", "distribute", "=", "None", ",", "progress", "=", "logging", ".", "info", ")", ":", "arg0", "=", "args", "[", "0", "]", "# this is assumed to be a sequence", "mon", "=", "args", "[", "-", "1", "]", "args", "=", "args", "[", "1", ":", "-", "1", "]", "if", "maxweight", ":", "# block_splitter is lazy", "task_args", "=", "(", "(", "blk", ",", ")", "+", "args", "for", "blk", "in", "block_splitter", "(", "arg0", ",", "maxweight", ",", "weight", ",", "key", ")", ")", "else", ":", "# split_in_blocks is eager", "task_args", "=", "[", "(", "blk", ",", ")", "+", "args", "for", "blk", "in", "split_in_blocks", "(", "arg0", ",", "concurrent_tasks", "or", "1", ",", "weight", ",", "key", ")", "]", "return", "cls", "(", "task", ",", "task_args", ",", "mon", ",", "distribute", ",", "progress", ")", ".", "submit_all", "(", ")" ]
r""" Apply a task to a tuple of the form (sequence, \*other_args) by first splitting the sequence in chunks, according to the weight of the elements and possibly to a key (see :func: `openquake.baselib.general.split_in_blocks`). :param task: a task to run in parallel :param args: the arguments to be passed to the task function :param concurrent_tasks: hint about how many tasks to generate :param maxweight: if not None, used to split the tasks :param weight: function to extract the weight of an item in arg0 :param key: function to extract the kind of an item in arg0 :param distribute: if not given, inferred from OQ_DISTRIBUTE :param progress: logging function to use (default logging.info) :returns: an :class:`IterResult` object
[ "r", "Apply", "a", "task", "to", "a", "tuple", "of", "the", "form", "(", "sequence", "\\", "*", "other_args", ")", "by", "first", "splitting", "the", "sequence", "in", "chunks", "according", "to", "the", "weight", "of", "the", "elements", "and", "possibly", "to", "a", "key", "(", "see", ":", "func", ":", "openquake", ".", "baselib", ".", "general", ".", "split_in_blocks", ")", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L620-L649
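A hedged end-to-end sketch for Starmap.apply: args packs the sequence to split first and a Monitor instance last (here mon stands for a real openquake.baselib.performance.Monitor); the monitor is assumed to be injected as the task's trailing argument, which safely_call does when mon.inject is set:

def count_chars(words, monitor):
    return sum(len(w) for w in words)

smap_result = Starmap.apply(count_chars, (['alpha', 'beta', 'gamma'], mon))
total = smap_result.reduce()  # operator.add over the partial sums -> 14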
gem/oq-engine
openquake/baselib/parallel.py
Starmap.log_percent
def log_percent(self): """ Log the progress of the computation as a percentage """ done = self.total - self.todo percent = int(float(done) / self.total * 100) if not hasattr(self, 'prev_percent'): # first time self.prev_percent = 0 self.progress('Sent %s of data in %d %s task(s)', humansize(self.sent.sum()), self.total, self.name) elif percent > self.prev_percent: self.progress('%s %3d%% [of %d tasks]', self.name, percent, len(self.tasks)) self.prev_percent = percent return done
python
def log_percent(self): done = self.total - self.todo percent = int(float(done) / self.total * 100) if not hasattr(self, 'prev_percent'): self.prev_percent = 0 self.progress('Sent %s of data in %d %s task(s)', humansize(self.sent.sum()), self.total, self.name) elif percent > self.prev_percent: self.progress('%s %3d%% [of %d tasks]', self.name, percent, len(self.tasks)) self.prev_percent = percent return done
[ "def", "log_percent", "(", "self", ")", ":", "done", "=", "self", ".", "total", "-", "self", ".", "todo", "percent", "=", "int", "(", "float", "(", "done", ")", "/", "self", ".", "total", "*", "100", ")", "if", "not", "hasattr", "(", "self", ",", "'prev_percent'", ")", ":", "# first time", "self", ".", "prev_percent", "=", "0", "self", ".", "progress", "(", "'Sent %s of data in %d %s task(s)'", ",", "humansize", "(", "self", ".", "sent", ".", "sum", "(", ")", ")", ",", "self", ".", "total", ",", "self", ".", "name", ")", "elif", "percent", ">", "self", ".", "prev_percent", ":", "self", ".", "progress", "(", "'%s %3d%% [of %d tasks]'", ",", "self", ".", "name", ",", "percent", ",", "len", "(", "self", ".", "tasks", ")", ")", "self", ".", "prev_percent", "=", "percent", "return", "done" ]
Log the progress of the computation as a percentage
[ "Log", "the", "progress", "of", "the", "computation", "in", "percentage" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L691-L705
gem/oq-engine
openquake/baselib/parallel.py
Starmap.submit
def submit(self, *args, func=None, monitor=None): """ Submit the given arguments to the underlying task """ monitor = monitor or self.monitor func = func or self.task_func if not hasattr(self, 'socket'): # first time self.__class__.running_tasks = self.tasks self.socket = Socket(self.receiver, zmq.PULL, 'bind').__enter__() monitor.backurl = 'tcp://%s:%s' % ( config.dbserver.host, self.socket.port) assert not isinstance(args[-1], Monitor) # sanity check dist = 'no' if self.num_tasks == 1 else self.distribute if dist != 'no': args = pickle_sequence(args) self.sent += numpy.array([len(p) for p in args]) res = submit[dist](self, func, args, monitor) self.tasks.append(res)
python
def submit(self, *args, func=None, monitor=None): monitor = monitor or self.monitor func = func or self.task_func if not hasattr(self, 'socket'): self.__class__.running_tasks = self.tasks self.socket = Socket(self.receiver, zmq.PULL, 'bind').__enter__() monitor.backurl = 'tcp://%s:%s' % ( config.dbserver.host, self.socket.port) assert not isinstance(args[-1], Monitor) dist = 'no' if self.num_tasks == 1 else self.distribute if dist != 'no': args = pickle_sequence(args) self.sent += numpy.array([len(p) for p in args]) res = submit[dist](self, func, args, monitor) self.tasks.append(res)
[ "def", "submit", "(", "self", ",", "*", "args", ",", "func", "=", "None", ",", "monitor", "=", "None", ")", ":", "monitor", "=", "monitor", "or", "self", ".", "monitor", "func", "=", "func", "or", "self", ".", "task_func", "if", "not", "hasattr", "(", "self", ",", "'socket'", ")", ":", "# first time", "self", ".", "__class__", ".", "running_tasks", "=", "self", ".", "tasks", "self", ".", "socket", "=", "Socket", "(", "self", ".", "receiver", ",", "zmq", ".", "PULL", ",", "'bind'", ")", ".", "__enter__", "(", ")", "monitor", ".", "backurl", "=", "'tcp://%s:%s'", "%", "(", "config", ".", "dbserver", ".", "host", ",", "self", ".", "socket", ".", "port", ")", "assert", "not", "isinstance", "(", "args", "[", "-", "1", "]", ",", "Monitor", ")", "# sanity check", "dist", "=", "'no'", "if", "self", ".", "num_tasks", "==", "1", "else", "self", ".", "distribute", "if", "dist", "!=", "'no'", ":", "args", "=", "pickle_sequence", "(", "args", ")", "self", ".", "sent", "+=", "numpy", ".", "array", "(", "[", "len", "(", "p", ")", "for", "p", "in", "args", "]", ")", "res", "=", "submit", "[", "dist", "]", "(", "self", ",", "func", ",", "args", ",", "monitor", ")", "self", ".", "tasks", ".", "append", "(", "res", ")" ]
Submit the given arguments to the underlying task
[ "Submit", "the", "given", "arguments", "to", "the", "underlying", "task" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L707-L724
gem/oq-engine
openquake/baselib/parallel.py
Starmap.submit_all
def submit_all(self): """ :returns: an IterResult object """ for args in self.task_args: self.submit(*args) return self.get_results()
python
def submit_all(self): for args in self.task_args: self.submit(*args) return self.get_results()
[ "def", "submit_all", "(", "self", ")", ":", "for", "args", "in", "self", ".", "task_args", ":", "self", ".", "submit", "(", "*", "args", ")", "return", "self", ".", "get_results", "(", ")" ]
:returns: an IterResult object
[ ":", "returns", ":", "an", "IterResult", "object" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L733-L739
gem/oq-engine
openquake/baselib/parallel.py
Starmap.get_results
def get_results(self): """ :returns: an :class:`IterResult` instance """ return IterResult(self._loop(), self.name, self.argnames, self.sent, self.monitor.hdf5)
python
def get_results(self): return IterResult(self._loop(), self.name, self.argnames, self.sent, self.monitor.hdf5)
[ "def", "get_results", "(", "self", ")", ":", "return", "IterResult", "(", "self", ".", "_loop", "(", ")", ",", "self", ".", "name", ",", "self", ".", "argnames", ",", "self", ".", "sent", ",", "self", ".", "monitor", ".", "hdf5", ")" ]
:returns: an :class:`IterResult` instance
[ ":", "returns", ":", "an", ":", "class", ":", "IterResult", "instance" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L741-L746
gem/oq-engine
openquake/baselib/parallel.py
Starmap.reduce
def reduce(self, agg=operator.add, acc=None): """ Submit all tasks and reduce the results """ return self.submit_all().reduce(agg, acc)
python
def reduce(self, agg=operator.add, acc=None): return self.submit_all().reduce(agg, acc)
[ "def", "reduce", "(", "self", ",", "agg", "=", "operator", ".", "add", ",", "acc", "=", "None", ")", ":", "return", "self", ".", "submit_all", "(", ")", ".", "reduce", "(", "agg", ",", "acc", ")" ]
Submit all tasks and reduce the results
[ "Submit", "all", "tasks", "and", "reduce", "the", "results" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L748-L752
gem/oq-engine
openquake/hazardlib/gsim/can15/utils.py
_get_equivalent_distances_east
def _get_equivalent_distances_east(wid, lng, mag, repi, focal_depth=10., ab06=False): """ Computes equivalent values of Joyner-Boore and closest distance to the rupture given epicentral distance. The procedure is described in Atkinson (2012) - Appendix A (page 32). :param float wid: Width of rectangular rupture :param float lng: Length of rectangular rupture :param float mag: Magnitude :param repi: A :class:`numpy.ndarray` instance containing repi values :param float focal_depth: Focal depth :param boolean ab06: When true a minimum ztor value is set to force near-source saturation """ dtop = focal_depth - 0.5*wid # this computes a minimum ztor value - used for AB2006 if ab06: ztor_ab06 = 21-2.5*mag dtop = np.max([ztor_ab06, dtop]) ztor = max(0, dtop) # find the average distance to the fault projection dsurf = np.max([repi-0.3*lng, 0.1*np.ones_like(repi)], axis=0) # rrup rrup = (dsurf**2+ztor**2)**0.5 # return rjb and rrup return dsurf, rrup
python
def _get_equivalent_distances_east(wid, lng, mag, repi, focal_depth=10., ab06=False): dtop = focal_depth - 0.5*wid if ab06: ztor_ab06 = 21-2.5*mag dtop = np.max([ztor_ab06, dtop]) ztor = max(0, dtop) dsurf = np.max([repi-0.3*lng, 0.1*np.ones_like(repi)], axis=0) rrup = (dsurf**2+ztor**2)**0.5 return dsurf, rrup
[ "def", "_get_equivalent_distances_east", "(", "wid", ",", "lng", ",", "mag", ",", "repi", ",", "focal_depth", "=", "10.", ",", "ab06", "=", "False", ")", ":", "dtop", "=", "focal_depth", "-", "0.5", "*", "wid", "# this computes a minimum ztor value - used for AB2006", "if", "ab06", ":", "ztor_ab06", "=", "21", "-", "2.5", "*", "mag", "dtop", "=", "np", ".", "max", "(", "[", "ztor_ab06", ",", "dtop", "]", ")", "ztor", "=", "max", "(", "0", ",", "dtop", ")", "# find the average distance to the fault projection", "dsurf", "=", "np", ".", "max", "(", "[", "repi", "-", "0.3", "*", "lng", ",", "0.1", "*", "np", ".", "ones_like", "(", "repi", ")", "]", ",", "axis", "=", "0", ")", "# rrup", "rrup", "=", "(", "dsurf", "**", "2", "+", "ztor", "**", "2", ")", "**", "0.5", "# return rjb and rrup", "return", "dsurf", ",", "rrup" ]
Computes equivalent values of Joyner-Boore and closest distance to the rupture given epicentral distance. The procedure is described in Atkinson (2012) - Appendix A (page 32). :param float wid: Width of rectangular rupture :param float lng: Length of rectangular rupture :param float mag: Magnitude :param repi: A :class:`numpy.ndarray` instance containing repi values :param float focal_depth: Focal depth :param boolean ab06: When true a minimum ztor value is set to force near-source saturation
[ "Computes", "equivalent", "values", "of", "Joyner", "-", "Boore", "and", "closest", "distance", "to", "the", "rupture", "given", "epoicentral", "distance", ".", "The", "procedure", "is", "described", "in", "Atkinson", "(", "2012", ")", "-", "Appendix", "A", "(", "page", "32", ")", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/can15/utils.py#L30-L61
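A minimal standalone sketch of the same Atkinson (2012) conversion; the rupture width, length, magnitude and epicentral distances below are made-up illustration values, not taken from the source.

import numpy as np

wid, lng, mag, focal_depth = 10.0, 30.0, 6.5, 10.0  # hypothetical rupture
repi = np.array([1.0, 20.0, 50.0, 100.0])           # epicentral distances (km)
dtop = focal_depth - 0.5 * wid                      # depth to top of rupture
ztor = max(0.0, dtop)                               # ztor cannot be negative
# average distance to the surface projection of the fault
dsurf = np.max([repi - 0.3 * lng, 0.1 * np.ones_like(repi)], axis=0)
rrup = np.sqrt(dsurf ** 2 + ztor ** 2)              # closest distance to rupture
print(dsurf, rrup)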
gem/oq-engine
openquake/hazardlib/gsim/can15/utils.py
get_equivalent_distance_inslab
def get_equivalent_distance_inslab(mag, repi, hslab): """ :param float mag: Magnitude :param repi: A :class:`numpy.ndarray` instance containing repi values :param float hslab: Depth of the slab """ area = 10**(-3.225+0.89*mag) radius = (area / scipy.constants.pi)**0.5 rjb = np.max([repi-radius, np.zeros_like(repi)], axis=0) rrup = (rjb**2+hslab**2)**0.5 return rjb, rrup
python
def get_equivalent_distance_inslab(mag, repi, hslab): area = 10**(-3.225+0.89*mag) radius = (area / scipy.constants.pi)**0.5 rjb = np.max([repi-radius, np.zeros_like(repi)], axis=0) rrup = (rjb**2+hslab**2)**0.5 return rjb, rrup
[ "def", "get_equivalent_distance_inslab", "(", "mag", ",", "repi", ",", "hslab", ")", ":", "area", "=", "10", "**", "(", "-", "3.225", "+", "0.89", "*", "mag", ")", "radius", "=", "(", "area", "/", "scipy", ".", "constants", ".", "pi", ")", "**", "0.5", "rjb", "=", "np", ".", "max", "(", "[", "repi", "-", "radius", ",", "np", ".", "zeros_like", "(", "repi", ")", "]", ",", "axis", "=", "0", ")", "rrup", "=", "(", "rjb", "**", "2", "+", "hslab", "**", "2", ")", "**", "0.5", "return", "rjb", ",", "rrup" ]
:param float mag: Magnitude :param repi: A :class:`numpy.ndarray` instance containing repi values :param float hslab: Depth of the slab
[ ":", "param", "float", "mag", ":", "Magnitude", ":", "param", "repi", ":", "A", ":", "class", ":", "numpy", ".", "ndarray", "instance", "containing", "repi", "values", ":", "param", "float", "hslab", ":", "Depth", "of", "the", "slab" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/can15/utils.py#L82-L95
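The same in-slab conversion as a self-contained sketch; the magnitude, slab depth and distances are placeholders chosen for illustration.

import numpy as np
import scipy.constants

mag, hslab = 7.0, 50.0                       # hypothetical magnitude and slab depth (km)
repi = np.array([10.0, 60.0, 120.0])         # epicentral distances (km)
area = 10 ** (-3.225 + 0.89 * mag)           # rupture area from the scaling relation
radius = (area / scipy.constants.pi) ** 0.5  # equivalent circular rupture radius
rjb = np.max([repi - radius, np.zeros_like(repi)], axis=0)
rrup = (rjb ** 2 + hslab ** 2) ** 0.5
print(rjb, rrup)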
gem/oq-engine
openquake/hazardlib/probability_map.py
get_shape
def get_shape(pmaps): """ :param pmaps: a set of homogeneous ProbabilityMaps :returns: the common shape (N, L, I) """ for pmap in pmaps: if pmap: sid = next(iter(pmap)) break else: raise AllEmptyProbabilityMaps(pmaps) return (len(pmap),) + pmap[sid].array.shape
python
def get_shape(pmaps): for pmap in pmaps: if pmap: sid = next(iter(pmap)) break else: raise AllEmptyProbabilityMaps(pmaps) return (len(pmap),) + pmap[sid].array.shape
[ "def", "get_shape", "(", "pmaps", ")", ":", "for", "pmap", "in", "pmaps", ":", "if", "pmap", ":", "sid", "=", "next", "(", "iter", "(", "pmap", ")", ")", "break", "else", ":", "raise", "AllEmptyProbabilityMaps", "(", "pmaps", ")", "return", "(", "len", "(", "pmap", ")", ",", ")", "+", "pmap", "[", "sid", "]", ".", "array", ".", "shape" ]
:param pmaps: a set of homogeneous ProbabilityMaps :returns: the common shape (N, L, I)
[ ":", "param", "pmaps", ":", "a", "set", "of", "homogenous", "ProbabilityMaps", ":", "returns", ":", "the", "common", "shape", "(", "N", "L", "I", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/probability_map.py#L356-L367
gem/oq-engine
openquake/hazardlib/probability_map.py
combine
def combine(pmaps): """ :param pmaps: a set of homogeneous ProbabilityMaps :returns: the combined map """ shape = get_shape(pmaps) res = ProbabilityMap(shape[1], shape[2]) for pmap in pmaps: res |= pmap return res
python
def combine(pmaps): shape = get_shape(pmaps) res = ProbabilityMap(shape[1], shape[2]) for pmap in pmaps: res |= pmap return res
[ "def", "combine", "(", "pmaps", ")", ":", "shape", "=", "get_shape", "(", "pmaps", ")", "res", "=", "ProbabilityMap", "(", "shape", "[", "1", "]", ",", "shape", "[", "2", "]", ")", "for", "pmap", "in", "pmaps", ":", "res", "|=", "pmap", "return", "res" ]
:param pmaps: a set of homogeneous ProbabilityMaps :returns: the combined map
[ ":", "param", "pmaps", ":", "a", "set", "of", "homogenous", "ProbabilityMaps", ":", "returns", ":", "the", "combined", "map" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/probability_map.py#L370-L379
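A usage sketch for combine, assuming openquake is installed and the maps share the same (L, I) shape; the site IDs and probabilities are made up.

from openquake.hazardlib.probability_map import ProbabilityMap, combine

# two maps with 3 levels and 1 inner level, on partially overlapping sites
pm1 = ProbabilityMap.build(3, 1, sids=[0, 1], initvalue=0.1)
pm2 = ProbabilityMap.build(3, 1, sids=[1, 2], initvalue=0.2)
combined = combine([pm1, pm2])  # |= composes the curves site by site
print(sorted(combined))         # [0, 1, 2]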
gem/oq-engine
openquake/hazardlib/probability_map.py
ProbabilityCurve.convert
def convert(self, imtls, idx=0): """ Convert a probability curve into a record of dtype `imtls.dt`. :param imtls: DictArray instance :param idx: extract the data corresponding to the given inner index """ curve = numpy.zeros(1, imtls.dt) for imt in imtls: curve[imt] = self.array[imtls(imt), idx] return curve[0]
python
def convert(self, imtls, idx=0): curve = numpy.zeros(1, imtls.dt) for imt in imtls: curve[imt] = self.array[imtls(imt), idx] return curve[0]
[ "def", "convert", "(", "self", ",", "imtls", ",", "idx", "=", "0", ")", ":", "curve", "=", "numpy", ".", "zeros", "(", "1", ",", "imtls", ".", "dt", ")", "for", "imt", "in", "imtls", ":", "curve", "[", "imt", "]", "=", "self", ".", "array", "[", "imtls", "(", "imt", ")", ",", "idx", "]", "return", "curve", "[", "0", "]" ]
Convert a probability curve into a record of dtype `imtls.dt`. :param imtls: DictArray instance :param idx: extract the data corresponding to the given inner index
[ "Convert", "a", "probability", "curve", "into", "a", "record", "of", "dtype", "imtls", ".", "dt", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/probability_map.py#L96-L106
gem/oq-engine
openquake/hazardlib/probability_map.py
ProbabilityMap.build
def build(cls, shape_y, shape_z, sids, initvalue=0., dtype=F64): """ :param shape_y: the total number of intensity measure levels :param shape_z: the number of inner levels :param sids: a set of site indices :param initvalue: the initial value of the probability (default 0) :returns: a ProbabilityMap dictionary """ dic = cls(shape_y, shape_z) for sid in sids: dic.setdefault(sid, initvalue, dtype) return dic
python
def build(cls, shape_y, shape_z, sids, initvalue=0., dtype=F64): dic = cls(shape_y, shape_z) for sid in sids: dic.setdefault(sid, initvalue, dtype) return dic
[ "def", "build", "(", "cls", ",", "shape_y", ",", "shape_z", ",", "sids", ",", "initvalue", "=", "0.", ",", "dtype", "=", "F64", ")", ":", "dic", "=", "cls", "(", "shape_y", ",", "shape_z", ")", "for", "sid", "in", "sids", ":", "dic", ".", "setdefault", "(", "sid", ",", "initvalue", ",", "dtype", ")", "return", "dic" ]
:param shape_y: the total number of intensity measure levels :param shape_z: the number of inner levels :param sids: a set of site indices :param initvalue: the initial value of the probability (default 0) :returns: a ProbabilityMap dictionary
[ ":", "param", "shape_y", ":", "the", "total", "number", "of", "intensity", "measure", "levels", ":", "param", "shape_z", ":", "the", "number", "of", "inner", "levels", ":", "param", "sids", ":", "a", "set", "of", "site", "indices", ":", "param", "initvalue", ":", "the", "initial", "value", "of", "the", "probability", "(", "default", "0", ")", ":", "returns", ":", "a", "ProbabilityMap", "dictionary" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/probability_map.py#L128-L139
gem/oq-engine
openquake/hazardlib/probability_map.py
ProbabilityMap.from_array
def from_array(cls, array, sids): """ :param array: array of shape (N, L) or (N, L, I) :param sids: array of N site IDs """ n_sites = len(sids) n = len(array) if n_sites != n: raise ValueError('Passed %d site IDs, but the array has length %d' % (n_sites, n)) if len(array.shape) == 2: # shape (N, L) -> (N, L, 1) array = array.reshape(array.shape + (1,)) self = cls(*array.shape[1:]) for sid, poes in zip(sids, array): self[sid] = ProbabilityCurve(poes) return self
python
def from_array(cls, array, sids): n_sites = len(sids) n = len(array) if n_sites != n: raise ValueError('Passed %d site IDs, but the array has length %d' % (n_sites, n)) if len(array.shape) == 2: array = array.reshape(array.shape + (1,)) self = cls(*array.shape[1:]) for sid, poes in zip(sids, array): self[sid] = ProbabilityCurve(poes) return self
[ "def", "from_array", "(", "cls", ",", "array", ",", "sids", ")", ":", "n_sites", "=", "len", "(", "sids", ")", "n", "=", "len", "(", "array", ")", "if", "n_sites", "!=", "n", ":", "raise", "ValueError", "(", "'Passed %d site IDs, but the array has length %d'", "%", "(", "n_sites", ",", "n", ")", ")", "if", "len", "(", "array", ".", "shape", ")", "==", "2", ":", "# shape (N, L) -> (N, L, 1)", "array", "=", "array", ".", "reshape", "(", "array", ".", "shape", "+", "(", "1", ",", ")", ")", "self", "=", "cls", "(", "*", "array", ".", "shape", "[", "1", ":", "]", ")", "for", "sid", ",", "poes", "in", "zip", "(", "sids", ",", "array", ")", ":", "self", "[", "sid", "]", "=", "ProbabilityCurve", "(", "poes", ")", "return", "self" ]
:param array: array of shape (N, L) or (N, L, I) :param sids: array of N site IDs
[ ":", "param", "array", ":", "array", "of", "shape", "(", "N", "L", ")", "or", "(", "N", "L", "I", ")", ":", "param", "sids", ":", "array", "of", "N", "site", "IDs" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/probability_map.py#L142-L157
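A short sketch of from_array, assuming openquake is installed; note how a 2D input of shape (N, L) is promoted to (N, L, 1).

import numpy
from openquake.hazardlib.probability_map import ProbabilityMap

poes = numpy.array([[0.1, 0.05],
                    [0.2, 0.08]])            # shape (N=2 sites, L=2 levels)
pmap = ProbabilityMap.from_array(poes, sids=[3, 7])
print(pmap[3].array.shape)                   # (2, 1): one inner level added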
gem/oq-engine
openquake/hazardlib/probability_map.py
ProbabilityMap.setdefault
def setdefault(self, sid, value, dtype=F64): """ Works like `dict.setdefault`: if the `sid` key is missing, it fills it with an array and returns the associated ProbabilityCurve :param sid: site ID :param value: value used to fill the returned ProbabilityCurve :param dtype: dtype used internally (F32 or F64) """ try: return self[sid] except KeyError: array = numpy.empty((self.shape_y, self.shape_z), dtype) array.fill(value) pc = ProbabilityCurve(array) self[sid] = pc return pc
python
def setdefault(self, sid, value, dtype=F64): try: return self[sid] except KeyError: array = numpy.empty((self.shape_y, self.shape_z), dtype) array.fill(value) pc = ProbabilityCurve(array) self[sid] = pc return pc
[ "def", "setdefault", "(", "self", ",", "sid", ",", "value", ",", "dtype", "=", "F64", ")", ":", "try", ":", "return", "self", "[", "sid", "]", "except", "KeyError", ":", "array", "=", "numpy", ".", "empty", "(", "(", "self", ".", "shape_y", ",", "self", ".", "shape_z", ")", ",", "dtype", ")", "array", ".", "fill", "(", "value", ")", "pc", "=", "ProbabilityCurve", "(", "array", ")", "self", "[", "sid", "]", "=", "pc", "return", "pc" ]
Works like `dict.setdefault`: if the `sid` key is missing, it fills it with an array and returns the associated ProbabilityCurve :param sid: site ID :param value: value used to fill the returned ProbabilityCurve :param dtype: dtype used internally (F32 or F64)
[ "Works", "like", "dict", ".", "setdefault", ":", "if", "the", "sid", "key", "is", "missing", "it", "fills", "it", "with", "an", "array", "and", "returns", "the", "associate", "ProbabilityCurve" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/probability_map.py#L163-L179
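A sketch of the setdefault behaviour, again assuming openquake is installed; the site ID 42 is arbitrary.

from openquake.hazardlib.probability_map import ProbabilityMap

pmap = ProbabilityMap(4, 1)               # 4 levels, 1 inner level, no sites yet
curve = pmap.setdefault(42, 0.0)          # missing sid: filled with zeros and stored
assert pmap.setdefault(42, 1.0) is curve  # a second call returns the existing curve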
gem/oq-engine
openquake/hazardlib/probability_map.py
ProbabilityMap.array
def array(self): """ The underlying array of shape (N, L, I) """ return numpy.array([self[sid].array for sid in sorted(self)])
python
def array(self): return numpy.array([self[sid].array for sid in sorted(self)])
[ "def", "array", "(", "self", ")", ":", "return", "numpy", ".", "array", "(", "[", "self", "[", "sid", "]", ".", "array", "for", "sid", "in", "sorted", "(", "self", ")", "]", ")" ]
The underlying array of shape (N, L, I)
[ "The", "underlying", "array", "of", "shape", "(", "N", "L", "I", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/probability_map.py#L187-L191
gem/oq-engine
openquake/hazardlib/probability_map.py
ProbabilityMap.nbytes
def nbytes(self): """The size of the underlying array""" try: N, L, I = get_shape([self]) except AllEmptyProbabilityMaps: return 0 return BYTES_PER_FLOAT * N * L * I
python
def nbytes(self): try: N, L, I = get_shape([self]) except AllEmptyProbabilityMaps: return 0 return BYTES_PER_FLOAT * N * L * I
[ "def", "nbytes", "(", "self", ")", ":", "try", ":", "N", ",", "L", ",", "I", "=", "get_shape", "(", "[", "self", "]", ")", "except", "AllEmptyProbabilityMaps", ":", "return", "0", "return", "BYTES_PER_FLOAT", "*", "N", "*", "L", "*", "I" ]
The size of the underlying array
[ "The", "size", "of", "the", "underlying", "array" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/probability_map.py#L194-L200
gem/oq-engine
openquake/hazardlib/probability_map.py
ProbabilityMap.convert
def convert(self, imtls, nsites, idx=0): """ Convert a probability map into a composite array of length `nsites` and dtype `imtls.dt`. :param imtls: DictArray instance :param nsites: the total number of sites :param idx: index on the z-axis (default 0) """ curves = numpy.zeros(nsites, imtls.dt) for imt in curves.dtype.names: curves_by_imt = curves[imt] for sid in self: curves_by_imt[sid] = self[sid].array[imtls(imt), idx] return curves
python
def convert(self, imtls, nsites, idx=0): curves = numpy.zeros(nsites, imtls.dt) for imt in curves.dtype.names: curves_by_imt = curves[imt] for sid in self: curves_by_imt[sid] = self[sid].array[imtls(imt), idx] return curves
[ "def", "convert", "(", "self", ",", "imtls", ",", "nsites", ",", "idx", "=", "0", ")", ":", "curves", "=", "numpy", ".", "zeros", "(", "nsites", ",", "imtls", ".", "dt", ")", "for", "imt", "in", "curves", ".", "dtype", ".", "names", ":", "curves_by_imt", "=", "curves", "[", "imt", "]", "for", "sid", "in", "self", ":", "curves_by_imt", "[", "sid", "]", "=", "self", "[", "sid", "]", ".", "array", "[", "imtls", "(", "imt", ")", ",", "idx", "]", "return", "curves" ]
Convert a probability map into a composite array of length `nsites` and dtype `imtls.dt`. :param imtls: DictArray instance :param nsites: the total number of sites :param idx: index on the z-axis (default 0)
[ "Convert", "a", "probability", "map", "into", "a", "composite", "array", "of", "length", "nsites", "and", "dtype", "imtls", ".", "dt", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/probability_map.py#L203-L220
gem/oq-engine
openquake/hazardlib/probability_map.py
ProbabilityMap.convert2
def convert2(self, imtls, sids): """ Convert a probability map into a composite array of shape (N,) and dtype `imtls.dt`. :param imtls: DictArray instance :param sids: the IDs of the sites we are interested in :returns: an array of curves of shape (N,) """ assert self.shape_z == 1, self.shape_z curves = numpy.zeros(len(sids), imtls.dt) for imt in curves.dtype.names: curves_by_imt = curves[imt] for i, sid in numpy.ndenumerate(sids): try: pcurve = self[sid] except KeyError: pass # the poes will be zeros else: curves_by_imt[i] = pcurve.array[imtls(imt), 0] return curves
python
def convert2(self, imtls, sids): assert self.shape_z == 1, self.shape_z curves = numpy.zeros(len(sids), imtls.dt) for imt in curves.dtype.names: curves_by_imt = curves[imt] for i, sid in numpy.ndenumerate(sids): try: pcurve = self[sid] except KeyError: pass else: curves_by_imt[i] = pcurve.array[imtls(imt), 0] return curves
[ "def", "convert2", "(", "self", ",", "imtls", ",", "sids", ")", ":", "assert", "self", ".", "shape_z", "==", "1", ",", "self", ".", "shape_z", "curves", "=", "numpy", ".", "zeros", "(", "len", "(", "sids", ")", ",", "imtls", ".", "dt", ")", "for", "imt", "in", "curves", ".", "dtype", ".", "names", ":", "curves_by_imt", "=", "curves", "[", "imt", "]", "for", "i", ",", "sid", "in", "numpy", ".", "ndenumerate", "(", "sids", ")", ":", "try", ":", "pcurve", "=", "self", "[", "sid", "]", "except", "KeyError", ":", "pass", "# the poes will be zeros", "else", ":", "curves_by_imt", "[", "i", "]", "=", "pcurve", ".", "array", "[", "imtls", "(", "imt", ")", ",", "0", "]", "return", "curves" ]
Convert a probability map into a composite array of shape (N,) and dtype `imtls.dt`. :param imtls: DictArray instance :param sids: the IDs of the sites we are interested in :returns: an array of curves of shape (N,)
[ "Convert", "a", "probability", "map", "into", "a", "composite", "array", "of", "shape", "(", "N", ")", "and", "dtype", "imtls", ".", "dt", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/probability_map.py#L222-L245
gem/oq-engine
openquake/hazardlib/probability_map.py
ProbabilityMap.filter
def filter(self, sids): """ Extracts a submap of self for the given sids. """ dic = self.__class__(self.shape_y, self.shape_z) for sid in sids: try: dic[sid] = self[sid] except KeyError: pass return dic
python
def filter(self, sids): dic = self.__class__(self.shape_y, self.shape_z) for sid in sids: try: dic[sid] = self[sid] except KeyError: pass return dic
[ "def", "filter", "(", "self", ",", "sids", ")", ":", "dic", "=", "self", ".", "__class__", "(", "self", ".", "shape_y", ",", "self", ".", "shape_z", ")", "for", "sid", "in", "sids", ":", "try", ":", "dic", "[", "sid", "]", "=", "self", "[", "sid", "]", "except", "KeyError", ":", "pass", "return", "dic" ]
Extracts a submap of self for the given sids.
[ "Extracs", "a", "submap", "of", "self", "for", "the", "given", "sids", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/probability_map.py#L247-L257
gem/oq-engine
openquake/hazardlib/probability_map.py
ProbabilityMap.extract
def extract(self, inner_idx): """ Extracts a component of the underlying ProbabilityCurves, specified by the index `inner_idx`. """ out = self.__class__(self.shape_y, 1) for sid in self: curve = self[sid] array = curve.array[:, inner_idx].reshape(-1, 1) out[sid] = ProbabilityCurve(array) return out
python
def extract(self, inner_idx): out = self.__class__(self.shape_y, 1) for sid in self: curve = self[sid] array = curve.array[:, inner_idx].reshape(-1, 1) out[sid] = ProbabilityCurve(array) return out
[ "def", "extract", "(", "self", ",", "inner_idx", ")", ":", "out", "=", "self", ".", "__class__", "(", "self", ".", "shape_y", ",", "1", ")", "for", "sid", "in", "self", ":", "curve", "=", "self", "[", "sid", "]", "array", "=", "curve", ".", "array", "[", ":", ",", "inner_idx", "]", ".", "reshape", "(", "-", "1", ",", "1", ")", "out", "[", "sid", "]", "=", "ProbabilityCurve", "(", "array", ")", "return", "out" ]
Extracts a component of the underlying ProbabilityCurves, specified by the index `inner_idx`.
[ "Extracts", "a", "component", "of", "the", "underlying", "ProbabilityCurves", "specified", "by", "the", "index", "inner_idx", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/probability_map.py#L259-L269
gem/oq-engine
openquake/commands/compare.py
get_diff_idxs
def get_diff_idxs(array, rtol, atol): """ Given an array of (C, N, L) values, where the first is the reference value, compute the relative differences and discard the ones below the tolerance. :returns: indices where there are sensible differences. """ C, N, L = array.shape diff_idxs = set() # indices of the sites with differences for c in range(1, C): for n in range(N): if not numpy.allclose(array[c, n], array[0, n], rtol, atol): diff_idxs.add(n) return numpy.fromiter(diff_idxs, int)
python
def get_diff_idxs(array, rtol, atol): C, N, L = array.shape diff_idxs = set() for c in range(1, C): for n in range(N): if not numpy.allclose(array[c, n], array[0, n], rtol, atol): diff_idxs.add(n) return numpy.fromiter(diff_idxs, int)
[ "def", "get_diff_idxs", "(", "array", ",", "rtol", ",", "atol", ")", ":", "C", ",", "N", ",", "L", "=", "array", ".", "shape", "diff_idxs", "=", "set", "(", ")", "# indices of the sites with differences", "for", "c", "in", "range", "(", "1", ",", "C", ")", ":", "for", "n", "in", "range", "(", "N", ")", ":", "if", "not", "numpy", ".", "allclose", "(", "array", "[", "c", ",", "n", "]", ",", "array", "[", "0", ",", "n", "]", ",", "rtol", ",", "atol", ")", ":", "diff_idxs", ".", "add", "(", "n", ")", "return", "numpy", ".", "fromiter", "(", "diff_idxs", ",", "int", ")" ]
Given an array of (C, N, L) values, where the first is the reference value, compute the relative differences and discard the ones below the tolerance. :returns: indices where there are sensible differences.
[ "Given", "an", "array", "with", "(", "C", "N", "L", ")", "values", "being", "the", "first", "the", "reference", "value", "compute", "the", "relative", "differences", "and", "discard", "the", "one", "below", "the", "tolerance", ".", ":", "returns", ":", "indices", "where", "there", "are", "sensible", "differences", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/compare.py#L53-L65
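A self-contained illustration of the tolerance check, inlining the same numpy.allclose logic on made-up data.

import numpy

# shape (C=2 runs, N=3 sites, L=2 levels); site 1 differs by 20% in run 1
array = numpy.array([[[1.0, 2.0], [1.0, 2.0], [1.0, 2.0]],
                     [[1.0, 2.0], [1.2, 2.0], [1.0, 2.0]]])
diff = {n for c in range(1, array.shape[0])
        for n in range(array.shape[1])
        if not numpy.allclose(array[c, n], array[0, n], 0.1, 1E-4)}
print(sorted(diff))  # [1]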
gem/oq-engine
openquake/commands/compare.py
compare
def compare(what, imt, calc_ids, files, samplesites=100, rtol=.1, atol=1E-4): """ Compare the hazard curves or maps of two or more calculations """ sids, imtls, poes, arrays = getdata(what, calc_ids, samplesites) try: levels = imtls[imt] except KeyError: sys.exit( '%s not found. The available IMTs are %s' % (imt, list(imtls))) imt2idx = {imt: i for i, imt in enumerate(imtls)} head = ['site_id'] if files else ['site_id', 'calc_id'] if what == 'hcurves': array_imt = arrays[:, :, imtls(imt)] header = head + ['%.5f' % lvl for lvl in levels] else: # hmaps array_imt = arrays[:, :, imt2idx[imt]] header = head + [str(poe) for poe in poes] rows = collections.defaultdict(list) diff_idxs = get_diff_idxs(array_imt, rtol, atol) if len(diff_idxs) == 0: print('There are no differences within the tolerance of %d%%' % (rtol * 100)) return arr = array_imt.transpose(1, 0, 2) # shape (N, C, L) for sid, array in sorted(zip(sids[diff_idxs], arr[diff_idxs])): for calc_id, cols in zip(calc_ids, array): if files: rows[calc_id].append([sid] + list(cols)) else: rows['all'].append([sid, calc_id] + list(cols)) if files: fdict = {calc_id: open('%s.txt' % calc_id, 'w') for calc_id in calc_ids} for calc_id, f in fdict.items(): f.write(views.rst_table(rows[calc_id], header)) print('Generated %s' % f.name) else: print(views.rst_table(rows['all'], header))
python
def compare(what, imt, calc_ids, files, samplesites=100, rtol=.1, atol=1E-4): sids, imtls, poes, arrays = getdata(what, calc_ids, samplesites) try: levels = imtls[imt] except KeyError: sys.exit( '%s not found. The available IMTs are %s' % (imt, list(imtls))) imt2idx = {imt: i for i, imt in enumerate(imtls)} head = ['site_id'] if files else ['site_id', 'calc_id'] if what == 'hcurves': array_imt = arrays[:, :, imtls(imt)] header = head + ['%.5f' % lvl for lvl in levels] else: array_imt = arrays[:, :, imt2idx[imt]] header = head + [str(poe) for poe in poes] rows = collections.defaultdict(list) diff_idxs = get_diff_idxs(array_imt, rtol, atol) if len(diff_idxs) == 0: print('There are no differences within the tolerance of %d%%' % (rtol * 100)) return arr = array_imt.transpose(1, 0, 2) for sid, array in sorted(zip(sids[diff_idxs], arr[diff_idxs])): for calc_id, cols in zip(calc_ids, array): if files: rows[calc_id].append([sid] + list(cols)) else: rows['all'].append([sid, calc_id] + list(cols)) if files: fdict = {calc_id: open('%s.txt' % calc_id, 'w') for calc_id in calc_ids} for calc_id, f in fdict.items(): f.write(views.rst_table(rows[calc_id], header)) print('Generated %s' % f.name) else: print(views.rst_table(rows['all'], header))
[ "def", "compare", "(", "what", ",", "imt", ",", "calc_ids", ",", "files", ",", "samplesites", "=", "100", ",", "rtol", "=", ".1", ",", "atol", "=", "1E-4", ")", ":", "sids", ",", "imtls", ",", "poes", ",", "arrays", "=", "getdata", "(", "what", ",", "calc_ids", ",", "samplesites", ")", "try", ":", "levels", "=", "imtls", "[", "imt", "]", "except", "KeyError", ":", "sys", ".", "exit", "(", "'%s not found. The available IMTs are %s'", "%", "(", "imt", ",", "list", "(", "imtls", ")", ")", ")", "imt2idx", "=", "{", "imt", ":", "i", "for", "i", ",", "imt", "in", "enumerate", "(", "imtls", ")", "}", "head", "=", "[", "'site_id'", "]", "if", "files", "else", "[", "'site_id'", ",", "'calc_id'", "]", "if", "what", "==", "'hcurves'", ":", "array_imt", "=", "arrays", "[", ":", ",", ":", ",", "imtls", "(", "imt", ")", "]", "header", "=", "head", "+", "[", "'%.5f'", "%", "lvl", "for", "lvl", "in", "levels", "]", "else", ":", "# hmaps", "array_imt", "=", "arrays", "[", ":", ",", ":", ",", "imt2idx", "[", "imt", "]", "]", "header", "=", "head", "+", "[", "str", "(", "poe", ")", "for", "poe", "in", "poes", "]", "rows", "=", "collections", ".", "defaultdict", "(", "list", ")", "diff_idxs", "=", "get_diff_idxs", "(", "array_imt", ",", "rtol", ",", "atol", ")", "if", "len", "(", "diff_idxs", ")", "==", "0", ":", "print", "(", "'There are no differences within the tolerance of %d%%'", "%", "(", "rtol", "*", "100", ")", ")", "return", "arr", "=", "array_imt", ".", "transpose", "(", "1", ",", "0", ",", "2", ")", "# shape (N, C, L)", "for", "sid", ",", "array", "in", "sorted", "(", "zip", "(", "sids", "[", "diff_idxs", "]", ",", "arr", "[", "diff_idxs", "]", ")", ")", ":", "for", "calc_id", ",", "cols", "in", "zip", "(", "calc_ids", ",", "array", ")", ":", "if", "files", ":", "rows", "[", "calc_id", "]", ".", "append", "(", "[", "sid", "]", "+", "list", "(", "cols", ")", ")", "else", ":", "rows", "[", "'all'", "]", ".", "append", "(", "[", "sid", ",", "calc_id", "]", "+", "list", "(", "cols", ")", ")", "if", "files", ":", "fdict", "=", "{", "calc_id", ":", "open", "(", "'%s.txt'", "%", "calc_id", ",", "'w'", ")", "for", "calc_id", "in", "calc_ids", "}", "for", "calc_id", ",", "f", "in", "fdict", ".", "items", "(", ")", ":", "f", ".", "write", "(", "views", ".", "rst_table", "(", "rows", "[", "calc_id", "]", ",", "header", ")", ")", "print", "(", "'Generated %s'", "%", "f", ".", "name", ")", "else", ":", "print", "(", "views", ".", "rst_table", "(", "rows", "[", "'all'", "]", ",", "header", ")", ")" ]
Compare the hazard curves or maps of two or more calculations
[ "Compare", "the", "hazard", "curves", "or", "maps", "of", "two", "or", "more", "calculations" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/compare.py#L69-L107
gem/oq-engine
openquake/hmtk/plotting/seismicity/catalogue_plots.py
build_filename
def build_filename(filename, filetype='png', resolution=300): """ Uses the input properties to create the string of the filename :param str filename: Name of the file :param str filetype: Type of file :param int resolution: DPI resolution of the output figure """ filevals = os.path.splitext(filename) if filevals[1]: filetype = filevals[1][1:] if not filetype: filetype = 'png' filename = filevals[0] + '.' + filetype if not resolution: resolution = 300 return filename, filetype, resolution
python
def build_filename(filename, filetype='png', resolution=300): filevals = os.path.splitext(filename) if filevals[1]: filetype = filevals[1][1:] if not filetype: filetype = 'png' filename = filevals[0] + '.' + filetype if not resolution: resolution = 300 return filename, filetype, resolution
[ "def", "build_filename", "(", "filename", ",", "filetype", "=", "'png'", ",", "resolution", "=", "300", ")", ":", "filevals", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "if", "filevals", "[", "1", "]", ":", "filetype", "=", "filevals", "[", "1", "]", "[", "1", ":", "]", "if", "not", "filetype", ":", "filetype", "=", "'png'", "filename", "=", "filevals", "[", "0", "]", "+", "'.'", "+", "filetype", "if", "not", "resolution", ":", "resolution", "=", "300", "return", "filename", ",", "filetype", ",", "resolution" ]
Uses the input properties to create the string of the filename :param str filename: Name of the file :param str filetype: Type of file :param int resolution: DPI resolution of the output figure
[ "Uses", "the", "input", "properties", "to", "create", "the", "string", "of", "the", "filename" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L61-L81
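Expected behaviour of build_filename, assuming the function above is in scope; an extension embedded in the filename overrides the filetype argument.

print(build_filename('map.pdf'))           # ('map.pdf', 'pdf', 300)
print(build_filename('map', 'jpg', 150))   # ('map.jpg', 'jpg', 150)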
gem/oq-engine
openquake/hmtk/plotting/seismicity/catalogue_plots.py
_save_image
def _save_image(fig, filename, filetype='png', resolution=300): """ If filename is specified, saves the image :param str filename: Name of the file :param str filetype: Type of file :param int resolution: DPI resolution of the output figure """ if filename: filename, filetype, resolution = build_filename(filename, filetype, resolution) fig.savefig(filename, dpi=resolution, format=filetype) else: pass
python
def _save_image(fig, filename, filetype='png', resolution=300): if filename: filename, filetype, resolution = build_filename(filename, filetype, resolution) fig.savefig(filename, dpi=resolution, format=filetype) else: pass
[ "def", "_save_image", "(", "fig", ",", "filename", ",", "filetype", "=", "'png'", ",", "resolution", "=", "300", ")", ":", "if", "filename", ":", "filename", ",", "filetype", ",", "resolution", "=", "build_filename", "(", "filename", ",", "filetype", ",", "resolution", ")", "fig", ".", "savefig", "(", "filename", ",", "dpi", "=", "resolution", ",", "format", "=", "filetype", ")", "else", ":", "pass" ]
If filename is specified, saves the image :param str filename: Name of the file :param str filetype: Type of file :param int resolution: DPI resolution of the output figure
[ "If", "filename", "is", "specified", "saves", "the", "image", ":", "param", "str", "filename", ":", "Name", "of", "the", "file", ":", "param", "str", "filetype", ":", "Type", "of", "file", ":", "param", "int", "resolution", ":", "DPI", "resolution", "of", "the", "output", "figure" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L84-L100
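A minimal usage sketch of _save_image, assuming matplotlib is available and the function above is in scope.

import matplotlib.pyplot as plt

fig = plt.figure()
fig.gca().plot([0, 1], [0, 1])
_save_image(fig, 'demo', 'png', 150)  # writes demo.png at 150 dpi; no-op without a filename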
gem/oq-engine
openquake/hmtk/plotting/seismicity/catalogue_plots.py
_get_catalogue_bin_limits
def _get_catalogue_bin_limits(catalogue, dmag): """ Returns the magnitude bins corresponding to the catalogue """ mag_bins = np.arange( float(np.floor(np.min(catalogue.data['magnitude']))) - dmag, float(np.ceil(np.max(catalogue.data['magnitude']))) + dmag, dmag) counter = np.histogram(catalogue.data['magnitude'], mag_bins)[0] idx = np.where(counter > 0)[0] mag_bins = mag_bins[idx[0]:(idx[-1] + 2)] return mag_bins
python
def _get_catalogue_bin_limits(catalogue, dmag): mag_bins = np.arange( float(np.floor(np.min(catalogue.data['magnitude']))) - dmag, float(np.ceil(np.max(catalogue.data['magnitude']))) + dmag, dmag) counter = np.histogram(catalogue.data['magnitude'], mag_bins)[0] idx = np.where(counter > 0)[0] mag_bins = mag_bins[idx[0]:(idx[-1] + 2)] return mag_bins
[ "def", "_get_catalogue_bin_limits", "(", "catalogue", ",", "dmag", ")", ":", "mag_bins", "=", "np", ".", "arange", "(", "float", "(", "np", ".", "floor", "(", "np", ".", "min", "(", "catalogue", ".", "data", "[", "'magnitude'", "]", ")", ")", ")", "-", "dmag", ",", "float", "(", "np", ".", "ceil", "(", "np", ".", "max", "(", "catalogue", ".", "data", "[", "'magnitude'", "]", ")", ")", ")", "+", "dmag", ",", "dmag", ")", "counter", "=", "np", ".", "histogram", "(", "catalogue", ".", "data", "[", "'magnitude'", "]", ",", "mag_bins", ")", "[", "0", "]", "idx", "=", "np", ".", "where", "(", "counter", ">", "0", ")", "[", "0", "]", "mag_bins", "=", "mag_bins", "[", "idx", "[", "0", "]", ":", "(", "idx", "[", "-", "1", "]", "+", "2", ")", "]", "return", "mag_bins" ]
Returns the magnitude bins corresponding to the catalogue
[ "Returns", "the", "magnitude", "bins", "corresponing", "to", "the", "catalogue" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/seismicity/catalogue_plots.py#L103-L114
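A standalone sketch of the binning logic with a made-up magnitude array.

import numpy as np

mags = np.array([4.2, 4.3, 5.1, 6.0])    # hypothetical catalogue magnitudes
dmag = 0.5
mag_bins = np.arange(np.floor(mags.min()) - dmag,
                     np.ceil(mags.max()) + dmag, dmag)
counter = np.histogram(mags, mag_bins)[0]
idx = np.where(counter > 0)[0]
print(mag_bins[idx[0]:idx[-1] + 2])      # trimmed to the occupied magnitude range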