repository_name
stringlengths
5
67
func_path_in_repository
stringlengths
4
234
func_name
stringlengths
0
314
whole_func_string
stringlengths
52
3.87M
language
stringclasses
6 values
func_code_string
stringlengths
39
1.84M
func_code_tokens
sequencelengths
15
672k
func_documentation_string
stringlengths
1
47.2k
func_documentation_tokens
sequencelengths
1
3.92k
split_name
stringclasses
1 value
func_code_url
stringlengths
85
339
gem/oq-engine
openquake/commands/purge.py
purge
def purge(calc_id):
    """
    Remove the given calculation. If you want to remove all calculations,
    use oq reset.
    """
    # A negative ID counts back from the most recent calculation,
    # e.g. -1 is the latest one
    if calc_id < 0:
        existing = datastore.get_calc_ids(datadir)
        try:
            calc_id = existing[calc_id]
        except IndexError:
            print('Calculation %d not found' % calc_id)
            return
    purge_one(calc_id, getpass.getuser())
python
def purge(calc_id): if calc_id < 0: try: calc_id = datastore.get_calc_ids(datadir)[calc_id] except IndexError: print('Calculation %d not found' % calc_id) return purge_one(calc_id, getpass.getuser())
[ "def", "purge", "(", "calc_id", ")", ":", "if", "calc_id", "<", "0", ":", "try", ":", "calc_id", "=", "datastore", ".", "get_calc_ids", "(", "datadir", ")", "[", "calc_id", "]", "except", "IndexError", ":", "print", "(", "'Calculation %d not found'", "%", "calc_id", ")", "return", "purge_one", "(", "calc_id", ",", "getpass", ".", "getuser", "(", ")", ")" ]
Remove the given calculation. If you want to remove all calculations, use oq reset.
[ "Remove", "the", "given", "calculation", ".", "If", "you", "want", "to", "remove", "all", "calculations", "use", "oq", "reset", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/purge.py#L60-L71
gem/oq-engine
openquake/hmtk/plotting/patch.py
PolygonPatch
def PolygonPatch(polygon, **kwargs):
    """
    Constructs a matplotlib patch from a geometric object.

    The `polygon` may be a Shapely or GeoJSON-like object, possibly with
    holes. The `kwargs` are those supported by the
    matplotlib.patches.Polygon class constructor. Returns an instance of
    matplotlib.patches.PathPatch.

    Example (using Shapely Point and a matplotlib axes):

      >> b = Point(0, 0).buffer(1.0)
      >> patch = PolygonPatch(b, fc='blue', ec='blue', alpha=0.5)
      >> axis.add_patch(patch)
    """
    def ring_codes(ring):
        # Every vertex is a LINETO command except the first one of each
        # sub-path, which must be a MOVETO
        npts = len(getattr(ring, 'coords', None) or ring)
        codes = ones(npts, dtype=Path.code_type) * Path.LINETO
        codes[0] = Path.MOVETO
        return codes

    if hasattr(polygon, 'geom_type'):
        # Shapely geometry
        kind = polygon.geom_type
        if kind == 'Polygon':
            parts = [Polygon(polygon)]
        elif kind == 'MultiPolygon':
            parts = [Polygon(p) for p in polygon]
        else:
            raise ValueError(
                "A polygon or multi-polygon representation is required")
    else:
        # GeoJSON-like mapping, or an object exposing __geo_interface__
        polygon = getattr(polygon, '__geo_interface__', polygon)
        kind = polygon["type"]
        if kind == 'Polygon':
            parts = [Polygon(polygon)]
        elif kind == 'MultiPolygon':
            parts = [Polygon(p) for p in polygon['coordinates']]
        else:
            raise ValueError(
                "A polygon or multi-polygon representation is required")

    # Stack the exterior ring and any interior rings (holes) of every
    # part into a single vertex array with matching path codes
    vertices = concatenate([
        concatenate([asarray(piece.exterior)[:, :2]] +
                    [asarray(hole)[:, :2] for hole in piece.interiors])
        for piece in parts])
    codes = concatenate([
        concatenate([ring_codes(piece.exterior)] +
                    [ring_codes(hole) for hole in piece.interiors])
        for piece in parts])
    return PathPatch(Path(vertices, codes), **kwargs)
python
def PolygonPatch(polygon, **kwargs): def coding(ob): n = len(getattr(ob, 'coords', None) or ob) vals = ones(n, dtype=Path.code_type) * Path.LINETO vals[0] = Path.MOVETO return vals if hasattr(polygon, 'geom_type'): ptype = polygon.geom_type if ptype == 'Polygon': polygon = [Polygon(polygon)] elif ptype == 'MultiPolygon': polygon = [Polygon(p) for p in polygon] else: raise ValueError( "A polygon or multi-polygon representation is required") else: polygon = getattr(polygon, '__geo_interface__', polygon) ptype = polygon["type"] if ptype == 'Polygon': polygon = [Polygon(polygon)] elif ptype == 'MultiPolygon': polygon = [Polygon(p) for p in polygon['coordinates']] else: raise ValueError( "A polygon or multi-polygon representation is required") vertices = concatenate([ concatenate([asarray(t.exterior)[:, :2]] + [asarray(r)[:, :2] for r in t.interiors]) for t in polygon]) codes = concatenate([ concatenate([coding(t.exterior)] + [coding(r) for r in t.interiors]) for t in polygon]) return PathPatch(Path(vertices, codes), **kwargs)
[ "def", "PolygonPatch", "(", "polygon", ",", "*", "*", "kwargs", ")", ":", "def", "coding", "(", "ob", ")", ":", "# The codes will be all \"LINETO\" commands, except for \"MOVETO\"s at the", "# beginning of each subpath", "n", "=", "len", "(", "getattr", "(", "ob", ",", "'coords'", ",", "None", ")", "or", "ob", ")", "vals", "=", "ones", "(", "n", ",", "dtype", "=", "Path", ".", "code_type", ")", "*", "Path", ".", "LINETO", "vals", "[", "0", "]", "=", "Path", ".", "MOVETO", "return", "vals", "if", "hasattr", "(", "polygon", ",", "'geom_type'", ")", ":", "# Shapely", "ptype", "=", "polygon", ".", "geom_type", "if", "ptype", "==", "'Polygon'", ":", "polygon", "=", "[", "Polygon", "(", "polygon", ")", "]", "elif", "ptype", "==", "'MultiPolygon'", ":", "polygon", "=", "[", "Polygon", "(", "p", ")", "for", "p", "in", "polygon", "]", "else", ":", "raise", "ValueError", "(", "\"A polygon or multi-polygon representation is required\"", ")", "else", ":", "# GeoJSON", "polygon", "=", "getattr", "(", "polygon", ",", "'__geo_interface__'", ",", "polygon", ")", "ptype", "=", "polygon", "[", "\"type\"", "]", "if", "ptype", "==", "'Polygon'", ":", "polygon", "=", "[", "Polygon", "(", "polygon", ")", "]", "elif", "ptype", "==", "'MultiPolygon'", ":", "polygon", "=", "[", "Polygon", "(", "p", ")", "for", "p", "in", "polygon", "[", "'coordinates'", "]", "]", "else", ":", "raise", "ValueError", "(", "\"A polygon or multi-polygon representation is required\"", ")", "vertices", "=", "concatenate", "(", "[", "concatenate", "(", "[", "asarray", "(", "t", ".", "exterior", ")", "[", ":", ",", ":", "2", "]", "]", "+", "[", "asarray", "(", "r", ")", "[", ":", ",", ":", "2", "]", "for", "r", "in", "t", ".", "interiors", "]", ")", "for", "t", "in", "polygon", "]", ")", "codes", "=", "concatenate", "(", "[", "concatenate", "(", "[", "coding", "(", "t", ".", "exterior", ")", "]", "+", "[", "coding", "(", "r", ")", "for", "r", "in", "t", ".", "interiors", "]", ")", "for", "t", "in", "polygon", 
"]", ")", "return", "PathPatch", "(", "Path", "(", "vertices", ",", "codes", ")", ",", "*", "*", "kwargs", ")" ]
Constructs a matplotlib patch from a geometric object The `polygon` may be a Shapely or GeoJSON-like object possibly with holes. The `kwargs` are those supported by the matplotlib.patches.Polygon class constructor. Returns an instance of matplotlib.patches.PathPatch. Example (using Shapely Point and a matplotlib axes): >> b = Point(0, 0).buffer(1.0) >> patch = PolygonPatch(b, fc='blue', ec='blue', alpha=0.5) >> axis.add_patch(patch)
[ "Constructs", "a", "matplotlib", "patch", "from", "a", "geometric", "object" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/patch.py#L43-L93
gem/oq-engine
openquake/hazardlib/gsim/kotha_2019.py
KothaEtAl2019.retreive_sigma_mu_data
def retreive_sigma_mu_data(self):
    """
    For the general form of the GMPE this retrieves the sigma mu
    values from the hdf5 file using the "general" model, i.e. sigma mu
    factors that are independent of the choice of region or depth.

    Sets the attributes ``mags``, ``dists``, ``periods``, ``pga``,
    ``pgv`` and ``s_a`` from the corresponding hdf5 datasets.
    """
    # Use a context manager so the file handle is released even if one
    # of the dataset reads raises (the original manual close leaked it)
    with h5py.File(os.path.join(BASE_PATH,
                                "KothaEtAl2019_SigmaMu_Fixed.hdf5"),
                   "r") as fle:
        # [:] copies each dataset into an in-memory array, so the
        # attributes stay valid after the file is closed
        self.mags = fle["M"][:]
        self.dists = fle["R"][:]
        self.periods = fle["T"][:]
        self.pga = fle["PGA"][:]
        self.pgv = fle["PGV"][:]
        self.s_a = fle["SA"][:]
python
def retreive_sigma_mu_data(self): fle = h5py.File(os.path.join(BASE_PATH, "KothaEtAl2019_SigmaMu_Fixed.hdf5"), "r") self.mags = fle["M"][:] self.dists = fle["R"][:] self.periods = fle["T"][:] self.pga = fle["PGA"][:] self.pgv = fle["PGV"][:] self.s_a = fle["SA"][:] fle.close()
[ "def", "retreive_sigma_mu_data", "(", "self", ")", ":", "fle", "=", "h5py", ".", "File", "(", "os", ".", "path", ".", "join", "(", "BASE_PATH", ",", "\"KothaEtAl2019_SigmaMu_Fixed.hdf5\"", ")", ",", "\"r\"", ")", "self", ".", "mags", "=", "fle", "[", "\"M\"", "]", "[", ":", "]", "self", ".", "dists", "=", "fle", "[", "\"R\"", "]", "[", ":", "]", "self", ".", "periods", "=", "fle", "[", "\"T\"", "]", "[", ":", "]", "self", ".", "pga", "=", "fle", "[", "\"PGA\"", "]", "[", ":", "]", "self", ".", "pgv", "=", "fle", "[", "\"PGV\"", "]", "[", ":", "]", "self", ".", "s_a", "=", "fle", "[", "\"SA\"", "]", "[", ":", "]", "fle", ".", "close", "(", ")" ]
For the general form of the GMPE this retrieves the sigma mu values from the hdf5 file using the "general" model, i.e. sigma mu factors that are independent of the choice of region or depth
[ "For", "the", "general", "form", "of", "the", "GMPE", "this", "retrieves", "the", "sigma", "mu", "values", "from", "the", "hdf5", "file", "using", "the", "general", "model", "i", ".", "e", ".", "sigma", "mu", "factors", "that", "are", "independent", "of", "the", "choice", "of", "region", "or", "depth" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L129-L143
gem/oq-engine
openquake/hazardlib/gsim/kotha_2019.py
KothaEtAl2019.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    """
    See :meth:`superclass method
    <.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
    for spec of input and result values.
    """
    # extracting dictionary of coefficients specific to required
    # intensity measure type.
    C = self.COEFFS[imt]
    mean = (self.get_magnitude_scaling(C, rup.mag) +
            self.get_distance_term(C, rup, dists.rjb, imt) +
            self.get_site_amplification(C, sites))
    # GMPE originally in cm/s/s - convert to g. NB: membership is tested
    # against a tuple; the original substring test ``in "PGA SA"`` would
    # also match unrelated names such as "A" or "GA"
    if imt.name in ("PGA", "SA"):
        mean -= np.log(100.0 * g)
    stddevs = self.get_stddevs(C, dists.rjb.shape, stddev_types, sites)
    if self.sigma_mu_epsilon:
        # Apply the epistemic uncertainty factor (sigma_mu) multiplied by
        # the number of standard deviations
        sigma_mu = self.get_sigma_mu_adjustment(C, imt, rup, dists)
        # Cap sigma_mu at 0.5 ln units
        sigma_mu[sigma_mu > 0.5] = 0.5
        # Sigma mu should not be less than the standard deviation of the
        # fault-to-fault variability
        sigma_mu[sigma_mu < C["tau_fault"]] = C["tau_fault"]
        mean += (self.sigma_mu_epsilon * sigma_mu)
    return mean, stddevs
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): C = self.COEFFS[imt] mean = (self.get_magnitude_scaling(C, rup.mag) + self.get_distance_term(C, rup, dists.rjb, imt) + self.get_site_amplification(C, sites)) if imt.name in "PGA SA": mean -= np.log(100.0 * g) stddevs = self.get_stddevs(C, dists.rjb.shape, stddev_types, sites) if self.sigma_mu_epsilon: sigma_mu = self.get_sigma_mu_adjustment(C, imt, rup, dists) sigma_mu[sigma_mu > 0.5] = 0.5 sigma_mu[sigma_mu < C["tau_fault"]] = C["tau_fault"] mean += (self.sigma_mu_epsilon * sigma_mu) return mean, stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "# extracting dictionary of coefficients specific to required", "# intensity measure type.", "C", "=", "self", ".", "COEFFS", "[", "imt", "]", "mean", "=", "(", "self", ".", "get_magnitude_scaling", "(", "C", ",", "rup", ".", "mag", ")", "+", "self", ".", "get_distance_term", "(", "C", ",", "rup", ",", "dists", ".", "rjb", ",", "imt", ")", "+", "self", ".", "get_site_amplification", "(", "C", ",", "sites", ")", ")", "# GMPE originally in cm/s/s - convert to g", "if", "imt", ".", "name", "in", "\"PGA SA\"", ":", "mean", "-=", "np", ".", "log", "(", "100.0", "*", "g", ")", "stddevs", "=", "self", ".", "get_stddevs", "(", "C", ",", "dists", ".", "rjb", ".", "shape", ",", "stddev_types", ",", "sites", ")", "if", "self", ".", "sigma_mu_epsilon", ":", "# Apply the epistemic uncertainty factor (sigma_mu) multiplied by", "# the number of standard deviations", "sigma_mu", "=", "self", ".", "get_sigma_mu_adjustment", "(", "C", ",", "imt", ",", "rup", ",", "dists", ")", "# Cap sigma_mu at 0.5 ln units", "sigma_mu", "[", "sigma_mu", ">", "0.5", "]", "=", "0.5", "# Sigma mu should not be less than the standard deviation of the", "# fault-to-fault variability", "sigma_mu", "[", "sigma_mu", "<", "C", "[", "\"tau_fault\"", "]", "]", "=", "C", "[", "\"tau_fault\"", "]", "mean", "+=", "(", "self", ".", "sigma_mu_epsilon", "*", "sigma_mu", ")", "return", "mean", ",", "stddevs" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L145-L172
gem/oq-engine
openquake/hazardlib/gsim/kotha_2019.py
KothaEtAl2019.get_magnitude_scaling
def get_magnitude_scaling(self, C, mag):
    """
    Returns the magnitude scaling term
    """
    # Offset from the hinge magnitude Mh
    dmag = mag - self.CONSTANTS["Mh"]
    if mag >= self.CONSTANTS["Mh"]:
        # Linear scaling at and above the hinge magnitude
        return C["e1"] + C["b3"] * dmag
    # Quadratic scaling below the hinge magnitude
    return C["e1"] + C["b1"] * dmag + C["b2"] * (dmag ** 2.0)
python
def get_magnitude_scaling(self, C, mag): d_m = mag - self.CONSTANTS["Mh"] if mag < self.CONSTANTS["Mh"]: return C["e1"] + C["b1"] * d_m + C["b2"] * (d_m ** 2.0) else: return C["e1"] + C["b3"] * d_m
[ "def", "get_magnitude_scaling", "(", "self", ",", "C", ",", "mag", ")", ":", "d_m", "=", "mag", "-", "self", ".", "CONSTANTS", "[", "\"Mh\"", "]", "if", "mag", "<", "self", ".", "CONSTANTS", "[", "\"Mh\"", "]", ":", "return", "C", "[", "\"e1\"", "]", "+", "C", "[", "\"b1\"", "]", "*", "d_m", "+", "C", "[", "\"b2\"", "]", "*", "(", "d_m", "**", "2.0", ")", "else", ":", "return", "C", "[", "\"e1\"", "]", "+", "C", "[", "\"b3\"", "]", "*", "d_m" ]
Returns the magnitude scaling term
[ "Returns", "the", "magnitude", "scaling", "term" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L174-L182
gem/oq-engine
openquake/hazardlib/gsim/kotha_2019.py
KothaEtAl2019.get_distance_term
def get_distance_term(self, C, rup, rjb, imt):
    """
    Returns the distance attenuation factor
    """
    # Effective distance from the Joyner-Boore distance and the
    # depth-dependent pseudo-depth term h
    pseudo_depth = self._get_h(C, rup.hypo_depth)
    rval = np.sqrt(rjb ** 2. + pseudo_depth ** 2.)
    c3 = self.get_distance_coefficients(C, imt)
    rref = self.CONSTANTS["Rref"]
    # Magnitude-dependent geometric spreading plus anelastic attenuation
    spreading = C["c1"] + C["c2"] * (rup.mag - self.CONSTANTS["Mref"])
    return spreading * np.log(rval / rref) + c3 * (rval - rref)
python
def get_distance_term(self, C, rup, rjb, imt): h = self._get_h(C, rup.hypo_depth) rval = np.sqrt(rjb ** 2. + h ** 2.) c3 = self.get_distance_coefficients(C, imt) f_r = (C["c1"] + C["c2"] * (rup.mag - self.CONSTANTS["Mref"])) *\ np.log(rval / self.CONSTANTS["Rref"]) +\ c3 * (rval - self.CONSTANTS["Rref"]) return f_r
[ "def", "get_distance_term", "(", "self", ",", "C", ",", "rup", ",", "rjb", ",", "imt", ")", ":", "h", "=", "self", ".", "_get_h", "(", "C", ",", "rup", ".", "hypo_depth", ")", "rval", "=", "np", ".", "sqrt", "(", "rjb", "**", "2.", "+", "h", "**", "2.", ")", "c3", "=", "self", ".", "get_distance_coefficients", "(", "C", ",", "imt", ")", "f_r", "=", "(", "C", "[", "\"c1\"", "]", "+", "C", "[", "\"c2\"", "]", "*", "(", "rup", ".", "mag", "-", "self", ".", "CONSTANTS", "[", "\"Mref\"", "]", ")", ")", "*", "np", ".", "log", "(", "rval", "/", "self", ".", "CONSTANTS", "[", "\"Rref\"", "]", ")", "+", "c3", "*", "(", "rval", "-", "self", ".", "CONSTANTS", "[", "\"Rref\"", "]", ")", "return", "f_r" ]
Returns the distance attenuation factor
[ "Returns", "the", "distance", "attenuation", "factor" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L184-L195
gem/oq-engine
openquake/hazardlib/gsim/kotha_2019.py
KothaEtAl2019.get_distance_coefficients
def get_distance_coefficients(self, C, imt):
    """
    Returns the c3 term

    Taken from the GMPE's own c3 table when one has been supplied,
    otherwise from the coefficient dictionary for the IMT.
    """
    if self.c3:
        return self.c3[imt]["c3"]
    return C["c3"]
python
def get_distance_coefficients(self, C, imt): c3 = self.c3[imt]["c3"] if self.c3 else C["c3"] return c3
[ "def", "get_distance_coefficients", "(", "self", ",", "C", ",", "imt", ")", ":", "c3", "=", "self", ".", "c3", "[", "imt", "]", "[", "\"c3\"", "]", "if", "self", ".", "c3", "else", "C", "[", "\"c3\"", "]", "return", "c3" ]
Returns the c3 term
[ "Returns", "the", "c3", "term" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L208-L213
gem/oq-engine
openquake/hazardlib/gsim/kotha_2019.py
KothaEtAl2019.get_sigma_mu_adjustment
def get_sigma_mu_adjustment(self, C, imt, rup, dists):
    """
    Returns the sigma mu adjustment factor, interpolated from the
    pre-loaded sigma mu tables with respect to magnitude, distance and
    (for SA) spectral period.
    """
    # NB: membership is tested against a tuple; the original substring
    # test ``imt.name in "PGA PGV"`` would also wrongly match names
    # such as "A" or "GA"
    if imt.name in ("PGA", "PGV"):
        # PGA and PGV are 2D arrays of dimension [nmags, ndists]
        sigma_mu = getattr(self, imt.name.lower())
        if rup.mag <= self.mags[0]:
            sigma_mu_m = sigma_mu[0, :]
        elif rup.mag >= self.mags[-1]:
            sigma_mu_m = sigma_mu[-1, :]
        else:
            intpl1 = interp1d(self.mags, sigma_mu, axis=0)
            sigma_mu_m = intpl1(rup.mag)
        # Linear interpolation with distance, clamped to the table edges
        intpl2 = interp1d(self.dists, sigma_mu_m, bounds_error=False,
                          fill_value=(sigma_mu_m[0], sigma_mu_m[-1]))
        return intpl2(dists.rjb)
    # In the case of SA the array is of dimension [nmags, ndists, nperiods]
    # Get values for given magnitude
    if rup.mag <= self.mags[0]:
        sigma_mu_m = self.s_a[0, :, :]
    elif rup.mag >= self.mags[-1]:
        sigma_mu_m = self.s_a[-1, :, :]
    else:
        intpl1 = interp1d(self.mags, self.s_a, axis=0)
        sigma_mu_m = intpl1(rup.mag)
    # Get values for period - N.B. ln T, linear sigma mu interpolation
    if imt.period <= self.periods[0]:
        sigma_mu_t = sigma_mu_m[:, 0]
    elif imt.period >= self.periods[-1]:
        sigma_mu_t = sigma_mu_m[:, -1]
    else:
        intpl2 = interp1d(np.log(self.periods), sigma_mu_m, axis=1)
        sigma_mu_t = intpl2(np.log(imt.period))
    # Finally interpolate with distance, clamped to the table edges
    intpl3 = interp1d(self.dists, sigma_mu_t, bounds_error=False,
                      fill_value=(sigma_mu_t[0], sigma_mu_t[-1]))
    return intpl3(dists.rjb)
python
def get_sigma_mu_adjustment(self, C, imt, rup, dists): if imt.name in "PGA PGV": sigma_mu = getattr(self, imt.name.lower()) if rup.mag <= self.mags[0]: sigma_mu_m = sigma_mu[0, :] elif rup.mag >= self.mags[-1]: sigma_mu_m = sigma_mu[-1, :] else: intpl1 = interp1d(self.mags, sigma_mu, axis=0) sigma_mu_m = intpl1(rup.mag) intpl2 = interp1d(self.dists, sigma_mu_m, bounds_error=False, fill_value=(sigma_mu_m[0], sigma_mu_m[-1])) return intpl2(dists.rjb) if rup.mag <= self.mags[0]: sigma_mu_m = self.s_a[0, :, :] elif rup.mag >= self.mags[-1]: sigma_mu_m = self.s_a[-1, :, :] else: intpl1 = interp1d(self.mags, self.s_a, axis=0) sigma_mu_m = intpl1(rup.mag) if imt.period <= self.periods[0]: sigma_mu_t = sigma_mu_m[:, 0] elif imt.period >= self.periods[-1]: sigma_mu_t = sigma_mu_m[:, -1] else: intpl2 = interp1d(np.log(self.periods), sigma_mu_m, axis=1) sigma_mu_t = intpl2(np.log(imt.period)) intpl3 = interp1d(self.dists, sigma_mu_t, bounds_error=False, fill_value=(sigma_mu_t[0], sigma_mu_t[-1])) return intpl3(dists.rjb)
[ "def", "get_sigma_mu_adjustment", "(", "self", ",", "C", ",", "imt", ",", "rup", ",", "dists", ")", ":", "if", "imt", ".", "name", "in", "\"PGA PGV\"", ":", "# PGA and PGV are 2D arrays of dimension [nmags, ndists]", "sigma_mu", "=", "getattr", "(", "self", ",", "imt", ".", "name", ".", "lower", "(", ")", ")", "if", "rup", ".", "mag", "<=", "self", ".", "mags", "[", "0", "]", ":", "sigma_mu_m", "=", "sigma_mu", "[", "0", ",", ":", "]", "elif", "rup", ".", "mag", ">=", "self", ".", "mags", "[", "-", "1", "]", ":", "sigma_mu_m", "=", "sigma_mu", "[", "-", "1", ",", ":", "]", "else", ":", "intpl1", "=", "interp1d", "(", "self", ".", "mags", ",", "sigma_mu", ",", "axis", "=", "0", ")", "sigma_mu_m", "=", "intpl1", "(", "rup", ".", "mag", ")", "# Linear interpolation with distance", "intpl2", "=", "interp1d", "(", "self", ".", "dists", ",", "sigma_mu_m", ",", "bounds_error", "=", "False", ",", "fill_value", "=", "(", "sigma_mu_m", "[", "0", "]", ",", "sigma_mu_m", "[", "-", "1", "]", ")", ")", "return", "intpl2", "(", "dists", ".", "rjb", ")", "# In the case of SA the array is of dimension [nmags, ndists, nperiods]", "# Get values for given magnitude", "if", "rup", ".", "mag", "<=", "self", ".", "mags", "[", "0", "]", ":", "sigma_mu_m", "=", "self", ".", "s_a", "[", "0", ",", ":", ",", ":", "]", "elif", "rup", ".", "mag", ">=", "self", ".", "mags", "[", "-", "1", "]", ":", "sigma_mu_m", "=", "self", ".", "s_a", "[", "-", "1", ",", ":", ",", ":", "]", "else", ":", "intpl1", "=", "interp1d", "(", "self", ".", "mags", ",", "self", ".", "s_a", ",", "axis", "=", "0", ")", "sigma_mu_m", "=", "intpl1", "(", "rup", ".", "mag", ")", "# Get values for period - N.B. 
ln T, linear sigma mu interpolation", "if", "imt", ".", "period", "<=", "self", ".", "periods", "[", "0", "]", ":", "sigma_mu_t", "=", "sigma_mu_m", "[", ":", ",", "0", "]", "elif", "imt", ".", "period", ">=", "self", ".", "periods", "[", "-", "1", "]", ":", "sigma_mu_t", "=", "sigma_mu_m", "[", ":", ",", "-", "1", "]", "else", ":", "intpl2", "=", "interp1d", "(", "np", ".", "log", "(", "self", ".", "periods", ")", ",", "sigma_mu_m", ",", "axis", "=", "1", ")", "sigma_mu_t", "=", "intpl2", "(", "np", ".", "log", "(", "imt", ".", "period", ")", ")", "intpl3", "=", "interp1d", "(", "self", ".", "dists", ",", "sigma_mu_t", ",", "bounds_error", "=", "False", ",", "fill_value", "=", "(", "sigma_mu_t", "[", "0", "]", ",", "sigma_mu_t", "[", "-", "1", "]", ")", ")", "return", "intpl3", "(", "dists", ".", "rjb", ")" ]
Returns the sigma mu adjustment factor
[ "Returns", "the", "sigma", "mu", "adjustment", "factor" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L221-L258
gem/oq-engine
openquake/hazardlib/gsim/kotha_2019.py
KothaEtAl2019SERA.get_site_amplification
def get_site_amplification(self, C, sites):
    """
    Returns the linear site amplification term, using separate
    coefficients depending on whether the Vs30 is observed or inferred
    """
    log_vs30 = np.log(sites.vs30)
    amplification = np.zeros(sites.vs30.shape)
    observed = sites.vs30measured
    inferred = np.logical_not(observed)
    # Sites with a measured Vs30
    amplification[observed] = (C["d0_obs"] +
                               C["d1_obs"] * log_vs30[observed])
    # Sites with an inferred Vs30
    amplification[inferred] = (C["d0_inf"] +
                               C["d1_inf"] * log_vs30[inferred])
    return amplification
python
def get_site_amplification(self, C, sites): ampl = np.zeros(sites.vs30.shape) ampl[sites.vs30measured] = (C["d0_obs"] + C["d1_obs"] * np.log(sites.vs30[sites.vs30measured])) idx = np.logical_not(sites.vs30measured) ampl[idx] = (C["d0_inf"] + C["d1_inf"] * np.log(sites.vs30[idx])) return ampl
[ "def", "get_site_amplification", "(", "self", ",", "C", ",", "sites", ")", ":", "ampl", "=", "np", ".", "zeros", "(", "sites", ".", "vs30", ".", "shape", ")", "# For observed vs30 sites", "ampl", "[", "sites", ".", "vs30measured", "]", "=", "(", "C", "[", "\"d0_obs\"", "]", "+", "C", "[", "\"d1_obs\"", "]", "*", "np", ".", "log", "(", "sites", ".", "vs30", "[", "sites", ".", "vs30measured", "]", ")", ")", "# For inferred Vs30 sites", "idx", "=", "np", ".", "logical_not", "(", "sites", ".", "vs30measured", ")", "ampl", "[", "idx", "]", "=", "(", "C", "[", "\"d0_inf\"", "]", "+", "C", "[", "\"d1_inf\"", "]", "*", "np", ".", "log", "(", "sites", ".", "vs30", "[", "idx", "]", ")", ")", "return", "ampl" ]
Returns the linear site amplification term depending on whether the Vs30 is observed of inferred
[ "Returns", "the", "linear", "site", "amplification", "term", "depending", "on", "whether", "the", "Vs30", "is", "observed", "of", "inferred" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L332-L344
gem/oq-engine
openquake/hazardlib/gsim/kotha_2019.py
KothaEtAl2019SERA.get_stddevs
def get_stddevs(self, C, stddev_shape, stddev_types, sites):
    """
    Returns the standard deviations, with a different site standard
    deviation for inferred vs. observed vs30 sites.
    """
    tau = C["tau_event"]
    # Site-to-site variability depends on whether Vs30 was measured
    sigma_s = np.zeros(sites.vs30measured.shape, dtype=float)
    sigma_s[sites.vs30measured] += C["sigma_s_obs"]
    sigma_s[np.logical_not(sites.vs30measured)] += C["sigma_s_inf"]
    # Within-event term combines phi0 with the site-to-site term
    phi = np.sqrt(C["phi0"] ** 2.0 + sigma_s ** 2.)
    stddevs = []
    for stype in stddev_types:
        assert stype in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
        if stype == const.StdDev.TOTAL:
            total = np.sqrt(tau ** 2. + phi ** 2.)
            stddevs.append(total + np.zeros(stddev_shape))
        elif stype == const.StdDev.INTRA_EVENT:
            stddevs.append(phi + np.zeros(stddev_shape))
        elif stype == const.StdDev.INTER_EVENT:
            stddevs.append(tau + np.zeros(stddev_shape))
    return stddevs
python
def get_stddevs(self, C, stddev_shape, stddev_types, sites): stddevs = [] tau = C["tau_event"] sigma_s = np.zeros(sites.vs30measured.shape, dtype=float) sigma_s[sites.vs30measured] += C["sigma_s_obs"] sigma_s[np.logical_not(sites.vs30measured)] += C["sigma_s_inf"] phi = np.sqrt(C["phi0"] ** 2.0 + sigma_s ** 2.) for stddev_type in stddev_types: assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const.StdDev.TOTAL: stddevs.append(np.sqrt(tau ** 2. + phi ** 2.) + np.zeros(stddev_shape)) elif stddev_type == const.StdDev.INTRA_EVENT: stddevs.append(phi + np.zeros(stddev_shape)) elif stddev_type == const.StdDev.INTER_EVENT: stddevs.append(tau + np.zeros(stddev_shape)) return stddevs
[ "def", "get_stddevs", "(", "self", ",", "C", ",", "stddev_shape", ",", "stddev_types", ",", "sites", ")", ":", "stddevs", "=", "[", "]", "tau", "=", "C", "[", "\"tau_event\"", "]", "sigma_s", "=", "np", ".", "zeros", "(", "sites", ".", "vs30measured", ".", "shape", ",", "dtype", "=", "float", ")", "sigma_s", "[", "sites", ".", "vs30measured", "]", "+=", "C", "[", "\"sigma_s_obs\"", "]", "sigma_s", "[", "np", ".", "logical_not", "(", "sites", ".", "vs30measured", ")", "]", "+=", "C", "[", "\"sigma_s_inf\"", "]", "phi", "=", "np", ".", "sqrt", "(", "C", "[", "\"phi0\"", "]", "**", "2.0", "+", "sigma_s", "**", "2.", ")", "for", "stddev_type", "in", "stddev_types", ":", "assert", "stddev_type", "in", "self", ".", "DEFINED_FOR_STANDARD_DEVIATION_TYPES", "if", "stddev_type", "==", "const", ".", "StdDev", ".", "TOTAL", ":", "stddevs", ".", "append", "(", "np", ".", "sqrt", "(", "tau", "**", "2.", "+", "phi", "**", "2.", ")", "+", "np", ".", "zeros", "(", "stddev_shape", ")", ")", "elif", "stddev_type", "==", "const", ".", "StdDev", ".", "INTRA_EVENT", ":", "stddevs", ".", "append", "(", "phi", "+", "np", ".", "zeros", "(", "stddev_shape", ")", ")", "elif", "stddev_type", "==", "const", ".", "StdDev", ".", "INTER_EVENT", ":", "stddevs", ".", "append", "(", "tau", "+", "np", ".", "zeros", "(", "stddev_shape", ")", ")", "return", "stddevs" ]
Returns the standard deviations, with different site standard deviation for inferred vs. observed vs30 sites.
[ "Returns", "the", "standard", "deviations", "with", "different", "site", "standard", "deviation", "for", "inferred", "vs", ".", "observed", "vs30", "sites", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L346-L366
gem/oq-engine
openquake/hazardlib/geo/geodetic.py
geodetic_distance
def geodetic_distance(lons1, lats1, lons2, lats2, diameter=2*EARTH_RADIUS):
    """
    Calculate the geodetic distance between two points or two collections
    of points using the haversine formula.

    Parameters are coordinates in decimal degrees. They could be scalar
    float numbers or numpy arrays, in which case they should "broadcast
    together".

    Implements http://williams.best.vwh.net/avform.htm#Dist

    :returns:
        Distance in km, floating point scalar or numpy array of such.
    """
    lons1, lats1, lons2, lats2 = _prepare_coords(lons1, lats1, lons2, lats2)
    # Half-angle sines of the latitude and longitude separations
    sin_dlat_2 = numpy.sin((lats1 - lats2) / 2.0)
    sin_dlon_2 = numpy.sin((lons1 - lons2) / 2.0)
    haversine = (sin_dlat_2 ** 2.0 +
                 numpy.cos(lats1) * numpy.cos(lats2) * sin_dlon_2 ** 2.0)
    return diameter * numpy.arcsin(numpy.sqrt(haversine))
python
def geodetic_distance(lons1, lats1, lons2, lats2, diameter=2*EARTH_RADIUS): lons1, lats1, lons2, lats2 = _prepare_coords(lons1, lats1, lons2, lats2) distance = numpy.arcsin(numpy.sqrt( numpy.sin((lats1 - lats2) / 2.0) ** 2.0 + numpy.cos(lats1) * numpy.cos(lats2) * numpy.sin((lons1 - lons2) / 2.0) ** 2.0 )) return diameter * distance
[ "def", "geodetic_distance", "(", "lons1", ",", "lats1", ",", "lons2", ",", "lats2", ",", "diameter", "=", "2", "*", "EARTH_RADIUS", ")", ":", "lons1", ",", "lats1", ",", "lons2", ",", "lats2", "=", "_prepare_coords", "(", "lons1", ",", "lats1", ",", "lons2", ",", "lats2", ")", "distance", "=", "numpy", ".", "arcsin", "(", "numpy", ".", "sqrt", "(", "numpy", ".", "sin", "(", "(", "lats1", "-", "lats2", ")", "/", "2.0", ")", "**", "2.0", "+", "numpy", ".", "cos", "(", "lats1", ")", "*", "numpy", ".", "cos", "(", "lats2", ")", "*", "numpy", ".", "sin", "(", "(", "lons1", "-", "lons2", ")", "/", "2.0", ")", "**", "2.0", ")", ")", "return", "diameter", "*", "distance" ]
Calculate the geodetic distance between two points or two collections of points. Parameters are coordinates in decimal degrees. They could be scalar float numbers or numpy arrays, in which case they should "broadcast together". Implements http://williams.best.vwh.net/avform.htm#Dist :returns: Distance in km, floating point scalar or numpy array of such.
[ "Calculate", "the", "geodetic", "distance", "between", "two", "points", "or", "two", "collections", "of", "points", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L34-L54
gem/oq-engine
openquake/hazardlib/geo/geodetic.py
azimuth
def azimuth(lons1, lats1, lons2, lats2):
    """
    Calculate the azimuth between two points or two collections of points.

    Parameters are the same as for :func:`geodetic_distance`.

    Implements an "alternative formula" from
    http://williams.best.vwh.net/avform.htm#Crs

    :returns:
        Azimuth as an angle between direction to north from first point
        and direction to the second point measured clockwise in decimal
        degrees.
    """
    lons1, lats1, lons2, lats2 = _prepare_coords(lons1, lats1, lons2, lats2)
    cos_lat2 = numpy.cos(lats2)
    # atan2 components of the true course (counter-clockwise from north)
    y = numpy.sin(lons1 - lons2) * cos_lat2
    x = (numpy.cos(lats1) * numpy.sin(lats2) -
         numpy.sin(lats1) * cos_lat2 * numpy.cos(lons1 - lons2))
    true_course = numpy.degrees(numpy.arctan2(y, x))
    # Convert to clockwise-from-north, wrapped into [0, 360)
    return (360 - true_course) % 360
python
def azimuth(lons1, lats1, lons2, lats2): lons1, lats1, lons2, lats2 = _prepare_coords(lons1, lats1, lons2, lats2) cos_lat2 = numpy.cos(lats2) true_course = numpy.degrees(numpy.arctan2( numpy.sin(lons1 - lons2) * cos_lat2, numpy.cos(lats1) * numpy.sin(lats2) - numpy.sin(lats1) * cos_lat2 * numpy.cos(lons1 - lons2) )) return (360 - true_course) % 360
[ "def", "azimuth", "(", "lons1", ",", "lats1", ",", "lons2", ",", "lats2", ")", ":", "lons1", ",", "lats1", ",", "lons2", ",", "lats2", "=", "_prepare_coords", "(", "lons1", ",", "lats1", ",", "lons2", ",", "lats2", ")", "cos_lat2", "=", "numpy", ".", "cos", "(", "lats2", ")", "true_course", "=", "numpy", ".", "degrees", "(", "numpy", ".", "arctan2", "(", "numpy", ".", "sin", "(", "lons1", "-", "lons2", ")", "*", "cos_lat2", ",", "numpy", ".", "cos", "(", "lats1", ")", "*", "numpy", ".", "sin", "(", "lats2", ")", "-", "numpy", ".", "sin", "(", "lats1", ")", "*", "cos_lat2", "*", "numpy", ".", "cos", "(", "lons1", "-", "lons2", ")", ")", ")", "return", "(", "360", "-", "true_course", ")", "%", "360" ]
Calculate the azimuth between two points or two collections of points. Parameters are the same as for :func:`geodetic_distance`. Implements an "alternative formula" from http://williams.best.vwh.net/avform.htm#Crs :returns: Azimuth as an angle between direction to north from first point and direction to the second point measured clockwise in decimal degrees.
[ "Calculate", "the", "azimuth", "between", "two", "points", "or", "two", "collections", "of", "points", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L57-L77
gem/oq-engine
openquake/hazardlib/geo/geodetic.py
distance
def distance(lons1, lats1, depths1, lons2, lats2, depths2): """ Calculate a distance between two points (or collections of points) considering points' depth. Calls :func:`geodetic_distance`, finds the "vertical" distance between points by subtracting one depth from another and combine both using Pythagoras theorem. :returns: Distance in km, a square root of sum of squares of :func:`geodetic <geodetic_distance>` distance and vertical distance, which is just a difference between depths. """ hdist = geodetic_distance(lons1, lats1, lons2, lats2) vdist = depths1 - depths2 return numpy.sqrt(hdist ** 2 + vdist ** 2)
python
def distance(lons1, lats1, depths1, lons2, lats2, depths2): hdist = geodetic_distance(lons1, lats1, lons2, lats2) vdist = depths1 - depths2 return numpy.sqrt(hdist ** 2 + vdist ** 2)
[ "def", "distance", "(", "lons1", ",", "lats1", ",", "depths1", ",", "lons2", ",", "lats2", ",", "depths2", ")", ":", "hdist", "=", "geodetic_distance", "(", "lons1", ",", "lats1", ",", "lons2", ",", "lats2", ")", "vdist", "=", "depths1", "-", "depths2", "return", "numpy", ".", "sqrt", "(", "hdist", "**", "2", "+", "vdist", "**", "2", ")" ]
Calculate a distance between two points (or collections of points) considering points' depth. Calls :func:`geodetic_distance`, finds the "vertical" distance between points by subtracting one depth from another and combine both using Pythagoras theorem. :returns: Distance in km, a square root of sum of squares of :func:`geodetic <geodetic_distance>` distance and vertical distance, which is just a difference between depths.
[ "Calculate", "a", "distance", "between", "two", "points", "(", "or", "collections", "of", "points", ")", "considering", "points", "depth", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L80-L96
gem/oq-engine
openquake/hazardlib/geo/geodetic.py
min_distance_to_segment
def min_distance_to_segment(seglons, seglats, lons, lats): """ This function computes the shortest distance to a segment in a 2D reference system. :parameter seglons: A list or an array of floats specifying the longitude values of the two vertexes delimiting the segment. :parameter seglats: A list or an array of floats specifying the latitude values of the two vertexes delimiting the segment. :parameter lons: A list or a 1D array of floats specifying the longitude values of the points for which the calculation of the shortest distance is requested. :parameter lats: A list or a 1D array of floats specifying the latitude values of the points for which the calculation of the shortest distance is requested. :returns: An array of the same shape as lons which contains for each point defined by (lons, lats) the shortest distance to the segment. Distances are negative for those points that stay on the 'left side' of the segment direction and whose projection lies within the segment edges. For all other points, distance is positive. """ # Check the size of the seglons, seglats arrays assert len(seglons) == len(seglats) == 2 # Compute the azimuth of the segment seg_azim = azimuth(seglons[0], seglats[0], seglons[1], seglats[1]) # Compute the azimuth of the direction obtained # connecting the first point defining the segment and each site azimuth1 = azimuth(seglons[0], seglats[0], lons, lats) # Compute the azimuth of the direction obtained # connecting the second point defining the segment and each site azimuth2 = azimuth(seglons[1], seglats[1], lons, lats) # Find the points inside the band defined by the two lines perpendicular # to the segment direction passing through the two vertexes of the segment. # For these points the closest distance is the distance from the great arc. 
idx_in = numpy.nonzero( (numpy.cos(numpy.radians(seg_azim-azimuth1)) >= 0.0) & (numpy.cos(numpy.radians(seg_azim-azimuth2)) <= 0.0)) # Find the points outside the band defined by the two line perpendicular # to the segment direction passing through the two vertexes of the segment. # For these points the closest distance is the minimum of the distance from # the two point vertexes. idx_out = numpy.nonzero( (numpy.cos(numpy.radians(seg_azim-azimuth1)) < 0.0) | (numpy.cos(numpy.radians(seg_azim-azimuth2)) > 0.0)) # Find the indexes of points 'on the left of the segment' idx_neg = numpy.nonzero(numpy.sin(numpy.radians( (azimuth1-seg_azim))) < 0.0) # Now let's compute the distances for the two cases. dists = numpy.zeros_like(lons) if len(idx_in[0]): dists[idx_in] = distance_to_arc( seglons[0], seglats[0], seg_azim, lons[idx_in], lats[idx_in]) if len(idx_out[0]): dists[idx_out] = min_geodetic_distance( (seglons, seglats), (lons[idx_out], lats[idx_out])) # Finally we correct the sign of the distances in order to make sure that # the points on the right semispace defined using as a reference the # direction defined by the segment (i.e. the direction defined by going # from the first point to the second one) have a positive distance and # the others a negative one. dists = abs(dists) dists[idx_neg] = - dists[idx_neg] return dists
python
def min_distance_to_segment(seglons, seglats, lons, lats): assert len(seglons) == len(seglats) == 2 seg_azim = azimuth(seglons[0], seglats[0], seglons[1], seglats[1]) azimuth1 = azimuth(seglons[0], seglats[0], lons, lats) azimuth2 = azimuth(seglons[1], seglats[1], lons, lats) idx_in = numpy.nonzero( (numpy.cos(numpy.radians(seg_azim-azimuth1)) >= 0.0) & (numpy.cos(numpy.radians(seg_azim-azimuth2)) <= 0.0)) idx_out = numpy.nonzero( (numpy.cos(numpy.radians(seg_azim-azimuth1)) < 0.0) | (numpy.cos(numpy.radians(seg_azim-azimuth2)) > 0.0)) idx_neg = numpy.nonzero(numpy.sin(numpy.radians( (azimuth1-seg_azim))) < 0.0) dists = numpy.zeros_like(lons) if len(idx_in[0]): dists[idx_in] = distance_to_arc( seglons[0], seglats[0], seg_azim, lons[idx_in], lats[idx_in]) if len(idx_out[0]): dists[idx_out] = min_geodetic_distance( (seglons, seglats), (lons[idx_out], lats[idx_out])) dists = abs(dists) dists[idx_neg] = - dists[idx_neg] return dists
[ "def", "min_distance_to_segment", "(", "seglons", ",", "seglats", ",", "lons", ",", "lats", ")", ":", "# Check the size of the seglons, seglats arrays", "assert", "len", "(", "seglons", ")", "==", "len", "(", "seglats", ")", "==", "2", "# Compute the azimuth of the segment", "seg_azim", "=", "azimuth", "(", "seglons", "[", "0", "]", ",", "seglats", "[", "0", "]", ",", "seglons", "[", "1", "]", ",", "seglats", "[", "1", "]", ")", "# Compute the azimuth of the direction obtained", "# connecting the first point defining the segment and each site", "azimuth1", "=", "azimuth", "(", "seglons", "[", "0", "]", ",", "seglats", "[", "0", "]", ",", "lons", ",", "lats", ")", "# Compute the azimuth of the direction obtained", "# connecting the second point defining the segment and each site", "azimuth2", "=", "azimuth", "(", "seglons", "[", "1", "]", ",", "seglats", "[", "1", "]", ",", "lons", ",", "lats", ")", "# Find the points inside the band defined by the two lines perpendicular", "# to the segment direction passing through the two vertexes of the segment.", "# For these points the closest distance is the distance from the great arc.", "idx_in", "=", "numpy", ".", "nonzero", "(", "(", "numpy", ".", "cos", "(", "numpy", ".", "radians", "(", "seg_azim", "-", "azimuth1", ")", ")", ">=", "0.0", ")", "&", "(", "numpy", ".", "cos", "(", "numpy", ".", "radians", "(", "seg_azim", "-", "azimuth2", ")", ")", "<=", "0.0", ")", ")", "# Find the points outside the band defined by the two line perpendicular", "# to the segment direction passing through the two vertexes of the segment.", "# For these points the closest distance is the minimum of the distance from", "# the two point vertexes.", "idx_out", "=", "numpy", ".", "nonzero", "(", "(", "numpy", ".", "cos", "(", "numpy", ".", "radians", "(", "seg_azim", "-", "azimuth1", ")", ")", "<", "0.0", ")", "|", "(", "numpy", ".", "cos", "(", "numpy", ".", "radians", "(", "seg_azim", "-", "azimuth2", ")", ")", ">", "0.0", ")", ")", "# Find 
the indexes of points 'on the left of the segment'", "idx_neg", "=", "numpy", ".", "nonzero", "(", "numpy", ".", "sin", "(", "numpy", ".", "radians", "(", "(", "azimuth1", "-", "seg_azim", ")", ")", ")", "<", "0.0", ")", "# Now let's compute the distances for the two cases.", "dists", "=", "numpy", ".", "zeros_like", "(", "lons", ")", "if", "len", "(", "idx_in", "[", "0", "]", ")", ":", "dists", "[", "idx_in", "]", "=", "distance_to_arc", "(", "seglons", "[", "0", "]", ",", "seglats", "[", "0", "]", ",", "seg_azim", ",", "lons", "[", "idx_in", "]", ",", "lats", "[", "idx_in", "]", ")", "if", "len", "(", "idx_out", "[", "0", "]", ")", ":", "dists", "[", "idx_out", "]", "=", "min_geodetic_distance", "(", "(", "seglons", ",", "seglats", ")", ",", "(", "lons", "[", "idx_out", "]", ",", "lats", "[", "idx_out", "]", ")", ")", "# Finally we correct the sign of the distances in order to make sure that", "# the points on the right semispace defined using as a reference the", "# direction defined by the segment (i.e. the direction defined by going", "# from the first point to the second one) have a positive distance and", "# the others a negative one.", "dists", "=", "abs", "(", "dists", ")", "dists", "[", "idx_neg", "]", "=", "-", "dists", "[", "idx_neg", "]", "return", "dists" ]
This function computes the shortest distance to a segment in a 2D reference system. :parameter seglons: A list or an array of floats specifying the longitude values of the two vertexes delimiting the segment. :parameter seglats: A list or an array of floats specifying the latitude values of the two vertexes delimiting the segment. :parameter lons: A list or a 1D array of floats specifying the longitude values of the points for which the calculation of the shortest distance is requested. :parameter lats: A list or a 1D array of floats specifying the latitude values of the points for which the calculation of the shortest distance is requested. :returns: An array of the same shape as lons which contains for each point defined by (lons, lats) the shortest distance to the segment. Distances are negative for those points that stay on the 'left side' of the segment direction and whose projection lies within the segment edges. For all other points, distance is positive.
[ "This", "function", "computes", "the", "shortest", "distance", "to", "a", "segment", "in", "a", "2D", "reference", "system", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L99-L174
gem/oq-engine
openquake/hazardlib/geo/geodetic.py
spherical_to_cartesian
def spherical_to_cartesian(lons, lats, depths=None): """ Return the position vectors (in Cartesian coordinates) of list of spherical coordinates. For equations see: http://mathworld.wolfram.com/SphericalCoordinates.html. Parameters are components of spherical coordinates in a form of scalars, lists or numpy arrays. ``depths`` can be ``None`` in which case it's considered zero for all points. :returns: ``numpy.array`` of 3d vectors representing points' coordinates in Cartesian space in km. The array has shape `lons.shape + (3,)`. In particular, if ``lons`` and ``lats`` are scalars the result is a 3D vector and if they are vectors the result is a matrix of shape (N, 3). See also :func:`cartesian_to_spherical`. """ phi = numpy.radians(lons) theta = numpy.radians(lats) if depths is None: rr = EARTH_RADIUS else: rr = EARTH_RADIUS - numpy.array(depths) cos_theta_r = rr * numpy.cos(theta) try: shape = lons.shape except AttributeError: # a list/tuple was passed try: shape = (len(lons),) except TypeError: # a scalar was passed shape = () arr = numpy.zeros(shape + (3,)) arr[..., 0] = cos_theta_r * numpy.cos(phi) arr[..., 1] = cos_theta_r * numpy.sin(phi) arr[..., 2] = rr * numpy.sin(theta) return arr
python
def spherical_to_cartesian(lons, lats, depths=None): phi = numpy.radians(lons) theta = numpy.radians(lats) if depths is None: rr = EARTH_RADIUS else: rr = EARTH_RADIUS - numpy.array(depths) cos_theta_r = rr * numpy.cos(theta) try: shape = lons.shape except AttributeError: try: shape = (len(lons),) except TypeError: shape = () arr = numpy.zeros(shape + (3,)) arr[..., 0] = cos_theta_r * numpy.cos(phi) arr[..., 1] = cos_theta_r * numpy.sin(phi) arr[..., 2] = rr * numpy.sin(theta) return arr
[ "def", "spherical_to_cartesian", "(", "lons", ",", "lats", ",", "depths", "=", "None", ")", ":", "phi", "=", "numpy", ".", "radians", "(", "lons", ")", "theta", "=", "numpy", ".", "radians", "(", "lats", ")", "if", "depths", "is", "None", ":", "rr", "=", "EARTH_RADIUS", "else", ":", "rr", "=", "EARTH_RADIUS", "-", "numpy", ".", "array", "(", "depths", ")", "cos_theta_r", "=", "rr", "*", "numpy", ".", "cos", "(", "theta", ")", "try", ":", "shape", "=", "lons", ".", "shape", "except", "AttributeError", ":", "# a list/tuple was passed", "try", ":", "shape", "=", "(", "len", "(", "lons", ")", ",", ")", "except", "TypeError", ":", "# a scalar was passed", "shape", "=", "(", ")", "arr", "=", "numpy", ".", "zeros", "(", "shape", "+", "(", "3", ",", ")", ")", "arr", "[", "...", ",", "0", "]", "=", "cos_theta_r", "*", "numpy", ".", "cos", "(", "phi", ")", "arr", "[", "...", ",", "1", "]", "=", "cos_theta_r", "*", "numpy", ".", "sin", "(", "phi", ")", "arr", "[", "...", ",", "2", "]", "=", "rr", "*", "numpy", ".", "sin", "(", "theta", ")", "return", "arr" ]
Return the position vectors (in Cartesian coordinates) of list of spherical coordinates. For equations see: http://mathworld.wolfram.com/SphericalCoordinates.html. Parameters are components of spherical coordinates in a form of scalars, lists or numpy arrays. ``depths`` can be ``None`` in which case it's considered zero for all points. :returns: ``numpy.array`` of 3d vectors representing points' coordinates in Cartesian space in km. The array has shape `lons.shape + (3,)`. In particular, if ``lons`` and ``lats`` are scalars the result is a 3D vector and if they are vectors the result is a matrix of shape (N, 3). See also :func:`cartesian_to_spherical`.
[ "Return", "the", "position", "vectors", "(", "in", "Cartesian", "coordinates", ")", "of", "list", "of", "spherical", "coordinates", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L183-L221
gem/oq-engine
openquake/hazardlib/geo/geodetic.py
min_geodetic_distance
def min_geodetic_distance(a, b): """ Compute the minimum distance between first mesh and each point of the second mesh when both are defined on the earth surface. :param a: a pair of (lons, lats) or an array of cartesian coordinates :param b: a pair of (lons, lats) or an array of cartesian coordinates """ if isinstance(a, tuple): a = spherical_to_cartesian(a[0].flatten(), a[1].flatten()) if isinstance(b, tuple): b = spherical_to_cartesian(b[0].flatten(), b[1].flatten()) return cdist(a, b).min(axis=0)
python
def min_geodetic_distance(a, b): if isinstance(a, tuple): a = spherical_to_cartesian(a[0].flatten(), a[1].flatten()) if isinstance(b, tuple): b = spherical_to_cartesian(b[0].flatten(), b[1].flatten()) return cdist(a, b).min(axis=0)
[ "def", "min_geodetic_distance", "(", "a", ",", "b", ")", ":", "if", "isinstance", "(", "a", ",", "tuple", ")", ":", "a", "=", "spherical_to_cartesian", "(", "a", "[", "0", "]", ".", "flatten", "(", ")", ",", "a", "[", "1", "]", ".", "flatten", "(", ")", ")", "if", "isinstance", "(", "b", ",", "tuple", ")", ":", "b", "=", "spherical_to_cartesian", "(", "b", "[", "0", "]", ".", "flatten", "(", ")", ",", "b", "[", "1", "]", ".", "flatten", "(", ")", ")", "return", "cdist", "(", "a", ",", "b", ")", ".", "min", "(", "axis", "=", "0", ")" ]
Compute the minimum distance between first mesh and each point of the second mesh when both are defined on the earth surface. :param a: a pair of (lons, lats) or an array of cartesian coordinates :param b: a pair of (lons, lats) or an array of cartesian coordinates
[ "Compute", "the", "minimum", "distance", "between", "first", "mesh", "and", "each", "point", "of", "the", "second", "mesh", "when", "both", "are", "defined", "on", "the", "earth", "surface", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L224-L236
gem/oq-engine
openquake/hazardlib/geo/geodetic.py
distance_matrix
def distance_matrix(lons, lats, diameter=2*EARTH_RADIUS): """ :param lons: array of m longitudes :param lats: array of m latitudes :returns: matrix of (m, m) distances """ m = len(lons) assert m == len(lats), (m, len(lats)) lons = numpy.radians(lons) lats = numpy.radians(lats) cos_lats = numpy.cos(lats) result = numpy.zeros((m, m)) for i in range(len(lons)): a = numpy.sin((lats[i] - lats) / 2.0) b = numpy.sin((lons[i] - lons) / 2.0) result[i, :] = numpy.arcsin( numpy.sqrt(a * a + cos_lats[i] * cos_lats * b * b)) * diameter return numpy.matrix(result, copy=False)
python
def distance_matrix(lons, lats, diameter=2*EARTH_RADIUS): m = len(lons) assert m == len(lats), (m, len(lats)) lons = numpy.radians(lons) lats = numpy.radians(lats) cos_lats = numpy.cos(lats) result = numpy.zeros((m, m)) for i in range(len(lons)): a = numpy.sin((lats[i] - lats) / 2.0) b = numpy.sin((lons[i] - lons) / 2.0) result[i, :] = numpy.arcsin( numpy.sqrt(a * a + cos_lats[i] * cos_lats * b * b)) * diameter return numpy.matrix(result, copy=False)
[ "def", "distance_matrix", "(", "lons", ",", "lats", ",", "diameter", "=", "2", "*", "EARTH_RADIUS", ")", ":", "m", "=", "len", "(", "lons", ")", "assert", "m", "==", "len", "(", "lats", ")", ",", "(", "m", ",", "len", "(", "lats", ")", ")", "lons", "=", "numpy", ".", "radians", "(", "lons", ")", "lats", "=", "numpy", ".", "radians", "(", "lats", ")", "cos_lats", "=", "numpy", ".", "cos", "(", "lats", ")", "result", "=", "numpy", ".", "zeros", "(", "(", "m", ",", "m", ")", ")", "for", "i", "in", "range", "(", "len", "(", "lons", ")", ")", ":", "a", "=", "numpy", ".", "sin", "(", "(", "lats", "[", "i", "]", "-", "lats", ")", "/", "2.0", ")", "b", "=", "numpy", ".", "sin", "(", "(", "lons", "[", "i", "]", "-", "lons", ")", "/", "2.0", ")", "result", "[", "i", ",", ":", "]", "=", "numpy", ".", "arcsin", "(", "numpy", ".", "sqrt", "(", "a", "*", "a", "+", "cos_lats", "[", "i", "]", "*", "cos_lats", "*", "b", "*", "b", ")", ")", "*", "diameter", "return", "numpy", ".", "matrix", "(", "result", ",", "copy", "=", "False", ")" ]
:param lons: array of m longitudes :param lats: array of m latitudes :returns: matrix of (m, m) distances
[ ":", "param", "lons", ":", "array", "of", "m", "longitudes", ":", "param", "lats", ":", "array", "of", "m", "latitudes", ":", "returns", ":", "matrix", "of", "(", "m", "m", ")", "distances" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L239-L256
gem/oq-engine
openquake/hazardlib/geo/geodetic.py
intervals_between
def intervals_between(lon1, lat1, depth1, lon2, lat2, depth2, length): """ Find a list of points between two given ones that lie on the same great circle arc and are equally spaced by ``length`` km. :param float lon1, lat1, depth1: Coordinates of a point to start placing intervals from. The first point in the resulting list has these coordinates. :param float lon2, lat2, depth2: Coordinates of the other end of the great circle arc segment to put intervals on. The last resulting point might be closer to the first reference point than the second one or further, since the number of segments is taken as rounded division of length between two reference points and ``length``. :param length: Required distance between two subsequent resulting points, in km. :returns: Tuple of three 1d numpy arrays: longitudes, latitudes and depths of resulting points respectively. Rounds the distance between two reference points with respect to ``length`` and calls :func:`npoints_towards`. """ assert length > 0 hdist = geodetic_distance(lon1, lat1, lon2, lat2) vdist = depth2 - depth1 # if this method is called multiple times with coordinates that are # separated by the same distance, because of floating point imprecisions # the total distance may have slightly different values (for instance if # the distance between two set of points is 65 km, total distance can be # 64.9999999999989910 and 65.0000000000020322). These two values bring to # two different values of num_intervals (32 in the first case and 33 in # the second), and this is a problem because for the same distance we # should have the same number of intervals. 
To reduce potential differences # due to floating point errors, we therefore round total_distance to a # fixed precision (7) total_distance = round(numpy.sqrt(hdist ** 2 + vdist ** 2), 7) num_intervals = int(round(total_distance / length)) if num_intervals == 0: return numpy.array([lon1]), numpy.array([lat1]), numpy.array([depth1]) dist_factor = (length * num_intervals) / total_distance return npoints_towards( lon1, lat1, depth1, azimuth(lon1, lat1, lon2, lat2), hdist * dist_factor, vdist * dist_factor, num_intervals + 1)
python
def intervals_between(lon1, lat1, depth1, lon2, lat2, depth2, length): assert length > 0 hdist = geodetic_distance(lon1, lat1, lon2, lat2) vdist = depth2 - depth1 total_distance = round(numpy.sqrt(hdist ** 2 + vdist ** 2), 7) num_intervals = int(round(total_distance / length)) if num_intervals == 0: return numpy.array([lon1]), numpy.array([lat1]), numpy.array([depth1]) dist_factor = (length * num_intervals) / total_distance return npoints_towards( lon1, lat1, depth1, azimuth(lon1, lat1, lon2, lat2), hdist * dist_factor, vdist * dist_factor, num_intervals + 1)
[ "def", "intervals_between", "(", "lon1", ",", "lat1", ",", "depth1", ",", "lon2", ",", "lat2", ",", "depth2", ",", "length", ")", ":", "assert", "length", ">", "0", "hdist", "=", "geodetic_distance", "(", "lon1", ",", "lat1", ",", "lon2", ",", "lat2", ")", "vdist", "=", "depth2", "-", "depth1", "# if this method is called multiple times with coordinates that are", "# separated by the same distance, because of floating point imprecisions", "# the total distance may have slightly different values (for instance if", "# the distance between two set of points is 65 km, total distance can be", "# 64.9999999999989910 and 65.0000000000020322). These two values bring to", "# two different values of num_intervals (32 in the first case and 33 in", "# the second), and this is a problem because for the same distance we", "# should have the same number of intervals. To reduce potential differences", "# due to floating point errors, we therefore round total_distance to a", "# fixed precision (7)", "total_distance", "=", "round", "(", "numpy", ".", "sqrt", "(", "hdist", "**", "2", "+", "vdist", "**", "2", ")", ",", "7", ")", "num_intervals", "=", "int", "(", "round", "(", "total_distance", "/", "length", ")", ")", "if", "num_intervals", "==", "0", ":", "return", "numpy", ".", "array", "(", "[", "lon1", "]", ")", ",", "numpy", ".", "array", "(", "[", "lat1", "]", ")", ",", "numpy", ".", "array", "(", "[", "depth1", "]", ")", "dist_factor", "=", "(", "length", "*", "num_intervals", ")", "/", "total_distance", "return", "npoints_towards", "(", "lon1", ",", "lat1", ",", "depth1", ",", "azimuth", "(", "lon1", ",", "lat1", ",", "lon2", ",", "lat2", ")", ",", "hdist", "*", "dist_factor", ",", "vdist", "*", "dist_factor", ",", "num_intervals", "+", "1", ")" ]
Find a list of points between two given ones that lie on the same great circle arc and are equally spaced by ``length`` km. :param float lon1, lat1, depth1: Coordinates of a point to start placing intervals from. The first point in the resulting list has these coordinates. :param float lon2, lat2, depth2: Coordinates of the other end of the great circle arc segment to put intervals on. The last resulting point might be closer to the first reference point than the second one or further, since the number of segments is taken as rounded division of length between two reference points and ``length``. :param length: Required distance between two subsequent resulting points, in km. :returns: Tuple of three 1d numpy arrays: longitudes, latitudes and depths of resulting points respectively. Rounds the distance between two reference points with respect to ``length`` and calls :func:`npoints_towards`.
[ "Find", "a", "list", "of", "points", "between", "two", "given", "ones", "that", "lie", "on", "the", "same", "great", "circle", "arc", "and", "are", "equally", "spaced", "by", "length", "km", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L259-L302
gem/oq-engine
openquake/hazardlib/geo/geodetic.py
npoints_between
def npoints_between(lon1, lat1, depth1, lon2, lat2, depth2, npoints): """ Find a list of specified number of points between two given ones that are equally spaced along the great circle arc connecting given points. :param float lon1, lat1, depth1: Coordinates of a point to start from. The first point in a resulting list has these coordinates. :param float lon2, lat2, depth2: Coordinates of a point to finish at. The last point in a resulting list has these coordinates. :param npoints: Integer number of points to return. First and last points count, so if there have to be two intervals, ``npoints`` should be 3. :returns: Tuple of three 1d numpy arrays: longitudes, latitudes and depths of resulting points respectively. Finds distance between two reference points and calls :func:`npoints_towards`. """ hdist = geodetic_distance(lon1, lat1, lon2, lat2) vdist = depth2 - depth1 rlons, rlats, rdepths = npoints_towards( lon1, lat1, depth1, azimuth(lon1, lat1, lon2, lat2), hdist, vdist, npoints ) # the last point should be left intact rlons[-1] = lon2 rlats[-1] = lat2 rdepths[-1] = depth2 return rlons, rlats, rdepths
python
def npoints_between(lon1, lat1, depth1, lon2, lat2, depth2, npoints): hdist = geodetic_distance(lon1, lat1, lon2, lat2) vdist = depth2 - depth1 rlons, rlats, rdepths = npoints_towards( lon1, lat1, depth1, azimuth(lon1, lat1, lon2, lat2), hdist, vdist, npoints ) rlons[-1] = lon2 rlats[-1] = lat2 rdepths[-1] = depth2 return rlons, rlats, rdepths
[ "def", "npoints_between", "(", "lon1", ",", "lat1", ",", "depth1", ",", "lon2", ",", "lat2", ",", "depth2", ",", "npoints", ")", ":", "hdist", "=", "geodetic_distance", "(", "lon1", ",", "lat1", ",", "lon2", ",", "lat2", ")", "vdist", "=", "depth2", "-", "depth1", "rlons", ",", "rlats", ",", "rdepths", "=", "npoints_towards", "(", "lon1", ",", "lat1", ",", "depth1", ",", "azimuth", "(", "lon1", ",", "lat1", ",", "lon2", ",", "lat2", ")", ",", "hdist", ",", "vdist", ",", "npoints", ")", "# the last point should be left intact", "rlons", "[", "-", "1", "]", "=", "lon2", "rlats", "[", "-", "1", "]", "=", "lat2", "rdepths", "[", "-", "1", "]", "=", "depth2", "return", "rlons", ",", "rlats", ",", "rdepths" ]
Find a list of specified number of points between two given ones that are equally spaced along the great circle arc connecting given points. :param float lon1, lat1, depth1: Coordinates of a point to start from. The first point in a resulting list has these coordinates. :param float lon2, lat2, depth2: Coordinates of a point to finish at. The last point in a resulting list has these coordinates. :param npoints: Integer number of points to return. First and last points count, so if there have to be two intervals, ``npoints`` should be 3. :returns: Tuple of three 1d numpy arrays: longitudes, latitudes and depths of resulting points respectively. Finds distance between two reference points and calls :func:`npoints_towards`.
[ "Find", "a", "list", "of", "specified", "number", "of", "points", "between", "two", "given", "ones", "that", "are", "equally", "spaced", "along", "the", "great", "circle", "arc", "connecting", "given", "points", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L305-L336
gem/oq-engine
openquake/hazardlib/geo/geodetic.py
npoints_towards
def npoints_towards(lon, lat, depth, azimuth, hdist, vdist, npoints): """ Find a list of specified number of points starting from a given one along a great circle arc with a given azimuth measured in a given point. :param float lon, lat, depth: Coordinates of a point to start from. The first point in a resulting list has these coordinates. :param azimuth: A direction representing a great circle arc together with a reference point. :param hdist: Horizontal (geodetic) distance from reference point to the last point of the resulting list, in km. :param vdist: Vertical (depth) distance between reference and the last point, in km. :param npoints: Integer number of points to return. First and last points count, so if there have to be two intervals, ``npoints`` should be 3. :returns: Tuple of three 1d numpy arrays: longitudes, latitudes and depths of resulting points respectively. Implements "completely general but more complicated algorithm" from http://williams.best.vwh.net/avform.htm#LL """ assert npoints > 1 rlon, rlat = numpy.radians(lon), numpy.radians(lat) tc = numpy.radians(360 - azimuth) hdists = numpy.arange(npoints, dtype=float) hdists *= (hdist / EARTH_RADIUS) / (npoints - 1) vdists = numpy.arange(npoints, dtype=float) vdists *= vdist / (npoints - 1) sin_dists = numpy.sin(hdists) cos_dists = numpy.cos(hdists) sin_lat = numpy.sin(rlat) cos_lat = numpy.cos(rlat) sin_lats = sin_lat * cos_dists + cos_lat * sin_dists * numpy.cos(tc) lats = numpy.degrees(numpy.arcsin(sin_lats)) dlon = numpy.arctan2(numpy.sin(tc) * sin_dists * cos_lat, cos_dists - sin_lat * sin_lats) lons = numpy.mod(rlon - dlon + numpy.pi, 2 * numpy.pi) - numpy.pi lons = numpy.degrees(lons) depths = vdists + depth # the first point should be left intact lons[0] = lon lats[0] = lat depths[0] = depth return lons, lats, depths
python
def npoints_towards(lon, lat, depth, azimuth, hdist, vdist, npoints): assert npoints > 1 rlon, rlat = numpy.radians(lon), numpy.radians(lat) tc = numpy.radians(360 - azimuth) hdists = numpy.arange(npoints, dtype=float) hdists *= (hdist / EARTH_RADIUS) / (npoints - 1) vdists = numpy.arange(npoints, dtype=float) vdists *= vdist / (npoints - 1) sin_dists = numpy.sin(hdists) cos_dists = numpy.cos(hdists) sin_lat = numpy.sin(rlat) cos_lat = numpy.cos(rlat) sin_lats = sin_lat * cos_dists + cos_lat * sin_dists * numpy.cos(tc) lats = numpy.degrees(numpy.arcsin(sin_lats)) dlon = numpy.arctan2(numpy.sin(tc) * sin_dists * cos_lat, cos_dists - sin_lat * sin_lats) lons = numpy.mod(rlon - dlon + numpy.pi, 2 * numpy.pi) - numpy.pi lons = numpy.degrees(lons) depths = vdists + depth lons[0] = lon lats[0] = lat depths[0] = depth return lons, lats, depths
[ "def", "npoints_towards", "(", "lon", ",", "lat", ",", "depth", ",", "azimuth", ",", "hdist", ",", "vdist", ",", "npoints", ")", ":", "assert", "npoints", ">", "1", "rlon", ",", "rlat", "=", "numpy", ".", "radians", "(", "lon", ")", ",", "numpy", ".", "radians", "(", "lat", ")", "tc", "=", "numpy", ".", "radians", "(", "360", "-", "azimuth", ")", "hdists", "=", "numpy", ".", "arange", "(", "npoints", ",", "dtype", "=", "float", ")", "hdists", "*=", "(", "hdist", "/", "EARTH_RADIUS", ")", "/", "(", "npoints", "-", "1", ")", "vdists", "=", "numpy", ".", "arange", "(", "npoints", ",", "dtype", "=", "float", ")", "vdists", "*=", "vdist", "/", "(", "npoints", "-", "1", ")", "sin_dists", "=", "numpy", ".", "sin", "(", "hdists", ")", "cos_dists", "=", "numpy", ".", "cos", "(", "hdists", ")", "sin_lat", "=", "numpy", ".", "sin", "(", "rlat", ")", "cos_lat", "=", "numpy", ".", "cos", "(", "rlat", ")", "sin_lats", "=", "sin_lat", "*", "cos_dists", "+", "cos_lat", "*", "sin_dists", "*", "numpy", ".", "cos", "(", "tc", ")", "lats", "=", "numpy", ".", "degrees", "(", "numpy", ".", "arcsin", "(", "sin_lats", ")", ")", "dlon", "=", "numpy", ".", "arctan2", "(", "numpy", ".", "sin", "(", "tc", ")", "*", "sin_dists", "*", "cos_lat", ",", "cos_dists", "-", "sin_lat", "*", "sin_lats", ")", "lons", "=", "numpy", ".", "mod", "(", "rlon", "-", "dlon", "+", "numpy", ".", "pi", ",", "2", "*", "numpy", ".", "pi", ")", "-", "numpy", ".", "pi", "lons", "=", "numpy", ".", "degrees", "(", "lons", ")", "depths", "=", "vdists", "+", "depth", "# the first point should be left intact", "lons", "[", "0", "]", "=", "lon", "lats", "[", "0", "]", "=", "lat", "depths", "[", "0", "]", "=", "depth", "return", "lons", ",", "lats", ",", "depths" ]
Find a list of specified number of points starting from a given one along a great circle arc with a given azimuth measured in a given point. :param float lon, lat, depth: Coordinates of a point to start from. The first point in a resulting list has these coordinates. :param azimuth: A direction representing a great circle arc together with a reference point. :param hdist: Horizontal (geodetic) distance from reference point to the last point of the resulting list, in km. :param vdist: Vertical (depth) distance between reference and the last point, in km. :param npoints: Integer number of points to return. First and last points count, so if there have to be two intervals, ``npoints`` should be 3. :returns: Tuple of three 1d numpy arrays: longitudes, latitudes and depths of resulting points respectively. Implements "completely general but more complicated algorithm" from http://williams.best.vwh.net/avform.htm#LL
[ "Find", "a", "list", "of", "specified", "number", "of", "points", "starting", "from", "a", "given", "one", "along", "a", "great", "circle", "arc", "with", "a", "given", "azimuth", "measured", "in", "a", "given", "point", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L339-L393
gem/oq-engine
openquake/hazardlib/geo/geodetic.py
point_at
def point_at(lon, lat, azimuth, distance): """ Perform a forward geodetic transformation: find a point lying at a given distance from a given one on a great circle arc defined by azimuth. :param float lon, lat: Coordinates of a reference point, in decimal degrees. :param azimuth: An azimuth of a great circle arc of interest measured in a reference point in decimal degrees. :param distance: Distance to target point in km. :returns: Tuple of two float numbers: longitude and latitude of a target point in decimal degrees respectively. Implements the same approach as :func:`npoints_towards`. """ # this is a simplified version of npoints_towards(). # code duplication is justified by performance reasons. lon, lat = numpy.radians(lon), numpy.radians(lat) tc = numpy.radians(360 - azimuth) sin_dists = numpy.sin(distance / EARTH_RADIUS) cos_dists = numpy.cos(distance / EARTH_RADIUS) sin_lat = numpy.sin(lat) cos_lat = numpy.cos(lat) sin_lats = sin_lat * cos_dists + cos_lat * sin_dists * numpy.cos(tc) lats = numpy.degrees(numpy.arcsin(sin_lats)) dlon = numpy.arctan2(numpy.sin(tc) * sin_dists * cos_lat, cos_dists - sin_lat * sin_lats) lons = numpy.mod(lon - dlon + numpy.pi, 2 * numpy.pi) - numpy.pi lons = numpy.degrees(lons) return lons, lats
python
def point_at(lon, lat, azimuth, distance): lon, lat = numpy.radians(lon), numpy.radians(lat) tc = numpy.radians(360 - azimuth) sin_dists = numpy.sin(distance / EARTH_RADIUS) cos_dists = numpy.cos(distance / EARTH_RADIUS) sin_lat = numpy.sin(lat) cos_lat = numpy.cos(lat) sin_lats = sin_lat * cos_dists + cos_lat * sin_dists * numpy.cos(tc) lats = numpy.degrees(numpy.arcsin(sin_lats)) dlon = numpy.arctan2(numpy.sin(tc) * sin_dists * cos_lat, cos_dists - sin_lat * sin_lats) lons = numpy.mod(lon - dlon + numpy.pi, 2 * numpy.pi) - numpy.pi lons = numpy.degrees(lons) return lons, lats
[ "def", "point_at", "(", "lon", ",", "lat", ",", "azimuth", ",", "distance", ")", ":", "# this is a simplified version of npoints_towards().", "# code duplication is justified by performance reasons.", "lon", ",", "lat", "=", "numpy", ".", "radians", "(", "lon", ")", ",", "numpy", ".", "radians", "(", "lat", ")", "tc", "=", "numpy", ".", "radians", "(", "360", "-", "azimuth", ")", "sin_dists", "=", "numpy", ".", "sin", "(", "distance", "/", "EARTH_RADIUS", ")", "cos_dists", "=", "numpy", ".", "cos", "(", "distance", "/", "EARTH_RADIUS", ")", "sin_lat", "=", "numpy", ".", "sin", "(", "lat", ")", "cos_lat", "=", "numpy", ".", "cos", "(", "lat", ")", "sin_lats", "=", "sin_lat", "*", "cos_dists", "+", "cos_lat", "*", "sin_dists", "*", "numpy", ".", "cos", "(", "tc", ")", "lats", "=", "numpy", ".", "degrees", "(", "numpy", ".", "arcsin", "(", "sin_lats", ")", ")", "dlon", "=", "numpy", ".", "arctan2", "(", "numpy", ".", "sin", "(", "tc", ")", "*", "sin_dists", "*", "cos_lat", ",", "cos_dists", "-", "sin_lat", "*", "sin_lats", ")", "lons", "=", "numpy", ".", "mod", "(", "lon", "-", "dlon", "+", "numpy", ".", "pi", ",", "2", "*", "numpy", ".", "pi", ")", "-", "numpy", ".", "pi", "lons", "=", "numpy", ".", "degrees", "(", "lons", ")", "return", "lons", ",", "lats" ]
Perform a forward geodetic transformation: find a point lying at a given distance from a given one on a great circle arc defined by azimuth. :param float lon, lat: Coordinates of a reference point, in decimal degrees. :param azimuth: An azimuth of a great circle arc of interest measured in a reference point in decimal degrees. :param distance: Distance to target point in km. :returns: Tuple of two float numbers: longitude and latitude of a target point in decimal degrees respectively. Implements the same approach as :func:`npoints_towards`.
[ "Perform", "a", "forward", "geodetic", "transformation", ":", "find", "a", "point", "lying", "at", "a", "given", "distance", "from", "a", "given", "one", "on", "a", "great", "circle", "arc", "defined", "by", "azimuth", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L396-L431
gem/oq-engine
openquake/hazardlib/geo/geodetic.py
distance_to_semi_arc
def distance_to_semi_arc(alon, alat, aazimuth, plons, plats): """ In this method we use a reference system centerd on (alon, alat) and with the y-axis corresponding to aazimuth direction to calculate the minimum distance from a semiarc with generates in (alon, alat). Parameters are the same as for :func:`distance_to_arc`. """ if type(plons) is float: plons = numpy.array([plons]) plats = numpy.array([plats]) azimuth_to_target = azimuth(alon, alat, plons, plats) # Find the indexes of the points in the positive y halfspace idx = numpy.nonzero(numpy.cos( numpy.radians((aazimuth-azimuth_to_target))) > 0.0) # Find the indexes of the points in the negative y halfspace idx_not = numpy.nonzero(numpy.cos( numpy.radians((aazimuth-azimuth_to_target))) <= 0.0) idx_ll_quadr = numpy.nonzero( (numpy.cos(numpy.radians((aazimuth-azimuth_to_target))) <= 0.0) & (numpy.sin(numpy.radians((aazimuth-azimuth_to_target))) > 0.0)) # Initialise the array containing the final distances distance = numpy.zeros_like(plons) # Compute the distance between the semi-arc with 'aazimuth' direction # and the set of sites in the positive half-space. The shortest distance to # the semi-arc in this case can be computed using the function # :func:`openquake.hazardlib.geo.geodetic.distance_to_arc`. if len(idx): distance_to_target = geodetic_distance(alon, alat, plons[idx], plats[idx]) t_angle = (azimuth_to_target[idx] - aazimuth + 360) % 360 angle = numpy.arccos((numpy.sin(numpy.radians(t_angle)) * numpy.sin(distance_to_target / EARTH_RADIUS))) distance[idx] = (numpy.pi / 2 - angle) * EARTH_RADIUS # Compute the distance between the reference point and the set of sites # in the negative half-space. The shortest distance for the semi-arc for # all the points in the negative semi-space simply corresponds to the # shortest distance to its origin. if len(idx_not): distance[idx_not] = geodetic_distance(alon, alat, plons[idx_not], plats[idx_not]) distance[idx_ll_quadr] = -1 * distance[idx_ll_quadr] return distance
python
def distance_to_semi_arc(alon, alat, aazimuth, plons, plats): if type(plons) is float: plons = numpy.array([plons]) plats = numpy.array([plats]) azimuth_to_target = azimuth(alon, alat, plons, plats) idx = numpy.nonzero(numpy.cos( numpy.radians((aazimuth-azimuth_to_target))) > 0.0) idx_not = numpy.nonzero(numpy.cos( numpy.radians((aazimuth-azimuth_to_target))) <= 0.0) idx_ll_quadr = numpy.nonzero( (numpy.cos(numpy.radians((aazimuth-azimuth_to_target))) <= 0.0) & (numpy.sin(numpy.radians((aazimuth-azimuth_to_target))) > 0.0)) distance = numpy.zeros_like(plons) if len(idx): distance_to_target = geodetic_distance(alon, alat, plons[idx], plats[idx]) t_angle = (azimuth_to_target[idx] - aazimuth + 360) % 360 angle = numpy.arccos((numpy.sin(numpy.radians(t_angle)) * numpy.sin(distance_to_target / EARTH_RADIUS))) distance[idx] = (numpy.pi / 2 - angle) * EARTH_RADIUS if len(idx_not): distance[idx_not] = geodetic_distance(alon, alat, plons[idx_not], plats[idx_not]) distance[idx_ll_quadr] = -1 * distance[idx_ll_quadr] return distance
[ "def", "distance_to_semi_arc", "(", "alon", ",", "alat", ",", "aazimuth", ",", "plons", ",", "plats", ")", ":", "if", "type", "(", "plons", ")", "is", "float", ":", "plons", "=", "numpy", ".", "array", "(", "[", "plons", "]", ")", "plats", "=", "numpy", ".", "array", "(", "[", "plats", "]", ")", "azimuth_to_target", "=", "azimuth", "(", "alon", ",", "alat", ",", "plons", ",", "plats", ")", "# Find the indexes of the points in the positive y halfspace", "idx", "=", "numpy", ".", "nonzero", "(", "numpy", ".", "cos", "(", "numpy", ".", "radians", "(", "(", "aazimuth", "-", "azimuth_to_target", ")", ")", ")", ">", "0.0", ")", "# Find the indexes of the points in the negative y halfspace", "idx_not", "=", "numpy", ".", "nonzero", "(", "numpy", ".", "cos", "(", "numpy", ".", "radians", "(", "(", "aazimuth", "-", "azimuth_to_target", ")", ")", ")", "<=", "0.0", ")", "idx_ll_quadr", "=", "numpy", ".", "nonzero", "(", "(", "numpy", ".", "cos", "(", "numpy", ".", "radians", "(", "(", "aazimuth", "-", "azimuth_to_target", ")", ")", ")", "<=", "0.0", ")", "&", "(", "numpy", ".", "sin", "(", "numpy", ".", "radians", "(", "(", "aazimuth", "-", "azimuth_to_target", ")", ")", ")", ">", "0.0", ")", ")", "# Initialise the array containing the final distances", "distance", "=", "numpy", ".", "zeros_like", "(", "plons", ")", "# Compute the distance between the semi-arc with 'aazimuth' direction", "# and the set of sites in the positive half-space. 
The shortest distance to", "# the semi-arc in this case can be computed using the function", "# :func:`openquake.hazardlib.geo.geodetic.distance_to_arc`.", "if", "len", "(", "idx", ")", ":", "distance_to_target", "=", "geodetic_distance", "(", "alon", ",", "alat", ",", "plons", "[", "idx", "]", ",", "plats", "[", "idx", "]", ")", "t_angle", "=", "(", "azimuth_to_target", "[", "idx", "]", "-", "aazimuth", "+", "360", ")", "%", "360", "angle", "=", "numpy", ".", "arccos", "(", "(", "numpy", ".", "sin", "(", "numpy", ".", "radians", "(", "t_angle", ")", ")", "*", "numpy", ".", "sin", "(", "distance_to_target", "/", "EARTH_RADIUS", ")", ")", ")", "distance", "[", "idx", "]", "=", "(", "numpy", ".", "pi", "/", "2", "-", "angle", ")", "*", "EARTH_RADIUS", "# Compute the distance between the reference point and the set of sites", "# in the negative half-space. The shortest distance for the semi-arc for", "# all the points in the negative semi-space simply corresponds to the", "# shortest distance to its origin.", "if", "len", "(", "idx_not", ")", ":", "distance", "[", "idx_not", "]", "=", "geodetic_distance", "(", "alon", ",", "alat", ",", "plons", "[", "idx_not", "]", ",", "plats", "[", "idx_not", "]", ")", "distance", "[", "idx_ll_quadr", "]", "=", "-", "1", "*", "distance", "[", "idx_ll_quadr", "]", "return", "distance" ]
In this method we use a reference system centerd on (alon, alat) and with the y-axis corresponding to aazimuth direction to calculate the minimum distance from a semiarc with generates in (alon, alat). Parameters are the same as for :func:`distance_to_arc`.
[ "In", "this", "method", "we", "use", "a", "reference", "system", "centerd", "on", "(", "alon", "alat", ")", "and", "with", "the", "y", "-", "axis", "corresponding", "to", "aazimuth", "direction", "to", "calculate", "the", "minimum", "distance", "from", "a", "semiarc", "with", "generates", "in", "(", "alon", "alat", ")", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L434-L486
gem/oq-engine
openquake/hazardlib/geo/geodetic.py
distance_to_arc
def distance_to_arc(alon, alat, aazimuth, plons, plats): """ Calculate a closest distance between a great circle arc and a point (or a collection of points). :param float alon, alat: Arc reference point longitude and latitude, in decimal degrees. :param azimuth: Arc azimuth (an angle between direction to a north and arc in clockwise direction), measured in a reference point, in decimal degrees. :param float plons, plats: Longitudes and latitudes of points to measure distance. Either scalar values or numpy arrays of decimal degrees. :returns: Distance in km, a scalar value or numpy array depending on ``plons`` and ``plats``. A distance is negative if the target point lies on the right hand side of the arc. Solves a spherical triangle formed by reference point, target point and a projection of target point to a reference great circle arc. """ azimuth_to_target = azimuth(alon, alat, plons, plats) distance_to_target = geodetic_distance(alon, alat, plons, plats) # find an angle between an arc and a great circle arc connecting # arc's reference point and a target point t_angle = (azimuth_to_target - aazimuth + 360) % 360 # in a spherical right triangle cosine of the angle of a cathetus # augmented to pi/2 is equal to sine of an opposite angle times # sine of hypotenuse, see # http://en.wikipedia.org/wiki/Spherical_trigonometry#Napier.27s_Pentagon angle = numpy.arccos( (numpy.sin(numpy.radians(t_angle)) * numpy.sin(distance_to_target / EARTH_RADIUS)) ) return (numpy.pi / 2 - angle) * EARTH_RADIUS
python
def distance_to_arc(alon, alat, aazimuth, plons, plats): azimuth_to_target = azimuth(alon, alat, plons, plats) distance_to_target = geodetic_distance(alon, alat, plons, plats) t_angle = (azimuth_to_target - aazimuth + 360) % 360 angle = numpy.arccos( (numpy.sin(numpy.radians(t_angle)) * numpy.sin(distance_to_target / EARTH_RADIUS)) ) return (numpy.pi / 2 - angle) * EARTH_RADIUS
[ "def", "distance_to_arc", "(", "alon", ",", "alat", ",", "aazimuth", ",", "plons", ",", "plats", ")", ":", "azimuth_to_target", "=", "azimuth", "(", "alon", ",", "alat", ",", "plons", ",", "plats", ")", "distance_to_target", "=", "geodetic_distance", "(", "alon", ",", "alat", ",", "plons", ",", "plats", ")", "# find an angle between an arc and a great circle arc connecting", "# arc's reference point and a target point", "t_angle", "=", "(", "azimuth_to_target", "-", "aazimuth", "+", "360", ")", "%", "360", "# in a spherical right triangle cosine of the angle of a cathetus", "# augmented to pi/2 is equal to sine of an opposite angle times", "# sine of hypotenuse, see", "# http://en.wikipedia.org/wiki/Spherical_trigonometry#Napier.27s_Pentagon", "angle", "=", "numpy", ".", "arccos", "(", "(", "numpy", ".", "sin", "(", "numpy", ".", "radians", "(", "t_angle", ")", ")", "*", "numpy", ".", "sin", "(", "distance_to_target", "/", "EARTH_RADIUS", ")", ")", ")", "return", "(", "numpy", ".", "pi", "/", "2", "-", "angle", ")", "*", "EARTH_RADIUS" ]
Calculate a closest distance between a great circle arc and a point (or a collection of points). :param float alon, alat: Arc reference point longitude and latitude, in decimal degrees. :param azimuth: Arc azimuth (an angle between direction to a north and arc in clockwise direction), measured in a reference point, in decimal degrees. :param float plons, plats: Longitudes and latitudes of points to measure distance. Either scalar values or numpy arrays of decimal degrees. :returns: Distance in km, a scalar value or numpy array depending on ``plons`` and ``plats``. A distance is negative if the target point lies on the right hand side of the arc. Solves a spherical triangle formed by reference point, target point and a projection of target point to a reference great circle arc.
[ "Calculate", "a", "closest", "distance", "between", "a", "great", "circle", "arc", "and", "a", "point", "(", "or", "a", "collection", "of", "points", ")", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L489-L525
gem/oq-engine
openquake/hazardlib/geo/geodetic.py
_prepare_coords
def _prepare_coords(lons1, lats1, lons2, lats2): """ Convert two pairs of spherical coordinates in decimal degrees to numpy arrays of radians. Makes sure that respective coordinates in pairs have the same shape. """ lons1 = numpy.radians(lons1) lats1 = numpy.radians(lats1) assert lons1.shape == lats1.shape lons2 = numpy.radians(lons2) lats2 = numpy.radians(lats2) assert lons2.shape == lats2.shape return lons1, lats1, lons2, lats2
python
def _prepare_coords(lons1, lats1, lons2, lats2): lons1 = numpy.radians(lons1) lats1 = numpy.radians(lats1) assert lons1.shape == lats1.shape lons2 = numpy.radians(lons2) lats2 = numpy.radians(lats2) assert lons2.shape == lats2.shape return lons1, lats1, lons2, lats2
[ "def", "_prepare_coords", "(", "lons1", ",", "lats1", ",", "lons2", ",", "lats2", ")", ":", "lons1", "=", "numpy", ".", "radians", "(", "lons1", ")", "lats1", "=", "numpy", ".", "radians", "(", "lats1", ")", "assert", "lons1", ".", "shape", "==", "lats1", ".", "shape", "lons2", "=", "numpy", ".", "radians", "(", "lons2", ")", "lats2", "=", "numpy", ".", "radians", "(", "lats2", ")", "assert", "lons2", ".", "shape", "==", "lats2", ".", "shape", "return", "lons1", ",", "lats1", ",", "lons2", ",", "lats2" ]
Convert two pairs of spherical coordinates in decimal degrees to numpy arrays of radians. Makes sure that respective coordinates in pairs have the same shape.
[ "Convert", "two", "pairs", "of", "spherical", "coordinates", "in", "decimal", "degrees", "to", "numpy", "arrays", "of", "radians", ".", "Makes", "sure", "that", "respective", "coordinates", "in", "pairs", "have", "the", "same", "shape", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L528-L540
gem/oq-engine
openquake/hmtk/sources/simple_fault_source.py
mtkSimpleFaultSource._check_seismogenic_depths
def _check_seismogenic_depths(self, upper_depth, lower_depth): ''' Checks the seismic depths for physical consistency :param float upper_depth: Upper seismogenic depth (km) :param float lower_depth: Lower seismogenic depth (km) ''' # Simple check on depths if upper_depth: if upper_depth < 0.: raise ValueError('Upper seismogenic depth must be greater than' ' or equal to 0.0!') else: self.upper_depth = upper_depth else: self.upper_depth = 0.0 if not lower_depth: raise ValueError('Lower seismogenic depth must be defined for ' 'simple fault source!') if lower_depth < self.upper_depth: raise ValueError('Lower seismogenic depth must take a greater' ' value than upper seismogenic depth') self.lower_depth = lower_depth
python
def _check_seismogenic_depths(self, upper_depth, lower_depth): if upper_depth: if upper_depth < 0.: raise ValueError('Upper seismogenic depth must be greater than' ' or equal to 0.0!') else: self.upper_depth = upper_depth else: self.upper_depth = 0.0 if not lower_depth: raise ValueError('Lower seismogenic depth must be defined for ' 'simple fault source!') if lower_depth < self.upper_depth: raise ValueError('Lower seismogenic depth must take a greater' ' value than upper seismogenic depth') self.lower_depth = lower_depth
[ "def", "_check_seismogenic_depths", "(", "self", ",", "upper_depth", ",", "lower_depth", ")", ":", "# Simple check on depths", "if", "upper_depth", ":", "if", "upper_depth", "<", "0.", ":", "raise", "ValueError", "(", "'Upper seismogenic depth must be greater than'", "' or equal to 0.0!'", ")", "else", ":", "self", ".", "upper_depth", "=", "upper_depth", "else", ":", "self", ".", "upper_depth", "=", "0.0", "if", "not", "lower_depth", ":", "raise", "ValueError", "(", "'Lower seismogenic depth must be defined for '", "'simple fault source!'", ")", "if", "lower_depth", "<", "self", ".", "upper_depth", ":", "raise", "ValueError", "(", "'Lower seismogenic depth must take a greater'", "' value than upper seismogenic depth'", ")", "self", ".", "lower_depth", "=", "lower_depth" ]
Checks the seismic depths for physical consistency :param float upper_depth: Upper seismogenic depth (km) :param float lower_depth: Lower seismogenic depth (km)
[ "Checks", "the", "seismic", "depths", "for", "physical", "consistency", ":", "param", "float", "upper_depth", ":", "Upper", "seismogenic", "depth", "(", "km", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/sources/simple_fault_source.py#L119-L145
gem/oq-engine
openquake/hmtk/sources/simple_fault_source.py
mtkSimpleFaultSource.create_geometry
def create_geometry(self, input_geometry, dip, upper_depth, lower_depth, mesh_spacing=1.0): ''' If geometry is defined as a numpy array then create instance of nhlib.geo.line.Line class, otherwise if already instance of class accept class :param input_geometry: Trace (line) of the fault source as either i) instance of nhlib.geo.line.Line class ii) numpy.ndarray [Longitude, Latitude] :param float dip: Dip of fault surface (in degrees) :param float upper_depth: Upper seismogenic depth (km) :param float lower_depth: Lower seismogenic depth (km) :param float mesh_spacing: Spacing of the fault mesh (km) {default = 1.0} ''' assert((dip > 0.) and (dip <= 90.)) self.dip = dip self._check_seismogenic_depths(upper_depth, lower_depth) if not isinstance(input_geometry, Line): if not isinstance(input_geometry, np.ndarray): raise ValueError('Unrecognised or unsupported geometry ' 'definition') else: self.fault_trace = Line([Point(row[0], row[1]) for row in input_geometry]) else: self.fault_trace = input_geometry # Build fault surface self.geometry = SimpleFaultSurface.from_fault_data(self.fault_trace, self.upper_depth, self.lower_depth, self.dip, mesh_spacing)
python
def create_geometry(self, input_geometry, dip, upper_depth, lower_depth, mesh_spacing=1.0): assert((dip > 0.) and (dip <= 90.)) self.dip = dip self._check_seismogenic_depths(upper_depth, lower_depth) if not isinstance(input_geometry, Line): if not isinstance(input_geometry, np.ndarray): raise ValueError('Unrecognised or unsupported geometry ' 'definition') else: self.fault_trace = Line([Point(row[0], row[1]) for row in input_geometry]) else: self.fault_trace = input_geometry self.geometry = SimpleFaultSurface.from_fault_data(self.fault_trace, self.upper_depth, self.lower_depth, self.dip, mesh_spacing)
[ "def", "create_geometry", "(", "self", ",", "input_geometry", ",", "dip", ",", "upper_depth", ",", "lower_depth", ",", "mesh_spacing", "=", "1.0", ")", ":", "assert", "(", "(", "dip", ">", "0.", ")", "and", "(", "dip", "<=", "90.", ")", ")", "self", ".", "dip", "=", "dip", "self", ".", "_check_seismogenic_depths", "(", "upper_depth", ",", "lower_depth", ")", "if", "not", "isinstance", "(", "input_geometry", ",", "Line", ")", ":", "if", "not", "isinstance", "(", "input_geometry", ",", "np", ".", "ndarray", ")", ":", "raise", "ValueError", "(", "'Unrecognised or unsupported geometry '", "'definition'", ")", "else", ":", "self", ".", "fault_trace", "=", "Line", "(", "[", "Point", "(", "row", "[", "0", "]", ",", "row", "[", "1", "]", ")", "for", "row", "in", "input_geometry", "]", ")", "else", ":", "self", ".", "fault_trace", "=", "input_geometry", "# Build fault surface", "self", ".", "geometry", "=", "SimpleFaultSurface", ".", "from_fault_data", "(", "self", ".", "fault_trace", ",", "self", ".", "upper_depth", ",", "self", ".", "lower_depth", ",", "self", ".", "dip", ",", "mesh_spacing", ")" ]
If geometry is defined as a numpy array then create instance of nhlib.geo.line.Line class, otherwise if already instance of class accept class :param input_geometry: Trace (line) of the fault source as either i) instance of nhlib.geo.line.Line class ii) numpy.ndarray [Longitude, Latitude] :param float dip: Dip of fault surface (in degrees) :param float upper_depth: Upper seismogenic depth (km) :param float lower_depth: Lower seismogenic depth (km) :param float mesh_spacing: Spacing of the fault mesh (km) {default = 1.0}
[ "If", "geometry", "is", "defined", "as", "a", "numpy", "array", "then", "create", "instance", "of", "nhlib", ".", "geo", ".", "line", ".", "Line", "class", "otherwise", "if", "already", "instance", "of", "class", "accept", "class" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/sources/simple_fault_source.py#L147-L189
gem/oq-engine
openquake/hmtk/sources/simple_fault_source.py
mtkSimpleFaultSource.select_catalogue
def select_catalogue(self, selector, distance, distance_metric='joyner-boore', upper_eq_depth=None, lower_eq_depth=None): ''' Selects earthquakes within a distance of the fault :param selector: Populated instance of :class: `openquake.hmtk.seismicity.selector.CatalogueSelector` :param distance: Distance from point (km) for selection :param str distance_metric Choice of fault source distance metric 'joyner-boore' or 'rupture' :param float upper_eq_depth: Upper hypocentral depth of hypocentres to be selected :param float lower_eq_depth: Lower hypocentral depth of hypocentres to be selected ''' if selector.catalogue.get_number_events() < 1: raise ValueError('No events found in catalogue!') # rupture metric is selected and dip != 90 or 'rupture' if ('rupture' in distance_metric) and (fabs(self.dip - 90) > 1E-5): # Use rupture distance self.catalogue = selector.within_rupture_distance( self.geometry, distance, upper_depth=upper_eq_depth, lower_depth=lower_eq_depth) else: # Use Joyner-Boore distance self.catalogue = selector.within_joyner_boore_distance( self.geometry, distance, upper_depth=upper_eq_depth, lower_depth=lower_eq_depth) if self.catalogue.get_number_events() < 5: # Throw a warning regarding the small number of earthquakes in # the source! warnings.warn('Source %s (%s) has fewer than 5 events' % (self.id, self.name))
python
def select_catalogue(self, selector, distance, distance_metric='joyner-boore', upper_eq_depth=None, lower_eq_depth=None): if selector.catalogue.get_number_events() < 1: raise ValueError('No events found in catalogue!') if ('rupture' in distance_metric) and (fabs(self.dip - 90) > 1E-5): self.catalogue = selector.within_rupture_distance( self.geometry, distance, upper_depth=upper_eq_depth, lower_depth=lower_eq_depth) else: self.catalogue = selector.within_joyner_boore_distance( self.geometry, distance, upper_depth=upper_eq_depth, lower_depth=lower_eq_depth) if self.catalogue.get_number_events() < 5: warnings.warn('Source %s (%s) has fewer than 5 events' % (self.id, self.name))
[ "def", "select_catalogue", "(", "self", ",", "selector", ",", "distance", ",", "distance_metric", "=", "'joyner-boore'", ",", "upper_eq_depth", "=", "None", ",", "lower_eq_depth", "=", "None", ")", ":", "if", "selector", ".", "catalogue", ".", "get_number_events", "(", ")", "<", "1", ":", "raise", "ValueError", "(", "'No events found in catalogue!'", ")", "# rupture metric is selected and dip != 90 or 'rupture'", "if", "(", "'rupture'", "in", "distance_metric", ")", "and", "(", "fabs", "(", "self", ".", "dip", "-", "90", ")", ">", "1E-5", ")", ":", "# Use rupture distance", "self", ".", "catalogue", "=", "selector", ".", "within_rupture_distance", "(", "self", ".", "geometry", ",", "distance", ",", "upper_depth", "=", "upper_eq_depth", ",", "lower_depth", "=", "lower_eq_depth", ")", "else", ":", "# Use Joyner-Boore distance", "self", ".", "catalogue", "=", "selector", ".", "within_joyner_boore_distance", "(", "self", ".", "geometry", ",", "distance", ",", "upper_depth", "=", "upper_eq_depth", ",", "lower_depth", "=", "lower_eq_depth", ")", "if", "self", ".", "catalogue", ".", "get_number_events", "(", ")", "<", "5", ":", "# Throw a warning regarding the small number of earthquakes in", "# the source!", "warnings", ".", "warn", "(", "'Source %s (%s) has fewer than 5 events'", "%", "(", "self", ".", "id", ",", "self", ".", "name", ")", ")" ]
Selects earthquakes within a distance of the fault :param selector: Populated instance of :class: `openquake.hmtk.seismicity.selector.CatalogueSelector` :param distance: Distance from point (km) for selection :param str distance_metric Choice of fault source distance metric 'joyner-boore' or 'rupture' :param float upper_eq_depth: Upper hypocentral depth of hypocentres to be selected :param float lower_eq_depth: Lower hypocentral depth of hypocentres to be selected
[ "Selects", "earthquakes", "within", "a", "distance", "of", "the", "fault" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/sources/simple_fault_source.py#L191-L237
gem/oq-engine
openquake/hmtk/sources/simple_fault_source.py
mtkSimpleFaultSource.create_oqhazardlib_source
def create_oqhazardlib_source(self, tom, mesh_spacing, use_defaults=False): """ Returns an instance of the :class: `openquake.hazardlib.source.simple_fault.SimpleFaultSource` :param tom: Temporal occurrance model :param float mesh_spacing: Mesh spacing """ if not self.mfd: raise ValueError("Cannot write to hazardlib without MFD") return SimpleFaultSource( self.id, self.name, self.trt, self.mfd, mesh_spacing, conv.mag_scale_rel_to_hazardlib(self.mag_scale_rel, use_defaults), conv.render_aspect_ratio(self.rupt_aspect_ratio, use_defaults), tom, self.upper_depth, self.lower_depth, self.fault_trace, self.dip, self.rake)
python
def create_oqhazardlib_source(self, tom, mesh_spacing, use_defaults=False): if not self.mfd: raise ValueError("Cannot write to hazardlib without MFD") return SimpleFaultSource( self.id, self.name, self.trt, self.mfd, mesh_spacing, conv.mag_scale_rel_to_hazardlib(self.mag_scale_rel, use_defaults), conv.render_aspect_ratio(self.rupt_aspect_ratio, use_defaults), tom, self.upper_depth, self.lower_depth, self.fault_trace, self.dip, self.rake)
[ "def", "create_oqhazardlib_source", "(", "self", ",", "tom", ",", "mesh_spacing", ",", "use_defaults", "=", "False", ")", ":", "if", "not", "self", ".", "mfd", ":", "raise", "ValueError", "(", "\"Cannot write to hazardlib without MFD\"", ")", "return", "SimpleFaultSource", "(", "self", ".", "id", ",", "self", ".", "name", ",", "self", ".", "trt", ",", "self", ".", "mfd", ",", "mesh_spacing", ",", "conv", ".", "mag_scale_rel_to_hazardlib", "(", "self", ".", "mag_scale_rel", ",", "use_defaults", ")", ",", "conv", ".", "render_aspect_ratio", "(", "self", ".", "rupt_aspect_ratio", ",", "use_defaults", ")", ",", "tom", ",", "self", ".", "upper_depth", ",", "self", ".", "lower_depth", ",", "self", ".", "fault_trace", ",", "self", ".", "dip", ",", "self", ".", "rake", ")" ]
Returns an instance of the :class: `openquake.hazardlib.source.simple_fault.SimpleFaultSource` :param tom: Temporal occurrance model :param float mesh_spacing: Mesh spacing
[ "Returns", "an", "instance", "of", "the", ":", "class", ":", "openquake", ".", "hazardlib", ".", "source", ".", "simple_fault", ".", "SimpleFaultSource" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/sources/simple_fault_source.py#L239-L264
gem/oq-engine
openquake/hmtk/plotting/faults/geology_mfd_plot.py
plot_recurrence_models
def plot_recurrence_models( configs, area, slip, msr, rake, shear_modulus=30.0, disp_length_ratio=1.25E-5, msr_sigma=0., figure_size=(8, 6), filename=None, filetype='png', dpi=300, ax=None): """ Plots a set of recurrence models :param list configs: List of configuration dictionaries """ if ax is None: fig, ax = plt.subplots(figsize=figure_size) else: fig = ax.get_figure() for config in configs: model = RecurrenceBranch(area, slip, msr, rake, shear_modulus, disp_length_ratio, msr_sigma, weight=1.0) model.get_recurrence(config) occurrence = model.recurrence.occur_rates cumulative = np.array([np.sum(occurrence[iloc:]) for iloc in range(0, len(occurrence))]) if 'AndersonLuco' in config['Model_Name']: flt_label = config['Model_Name'] + ' - ' + config['Model_Type'] +\ ' Type' else: flt_label = config['Model_Name'] flt_color = np.random.uniform(0.1, 1.0, 3) ax.semilogy(model.magnitudes, cumulative, '-', label=flt_label, color=flt_color, linewidth=2.) ax.semilogy(model.magnitudes, model.recurrence.occur_rates, '--', color=flt_color, linewidth=2.) ax.set_xlabel('Magnitude') ax.set_ylabel('Annual Rate') ax.legend(bbox_to_anchor=(1.1, 1.0)) _save_image(fig, filename, filetype, dpi)
python
def plot_recurrence_models( configs, area, slip, msr, rake, shear_modulus=30.0, disp_length_ratio=1.25E-5, msr_sigma=0., figure_size=(8, 6), filename=None, filetype='png', dpi=300, ax=None): if ax is None: fig, ax = plt.subplots(figsize=figure_size) else: fig = ax.get_figure() for config in configs: model = RecurrenceBranch(area, slip, msr, rake, shear_modulus, disp_length_ratio, msr_sigma, weight=1.0) model.get_recurrence(config) occurrence = model.recurrence.occur_rates cumulative = np.array([np.sum(occurrence[iloc:]) for iloc in range(0, len(occurrence))]) if 'AndersonLuco' in config['Model_Name']: flt_label = config['Model_Name'] + ' - ' + config['Model_Type'] +\ ' Type' else: flt_label = config['Model_Name'] flt_color = np.random.uniform(0.1, 1.0, 3) ax.semilogy(model.magnitudes, cumulative, '-', label=flt_label, color=flt_color, linewidth=2.) ax.semilogy(model.magnitudes, model.recurrence.occur_rates, '--', color=flt_color, linewidth=2.) ax.set_xlabel('Magnitude') ax.set_ylabel('Annual Rate') ax.legend(bbox_to_anchor=(1.1, 1.0)) _save_image(fig, filename, filetype, dpi)
[ "def", "plot_recurrence_models", "(", "configs", ",", "area", ",", "slip", ",", "msr", ",", "rake", ",", "shear_modulus", "=", "30.0", ",", "disp_length_ratio", "=", "1.25E-5", ",", "msr_sigma", "=", "0.", ",", "figure_size", "=", "(", "8", ",", "6", ")", ",", "filename", "=", "None", ",", "filetype", "=", "'png'", ",", "dpi", "=", "300", ",", "ax", "=", "None", ")", ":", "if", "ax", "is", "None", ":", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", "figsize", "=", "figure_size", ")", "else", ":", "fig", "=", "ax", ".", "get_figure", "(", ")", "for", "config", "in", "configs", ":", "model", "=", "RecurrenceBranch", "(", "area", ",", "slip", ",", "msr", ",", "rake", ",", "shear_modulus", ",", "disp_length_ratio", ",", "msr_sigma", ",", "weight", "=", "1.0", ")", "model", ".", "get_recurrence", "(", "config", ")", "occurrence", "=", "model", ".", "recurrence", ".", "occur_rates", "cumulative", "=", "np", ".", "array", "(", "[", "np", ".", "sum", "(", "occurrence", "[", "iloc", ":", "]", ")", "for", "iloc", "in", "range", "(", "0", ",", "len", "(", "occurrence", ")", ")", "]", ")", "if", "'AndersonLuco'", "in", "config", "[", "'Model_Name'", "]", ":", "flt_label", "=", "config", "[", "'Model_Name'", "]", "+", "' - '", "+", "config", "[", "'Model_Type'", "]", "+", "' Type'", "else", ":", "flt_label", "=", "config", "[", "'Model_Name'", "]", "flt_color", "=", "np", ".", "random", ".", "uniform", "(", "0.1", ",", "1.0", ",", "3", ")", "ax", ".", "semilogy", "(", "model", ".", "magnitudes", ",", "cumulative", ",", "'-'", ",", "label", "=", "flt_label", ",", "color", "=", "flt_color", ",", "linewidth", "=", "2.", ")", "ax", ".", "semilogy", "(", "model", ".", "magnitudes", ",", "model", ".", "recurrence", ".", "occur_rates", ",", "'--'", ",", "color", "=", "flt_color", ",", "linewidth", "=", "2.", ")", "ax", ".", "set_xlabel", "(", "'Magnitude'", ")", "ax", ".", "set_ylabel", "(", "'Annual Rate'", ")", "ax", ".", "legend", "(", "bbox_to_anchor", "=", "(", "1.1", ",", 
"1.0", ")", ")", "_save_image", "(", "fig", ",", "filename", ",", "filetype", ",", "dpi", ")" ]
Plots a set of recurrence models :param list configs: List of configuration dictionaries
[ "Plots", "a", "set", "of", "recurrence", "models" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/faults/geology_mfd_plot.py#L69-L105
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_area_source_geometry
def build_area_source_geometry(area_source): """ Returns the area source geometry as a Node :param area_source: Area source model as an instance of the :class: `openquake.hazardlib.source.area.AreaSource` :returns: Instance of :class:`openquake.baselib.node.Node` """ geom = [] for lon_lat in zip(area_source.polygon.lons, area_source.polygon.lats): geom.extend(lon_lat) poslist_node = Node("gml:posList", text=geom) linear_ring_node = Node("gml:LinearRing", nodes=[poslist_node]) exterior_node = Node("gml:exterior", nodes=[linear_ring_node]) polygon_node = Node("gml:Polygon", nodes=[exterior_node]) upper_depth_node = Node( "upperSeismoDepth", text=area_source.upper_seismogenic_depth) lower_depth_node = Node( "lowerSeismoDepth", text=area_source.lower_seismogenic_depth) return Node( "areaGeometry", {'discretization': area_source.area_discretization}, nodes=[polygon_node, upper_depth_node, lower_depth_node])
python
def build_area_source_geometry(area_source): geom = [] for lon_lat in zip(area_source.polygon.lons, area_source.polygon.lats): geom.extend(lon_lat) poslist_node = Node("gml:posList", text=geom) linear_ring_node = Node("gml:LinearRing", nodes=[poslist_node]) exterior_node = Node("gml:exterior", nodes=[linear_ring_node]) polygon_node = Node("gml:Polygon", nodes=[exterior_node]) upper_depth_node = Node( "upperSeismoDepth", text=area_source.upper_seismogenic_depth) lower_depth_node = Node( "lowerSeismoDepth", text=area_source.lower_seismogenic_depth) return Node( "areaGeometry", {'discretization': area_source.area_discretization}, nodes=[polygon_node, upper_depth_node, lower_depth_node])
[ "def", "build_area_source_geometry", "(", "area_source", ")", ":", "geom", "=", "[", "]", "for", "lon_lat", "in", "zip", "(", "area_source", ".", "polygon", ".", "lons", ",", "area_source", ".", "polygon", ".", "lats", ")", ":", "geom", ".", "extend", "(", "lon_lat", ")", "poslist_node", "=", "Node", "(", "\"gml:posList\"", ",", "text", "=", "geom", ")", "linear_ring_node", "=", "Node", "(", "\"gml:LinearRing\"", ",", "nodes", "=", "[", "poslist_node", "]", ")", "exterior_node", "=", "Node", "(", "\"gml:exterior\"", ",", "nodes", "=", "[", "linear_ring_node", "]", ")", "polygon_node", "=", "Node", "(", "\"gml:Polygon\"", ",", "nodes", "=", "[", "exterior_node", "]", ")", "upper_depth_node", "=", "Node", "(", "\"upperSeismoDepth\"", ",", "text", "=", "area_source", ".", "upper_seismogenic_depth", ")", "lower_depth_node", "=", "Node", "(", "\"lowerSeismoDepth\"", ",", "text", "=", "area_source", ".", "lower_seismogenic_depth", ")", "return", "Node", "(", "\"areaGeometry\"", ",", "{", "'discretization'", ":", "area_source", ".", "area_discretization", "}", ",", "nodes", "=", "[", "polygon_node", ",", "upper_depth_node", ",", "lower_depth_node", "]", ")" ]
Returns the area source geometry as a Node :param area_source: Area source model as an instance of the :class: `openquake.hazardlib.source.area.AreaSource` :returns: Instance of :class:`openquake.baselib.node.Node`
[ "Returns", "the", "area", "source", "geometry", "as", "a", "Node" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L35-L58
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_point_source_geometry
def build_point_source_geometry(point_source): """ Returns the poing source geometry as a Node :param point_source: Point source model as an instance of the :class: `openquake.hazardlib.source.point.PointSource` :returns: Instance of :class:`openquake.baselib.node.Node` """ xy = point_source.location.x, point_source.location.y pos_node = Node("gml:pos", text=xy) point_node = Node("gml:Point", nodes=[pos_node]) upper_depth_node = Node( "upperSeismoDepth", text=point_source.upper_seismogenic_depth) lower_depth_node = Node( "lowerSeismoDepth", text=point_source.lower_seismogenic_depth) return Node( "pointGeometry", nodes=[point_node, upper_depth_node, lower_depth_node])
python
def build_point_source_geometry(point_source): xy = point_source.location.x, point_source.location.y pos_node = Node("gml:pos", text=xy) point_node = Node("gml:Point", nodes=[pos_node]) upper_depth_node = Node( "upperSeismoDepth", text=point_source.upper_seismogenic_depth) lower_depth_node = Node( "lowerSeismoDepth", text=point_source.lower_seismogenic_depth) return Node( "pointGeometry", nodes=[point_node, upper_depth_node, lower_depth_node])
[ "def", "build_point_source_geometry", "(", "point_source", ")", ":", "xy", "=", "point_source", ".", "location", ".", "x", ",", "point_source", ".", "location", ".", "y", "pos_node", "=", "Node", "(", "\"gml:pos\"", ",", "text", "=", "xy", ")", "point_node", "=", "Node", "(", "\"gml:Point\"", ",", "nodes", "=", "[", "pos_node", "]", ")", "upper_depth_node", "=", "Node", "(", "\"upperSeismoDepth\"", ",", "text", "=", "point_source", ".", "upper_seismogenic_depth", ")", "lower_depth_node", "=", "Node", "(", "\"lowerSeismoDepth\"", ",", "text", "=", "point_source", ".", "lower_seismogenic_depth", ")", "return", "Node", "(", "\"pointGeometry\"", ",", "nodes", "=", "[", "point_node", ",", "upper_depth_node", ",", "lower_depth_node", "]", ")" ]
Returns the poing source geometry as a Node :param point_source: Point source model as an instance of the :class: `openquake.hazardlib.source.point.PointSource` :returns: Instance of :class:`openquake.baselib.node.Node`
[ "Returns", "the", "poing", "source", "geometry", "as", "a", "Node" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L61-L80
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_linestring_node
def build_linestring_node(line, with_depth=False): """ Parses a line to a Node class :param line: Line as instance of :class:`openquake.hazardlib.geo.line.Line` :param bool with_depth: Include the depth values (True) or not (False): :returns: Instance of :class:`openquake.baselib.node.Node` """ geom = [] for p in line.points: if with_depth: geom.extend((p.x, p.y, p.z)) else: geom.extend((p.x, p.y)) poslist_node = Node("gml:posList", text=geom) return Node("gml:LineString", nodes=[poslist_node])
python
def build_linestring_node(line, with_depth=False): geom = [] for p in line.points: if with_depth: geom.extend((p.x, p.y, p.z)) else: geom.extend((p.x, p.y)) poslist_node = Node("gml:posList", text=geom) return Node("gml:LineString", nodes=[poslist_node])
[ "def", "build_linestring_node", "(", "line", ",", "with_depth", "=", "False", ")", ":", "geom", "=", "[", "]", "for", "p", "in", "line", ".", "points", ":", "if", "with_depth", ":", "geom", ".", "extend", "(", "(", "p", ".", "x", ",", "p", ".", "y", ",", "p", ".", "z", ")", ")", "else", ":", "geom", ".", "extend", "(", "(", "p", ".", "x", ",", "p", ".", "y", ")", ")", "poslist_node", "=", "Node", "(", "\"gml:posList\"", ",", "text", "=", "geom", ")", "return", "Node", "(", "\"gml:LineString\"", ",", "nodes", "=", "[", "poslist_node", "]", ")" ]
Parses a line to a Node class :param line: Line as instance of :class:`openquake.hazardlib.geo.line.Line` :param bool with_depth: Include the depth values (True) or not (False): :returns: Instance of :class:`openquake.baselib.node.Node`
[ "Parses", "a", "line", "to", "a", "Node", "class" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L83-L101
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_simple_fault_geometry
def build_simple_fault_geometry(fault_source): """ Returns the simple fault source geometry as a Node :param fault_source: Simple fault source model as an instance of the :class: `openquake.hazardlib.source.simple_fault.SimpleFaultSource` :returns: Instance of :class:`openquake.baselib.node.Node` """ linestring_node = build_linestring_node(fault_source.fault_trace, with_depth=False) dip_node = Node("dip", text=fault_source.dip) upper_depth_node = Node( "upperSeismoDepth", text=fault_source.upper_seismogenic_depth) lower_depth_node = Node( "lowerSeismoDepth", text=fault_source.lower_seismogenic_depth) return Node("simpleFaultGeometry", nodes=[linestring_node, dip_node, upper_depth_node, lower_depth_node])
python
def build_simple_fault_geometry(fault_source): linestring_node = build_linestring_node(fault_source.fault_trace, with_depth=False) dip_node = Node("dip", text=fault_source.dip) upper_depth_node = Node( "upperSeismoDepth", text=fault_source.upper_seismogenic_depth) lower_depth_node = Node( "lowerSeismoDepth", text=fault_source.lower_seismogenic_depth) return Node("simpleFaultGeometry", nodes=[linestring_node, dip_node, upper_depth_node, lower_depth_node])
[ "def", "build_simple_fault_geometry", "(", "fault_source", ")", ":", "linestring_node", "=", "build_linestring_node", "(", "fault_source", ".", "fault_trace", ",", "with_depth", "=", "False", ")", "dip_node", "=", "Node", "(", "\"dip\"", ",", "text", "=", "fault_source", ".", "dip", ")", "upper_depth_node", "=", "Node", "(", "\"upperSeismoDepth\"", ",", "text", "=", "fault_source", ".", "upper_seismogenic_depth", ")", "lower_depth_node", "=", "Node", "(", "\"lowerSeismoDepth\"", ",", "text", "=", "fault_source", ".", "lower_seismogenic_depth", ")", "return", "Node", "(", "\"simpleFaultGeometry\"", ",", "nodes", "=", "[", "linestring_node", ",", "dip_node", ",", "upper_depth_node", ",", "lower_depth_node", "]", ")" ]
Returns the simple fault source geometry as a Node :param fault_source: Simple fault source model as an instance of the :class: `openquake.hazardlib.source.simple_fault.SimpleFaultSource` :returns: Instance of :class:`openquake.baselib.node.Node`
[ "Returns", "the", "simple", "fault", "source", "geometry", "as", "a", "Node" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L104-L123
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_complex_fault_geometry
def build_complex_fault_geometry(fault_source): """ Returns the complex fault source geometry as a Node :param fault_source: Complex fault source model as an instance of the :class: `openquake.hazardlib.source.complex_fault.ComplexFaultSource` :returns: Instance of :class:`openquake.baselib.node.Node` """ num_edges = len(fault_source.edges) edge_nodes = [] for iloc, edge in enumerate(fault_source.edges): if iloc == 0: # Top Edge node_name = "faultTopEdge" elif iloc == (num_edges - 1): # Bottom edge node_name = "faultBottomEdge" else: # Intermediate edge node_name = "intermediateEdge" edge_nodes.append( Node(node_name, nodes=[build_linestring_node(edge, with_depth=True)])) return Node("complexFaultGeometry", nodes=edge_nodes)
python
def build_complex_fault_geometry(fault_source): num_edges = len(fault_source.edges) edge_nodes = [] for iloc, edge in enumerate(fault_source.edges): if iloc == 0: node_name = "faultTopEdge" elif iloc == (num_edges - 1): node_name = "faultBottomEdge" else: node_name = "intermediateEdge" edge_nodes.append( Node(node_name, nodes=[build_linestring_node(edge, with_depth=True)])) return Node("complexFaultGeometry", nodes=edge_nodes)
[ "def", "build_complex_fault_geometry", "(", "fault_source", ")", ":", "num_edges", "=", "len", "(", "fault_source", ".", "edges", ")", "edge_nodes", "=", "[", "]", "for", "iloc", ",", "edge", "in", "enumerate", "(", "fault_source", ".", "edges", ")", ":", "if", "iloc", "==", "0", ":", "# Top Edge", "node_name", "=", "\"faultTopEdge\"", "elif", "iloc", "==", "(", "num_edges", "-", "1", ")", ":", "# Bottom edge", "node_name", "=", "\"faultBottomEdge\"", "else", ":", "# Intermediate edge", "node_name", "=", "\"intermediateEdge\"", "edge_nodes", ".", "append", "(", "Node", "(", "node_name", ",", "nodes", "=", "[", "build_linestring_node", "(", "edge", ",", "with_depth", "=", "True", ")", "]", ")", ")", "return", "Node", "(", "\"complexFaultGeometry\"", ",", "nodes", "=", "edge_nodes", ")" ]
Returns the complex fault source geometry as a Node :param fault_source: Complex fault source model as an instance of the :class: `openquake.hazardlib.source.complex_fault.ComplexFaultSource` :returns: Instance of :class:`openquake.baselib.node.Node`
[ "Returns", "the", "complex", "fault", "source", "geometry", "as", "a", "Node" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L126-L152
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_evenly_discretised_mfd
def build_evenly_discretised_mfd(mfd): """ Returns the evenly discretized MFD as a Node :param mfd: MFD as instance of :class: `openquake.hazardlib.mfd.evenly_discretized.EvenlyDiscretizedMFD` :returns: Instance of :class:`openquake.baselib.node.Node` """ occur_rates = Node("occurRates", text=mfd.occurrence_rates) return Node("incrementalMFD", {"binWidth": mfd.bin_width, "minMag": mfd.min_mag}, nodes=[occur_rates])
python
def build_evenly_discretised_mfd(mfd): occur_rates = Node("occurRates", text=mfd.occurrence_rates) return Node("incrementalMFD", {"binWidth": mfd.bin_width, "minMag": mfd.min_mag}, nodes=[occur_rates])
[ "def", "build_evenly_discretised_mfd", "(", "mfd", ")", ":", "occur_rates", "=", "Node", "(", "\"occurRates\"", ",", "text", "=", "mfd", ".", "occurrence_rates", ")", "return", "Node", "(", "\"incrementalMFD\"", ",", "{", "\"binWidth\"", ":", "mfd", ".", "bin_width", ",", "\"minMag\"", ":", "mfd", ".", "min_mag", "}", ",", "nodes", "=", "[", "occur_rates", "]", ")" ]
Returns the evenly discretized MFD as a Node :param mfd: MFD as instance of :class: `openquake.hazardlib.mfd.evenly_discretized.EvenlyDiscretizedMFD` :returns: Instance of :class:`openquake.baselib.node.Node`
[ "Returns", "the", "evenly", "discretized", "MFD", "as", "a", "Node" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L156-L169
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_truncated_gr_mfd
def build_truncated_gr_mfd(mfd): """ Parses the truncated Gutenberg Richter MFD as a Node :param mfd: MFD as instance of :class: `openquake.hazardlib.mfd.truncated_gr.TruncatedGRMFD` :returns: Instance of :class:`openquake.baselib.node.Node` """ return Node("truncGutenbergRichterMFD", {"aValue": mfd.a_val, "bValue": mfd.b_val, "minMag": mfd.min_mag, "maxMag": mfd.max_mag})
python
def build_truncated_gr_mfd(mfd): return Node("truncGutenbergRichterMFD", {"aValue": mfd.a_val, "bValue": mfd.b_val, "minMag": mfd.min_mag, "maxMag": mfd.max_mag})
[ "def", "build_truncated_gr_mfd", "(", "mfd", ")", ":", "return", "Node", "(", "\"truncGutenbergRichterMFD\"", ",", "{", "\"aValue\"", ":", "mfd", ".", "a_val", ",", "\"bValue\"", ":", "mfd", ".", "b_val", ",", "\"minMag\"", ":", "mfd", ".", "min_mag", ",", "\"maxMag\"", ":", "mfd", ".", "max_mag", "}", ")" ]
Parses the truncated Gutenberg Richter MFD as a Node :param mfd: MFD as instance of :class: `openquake.hazardlib.mfd.truncated_gr.TruncatedGRMFD` :returns: Instance of :class:`openquake.baselib.node.Node`
[ "Parses", "the", "truncated", "Gutenberg", "Richter", "MFD", "as", "a", "Node" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L173-L185
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_arbitrary_mfd
def build_arbitrary_mfd(mfd): """ Parses the arbitrary MFD as a Node :param mfd: MFD as instance of :class: `openquake.hazardlib.mfd.arbitrary.ArbitraryMFD` :returns: Instance of :class:`openquake.baselib.node.Node` """ magnitudes = Node("magnitudes", text=mfd.magnitudes) occur_rates = Node("occurRates", text=mfd.occurrence_rates) return Node("arbitraryMFD", nodes=[magnitudes, occur_rates])
python
def build_arbitrary_mfd(mfd): magnitudes = Node("magnitudes", text=mfd.magnitudes) occur_rates = Node("occurRates", text=mfd.occurrence_rates) return Node("arbitraryMFD", nodes=[magnitudes, occur_rates])
[ "def", "build_arbitrary_mfd", "(", "mfd", ")", ":", "magnitudes", "=", "Node", "(", "\"magnitudes\"", ",", "text", "=", "mfd", ".", "magnitudes", ")", "occur_rates", "=", "Node", "(", "\"occurRates\"", ",", "text", "=", "mfd", ".", "occurrence_rates", ")", "return", "Node", "(", "\"arbitraryMFD\"", ",", "nodes", "=", "[", "magnitudes", ",", "occur_rates", "]", ")" ]
Parses the arbitrary MFD as a Node :param mfd: MFD as instance of :class: `openquake.hazardlib.mfd.arbitrary.ArbitraryMFD` :returns: Instance of :class:`openquake.baselib.node.Node`
[ "Parses", "the", "arbitrary", "MFD", "as", "a", "Node" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L189-L201
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_youngs_coppersmith_mfd
def build_youngs_coppersmith_mfd(mfd): """ Parses the Youngs & Coppersmith MFD as a node. Note that the MFD does not hold the total moment rate, but only the characteristic rate. Therefore the node is written to the characteristic rate version regardless of whether or not it was originally created from total moment rate :param mfd: MFD as instance of :class: `openquake.hazardlib.mfd.youngs_coppersmith_1985. YoungsCoppersmith1985MFD` :returns: Instance of :class:`openquake.baselib.node.Node` """ return Node("YoungsCoppersmithMFD", {"minMag": mfd.min_mag, "bValue": mfd.b_val, "characteristicMag": mfd.char_mag, "characteristicRate": mfd.char_rate, "binWidth": mfd.bin_width})
python
def build_youngs_coppersmith_mfd(mfd): return Node("YoungsCoppersmithMFD", {"minMag": mfd.min_mag, "bValue": mfd.b_val, "characteristicMag": mfd.char_mag, "characteristicRate": mfd.char_rate, "binWidth": mfd.bin_width})
[ "def", "build_youngs_coppersmith_mfd", "(", "mfd", ")", ":", "return", "Node", "(", "\"YoungsCoppersmithMFD\"", ",", "{", "\"minMag\"", ":", "mfd", ".", "min_mag", ",", "\"bValue\"", ":", "mfd", ".", "b_val", ",", "\"characteristicMag\"", ":", "mfd", ".", "char_mag", ",", "\"characteristicRate\"", ":", "mfd", ".", "char_rate", ",", "\"binWidth\"", ":", "mfd", ".", "bin_width", "}", ")" ]
Parses the Youngs & Coppersmith MFD as a node. Note that the MFD does not hold the total moment rate, but only the characteristic rate. Therefore the node is written to the characteristic rate version regardless of whether or not it was originally created from total moment rate :param mfd: MFD as instance of :class: `openquake.hazardlib.mfd.youngs_coppersmith_1985. YoungsCoppersmith1985MFD` :returns: Instance of :class:`openquake.baselib.node.Node`
[ "Parses", "the", "Youngs", "&", "Coppersmith", "MFD", "as", "a", "node", ".", "Note", "that", "the", "MFD", "does", "not", "hold", "the", "total", "moment", "rate", "but", "only", "the", "characteristic", "rate", ".", "Therefore", "the", "node", "is", "written", "to", "the", "characteristic", "rate", "version", "regardless", "of", "whether", "or", "not", "it", "was", "originally", "created", "from", "total", "moment", "rate" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L205-L223
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_multi_mfd
def build_multi_mfd(mfd): """ Parses the MultiMFD as a Node :param mfd: MFD as instance of :class: `openquake.hazardlib.mfd.multi_mfd.MultiMFD` :returns: Instance of :class:`openquake.baselib.node.Node` """ node = Node("multiMFD", dict(kind=mfd.kind, size=mfd.size)) for name in sorted(mfd.kwargs): values = mfd.kwargs[name] if name in ('magnitudes', 'occurRates'): if len(values[0]) > 1: # tested in multipoint_test.py values = list(numpy.concatenate(values)) else: values = sum(values, []) node.append(Node(name, text=values)) if 'occurRates' in mfd.kwargs: lengths = [len(rates) for rates in mfd.kwargs['occurRates']] node.append(Node('lengths', text=lengths)) return node
python
def build_multi_mfd(mfd): node = Node("multiMFD", dict(kind=mfd.kind, size=mfd.size)) for name in sorted(mfd.kwargs): values = mfd.kwargs[name] if name in ('magnitudes', 'occurRates'): if len(values[0]) > 1: values = list(numpy.concatenate(values)) else: values = sum(values, []) node.append(Node(name, text=values)) if 'occurRates' in mfd.kwargs: lengths = [len(rates) for rates in mfd.kwargs['occurRates']] node.append(Node('lengths', text=lengths)) return node
[ "def", "build_multi_mfd", "(", "mfd", ")", ":", "node", "=", "Node", "(", "\"multiMFD\"", ",", "dict", "(", "kind", "=", "mfd", ".", "kind", ",", "size", "=", "mfd", ".", "size", ")", ")", "for", "name", "in", "sorted", "(", "mfd", ".", "kwargs", ")", ":", "values", "=", "mfd", ".", "kwargs", "[", "name", "]", "if", "name", "in", "(", "'magnitudes'", ",", "'occurRates'", ")", ":", "if", "len", "(", "values", "[", "0", "]", ")", ">", "1", ":", "# tested in multipoint_test.py", "values", "=", "list", "(", "numpy", ".", "concatenate", "(", "values", ")", ")", "else", ":", "values", "=", "sum", "(", "values", ",", "[", "]", ")", "node", ".", "append", "(", "Node", "(", "name", ",", "text", "=", "values", ")", ")", "if", "'occurRates'", "in", "mfd", ".", "kwargs", ":", "lengths", "=", "[", "len", "(", "rates", ")", "for", "rates", "in", "mfd", ".", "kwargs", "[", "'occurRates'", "]", "]", "node", ".", "append", "(", "Node", "(", "'lengths'", ",", "text", "=", "lengths", ")", ")", "return", "node" ]
Parses the MultiMFD as a Node :param mfd: MFD as instance of :class: `openquake.hazardlib.mfd.multi_mfd.MultiMFD` :returns: Instance of :class:`openquake.baselib.node.Node`
[ "Parses", "the", "MultiMFD", "as", "a", "Node" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L227-L249
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_nodal_plane_dist
def build_nodal_plane_dist(npd): """ Returns the nodal plane distribution as a Node instance :param npd: Nodal plane distribution as instance of :class: `openquake.hazardlib.pmf.PMF` :returns: Instance of :class:`openquake.baselib.node.Node` """ npds = [] for prob, npd in npd.data: nodal_plane = Node( "nodalPlane", {"dip": npd.dip, "probability": prob, "strike": npd.strike, "rake": npd.rake}) npds.append(nodal_plane) return Node("nodalPlaneDist", nodes=npds)
python
def build_nodal_plane_dist(npd): npds = [] for prob, npd in npd.data: nodal_plane = Node( "nodalPlane", {"dip": npd.dip, "probability": prob, "strike": npd.strike, "rake": npd.rake}) npds.append(nodal_plane) return Node("nodalPlaneDist", nodes=npds)
[ "def", "build_nodal_plane_dist", "(", "npd", ")", ":", "npds", "=", "[", "]", "for", "prob", ",", "npd", "in", "npd", ".", "data", ":", "nodal_plane", "=", "Node", "(", "\"nodalPlane\"", ",", "{", "\"dip\"", ":", "npd", ".", "dip", ",", "\"probability\"", ":", "prob", ",", "\"strike\"", ":", "npd", ".", "strike", ",", "\"rake\"", ":", "npd", ".", "rake", "}", ")", "npds", ".", "append", "(", "nodal_plane", ")", "return", "Node", "(", "\"nodalPlaneDist\"", ",", "nodes", "=", "npds", ")" ]
Returns the nodal plane distribution as a Node instance :param npd: Nodal plane distribution as instance of :class: `openquake.hazardlib.pmf.PMF` :returns: Instance of :class:`openquake.baselib.node.Node`
[ "Returns", "the", "nodal", "plane", "distribution", "as", "a", "Node", "instance" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L252-L268
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_hypo_depth_dist
def build_hypo_depth_dist(hdd): """ Returns the hypocentral depth distribution as a Node instance :param hdd: Hypocentral depth distribution as an instance of :class: `openquake.hzardlib.pmf.PMF` :returns: Instance of :class:`openquake.baselib.node.Node` """ hdds = [] for (prob, depth) in hdd.data: hdds.append( Node("hypoDepth", {"depth": depth, "probability": prob})) return Node("hypoDepthDist", nodes=hdds)
python
def build_hypo_depth_dist(hdd): hdds = [] for (prob, depth) in hdd.data: hdds.append( Node("hypoDepth", {"depth": depth, "probability": prob})) return Node("hypoDepthDist", nodes=hdds)
[ "def", "build_hypo_depth_dist", "(", "hdd", ")", ":", "hdds", "=", "[", "]", "for", "(", "prob", ",", "depth", ")", "in", "hdd", ".", "data", ":", "hdds", ".", "append", "(", "Node", "(", "\"hypoDepth\"", ",", "{", "\"depth\"", ":", "depth", ",", "\"probability\"", ":", "prob", "}", ")", ")", "return", "Node", "(", "\"hypoDepthDist\"", ",", "nodes", "=", "hdds", ")" ]
Returns the hypocentral depth distribution as a Node instance :param hdd: Hypocentral depth distribution as an instance of :class: `openquake.hzardlib.pmf.PMF` :returns: Instance of :class:`openquake.baselib.node.Node`
[ "Returns", "the", "hypocentral", "depth", "distribution", "as", "a", "Node", "instance" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L271-L285
gem/oq-engine
openquake/hazardlib/sourcewriter.py
get_distributed_seismicity_source_nodes
def get_distributed_seismicity_source_nodes(source): """ Returns list of nodes of attributes common to all distributed seismicity source classes :param source: Seismic source as instance of :class: `openquake.hazardlib.source.area.AreaSource` or :class: `openquake.hazardlib.source.point.PointSource` :returns: List of instances of :class:`openquake.baselib.node.Node` """ source_nodes = [] # parse msr source_nodes.append( Node("magScaleRel", text=source.magnitude_scaling_relationship.__class__.__name__)) # Parse aspect ratio source_nodes.append( Node("ruptAspectRatio", text=source.rupture_aspect_ratio)) # Parse MFD source_nodes.append(obj_to_node(source.mfd)) # Parse nodal plane distribution source_nodes.append( build_nodal_plane_dist(source.nodal_plane_distribution)) # Parse hypocentral depth distribution source_nodes.append( build_hypo_depth_dist(source.hypocenter_distribution)) return source_nodes
python
def get_distributed_seismicity_source_nodes(source): source_nodes = [] source_nodes.append( Node("magScaleRel", text=source.magnitude_scaling_relationship.__class__.__name__)) source_nodes.append( Node("ruptAspectRatio", text=source.rupture_aspect_ratio)) source_nodes.append(obj_to_node(source.mfd)) source_nodes.append( build_nodal_plane_dist(source.nodal_plane_distribution)) source_nodes.append( build_hypo_depth_dist(source.hypocenter_distribution)) return source_nodes
[ "def", "get_distributed_seismicity_source_nodes", "(", "source", ")", ":", "source_nodes", "=", "[", "]", "# parse msr", "source_nodes", ".", "append", "(", "Node", "(", "\"magScaleRel\"", ",", "text", "=", "source", ".", "magnitude_scaling_relationship", ".", "__class__", ".", "__name__", ")", ")", "# Parse aspect ratio", "source_nodes", ".", "append", "(", "Node", "(", "\"ruptAspectRatio\"", ",", "text", "=", "source", ".", "rupture_aspect_ratio", ")", ")", "# Parse MFD", "source_nodes", ".", "append", "(", "obj_to_node", "(", "source", ".", "mfd", ")", ")", "# Parse nodal plane distribution", "source_nodes", ".", "append", "(", "build_nodal_plane_dist", "(", "source", ".", "nodal_plane_distribution", ")", ")", "# Parse hypocentral depth distribution", "source_nodes", ".", "append", "(", "build_hypo_depth_dist", "(", "source", ".", "hypocenter_distribution", ")", ")", "return", "source_nodes" ]
Returns list of nodes of attributes common to all distributed seismicity source classes :param source: Seismic source as instance of :class: `openquake.hazardlib.source.area.AreaSource` or :class: `openquake.hazardlib.source.point.PointSource` :returns: List of instances of :class:`openquake.baselib.node.Node`
[ "Returns", "list", "of", "nodes", "of", "attributes", "common", "to", "all", "distributed", "seismicity", "source", "classes" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L288-L316
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_hypo_list_node
def build_hypo_list_node(hypo_list): """ :param hypo_list: an array of shape (N, 3) with columns (alongStrike, downDip, weight) :returns: a hypoList node containing N hypo nodes """ hypolist = Node('hypoList', {}) for row in hypo_list: n = Node( 'hypo', dict(alongStrike=row[0], downDip=row[1], weight=row[2])) hypolist.append(n) return hypolist
python
def build_hypo_list_node(hypo_list): hypolist = Node('hypoList', {}) for row in hypo_list: n = Node( 'hypo', dict(alongStrike=row[0], downDip=row[1], weight=row[2])) hypolist.append(n) return hypolist
[ "def", "build_hypo_list_node", "(", "hypo_list", ")", ":", "hypolist", "=", "Node", "(", "'hypoList'", ",", "{", "}", ")", "for", "row", "in", "hypo_list", ":", "n", "=", "Node", "(", "'hypo'", ",", "dict", "(", "alongStrike", "=", "row", "[", "0", "]", ",", "downDip", "=", "row", "[", "1", "]", ",", "weight", "=", "row", "[", "2", "]", ")", ")", "hypolist", ".", "append", "(", "n", ")", "return", "hypolist" ]
:param hypo_list: an array of shape (N, 3) with columns (alongStrike, downDip, weight) :returns: a hypoList node containing N hypo nodes
[ ":", "param", "hypo_list", ":", "an", "array", "of", "shape", "(", "N", "3", ")", "with", "columns", "(", "alongStrike", "downDip", "weight", ")", ":", "returns", ":", "a", "hypoList", "node", "containing", "N", "hypo", "nodes" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L319-L331
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_slip_list_node
def build_slip_list_node(slip_list): """ :param slip_list: an array of shape (N, 2) with columns (slip, weight) :returns: a hypoList node containing N slip nodes """ sliplist = Node('slipList', {}) for row in slip_list: sliplist.append( Node('slip', dict(weight=row[1]), row[0])) return sliplist
python
def build_slip_list_node(slip_list): sliplist = Node('slipList', {}) for row in slip_list: sliplist.append( Node('slip', dict(weight=row[1]), row[0])) return sliplist
[ "def", "build_slip_list_node", "(", "slip_list", ")", ":", "sliplist", "=", "Node", "(", "'slipList'", ",", "{", "}", ")", "for", "row", "in", "slip_list", ":", "sliplist", ".", "append", "(", "Node", "(", "'slip'", ",", "dict", "(", "weight", "=", "row", "[", "1", "]", ")", ",", "row", "[", "0", "]", ")", ")", "return", "sliplist" ]
:param slip_list: an array of shape (N, 2) with columns (slip, weight) :returns: a hypoList node containing N slip nodes
[ ":", "param", "slip_list", ":", "an", "array", "of", "shape", "(", "N", "2", ")", "with", "columns", "(", "slip", "weight", ")", ":", "returns", ":", "a", "hypoList", "node", "containing", "N", "slip", "nodes" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L334-L345
gem/oq-engine
openquake/hazardlib/sourcewriter.py
get_fault_source_nodes
def get_fault_source_nodes(source): """ Returns list of nodes of attributes common to all fault source classes :param source: Fault source as instance of :class: `openquake.hazardlib.source.simple_fault.SimpleFaultSource` or :class: `openquake.hazardlib.source.complex_fault.ComplexFaultSource` :returns: List of instances of :class:`openquake.baselib.node.Node` """ source_nodes = [] # parse msr source_nodes.append( Node( "magScaleRel", text=source.magnitude_scaling_relationship.__class__.__name__)) # Parse aspect ratio source_nodes.append( Node("ruptAspectRatio", text=source.rupture_aspect_ratio)) # Parse MFD source_nodes.append(obj_to_node(source.mfd)) # Parse Rake source_nodes.append(Node("rake", text=source.rake)) if len(getattr(source, 'hypo_list', [])): source_nodes.append(build_hypo_list_node(source.hypo_list)) if len(getattr(source, 'slip_list', [])): source_nodes.append(build_slip_list_node(source.slip_list)) return source_nodes
python
def get_fault_source_nodes(source): source_nodes = [] source_nodes.append( Node( "magScaleRel", text=source.magnitude_scaling_relationship.__class__.__name__)) source_nodes.append( Node("ruptAspectRatio", text=source.rupture_aspect_ratio)) source_nodes.append(obj_to_node(source.mfd)) source_nodes.append(Node("rake", text=source.rake)) if len(getattr(source, 'hypo_list', [])): source_nodes.append(build_hypo_list_node(source.hypo_list)) if len(getattr(source, 'slip_list', [])): source_nodes.append(build_slip_list_node(source.slip_list)) return source_nodes
[ "def", "get_fault_source_nodes", "(", "source", ")", ":", "source_nodes", "=", "[", "]", "# parse msr", "source_nodes", ".", "append", "(", "Node", "(", "\"magScaleRel\"", ",", "text", "=", "source", ".", "magnitude_scaling_relationship", ".", "__class__", ".", "__name__", ")", ")", "# Parse aspect ratio", "source_nodes", ".", "append", "(", "Node", "(", "\"ruptAspectRatio\"", ",", "text", "=", "source", ".", "rupture_aspect_ratio", ")", ")", "# Parse MFD", "source_nodes", ".", "append", "(", "obj_to_node", "(", "source", ".", "mfd", ")", ")", "# Parse Rake", "source_nodes", ".", "append", "(", "Node", "(", "\"rake\"", ",", "text", "=", "source", ".", "rake", ")", ")", "if", "len", "(", "getattr", "(", "source", ",", "'hypo_list'", ",", "[", "]", ")", ")", ":", "source_nodes", ".", "append", "(", "build_hypo_list_node", "(", "source", ".", "hypo_list", ")", ")", "if", "len", "(", "getattr", "(", "source", ",", "'slip_list'", ",", "[", "]", ")", ")", ":", "source_nodes", ".", "append", "(", "build_slip_list_node", "(", "source", ".", "slip_list", ")", ")", "return", "source_nodes" ]
Returns list of nodes of attributes common to all fault source classes :param source: Fault source as instance of :class: `openquake.hazardlib.source.simple_fault.SimpleFaultSource` or :class: `openquake.hazardlib.source.complex_fault.ComplexFaultSource` :returns: List of instances of :class:`openquake.baselib.node.Node`
[ "Returns", "list", "of", "nodes", "of", "attributes", "common", "to", "all", "fault", "source", "classes" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L348-L376
gem/oq-engine
openquake/hazardlib/sourcewriter.py
get_source_attributes
def get_source_attributes(source): """ Retreives a dictionary of source attributes from the source class :param source: Seismic source as instance of :class: `openquake.hazardlib.source.base.BaseSeismicSource` :returns: Dictionary of source attributes """ attrs = {"id": source.source_id, "name": source.name, "tectonicRegion": source.tectonic_region_type} if isinstance(source, NonParametricSeismicSource): if source.data[0][0].weight is not None: weights = [] for data in source.data: weights.append(data[0].weight) attrs['rup_weights'] = numpy.array(weights) print(attrs) return attrs
python
def get_source_attributes(source): attrs = {"id": source.source_id, "name": source.name, "tectonicRegion": source.tectonic_region_type} if isinstance(source, NonParametricSeismicSource): if source.data[0][0].weight is not None: weights = [] for data in source.data: weights.append(data[0].weight) attrs['rup_weights'] = numpy.array(weights) print(attrs) return attrs
[ "def", "get_source_attributes", "(", "source", ")", ":", "attrs", "=", "{", "\"id\"", ":", "source", ".", "source_id", ",", "\"name\"", ":", "source", ".", "name", ",", "\"tectonicRegion\"", ":", "source", ".", "tectonic_region_type", "}", "if", "isinstance", "(", "source", ",", "NonParametricSeismicSource", ")", ":", "if", "source", ".", "data", "[", "0", "]", "[", "0", "]", ".", "weight", "is", "not", "None", ":", "weights", "=", "[", "]", "for", "data", "in", "source", ".", "data", ":", "weights", ".", "append", "(", "data", "[", "0", "]", ".", "weight", ")", "attrs", "[", "'rup_weights'", "]", "=", "numpy", ".", "array", "(", "weights", ")", "print", "(", "attrs", ")", "return", "attrs" ]
Retreives a dictionary of source attributes from the source class :param source: Seismic source as instance of :class: `openquake.hazardlib.source.base.BaseSeismicSource` :returns: Dictionary of source attributes
[ "Retreives", "a", "dictionary", "of", "source", "attributes", "from", "the", "source", "class" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L379-L399
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_area_source_node
def build_area_source_node(area_source): """ Parses an area source to a Node class :param area_source: Area source as instance of :class: `openquake.hazardlib.source.area.AreaSource` :returns: Instance of :class:`openquake.baselib.node.Node` """ # parse geometry source_nodes = [build_area_source_geometry(area_source)] # parse common distributed attributes source_nodes.extend(get_distributed_seismicity_source_nodes(area_source)) return Node( "areaSource", get_source_attributes(area_source), nodes=source_nodes)
python
def build_area_source_node(area_source): source_nodes = [build_area_source_geometry(area_source)] source_nodes.extend(get_distributed_seismicity_source_nodes(area_source)) return Node( "areaSource", get_source_attributes(area_source), nodes=source_nodes)
[ "def", "build_area_source_node", "(", "area_source", ")", ":", "# parse geometry", "source_nodes", "=", "[", "build_area_source_geometry", "(", "area_source", ")", "]", "# parse common distributed attributes", "source_nodes", ".", "extend", "(", "get_distributed_seismicity_source_nodes", "(", "area_source", ")", ")", "return", "Node", "(", "\"areaSource\"", ",", "get_source_attributes", "(", "area_source", ")", ",", "nodes", "=", "source_nodes", ")" ]
Parses an area source to a Node class :param area_source: Area source as instance of :class: `openquake.hazardlib.source.area.AreaSource` :returns: Instance of :class:`openquake.baselib.node.Node`
[ "Parses", "an", "area", "source", "to", "a", "Node", "class" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L403-L418
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_rupture_node
def build_rupture_node(rupt, probs_occur): """ :param rupt: a hazardlib rupture object :param probs_occur: a list of floats with sum 1 """ s = sum(probs_occur) if abs(s - 1) > pmf.PRECISION: raise ValueError('The sum of %s is not 1: %s' % (probs_occur, s)) h = rupt.hypocenter hp_dict = dict(lon=h.longitude, lat=h.latitude, depth=h.depth) rupt_nodes = [Node('magnitude', {}, rupt.mag), Node('rake', {}, rupt.rake), Node('hypocenter', hp_dict)] rupt_nodes.extend(rupt.surface.surface_nodes) geom = rupt.surface.surface_nodes[0].tag if len(rupt.surface.surface_nodes) > 1: name = 'multiPlanesRupture' elif geom == 'planarSurface': name = 'singlePlaneRupture' elif geom == 'simpleFaultGeometry': name = 'simpleFaultRupture' elif geom == 'complexFaultGeometry': name = 'complexFaultRupture' elif geom == 'griddedSurface': name = 'griddedRupture' return Node(name, {'probs_occur': probs_occur}, nodes=rupt_nodes)
python
def build_rupture_node(rupt, probs_occur): s = sum(probs_occur) if abs(s - 1) > pmf.PRECISION: raise ValueError('The sum of %s is not 1: %s' % (probs_occur, s)) h = rupt.hypocenter hp_dict = dict(lon=h.longitude, lat=h.latitude, depth=h.depth) rupt_nodes = [Node('magnitude', {}, rupt.mag), Node('rake', {}, rupt.rake), Node('hypocenter', hp_dict)] rupt_nodes.extend(rupt.surface.surface_nodes) geom = rupt.surface.surface_nodes[0].tag if len(rupt.surface.surface_nodes) > 1: name = 'multiPlanesRupture' elif geom == 'planarSurface': name = 'singlePlaneRupture' elif geom == 'simpleFaultGeometry': name = 'simpleFaultRupture' elif geom == 'complexFaultGeometry': name = 'complexFaultRupture' elif geom == 'griddedSurface': name = 'griddedRupture' return Node(name, {'probs_occur': probs_occur}, nodes=rupt_nodes)
[ "def", "build_rupture_node", "(", "rupt", ",", "probs_occur", ")", ":", "s", "=", "sum", "(", "probs_occur", ")", "if", "abs", "(", "s", "-", "1", ")", ">", "pmf", ".", "PRECISION", ":", "raise", "ValueError", "(", "'The sum of %s is not 1: %s'", "%", "(", "probs_occur", ",", "s", ")", ")", "h", "=", "rupt", ".", "hypocenter", "hp_dict", "=", "dict", "(", "lon", "=", "h", ".", "longitude", ",", "lat", "=", "h", ".", "latitude", ",", "depth", "=", "h", ".", "depth", ")", "rupt_nodes", "=", "[", "Node", "(", "'magnitude'", ",", "{", "}", ",", "rupt", ".", "mag", ")", ",", "Node", "(", "'rake'", ",", "{", "}", ",", "rupt", ".", "rake", ")", ",", "Node", "(", "'hypocenter'", ",", "hp_dict", ")", "]", "rupt_nodes", ".", "extend", "(", "rupt", ".", "surface", ".", "surface_nodes", ")", "geom", "=", "rupt", ".", "surface", ".", "surface_nodes", "[", "0", "]", ".", "tag", "if", "len", "(", "rupt", ".", "surface", ".", "surface_nodes", ")", ">", "1", ":", "name", "=", "'multiPlanesRupture'", "elif", "geom", "==", "'planarSurface'", ":", "name", "=", "'singlePlaneRupture'", "elif", "geom", "==", "'simpleFaultGeometry'", ":", "name", "=", "'simpleFaultRupture'", "elif", "geom", "==", "'complexFaultGeometry'", ":", "name", "=", "'complexFaultRupture'", "elif", "geom", "==", "'griddedSurface'", ":", "name", "=", "'griddedRupture'", "return", "Node", "(", "name", ",", "{", "'probs_occur'", ":", "probs_occur", "}", ",", "nodes", "=", "rupt_nodes", ")" ]
:param rupt: a hazardlib rupture object :param probs_occur: a list of floats with sum 1
[ ":", "param", "rupt", ":", "a", "hazardlib", "rupture", "object", ":", "param", "probs_occur", ":", "a", "list", "of", "floats", "with", "sum", "1" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L442-L467
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_multi_point_source_node
def build_multi_point_source_node(multi_point_source): """ Parses a point source to a Node class :param point_source: MultiPoint source as instance of :class: `openquake.hazardlib.source.point.MultiPointSource` :returns: Instance of :class:`openquake.baselib.node.Node` """ # parse geometry pos = [] for p in multi_point_source.mesh: pos.append(p.x) pos.append(p.y) mesh_node = Node('gml:posList', text=pos) upper_depth_node = Node( "upperSeismoDepth", text=multi_point_source.upper_seismogenic_depth) lower_depth_node = Node( "lowerSeismoDepth", text=multi_point_source.lower_seismogenic_depth) source_nodes = [Node( "multiPointGeometry", nodes=[mesh_node, upper_depth_node, lower_depth_node])] # parse common distributed attributes source_nodes.extend(get_distributed_seismicity_source_nodes( multi_point_source)) return Node("multiPointSource", get_source_attributes(multi_point_source), nodes=source_nodes)
python
def build_multi_point_source_node(multi_point_source): pos = [] for p in multi_point_source.mesh: pos.append(p.x) pos.append(p.y) mesh_node = Node('gml:posList', text=pos) upper_depth_node = Node( "upperSeismoDepth", text=multi_point_source.upper_seismogenic_depth) lower_depth_node = Node( "lowerSeismoDepth", text=multi_point_source.lower_seismogenic_depth) source_nodes = [Node( "multiPointGeometry", nodes=[mesh_node, upper_depth_node, lower_depth_node])] source_nodes.extend(get_distributed_seismicity_source_nodes( multi_point_source)) return Node("multiPointSource", get_source_attributes(multi_point_source), nodes=source_nodes)
[ "def", "build_multi_point_source_node", "(", "multi_point_source", ")", ":", "# parse geometry", "pos", "=", "[", "]", "for", "p", "in", "multi_point_source", ".", "mesh", ":", "pos", ".", "append", "(", "p", ".", "x", ")", "pos", ".", "append", "(", "p", ".", "y", ")", "mesh_node", "=", "Node", "(", "'gml:posList'", ",", "text", "=", "pos", ")", "upper_depth_node", "=", "Node", "(", "\"upperSeismoDepth\"", ",", "text", "=", "multi_point_source", ".", "upper_seismogenic_depth", ")", "lower_depth_node", "=", "Node", "(", "\"lowerSeismoDepth\"", ",", "text", "=", "multi_point_source", ".", "lower_seismogenic_depth", ")", "source_nodes", "=", "[", "Node", "(", "\"multiPointGeometry\"", ",", "nodes", "=", "[", "mesh_node", ",", "upper_depth_node", ",", "lower_depth_node", "]", ")", "]", "# parse common distributed attributes", "source_nodes", ".", "extend", "(", "get_distributed_seismicity_source_nodes", "(", "multi_point_source", ")", ")", "return", "Node", "(", "\"multiPointSource\"", ",", "get_source_attributes", "(", "multi_point_source", ")", ",", "nodes", "=", "source_nodes", ")" ]
Parses a point source to a Node class :param point_source: MultiPoint source as instance of :class: `openquake.hazardlib.source.point.MultiPointSource` :returns: Instance of :class:`openquake.baselib.node.Node`
[ "Parses", "a", "point", "source", "to", "a", "Node", "class" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L471-L499
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_point_source_node
def build_point_source_node(point_source): """ Parses a point source to a Node class :param point_source: Point source as instance of :class: `openquake.hazardlib.source.point.PointSource` :returns: Instance of :class:`openquake.baselib.node.Node` """ # parse geometry source_nodes = [build_point_source_geometry(point_source)] # parse common distributed attributes source_nodes.extend(get_distributed_seismicity_source_nodes(point_source)) return Node("pointSource", get_source_attributes(point_source), nodes=source_nodes)
python
def build_point_source_node(point_source): source_nodes = [build_point_source_geometry(point_source)] source_nodes.extend(get_distributed_seismicity_source_nodes(point_source)) return Node("pointSource", get_source_attributes(point_source), nodes=source_nodes)
[ "def", "build_point_source_node", "(", "point_source", ")", ":", "# parse geometry", "source_nodes", "=", "[", "build_point_source_geometry", "(", "point_source", ")", "]", "# parse common distributed attributes", "source_nodes", ".", "extend", "(", "get_distributed_seismicity_source_nodes", "(", "point_source", ")", ")", "return", "Node", "(", "\"pointSource\"", ",", "get_source_attributes", "(", "point_source", ")", ",", "nodes", "=", "source_nodes", ")" ]
Parses a point source to a Node class :param point_source: Point source as instance of :class: `openquake.hazardlib.source.point.PointSource` :returns: Instance of :class:`openquake.baselib.node.Node`
[ "Parses", "a", "point", "source", "to", "a", "Node", "class" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L503-L520
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_simple_fault_source_node
def build_simple_fault_source_node(fault_source): """ Parses a simple fault source to a Node class :param fault_source: Simple fault source as instance of :class: `openquake.hazardlib.source.simple_fault.SimpleFaultSource` :returns: Instance of :class:`openquake.baselib.node.Node` """ # Parse geometry source_nodes = [build_simple_fault_geometry(fault_source)] # Parse common fault source attributes source_nodes.extend(get_fault_source_nodes(fault_source)) return Node("simpleFaultSource", get_source_attributes(fault_source), nodes=source_nodes)
python
def build_simple_fault_source_node(fault_source): source_nodes = [build_simple_fault_geometry(fault_source)] source_nodes.extend(get_fault_source_nodes(fault_source)) return Node("simpleFaultSource", get_source_attributes(fault_source), nodes=source_nodes)
[ "def", "build_simple_fault_source_node", "(", "fault_source", ")", ":", "# Parse geometry", "source_nodes", "=", "[", "build_simple_fault_geometry", "(", "fault_source", ")", "]", "# Parse common fault source attributes", "source_nodes", ".", "extend", "(", "get_fault_source_nodes", "(", "fault_source", ")", ")", "return", "Node", "(", "\"simpleFaultSource\"", ",", "get_source_attributes", "(", "fault_source", ")", ",", "nodes", "=", "source_nodes", ")" ]
Parses a simple fault source to a Node class :param fault_source: Simple fault source as instance of :class: `openquake.hazardlib.source.simple_fault.SimpleFaultSource` :returns: Instance of :class:`openquake.baselib.node.Node`
[ "Parses", "a", "simple", "fault", "source", "to", "a", "Node", "class" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L524-L540
gem/oq-engine
openquake/hazardlib/sourcewriter.py
build_complex_fault_source_node
def build_complex_fault_source_node(fault_source): """ Parses a complex fault source to a Node class :param fault_source: Simple fault source as instance of :class: `openquake.hazardlib.source.complex_fault.ComplexFaultSource` :returns: Instance of :class:`openquake.baselib.node.Node` """ # Parse geometry source_nodes = [build_complex_fault_geometry(fault_source)] # Parse common fault source attributes source_nodes.extend(get_fault_source_nodes(fault_source)) return Node("complexFaultSource", get_source_attributes(fault_source), nodes=source_nodes)
python
def build_complex_fault_source_node(fault_source): source_nodes = [build_complex_fault_geometry(fault_source)] source_nodes.extend(get_fault_source_nodes(fault_source)) return Node("complexFaultSource", get_source_attributes(fault_source), nodes=source_nodes)
[ "def", "build_complex_fault_source_node", "(", "fault_source", ")", ":", "# Parse geometry", "source_nodes", "=", "[", "build_complex_fault_geometry", "(", "fault_source", ")", "]", "# Parse common fault source attributes", "source_nodes", ".", "extend", "(", "get_fault_source_nodes", "(", "fault_source", ")", ")", "return", "Node", "(", "\"complexFaultSource\"", ",", "get_source_attributes", "(", "fault_source", ")", ",", "nodes", "=", "source_nodes", ")" ]
Parses a complex fault source to a Node class :param fault_source: Simple fault source as instance of :class: `openquake.hazardlib.source.complex_fault.ComplexFaultSource` :returns: Instance of :class:`openquake.baselib.node.Node`
[ "Parses", "a", "complex", "fault", "source", "to", "a", "Node", "class" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L544-L560
gem/oq-engine
openquake/hazardlib/sourcewriter.py
write_source_model
def write_source_model(dest, sources_or_groups, name=None, investigation_time=None): """ Writes a source model to XML. :param dest: Destination path :param sources_or_groups: Source model in different formats :param name: Name of the source model (if missing, extracted from the filename) """ if isinstance(sources_or_groups, nrml.SourceModel): with open(dest, 'wb') as f: nrml.write([obj_to_node(sources_or_groups)], f, '%s') return if isinstance(sources_or_groups[0], sourceconverter.SourceGroup): groups = sources_or_groups else: # passed a list of sources srcs_by_trt = groupby( sources_or_groups, operator.attrgetter('tectonic_region_type')) groups = [sourceconverter.SourceGroup(trt, srcs_by_trt[trt]) for trt in srcs_by_trt] name = name or os.path.splitext(os.path.basename(dest))[0] nodes = list(map(obj_to_node, sorted(groups))) attrs = {"name": name} if investigation_time is not None: attrs['investigation_time'] = investigation_time source_model = Node("sourceModel", attrs, nodes=nodes) with open(dest, 'wb') as f: nrml.write([source_model], f, '%s') return dest
python
def write_source_model(dest, sources_or_groups, name=None, investigation_time=None): if isinstance(sources_or_groups, nrml.SourceModel): with open(dest, 'wb') as f: nrml.write([obj_to_node(sources_or_groups)], f, '%s') return if isinstance(sources_or_groups[0], sourceconverter.SourceGroup): groups = sources_or_groups else: srcs_by_trt = groupby( sources_or_groups, operator.attrgetter('tectonic_region_type')) groups = [sourceconverter.SourceGroup(trt, srcs_by_trt[trt]) for trt in srcs_by_trt] name = name or os.path.splitext(os.path.basename(dest))[0] nodes = list(map(obj_to_node, sorted(groups))) attrs = {"name": name} if investigation_time is not None: attrs['investigation_time'] = investigation_time source_model = Node("sourceModel", attrs, nodes=nodes) with open(dest, 'wb') as f: nrml.write([source_model], f, '%s') return dest
[ "def", "write_source_model", "(", "dest", ",", "sources_or_groups", ",", "name", "=", "None", ",", "investigation_time", "=", "None", ")", ":", "if", "isinstance", "(", "sources_or_groups", ",", "nrml", ".", "SourceModel", ")", ":", "with", "open", "(", "dest", ",", "'wb'", ")", "as", "f", ":", "nrml", ".", "write", "(", "[", "obj_to_node", "(", "sources_or_groups", ")", "]", ",", "f", ",", "'%s'", ")", "return", "if", "isinstance", "(", "sources_or_groups", "[", "0", "]", ",", "sourceconverter", ".", "SourceGroup", ")", ":", "groups", "=", "sources_or_groups", "else", ":", "# passed a list of sources", "srcs_by_trt", "=", "groupby", "(", "sources_or_groups", ",", "operator", ".", "attrgetter", "(", "'tectonic_region_type'", ")", ")", "groups", "=", "[", "sourceconverter", ".", "SourceGroup", "(", "trt", ",", "srcs_by_trt", "[", "trt", "]", ")", "for", "trt", "in", "srcs_by_trt", "]", "name", "=", "name", "or", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "dest", ")", ")", "[", "0", "]", "nodes", "=", "list", "(", "map", "(", "obj_to_node", ",", "sorted", "(", "groups", ")", ")", ")", "attrs", "=", "{", "\"name\"", ":", "name", "}", "if", "investigation_time", "is", "not", "None", ":", "attrs", "[", "'investigation_time'", "]", "=", "investigation_time", "source_model", "=", "Node", "(", "\"sourceModel\"", ",", "attrs", ",", "nodes", "=", "nodes", ")", "with", "open", "(", "dest", ",", "'wb'", ")", "as", "f", ":", "nrml", ".", "write", "(", "[", "source_model", "]", ",", "f", ",", "'%s'", ")", "return", "dest" ]
Writes a source model to XML. :param dest: Destination path :param sources_or_groups: Source model in different formats :param name: Name of the source model (if missing, extracted from the filename)
[ "Writes", "a", "source", "model", "to", "XML", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L610-L641
gem/oq-engine
openquake/hazardlib/sourcewriter.py
hdf5write
def hdf5write(h5file, obj, root=''): """ Write a generic object serializable to a Node-like object into a :class: `openquake.baselib.hdf5.File` """ dic = node_to_dict(obj_to_node(obj)) h5file.save(dic, root)
python
def hdf5write(h5file, obj, root=''): dic = node_to_dict(obj_to_node(obj)) h5file.save(dic, root)
[ "def", "hdf5write", "(", "h5file", ",", "obj", ",", "root", "=", "''", ")", ":", "dic", "=", "node_to_dict", "(", "obj_to_node", "(", "obj", ")", ")", "h5file", ".", "save", "(", "dic", ",", "root", ")" ]
Write a generic object serializable to a Node-like object into a :class: `openquake.baselib.hdf5.File`
[ "Write", "a", "generic", "object", "serializable", "to", "a", "Node", "-", "like", "object", "into", "a", ":", "class", ":", "openquake", ".", "baselib", ".", "hdf5", ".", "File" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L644-L650
gem/oq-engine
openquake/hazardlib/gsim/sharma_2009.py
SharmaEtAl2009.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): # pylint: disable=too-many-arguments """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for specification of input and result values. """ # extract dictionary of coefficients specific to required # intensity measure type coeffs = self.COEFFS[imt] coeffs.update(self.CONSTS) # equation (1) is in terms of common logarithm log_mean = (self._compute_magnitude(rup, coeffs) + self._compute_distance(dists, coeffs) + self._get_site_amplification(sites, coeffs) + self._get_mechanism(rup, coeffs)) # so convert to g and thence to the natural logarithm mean = log_mean*np.log(10.0) - np.log(g) # convert standard deviations from common to natural logarithm log_stddevs = self._get_stddevs(coeffs, stddev_types, len(sites.vs30)) stddevs = log_stddevs*np.log(10.0) return mean, stddevs
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): coeffs = self.COEFFS[imt] coeffs.update(self.CONSTS) log_mean = (self._compute_magnitude(rup, coeffs) + self._compute_distance(dists, coeffs) + self._get_site_amplification(sites, coeffs) + self._get_mechanism(rup, coeffs)) mean = log_mean*np.log(10.0) - np.log(g) log_stddevs = self._get_stddevs(coeffs, stddev_types, len(sites.vs30)) stddevs = log_stddevs*np.log(10.0) return mean, stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "# pylint: disable=too-many-arguments", "# extract dictionary of coefficients specific to required", "# intensity measure type", "coeffs", "=", "self", ".", "COEFFS", "[", "imt", "]", "coeffs", ".", "update", "(", "self", ".", "CONSTS", ")", "# equation (1) is in terms of common logarithm", "log_mean", "=", "(", "self", ".", "_compute_magnitude", "(", "rup", ",", "coeffs", ")", "+", "self", ".", "_compute_distance", "(", "dists", ",", "coeffs", ")", "+", "self", ".", "_get_site_amplification", "(", "sites", ",", "coeffs", ")", "+", "self", ".", "_get_mechanism", "(", "rup", ",", "coeffs", ")", ")", "# so convert to g and thence to the natural logarithm", "mean", "=", "log_mean", "*", "np", ".", "log", "(", "10.0", ")", "-", "np", ".", "log", "(", "g", ")", "# convert standard deviations from common to natural logarithm", "log_stddevs", "=", "self", ".", "_get_stddevs", "(", "coeffs", ",", "stddev_types", ",", "len", "(", "sites", ".", "vs30", ")", ")", "stddevs", "=", "log_stddevs", "*", "np", ".", "log", "(", "10.0", ")", "return", "mean", ",", "stddevs" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for specification of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "specification", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sharma_2009.py#L94-L119
gem/oq-engine
openquake/hazardlib/gsim/sharma_2009.py
SharmaEtAl2009._get_stddevs
def _get_stddevs(self, coeffs, stddev_types, num_sites): """ Return total sigma as reported in Table 2, p. 1202. """ stddevs = [] for stddev_type in stddev_types: assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES stddevs.append(coeffs['sigma'] + np.zeros(num_sites)) return np.array(stddevs)
python
def _get_stddevs(self, coeffs, stddev_types, num_sites): stddevs = [] for stddev_type in stddev_types: assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES stddevs.append(coeffs['sigma'] + np.zeros(num_sites)) return np.array(stddevs)
[ "def", "_get_stddevs", "(", "self", ",", "coeffs", ",", "stddev_types", ",", "num_sites", ")", ":", "stddevs", "=", "[", "]", "for", "stddev_type", "in", "stddev_types", ":", "assert", "stddev_type", "in", "self", ".", "DEFINED_FOR_STANDARD_DEVIATION_TYPES", "stddevs", ".", "append", "(", "coeffs", "[", "'sigma'", "]", "+", "np", ".", "zeros", "(", "num_sites", ")", ")", "return", "np", ".", "array", "(", "stddevs", ")" ]
Return total sigma as reported in Table 2, p. 1202.
[ "Return", "total", "sigma", "as", "reported", "in", "Table", "2", "p", ".", "1202", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sharma_2009.py#L121-L129
gem/oq-engine
openquake/hazardlib/gsim/sharma_2009.py
SharmaEtAl2009._compute_distance
def _compute_distance(cls, dists, coeffs): """ Compute third term of equation (1) on p. 1200: ``b3 * log(sqrt(Rjb ** 2 + b4 ** 2))`` """ return coeffs['b3']*np.log10(np.sqrt(dists.rjb**2. + coeffs['b4']**2.))
python
def _compute_distance(cls, dists, coeffs): return coeffs['b3']*np.log10(np.sqrt(dists.rjb**2. + coeffs['b4']**2.))
[ "def", "_compute_distance", "(", "cls", ",", "dists", ",", "coeffs", ")", ":", "return", "coeffs", "[", "'b3'", "]", "*", "np", ".", "log10", "(", "np", ".", "sqrt", "(", "dists", ".", "rjb", "**", "2.", "+", "coeffs", "[", "'b4'", "]", "**", "2.", ")", ")" ]
Compute third term of equation (1) on p. 1200: ``b3 * log(sqrt(Rjb ** 2 + b4 ** 2))``
[ "Compute", "third", "term", "of", "equation", "(", "1", ")", "on", "p", ".", "1200", ":" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sharma_2009.py#L141-L147
gem/oq-engine
openquake/hazardlib/gsim/sharma_2009.py
SharmaEtAl2009._get_site_amplification
def _get_site_amplification(self, sites, coeffs): """ Compute fourth term of equation (1) on p. 1200: ``b5 * S`` """ is_rock = self.get_site_type_dummy_variables(sites) return coeffs['b5']*is_rock
python
def _get_site_amplification(self, sites, coeffs): is_rock = self.get_site_type_dummy_variables(sites) return coeffs['b5']*is_rock
[ "def", "_get_site_amplification", "(", "self", ",", "sites", ",", "coeffs", ")", ":", "is_rock", "=", "self", ".", "get_site_type_dummy_variables", "(", "sites", ")", "return", "coeffs", "[", "'b5'", "]", "*", "is_rock" ]
Compute fourth term of equation (1) on p. 1200: ``b5 * S``
[ "Compute", "fourth", "term", "of", "equation", "(", "1", ")", "on", "p", ".", "1200", ":" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sharma_2009.py#L149-L156
gem/oq-engine
openquake/hazardlib/gsim/sharma_2009.py
SharmaEtAl2009._get_mechanism
def _get_mechanism(self, rup, coeffs): """ Compute fifth term of equation (1) on p. 1200: ``b6 * H`` """ is_strike_slip = self.get_fault_type_dummy_variables(rup) return coeffs['b6']*is_strike_slip
python
def _get_mechanism(self, rup, coeffs): is_strike_slip = self.get_fault_type_dummy_variables(rup) return coeffs['b6']*is_strike_slip
[ "def", "_get_mechanism", "(", "self", ",", "rup", ",", "coeffs", ")", ":", "is_strike_slip", "=", "self", ".", "get_fault_type_dummy_variables", "(", "rup", ")", "return", "coeffs", "[", "'b6'", "]", "*", "is_strike_slip" ]
Compute fifth term of equation (1) on p. 1200: ``b6 * H``
[ "Compute", "fifth", "term", "of", "equation", "(", "1", ")", "on", "p", ".", "1200", ":" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sharma_2009.py#L158-L165
gem/oq-engine
openquake/hazardlib/gsim/sharma_2009.py
SharmaEtAl2009.get_site_type_dummy_variables
def get_site_type_dummy_variables(self, sites): """ Binary rock/soil classification dummy variable based on sites.vs30. "``S`` is 1 for a rock site and 0 otherwise" (p. 1201). """ is_rock = np.array(sites.vs30 > self.NEHRP_BC_BOUNDARY) return is_rock
python
def get_site_type_dummy_variables(self, sites): is_rock = np.array(sites.vs30 > self.NEHRP_BC_BOUNDARY) return is_rock
[ "def", "get_site_type_dummy_variables", "(", "self", ",", "sites", ")", ":", "is_rock", "=", "np", ".", "array", "(", "sites", ".", "vs30", ">", "self", ".", "NEHRP_BC_BOUNDARY", ")", "return", "is_rock" ]
Binary rock/soil classification dummy variable based on sites.vs30. "``S`` is 1 for a rock site and 0 otherwise" (p. 1201).
[ "Binary", "rock", "/", "soil", "classification", "dummy", "variable", "based", "on", "sites", ".", "vs30", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sharma_2009.py#L167-L174
gem/oq-engine
openquake/hazardlib/gsim/sharma_2009.py
SharmaEtAl2009.get_fault_type_dummy_variables
def get_fault_type_dummy_variables(self, rup): """ Fault-type classification dummy variable based on rup.rake. "``H`` is 1 for a strike-slip mechanism and 0 for a reverse mechanism" (p. 1201). Note: UserWarning is raised if mechanism is determined to be normal faulting, since as summarized in Table 2 on p. 1197 the data used for regression included only reverse and stike-slip events. """ # normal faulting is_normal = np.array( self.RAKE_THRESH < -rup.rake < (180. - self.RAKE_THRESH)) # reverse raulting is_reverse = np.array( self.RAKE_THRESH < rup.rake < (180. - self.RAKE_THRESH)) if not self.ALREADY_WARNED and is_normal.any(): # make sure that the warning is printed only once to avoid # flooding the terminal msg = ('Normal faulting not supported by %s; ' 'treating as strike-slip' % type(self).__name__) warnings.warn(msg, UserWarning) self.ALREADY_WARNED = True is_strike_slip = ~is_reverse | is_normal is_strike_slip = is_strike_slip.astype(float) return is_strike_slip
python
def get_fault_type_dummy_variables(self, rup): is_normal = np.array( self.RAKE_THRESH < -rup.rake < (180. - self.RAKE_THRESH)) is_reverse = np.array( self.RAKE_THRESH < rup.rake < (180. - self.RAKE_THRESH)) if not self.ALREADY_WARNED and is_normal.any(): msg = ('Normal faulting not supported by %s; ' 'treating as strike-slip' % type(self).__name__) warnings.warn(msg, UserWarning) self.ALREADY_WARNED = True is_strike_slip = ~is_reverse | is_normal is_strike_slip = is_strike_slip.astype(float) return is_strike_slip
[ "def", "get_fault_type_dummy_variables", "(", "self", ",", "rup", ")", ":", "# normal faulting", "is_normal", "=", "np", ".", "array", "(", "self", ".", "RAKE_THRESH", "<", "-", "rup", ".", "rake", "<", "(", "180.", "-", "self", ".", "RAKE_THRESH", ")", ")", "# reverse raulting", "is_reverse", "=", "np", ".", "array", "(", "self", ".", "RAKE_THRESH", "<", "rup", ".", "rake", "<", "(", "180.", "-", "self", ".", "RAKE_THRESH", ")", ")", "if", "not", "self", ".", "ALREADY_WARNED", "and", "is_normal", ".", "any", "(", ")", ":", "# make sure that the warning is printed only once to avoid", "# flooding the terminal", "msg", "=", "(", "'Normal faulting not supported by %s; '", "'treating as strike-slip'", "%", "type", "(", "self", ")", ".", "__name__", ")", "warnings", ".", "warn", "(", "msg", ",", "UserWarning", ")", "self", ".", "ALREADY_WARNED", "=", "True", "is_strike_slip", "=", "~", "is_reverse", "|", "is_normal", "is_strike_slip", "=", "is_strike_slip", ".", "astype", "(", "float", ")", "return", "is_strike_slip" ]
Fault-type classification dummy variable based on rup.rake. "``H`` is 1 for a strike-slip mechanism and 0 for a reverse mechanism" (p. 1201). Note: UserWarning is raised if mechanism is determined to be normal faulting, since as summarized in Table 2 on p. 1197 the data used for regression included only reverse and stike-slip events.
[ "Fault", "-", "type", "classification", "dummy", "variable", "based", "on", "rup", ".", "rake", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sharma_2009.py#L176-L208
gem/oq-engine
openquake/hmtk/parsers/strain/strain_csv_parser.py
ReadStrainCsv.read_data
def read_data(self, scaling_factor=1E-9, strain_headers=None): ''' Reads the data from the csv file :param float scaling_factor: Scaling factor used for all strain values (default 1E-9 for nanostrain) :param list strain_headers: List of the variables in the file that correspond to strain parameters :returns: strain - Strain model as an instance of the :class: openquake.hmtk.strain.geodetic_strain.GeodeticStrain ''' if strain_headers: self.strain.data_variables = strain_headers else: self.strain.data_variables = STRAIN_VARIABLES datafile = open(self.filename, 'r') reader = csv.DictReader(datafile) self.strain.data = dict([(name, []) for name in reader.fieldnames]) for row in reader: for name in row.keys(): if 'region' in name.lower(): self.strain.data[name].append(row[name]) elif name in self.strain.data_variables: self.strain.data[name].append( scaling_factor * float(row[name])) else: self.strain.data[name].append(float(row[name])) for key in self.strain.data.keys(): if 'region' in key: self.strain.data[key] = np.array(self.strain.data[key], dtype='S13') else: self.strain.data[key] = np.array(self.strain.data[key]) self._check_invalid_longitudes() if 'region' not in self.strain.data: print('No tectonic regionalisation found in input file!') self.strain.data_variables = self.strain.data.keys() # Update data with secondary data (i.e. 2nd invariant, e1h, e2h etc. self.strain.get_secondary_strain_data() return self.strain
python
def read_data(self, scaling_factor=1E-9, strain_headers=None): if strain_headers: self.strain.data_variables = strain_headers else: self.strain.data_variables = STRAIN_VARIABLES datafile = open(self.filename, 'r') reader = csv.DictReader(datafile) self.strain.data = dict([(name, []) for name in reader.fieldnames]) for row in reader: for name in row.keys(): if 'region' in name.lower(): self.strain.data[name].append(row[name]) elif name in self.strain.data_variables: self.strain.data[name].append( scaling_factor * float(row[name])) else: self.strain.data[name].append(float(row[name])) for key in self.strain.data.keys(): if 'region' in key: self.strain.data[key] = np.array(self.strain.data[key], dtype='S13') else: self.strain.data[key] = np.array(self.strain.data[key]) self._check_invalid_longitudes() if 'region' not in self.strain.data: print('No tectonic regionalisation found in input file!') self.strain.data_variables = self.strain.data.keys() self.strain.get_secondary_strain_data() return self.strain
[ "def", "read_data", "(", "self", ",", "scaling_factor", "=", "1E-9", ",", "strain_headers", "=", "None", ")", ":", "if", "strain_headers", ":", "self", ".", "strain", ".", "data_variables", "=", "strain_headers", "else", ":", "self", ".", "strain", ".", "data_variables", "=", "STRAIN_VARIABLES", "datafile", "=", "open", "(", "self", ".", "filename", ",", "'r'", ")", "reader", "=", "csv", ".", "DictReader", "(", "datafile", ")", "self", ".", "strain", ".", "data", "=", "dict", "(", "[", "(", "name", ",", "[", "]", ")", "for", "name", "in", "reader", ".", "fieldnames", "]", ")", "for", "row", "in", "reader", ":", "for", "name", "in", "row", ".", "keys", "(", ")", ":", "if", "'region'", "in", "name", ".", "lower", "(", ")", ":", "self", ".", "strain", ".", "data", "[", "name", "]", ".", "append", "(", "row", "[", "name", "]", ")", "elif", "name", "in", "self", ".", "strain", ".", "data_variables", ":", "self", ".", "strain", ".", "data", "[", "name", "]", ".", "append", "(", "scaling_factor", "*", "float", "(", "row", "[", "name", "]", ")", ")", "else", ":", "self", ".", "strain", ".", "data", "[", "name", "]", ".", "append", "(", "float", "(", "row", "[", "name", "]", ")", ")", "for", "key", "in", "self", ".", "strain", ".", "data", ".", "keys", "(", ")", ":", "if", "'region'", "in", "key", ":", "self", ".", "strain", ".", "data", "[", "key", "]", "=", "np", ".", "array", "(", "self", ".", "strain", ".", "data", "[", "key", "]", ",", "dtype", "=", "'S13'", ")", "else", ":", "self", ".", "strain", ".", "data", "[", "key", "]", "=", "np", ".", "array", "(", "self", ".", "strain", ".", "data", "[", "key", "]", ")", "self", ".", "_check_invalid_longitudes", "(", ")", "if", "'region'", "not", "in", "self", ".", "strain", ".", "data", ":", "print", "(", "'No tectonic regionalisation found in input file!'", ")", "self", ".", "strain", ".", "data_variables", "=", "self", ".", "strain", ".", "data", ".", "keys", "(", ")", "# Update data with secondary data (i.e. 
2nd invariant, e1h, e2h etc.", "self", ".", "strain", ".", "get_secondary_strain_data", "(", ")", "return", "self", ".", "strain" ]
Reads the data from the csv file :param float scaling_factor: Scaling factor used for all strain values (default 1E-9 for nanostrain) :param list strain_headers: List of the variables in the file that correspond to strain parameters :returns: strain - Strain model as an instance of the :class: openquake.hmtk.strain.geodetic_strain.GeodeticStrain
[ "Reads", "the", "data", "from", "the", "csv", "file" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/parsers/strain/strain_csv_parser.py#L82-L132
gem/oq-engine
openquake/hmtk/parsers/strain/strain_csv_parser.py
ReadStrainCsv._check_invalid_longitudes
def _check_invalid_longitudes(self): ''' Checks to ensure that all longitudes are in the range -180. to 180 ''' idlon = self.strain.data['longitude'] > 180. if np.any(idlon): self.strain.data['longitude'][idlon] = \ self.strain.data['longitude'][idlon] - 360.
python
def _check_invalid_longitudes(self): idlon = self.strain.data['longitude'] > 180. if np.any(idlon): self.strain.data['longitude'][idlon] = \ self.strain.data['longitude'][idlon] - 360.
[ "def", "_check_invalid_longitudes", "(", "self", ")", ":", "idlon", "=", "self", ".", "strain", ".", "data", "[", "'longitude'", "]", ">", "180.", "if", "np", ".", "any", "(", "idlon", ")", ":", "self", ".", "strain", ".", "data", "[", "'longitude'", "]", "[", "idlon", "]", "=", "self", ".", "strain", ".", "data", "[", "'longitude'", "]", "[", "idlon", "]", "-", "360." ]
Checks to ensure that all longitudes are in the range -180. to 180
[ "Checks", "to", "ensure", "that", "all", "longitudes", "are", "in", "the", "range", "-", "180", ".", "to", "180" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/parsers/strain/strain_csv_parser.py#L134-L141
gem/oq-engine
openquake/hmtk/parsers/strain/strain_csv_parser.py
WriteStrainCsv.write_file
def write_file(self, strain, scaling_factor=1E-9): ''' Main writer function for the csv file :param strain: Instance of :class: openquake.hmtk.strain.geodetic_strain.GeodeticStrain :param float scaling_factor: Scaling factor used for all strain values (default 1E-9 for nanostrain) ''' if not isinstance(strain, GeodeticStrain): raise ValueError('Strain data must be instance of GeodeticStrain') for key in strain.data.keys(): if key in strain.data_variables: # Return strain value back to original scaling if key in ['longitude', 'latitude']: continue strain.data[key] = strain.data[key] / scaling_factor # Slice seismicity rates into separate dictionary vectors strain, output_variables = self.slice_rates_to_data(strain) outfile = open(self.filename, 'wt') print('Writing strain data to file %s' % self.filename) writer = csv.DictWriter(outfile, fieldnames=output_variables) writer.writeheader() for iloc in range(0, strain.get_number_observations()): row_dict = {} for key in output_variables: if len(strain.data[key]) > 0: # Ignores empty dictionary attributes row_dict[key] = strain.data[key][iloc] writer.writerow(row_dict) outfile.close() print('done!')
python
def write_file(self, strain, scaling_factor=1E-9): if not isinstance(strain, GeodeticStrain): raise ValueError('Strain data must be instance of GeodeticStrain') for key in strain.data.keys(): if key in strain.data_variables: if key in ['longitude', 'latitude']: continue strain.data[key] = strain.data[key] / scaling_factor strain, output_variables = self.slice_rates_to_data(strain) outfile = open(self.filename, 'wt') print('Writing strain data to file %s' % self.filename) writer = csv.DictWriter(outfile, fieldnames=output_variables) writer.writeheader() for iloc in range(0, strain.get_number_observations()): row_dict = {} for key in output_variables: if len(strain.data[key]) > 0: row_dict[key] = strain.data[key][iloc] writer.writerow(row_dict) outfile.close() print('done!')
[ "def", "write_file", "(", "self", ",", "strain", ",", "scaling_factor", "=", "1E-9", ")", ":", "if", "not", "isinstance", "(", "strain", ",", "GeodeticStrain", ")", ":", "raise", "ValueError", "(", "'Strain data must be instance of GeodeticStrain'", ")", "for", "key", "in", "strain", ".", "data", ".", "keys", "(", ")", ":", "if", "key", "in", "strain", ".", "data_variables", ":", "# Return strain value back to original scaling", "if", "key", "in", "[", "'longitude'", ",", "'latitude'", "]", ":", "continue", "strain", ".", "data", "[", "key", "]", "=", "strain", ".", "data", "[", "key", "]", "/", "scaling_factor", "# Slice seismicity rates into separate dictionary vectors", "strain", ",", "output_variables", "=", "self", ".", "slice_rates_to_data", "(", "strain", ")", "outfile", "=", "open", "(", "self", ".", "filename", ",", "'wt'", ")", "print", "(", "'Writing strain data to file %s'", "%", "self", ".", "filename", ")", "writer", "=", "csv", ".", "DictWriter", "(", "outfile", ",", "fieldnames", "=", "output_variables", ")", "writer", ".", "writeheader", "(", ")", "for", "iloc", "in", "range", "(", "0", ",", "strain", ".", "get_number_observations", "(", ")", ")", ":", "row_dict", "=", "{", "}", "for", "key", "in", "output_variables", ":", "if", "len", "(", "strain", ".", "data", "[", "key", "]", ")", ">", "0", ":", "# Ignores empty dictionary attributes", "row_dict", "[", "key", "]", "=", "strain", ".", "data", "[", "key", "]", "[", "iloc", "]", "writer", ".", "writerow", "(", "row_dict", ")", "outfile", ".", "close", "(", ")", "print", "(", "'done!'", ")" ]
Main writer function for the csv file :param strain: Instance of :class: openquake.hmtk.strain.geodetic_strain.GeodeticStrain :param float scaling_factor: Scaling factor used for all strain values (default 1E-9 for nanostrain)
[ "Main", "writer", "function", "for", "the", "csv", "file" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/parsers/strain/strain_csv_parser.py#L160-L196
gem/oq-engine
openquake/hmtk/parsers/strain/strain_csv_parser.py
WriteStrainCsv.slice_rates_to_data
def slice_rates_to_data(self, strain): ''' For the strain data, checks to see if seismicity rates have been calculated. If so, each column in the array is sliced and stored as a single vector in the strain.data dictionary with the corresponding magnitude as a key. :param strain: Instance of :class: openquake.hmtk.strain.geodetic_strain.GeodeticStrain :returns: strain - Instance of strain class with updated data dictionary output_variables - Updated list of headers ''' output_variables = list(strain.data) cond = (isinstance(strain.target_magnitudes, np.ndarray) or isinstance(strain.target_magnitudes, list)) if cond: magnitude_list = ['%.3f' % mag for mag in strain.target_magnitudes] else: return strain, output_variables # Ensure that the number of rows in the rate array corresponds to the # number of observations assert np.shape(strain.seismicity_rate)[0] == \ strain.get_number_observations() for iloc, magnitude in enumerate(magnitude_list): strain.data[magnitude] = strain.seismicity_rate[:, iloc] output_variables.extend(magnitude_list) return strain, output_variables
python
def slice_rates_to_data(self, strain): output_variables = list(strain.data) cond = (isinstance(strain.target_magnitudes, np.ndarray) or isinstance(strain.target_magnitudes, list)) if cond: magnitude_list = ['%.3f' % mag for mag in strain.target_magnitudes] else: return strain, output_variables assert np.shape(strain.seismicity_rate)[0] == \ strain.get_number_observations() for iloc, magnitude in enumerate(magnitude_list): strain.data[magnitude] = strain.seismicity_rate[:, iloc] output_variables.extend(magnitude_list) return strain, output_variables
[ "def", "slice_rates_to_data", "(", "self", ",", "strain", ")", ":", "output_variables", "=", "list", "(", "strain", ".", "data", ")", "cond", "=", "(", "isinstance", "(", "strain", ".", "target_magnitudes", ",", "np", ".", "ndarray", ")", "or", "isinstance", "(", "strain", ".", "target_magnitudes", ",", "list", ")", ")", "if", "cond", ":", "magnitude_list", "=", "[", "'%.3f'", "%", "mag", "for", "mag", "in", "strain", ".", "target_magnitudes", "]", "else", ":", "return", "strain", ",", "output_variables", "# Ensure that the number of rows in the rate array corresponds to the", "# number of observations", "assert", "np", ".", "shape", "(", "strain", ".", "seismicity_rate", ")", "[", "0", "]", "==", "strain", ".", "get_number_observations", "(", ")", "for", "iloc", ",", "magnitude", "in", "enumerate", "(", "magnitude_list", ")", ":", "strain", ".", "data", "[", "magnitude", "]", "=", "strain", ".", "seismicity_rate", "[", ":", ",", "iloc", "]", "output_variables", ".", "extend", "(", "magnitude_list", ")", "return", "strain", ",", "output_variables" ]
For the strain data, checks to see if seismicity rates have been calculated. If so, each column in the array is sliced and stored as a single vector in the strain.data dictionary with the corresponding magnitude as a key. :param strain: Instance of :class: openquake.hmtk.strain.geodetic_strain.GeodeticStrain :returns: strain - Instance of strain class with updated data dictionary output_variables - Updated list of headers
[ "For", "the", "strain", "data", "checks", "to", "see", "if", "seismicity", "rates", "have", "been", "calculated", ".", "If", "so", "each", "column", "in", "the", "array", "is", "sliced", "and", "stored", "as", "a", "single", "vector", "in", "the", "strain", ".", "data", "dictionary", "with", "the", "corresponding", "magnitude", "as", "a", "key", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/parsers/strain/strain_csv_parser.py#L198-L228
gem/oq-engine
openquake/baselib/__init__.py
read
def read(*paths, **validators): """ Load the configuration, make each section available in a separate dict. The configuration location can specified via an environment variable: - OQ_CONFIG_FILE In the absence of this environment variable the following paths will be used: - sys.prefix + /openquake.cfg when in a virtualenv - /etc/openquake/openquake.cfg outside of a virtualenv If those files are missing, the fallback is the source code: - openquake/engine/openquake.cfg Please note: settings in the site configuration file are overridden by settings with the same key names in the OQ_CONFIG_FILE openquake.cfg. """ paths = config.paths + list(paths) parser = configparser.ConfigParser() found = parser.read(os.path.normpath(os.path.expanduser(p)) for p in paths) if not found: raise IOError('No configuration file found in %s' % str(paths)) config.found = found config.clear() for section in parser.sections(): config[section] = sec = DotDict(parser.items(section)) for k, v in sec.items(): sec[k] = validators.get(k, lambda x: x)(v)
python
def read(*paths, **validators): paths = config.paths + list(paths) parser = configparser.ConfigParser() found = parser.read(os.path.normpath(os.path.expanduser(p)) for p in paths) if not found: raise IOError('No configuration file found in %s' % str(paths)) config.found = found config.clear() for section in parser.sections(): config[section] = sec = DotDict(parser.items(section)) for k, v in sec.items(): sec[k] = validators.get(k, lambda x: x)(v)
[ "def", "read", "(", "*", "paths", ",", "*", "*", "validators", ")", ":", "paths", "=", "config", ".", "paths", "+", "list", "(", "paths", ")", "parser", "=", "configparser", ".", "ConfigParser", "(", ")", "found", "=", "parser", ".", "read", "(", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "expanduser", "(", "p", ")", ")", "for", "p", "in", "paths", ")", "if", "not", "found", ":", "raise", "IOError", "(", "'No configuration file found in %s'", "%", "str", "(", "paths", ")", ")", "config", ".", "found", "=", "found", "config", ".", "clear", "(", ")", "for", "section", "in", "parser", ".", "sections", "(", ")", ":", "config", "[", "section", "]", "=", "sec", "=", "DotDict", "(", "parser", ".", "items", "(", "section", ")", ")", "for", "k", ",", "v", "in", "sec", ".", "items", "(", ")", ":", "sec", "[", "k", "]", "=", "validators", ".", "get", "(", "k", ",", "lambda", "x", ":", "x", ")", "(", "v", ")" ]
Load the configuration, make each section available in a separate dict. The configuration location can specified via an environment variable: - OQ_CONFIG_FILE In the absence of this environment variable the following paths will be used: - sys.prefix + /openquake.cfg when in a virtualenv - /etc/openquake/openquake.cfg outside of a virtualenv If those files are missing, the fallback is the source code: - openquake/engine/openquake.cfg Please note: settings in the site configuration file are overridden by settings with the same key names in the OQ_CONFIG_FILE openquake.cfg.
[ "Load", "the", "configuration", "make", "each", "section", "available", "in", "a", "separate", "dict", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/__init__.py#L56-L84
gem/oq-engine
openquake/baselib/__init__.py
boolean
def boolean(flag): """ Convert string in boolean """ s = flag.lower() if s in ('1', 'yes', 'true'): return True elif s in ('0', 'no', 'false'): return False raise ValueError('Unknown flag %r' % s)
python
def boolean(flag): s = flag.lower() if s in ('1', 'yes', 'true'): return True elif s in ('0', 'no', 'false'): return False raise ValueError('Unknown flag %r' % s)
[ "def", "boolean", "(", "flag", ")", ":", "s", "=", "flag", ".", "lower", "(", ")", "if", "s", "in", "(", "'1'", ",", "'yes'", ",", "'true'", ")", ":", "return", "True", "elif", "s", "in", "(", "'0'", ",", "'no'", ",", "'false'", ")", ":", "return", "False", "raise", "ValueError", "(", "'Unknown flag %r'", "%", "s", ")" ]
Convert string in boolean
[ "Convert", "string", "in", "boolean" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/__init__.py#L90-L99
gem/oq-engine
openquake/hazardlib/gsim/atkinson_boore_2006.py
AtkinsonBoore2006.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. """ mean = self._get_mean(sites.vs30, rup.mag, dists.rrup, imt, scale_fac=0) stddevs = self._get_stddevs(stddev_types, num_sites=sites.vs30.size) return mean, stddevs
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): mean = self._get_mean(sites.vs30, rup.mag, dists.rrup, imt, scale_fac=0) stddevs = self._get_stddevs(stddev_types, num_sites=sites.vs30.size) return mean, stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "mean", "=", "self", ".", "_get_mean", "(", "sites", ".", "vs30", ",", "rup", ".", "mag", ",", "dists", ".", "rrup", ",", "imt", ",", "scale_fac", "=", "0", ")", "stddevs", "=", "self", ".", "_get_stddevs", "(", "stddev_types", ",", "num_sites", "=", "sites", ".", "vs30", ".", "size", ")", "return", "mean", ",", "stddevs" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/atkinson_boore_2006.py#L98-L107
gem/oq-engine
openquake/hazardlib/gsim/atkinson_boore_2006.py
AtkinsonBoore2006._get_mean
def _get_mean(self, vs30, mag, rrup, imt, scale_fac): """ Compute and return mean """ C_HR, C_BC, C_SR, SC = self._extract_coeffs(imt) rrup = self._clip_distances(rrup) f0 = self._compute_f0_factor(rrup) f1 = self._compute_f1_factor(rrup) f2 = self._compute_f2_factor(rrup) pga_bc = self._get_pga_bc( f0, f1, f2, SC, mag, rrup, vs30, scale_fac ) # compute mean values for hard-rock sites (vs30 >= 2000), # and non-hard-rock sites (vs30 < 2000) and add soil amplification # term mean = np.zeros_like(vs30) self._compute_mean(C_HR, f0, f1, f2, SC, mag, rrup, vs30 >= 2000.0, mean, scale_fac) self._compute_mean(C_BC, f0, f1, f2, SC, mag, rrup, vs30 < 2000.0, mean, scale_fac) self._compute_soil_amplification(C_SR, vs30, pga_bc, mean) # convert from base 10 to base e if imt == PGV(): mean = np.log(10 ** mean) else: # convert from cm/s**2 to g mean = np.log((10 ** mean) * 1e-2 / g) return mean
python
def _get_mean(self, vs30, mag, rrup, imt, scale_fac): C_HR, C_BC, C_SR, SC = self._extract_coeffs(imt) rrup = self._clip_distances(rrup) f0 = self._compute_f0_factor(rrup) f1 = self._compute_f1_factor(rrup) f2 = self._compute_f2_factor(rrup) pga_bc = self._get_pga_bc( f0, f1, f2, SC, mag, rrup, vs30, scale_fac ) mean = np.zeros_like(vs30) self._compute_mean(C_HR, f0, f1, f2, SC, mag, rrup, vs30 >= 2000.0, mean, scale_fac) self._compute_mean(C_BC, f0, f1, f2, SC, mag, rrup, vs30 < 2000.0, mean, scale_fac) self._compute_soil_amplification(C_SR, vs30, pga_bc, mean) if imt == PGV(): mean = np.log(10 ** mean) else: mean = np.log((10 ** mean) * 1e-2 / g) return mean
[ "def", "_get_mean", "(", "self", ",", "vs30", ",", "mag", ",", "rrup", ",", "imt", ",", "scale_fac", ")", ":", "C_HR", ",", "C_BC", ",", "C_SR", ",", "SC", "=", "self", ".", "_extract_coeffs", "(", "imt", ")", "rrup", "=", "self", ".", "_clip_distances", "(", "rrup", ")", "f0", "=", "self", ".", "_compute_f0_factor", "(", "rrup", ")", "f1", "=", "self", ".", "_compute_f1_factor", "(", "rrup", ")", "f2", "=", "self", ".", "_compute_f2_factor", "(", "rrup", ")", "pga_bc", "=", "self", ".", "_get_pga_bc", "(", "f0", ",", "f1", ",", "f2", ",", "SC", ",", "mag", ",", "rrup", ",", "vs30", ",", "scale_fac", ")", "# compute mean values for hard-rock sites (vs30 >= 2000),", "# and non-hard-rock sites (vs30 < 2000) and add soil amplification", "# term", "mean", "=", "np", ".", "zeros_like", "(", "vs30", ")", "self", ".", "_compute_mean", "(", "C_HR", ",", "f0", ",", "f1", ",", "f2", ",", "SC", ",", "mag", ",", "rrup", ",", "vs30", ">=", "2000.0", ",", "mean", ",", "scale_fac", ")", "self", ".", "_compute_mean", "(", "C_BC", ",", "f0", ",", "f1", ",", "f2", ",", "SC", ",", "mag", ",", "rrup", ",", "vs30", "<", "2000.0", ",", "mean", ",", "scale_fac", ")", "self", ".", "_compute_soil_amplification", "(", "C_SR", ",", "vs30", ",", "pga_bc", ",", "mean", ")", "# convert from base 10 to base e", "if", "imt", "==", "PGV", "(", ")", ":", "mean", "=", "np", ".", "log", "(", "10", "**", "mean", ")", "else", ":", "# convert from cm/s**2 to g", "mean", "=", "np", ".", "log", "(", "(", "10", "**", "mean", ")", "*", "1e-2", "/", "g", ")", "return", "mean" ]
Compute and return mean
[ "Compute", "and", "return", "mean" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/atkinson_boore_2006.py#L109-L142
gem/oq-engine
openquake/hazardlib/gsim/atkinson_boore_2006.py
AtkinsonBoore2006._get_pga_bc
def _get_pga_bc(self, f0, f1, f2, SC, mag, rrup, vs30, scale_fac): """ Compute and return PGA on BC boundary """ pga_bc = np.zeros_like(vs30) self._compute_mean(self.COEFFS_BC[PGA()], f0, f1, f2, SC, mag, rrup, vs30 < 2000.0, pga_bc, scale_fac) return (10 ** pga_bc) * 1e-2 / g
python
def _get_pga_bc(self, f0, f1, f2, SC, mag, rrup, vs30, scale_fac): pga_bc = np.zeros_like(vs30) self._compute_mean(self.COEFFS_BC[PGA()], f0, f1, f2, SC, mag, rrup, vs30 < 2000.0, pga_bc, scale_fac) return (10 ** pga_bc) * 1e-2 / g
[ "def", "_get_pga_bc", "(", "self", ",", "f0", ",", "f1", ",", "f2", ",", "SC", ",", "mag", ",", "rrup", ",", "vs30", ",", "scale_fac", ")", ":", "pga_bc", "=", "np", ".", "zeros_like", "(", "vs30", ")", "self", ".", "_compute_mean", "(", "self", ".", "COEFFS_BC", "[", "PGA", "(", ")", "]", ",", "f0", ",", "f1", ",", "f2", ",", "SC", ",", "mag", ",", "rrup", ",", "vs30", "<", "2000.0", ",", "pga_bc", ",", "scale_fac", ")", "return", "(", "10", "**", "pga_bc", ")", "*", "1e-2", "/", "g" ]
Compute and return PGA on BC boundary
[ "Compute", "and", "return", "PGA", "on", "BC", "boundary" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/atkinson_boore_2006.py#L144-L152
gem/oq-engine
openquake/hazardlib/gsim/atkinson_boore_2006.py
AtkinsonBoore2006._extract_coeffs
def _extract_coeffs(self, imt): """ Extract dictionaries of coefficients specific to required intensity measure type. """ C_HR = self.COEFFS_HARD_ROCK[imt] C_BC = self.COEFFS_BC[imt] C_SR = self.COEFFS_SOIL_RESPONSE[imt] SC = self.COEFFS_STRESS[imt] return C_HR, C_BC, C_SR, SC
python
def _extract_coeffs(self, imt): C_HR = self.COEFFS_HARD_ROCK[imt] C_BC = self.COEFFS_BC[imt] C_SR = self.COEFFS_SOIL_RESPONSE[imt] SC = self.COEFFS_STRESS[imt] return C_HR, C_BC, C_SR, SC
[ "def", "_extract_coeffs", "(", "self", ",", "imt", ")", ":", "C_HR", "=", "self", ".", "COEFFS_HARD_ROCK", "[", "imt", "]", "C_BC", "=", "self", ".", "COEFFS_BC", "[", "imt", "]", "C_SR", "=", "self", ".", "COEFFS_SOIL_RESPONSE", "[", "imt", "]", "SC", "=", "self", ".", "COEFFS_STRESS", "[", "imt", "]", "return", "C_HR", ",", "C_BC", ",", "C_SR", ",", "SC" ]
Extract dictionaries of coefficients specific to required intensity measure type.
[ "Extract", "dictionaries", "of", "coefficients", "specific", "to", "required", "intensity", "measure", "type", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/atkinson_boore_2006.py#L154-L164
gem/oq-engine
openquake/hazardlib/gsim/atkinson_boore_2006.py
AtkinsonBoore2006._compute_f0_factor
def _compute_f0_factor(self, rrup): """ Compute and return factor f0 - see equation (5), 6th term, p. 2191. """ # f0 = max(log10(R0/rrup),0) f0 = np.log10(self.COEFFS_IMT_INDEPENDENT['R0'] / rrup) f0[f0 < 0] = 0.0 return f0
python
def _compute_f0_factor(self, rrup): f0 = np.log10(self.COEFFS_IMT_INDEPENDENT['R0'] / rrup) f0[f0 < 0] = 0.0 return f0
[ "def", "_compute_f0_factor", "(", "self", ",", "rrup", ")", ":", "# f0 = max(log10(R0/rrup),0)", "f0", "=", "np", ".", "log10", "(", "self", ".", "COEFFS_IMT_INDEPENDENT", "[", "'R0'", "]", "/", "rrup", ")", "f0", "[", "f0", "<", "0", "]", "=", "0.0", "return", "f0" ]
Compute and return factor f0 - see equation (5), 6th term, p. 2191.
[ "Compute", "and", "return", "factor", "f0", "-", "see", "equation", "(", "5", ")", "6th", "term", "p", ".", "2191", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/atkinson_boore_2006.py#L178-L186
gem/oq-engine
openquake/hazardlib/gsim/atkinson_boore_2006.py
AtkinsonBoore2006._compute_f1_factor
def _compute_f1_factor(self, rrup): """ Compute and return factor f1 - see equation (5), 4th term, p. 2191 """ # f1 = min(log10(rrup),log10(R1)) f1 = np.log10(rrup) logR1 = np.log10(self.COEFFS_IMT_INDEPENDENT['R1']) f1[f1 > logR1] = logR1 return f1
python
def _compute_f1_factor(self, rrup): f1 = np.log10(rrup) logR1 = np.log10(self.COEFFS_IMT_INDEPENDENT['R1']) f1[f1 > logR1] = logR1 return f1
[ "def", "_compute_f1_factor", "(", "self", ",", "rrup", ")", ":", "# f1 = min(log10(rrup),log10(R1))", "f1", "=", "np", ".", "log10", "(", "rrup", ")", "logR1", "=", "np", ".", "log10", "(", "self", ".", "COEFFS_IMT_INDEPENDENT", "[", "'R1'", "]", ")", "f1", "[", "f1", ">", "logR1", "]", "=", "logR1", "return", "f1" ]
Compute and return factor f1 - see equation (5), 4th term, p. 2191
[ "Compute", "and", "return", "factor", "f1", "-", "see", "equation", "(", "5", ")", "4th", "term", "p", ".", "2191" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/atkinson_boore_2006.py#L188-L197
gem/oq-engine
openquake/hazardlib/gsim/atkinson_boore_2006.py
AtkinsonBoore2006._compute_f2_factor
def _compute_f2_factor(self, rrup): """ Compute and return factor f2, see equation (5), 5th term, pag 2191 """ # f2 = max(log10(rrup/R2),0) f2 = np.log10(rrup / self.COEFFS_IMT_INDEPENDENT['R2']) f2[f2 < 0] = 0.0 return f2
python
def _compute_f2_factor(self, rrup): f2 = np.log10(rrup / self.COEFFS_IMT_INDEPENDENT['R2']) f2[f2 < 0] = 0.0 return f2
[ "def", "_compute_f2_factor", "(", "self", ",", "rrup", ")", ":", "# f2 = max(log10(rrup/R2),0)", "f2", "=", "np", ".", "log10", "(", "rrup", "/", "self", ".", "COEFFS_IMT_INDEPENDENT", "[", "'R2'", "]", ")", "f2", "[", "f2", "<", "0", "]", "=", "0.0", "return", "f2" ]
Compute and return factor f2, see equation (5), 5th term, pag 2191
[ "Compute", "and", "return", "factor", "f2", "see", "equation", "(", "5", ")", "5th", "term", "pag", "2191" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/atkinson_boore_2006.py#L199-L207
gem/oq-engine
openquake/hazardlib/gsim/atkinson_boore_2006.py
AtkinsonBoore2006._compute_stress_drop_adjustment
def _compute_stress_drop_adjustment(self, SC, mag, scale_fac): """ Compute equation (6) p. 2200 """ return scale_fac * np.minimum( SC['delta'] + 0.05, 0.05 + SC['delta'] * ( np.maximum(mag - SC['M1'], 0) / (SC['Mh'] - SC['M1']) ) )
python
def _compute_stress_drop_adjustment(self, SC, mag, scale_fac): return scale_fac * np.minimum( SC['delta'] + 0.05, 0.05 + SC['delta'] * ( np.maximum(mag - SC['M1'], 0) / (SC['Mh'] - SC['M1']) ) )
[ "def", "_compute_stress_drop_adjustment", "(", "self", ",", "SC", ",", "mag", ",", "scale_fac", ")", ":", "return", "scale_fac", "*", "np", ".", "minimum", "(", "SC", "[", "'delta'", "]", "+", "0.05", ",", "0.05", "+", "SC", "[", "'delta'", "]", "*", "(", "np", ".", "maximum", "(", "mag", "-", "SC", "[", "'M1'", "]", ",", "0", ")", "/", "(", "SC", "[", "'Mh'", "]", "-", "SC", "[", "'M1'", "]", ")", ")", ")" ]
Compute equation (6) p. 2200
[ "Compute", "equation", "(", "6", ")", "p", ".", "2200" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/atkinson_boore_2006.py#L209-L218
gem/oq-engine
openquake/hazardlib/gsim/atkinson_boore_2006.py
AtkinsonBoore2006._compute_mean
def _compute_mean(self, C, f0, f1, f2, SC, mag, rrup, idxs, mean, scale_fac): """ Compute mean value (for a set of indexes) without site amplification terms. This is equation (5), p. 2191, without S term. """ mean[idxs] = (C['c1'] + C['c2'] * mag + C['c3'] * (mag ** 2) + (C['c4'] + C['c5'] * mag) * f1[idxs] + (C['c6'] + C['c7'] * mag) * f2[idxs] + (C['c8'] + C['c9'] * mag) * f0[idxs] + C['c10'] * rrup[idxs] + self._compute_stress_drop_adjustment(SC, mag, scale_fac))
python
def _compute_mean(self, C, f0, f1, f2, SC, mag, rrup, idxs, mean, scale_fac): mean[idxs] = (C['c1'] + C['c2'] * mag + C['c3'] * (mag ** 2) + (C['c4'] + C['c5'] * mag) * f1[idxs] + (C['c6'] + C['c7'] * mag) * f2[idxs] + (C['c8'] + C['c9'] * mag) * f0[idxs] + C['c10'] * rrup[idxs] + self._compute_stress_drop_adjustment(SC, mag, scale_fac))
[ "def", "_compute_mean", "(", "self", ",", "C", ",", "f0", ",", "f1", ",", "f2", ",", "SC", ",", "mag", ",", "rrup", ",", "idxs", ",", "mean", ",", "scale_fac", ")", ":", "mean", "[", "idxs", "]", "=", "(", "C", "[", "'c1'", "]", "+", "C", "[", "'c2'", "]", "*", "mag", "+", "C", "[", "'c3'", "]", "*", "(", "mag", "**", "2", ")", "+", "(", "C", "[", "'c4'", "]", "+", "C", "[", "'c5'", "]", "*", "mag", ")", "*", "f1", "[", "idxs", "]", "+", "(", "C", "[", "'c6'", "]", "+", "C", "[", "'c7'", "]", "*", "mag", ")", "*", "f2", "[", "idxs", "]", "+", "(", "C", "[", "'c8'", "]", "+", "C", "[", "'c9'", "]", "*", "mag", ")", "*", "f0", "[", "idxs", "]", "+", "C", "[", "'c10'", "]", "*", "rrup", "[", "idxs", "]", "+", "self", ".", "_compute_stress_drop_adjustment", "(", "SC", ",", "mag", ",", "scale_fac", ")", ")" ]
Compute mean value (for a set of indexes) without site amplification terms. This is equation (5), p. 2191, without S term.
[ "Compute", "mean", "value", "(", "for", "a", "set", "of", "indexes", ")", "without", "site", "amplification", "terms", ".", "This", "is", "equation", "(", "5", ")", "p", ".", "2191", "without", "S", "term", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/atkinson_boore_2006.py#L220-L233
gem/oq-engine
openquake/hazardlib/gsim/atkinson_boore_2006.py
AtkinsonBoore2006._compute_soil_amplification
def _compute_soil_amplification(self, C, vs30, pga_bc, mean): """ Compute soil amplification, that is S term in equation (5), p. 2191, and add to mean values for non hard rock sites. """ # convert from base e (as defined in BA2008) to base 10 (as used in # AB2006) sal = np.log10(np.exp(self._get_site_amplification_linear(vs30, C))) sanl = np.log10(np.exp( self._get_site_amplification_non_linear(vs30, pga_bc, C))) idxs = vs30 < 2000.0 mean[idxs] = mean[idxs] + sal[idxs] + sanl[idxs]
python
def _compute_soil_amplification(self, C, vs30, pga_bc, mean): sal = np.log10(np.exp(self._get_site_amplification_linear(vs30, C))) sanl = np.log10(np.exp( self._get_site_amplification_non_linear(vs30, pga_bc, C))) idxs = vs30 < 2000.0 mean[idxs] = mean[idxs] + sal[idxs] + sanl[idxs]
[ "def", "_compute_soil_amplification", "(", "self", ",", "C", ",", "vs30", ",", "pga_bc", ",", "mean", ")", ":", "# convert from base e (as defined in BA2008) to base 10 (as used in", "# AB2006)", "sal", "=", "np", ".", "log10", "(", "np", ".", "exp", "(", "self", ".", "_get_site_amplification_linear", "(", "vs30", ",", "C", ")", ")", ")", "sanl", "=", "np", ".", "log10", "(", "np", ".", "exp", "(", "self", ".", "_get_site_amplification_non_linear", "(", "vs30", ",", "pga_bc", ",", "C", ")", ")", ")", "idxs", "=", "vs30", "<", "2000.0", "mean", "[", "idxs", "]", "=", "mean", "[", "idxs", "]", "+", "sal", "[", "idxs", "]", "+", "sanl", "[", "idxs", "]" ]
Compute soil amplification, that is S term in equation (5), p. 2191, and add to mean values for non hard rock sites.
[ "Compute", "soil", "amplification", "that", "is", "S", "term", "in", "equation", "(", "5", ")", "p", ".", "2191", "and", "add", "to", "mean", "values", "for", "non", "hard", "rock", "sites", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/atkinson_boore_2006.py#L235-L247
gem/oq-engine
openquake/hazardlib/gsim/atkinson_boore_2006.py
AtkinsonBoore2006Modified2011._get_stress_drop_scaling_factor
def _get_stress_drop_scaling_factor(self, magnitude): """ Returns the magnitude dependent stress drop scaling factor defined in equation 6 (page 1128) of Atkinson & Boore (2011) """ stress_drop = 10.0 ** (3.45 - 0.2 * magnitude) cap = 10.0 ** (3.45 - 0.2 * 5.0) if stress_drop > cap: stress_drop = cap return log10(stress_drop / 140.0) / log10(2.0)
python
def _get_stress_drop_scaling_factor(self, magnitude): stress_drop = 10.0 ** (3.45 - 0.2 * magnitude) cap = 10.0 ** (3.45 - 0.2 * 5.0) if stress_drop > cap: stress_drop = cap return log10(stress_drop / 140.0) / log10(2.0)
[ "def", "_get_stress_drop_scaling_factor", "(", "self", ",", "magnitude", ")", ":", "stress_drop", "=", "10.0", "**", "(", "3.45", "-", "0.2", "*", "magnitude", ")", "cap", "=", "10.0", "**", "(", "3.45", "-", "0.2", "*", "5.0", ")", "if", "stress_drop", ">", "cap", ":", "stress_drop", "=", "cap", "return", "log10", "(", "stress_drop", "/", "140.0", ")", "/", "log10", "(", "2.0", ")" ]
Returns the magnitude dependent stress drop scaling factor defined in equation 6 (page 1128) of Atkinson & Boore (2011)
[ "Returns", "the", "magnitude", "dependent", "stress", "drop", "scaling", "factor", "defined", "in", "equation", "6", "(", "page", "1128", ")", "of", "Atkinson", "&", "Boore", "(", "2011", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/atkinson_boore_2006.py#L522-L531
gem/oq-engine
openquake/hazardlib/gsim/utils.py
clip_mean
def clip_mean(imt, mean): """ Clip GMPE mean value at 1.5 g for PGA and 3 g for short periods (0.02 < T < 0.55) """ if imt.period == 0: mean[mean > 0.405] = 0.405 if 0.02 < imt.period < 0.55: mean[mean > 1.099] = 1.099 return mean
python
def clip_mean(imt, mean): if imt.period == 0: mean[mean > 0.405] = 0.405 if 0.02 < imt.period < 0.55: mean[mean > 1.099] = 1.099 return mean
[ "def", "clip_mean", "(", "imt", ",", "mean", ")", ":", "if", "imt", ".", "period", "==", "0", ":", "mean", "[", "mean", ">", "0.405", "]", "=", "0.405", "if", "0.02", "<", "imt", ".", "period", "<", "0.55", ":", "mean", "[", "mean", ">", "1.099", "]", "=", "1.099", "return", "mean" ]
Clip GMPE mean value at 1.5 g for PGA and 3 g for short periods (0.02 < T < 0.55)
[ "Clip", "GMPE", "mean", "value", "at", "1", ".", "5", "g", "for", "PGA", "and", "3", "g", "for", "short", "periods", "(", "0", ".", "02", "<", "T", "<", "0", ".", "55", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/utils.py#L45-L56
gem/oq-engine
openquake/calculators/getters.py
gen_rupture_getters
def gen_rupture_getters(dstore, slc=slice(None), concurrent_tasks=1, hdf5cache=None): """ :yields: RuptureGetters """ if dstore.parent: dstore = dstore.parent csm_info = dstore['csm_info'] trt_by_grp = csm_info.grp_by("trt") samples = csm_info.get_samples_by_grp() rlzs_by_gsim = csm_info.get_rlzs_by_gsim_grp() rup_array = dstore['ruptures'][slc] maxweight = numpy.ceil(len(rup_array) / (concurrent_tasks or 1)) nr, ne = 0, 0 for grp_id, arr in general.group_array(rup_array, 'grp_id').items(): if not rlzs_by_gsim[grp_id]: # this may happen if a source model has no sources, like # in event_based_risk/case_3 continue for block in general.block_splitter(arr, maxweight): rgetter = RuptureGetter( hdf5cache or dstore.filename, numpy.array(block), grp_id, trt_by_grp[grp_id], samples[grp_id], rlzs_by_gsim[grp_id]) rgetter.weight = getattr(block, 'weight', len(block)) yield rgetter nr += len(block) ne += rgetter.num_events logging.info('Read %d ruptures and %d events', nr, ne)
python
def gen_rupture_getters(dstore, slc=slice(None), concurrent_tasks=1, hdf5cache=None): if dstore.parent: dstore = dstore.parent csm_info = dstore['csm_info'] trt_by_grp = csm_info.grp_by("trt") samples = csm_info.get_samples_by_grp() rlzs_by_gsim = csm_info.get_rlzs_by_gsim_grp() rup_array = dstore['ruptures'][slc] maxweight = numpy.ceil(len(rup_array) / (concurrent_tasks or 1)) nr, ne = 0, 0 for grp_id, arr in general.group_array(rup_array, 'grp_id').items(): if not rlzs_by_gsim[grp_id]: continue for block in general.block_splitter(arr, maxweight): rgetter = RuptureGetter( hdf5cache or dstore.filename, numpy.array(block), grp_id, trt_by_grp[grp_id], samples[grp_id], rlzs_by_gsim[grp_id]) rgetter.weight = getattr(block, 'weight', len(block)) yield rgetter nr += len(block) ne += rgetter.num_events logging.info('Read %d ruptures and %d events', nr, ne)
[ "def", "gen_rupture_getters", "(", "dstore", ",", "slc", "=", "slice", "(", "None", ")", ",", "concurrent_tasks", "=", "1", ",", "hdf5cache", "=", "None", ")", ":", "if", "dstore", ".", "parent", ":", "dstore", "=", "dstore", ".", "parent", "csm_info", "=", "dstore", "[", "'csm_info'", "]", "trt_by_grp", "=", "csm_info", ".", "grp_by", "(", "\"trt\"", ")", "samples", "=", "csm_info", ".", "get_samples_by_grp", "(", ")", "rlzs_by_gsim", "=", "csm_info", ".", "get_rlzs_by_gsim_grp", "(", ")", "rup_array", "=", "dstore", "[", "'ruptures'", "]", "[", "slc", "]", "maxweight", "=", "numpy", ".", "ceil", "(", "len", "(", "rup_array", ")", "/", "(", "concurrent_tasks", "or", "1", ")", ")", "nr", ",", "ne", "=", "0", ",", "0", "for", "grp_id", ",", "arr", "in", "general", ".", "group_array", "(", "rup_array", ",", "'grp_id'", ")", ".", "items", "(", ")", ":", "if", "not", "rlzs_by_gsim", "[", "grp_id", "]", ":", "# this may happen if a source model has no sources, like", "# in event_based_risk/case_3", "continue", "for", "block", "in", "general", ".", "block_splitter", "(", "arr", ",", "maxweight", ")", ":", "rgetter", "=", "RuptureGetter", "(", "hdf5cache", "or", "dstore", ".", "filename", ",", "numpy", ".", "array", "(", "block", ")", ",", "grp_id", ",", "trt_by_grp", "[", "grp_id", "]", ",", "samples", "[", "grp_id", "]", ",", "rlzs_by_gsim", "[", "grp_id", "]", ")", "rgetter", ".", "weight", "=", "getattr", "(", "block", ",", "'weight'", ",", "len", "(", "block", ")", ")", "yield", "rgetter", "nr", "+=", "len", "(", "block", ")", "ne", "+=", "rgetter", ".", "num_events", "logging", ".", "info", "(", "'Read %d ruptures and %d events'", ",", "nr", ",", "ne", ")" ]
:yields: RuptureGetters
[ ":", "yields", ":", "RuptureGetters" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L433-L460
gem/oq-engine
openquake/calculators/getters.py
get_maxloss_rupture
def get_maxloss_rupture(dstore, loss_type): """ :param dstore: a DataStore instance :param loss_type: a loss type string :returns: EBRupture instance corresponding to the maximum loss for the given loss type """ lti = dstore['oqparam'].lti[loss_type] ridx = dstore.get_attr('rup_loss_table', 'ridx')[lti] [rgetter] = gen_rupture_getters(dstore, slice(ridx, ridx + 1)) [ebr] = rgetter.get_ruptures() return ebr
python
def get_maxloss_rupture(dstore, loss_type): lti = dstore['oqparam'].lti[loss_type] ridx = dstore.get_attr('rup_loss_table', 'ridx')[lti] [rgetter] = gen_rupture_getters(dstore, slice(ridx, ridx + 1)) [ebr] = rgetter.get_ruptures() return ebr
[ "def", "get_maxloss_rupture", "(", "dstore", ",", "loss_type", ")", ":", "lti", "=", "dstore", "[", "'oqparam'", "]", ".", "lti", "[", "loss_type", "]", "ridx", "=", "dstore", ".", "get_attr", "(", "'rup_loss_table'", ",", "'ridx'", ")", "[", "lti", "]", "[", "rgetter", "]", "=", "gen_rupture_getters", "(", "dstore", ",", "slice", "(", "ridx", ",", "ridx", "+", "1", ")", ")", "[", "ebr", "]", "=", "rgetter", ".", "get_ruptures", "(", ")", "return", "ebr" ]
:param dstore: a DataStore instance :param loss_type: a loss type string :returns: EBRupture instance corresponding to the maximum loss for the given loss type
[ ":", "param", "dstore", ":", "a", "DataStore", "instance", ":", "param", "loss_type", ":", "a", "loss", "type", "string", ":", "returns", ":", "EBRupture", "instance", "corresponding", "to", "the", "maximum", "loss", "for", "the", "given", "loss", "type" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L463-L475
gem/oq-engine
openquake/calculators/getters.py
PmapGetter.init
def init(self): """ Read the poes and set the .data attribute with the hazard curves """ if hasattr(self, 'data'): # already initialized return if isinstance(self.dstore, str): self.dstore = hdf5.File(self.dstore, 'r') else: self.dstore.open('r') # if not if self.sids is None: self.sids = self.dstore['sitecol'].sids oq = self.dstore['oqparam'] self.imtls = oq.imtls self.poes = self.poes or oq.poes self.data = {} try: hcurves = self.get_hcurves(self.imtls) # shape (R, N) except IndexError: # no data return for sid, hcurve_by_rlz in zip(self.sids, hcurves.T): self.data[sid] = datadict = {} for rlzi, hcurve in enumerate(hcurve_by_rlz): datadict[rlzi] = lst = [None for imt in self.imtls] for imti, imt in enumerate(self.imtls): lst[imti] = hcurve[imt]
python
def init(self): if hasattr(self, 'data'): return if isinstance(self.dstore, str): self.dstore = hdf5.File(self.dstore, 'r') else: self.dstore.open('r') if self.sids is None: self.sids = self.dstore['sitecol'].sids oq = self.dstore['oqparam'] self.imtls = oq.imtls self.poes = self.poes or oq.poes self.data = {} try: hcurves = self.get_hcurves(self.imtls) except IndexError: return for sid, hcurve_by_rlz in zip(self.sids, hcurves.T): self.data[sid] = datadict = {} for rlzi, hcurve in enumerate(hcurve_by_rlz): datadict[rlzi] = lst = [None for imt in self.imtls] for imti, imt in enumerate(self.imtls): lst[imti] = hcurve[imt]
[ "def", "init", "(", "self", ")", ":", "if", "hasattr", "(", "self", ",", "'data'", ")", ":", "# already initialized", "return", "if", "isinstance", "(", "self", ".", "dstore", ",", "str", ")", ":", "self", ".", "dstore", "=", "hdf5", ".", "File", "(", "self", ".", "dstore", ",", "'r'", ")", "else", ":", "self", ".", "dstore", ".", "open", "(", "'r'", ")", "# if not", "if", "self", ".", "sids", "is", "None", ":", "self", ".", "sids", "=", "self", ".", "dstore", "[", "'sitecol'", "]", ".", "sids", "oq", "=", "self", ".", "dstore", "[", "'oqparam'", "]", "self", ".", "imtls", "=", "oq", ".", "imtls", "self", ".", "poes", "=", "self", ".", "poes", "or", "oq", ".", "poes", "self", ".", "data", "=", "{", "}", "try", ":", "hcurves", "=", "self", ".", "get_hcurves", "(", "self", ".", "imtls", ")", "# shape (R, N)", "except", "IndexError", ":", "# no data", "return", "for", "sid", ",", "hcurve_by_rlz", "in", "zip", "(", "self", ".", "sids", ",", "hcurves", ".", "T", ")", ":", "self", ".", "data", "[", "sid", "]", "=", "datadict", "=", "{", "}", "for", "rlzi", ",", "hcurve", "in", "enumerate", "(", "hcurve_by_rlz", ")", ":", "datadict", "[", "rlzi", "]", "=", "lst", "=", "[", "None", "for", "imt", "in", "self", ".", "imtls", "]", "for", "imti", ",", "imt", "in", "enumerate", "(", "self", ".", "imtls", ")", ":", "lst", "[", "imti", "]", "=", "hcurve", "[", "imt", "]" ]
Read the poes and set the .data attribute with the hazard curves
[ "Read", "the", "poes", "and", "set", "the", ".", "data", "attribute", "with", "the", "hazard", "curves" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L66-L91
gem/oq-engine
openquake/calculators/getters.py
PmapGetter.pmap_by_grp
def pmap_by_grp(self): """ :returns: dictionary "grp-XXX" -> ProbabilityMap instance """ if hasattr(self, '_pmap_by_grp'): # already called return self._pmap_by_grp # populate _pmap_by_grp self._pmap_by_grp = {} if 'poes' in self.dstore: # build probability maps restricted to the given sids ok_sids = set(self.sids) for grp, dset in self.dstore['poes'].items(): ds = dset['array'] L, G = ds.shape[1:] pmap = probability_map.ProbabilityMap(L, G) for idx, sid in enumerate(dset['sids'].value): if sid in ok_sids: pmap[sid] = probability_map.ProbabilityCurve(ds[idx]) self._pmap_by_grp[grp] = pmap self.nbytes += pmap.nbytes return self._pmap_by_grp
python
def pmap_by_grp(self): if hasattr(self, '_pmap_by_grp'): return self._pmap_by_grp self._pmap_by_grp = {} if 'poes' in self.dstore: ok_sids = set(self.sids) for grp, dset in self.dstore['poes'].items(): ds = dset['array'] L, G = ds.shape[1:] pmap = probability_map.ProbabilityMap(L, G) for idx, sid in enumerate(dset['sids'].value): if sid in ok_sids: pmap[sid] = probability_map.ProbabilityCurve(ds[idx]) self._pmap_by_grp[grp] = pmap self.nbytes += pmap.nbytes return self._pmap_by_grp
[ "def", "pmap_by_grp", "(", "self", ")", ":", "if", "hasattr", "(", "self", ",", "'_pmap_by_grp'", ")", ":", "# already called", "return", "self", ".", "_pmap_by_grp", "# populate _pmap_by_grp", "self", ".", "_pmap_by_grp", "=", "{", "}", "if", "'poes'", "in", "self", ".", "dstore", ":", "# build probability maps restricted to the given sids", "ok_sids", "=", "set", "(", "self", ".", "sids", ")", "for", "grp", ",", "dset", "in", "self", ".", "dstore", "[", "'poes'", "]", ".", "items", "(", ")", ":", "ds", "=", "dset", "[", "'array'", "]", "L", ",", "G", "=", "ds", ".", "shape", "[", "1", ":", "]", "pmap", "=", "probability_map", ".", "ProbabilityMap", "(", "L", ",", "G", ")", "for", "idx", ",", "sid", "in", "enumerate", "(", "dset", "[", "'sids'", "]", ".", "value", ")", ":", "if", "sid", "in", "ok_sids", ":", "pmap", "[", "sid", "]", "=", "probability_map", ".", "ProbabilityCurve", "(", "ds", "[", "idx", "]", ")", "self", ".", "_pmap_by_grp", "[", "grp", "]", "=", "pmap", "self", ".", "nbytes", "+=", "pmap", ".", "nbytes", "return", "self", ".", "_pmap_by_grp" ]
:returns: dictionary "grp-XXX" -> ProbabilityMap instance
[ ":", "returns", ":", "dictionary", "grp", "-", "XXX", "-", ">", "ProbabilityMap", "instance" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L94-L114
gem/oq-engine
openquake/calculators/getters.py
PmapGetter.get
def get(self, rlzi, grp=None): """ :param rlzi: a realization index :param grp: None (all groups) or a string of the form "grp-XX" :returns: the hazard curves for the given realization """ self.init() assert self.sids is not None pmap = probability_map.ProbabilityMap(len(self.imtls.array), 1) grps = [grp] if grp is not None else sorted(self.pmap_by_grp) array = self.rlzs_assoc.by_grp() for grp in grps: for gsim_idx, rlzis in enumerate(array[grp]): for r in rlzis: if r == rlzi: pmap |= self.pmap_by_grp[grp].extract(gsim_idx) break return pmap
python
def get(self, rlzi, grp=None): self.init() assert self.sids is not None pmap = probability_map.ProbabilityMap(len(self.imtls.array), 1) grps = [grp] if grp is not None else sorted(self.pmap_by_grp) array = self.rlzs_assoc.by_grp() for grp in grps: for gsim_idx, rlzis in enumerate(array[grp]): for r in rlzis: if r == rlzi: pmap |= self.pmap_by_grp[grp].extract(gsim_idx) break return pmap
[ "def", "get", "(", "self", ",", "rlzi", ",", "grp", "=", "None", ")", ":", "self", ".", "init", "(", ")", "assert", "self", ".", "sids", "is", "not", "None", "pmap", "=", "probability_map", ".", "ProbabilityMap", "(", "len", "(", "self", ".", "imtls", ".", "array", ")", ",", "1", ")", "grps", "=", "[", "grp", "]", "if", "grp", "is", "not", "None", "else", "sorted", "(", "self", ".", "pmap_by_grp", ")", "array", "=", "self", ".", "rlzs_assoc", ".", "by_grp", "(", ")", "for", "grp", "in", "grps", ":", "for", "gsim_idx", ",", "rlzis", "in", "enumerate", "(", "array", "[", "grp", "]", ")", ":", "for", "r", "in", "rlzis", ":", "if", "r", "==", "rlzi", ":", "pmap", "|=", "self", ".", "pmap_by_grp", "[", "grp", "]", ".", "extract", "(", "gsim_idx", ")", "break", "return", "pmap" ]
:param rlzi: a realization index :param grp: None (all groups) or a string of the form "grp-XX" :returns: the hazard curves for the given realization
[ ":", "param", "rlzi", ":", "a", "realization", "index", ":", "param", "grp", ":", "None", "(", "all", "groups", ")", "or", "a", "string", "of", "the", "form", "grp", "-", "XX", ":", "returns", ":", "the", "hazard", "curves", "for", "the", "given", "realization" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L123-L140
gem/oq-engine
openquake/calculators/getters.py
PmapGetter.get_hcurves
def get_hcurves(self, imtls=None): """ :param imtls: intensity measure types and levels :returns: an array of (R, N) hazard curves """ self.init() if imtls is None: imtls = self.imtls pmaps = [pmap.convert2(imtls, self.sids) for pmap in self.get_pmaps()] return numpy.array(pmaps)
python
def get_hcurves(self, imtls=None): self.init() if imtls is None: imtls = self.imtls pmaps = [pmap.convert2(imtls, self.sids) for pmap in self.get_pmaps()] return numpy.array(pmaps)
[ "def", "get_hcurves", "(", "self", ",", "imtls", "=", "None", ")", ":", "self", ".", "init", "(", ")", "if", "imtls", "is", "None", ":", "imtls", "=", "self", ".", "imtls", "pmaps", "=", "[", "pmap", ".", "convert2", "(", "imtls", ",", "self", ".", "sids", ")", "for", "pmap", "in", "self", ".", "get_pmaps", "(", ")", "]", "return", "numpy", ".", "array", "(", "pmaps", ")" ]
:param imtls: intensity measure types and levels :returns: an array of (R, N) hazard curves
[ ":", "param", "imtls", ":", "intensity", "measure", "types", "and", "levels", ":", "returns", ":", "an", "array", "of", "(", "R", "N", ")", "hazard", "curves" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L148-L158
gem/oq-engine
openquake/calculators/getters.py
PmapGetter.items
def items(self, kind=''): """ Extract probability maps from the datastore, possibly generating on the fly the ones corresponding to the individual realizations. Yields pairs (tag, pmap). :param kind: the kind of PoEs to extract; if not given, returns the realization if there is only one or the statistics otherwise. """ num_rlzs = len(self.weights) if not kind or kind == 'all': # use default if 'hcurves' in self.dstore: for k in sorted(self.dstore['hcurves']): yield k, self.dstore['hcurves/' + k].value elif num_rlzs == 1: yield 'mean', self.get(0) return if 'poes' in self.dstore and kind in ('rlzs', 'all'): for rlzi in range(num_rlzs): hcurves = self.get(rlzi) yield 'rlz-%03d' % rlzi, hcurves elif 'poes' in self.dstore and kind.startswith('rlz-'): yield kind, self.get(int(kind[4:])) if 'hcurves' in self.dstore and kind == 'stats': for k in sorted(self.dstore['hcurves']): if not k.startswith('rlz'): yield k, self.dstore['hcurves/' + k].value
python
def items(self, kind=''): num_rlzs = len(self.weights) if not kind or kind == 'all': if 'hcurves' in self.dstore: for k in sorted(self.dstore['hcurves']): yield k, self.dstore['hcurves/' + k].value elif num_rlzs == 1: yield 'mean', self.get(0) return if 'poes' in self.dstore and kind in ('rlzs', 'all'): for rlzi in range(num_rlzs): hcurves = self.get(rlzi) yield 'rlz-%03d' % rlzi, hcurves elif 'poes' in self.dstore and kind.startswith('rlz-'): yield kind, self.get(int(kind[4:])) if 'hcurves' in self.dstore and kind == 'stats': for k in sorted(self.dstore['hcurves']): if not k.startswith('rlz'): yield k, self.dstore['hcurves/' + k].value
[ "def", "items", "(", "self", ",", "kind", "=", "''", ")", ":", "num_rlzs", "=", "len", "(", "self", ".", "weights", ")", "if", "not", "kind", "or", "kind", "==", "'all'", ":", "# use default", "if", "'hcurves'", "in", "self", ".", "dstore", ":", "for", "k", "in", "sorted", "(", "self", ".", "dstore", "[", "'hcurves'", "]", ")", ":", "yield", "k", ",", "self", ".", "dstore", "[", "'hcurves/'", "+", "k", "]", ".", "value", "elif", "num_rlzs", "==", "1", ":", "yield", "'mean'", ",", "self", ".", "get", "(", "0", ")", "return", "if", "'poes'", "in", "self", ".", "dstore", "and", "kind", "in", "(", "'rlzs'", ",", "'all'", ")", ":", "for", "rlzi", "in", "range", "(", "num_rlzs", ")", ":", "hcurves", "=", "self", ".", "get", "(", "rlzi", ")", "yield", "'rlz-%03d'", "%", "rlzi", ",", "hcurves", "elif", "'poes'", "in", "self", ".", "dstore", "and", "kind", ".", "startswith", "(", "'rlz-'", ")", ":", "yield", "kind", ",", "self", ".", "get", "(", "int", "(", "kind", "[", "4", ":", "]", ")", ")", "if", "'hcurves'", "in", "self", ".", "dstore", "and", "kind", "==", "'stats'", ":", "for", "k", "in", "sorted", "(", "self", ".", "dstore", "[", "'hcurves'", "]", ")", ":", "if", "not", "k", ".", "startswith", "(", "'rlz'", ")", ":", "yield", "k", ",", "self", ".", "dstore", "[", "'hcurves/'", "+", "k", "]", ".", "value" ]
Extract probability maps from the datastore, possibly generating on the fly the ones corresponding to the individual realizations. Yields pairs (tag, pmap). :param kind: the kind of PoEs to extract; if not given, returns the realization if there is only one or the statistics otherwise.
[ "Extract", "probability", "maps", "from", "the", "datastore", "possibly", "generating", "on", "the", "fly", "the", "ones", "corresponding", "to", "the", "individual", "realizations", ".", "Yields", "pairs", "(", "tag", "pmap", ")", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L160-L187
gem/oq-engine
openquake/calculators/getters.py
PmapGetter.get_mean
def get_mean(self, grp=None): """ Compute the mean curve as a ProbabilityMap :param grp: if not None must be a string of the form "grp-XX"; in that case returns the mean considering only the contribution for group XX """ self.init() if len(self.weights) == 1: # one realization # the standard deviation is zero pmap = self.get(0, grp) for sid, pcurve in pmap.items(): array = numpy.zeros(pcurve.array.shape[:-1] + (2,)) array[:, 0] = pcurve.array[:, 0] pcurve.array = array return pmap else: # multiple realizations dic = ({g: self.dstore['poes/' + g] for g in self.dstore['poes']} if grp is None else {grp: self.dstore['poes/' + grp]}) pmaps = self.rlzs_assoc.combine_pmaps(dic) return stats.compute_pmap_stats( pmaps, [stats.mean_curve, stats.std_curve], self.weights, self.imtls)
python
def get_mean(self, grp=None): self.init() if len(self.weights) == 1: pmap = self.get(0, grp) for sid, pcurve in pmap.items(): array = numpy.zeros(pcurve.array.shape[:-1] + (2,)) array[:, 0] = pcurve.array[:, 0] pcurve.array = array return pmap else: dic = ({g: self.dstore['poes/' + g] for g in self.dstore['poes']} if grp is None else {grp: self.dstore['poes/' + grp]}) pmaps = self.rlzs_assoc.combine_pmaps(dic) return stats.compute_pmap_stats( pmaps, [stats.mean_curve, stats.std_curve], self.weights, self.imtls)
[ "def", "get_mean", "(", "self", ",", "grp", "=", "None", ")", ":", "self", ".", "init", "(", ")", "if", "len", "(", "self", ".", "weights", ")", "==", "1", ":", "# one realization", "# the standard deviation is zero", "pmap", "=", "self", ".", "get", "(", "0", ",", "grp", ")", "for", "sid", ",", "pcurve", "in", "pmap", ".", "items", "(", ")", ":", "array", "=", "numpy", ".", "zeros", "(", "pcurve", ".", "array", ".", "shape", "[", ":", "-", "1", "]", "+", "(", "2", ",", ")", ")", "array", "[", ":", ",", "0", "]", "=", "pcurve", ".", "array", "[", ":", ",", "0", "]", "pcurve", ".", "array", "=", "array", "return", "pmap", "else", ":", "# multiple realizations", "dic", "=", "(", "{", "g", ":", "self", ".", "dstore", "[", "'poes/'", "+", "g", "]", "for", "g", "in", "self", ".", "dstore", "[", "'poes'", "]", "}", "if", "grp", "is", "None", "else", "{", "grp", ":", "self", ".", "dstore", "[", "'poes/'", "+", "grp", "]", "}", ")", "pmaps", "=", "self", ".", "rlzs_assoc", ".", "combine_pmaps", "(", "dic", ")", "return", "stats", ".", "compute_pmap_stats", "(", "pmaps", ",", "[", "stats", ".", "mean_curve", ",", "stats", ".", "std_curve", "]", ",", "self", ".", "weights", ",", "self", ".", "imtls", ")" ]
Compute the mean curve as a ProbabilityMap :param grp: if not None must be a string of the form "grp-XX"; in that case returns the mean considering only the contribution for group XX
[ "Compute", "the", "mean", "curve", "as", "a", "ProbabilityMap" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L189-L212
gem/oq-engine
openquake/calculators/getters.py
GmfGetter.init
def init(self): """ Initialize the computers. Should be called on the workers """ if hasattr(self, 'computers'): # init already called return with hdf5.File(self.rupgetter.filename, 'r') as parent: self.weights = parent['weights'].value self.computers = [] for ebr in self.rupgetter.get_ruptures(self.srcfilter): sitecol = self.sitecol.filtered(ebr.sids) try: computer = calc.gmf.GmfComputer( ebr, sitecol, self.oqparam.imtls, self.cmaker, self.oqparam.truncation_level, self.correl_model) except FarAwayRupture: # due to numeric errors, ruptures within the maximum_distance # when written, can be outside when read; I found a case with # a distance of 99.9996936 km over a maximum distance of 100 km continue self.computers.append(computer)
python
def init(self): if hasattr(self, 'computers'): return with hdf5.File(self.rupgetter.filename, 'r') as parent: self.weights = parent['weights'].value self.computers = [] for ebr in self.rupgetter.get_ruptures(self.srcfilter): sitecol = self.sitecol.filtered(ebr.sids) try: computer = calc.gmf.GmfComputer( ebr, sitecol, self.oqparam.imtls, self.cmaker, self.oqparam.truncation_level, self.correl_model) except FarAwayRupture: continue self.computers.append(computer)
[ "def", "init", "(", "self", ")", ":", "if", "hasattr", "(", "self", ",", "'computers'", ")", ":", "# init already called", "return", "with", "hdf5", ".", "File", "(", "self", ".", "rupgetter", ".", "filename", ",", "'r'", ")", "as", "parent", ":", "self", ".", "weights", "=", "parent", "[", "'weights'", "]", ".", "value", "self", ".", "computers", "=", "[", "]", "for", "ebr", "in", "self", ".", "rupgetter", ".", "get_ruptures", "(", "self", ".", "srcfilter", ")", ":", "sitecol", "=", "self", ".", "sitecol", ".", "filtered", "(", "ebr", ".", "sids", ")", "try", ":", "computer", "=", "calc", ".", "gmf", ".", "GmfComputer", "(", "ebr", ",", "sitecol", ",", "self", ".", "oqparam", ".", "imtls", ",", "self", ".", "cmaker", ",", "self", ".", "oqparam", ".", "truncation_level", ",", "self", ".", "correl_model", ")", "except", "FarAwayRupture", ":", "# due to numeric errors, ruptures within the maximum_distance", "# when written, can be outside when read; I found a case with", "# a distance of 99.9996936 km over a maximum distance of 100 km", "continue", "self", ".", "computers", ".", "append", "(", "computer", ")" ]
Initialize the computers. Should be called on the workers
[ "Initialize", "the", "computers", ".", "Should", "be", "called", "on", "the", "workers" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L306-L326
gem/oq-engine
openquake/calculators/getters.py
GmfGetter.gen_gmfs
def gen_gmfs(self): """ Compute the GMFs for the given realization and yields arrays of the dtype (sid, eid, imti, gmv), one for rupture """ self.sig_eps = [] for computer in self.computers: rup = computer.rupture sids = computer.sids eids_by_rlz = rup.get_eids_by_rlz(self.rlzs_by_gsim) data = [] for gs, rlzs in self.rlzs_by_gsim.items(): num_events = sum(len(eids_by_rlz[rlzi]) for rlzi in rlzs) if num_events == 0: continue # NB: the trick for performance is to keep the call to # compute.compute outside of the loop over the realizations # it is better to have few calls producing big arrays array, sig, eps = computer.compute(gs, num_events) array = array.transpose(1, 0, 2) # from M, N, E to N, M, E for i, miniml in enumerate(self.min_iml): # gmv < minimum arr = array[:, i, :] arr[arr < miniml] = 0 n = 0 for rlzi in rlzs: eids = eids_by_rlz[rlzi] e = len(eids) if not e: continue for ei, eid in enumerate(eids): gmf = array[:, :, n + ei] # shape (N, M) tot = gmf.sum(axis=0) # shape (M,) if not tot.sum(): continue sigmas = sig[:, n + ei] self.sig_eps.append((eid, sigmas, eps[:, n + ei])) for sid, gmv in zip(sids, gmf): if gmv.sum(): data.append((rlzi, sid, eid, gmv)) n += e yield numpy.array(data, self.gmv_dt)
python
def gen_gmfs(self): self.sig_eps = [] for computer in self.computers: rup = computer.rupture sids = computer.sids eids_by_rlz = rup.get_eids_by_rlz(self.rlzs_by_gsim) data = [] for gs, rlzs in self.rlzs_by_gsim.items(): num_events = sum(len(eids_by_rlz[rlzi]) for rlzi in rlzs) if num_events == 0: continue array, sig, eps = computer.compute(gs, num_events) array = array.transpose(1, 0, 2) for i, miniml in enumerate(self.min_iml): arr = array[:, i, :] arr[arr < miniml] = 0 n = 0 for rlzi in rlzs: eids = eids_by_rlz[rlzi] e = len(eids) if not e: continue for ei, eid in enumerate(eids): gmf = array[:, :, n + ei] tot = gmf.sum(axis=0) if not tot.sum(): continue sigmas = sig[:, n + ei] self.sig_eps.append((eid, sigmas, eps[:, n + ei])) for sid, gmv in zip(sids, gmf): if gmv.sum(): data.append((rlzi, sid, eid, gmv)) n += e yield numpy.array(data, self.gmv_dt)
[ "def", "gen_gmfs", "(", "self", ")", ":", "self", ".", "sig_eps", "=", "[", "]", "for", "computer", "in", "self", ".", "computers", ":", "rup", "=", "computer", ".", "rupture", "sids", "=", "computer", ".", "sids", "eids_by_rlz", "=", "rup", ".", "get_eids_by_rlz", "(", "self", ".", "rlzs_by_gsim", ")", "data", "=", "[", "]", "for", "gs", ",", "rlzs", "in", "self", ".", "rlzs_by_gsim", ".", "items", "(", ")", ":", "num_events", "=", "sum", "(", "len", "(", "eids_by_rlz", "[", "rlzi", "]", ")", "for", "rlzi", "in", "rlzs", ")", "if", "num_events", "==", "0", ":", "continue", "# NB: the trick for performance is to keep the call to", "# compute.compute outside of the loop over the realizations", "# it is better to have few calls producing big arrays", "array", ",", "sig", ",", "eps", "=", "computer", ".", "compute", "(", "gs", ",", "num_events", ")", "array", "=", "array", ".", "transpose", "(", "1", ",", "0", ",", "2", ")", "# from M, N, E to N, M, E", "for", "i", ",", "miniml", "in", "enumerate", "(", "self", ".", "min_iml", ")", ":", "# gmv < minimum", "arr", "=", "array", "[", ":", ",", "i", ",", ":", "]", "arr", "[", "arr", "<", "miniml", "]", "=", "0", "n", "=", "0", "for", "rlzi", "in", "rlzs", ":", "eids", "=", "eids_by_rlz", "[", "rlzi", "]", "e", "=", "len", "(", "eids", ")", "if", "not", "e", ":", "continue", "for", "ei", ",", "eid", "in", "enumerate", "(", "eids", ")", ":", "gmf", "=", "array", "[", ":", ",", ":", ",", "n", "+", "ei", "]", "# shape (N, M)", "tot", "=", "gmf", ".", "sum", "(", "axis", "=", "0", ")", "# shape (M,)", "if", "not", "tot", ".", "sum", "(", ")", ":", "continue", "sigmas", "=", "sig", "[", ":", ",", "n", "+", "ei", "]", "self", ".", "sig_eps", ".", "append", "(", "(", "eid", ",", "sigmas", ",", "eps", "[", ":", ",", "n", "+", "ei", "]", ")", ")", "for", "sid", ",", "gmv", "in", "zip", "(", "sids", ",", "gmf", ")", ":", "if", "gmv", ".", "sum", "(", ")", ":", "data", ".", "append", "(", "(", "rlzi", ",", "sid", ",", "eid", ",", 
"gmv", ")", ")", "n", "+=", "e", "yield", "numpy", ".", "array", "(", "data", ",", "self", ".", "gmv_dt", ")" ]
Compute the GMFs for the given realization and yields arrays of the dtype (sid, eid, imti, gmv), one for rupture
[ "Compute", "the", "GMFs", "for", "the", "given", "realization", "and", "yields", "arrays", "of", "the", "dtype", "(", "sid", "eid", "imti", "gmv", ")", "one", "for", "rupture" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L328-L368
gem/oq-engine
openquake/calculators/getters.py
GmfGetter.get_gmfdata
def get_gmfdata(self): """ :returns: an array of the dtype (sid, eid, imti, gmv) """ alldata = list(self.gen_gmfs()) if not alldata: return numpy.zeros(0, self.gmv_dt) return numpy.concatenate(alldata)
python
def get_gmfdata(self): alldata = list(self.gen_gmfs()) if not alldata: return numpy.zeros(0, self.gmv_dt) return numpy.concatenate(alldata)
[ "def", "get_gmfdata", "(", "self", ")", ":", "alldata", "=", "list", "(", "self", ".", "gen_gmfs", "(", ")", ")", "if", "not", "alldata", ":", "return", "numpy", ".", "zeros", "(", "0", ",", "self", ".", "gmv_dt", ")", "return", "numpy", ".", "concatenate", "(", "alldata", ")" ]
:returns: an array of the dtype (sid, eid, imti, gmv)
[ ":", "returns", ":", "an", "array", "of", "the", "dtype", "(", "sid", "eid", "imti", "gmv", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L370-L377
gem/oq-engine
openquake/calculators/getters.py
GmfGetter.get_hazard
def get_hazard(self, data=None): """ :param data: if given, an iterator of records of dtype gmf_dt :returns: sid -> records """ if data is None: data = self.get_gmfdata() return general.group_array(data, 'sid')
python
def get_hazard(self, data=None): if data is None: data = self.get_gmfdata() return general.group_array(data, 'sid')
[ "def", "get_hazard", "(", "self", ",", "data", "=", "None", ")", ":", "if", "data", "is", "None", ":", "data", "=", "self", ".", "get_gmfdata", "(", ")", "return", "general", ".", "group_array", "(", "data", ",", "'sid'", ")" ]
:param data: if given, an iterator of records of dtype gmf_dt :returns: sid -> records
[ ":", "param", "data", ":", "if", "given", "an", "iterator", "of", "records", "of", "dtype", "gmf_dt", ":", "returns", ":", "sid", "-", ">", "records" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L379-L386
gem/oq-engine
openquake/calculators/getters.py
GmfGetter.compute_gmfs_curves
def compute_gmfs_curves(self, monitor): """ :returns: a dict with keys gmfdata, indices, hcurves """ oq = self.oqparam with monitor('GmfGetter.init', measuremem=True): self.init() hcurves = {} # key -> poes if oq.hazard_curves_from_gmfs: hc_mon = monitor('building hazard curves', measuremem=False) duration = oq.investigation_time * oq.ses_per_logic_tree_path with monitor('building hazard', measuremem=True): gmfdata = self.get_gmfdata() # returned later hazard = self.get_hazard(data=gmfdata) for sid, hazardr in hazard.items(): dic = general.group_array(hazardr, 'rlzi') for rlzi, array in dic.items(): with hc_mon: gmvs = array['gmv'] for imti, imt in enumerate(oq.imtls): poes = _gmvs_to_haz_curve( gmvs[:, imti], oq.imtls[imt], oq.investigation_time, duration) hcurves[rsi2str(rlzi, sid, imt)] = poes elif oq.ground_motion_fields: # fast lane with monitor('building hazard', measuremem=True): gmfdata = self.get_gmfdata() else: return {} if len(gmfdata) == 0: return dict(gmfdata=[]) indices = [] gmfdata.sort(order=('sid', 'rlzi', 'eid')) start = stop = 0 for sid, rows in itertools.groupby(gmfdata['sid']): for row in rows: stop += 1 indices.append((sid, start, stop)) start = stop res = dict(gmfdata=gmfdata, hcurves=hcurves, sig_eps=numpy.array(self.sig_eps, self.sig_eps_dt), indices=numpy.array(indices, (U32, 3))) return res
python
def compute_gmfs_curves(self, monitor): oq = self.oqparam with monitor('GmfGetter.init', measuremem=True): self.init() hcurves = {} if oq.hazard_curves_from_gmfs: hc_mon = monitor('building hazard curves', measuremem=False) duration = oq.investigation_time * oq.ses_per_logic_tree_path with monitor('building hazard', measuremem=True): gmfdata = self.get_gmfdata() hazard = self.get_hazard(data=gmfdata) for sid, hazardr in hazard.items(): dic = general.group_array(hazardr, 'rlzi') for rlzi, array in dic.items(): with hc_mon: gmvs = array['gmv'] for imti, imt in enumerate(oq.imtls): poes = _gmvs_to_haz_curve( gmvs[:, imti], oq.imtls[imt], oq.investigation_time, duration) hcurves[rsi2str(rlzi, sid, imt)] = poes elif oq.ground_motion_fields: with monitor('building hazard', measuremem=True): gmfdata = self.get_gmfdata() else: return {} if len(gmfdata) == 0: return dict(gmfdata=[]) indices = [] gmfdata.sort(order=('sid', 'rlzi', 'eid')) start = stop = 0 for sid, rows in itertools.groupby(gmfdata['sid']): for row in rows: stop += 1 indices.append((sid, start, stop)) start = stop res = dict(gmfdata=gmfdata, hcurves=hcurves, sig_eps=numpy.array(self.sig_eps, self.sig_eps_dt), indices=numpy.array(indices, (U32, 3))) return res
[ "def", "compute_gmfs_curves", "(", "self", ",", "monitor", ")", ":", "oq", "=", "self", ".", "oqparam", "with", "monitor", "(", "'GmfGetter.init'", ",", "measuremem", "=", "True", ")", ":", "self", ".", "init", "(", ")", "hcurves", "=", "{", "}", "# key -> poes", "if", "oq", ".", "hazard_curves_from_gmfs", ":", "hc_mon", "=", "monitor", "(", "'building hazard curves'", ",", "measuremem", "=", "False", ")", "duration", "=", "oq", ".", "investigation_time", "*", "oq", ".", "ses_per_logic_tree_path", "with", "monitor", "(", "'building hazard'", ",", "measuremem", "=", "True", ")", ":", "gmfdata", "=", "self", ".", "get_gmfdata", "(", ")", "# returned later", "hazard", "=", "self", ".", "get_hazard", "(", "data", "=", "gmfdata", ")", "for", "sid", ",", "hazardr", "in", "hazard", ".", "items", "(", ")", ":", "dic", "=", "general", ".", "group_array", "(", "hazardr", ",", "'rlzi'", ")", "for", "rlzi", ",", "array", "in", "dic", ".", "items", "(", ")", ":", "with", "hc_mon", ":", "gmvs", "=", "array", "[", "'gmv'", "]", "for", "imti", ",", "imt", "in", "enumerate", "(", "oq", ".", "imtls", ")", ":", "poes", "=", "_gmvs_to_haz_curve", "(", "gmvs", "[", ":", ",", "imti", "]", ",", "oq", ".", "imtls", "[", "imt", "]", ",", "oq", ".", "investigation_time", ",", "duration", ")", "hcurves", "[", "rsi2str", "(", "rlzi", ",", "sid", ",", "imt", ")", "]", "=", "poes", "elif", "oq", ".", "ground_motion_fields", ":", "# fast lane", "with", "monitor", "(", "'building hazard'", ",", "measuremem", "=", "True", ")", ":", "gmfdata", "=", "self", ".", "get_gmfdata", "(", ")", "else", ":", "return", "{", "}", "if", "len", "(", "gmfdata", ")", "==", "0", ":", "return", "dict", "(", "gmfdata", "=", "[", "]", ")", "indices", "=", "[", "]", "gmfdata", ".", "sort", "(", "order", "=", "(", "'sid'", ",", "'rlzi'", ",", "'eid'", ")", ")", "start", "=", "stop", "=", "0", "for", "sid", ",", "rows", "in", "itertools", ".", "groupby", "(", "gmfdata", "[", "'sid'", "]", ")", ":", "for", "row", "in", 
"rows", ":", "stop", "+=", "1", "indices", ".", "append", "(", "(", "sid", ",", "start", ",", "stop", ")", ")", "start", "=", "stop", "res", "=", "dict", "(", "gmfdata", "=", "gmfdata", ",", "hcurves", "=", "hcurves", ",", "sig_eps", "=", "numpy", ".", "array", "(", "self", ".", "sig_eps", ",", "self", ".", "sig_eps_dt", ")", ",", "indices", "=", "numpy", ".", "array", "(", "indices", ",", "(", "U32", ",", "3", ")", ")", ")", "return", "res" ]
:returns: a dict with keys gmfdata, indices, hcurves
[ ":", "returns", ":", "a", "dict", "with", "keys", "gmfdata", "indices", "hcurves" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L388-L430
gem/oq-engine
openquake/calculators/getters.py
RuptureGetter.set_weights
def set_weights(self, src_filter, num_taxonomies_by_site): """ :returns: the weights of the ruptures in the getter """ weights = [] for rup in self.rup_array: sids = src_filter.close_sids(rup, self.trt, rup['mag']) weights.append(num_taxonomies_by_site[sids].sum()) self.weights = numpy.array(weights) self.weight = self.weights.sum()
python
def set_weights(self, src_filter, num_taxonomies_by_site): weights = [] for rup in self.rup_array: sids = src_filter.close_sids(rup, self.trt, rup['mag']) weights.append(num_taxonomies_by_site[sids].sum()) self.weights = numpy.array(weights) self.weight = self.weights.sum()
[ "def", "set_weights", "(", "self", ",", "src_filter", ",", "num_taxonomies_by_site", ")", ":", "weights", "=", "[", "]", "for", "rup", "in", "self", ".", "rup_array", ":", "sids", "=", "src_filter", ".", "close_sids", "(", "rup", ",", "self", ".", "trt", ",", "rup", "[", "'mag'", "]", ")", "weights", ".", "append", "(", "num_taxonomies_by_site", "[", "sids", "]", ".", "sum", "(", ")", ")", "self", ".", "weights", "=", "numpy", ".", "array", "(", "weights", ")", "self", ".", "weight", "=", "self", ".", "weights", ".", "sum", "(", ")" ]
:returns: the weights of the ruptures in the getter
[ ":", "returns", ":", "the", "weights", "of", "the", "ruptures", "in", "the", "getter" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L540-L549
gem/oq-engine
openquake/calculators/getters.py
RuptureGetter.split
def split(self, maxweight): """ :yields: RuptureGetters with weight <= maxweight """ # NB: can be called only after .set_weights() has been called idx = {ri: i for i, ri in enumerate(self.rup_indices)} for rup_indices in general.block_splitter( self.rup_indices, maxweight, lambda ri: self.weights[idx[ri]]): if rup_indices: # some indices may have weight 0 and are discarded rgetter = self.__class__( self.filename, list(rup_indices), self.grp_id, self.trt, self.samples, self.rlzs_by_gsim, self.first_event) rgetter.weight = sum([self.weights[idx[ri]] for ri in rup_indices]) yield rgetter
python
def split(self, maxweight): idx = {ri: i for i, ri in enumerate(self.rup_indices)} for rup_indices in general.block_splitter( self.rup_indices, maxweight, lambda ri: self.weights[idx[ri]]): if rup_indices: rgetter = self.__class__( self.filename, list(rup_indices), self.grp_id, self.trt, self.samples, self.rlzs_by_gsim, self.first_event) rgetter.weight = sum([self.weights[idx[ri]] for ri in rup_indices]) yield rgetter
[ "def", "split", "(", "self", ",", "maxweight", ")", ":", "# NB: can be called only after .set_weights() has been called", "idx", "=", "{", "ri", ":", "i", "for", "i", ",", "ri", "in", "enumerate", "(", "self", ".", "rup_indices", ")", "}", "for", "rup_indices", "in", "general", ".", "block_splitter", "(", "self", ".", "rup_indices", ",", "maxweight", ",", "lambda", "ri", ":", "self", ".", "weights", "[", "idx", "[", "ri", "]", "]", ")", ":", "if", "rup_indices", ":", "# some indices may have weight 0 and are discarded", "rgetter", "=", "self", ".", "__class__", "(", "self", ".", "filename", ",", "list", "(", "rup_indices", ")", ",", "self", ".", "grp_id", ",", "self", ".", "trt", ",", "self", ".", "samples", ",", "self", ".", "rlzs_by_gsim", ",", "self", ".", "first_event", ")", "rgetter", ".", "weight", "=", "sum", "(", "[", "self", ".", "weights", "[", "idx", "[", "ri", "]", "]", "for", "ri", "in", "rup_indices", "]", ")", "yield", "rgetter" ]
:yields: RuptureGetters with weight <= maxweight
[ ":", "yields", ":", "RuptureGetters", "with", "weight", "<", "=", "maxweight" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L551-L567
gem/oq-engine
openquake/calculators/getters.py
RuptureGetter.get_eid_rlz
def get_eid_rlz(self, monitor=None): """ :returns: a composite array with the associations eid->rlz """ eid_rlz = [] for rup in self.rup_array: ebr = EBRupture(mock.Mock(serial=rup['serial']), rup['srcidx'], self.grp_id, rup['n_occ'], self.samples) for rlz, eids in ebr.get_eids_by_rlz(self.rlzs_by_gsim).items(): for eid in eids: eid_rlz.append((eid, rlz)) return numpy.array(eid_rlz, [('eid', U64), ('rlz', U16)])
python
def get_eid_rlz(self, monitor=None): eid_rlz = [] for rup in self.rup_array: ebr = EBRupture(mock.Mock(serial=rup['serial']), rup['srcidx'], self.grp_id, rup['n_occ'], self.samples) for rlz, eids in ebr.get_eids_by_rlz(self.rlzs_by_gsim).items(): for eid in eids: eid_rlz.append((eid, rlz)) return numpy.array(eid_rlz, [('eid', U64), ('rlz', U16)])
[ "def", "get_eid_rlz", "(", "self", ",", "monitor", "=", "None", ")", ":", "eid_rlz", "=", "[", "]", "for", "rup", "in", "self", ".", "rup_array", ":", "ebr", "=", "EBRupture", "(", "mock", ".", "Mock", "(", "serial", "=", "rup", "[", "'serial'", "]", ")", ",", "rup", "[", "'srcidx'", "]", ",", "self", ".", "grp_id", ",", "rup", "[", "'n_occ'", "]", ",", "self", ".", "samples", ")", "for", "rlz", ",", "eids", "in", "ebr", ".", "get_eids_by_rlz", "(", "self", ".", "rlzs_by_gsim", ")", ".", "items", "(", ")", ":", "for", "eid", "in", "eids", ":", "eid_rlz", ".", "append", "(", "(", "eid", ",", "rlz", ")", ")", "return", "numpy", ".", "array", "(", "eid_rlz", ",", "[", "(", "'eid'", ",", "U64", ")", ",", "(", "'rlz'", ",", "U16", ")", "]", ")" ]
:returns: a composite array with the associations eid->rlz
[ ":", "returns", ":", "a", "composite", "array", "with", "the", "associations", "eid", "-", ">", "rlz" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L569-L580
gem/oq-engine
openquake/calculators/getters.py
RuptureGetter.get_rupdict
def get_rupdict(self): """ :returns: a dictionary with the parameters of the rupture """ assert len(self.rup_array) == 1, 'Please specify a slice of length 1' dic = {'trt': self.trt, 'samples': self.samples} with datastore.read(self.filename) as dstore: rupgeoms = dstore['rupgeoms'] source_ids = dstore['source_info']['source_id'] rec = self.rup_array[0] geom = rupgeoms[rec['gidx1']:rec['gidx2']].reshape( rec['sy'], rec['sz']) dic['lons'] = geom['lon'] dic['lats'] = geom['lat'] dic['deps'] = geom['depth'] rupclass, surclass = self.code2cls[rec['code']] dic['rupture_class'] = rupclass.__name__ dic['surface_class'] = surclass.__name__ dic['hypo'] = rec['hypo'] dic['occurrence_rate'] = rec['occurrence_rate'] dic['grp_id'] = rec['grp_id'] dic['n_occ'] = rec['n_occ'] dic['serial'] = rec['serial'] dic['mag'] = rec['mag'] dic['srcid'] = source_ids[rec['srcidx']] return dic
python
def get_rupdict(self): assert len(self.rup_array) == 1, 'Please specify a slice of length 1' dic = {'trt': self.trt, 'samples': self.samples} with datastore.read(self.filename) as dstore: rupgeoms = dstore['rupgeoms'] source_ids = dstore['source_info']['source_id'] rec = self.rup_array[0] geom = rupgeoms[rec['gidx1']:rec['gidx2']].reshape( rec['sy'], rec['sz']) dic['lons'] = geom['lon'] dic['lats'] = geom['lat'] dic['deps'] = geom['depth'] rupclass, surclass = self.code2cls[rec['code']] dic['rupture_class'] = rupclass.__name__ dic['surface_class'] = surclass.__name__ dic['hypo'] = rec['hypo'] dic['occurrence_rate'] = rec['occurrence_rate'] dic['grp_id'] = rec['grp_id'] dic['n_occ'] = rec['n_occ'] dic['serial'] = rec['serial'] dic['mag'] = rec['mag'] dic['srcid'] = source_ids[rec['srcidx']] return dic
[ "def", "get_rupdict", "(", "self", ")", ":", "assert", "len", "(", "self", ".", "rup_array", ")", "==", "1", ",", "'Please specify a slice of length 1'", "dic", "=", "{", "'trt'", ":", "self", ".", "trt", ",", "'samples'", ":", "self", ".", "samples", "}", "with", "datastore", ".", "read", "(", "self", ".", "filename", ")", "as", "dstore", ":", "rupgeoms", "=", "dstore", "[", "'rupgeoms'", "]", "source_ids", "=", "dstore", "[", "'source_info'", "]", "[", "'source_id'", "]", "rec", "=", "self", ".", "rup_array", "[", "0", "]", "geom", "=", "rupgeoms", "[", "rec", "[", "'gidx1'", "]", ":", "rec", "[", "'gidx2'", "]", "]", ".", "reshape", "(", "rec", "[", "'sy'", "]", ",", "rec", "[", "'sz'", "]", ")", "dic", "[", "'lons'", "]", "=", "geom", "[", "'lon'", "]", "dic", "[", "'lats'", "]", "=", "geom", "[", "'lat'", "]", "dic", "[", "'deps'", "]", "=", "geom", "[", "'depth'", "]", "rupclass", ",", "surclass", "=", "self", ".", "code2cls", "[", "rec", "[", "'code'", "]", "]", "dic", "[", "'rupture_class'", "]", "=", "rupclass", ".", "__name__", "dic", "[", "'surface_class'", "]", "=", "surclass", ".", "__name__", "dic", "[", "'hypo'", "]", "=", "rec", "[", "'hypo'", "]", "dic", "[", "'occurrence_rate'", "]", "=", "rec", "[", "'occurrence_rate'", "]", "dic", "[", "'grp_id'", "]", "=", "rec", "[", "'grp_id'", "]", "dic", "[", "'n_occ'", "]", "=", "rec", "[", "'n_occ'", "]", "dic", "[", "'serial'", "]", "=", "rec", "[", "'serial'", "]", "dic", "[", "'mag'", "]", "=", "rec", "[", "'mag'", "]", "dic", "[", "'srcid'", "]", "=", "source_ids", "[", "rec", "[", "'srcidx'", "]", "]", "return", "dic" ]
:returns: a dictionary with the parameters of the rupture
[ ":", "returns", ":", "a", "dictionary", "with", "the", "parameters", "of", "the", "rupture" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L582-L607
gem/oq-engine
openquake/calculators/getters.py
RuptureGetter.get_ruptures
def get_ruptures(self, srcfilter=calc.filters.nofilter): """ :returns: a list of EBRuptures filtered by bounding box """ ebrs = [] with datastore.read(self.filename) as dstore: rupgeoms = dstore['rupgeoms'] for rec in self.rup_array: if srcfilter.integration_distance: sids = srcfilter.close_sids(rec, self.trt, rec['mag']) if len(sids) == 0: # the rupture is far away continue else: sids = None mesh = numpy.zeros((3, rec['sy'], rec['sz']), F32) geom = rupgeoms[rec['gidx1']:rec['gidx2']].reshape( rec['sy'], rec['sz']) mesh[0] = geom['lon'] mesh[1] = geom['lat'] mesh[2] = geom['depth'] rupture_cls, surface_cls = self.code2cls[rec['code']] rupture = object.__new__(rupture_cls) rupture.serial = rec['serial'] rupture.surface = object.__new__(surface_cls) rupture.mag = rec['mag'] rupture.rake = rec['rake'] rupture.hypocenter = geo.Point(*rec['hypo']) rupture.occurrence_rate = rec['occurrence_rate'] rupture.tectonic_region_type = self.trt if surface_cls is geo.PlanarSurface: rupture.surface = geo.PlanarSurface.from_array( mesh[:, 0, :]) elif surface_cls is geo.MultiSurface: # mesh has shape (3, n, 4) rupture.surface.__init__([ geo.PlanarSurface.from_array(mesh[:, i, :]) for i in range(mesh.shape[1])]) elif surface_cls is geo.GriddedSurface: # fault surface, strike and dip will be computed rupture.surface.strike = rupture.surface.dip = None rupture.surface.mesh = Mesh(*mesh) else: # fault surface, strike and dip will be computed rupture.surface.strike = rupture.surface.dip = None rupture.surface.__init__(RectangularMesh(*mesh)) grp_id = rec['grp_id'] ebr = EBRupture(rupture, rec['srcidx'], grp_id, rec['n_occ'], self.samples) # not implemented: rupture_slip_direction ebr.sids = sids ebrs.append(ebr) return ebrs
python
def get_ruptures(self, srcfilter=calc.filters.nofilter): ebrs = [] with datastore.read(self.filename) as dstore: rupgeoms = dstore['rupgeoms'] for rec in self.rup_array: if srcfilter.integration_distance: sids = srcfilter.close_sids(rec, self.trt, rec['mag']) if len(sids) == 0: continue else: sids = None mesh = numpy.zeros((3, rec['sy'], rec['sz']), F32) geom = rupgeoms[rec['gidx1']:rec['gidx2']].reshape( rec['sy'], rec['sz']) mesh[0] = geom['lon'] mesh[1] = geom['lat'] mesh[2] = geom['depth'] rupture_cls, surface_cls = self.code2cls[rec['code']] rupture = object.__new__(rupture_cls) rupture.serial = rec['serial'] rupture.surface = object.__new__(surface_cls) rupture.mag = rec['mag'] rupture.rake = rec['rake'] rupture.hypocenter = geo.Point(*rec['hypo']) rupture.occurrence_rate = rec['occurrence_rate'] rupture.tectonic_region_type = self.trt if surface_cls is geo.PlanarSurface: rupture.surface = geo.PlanarSurface.from_array( mesh[:, 0, :]) elif surface_cls is geo.MultiSurface: rupture.surface.__init__([ geo.PlanarSurface.from_array(mesh[:, i, :]) for i in range(mesh.shape[1])]) elif surface_cls is geo.GriddedSurface: rupture.surface.strike = rupture.surface.dip = None rupture.surface.mesh = Mesh(*mesh) else: rupture.surface.strike = rupture.surface.dip = None rupture.surface.__init__(RectangularMesh(*mesh)) grp_id = rec['grp_id'] ebr = EBRupture(rupture, rec['srcidx'], grp_id, rec['n_occ'], self.samples) ebr.sids = sids ebrs.append(ebr) return ebrs
[ "def", "get_ruptures", "(", "self", ",", "srcfilter", "=", "calc", ".", "filters", ".", "nofilter", ")", ":", "ebrs", "=", "[", "]", "with", "datastore", ".", "read", "(", "self", ".", "filename", ")", "as", "dstore", ":", "rupgeoms", "=", "dstore", "[", "'rupgeoms'", "]", "for", "rec", "in", "self", ".", "rup_array", ":", "if", "srcfilter", ".", "integration_distance", ":", "sids", "=", "srcfilter", ".", "close_sids", "(", "rec", ",", "self", ".", "trt", ",", "rec", "[", "'mag'", "]", ")", "if", "len", "(", "sids", ")", "==", "0", ":", "# the rupture is far away", "continue", "else", ":", "sids", "=", "None", "mesh", "=", "numpy", ".", "zeros", "(", "(", "3", ",", "rec", "[", "'sy'", "]", ",", "rec", "[", "'sz'", "]", ")", ",", "F32", ")", "geom", "=", "rupgeoms", "[", "rec", "[", "'gidx1'", "]", ":", "rec", "[", "'gidx2'", "]", "]", ".", "reshape", "(", "rec", "[", "'sy'", "]", ",", "rec", "[", "'sz'", "]", ")", "mesh", "[", "0", "]", "=", "geom", "[", "'lon'", "]", "mesh", "[", "1", "]", "=", "geom", "[", "'lat'", "]", "mesh", "[", "2", "]", "=", "geom", "[", "'depth'", "]", "rupture_cls", ",", "surface_cls", "=", "self", ".", "code2cls", "[", "rec", "[", "'code'", "]", "]", "rupture", "=", "object", ".", "__new__", "(", "rupture_cls", ")", "rupture", ".", "serial", "=", "rec", "[", "'serial'", "]", "rupture", ".", "surface", "=", "object", ".", "__new__", "(", "surface_cls", ")", "rupture", ".", "mag", "=", "rec", "[", "'mag'", "]", "rupture", ".", "rake", "=", "rec", "[", "'rake'", "]", "rupture", ".", "hypocenter", "=", "geo", ".", "Point", "(", "*", "rec", "[", "'hypo'", "]", ")", "rupture", ".", "occurrence_rate", "=", "rec", "[", "'occurrence_rate'", "]", "rupture", ".", "tectonic_region_type", "=", "self", ".", "trt", "if", "surface_cls", "is", "geo", ".", "PlanarSurface", ":", "rupture", ".", "surface", "=", "geo", ".", "PlanarSurface", ".", "from_array", "(", "mesh", "[", ":", ",", "0", ",", ":", "]", ")", "elif", "surface_cls", "is", "geo", ".", 
"MultiSurface", ":", "# mesh has shape (3, n, 4)", "rupture", ".", "surface", ".", "__init__", "(", "[", "geo", ".", "PlanarSurface", ".", "from_array", "(", "mesh", "[", ":", ",", "i", ",", ":", "]", ")", "for", "i", "in", "range", "(", "mesh", ".", "shape", "[", "1", "]", ")", "]", ")", "elif", "surface_cls", "is", "geo", ".", "GriddedSurface", ":", "# fault surface, strike and dip will be computed", "rupture", ".", "surface", ".", "strike", "=", "rupture", ".", "surface", ".", "dip", "=", "None", "rupture", ".", "surface", ".", "mesh", "=", "Mesh", "(", "*", "mesh", ")", "else", ":", "# fault surface, strike and dip will be computed", "rupture", ".", "surface", ".", "strike", "=", "rupture", ".", "surface", ".", "dip", "=", "None", "rupture", ".", "surface", ".", "__init__", "(", "RectangularMesh", "(", "*", "mesh", ")", ")", "grp_id", "=", "rec", "[", "'grp_id'", "]", "ebr", "=", "EBRupture", "(", "rupture", ",", "rec", "[", "'srcidx'", "]", ",", "grp_id", ",", "rec", "[", "'n_occ'", "]", ",", "self", ".", "samples", ")", "# not implemented: rupture_slip_direction", "ebr", ".", "sids", "=", "sids", "ebrs", ".", "append", "(", "ebr", ")", "return", "ebrs" ]
:returns: a list of EBRuptures filtered by bounding box
[ ":", "returns", ":", "a", "list", "of", "EBRuptures", "filtered", "by", "bounding", "box" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L609-L660
gem/oq-engine
openquake/calculators/getters.py
RuptureGetter.E2R
def E2R(self, array, rlzi): """ :param array: an array of shape (E, ...) :param rlzi: an array of E realization indices :returns: an aggregated array of shape (R, ...) """ z = numpy.zeros((self.num_rlzs,) + array.shape[1:], array.dtype) for a, r in zip(array, rlzi): z[self.rlz2idx[r]] += a return z
python
def E2R(self, array, rlzi): z = numpy.zeros((self.num_rlzs,) + array.shape[1:], array.dtype) for a, r in zip(array, rlzi): z[self.rlz2idx[r]] += a return z
[ "def", "E2R", "(", "self", ",", "array", ",", "rlzi", ")", ":", "z", "=", "numpy", ".", "zeros", "(", "(", "self", ".", "num_rlzs", ",", ")", "+", "array", ".", "shape", "[", "1", ":", "]", ",", "array", ".", "dtype", ")", "for", "a", ",", "r", "in", "zip", "(", "array", ",", "rlzi", ")", ":", "z", "[", "self", ".", "rlz2idx", "[", "r", "]", "]", "+=", "a", "return", "z" ]
:param array: an array of shape (E, ...) :param rlzi: an array of E realization indices :returns: an aggregated array of shape (R, ...)
[ ":", "param", "array", ":", "an", "array", "of", "shape", "(", "E", "...", ")", ":", "param", "rlzi", ":", "an", "array", "of", "E", "realization", "indices", ":", "returns", ":", "an", "aggregated", "array", "of", "shape", "(", "R", "...", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L662-L671
gem/oq-engine
openquake/hazardlib/gsim/bindi_2017.py
BindiEtAl2017Rjb.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. """ # extracting dictionary of coefficients specific to required # intensity measure type. C = self.COEFFS[imt] mean = (self._get_magnitude_scaling(C, rup.mag) + self._get_distance_scaling(C, dists, rup.mag) + self._get_site_term(C, sites.vs30)) # Mean is returned in terms of m/s^2. Need to convert to g mean -= np.log(g) stddevs = self.get_stddevs(C, sites.vs30.shape, stddev_types) return mean + self.adjustment_factor, stddevs
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): C = self.COEFFS[imt] mean = (self._get_magnitude_scaling(C, rup.mag) + self._get_distance_scaling(C, dists, rup.mag) + self._get_site_term(C, sites.vs30)) mean -= np.log(g) stddevs = self.get_stddevs(C, sites.vs30.shape, stddev_types) return mean + self.adjustment_factor, stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "# extracting dictionary of coefficients specific to required", "# intensity measure type.", "C", "=", "self", ".", "COEFFS", "[", "imt", "]", "mean", "=", "(", "self", ".", "_get_magnitude_scaling", "(", "C", ",", "rup", ".", "mag", ")", "+", "self", ".", "_get_distance_scaling", "(", "C", ",", "dists", ",", "rup", ".", "mag", ")", "+", "self", ".", "_get_site_term", "(", "C", ",", "sites", ".", "vs30", ")", ")", "# Mean is returned in terms of m/s^2. Need to convert to g", "mean", "-=", "np", ".", "log", "(", "g", ")", "stddevs", "=", "self", ".", "get_stddevs", "(", "C", ",", "sites", ".", "vs30", ".", "shape", ",", "stddev_types", ")", "return", "mean", "+", "self", ".", "adjustment_factor", ",", "stddevs" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/bindi_2017.py#L79-L96
gem/oq-engine
openquake/hazardlib/gsim/bindi_2017.py
BindiEtAl2017Rjb._get_magnitude_scaling
def _get_magnitude_scaling(self, C, mag): """ Implements the magnitude scaling function F(M) presented in equation 4 """ if mag < self.CONSTANTS["mh"]: return C["e1"] + C["b1"] * (mag - self.CONSTANTS["mref"]) +\ C["b2"] * ((mag - self.CONSTANTS["mref"]) ** 2.) else: d_m = self.CONSTANTS["mh"] - self.CONSTANTS["mref"] return C["e1"] + C["b3"] * (mag - self.CONSTANTS["mh"]) +\ (C["b1"] * d_m) + C["b2"] * (d_m ** 2.)
python
def _get_magnitude_scaling(self, C, mag): if mag < self.CONSTANTS["mh"]: return C["e1"] + C["b1"] * (mag - self.CONSTANTS["mref"]) +\ C["b2"] * ((mag - self.CONSTANTS["mref"]) ** 2.) else: d_m = self.CONSTANTS["mh"] - self.CONSTANTS["mref"] return C["e1"] + C["b3"] * (mag - self.CONSTANTS["mh"]) +\ (C["b1"] * d_m) + C["b2"] * (d_m ** 2.)
[ "def", "_get_magnitude_scaling", "(", "self", ",", "C", ",", "mag", ")", ":", "if", "mag", "<", "self", ".", "CONSTANTS", "[", "\"mh\"", "]", ":", "return", "C", "[", "\"e1\"", "]", "+", "C", "[", "\"b1\"", "]", "*", "(", "mag", "-", "self", ".", "CONSTANTS", "[", "\"mref\"", "]", ")", "+", "C", "[", "\"b2\"", "]", "*", "(", "(", "mag", "-", "self", ".", "CONSTANTS", "[", "\"mref\"", "]", ")", "**", "2.", ")", "else", ":", "d_m", "=", "self", ".", "CONSTANTS", "[", "\"mh\"", "]", "-", "self", ".", "CONSTANTS", "[", "\"mref\"", "]", "return", "C", "[", "\"e1\"", "]", "+", "C", "[", "\"b3\"", "]", "*", "(", "mag", "-", "self", ".", "CONSTANTS", "[", "\"mh\"", "]", ")", "+", "(", "C", "[", "\"b1\"", "]", "*", "d_m", ")", "+", "C", "[", "\"b2\"", "]", "*", "(", "d_m", "**", "2.", ")" ]
Implements the magnitude scaling function F(M) presented in equation 4
[ "Implements", "the", "magnitude", "scaling", "function", "F", "(", "M", ")", "presented", "in", "equation", "4" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/bindi_2017.py#L98-L108