Dataset schema (column name, type, observed value lengths):

repository_name: string, length 5 to 67
func_path_in_repository: string, length 4 to 234
func_name: string, length 0 to 314
whole_func_string: string, length 52 to 3.87M
language: string, 6 distinct values
func_code_string: string, length 39 to 1.84M
func_code_tokens: sequence, length 15 to 672k
func_documentation_string: string, length 1 to 47.2k
func_documentation_tokens: sequence, length 1 to 3.92k
split_name: string, 1 distinct value
func_code_url: string, length 85 to 339
gem/oq-engine
openquake/hazardlib/gsim/tavakoli_pezeshk_2005.py
TavakoliPezeshk2005._compute_anelastic_attenuation_term
def _compute_anelastic_attenuation_term(self, C, rrup, mag):
    """
    Compute magnitude-distance scaling term as defined in equation 21,
    page 2291 (Tavakoli and Pezeshk, 2005)
    """
    r = (rrup**2. + (C['c5'] * np.exp(C['c6'] * mag +
                     C['c7'] * (8.5 - mag)**2.5))**2.)**.5
    f3 = ((C['c4'] + C['c13'] * mag) * np.log(r) +
          (C['c8'] + C['c12'] * mag) * r)
    return f3
python
def _compute_anelastic_attenuation_term(self, C, rrup, mag): r = (rrup**2. + (C['c5'] * np.exp(C['c6'] * mag + C['c7'] * (8.5 - mag)**2.5))**2.)**.5 f3 = ((C['c4'] + C['c13'] * mag) * np.log(r) + (C['c8'] + C['c12'] * mag) * r) return f3
[ "def", "_compute_anelastic_attenuation_term", "(", "self", ",", "C", ",", "rrup", ",", "mag", ")", ":", "r", "=", "(", "rrup", "**", "2.", "+", "(", "C", "[", "'c5'", "]", "*", "np", ".", "exp", "(", "C", "[", "'c6'", "]", "*", "mag", "+", "C", "[", "'c7'", "]", "*", "(", "8.5", "-", "mag", ")", "**", "2.5", ")", ")", "**", "2.", ")", "**", ".5", "f3", "=", "(", "(", "C", "[", "'c4'", "]", "+", "C", "[", "'c13'", "]", "*", "mag", ")", "*", "np", ".", "log", "(", "r", ")", "+", "(", "C", "[", "'c8'", "]", "+", "C", "[", "'c12'", "]", "*", "mag", ")", "*", "r", ")", "return", "f3" ]
Compute magnitude-distance scaling term as defined in equation 21, page 2291 (Tavakoli and Pezeshk, 2005)
[ "Compute", "magnitude", "-", "distance", "scaling", "term", "as", "defined", "in", "equation", "21", "page", "2291", "(", "Tavakoli", "and", "Pezeshk", "2005", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/tavakoli_pezeshk_2005.py#L157-L166
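Written out as formulas, this is an editorial transcription of the code above; the symbols M for mag, R_rup for rrup and c_4..c_13 for the C coefficients are naming assumptions, not taken from the paper:

R = \sqrt{R_{rup}^{2} + \left(c_{5}\, e^{\,c_{6} M + c_{7}(8.5 - M)^{2.5}}\right)^{2}}, \qquad
f_{3} = (c_{4} + c_{13} M)\,\ln R + (c_{8} + c_{12} M)\, R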
gem/oq-engine
openquake/hazardlib/geo/surface/complex_fault.py
edge_node
def edge_node(name, points):
    """
    :param name: 'faultTopEdge', 'intermediateEdge' or 'faultBottomEdge'
    :param points: a list of Point objects
    :returns: a Node of kind faultTopEdge, intermediateEdge or faultBottomEdge
    """
    line = []
    for point in points:
        line.append(point.longitude)
        line.append(point.latitude)
        line.append(point.depth)
    pos = Node('gml:posList', {}, line)
    node = Node(name, nodes=[Node('gml:LineString', nodes=[pos])])
    return node
python
def edge_node(name, points): line = [] for point in points: line.append(point.longitude) line.append(point.latitude) line.append(point.depth) pos = Node('gml:posList', {}, line) node = Node(name, nodes=[Node('gml:LineString', nodes=[pos])]) return node
[ "def", "edge_node", "(", "name", ",", "points", ")", ":", "line", "=", "[", "]", "for", "point", "in", "points", ":", "line", ".", "append", "(", "point", ".", "longitude", ")", "line", ".", "append", "(", "point", ".", "latitude", ")", "line", ".", "append", "(", "point", ".", "depth", ")", "pos", "=", "Node", "(", "'gml:posList'", ",", "{", "}", ",", "line", ")", "node", "=", "Node", "(", "name", ",", "nodes", "=", "[", "Node", "(", "'gml:LineString'", ",", "nodes", "=", "[", "pos", "]", ")", "]", ")", "return", "node" ]
:param name: 'faultTopEdge', 'intermediateEdge' or 'faultBottomEdge' :param points: a list of Point objects :returns: a Node of kind faultTopEdge, intermediateEdge or faultBottomEdge
[ ":", "param", "name", ":", "faultTopEdge", "intermediateEdge", "or", "faultBottomEdge", ":", "param", "points", ":", "a", "list", "of", "Point", "objects", ":", "returns", ":", "a", "Node", "of", "kind", "faultTopEdge", "intermediateEdge", "or", "faultBottomEdge" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/complex_fault.py#L36-L49
gem/oq-engine
openquake/hazardlib/geo/surface/complex_fault.py
complex_fault_node
def complex_fault_node(edges):
    """
    :param edges: a list of lists of points
    :returns: a Node of kind complexFaultGeometry
    """
    node = Node('complexFaultGeometry')
    node.append(edge_node('faultTopEdge', edges[0]))
    for edge in edges[1:-1]:
        node.append(edge_node('intermediateEdge', edge))
    node.append(edge_node('faultBottomEdge', edges[-1]))
    return node
python
def complex_fault_node(edges): node = Node('complexFaultGeometry') node.append(edge_node('faultTopEdge', edges[0])) for edge in edges[1:-1]: node.append(edge_node('intermediateEdge', edge)) node.append(edge_node('faultBottomEdge', edges[-1])) return node
[ "def", "complex_fault_node", "(", "edges", ")", ":", "node", "=", "Node", "(", "'complexFaultGeometry'", ")", "node", ".", "append", "(", "edge_node", "(", "'faultTopEdge'", ",", "edges", "[", "0", "]", ")", ")", "for", "edge", "in", "edges", "[", "1", ":", "-", "1", "]", ":", "node", ".", "append", "(", "edge_node", "(", "'intermediateEdge'", ",", "edge", ")", ")", "node", ".", "append", "(", "edge_node", "(", "'faultBottomEdge'", ",", "edges", "[", "-", "1", "]", ")", ")", "return", "node" ]
:param edges: a list of lists of points :returns: a Node of kind complexFaultGeometry
[ ":", "param", "edges", ":", "a", "list", "of", "lists", "of", "points", ":", "returns", ":", "a", "Node", "of", "kind", "complexFaultGeometry" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/complex_fault.py#L52-L62
gem/oq-engine
openquake/hazardlib/geo/surface/complex_fault.py
ComplexFaultSurface.get_dip
def get_dip(self):
    """
    Return the fault dip as the average dip over the mesh.

    The average dip is defined as the weighted mean inclination of all
    the mesh cells. See
    :meth:`openquake.hazardlib.geo.mesh.RectangularMesh.get_mean_inclination_and_azimuth`

    :returns:
        The average dip, in decimal degrees.
    """
    # uses the same approach as in simple fault surface
    if self.dip is None:
        mesh = self.mesh
        self.dip, self.strike = mesh.get_mean_inclination_and_azimuth()
    return self.dip
python
def get_dip(self): if self.dip is None: mesh = self.mesh self.dip, self.strike = mesh.get_mean_inclination_and_azimuth() return self.dip
[ "def", "get_dip", "(", "self", ")", ":", "# uses the same approach as in simple fault surface", "if", "self", ".", "dip", "is", "None", ":", "mesh", "=", "self", ".", "mesh", "self", ".", "dip", ",", "self", ".", "strike", "=", "mesh", ".", "get_mean_inclination_and_azimuth", "(", ")", "return", "self", ".", "dip" ]
Return the fault dip as the average dip over the mesh. The average dip is defined as the weighted mean inclination of all the mesh cells. See :meth:`openquake.hazardlib.geo.mesh.RectangularMesh.get_mean_inclination_and_azimuth` :returns: The average dip, in decimal degrees.
[ "Return", "the", "fault", "dip", "as", "the", "average", "dip", "over", "the", "mesh", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/complex_fault.py#L96-L111
gem/oq-engine
openquake/hazardlib/geo/surface/complex_fault.py
ComplexFaultSurface.check_aki_richards_convention
def check_aki_richards_convention(cls, edges):
    """
    Verify that surface (as defined by corner points) conforms with Aki and
    Richard convention (i.e. surface dips right of surface strike)

    This method doesn't have to be called by hands before creating the
    surface object, because it is called from :meth:`from_fault_data`.
    """
    # 1) extract 4 corner points of surface mesh
    # 2) compute cross products between left and right edges and top edge
    # (these define vectors normal to the surface)
    # 3) compute dot products between cross product results and
    # position vectors associated with upper left and right corners (if
    # both angles are less then 90 degrees then the surface is correctly
    # defined)
    ul = edges[0].points[0]
    ur = edges[0].points[-1]
    bl = edges[-1].points[0]
    br = edges[-1].points[-1]
    ul, ur, bl, br = spherical_to_cartesian(
        [ul.longitude, ur.longitude, bl.longitude, br.longitude],
        [ul.latitude, ur.latitude, bl.latitude, br.latitude],
        [ul.depth, ur.depth, bl.depth, br.depth],
    )
    top_edge = ur - ul
    left_edge = bl - ul
    right_edge = br - ur
    left_cross_top = numpy.cross(left_edge, top_edge)
    right_cross_top = numpy.cross(right_edge, top_edge)
    left_cross_top /= numpy.sqrt(numpy.dot(left_cross_top, left_cross_top))
    right_cross_top /= numpy.sqrt(
        numpy.dot(right_cross_top, right_cross_top)
    )
    ul /= numpy.sqrt(numpy.dot(ul, ul))
    ur /= numpy.sqrt(numpy.dot(ur, ur))
    # rounding to 1st digit, to avoid ValueError raised for floating point
    # imprecision
    angle_ul = round(
        numpy.degrees(numpy.arccos(numpy.dot(ul, left_cross_top))), 1
    )
    angle_ur = round(
        numpy.degrees(numpy.arccos(numpy.dot(ur, right_cross_top))), 1
    )
    if (angle_ul > 90) or (angle_ur > 90):
        raise ValueError(
            "Surface does not conform with Aki & Richards convention"
        )
python
def check_aki_richards_convention(cls, edges): ul = edges[0].points[0] ur = edges[0].points[-1] bl = edges[-1].points[0] br = edges[-1].points[-1] ul, ur, bl, br = spherical_to_cartesian( [ul.longitude, ur.longitude, bl.longitude, br.longitude], [ul.latitude, ur.latitude, bl.latitude, br.latitude], [ul.depth, ur.depth, bl.depth, br.depth], ) top_edge = ur - ul left_edge = bl - ul right_edge = br - ur left_cross_top = numpy.cross(left_edge, top_edge) right_cross_top = numpy.cross(right_edge, top_edge) left_cross_top /= numpy.sqrt(numpy.dot(left_cross_top, left_cross_top)) right_cross_top /= numpy.sqrt( numpy.dot(right_cross_top, right_cross_top) ) ul /= numpy.sqrt(numpy.dot(ul, ul)) ur /= numpy.sqrt(numpy.dot(ur, ur)) angle_ul = round( numpy.degrees(numpy.arccos(numpy.dot(ul, left_cross_top))), 1 ) angle_ur = round( numpy.degrees(numpy.arccos(numpy.dot(ur, right_cross_top))), 1 ) if (angle_ul > 90) or (angle_ur > 90): raise ValueError( "Surface does not conform with Aki & Richards convention" )
[ "def", "check_aki_richards_convention", "(", "cls", ",", "edges", ")", ":", "# 1) extract 4 corner points of surface mesh", "# 2) compute cross products between left and right edges and top edge", "# (these define vectors normal to the surface)", "# 3) compute dot products between cross product results and", "# position vectors associated with upper left and right corners (if", "# both angles are less then 90 degrees then the surface is correctly", "# defined)", "ul", "=", "edges", "[", "0", "]", ".", "points", "[", "0", "]", "ur", "=", "edges", "[", "0", "]", ".", "points", "[", "-", "1", "]", "bl", "=", "edges", "[", "-", "1", "]", ".", "points", "[", "0", "]", "br", "=", "edges", "[", "-", "1", "]", ".", "points", "[", "-", "1", "]", "ul", ",", "ur", ",", "bl", ",", "br", "=", "spherical_to_cartesian", "(", "[", "ul", ".", "longitude", ",", "ur", ".", "longitude", ",", "bl", ".", "longitude", ",", "br", ".", "longitude", "]", ",", "[", "ul", ".", "latitude", ",", "ur", ".", "latitude", ",", "bl", ".", "latitude", ",", "br", ".", "latitude", "]", ",", "[", "ul", ".", "depth", ",", "ur", ".", "depth", ",", "bl", ".", "depth", ",", "br", ".", "depth", "]", ",", ")", "top_edge", "=", "ur", "-", "ul", "left_edge", "=", "bl", "-", "ul", "right_edge", "=", "br", "-", "ur", "left_cross_top", "=", "numpy", ".", "cross", "(", "left_edge", ",", "top_edge", ")", "right_cross_top", "=", "numpy", ".", "cross", "(", "right_edge", ",", "top_edge", ")", "left_cross_top", "/=", "numpy", ".", "sqrt", "(", "numpy", ".", "dot", "(", "left_cross_top", ",", "left_cross_top", ")", ")", "right_cross_top", "/=", "numpy", ".", "sqrt", "(", "numpy", ".", "dot", "(", "right_cross_top", ",", "right_cross_top", ")", ")", "ul", "/=", "numpy", ".", "sqrt", "(", "numpy", ".", "dot", "(", "ul", ",", "ul", ")", ")", "ur", "/=", "numpy", ".", "sqrt", "(", "numpy", ".", "dot", "(", "ur", ",", "ur", ")", ")", "# rounding to 1st digit, to avoid ValueError raised for floating point", "# imprecision", "angle_ul", "=", "round", "(", "numpy", ".", "degrees", "(", "numpy", ".", "arccos", "(", "numpy", ".", "dot", "(", "ul", ",", "left_cross_top", ")", ")", ")", ",", "1", ")", "angle_ur", "=", "round", "(", "numpy", ".", "degrees", "(", "numpy", ".", "arccos", "(", "numpy", ".", "dot", "(", "ur", ",", "right_cross_top", ")", ")", ")", ",", "1", ")", "if", "(", "angle_ul", ">", "90", ")", "or", "(", "angle_ur", ">", "90", ")", ":", "raise", "ValueError", "(", "\"Surface does not conform with Aki & Richards convention\"", ")" ]
Verify that surface (as defined by corner points) conforms with Aki and Richard convention (i.e. surface dips right of surface strike) This method doesn't have to be called by hands before creating the surface object, because it is called from :meth:`from_fault_data`.
[ "Verify", "that", "surface", "(", "as", "defined", "by", "corner", "points", ")", "conforms", "with", "Aki", "and", "Richard", "convention", "(", "i", ".", "e", ".", "surface", "dips", "right", "of", "surface", "strike", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/complex_fault.py#L129-L179
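The dip-direction test outlined in the comments above can be illustrated on toy data. The following is a minimal numpy sketch, not the library code: the corner coordinates are made-up stand-ins for spherical_to_cartesian output, and only the upper-left corner is checked.

import numpy

# made-up Earth-centered Cartesian corners (km); the real check derives
# them with spherical_to_cartesian(lon, lat, depth)
ul = numpy.array([6371., 0., 0.])      # upper-left corner
ur = numpy.array([6371., 50., 0.])     # upper-right corner, 50 km along strike
bl = numpy.array([6351., 0., -30.])    # bottom-left corner, deeper and offset

top_edge = ur - ul
left_edge = bl - ul
normal = numpy.cross(left_edge, top_edge)        # vector normal to the surface
normal /= numpy.sqrt(numpy.dot(normal, normal))
u = ul / numpy.sqrt(numpy.dot(ul, ul))           # position vector of the corner
angle = numpy.degrees(numpy.arccos(numpy.dot(u, normal)))
print(round(angle, 1))  # ~33.7 degrees, <= 90, so this toy surface passes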
gem/oq-engine
openquake/hazardlib/geo/surface/complex_fault.py
ComplexFaultSurface.check_surface_validity
def check_surface_validity(cls, edges):
    """
    Check validity of the surface.

    Project edge points to vertical plane anchored to surface upper left
    edge and with strike equal to top edge strike. Check that resulting
    polygon is valid.

    This method doesn't have to be called by hands before creating the
    surface object, because it is called from :meth:`from_fault_data`.
    """
    # extract coordinates of surface boundary (as defined from edges)
    full_boundary = []
    left_boundary = []
    right_boundary = []

    for i in range(1, len(edges) - 1):
        left_boundary.append(edges[i].points[0])
        right_boundary.append(edges[i].points[-1])

    full_boundary.extend(edges[0].points)
    full_boundary.extend(right_boundary)
    full_boundary.extend(edges[-1].points[::-1])
    full_boundary.extend(left_boundary[::-1])

    lons = [p.longitude for p in full_boundary]
    lats = [p.latitude for p in full_boundary]
    depths = [p.depth for p in full_boundary]

    # define reference plane. Corner points are separated by an arbitrary
    # distance of 10 km. The mesh spacing is set to 2 km. Both corner
    # distance and mesh spacing values do not affect the algorithm results.
    ul = edges[0].points[0]
    strike = ul.azimuth(edges[0].points[-1])
    dist = 10.

    ur = ul.point_at(dist, 0, strike)
    bl = Point(ul.longitude, ul.latitude, ul.depth + dist)
    br = bl.point_at(dist, 0, strike)

    # project surface boundary to reference plane and check for
    # validity.
    ref_plane = PlanarSurface.from_corner_points(ul, ur, br, bl)
    _, xx, yy = ref_plane._project(
        spherical_to_cartesian(lons, lats, depths))
    coords = [(x, y) for x, y in zip(xx, yy)]
    p = shapely.geometry.Polygon(coords)
    if not p.is_valid:
        raise ValueError('Edges points are not in the right order')
python
def check_surface_validity(cls, edges): full_boundary = [] left_boundary = [] right_boundary = [] for i in range(1, len(edges) - 1): left_boundary.append(edges[i].points[0]) right_boundary.append(edges[i].points[-1]) full_boundary.extend(edges[0].points) full_boundary.extend(right_boundary) full_boundary.extend(edges[-1].points[::-1]) full_boundary.extend(left_boundary[::-1]) lons = [p.longitude for p in full_boundary] lats = [p.latitude for p in full_boundary] depths = [p.depth for p in full_boundary] ul = edges[0].points[0] strike = ul.azimuth(edges[0].points[-1]) dist = 10. ur = ul.point_at(dist, 0, strike) bl = Point(ul.longitude, ul.latitude, ul.depth + dist) br = bl.point_at(dist, 0, strike) ref_plane = PlanarSurface.from_corner_points(ul, ur, br, bl) _, xx, yy = ref_plane._project( spherical_to_cartesian(lons, lats, depths)) coords = [(x, y) for x, y in zip(xx, yy)] p = shapely.geometry.Polygon(coords) if not p.is_valid: raise ValueError('Edges points are not in the right order')
[ "def", "check_surface_validity", "(", "cls", ",", "edges", ")", ":", "# extract coordinates of surface boundary (as defined from edges)", "full_boundary", "=", "[", "]", "left_boundary", "=", "[", "]", "right_boundary", "=", "[", "]", "for", "i", "in", "range", "(", "1", ",", "len", "(", "edges", ")", "-", "1", ")", ":", "left_boundary", ".", "append", "(", "edges", "[", "i", "]", ".", "points", "[", "0", "]", ")", "right_boundary", ".", "append", "(", "edges", "[", "i", "]", ".", "points", "[", "-", "1", "]", ")", "full_boundary", ".", "extend", "(", "edges", "[", "0", "]", ".", "points", ")", "full_boundary", ".", "extend", "(", "right_boundary", ")", "full_boundary", ".", "extend", "(", "edges", "[", "-", "1", "]", ".", "points", "[", ":", ":", "-", "1", "]", ")", "full_boundary", ".", "extend", "(", "left_boundary", "[", ":", ":", "-", "1", "]", ")", "lons", "=", "[", "p", ".", "longitude", "for", "p", "in", "full_boundary", "]", "lats", "=", "[", "p", ".", "latitude", "for", "p", "in", "full_boundary", "]", "depths", "=", "[", "p", ".", "depth", "for", "p", "in", "full_boundary", "]", "# define reference plane. Corner points are separated by an arbitrary", "# distance of 10 km. The mesh spacing is set to 2 km. Both corner", "# distance and mesh spacing values do not affect the algorithm results.", "ul", "=", "edges", "[", "0", "]", ".", "points", "[", "0", "]", "strike", "=", "ul", ".", "azimuth", "(", "edges", "[", "0", "]", ".", "points", "[", "-", "1", "]", ")", "dist", "=", "10.", "ur", "=", "ul", ".", "point_at", "(", "dist", ",", "0", ",", "strike", ")", "bl", "=", "Point", "(", "ul", ".", "longitude", ",", "ul", ".", "latitude", ",", "ul", ".", "depth", "+", "dist", ")", "br", "=", "bl", ".", "point_at", "(", "dist", ",", "0", ",", "strike", ")", "# project surface boundary to reference plane and check for", "# validity.", "ref_plane", "=", "PlanarSurface", ".", "from_corner_points", "(", "ul", ",", "ur", ",", "br", ",", "bl", ")", "_", ",", "xx", ",", "yy", "=", "ref_plane", ".", "_project", "(", "spherical_to_cartesian", "(", "lons", ",", "lats", ",", "depths", ")", ")", "coords", "=", "[", "(", "x", ",", "y", ")", "for", "x", ",", "y", "in", "zip", "(", "xx", ",", "yy", ")", "]", "p", "=", "shapely", ".", "geometry", ".", "Polygon", "(", "coords", ")", "if", "not", "p", ".", "is_valid", ":", "raise", "ValueError", "(", "'Edges points are not in the right order'", ")" ]
Check validity of the surface. Project edge points to vertical plane anchored to surface upper left edge and with strike equal to top edge strike. Check that resulting polygon is valid. This method doesn't have to be called by hands before creating the surface object, because it is called from :meth:`from_fault_data`.
[ "Check", "validity", "of", "the", "surface", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/complex_fault.py#L182-L230
gem/oq-engine
openquake/hazardlib/geo/surface/complex_fault.py
ComplexFaultSurface.check_fault_data
def check_fault_data(cls, edges, mesh_spacing):
    """
    Verify the fault data and raise ``ValueError`` if anything is wrong.

    This method doesn't have to be called by hands before creating the
    surface object, because it is called from :meth:`from_fault_data`.
    """
    if not len(edges) >= 2:
        raise ValueError("at least two edges are required")
    if not all(len(edge) >= 2 for edge in edges):
        raise ValueError("at least two points must be defined "
                         "in each edge")
    if not mesh_spacing > 0.0:
        raise ValueError("mesh spacing must be positive")
    cls.check_surface_validity(edges)
    cls.check_aki_richards_convention(edges)
python
def check_fault_data(cls, edges, mesh_spacing): if not len(edges) >= 2: raise ValueError("at least two edges are required") if not all(len(edge) >= 2 for edge in edges): raise ValueError("at least two points must be defined " "in each edge") if not mesh_spacing > 0.0: raise ValueError("mesh spacing must be positive") cls.check_surface_validity(edges) cls.check_aki_richards_convention(edges)
[ "def", "check_fault_data", "(", "cls", ",", "edges", ",", "mesh_spacing", ")", ":", "if", "not", "len", "(", "edges", ")", ">=", "2", ":", "raise", "ValueError", "(", "\"at least two edges are required\"", ")", "if", "not", "all", "(", "len", "(", "edge", ")", ">=", "2", "for", "edge", "in", "edges", ")", ":", "raise", "ValueError", "(", "\"at least two points must be defined \"", "\"in each edge\"", ")", "if", "not", "mesh_spacing", ">", "0.0", ":", "raise", "ValueError", "(", "\"mesh spacing must be positive\"", ")", "cls", ".", "check_surface_validity", "(", "edges", ")", "cls", ".", "check_aki_richards_convention", "(", "edges", ")" ]
Verify the fault data and raise ``ValueError`` if anything is wrong. This method doesn't have to be called by hands before creating the surface object, because it is called from :meth:`from_fault_data`.
[ "Verify", "the", "fault", "data", "and", "raise", "ValueError", "if", "anything", "is", "wrong", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/complex_fault.py#L233-L249
gem/oq-engine
openquake/hazardlib/geo/surface/complex_fault.py
ComplexFaultSurface.from_fault_data
def from_fault_data(cls, edges, mesh_spacing):
    """
    Create and return a fault surface using fault source data.

    :param edges:
        A list of at least two horizontal edges of the surface as instances
        of :class:`openquake.hazardlib.geo.line.Line`. The list should be
        in top-to-bottom order (the shallowest edge first).
    :param mesh_spacing:
        Distance between two subsequent points in a mesh, in km.
    :returns:
        An instance of :class:`ComplexFaultSurface` created using that data.
    :raises ValueError:
        If requested mesh spacing is too big for the surface geometry
        (doesn't allow to put a single mesh cell along length and/or width).

    Uses :meth:`check_fault_data` for checking parameters.
    """
    cls.check_fault_data(edges, mesh_spacing)
    surface_nodes = [complex_fault_node(edges)]
    mean_length = numpy.mean([edge.get_length() for edge in edges])
    num_hor_points = int(round(mean_length / mesh_spacing)) + 1
    if num_hor_points <= 1:
        raise ValueError(
            'mesh spacing %.1f km is too big for mean length %.1f km' %
            (mesh_spacing, mean_length)
        )
    edges = [edge.resample_to_num_points(num_hor_points).points
             for i, edge in enumerate(edges)]

    vert_edges = [Line(v_edge) for v_edge in zip(*edges)]
    mean_width = numpy.mean([v_edge.get_length() for v_edge in vert_edges])
    num_vert_points = int(round(mean_width / mesh_spacing)) + 1
    if num_vert_points <= 1:
        raise ValueError(
            'mesh spacing %.1f km is too big for mean width %.1f km' %
            (mesh_spacing, mean_width)
        )

    points = zip(*[v_edge.resample_to_num_points(num_vert_points).points
                   for v_edge in vert_edges])
    mesh = RectangularMesh.from_points_list(list(points))
    assert 1 not in mesh.shape
    self = cls(mesh)
    self.surface_nodes = surface_nodes
    return self
python
def from_fault_data(cls, edges, mesh_spacing): cls.check_fault_data(edges, mesh_spacing) surface_nodes = [complex_fault_node(edges)] mean_length = numpy.mean([edge.get_length() for edge in edges]) num_hor_points = int(round(mean_length / mesh_spacing)) + 1 if num_hor_points <= 1: raise ValueError( 'mesh spacing %.1f km is too big for mean length %.1f km' % (mesh_spacing, mean_length) ) edges = [edge.resample_to_num_points(num_hor_points).points for i, edge in enumerate(edges)] vert_edges = [Line(v_edge) for v_edge in zip(*edges)] mean_width = numpy.mean([v_edge.get_length() for v_edge in vert_edges]) num_vert_points = int(round(mean_width / mesh_spacing)) + 1 if num_vert_points <= 1: raise ValueError( 'mesh spacing %.1f km is too big for mean width %.1f km' % (mesh_spacing, mean_width) ) points = zip(*[v_edge.resample_to_num_points(num_vert_points).points for v_edge in vert_edges]) mesh = RectangularMesh.from_points_list(list(points)) assert 1 not in mesh.shape self = cls(mesh) self.surface_nodes = surface_nodes return self
[ "def", "from_fault_data", "(", "cls", ",", "edges", ",", "mesh_spacing", ")", ":", "cls", ".", "check_fault_data", "(", "edges", ",", "mesh_spacing", ")", "surface_nodes", "=", "[", "complex_fault_node", "(", "edges", ")", "]", "mean_length", "=", "numpy", ".", "mean", "(", "[", "edge", ".", "get_length", "(", ")", "for", "edge", "in", "edges", "]", ")", "num_hor_points", "=", "int", "(", "round", "(", "mean_length", "/", "mesh_spacing", ")", ")", "+", "1", "if", "num_hor_points", "<=", "1", ":", "raise", "ValueError", "(", "'mesh spacing %.1f km is too big for mean length %.1f km'", "%", "(", "mesh_spacing", ",", "mean_length", ")", ")", "edges", "=", "[", "edge", ".", "resample_to_num_points", "(", "num_hor_points", ")", ".", "points", "for", "i", ",", "edge", "in", "enumerate", "(", "edges", ")", "]", "vert_edges", "=", "[", "Line", "(", "v_edge", ")", "for", "v_edge", "in", "zip", "(", "*", "edges", ")", "]", "mean_width", "=", "numpy", ".", "mean", "(", "[", "v_edge", ".", "get_length", "(", ")", "for", "v_edge", "in", "vert_edges", "]", ")", "num_vert_points", "=", "int", "(", "round", "(", "mean_width", "/", "mesh_spacing", ")", ")", "+", "1", "if", "num_vert_points", "<=", "1", ":", "raise", "ValueError", "(", "'mesh spacing %.1f km is too big for mean width %.1f km'", "%", "(", "mesh_spacing", ",", "mean_width", ")", ")", "points", "=", "zip", "(", "*", "[", "v_edge", ".", "resample_to_num_points", "(", "num_vert_points", ")", ".", "points", "for", "v_edge", "in", "vert_edges", "]", ")", "mesh", "=", "RectangularMesh", ".", "from_points_list", "(", "list", "(", "points", ")", ")", "assert", "1", "not", "in", "mesh", ".", "shape", "self", "=", "cls", "(", "mesh", ")", "self", ".", "surface_nodes", "=", "surface_nodes", "return", "self" ]
Create and return a fault surface using fault source data. :param edges: A list of at least two horizontal edges of the surface as instances of :class:`openquake.hazardlib.geo.line.Line`. The list should be in top-to-bottom order (the shallowest edge first). :param mesh_spacing: Distance between two subsequent points in a mesh, in km. :returns: An instance of :class:`ComplexFaultSurface` created using that data. :raises ValueError: If requested mesh spacing is too big for the surface geometry (doesn't allow to put a single mesh cell along length and/or width). Uses :meth:`check_fault_data` for checking parameters.
[ "Create", "and", "return", "a", "fault", "surface", "using", "fault", "source", "data", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/complex_fault.py#L252-L299
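The resampling arithmetic above determines when the "mesh spacing too big" error fires; a small sketch with made-up numbers:

# one point every mesh_spacing km along the mean edge length, plus one
mean_length, mesh_spacing = 34.0, 10.0               # km, made-up values
print(int(round(mean_length / mesh_spacing)) + 1)    # 4 horizontal points

mean_length, mesh_spacing = 34.0, 80.0               # spacing larger than the edge
print(int(round(mean_length / mesh_spacing)) + 1)    # 1 -> from_fault_data raises ValueError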
gem/oq-engine
openquake/hazardlib/geo/surface/complex_fault.py
ComplexFaultSurface.surface_projection_from_fault_data
def surface_projection_from_fault_data(cls, edges):
    """
    Get a surface projection of the complex fault surface.

    :param edges:
        A list of horizontal edges of the surface as instances
        of :class:`openquake.hazardlib.geo.line.Line`.
    :returns:
        Instance of :class:`~openquake.hazardlib.geo.polygon.Polygon`
        describing the surface projection of the complex fault.
    """
    # collect lons and lats of all the vertices of all the edges
    lons = []
    lats = []
    for edge in edges:
        for point in edge:
            lons.append(point.longitude)
            lats.append(point.latitude)
    lons = numpy.array(lons, dtype=float)
    lats = numpy.array(lats, dtype=float)
    return Mesh(lons, lats, depths=None).get_convex_hull()
python
def surface_projection_from_fault_data(cls, edges): lons = [] lats = [] for edge in edges: for point in edge: lons.append(point.longitude) lats.append(point.latitude) lons = numpy.array(lons, dtype=float) lats = numpy.array(lats, dtype=float) return Mesh(lons, lats, depths=None).get_convex_hull()
[ "def", "surface_projection_from_fault_data", "(", "cls", ",", "edges", ")", ":", "# collect lons and lats of all the vertices of all the edges", "lons", "=", "[", "]", "lats", "=", "[", "]", "for", "edge", "in", "edges", ":", "for", "point", "in", "edge", ":", "lons", ".", "append", "(", "point", ".", "longitude", ")", "lats", ".", "append", "(", "point", ".", "latitude", ")", "lons", "=", "numpy", ".", "array", "(", "lons", ",", "dtype", "=", "float", ")", "lats", "=", "numpy", ".", "array", "(", "lats", ",", "dtype", "=", "float", ")", "return", "Mesh", "(", "lons", ",", "lats", ",", "depths", "=", "None", ")", ".", "get_convex_hull", "(", ")" ]
Get a surface projection of the complex fault surface. :param edges: A list of horizontal edges of the surface as instances of :class:`openquake.hazardlib.geo.line.Line`. :returns: Instance of :class:`~openquake.hazardlib.geo.polygon.Polygon` describing the surface projection of the complex fault.
[ "Get", "a", "surface", "projection", "of", "the", "complex", "fault", "surface", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/complex_fault.py#L302-L323
gem/oq-engine
openquake/calculators/base.py
fix_ones
def fix_ones(pmap):
    """
    Physically, an extremely small intensity measure level can have an
    extremely large probability of exceedence, however that probability
    cannot be exactly 1 unless the level is exactly 0. Numerically, the
    PoE can be 1 and this give issues when calculating the damage (there
    is a log(0) in
    :class:`openquake.risklib.scientific.annual_frequency_of_exceedence`).
    Here we solve the issue by replacing the unphysical probabilities 1
    with .9999999999999999 (the float64 closest to 1).
    """
    for sid in pmap:
        array = pmap[sid].array
        array[array == 1.] = .9999999999999999
    return pmap
python
def fix_ones(pmap): for sid in pmap: array = pmap[sid].array array[array == 1.] = .9999999999999999 return pmap
[ "def", "fix_ones", "(", "pmap", ")", ":", "for", "sid", "in", "pmap", ":", "array", "=", "pmap", "[", "sid", "]", ".", "array", "array", "[", "array", "==", "1.", "]", "=", ".9999999999999999", "return", "pmap" ]
Physically, an extremely small intensity measure level can have an extremely large probability of exceedence, however that probability cannot be exactly 1 unless the level is exactly 0. Numerically, the PoE can be 1 and this give issues when calculating the damage (there is a log(0) in :class:`openquake.risklib.scientific.annual_frequency_of_exceedence`). Here we solve the issue by replacing the unphysical probabilities 1 with .9999999999999999 (the float64 closest to 1).
[ "Physically", "an", "extremely", "small", "intensity", "measure", "level", "can", "have", "an", "extremely", "large", "probability", "of", "exceedence", "however", "that", "probability", "cannot", "be", "exactly", "1", "unless", "the", "level", "is", "exactly", "0", ".", "Numerically", "the", "PoE", "can", "be", "1", "and", "this", "give", "issues", "when", "calculating", "the", "damage", "(", "there", "is", "a", "log", "(", "0", ")", "in", ":", "class", ":", "openquake", ".", "risklib", ".", "scientific", ".", "annual_frequency_of_exceedence", ")", ".", "Here", "we", "solve", "the", "issue", "by", "replacing", "the", "unphysical", "probabilities", "1", "with", ".", "9999999999999999", "(", "the", "float64", "closest", "to", "1", ")", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L65-L79
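A minimal numpy sketch of why the clamping matters; the -log(1 - poe) form of the annual frequency of exceedence is an assumption based on the docstring's log(0) remark, not a transcription of the risklib function:

import numpy

poes = numpy.array([1.0, 0.999, 0.2])     # toy PoEs with an unphysical exact 1.0
with numpy.errstate(divide='ignore'):
    print(-numpy.log(1. - poes))          # [inf 6.907... 0.223...]

poes[poes == 1.] = .9999999999999999      # the same clamping applied by fix_ones
print(-numpy.log(1. - poes))              # all finite now (~36.7 for the first entry)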
gem/oq-engine
openquake/calculators/base.py
build_weights
def build_weights(realizations, imt_dt):
    """
    :returns: an array with the realization weights of shape (R, M)
    """
    arr = numpy.zeros((len(realizations), len(imt_dt.names)))
    for m, imt in enumerate(imt_dt.names):
        arr[:, m] = [rlz.weight[imt] for rlz in realizations]
    return arr
python
def build_weights(realizations, imt_dt): arr = numpy.zeros((len(realizations), len(imt_dt.names))) for m, imt in enumerate(imt_dt.names): arr[:, m] = [rlz.weight[imt] for rlz in realizations] return arr
[ "def", "build_weights", "(", "realizations", ",", "imt_dt", ")", ":", "arr", "=", "numpy", ".", "zeros", "(", "(", "len", "(", "realizations", ")", ",", "len", "(", "imt_dt", ".", "names", ")", ")", ")", "for", "m", ",", "imt", "in", "enumerate", "(", "imt_dt", ".", "names", ")", ":", "arr", "[", ":", ",", "m", "]", "=", "[", "rlz", ".", "weight", "[", "imt", "]", "for", "rlz", "in", "realizations", "]", "return", "arr" ]
:returns: an array with the realization weights of shape (R, M)
[ ":", "returns", ":", "an", "array", "with", "the", "realization", "weights", "of", "shape", "(", "R", "M", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L82-L89
gem/oq-engine
openquake/calculators/base.py
set_array
def set_array(longarray, shortarray):
    """
    :param longarray: a numpy array of floats of length L >= l
    :param shortarray: a numpy array of floats of length l

    Fill `longarray` with the values of `shortarray`, starting from the
    left. If `shortarry` is shorter than `longarray`, then the remaining
    elements on the right are filled with `numpy.nan` values.
    """
    longarray[:len(shortarray)] = shortarray
    longarray[len(shortarray):] = numpy.nan
python
def set_array(longarray, shortarray): longarray[:len(shortarray)] = shortarray longarray[len(shortarray):] = numpy.nan
[ "def", "set_array", "(", "longarray", ",", "shortarray", ")", ":", "longarray", "[", ":", "len", "(", "shortarray", ")", "]", "=", "shortarray", "longarray", "[", "len", "(", "shortarray", ")", ":", "]", "=", "numpy", ".", "nan" ]
:param longarray: a numpy array of floats of length L >= l :param shortarray: a numpy array of floats of length l Fill `longarray` with the values of `shortarray`, starting from the left. If `shortarry` is shorter than `longarray`, then the remaining elements on the right are filled with `numpy.nan` values.
[ ":", "param", "longarray", ":", "a", "numpy", "array", "of", "floats", "of", "length", "L", ">", "=", "l", ":", "param", "shortarray", ":", "a", "numpy", "array", "of", "floats", "of", "length", "l" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L92-L102
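A small self-contained usage sketch of the padding behaviour described above; the function body is repeated here so the snippet runs on its own:

import numpy

def set_array(longarray, shortarray):
    longarray[:len(shortarray)] = shortarray
    longarray[len(shortarray):] = numpy.nan

long_arr = numpy.zeros(5)
set_array(long_arr, numpy.array([1., 2., 3.]))
print(long_arr)   # [ 1.  2.  3. nan nan]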
gem/oq-engine
openquake/calculators/base.py
check_time_event
def check_time_event(oqparam, occupancy_periods):
    """
    Check the `time_event` parameter in the datastore, by comparing with
    the periods found in the exposure.
    """
    time_event = oqparam.time_event
    if time_event and time_event not in occupancy_periods:
        raise ValueError(
            'time_event is %s in %s, but the exposure contains %s' %
            (time_event, oqparam.inputs['job_ini'],
             ', '.join(occupancy_periods)))
python
def check_time_event(oqparam, occupancy_periods): time_event = oqparam.time_event if time_event and time_event not in occupancy_periods: raise ValueError( 'time_event is %s in %s, but the exposure contains %s' % (time_event, oqparam.inputs['job_ini'], ', '.join(occupancy_periods)))
[ "def", "check_time_event", "(", "oqparam", ",", "occupancy_periods", ")", ":", "time_event", "=", "oqparam", ".", "time_event", "if", "time_event", "and", "time_event", "not", "in", "occupancy_periods", ":", "raise", "ValueError", "(", "'time_event is %s in %s, but the exposure contains %s'", "%", "(", "time_event", ",", "oqparam", ".", "inputs", "[", "'job_ini'", "]", ",", "', '", ".", "join", "(", "occupancy_periods", ")", ")", ")" ]
Check the `time_event` parameter in the datastore, by comparing with the periods found in the exposure.
[ "Check", "the", "time_event", "parameter", "in", "the", "datastore", "by", "comparing", "with", "the", "periods", "found", "in", "the", "exposure", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L321-L331
gem/oq-engine
openquake/calculators/base.py
build_hmaps
def build_hmaps(hcurves_by_kind, slice_, imtls, poes, monitor):
    """
    Build hazard maps from a slice of hazard curves.

    :returns: a pair ({kind: hmaps}, slice)
    """
    dic = {}
    for kind, hcurves in hcurves_by_kind.items():
        dic[kind] = calc.make_hmap_array(hcurves, imtls, poes, len(hcurves))
    return dic, slice_
python
def build_hmaps(hcurves_by_kind, slice_, imtls, poes, monitor): dic = {} for kind, hcurves in hcurves_by_kind.items(): dic[kind] = calc.make_hmap_array(hcurves, imtls, poes, len(hcurves)) return dic, slice_
[ "def", "build_hmaps", "(", "hcurves_by_kind", ",", "slice_", ",", "imtls", ",", "poes", ",", "monitor", ")", ":", "dic", "=", "{", "}", "for", "kind", ",", "hcurves", "in", "hcurves_by_kind", ".", "items", "(", ")", ":", "dic", "[", "kind", "]", "=", "calc", ".", "make_hmap_array", "(", "hcurves", ",", "imtls", ",", "poes", ",", "len", "(", "hcurves", ")", ")", "return", "dic", ",", "slice_" ]
Build hazard maps from a slice of hazard curves. :returns: a pair ({kind: hmaps}, slice)
[ "Build", "hazard", "maps", "from", "a", "slice", "of", "hazard", "curves", ".", ":", "returns", ":", "a", "pair", "(", "{", "kind", ":", "hmaps", "}", "slice", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L749-L757
gem/oq-engine
openquake/calculators/base.py
get_gmv_data
def get_gmv_data(sids, gmfs, events):
    """
    Convert an array of shape (N, E, M) into an array of type gmv_data_dt
    """
    N, E, M = gmfs.shape
    gmv_data_dt = numpy.dtype(
        [('rlzi', U16), ('sid', U32), ('eid', U64), ('gmv', (F32, (M,)))])
    # NB: ordering of the loops: first site, then event
    lst = [(event['rlz'], sids[s], ei, gmfs[s, ei])
           for s in numpy.arange(N, dtype=U32)
           for ei, event in enumerate(events)]
    return numpy.array(lst, gmv_data_dt)
python
def get_gmv_data(sids, gmfs, events): N, E, M = gmfs.shape gmv_data_dt = numpy.dtype( [('rlzi', U16), ('sid', U32), ('eid', U64), ('gmv', (F32, (M,)))]) lst = [(event['rlz'], sids[s], ei, gmfs[s, ei]) for s in numpy.arange(N, dtype=U32) for ei, event in enumerate(events)] return numpy.array(lst, gmv_data_dt)
[ "def", "get_gmv_data", "(", "sids", ",", "gmfs", ",", "events", ")", ":", "N", ",", "E", ",", "M", "=", "gmfs", ".", "shape", "gmv_data_dt", "=", "numpy", ".", "dtype", "(", "[", "(", "'rlzi'", ",", "U16", ")", ",", "(", "'sid'", ",", "U32", ")", ",", "(", "'eid'", ",", "U64", ")", ",", "(", "'gmv'", ",", "(", "F32", ",", "(", "M", ",", ")", ")", ")", "]", ")", "# NB: ordering of the loops: first site, then event", "lst", "=", "[", "(", "event", "[", "'rlz'", "]", ",", "sids", "[", "s", "]", ",", "ei", ",", "gmfs", "[", "s", ",", "ei", "]", ")", "for", "s", "in", "numpy", ".", "arange", "(", "N", ",", "dtype", "=", "U32", ")", "for", "ei", ",", "event", "in", "enumerate", "(", "events", ")", "]", "return", "numpy", ".", "array", "(", "lst", ",", "gmv_data_dt", ")" ]
Convert an array of shape (N, E, M) into an array of type gmv_data_dt
[ "Convert", "an", "array", "of", "shape", "(", "N", "E", "M", ")", "into", "an", "array", "of", "type", "gmv_data_dt" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L884-L895
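A worked example of the (N, E, M) flattening on toy data. The dtype aliases and the two-field events array are simplifying assumptions; the real events_dt in openquake carries more fields:

import numpy

U16, U32, U64, F32 = numpy.uint16, numpy.uint32, numpy.uint64, numpy.float32

sids = numpy.array([7, 9], dtype=U32)                 # two sites
gmfs = numpy.arange(6, dtype=F32).reshape(2, 3, 1)    # N=2, E=3, M=1
events = numpy.zeros(3, dtype=[('eid', U64), ('rlz', U16)])

N, E, M = gmfs.shape
gmv_data_dt = numpy.dtype(
    [('rlzi', U16), ('sid', U32), ('eid', U64), ('gmv', (F32, (M,)))])
lst = [(event['rlz'], sids[s], ei, gmfs[s, ei])
       for s in numpy.arange(N, dtype=U32)
       for ei, event in enumerate(events)]
print(numpy.array(lst, gmv_data_dt))   # six records, ordered by site, then event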
gem/oq-engine
openquake/calculators/base.py
save_gmfs
def save_gmfs(calculator):
    """
    :param calculator: a scenario_risk/damage or event_based_risk calculator
    :returns: a pair (eids, R) where R is the number of realizations
    """
    dstore = calculator.datastore
    oq = calculator.oqparam
    logging.info('Reading gmfs from file')
    if oq.inputs['gmfs'].endswith('.csv'):
        # TODO: check if import_gmfs can be removed
        eids = import_gmfs(
            dstore, oq.inputs['gmfs'], calculator.sitecol.complete.sids)
    else:  # XML
        eids, gmfs = readinput.eids, readinput.gmfs
    E = len(eids)
    events = numpy.zeros(E, rupture.events_dt)
    events['eid'] = eids
    calculator.eids = eids
    if hasattr(oq, 'number_of_ground_motion_fields'):
        if oq.number_of_ground_motion_fields != E:
            raise RuntimeError(
                'Expected %d ground motion fields, found %d' %
                (oq.number_of_ground_motion_fields, E))
    else:
        # set the number of GMFs from the file
        oq.number_of_ground_motion_fields = E
    # NB: save_gmfs redefine oq.sites in case of GMFs from XML or CSV
    if oq.inputs['gmfs'].endswith('.xml'):
        haz_sitecol = readinput.get_site_collection(oq)
        N, E, M = gmfs.shape
        save_gmf_data(dstore, haz_sitecol, gmfs[haz_sitecol.sids],
                      oq.imtls, events)
python
def save_gmfs(calculator): dstore = calculator.datastore oq = calculator.oqparam logging.info('Reading gmfs from file') if oq.inputs['gmfs'].endswith('.csv'): eids = import_gmfs( dstore, oq.inputs['gmfs'], calculator.sitecol.complete.sids) else: eids, gmfs = readinput.eids, readinput.gmfs E = len(eids) events = numpy.zeros(E, rupture.events_dt) events['eid'] = eids calculator.eids = eids if hasattr(oq, 'number_of_ground_motion_fields'): if oq.number_of_ground_motion_fields != E: raise RuntimeError( 'Expected %d ground motion fields, found %d' % (oq.number_of_ground_motion_fields, E)) else: oq.number_of_ground_motion_fields = E if oq.inputs['gmfs'].endswith('.xml'): haz_sitecol = readinput.get_site_collection(oq) N, E, M = gmfs.shape save_gmf_data(dstore, haz_sitecol, gmfs[haz_sitecol.sids], oq.imtls, events)
[ "def", "save_gmfs", "(", "calculator", ")", ":", "dstore", "=", "calculator", ".", "datastore", "oq", "=", "calculator", ".", "oqparam", "logging", ".", "info", "(", "'Reading gmfs from file'", ")", "if", "oq", ".", "inputs", "[", "'gmfs'", "]", ".", "endswith", "(", "'.csv'", ")", ":", "# TODO: check if import_gmfs can be removed", "eids", "=", "import_gmfs", "(", "dstore", ",", "oq", ".", "inputs", "[", "'gmfs'", "]", ",", "calculator", ".", "sitecol", ".", "complete", ".", "sids", ")", "else", ":", "# XML", "eids", ",", "gmfs", "=", "readinput", ".", "eids", ",", "readinput", ".", "gmfs", "E", "=", "len", "(", "eids", ")", "events", "=", "numpy", ".", "zeros", "(", "E", ",", "rupture", ".", "events_dt", ")", "events", "[", "'eid'", "]", "=", "eids", "calculator", ".", "eids", "=", "eids", "if", "hasattr", "(", "oq", ",", "'number_of_ground_motion_fields'", ")", ":", "if", "oq", ".", "number_of_ground_motion_fields", "!=", "E", ":", "raise", "RuntimeError", "(", "'Expected %d ground motion fields, found %d'", "%", "(", "oq", ".", "number_of_ground_motion_fields", ",", "E", ")", ")", "else", ":", "# set the number of GMFs from the file", "oq", ".", "number_of_ground_motion_fields", "=", "E", "# NB: save_gmfs redefine oq.sites in case of GMFs from XML or CSV", "if", "oq", ".", "inputs", "[", "'gmfs'", "]", ".", "endswith", "(", "'.xml'", ")", ":", "haz_sitecol", "=", "readinput", ".", "get_site_collection", "(", "oq", ")", "N", ",", "E", ",", "M", "=", "gmfs", ".", "shape", "save_gmf_data", "(", "dstore", ",", "haz_sitecol", ",", "gmfs", "[", "haz_sitecol", ".", "sids", "]", ",", "oq", ".", "imtls", ",", "events", ")" ]
:param calculator: a scenario_risk/damage or event_based_risk calculator :returns: a pair (eids, R) where R is the number of realizations
[ ":", "param", "calculator", ":", "a", "scenario_risk", "/", "damage", "or", "event_based_risk", "calculator", ":", "returns", ":", "a", "pair", "(", "eids", "R", ")", "where", "R", "is", "the", "number", "of", "realizations" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L898-L928
gem/oq-engine
openquake/calculators/base.py
save_gmf_data
def save_gmf_data(dstore, sitecol, gmfs, imts, events=()):
    """
    :param dstore: a :class:`openquake.baselib.datastore.DataStore` instance
    :param sitecol: a :class:`openquake.hazardlib.site.SiteCollection` instance
    :param gmfs: an array of shape (N, E, M)
    :param imts: a list of IMT strings
    :param events: E event IDs or the empty tuple
    """
    if len(events) == 0:
        E = gmfs.shape[1]
        events = numpy.zeros(E, rupture.events_dt)
        events['eid'] = numpy.arange(E, dtype=U64)
    dstore['events'] = events
    offset = 0
    gmfa = get_gmv_data(sitecol.sids, gmfs, events)
    dstore['gmf_data/data'] = gmfa
    dic = general.group_array(gmfa, 'sid')
    lst = []
    all_sids = sitecol.complete.sids
    for sid in all_sids:
        rows = dic.get(sid, ())
        n = len(rows)
        lst.append((offset, offset + n))
        offset += n
    dstore['gmf_data/imts'] = ' '.join(imts)
    dstore['gmf_data/indices'] = numpy.array(lst, U32)
python
def save_gmf_data(dstore, sitecol, gmfs, imts, events=()): if len(events) == 0: E = gmfs.shape[1] events = numpy.zeros(E, rupture.events_dt) events['eid'] = numpy.arange(E, dtype=U64) dstore['events'] = events offset = 0 gmfa = get_gmv_data(sitecol.sids, gmfs, events) dstore['gmf_data/data'] = gmfa dic = general.group_array(gmfa, 'sid') lst = [] all_sids = sitecol.complete.sids for sid in all_sids: rows = dic.get(sid, ()) n = len(rows) lst.append((offset, offset + n)) offset += n dstore['gmf_data/imts'] = ' '.join(imts) dstore['gmf_data/indices'] = numpy.array(lst, U32)
[ "def", "save_gmf_data", "(", "dstore", ",", "sitecol", ",", "gmfs", ",", "imts", ",", "events", "=", "(", ")", ")", ":", "if", "len", "(", "events", ")", "==", "0", ":", "E", "=", "gmfs", ".", "shape", "[", "1", "]", "events", "=", "numpy", ".", "zeros", "(", "E", ",", "rupture", ".", "events_dt", ")", "events", "[", "'eid'", "]", "=", "numpy", ".", "arange", "(", "E", ",", "dtype", "=", "U64", ")", "dstore", "[", "'events'", "]", "=", "events", "offset", "=", "0", "gmfa", "=", "get_gmv_data", "(", "sitecol", ".", "sids", ",", "gmfs", ",", "events", ")", "dstore", "[", "'gmf_data/data'", "]", "=", "gmfa", "dic", "=", "general", ".", "group_array", "(", "gmfa", ",", "'sid'", ")", "lst", "=", "[", "]", "all_sids", "=", "sitecol", ".", "complete", ".", "sids", "for", "sid", "in", "all_sids", ":", "rows", "=", "dic", ".", "get", "(", "sid", ",", "(", ")", ")", "n", "=", "len", "(", "rows", ")", "lst", ".", "append", "(", "(", "offset", ",", "offset", "+", "n", ")", ")", "offset", "+=", "n", "dstore", "[", "'gmf_data/imts'", "]", "=", "' '", ".", "join", "(", "imts", ")", "dstore", "[", "'gmf_data/indices'", "]", "=", "numpy", ".", "array", "(", "lst", ",", "U32", ")" ]
:param dstore: a :class:`openquake.baselib.datastore.DataStore` instance :param sitecol: a :class:`openquake.hazardlib.site.SiteCollection` instance :param gmfs: an array of shape (N, E, M) :param imts: a list of IMT strings :param events: E event IDs or the empty tuple
[ ":", "param", "dstore", ":", "a", ":", "class", ":", "openquake", ".", "baselib", ".", "datastore", ".", "DataStore", "instance", ":", "param", "sitecol", ":", "a", ":", "class", ":", "openquake", ".", "hazardlib", ".", "site", ".", "SiteCollection", "instance", ":", "param", "gmfs", ":", "an", "array", "of", "shape", "(", "N", "E", "M", ")", ":", "param", "imts", ":", "a", "list", "of", "IMT", "strings", ":", "param", "events", ":", "E", "event", "IDs", "or", "the", "empty", "tuple" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L931-L956
gem/oq-engine
openquake/calculators/base.py
get_idxs
def get_idxs(data, eid2idx):
    """
    Convert from event IDs to event indices.

    :param data: an array with a field eid
    :param eid2idx: a dictionary eid -> idx
    :returns: the array of event indices
    """
    uniq, inv = numpy.unique(data['eid'], return_inverse=True)
    idxs = numpy.array([eid2idx[eid] for eid in uniq])[inv]
    return idxs
python
def get_idxs(data, eid2idx): uniq, inv = numpy.unique(data['eid'], return_inverse=True) idxs = numpy.array([eid2idx[eid] for eid in uniq])[inv] return idxs
[ "def", "get_idxs", "(", "data", ",", "eid2idx", ")", ":", "uniq", ",", "inv", "=", "numpy", ".", "unique", "(", "data", "[", "'eid'", "]", ",", "return_inverse", "=", "True", ")", "idxs", "=", "numpy", ".", "array", "(", "[", "eid2idx", "[", "eid", "]", "for", "eid", "in", "uniq", "]", ")", "[", "inv", "]", "return", "idxs" ]
Convert from event IDs to event indices. :param data: an array with a field eid :param eid2idx: a dictionary eid -> idx :returns: the array of event indices
[ "Convert", "from", "event", "IDs", "to", "event", "indices", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L959-L969
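A tiny worked example of the unique/inverse trick used above; the event IDs and mapping are made up:

import numpy

eid2idx = {101: 0, 205: 1, 999: 2}
data = numpy.array([(205,), (101,), (205,), (999,)],
                   dtype=[('eid', numpy.uint64)])

uniq, inv = numpy.unique(data['eid'], return_inverse=True)
idxs = numpy.array([eid2idx[eid] for eid in uniq])[inv]
print(idxs)   # [1 0 1 2] -- only the unique eids go through the dict lookup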
gem/oq-engine
openquake/calculators/base.py
import_gmfs
def import_gmfs(dstore, fname, sids):
    """
    Import in the datastore a ground motion field CSV file.

    :param dstore: the datastore
    :param fname: the CSV file
    :param sids: the site IDs (complete)
    :returns: event_ids, num_rlzs
    """
    array = writers.read_composite_array(fname).array  # has header rlzi, sid, eid, gmv_PGA, ...
    imts = [name[4:] for name in array.dtype.names[3:]]
    n_imts = len(imts)
    gmf_data_dt = numpy.dtype(
        [('rlzi', U16), ('sid', U32),
         ('eid', U64), ('gmv', (F32, (n_imts,)))])
    # store the events
    eids = numpy.unique(array['eid'])
    eids.sort()
    E = len(eids)
    eid2idx = dict(zip(eids, range(E)))
    events = numpy.zeros(E, rupture.events_dt)
    events['eid'] = eids
    dstore['events'] = events
    # store the GMFs
    dic = general.group_array(array.view(gmf_data_dt), 'sid')
    lst = []
    offset = 0
    for sid in sids:
        n = len(dic.get(sid, []))
        lst.append((offset, offset + n))
        if n:
            offset += n
            gmvs = dic[sid]
            gmvs['eid'] = get_idxs(gmvs, eid2idx)
            gmvs['rlzi'] = 0  # effectively there is only 1 realization
            dstore.extend('gmf_data/data', gmvs)
    dstore['gmf_data/indices'] = numpy.array(lst, U32)
    dstore['gmf_data/imts'] = ' '.join(imts)
    sig_eps_dt = [('eid', U64), ('sig', (F32, n_imts)),
                  ('eps', (F32, n_imts))]
    dstore['gmf_data/sigma_epsilon'] = numpy.zeros(0, sig_eps_dt)
    dstore['weights'] = numpy.ones((1, n_imts))
    return eids
python
def import_gmfs(dstore, fname, sids): array = writers.read_composite_array(fname).array imts = [name[4:] for name in array.dtype.names[3:]] n_imts = len(imts) gmf_data_dt = numpy.dtype( [('rlzi', U16), ('sid', U32), ('eid', U64), ('gmv', (F32, (n_imts,)))]) eids = numpy.unique(array['eid']) eids.sort() E = len(eids) eid2idx = dict(zip(eids, range(E))) events = numpy.zeros(E, rupture.events_dt) events['eid'] = eids dstore['events'] = events dic = general.group_array(array.view(gmf_data_dt), 'sid') lst = [] offset = 0 for sid in sids: n = len(dic.get(sid, [])) lst.append((offset, offset + n)) if n: offset += n gmvs = dic[sid] gmvs['eid'] = get_idxs(gmvs, eid2idx) gmvs['rlzi'] = 0 dstore.extend('gmf_data/data', gmvs) dstore['gmf_data/indices'] = numpy.array(lst, U32) dstore['gmf_data/imts'] = ' '.join(imts) sig_eps_dt = [('eid', U64), ('sig', (F32, n_imts)), ('eps', (F32, n_imts))] dstore['gmf_data/sigma_epsilon'] = numpy.zeros(0, sig_eps_dt) dstore['weights'] = numpy.ones((1, n_imts)) return eids
[ "def", "import_gmfs", "(", "dstore", ",", "fname", ",", "sids", ")", ":", "array", "=", "writers", ".", "read_composite_array", "(", "fname", ")", ".", "array", "# has header rlzi, sid, eid, gmv_PGA, ...", "imts", "=", "[", "name", "[", "4", ":", "]", "for", "name", "in", "array", ".", "dtype", ".", "names", "[", "3", ":", "]", "]", "n_imts", "=", "len", "(", "imts", ")", "gmf_data_dt", "=", "numpy", ".", "dtype", "(", "[", "(", "'rlzi'", ",", "U16", ")", ",", "(", "'sid'", ",", "U32", ")", ",", "(", "'eid'", ",", "U64", ")", ",", "(", "'gmv'", ",", "(", "F32", ",", "(", "n_imts", ",", ")", ")", ")", "]", ")", "# store the events", "eids", "=", "numpy", ".", "unique", "(", "array", "[", "'eid'", "]", ")", "eids", ".", "sort", "(", ")", "E", "=", "len", "(", "eids", ")", "eid2idx", "=", "dict", "(", "zip", "(", "eids", ",", "range", "(", "E", ")", ")", ")", "events", "=", "numpy", ".", "zeros", "(", "E", ",", "rupture", ".", "events_dt", ")", "events", "[", "'eid'", "]", "=", "eids", "dstore", "[", "'events'", "]", "=", "events", "# store the GMFs", "dic", "=", "general", ".", "group_array", "(", "array", ".", "view", "(", "gmf_data_dt", ")", ",", "'sid'", ")", "lst", "=", "[", "]", "offset", "=", "0", "for", "sid", "in", "sids", ":", "n", "=", "len", "(", "dic", ".", "get", "(", "sid", ",", "[", "]", ")", ")", "lst", ".", "append", "(", "(", "offset", ",", "offset", "+", "n", ")", ")", "if", "n", ":", "offset", "+=", "n", "gmvs", "=", "dic", "[", "sid", "]", "gmvs", "[", "'eid'", "]", "=", "get_idxs", "(", "gmvs", ",", "eid2idx", ")", "gmvs", "[", "'rlzi'", "]", "=", "0", "# effectively there is only 1 realization", "dstore", ".", "extend", "(", "'gmf_data/data'", ",", "gmvs", ")", "dstore", "[", "'gmf_data/indices'", "]", "=", "numpy", ".", "array", "(", "lst", ",", "U32", ")", "dstore", "[", "'gmf_data/imts'", "]", "=", "' '", ".", "join", "(", "imts", ")", "sig_eps_dt", "=", "[", "(", "'eid'", ",", "U64", ")", ",", "(", "'sig'", ",", "(", "F32", ",", "n_imts", ")", ")", ",", "(", "'eps'", ",", "(", "F32", ",", "n_imts", ")", ")", "]", "dstore", "[", "'gmf_data/sigma_epsilon'", "]", "=", "numpy", ".", "zeros", "(", "0", ",", "sig_eps_dt", ")", "dstore", "[", "'weights'", "]", "=", "numpy", ".", "ones", "(", "(", "1", ",", "n_imts", ")", ")", "return", "eids" ]
Import in the datastore a ground motion field CSV file. :param dstore: the datastore :param fname: the CSV file :param sids: the site IDs (complete) :returns: event_ids, num_rlzs
[ "Import", "in", "the", "datastore", "a", "ground", "motion", "field", "CSV", "file", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L972-L1013
gem/oq-engine
openquake/calculators/base.py
BaseCalculator.monitor
def monitor(self, operation='', **kw):
    """
    :returns: a new Monitor instance
    """
    mon = self._monitor(operation, hdf5=self.datastore.hdf5)
    self._monitor.calc_id = mon.calc_id = self.datastore.calc_id
    vars(mon).update(kw)
    return mon
python
def monitor(self, operation='', **kw): mon = self._monitor(operation, hdf5=self.datastore.hdf5) self._monitor.calc_id = mon.calc_id = self.datastore.calc_id vars(mon).update(kw) return mon
[ "def", "monitor", "(", "self", ",", "operation", "=", "''", ",", "*", "*", "kw", ")", ":", "mon", "=", "self", ".", "_monitor", "(", "operation", ",", "hdf5", "=", "self", ".", "datastore", ".", "hdf5", ")", "self", ".", "_monitor", ".", "calc_id", "=", "mon", ".", "calc_id", "=", "self", ".", "datastore", ".", "calc_id", "vars", "(", "mon", ")", ".", "update", "(", "kw", ")", "return", "mon" ]
:returns: a new Monitor instance
[ ":", "returns", ":", "a", "new", "Monitor", "instance" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L133-L140
gem/oq-engine
openquake/calculators/base.py
BaseCalculator.save_params
def save_params(self, **kw):
    """
    Update the current calculation parameters and save engine_version
    """
    if ('hazard_calculation_id' in kw and
            kw['hazard_calculation_id'] is None):
        del kw['hazard_calculation_id']
    vars(self.oqparam).update(**kw)
    self.datastore['oqparam'] = self.oqparam  # save the updated oqparam
    attrs = self.datastore['/'].attrs
    attrs['engine_version'] = engine_version
    attrs['date'] = datetime.now().isoformat()[:19]
    if 'checksum32' not in attrs:
        attrs['checksum32'] = readinput.get_checksum32(self.oqparam)
    self.datastore.flush()
python
def save_params(self, **kw): if ('hazard_calculation_id' in kw and kw['hazard_calculation_id'] is None): del kw['hazard_calculation_id'] vars(self.oqparam).update(**kw) self.datastore['oqparam'] = self.oqparam attrs = self.datastore['/'].attrs attrs['engine_version'] = engine_version attrs['date'] = datetime.now().isoformat()[:19] if 'checksum32' not in attrs: attrs['checksum32'] = readinput.get_checksum32(self.oqparam) self.datastore.flush()
[ "def", "save_params", "(", "self", ",", "*", "*", "kw", ")", ":", "if", "(", "'hazard_calculation_id'", "in", "kw", "and", "kw", "[", "'hazard_calculation_id'", "]", "is", "None", ")", ":", "del", "kw", "[", "'hazard_calculation_id'", "]", "vars", "(", "self", ".", "oqparam", ")", ".", "update", "(", "*", "*", "kw", ")", "self", ".", "datastore", "[", "'oqparam'", "]", "=", "self", ".", "oqparam", "# save the updated oqparam", "attrs", "=", "self", ".", "datastore", "[", "'/'", "]", ".", "attrs", "attrs", "[", "'engine_version'", "]", "=", "engine_version", "attrs", "[", "'date'", "]", "=", "datetime", ".", "now", "(", ")", ".", "isoformat", "(", ")", "[", ":", "19", "]", "if", "'checksum32'", "not", "in", "attrs", ":", "attrs", "[", "'checksum32'", "]", "=", "readinput", ".", "get_checksum32", "(", "self", ".", "oqparam", ")", "self", ".", "datastore", ".", "flush", "(", ")" ]
Update the current calculation parameters and save engine_version
[ "Update", "the", "current", "calculation", "parameters", "and", "save", "engine_version" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L142-L156
gem/oq-engine
openquake/calculators/base.py
BaseCalculator.check_precalc
def check_precalc(self, precalc_mode):
    """
    Defensive programming against users providing an incorrect
    pre-calculation ID (with ``--hazard-calculation-id``).

    :param precalc_mode:
        calculation_mode of the previous calculation
    """
    calc_mode = self.oqparam.calculation_mode
    ok_mode = self.accept_precalc
    if calc_mode != precalc_mode and precalc_mode not in ok_mode:
        raise InvalidCalculationID(
            'In order to run a calculation of kind %r, '
            'you need to provide a calculation of kind %r, '
            'but you provided a %r instead' %
            (calc_mode, ok_mode, precalc_mode))
python
def check_precalc(self, precalc_mode): calc_mode = self.oqparam.calculation_mode ok_mode = self.accept_precalc if calc_mode != precalc_mode and precalc_mode not in ok_mode: raise InvalidCalculationID( 'In order to run a calculation of kind %r, ' 'you need to provide a calculation of kind %r, ' 'but you provided a %r instead' % (calc_mode, ok_mode, precalc_mode))
[ "def", "check_precalc", "(", "self", ",", "precalc_mode", ")", ":", "calc_mode", "=", "self", ".", "oqparam", ".", "calculation_mode", "ok_mode", "=", "self", ".", "accept_precalc", "if", "calc_mode", "!=", "precalc_mode", "and", "precalc_mode", "not", "in", "ok_mode", ":", "raise", "InvalidCalculationID", "(", "'In order to run a calculation of kind %r, '", "'you need to provide a calculation of kind %r, '", "'but you provided a %r instead'", "%", "(", "calc_mode", ",", "ok_mode", ",", "precalc_mode", ")", ")" ]
Defensive programming against users providing an incorrect pre-calculation ID (with ``--hazard-calculation-id``). :param precalc_mode: calculation_mode of the previous calculation
[ "Defensive", "programming", "against", "users", "providing", "an", "incorrect", "pre", "-", "calculation", "ID", "(", "with", "--", "hazard", "-", "calculation", "-", "id", ")", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L158-L173
gem/oq-engine
openquake/calculators/base.py
BaseCalculator.run
def run(self, pre_execute=True, concurrent_tasks=None, close=True, **kw): """ Run the calculation and return the exported outputs. """ with self._monitor: self._monitor.username = kw.get('username', '') self._monitor.hdf5 = self.datastore.hdf5 if concurrent_tasks is None: # use the job.ini parameter ct = self.oqparam.concurrent_tasks else: # used the parameter passed in the command-line ct = concurrent_tasks if ct == 0: # disable distribution temporarily oq_distribute = os.environ.get('OQ_DISTRIBUTE') os.environ['OQ_DISTRIBUTE'] = 'no' if ct != self.oqparam.concurrent_tasks: # save the used concurrent_tasks self.oqparam.concurrent_tasks = ct self.save_params(**kw) try: if pre_execute: self.pre_execute() self.result = self.execute() if self.result is not None: self.post_execute(self.result) self.before_export() self.export(kw.get('exports', '')) except Exception: if kw.get('pdb'): # post-mortem debug tb = sys.exc_info()[2] traceback.print_tb(tb) pdb.post_mortem(tb) else: logging.critical('', exc_info=True) raise finally: # cleanup globals if ct == 0: # restore OQ_DISTRIBUTE if oq_distribute is None: # was not set del os.environ['OQ_DISTRIBUTE'] else: os.environ['OQ_DISTRIBUTE'] = oq_distribute readinput.pmap = None readinput.exposure = None readinput.gmfs = None readinput.eids = None self._monitor.flush() if close: # in the engine we close later self.result = None try: self.datastore.close() except (RuntimeError, ValueError): # sometimes produces errors but they are difficult to # reproduce logging.warning('', exc_info=True) return getattr(self, 'exported', {})
python
def run(self, pre_execute=True, concurrent_tasks=None, close=True, **kw): with self._monitor: self._monitor.username = kw.get('username', '') self._monitor.hdf5 = self.datastore.hdf5 if concurrent_tasks is None: ct = self.oqparam.concurrent_tasks else: ct = concurrent_tasks if ct == 0: oq_distribute = os.environ.get('OQ_DISTRIBUTE') os.environ['OQ_DISTRIBUTE'] = 'no' if ct != self.oqparam.concurrent_tasks: self.oqparam.concurrent_tasks = ct self.save_params(**kw) try: if pre_execute: self.pre_execute() self.result = self.execute() if self.result is not None: self.post_execute(self.result) self.before_export() self.export(kw.get('exports', '')) except Exception: if kw.get('pdb'): tb = sys.exc_info()[2] traceback.print_tb(tb) pdb.post_mortem(tb) else: logging.critical('', exc_info=True) raise finally: if ct == 0: if oq_distribute is None: del os.environ['OQ_DISTRIBUTE'] else: os.environ['OQ_DISTRIBUTE'] = oq_distribute readinput.pmap = None readinput.exposure = None readinput.gmfs = None readinput.eids = None self._monitor.flush() if close: self.result = None try: self.datastore.close() except (RuntimeError, ValueError): logging.warning('', exc_info=True) return getattr(self, 'exported', {})
[ "def", "run", "(", "self", ",", "pre_execute", "=", "True", ",", "concurrent_tasks", "=", "None", ",", "close", "=", "True", ",", "*", "*", "kw", ")", ":", "with", "self", ".", "_monitor", ":", "self", ".", "_monitor", ".", "username", "=", "kw", ".", "get", "(", "'username'", ",", "''", ")", "self", ".", "_monitor", ".", "hdf5", "=", "self", ".", "datastore", ".", "hdf5", "if", "concurrent_tasks", "is", "None", ":", "# use the job.ini parameter", "ct", "=", "self", ".", "oqparam", ".", "concurrent_tasks", "else", ":", "# used the parameter passed in the command-line", "ct", "=", "concurrent_tasks", "if", "ct", "==", "0", ":", "# disable distribution temporarily", "oq_distribute", "=", "os", ".", "environ", ".", "get", "(", "'OQ_DISTRIBUTE'", ")", "os", ".", "environ", "[", "'OQ_DISTRIBUTE'", "]", "=", "'no'", "if", "ct", "!=", "self", ".", "oqparam", ".", "concurrent_tasks", ":", "# save the used concurrent_tasks", "self", ".", "oqparam", ".", "concurrent_tasks", "=", "ct", "self", ".", "save_params", "(", "*", "*", "kw", ")", "try", ":", "if", "pre_execute", ":", "self", ".", "pre_execute", "(", ")", "self", ".", "result", "=", "self", ".", "execute", "(", ")", "if", "self", ".", "result", "is", "not", "None", ":", "self", ".", "post_execute", "(", "self", ".", "result", ")", "self", ".", "before_export", "(", ")", "self", ".", "export", "(", "kw", ".", "get", "(", "'exports'", ",", "''", ")", ")", "except", "Exception", ":", "if", "kw", ".", "get", "(", "'pdb'", ")", ":", "# post-mortem debug", "tb", "=", "sys", ".", "exc_info", "(", ")", "[", "2", "]", "traceback", ".", "print_tb", "(", "tb", ")", "pdb", ".", "post_mortem", "(", "tb", ")", "else", ":", "logging", ".", "critical", "(", "''", ",", "exc_info", "=", "True", ")", "raise", "finally", ":", "# cleanup globals", "if", "ct", "==", "0", ":", "# restore OQ_DISTRIBUTE", "if", "oq_distribute", "is", "None", ":", "# was not set", "del", "os", ".", "environ", "[", "'OQ_DISTRIBUTE'", "]", "else", ":", "os", ".", "environ", "[", "'OQ_DISTRIBUTE'", "]", "=", "oq_distribute", "readinput", ".", "pmap", "=", "None", "readinput", ".", "exposure", "=", "None", "readinput", ".", "gmfs", "=", "None", "readinput", ".", "eids", "=", "None", "self", ".", "_monitor", ".", "flush", "(", ")", "if", "close", ":", "# in the engine we close later", "self", ".", "result", "=", "None", "try", ":", "self", ".", "datastore", ".", "close", "(", ")", "except", "(", "RuntimeError", ",", "ValueError", ")", ":", "# sometimes produces errors but they are difficult to", "# reproduce", "logging", ".", "warning", "(", "''", ",", "exc_info", "=", "True", ")", "return", "getattr", "(", "self", ",", "'exported'", ",", "{", "}", ")" ]
Run the calculation and return the exported outputs.
[ "Run", "the", "calculation", "and", "return", "the", "exported", "outputs", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L175-L231
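As an aside on the run record above: when concurrent_tasks is forced to zero, distribution is disabled by overriding the OQ_DISTRIBUTE environment variable and restoring it in the finally clause. A minimal, self-contained sketch of that save/override/restore pattern (only the variable name comes from the record; the helper below is purely illustrative):

import os
from contextlib import contextmanager

@contextmanager
def override_env(name, value):
    # remember the previous value; None means "was not set"
    old = os.environ.get(name)
    os.environ[name] = value
    try:
        yield
    finally:
        if old is None:
            del os.environ[name]      # restore the "unset" state
        else:
            os.environ[name] = old    # restore the previous value

# e.g. run a purely serial piece of work:
with override_env('OQ_DISTRIBUTE', 'no'):
    pass  # serial computation goes here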
gem/oq-engine
openquake/calculators/base.py
BaseCalculator.export
def export(self, exports=None): """ Export all the outputs in the datastore in the given export formats. Individual outputs are not exported if there are multiple realizations. """ self.exported = getattr(self.precalc, 'exported', {}) if isinstance(exports, tuple): fmts = exports elif exports: # is a string fmts = exports.split(',') elif isinstance(self.oqparam.exports, tuple): fmts = self.oqparam.exports else: # is a string fmts = self.oqparam.exports.split(',') keys = set(self.datastore) has_hcurves = ('hcurves-stats' in self.datastore or 'hcurves-rlzs' in self.datastore) if has_hcurves: keys.add('hcurves') for fmt in fmts: if not fmt: continue for key in sorted(keys): # top level keys if 'rlzs' in key and self.R > 1: continue # skip individual curves self._export((key, fmt)) if has_hcurves and self.oqparam.hazard_maps: self._export(('hmaps', fmt)) if has_hcurves and self.oqparam.uniform_hazard_spectra: self._export(('uhs', fmt))
python
def export(self, exports=None): self.exported = getattr(self.precalc, 'exported', {}) if isinstance(exports, tuple): fmts = exports elif exports: fmts = exports.split(',') elif isinstance(self.oqparam.exports, tuple): fmts = self.oqparam.exports else: fmts = self.oqparam.exports.split(',') keys = set(self.datastore) has_hcurves = ('hcurves-stats' in self.datastore or 'hcurves-rlzs' in self.datastore) if has_hcurves: keys.add('hcurves') for fmt in fmts: if not fmt: continue for key in sorted(keys): if 'rlzs' in key and self.R > 1: continue self._export((key, fmt)) if has_hcurves and self.oqparam.hazard_maps: self._export(('hmaps', fmt)) if has_hcurves and self.oqparam.uniform_hazard_spectra: self._export(('uhs', fmt))
[ "def", "export", "(", "self", ",", "exports", "=", "None", ")", ":", "self", ".", "exported", "=", "getattr", "(", "self", ".", "precalc", ",", "'exported'", ",", "{", "}", ")", "if", "isinstance", "(", "exports", ",", "tuple", ")", ":", "fmts", "=", "exports", "elif", "exports", ":", "# is a string", "fmts", "=", "exports", ".", "split", "(", "','", ")", "elif", "isinstance", "(", "self", ".", "oqparam", ".", "exports", ",", "tuple", ")", ":", "fmts", "=", "self", ".", "oqparam", ".", "exports", "else", ":", "# is a string", "fmts", "=", "self", ".", "oqparam", ".", "exports", ".", "split", "(", "','", ")", "keys", "=", "set", "(", "self", ".", "datastore", ")", "has_hcurves", "=", "(", "'hcurves-stats'", "in", "self", ".", "datastore", "or", "'hcurves-rlzs'", "in", "self", ".", "datastore", ")", "if", "has_hcurves", ":", "keys", ".", "add", "(", "'hcurves'", ")", "for", "fmt", "in", "fmts", ":", "if", "not", "fmt", ":", "continue", "for", "key", "in", "sorted", "(", "keys", ")", ":", "# top level keys", "if", "'rlzs'", "in", "key", "and", "self", ".", "R", ">", "1", ":", "continue", "# skip individual curves", "self", ".", "_export", "(", "(", "key", ",", "fmt", ")", ")", "if", "has_hcurves", "and", "self", ".", "oqparam", ".", "hazard_maps", ":", "self", ".", "_export", "(", "(", "'hmaps'", ",", "fmt", ")", ")", "if", "has_hcurves", "and", "self", ".", "oqparam", ".", "uniform_hazard_spectra", ":", "self", ".", "_export", "(", "(", "'uhs'", ",", "fmt", ")", ")" ]
Export all the outputs in the datastore in the given export formats. Individual outputs are not exported if there are multiple realizations.
[ "Export", "all", "the", "outputs", "in", "the", "datastore", "in", "the", "given", "export", "formats", ".", "Individual", "outputs", "are", "not", "exported", "if", "there", "are", "multiple", "realizations", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L260-L289
gem/oq-engine
openquake/calculators/base.py
BaseCalculator.before_export
def before_export(self): """ Set the attributes nbytes """ # sanity check that eff_ruptures have been set, i.e. are not -1 try: csm_info = self.datastore['csm_info'] except KeyError: csm_info = self.datastore['csm_info'] = self.csm.info for sm in csm_info.source_models: for sg in sm.src_groups: assert sg.eff_ruptures != -1, sg for key in self.datastore: self.datastore.set_nbytes(key) self.datastore.flush()
python
def before_export(self): try: csm_info = self.datastore['csm_info'] except KeyError: csm_info = self.datastore['csm_info'] = self.csm.info for sm in csm_info.source_models: for sg in sm.src_groups: assert sg.eff_ruptures != -1, sg for key in self.datastore: self.datastore.set_nbytes(key) self.datastore.flush()
[ "def", "before_export", "(", "self", ")", ":", "# sanity check that eff_ruptures have been set, i.e. are not -1", "try", ":", "csm_info", "=", "self", ".", "datastore", "[", "'csm_info'", "]", "except", "KeyError", ":", "csm_info", "=", "self", ".", "datastore", "[", "'csm_info'", "]", "=", "self", ".", "csm", ".", "info", "for", "sm", "in", "csm_info", ".", "source_models", ":", "for", "sg", "in", "sm", ".", "src_groups", ":", "assert", "sg", ".", "eff_ruptures", "!=", "-", "1", ",", "sg", "for", "key", "in", "self", ".", "datastore", ":", "self", ".", "datastore", ".", "set_nbytes", "(", "key", ")", "self", ".", "datastore", ".", "flush", "(", ")" ]
Set the attributes nbytes
[ "Set", "the", "attributes", "nbytes" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L303-L318
gem/oq-engine
openquake/calculators/base.py
HazardCalculator.block_splitter
def block_splitter(self, sources, weight=get_weight, key=lambda src: 1): """ :param sources: a list of sources :param weight: a weight function (default .weight) :param key: None or 'src_group_id' :returns: an iterator over blocks of sources """ ct = self.oqparam.concurrent_tasks or 1 maxweight = self.csm.get_maxweight(weight, ct, source.MINWEIGHT) if not hasattr(self, 'logged'): if maxweight == source.MINWEIGHT: logging.info('Using minweight=%d', source.MINWEIGHT) else: logging.info('Using maxweight=%d', maxweight) self.logged = True return general.block_splitter(sources, maxweight, weight, key)
python
def block_splitter(self, sources, weight=get_weight, key=lambda src: 1): ct = self.oqparam.concurrent_tasks or 1 maxweight = self.csm.get_maxweight(weight, ct, source.MINWEIGHT) if not hasattr(self, 'logged'): if maxweight == source.MINWEIGHT: logging.info('Using minweight=%d', source.MINWEIGHT) else: logging.info('Using maxweight=%d', maxweight) self.logged = True return general.block_splitter(sources, maxweight, weight, key)
[ "def", "block_splitter", "(", "self", ",", "sources", ",", "weight", "=", "get_weight", ",", "key", "=", "lambda", "src", ":", "1", ")", ":", "ct", "=", "self", ".", "oqparam", ".", "concurrent_tasks", "or", "1", "maxweight", "=", "self", ".", "csm", ".", "get_maxweight", "(", "weight", ",", "ct", ",", "source", ".", "MINWEIGHT", ")", "if", "not", "hasattr", "(", "self", ",", "'logged'", ")", ":", "if", "maxweight", "==", "source", ".", "MINWEIGHT", ":", "logging", ".", "info", "(", "'Using minweight=%d'", ",", "source", ".", "MINWEIGHT", ")", "else", ":", "logging", ".", "info", "(", "'Using maxweight=%d'", ",", "maxweight", ")", "self", ".", "logged", "=", "True", "return", "general", ".", "block_splitter", "(", "sources", ",", "maxweight", ",", "weight", ",", "key", ")" ]
:param sources: a list of sources :param weight: a weight function (default .weight) :param key: None or 'src_group_id' :returns: an iterator over blocks of sources
[ ":", "param", "sources", ":", "a", "list", "of", "sources", ":", "param", "weight", ":", "a", "weight", "function", "(", "default", ".", "weight", ")", ":", "param", "key", ":", "None", "or", "src_group_id", ":", "returns", ":", "an", "iterator", "over", "blocks", "of", "sources" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L338-L353
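The block_splitter record above delegates to general.block_splitter(sources, maxweight, weight, key). As a rough illustration of the idea only (not the engine's actual implementation), a toy splitter that packs weighted items into consecutive blocks bounded by a maximum weight could look like this:

def toy_block_splitter(items, maxweight, weight=lambda it: it):
    # yield consecutive blocks whose total weight stays within maxweight
    block, total = [], 0
    for item in items:
        w = weight(item)
        if block and total + w > maxweight:
            yield block
            block, total = [], 0
        block.append(item)
        total += w
    if block:
        yield block

# list(toy_block_splitter([3, 4, 4, 1, 7], maxweight=8)) -> [[3, 4], [4, 1], [7]]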
gem/oq-engine
openquake/calculators/base.py
HazardCalculator.src_filter
def src_filter(self): """ :returns: a SourceFilter/UcerfFilter """ oq = self.oqparam self.hdf5cache = self.datastore.hdf5cache() sitecol = self.sitecol.complete if self.sitecol else None if 'ucerf' in oq.calculation_mode: return UcerfFilter(sitecol, oq.maximum_distance, self.hdf5cache) return SourceFilter(sitecol, oq.maximum_distance, self.hdf5cache)
python
def src_filter(self): oq = self.oqparam self.hdf5cache = self.datastore.hdf5cache() sitecol = self.sitecol.complete if self.sitecol else None if 'ucerf' in oq.calculation_mode: return UcerfFilter(sitecol, oq.maximum_distance, self.hdf5cache) return SourceFilter(sitecol, oq.maximum_distance, self.hdf5cache)
[ "def", "src_filter", "(", "self", ")", ":", "oq", "=", "self", ".", "oqparam", "self", ".", "hdf5cache", "=", "self", ".", "datastore", ".", "hdf5cache", "(", ")", "sitecol", "=", "self", ".", "sitecol", ".", "complete", "if", "self", ".", "sitecol", "else", "None", "if", "'ucerf'", "in", "oq", ".", "calculation_mode", ":", "return", "UcerfFilter", "(", "sitecol", ",", "oq", ".", "maximum_distance", ",", "self", ".", "hdf5cache", ")", "return", "SourceFilter", "(", "sitecol", ",", "oq", ".", "maximum_distance", ",", "self", ".", "hdf5cache", ")" ]
:returns: a SourceFilter/UcerfFilter
[ ":", "returns", ":", "a", "SourceFilter", "/", "UcerfFilter" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L356-L365
gem/oq-engine
openquake/calculators/base.py
HazardCalculator.rtree_filter
def rtree_filter(self): """ :returns: an RtreeFilter """ return RtreeFilter(self.src_filter.sitecol, self.oqparam.maximum_distance, self.src_filter.filename)
python
def rtree_filter(self): return RtreeFilter(self.src_filter.sitecol, self.oqparam.maximum_distance, self.src_filter.filename)
[ "def", "rtree_filter", "(", "self", ")", ":", "return", "RtreeFilter", "(", "self", ".", "src_filter", ".", "sitecol", ",", "self", ".", "oqparam", ".", "maximum_distance", ",", "self", ".", "src_filter", ".", "filename", ")" ]
:returns: an RtreeFilter
[ ":", "returns", ":", "an", "RtreeFilter" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L368-L374
gem/oq-engine
openquake/calculators/base.py
HazardCalculator.N
def N(self): """ :returns: the total number of sites """ if hasattr(self, 'sitecol'): return len(self.sitecol.complete) if self.sitecol else None return len(self.datastore['sitecol/array'])
python
def N(self): if hasattr(self, 'sitecol'): return len(self.sitecol.complete) if self.sitecol else None return len(self.datastore['sitecol/array'])
[ "def", "N", "(", "self", ")", ":", "if", "hasattr", "(", "self", ",", "'sitecol'", ")", ":", "return", "len", "(", "self", ".", "sitecol", ".", "complete", ")", "if", "self", ".", "sitecol", "else", "None", "return", "len", "(", "self", ".", "datastore", "[", "'sitecol/array'", "]", ")" ]
:returns: the total number of sites
[ ":", "returns", ":", "the", "total", "number", "of", "sites" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L387-L393
gem/oq-engine
openquake/calculators/base.py
HazardCalculator.read_inputs
def read_inputs(self): """ Read risk data and sources if any """ oq = self.oqparam self._read_risk_data() self.check_overflow() # check if self.sitecol is too large if ('source_model_logic_tree' in oq.inputs and oq.hazard_calculation_id is None): self.csm = readinput.get_composite_source_model( oq, self.monitor(), srcfilter=self.src_filter) self.init()
python
def read_inputs(self): oq = self.oqparam self._read_risk_data() self.check_overflow() if ('source_model_logic_tree' in oq.inputs and oq.hazard_calculation_id is None): self.csm = readinput.get_composite_source_model( oq, self.monitor(), srcfilter=self.src_filter) self.init()
[ "def", "read_inputs", "(", "self", ")", ":", "oq", "=", "self", ".", "oqparam", "self", ".", "_read_risk_data", "(", ")", "self", ".", "check_overflow", "(", ")", "# check if self.sitecol is too large", "if", "(", "'source_model_logic_tree'", "in", "oq", ".", "inputs", "and", "oq", ".", "hazard_calculation_id", "is", "None", ")", ":", "self", ".", "csm", "=", "readinput", ".", "get_composite_source_model", "(", "oq", ",", "self", ".", "monitor", "(", ")", ",", "srcfilter", "=", "self", ".", "src_filter", ")", "self", ".", "init", "(", ")" ]
Read risk data and sources if any
[ "Read", "risk", "data", "and", "sources", "if", "any" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L406-L417
gem/oq-engine
openquake/calculators/base.py
HazardCalculator.pre_execute
def pre_execute(self): """ Check if there is a previous calculation ID. If yes, read the inputs by retrieving the previous calculation; if not, read the inputs directly. """ oq = self.oqparam if 'gmfs' in oq.inputs or 'multi_peril' in oq.inputs: # read hazard from files assert not oq.hazard_calculation_id, ( 'You cannot use --hc together with gmfs_file') self.read_inputs() if 'gmfs' in oq.inputs: save_gmfs(self) else: self.save_multi_peril() elif 'hazard_curves' in oq.inputs: # read hazard from file assert not oq.hazard_calculation_id, ( 'You cannot use --hc together with hazard_curves') haz_sitecol = readinput.get_site_collection(oq) # NB: horrible: get_site_collection calls get_pmap_from_nrml # that sets oq.investigation_time, so it must be called first self.load_riskmodel() # must be after get_site_collection self.read_exposure(haz_sitecol) # define .assets_by_site self.datastore['poes/grp-00'] = fix_ones(readinput.pmap) self.datastore['sitecol'] = self.sitecol self.datastore['assetcol'] = self.assetcol self.datastore['csm_info'] = fake = source.CompositionInfo.fake() self.rlzs_assoc = fake.get_rlzs_assoc() elif oq.hazard_calculation_id: parent = util.read(oq.hazard_calculation_id) self.check_precalc(parent['oqparam'].calculation_mode) self.datastore.parent = parent # copy missing parameters from the parent params = {name: value for name, value in vars(parent['oqparam']).items() if name not in vars(self.oqparam)} self.save_params(**params) self.read_inputs() oqp = parent['oqparam'] if oqp.investigation_time != oq.investigation_time: raise ValueError( 'The parent calculation was using investigation_time=%s' ' != %s' % (oqp.investigation_time, oq.investigation_time)) if oqp.minimum_intensity != oq.minimum_intensity: raise ValueError( 'The parent calculation was using minimum_intensity=%s' ' != %s' % (oqp.minimum_intensity, oq.minimum_intensity)) missing_imts = set(oq.risk_imtls) - set(oqp.imtls) if missing_imts: raise ValueError( 'The parent calculation is missing the IMT(s) %s' % ', '.join(missing_imts)) elif self.__class__.precalc: calc = calculators[self.__class__.precalc]( self.oqparam, self.datastore.calc_id) calc.run() self.param = calc.param self.sitecol = calc.sitecol self.assetcol = calc.assetcol self.riskmodel = calc.riskmodel if hasattr(calc, 'rlzs_assoc'): self.rlzs_assoc = calc.rlzs_assoc else: # this happens for instance for a scenario_damage without # rupture, gmfs, multi_peril raise InvalidFile( '%(job_ini)s: missing gmfs_csv, multi_peril_csv' % oq.inputs) if hasattr(calc, 'csm'): # no scenario self.csm = calc.csm else: self.read_inputs() if self.riskmodel: self.save_riskmodel()
python
def pre_execute(self): oq = self.oqparam if 'gmfs' in oq.inputs or 'multi_peril' in oq.inputs: assert not oq.hazard_calculation_id, ( 'You cannot use --hc together with gmfs_file') self.read_inputs() if 'gmfs' in oq.inputs: save_gmfs(self) else: self.save_multi_peril() elif 'hazard_curves' in oq.inputs: assert not oq.hazard_calculation_id, ( 'You cannot use --hc together with hazard_curves') haz_sitecol = readinput.get_site_collection(oq) self.load_riskmodel() self.read_exposure(haz_sitecol) self.datastore['poes/grp-00'] = fix_ones(readinput.pmap) self.datastore['sitecol'] = self.sitecol self.datastore['assetcol'] = self.assetcol self.datastore['csm_info'] = fake = source.CompositionInfo.fake() self.rlzs_assoc = fake.get_rlzs_assoc() elif oq.hazard_calculation_id: parent = util.read(oq.hazard_calculation_id) self.check_precalc(parent['oqparam'].calculation_mode) self.datastore.parent = parent params = {name: value for name, value in vars(parent['oqparam']).items() if name not in vars(self.oqparam)} self.save_params(**params) self.read_inputs() oqp = parent['oqparam'] if oqp.investigation_time != oq.investigation_time: raise ValueError( 'The parent calculation was using investigation_time=%s' ' != %s' % (oqp.investigation_time, oq.investigation_time)) if oqp.minimum_intensity != oq.minimum_intensity: raise ValueError( 'The parent calculation was using minimum_intensity=%s' ' != %s' % (oqp.minimum_intensity, oq.minimum_intensity)) missing_imts = set(oq.risk_imtls) - set(oqp.imtls) if missing_imts: raise ValueError( 'The parent calculation is missing the IMT(s) %s' % ', '.join(missing_imts)) elif self.__class__.precalc: calc = calculators[self.__class__.precalc]( self.oqparam, self.datastore.calc_id) calc.run() self.param = calc.param self.sitecol = calc.sitecol self.assetcol = calc.assetcol self.riskmodel = calc.riskmodel if hasattr(calc, 'rlzs_assoc'): self.rlzs_assoc = calc.rlzs_assoc else: raise InvalidFile( '%(job_ini)s: missing gmfs_csv, multi_peril_csv' % oq.inputs) if hasattr(calc, 'csm'): self.csm = calc.csm else: self.read_inputs() if self.riskmodel: self.save_riskmodel()
[ "def", "pre_execute", "(", "self", ")", ":", "oq", "=", "self", ".", "oqparam", "if", "'gmfs'", "in", "oq", ".", "inputs", "or", "'multi_peril'", "in", "oq", ".", "inputs", ":", "# read hazard from files", "assert", "not", "oq", ".", "hazard_calculation_id", ",", "(", "'You cannot use --hc together with gmfs_file'", ")", "self", ".", "read_inputs", "(", ")", "if", "'gmfs'", "in", "oq", ".", "inputs", ":", "save_gmfs", "(", "self", ")", "else", ":", "self", ".", "save_multi_peril", "(", ")", "elif", "'hazard_curves'", "in", "oq", ".", "inputs", ":", "# read hazard from file", "assert", "not", "oq", ".", "hazard_calculation_id", ",", "(", "'You cannot use --hc together with hazard_curves'", ")", "haz_sitecol", "=", "readinput", ".", "get_site_collection", "(", "oq", ")", "# NB: horrible: get_site_collection calls get_pmap_from_nrml", "# that sets oq.investigation_time, so it must be called first", "self", ".", "load_riskmodel", "(", ")", "# must be after get_site_collection", "self", ".", "read_exposure", "(", "haz_sitecol", ")", "# define .assets_by_site", "self", ".", "datastore", "[", "'poes/grp-00'", "]", "=", "fix_ones", "(", "readinput", ".", "pmap", ")", "self", ".", "datastore", "[", "'sitecol'", "]", "=", "self", ".", "sitecol", "self", ".", "datastore", "[", "'assetcol'", "]", "=", "self", ".", "assetcol", "self", ".", "datastore", "[", "'csm_info'", "]", "=", "fake", "=", "source", ".", "CompositionInfo", ".", "fake", "(", ")", "self", ".", "rlzs_assoc", "=", "fake", ".", "get_rlzs_assoc", "(", ")", "elif", "oq", ".", "hazard_calculation_id", ":", "parent", "=", "util", ".", "read", "(", "oq", ".", "hazard_calculation_id", ")", "self", ".", "check_precalc", "(", "parent", "[", "'oqparam'", "]", ".", "calculation_mode", ")", "self", ".", "datastore", ".", "parent", "=", "parent", "# copy missing parameters from the parent", "params", "=", "{", "name", ":", "value", "for", "name", ",", "value", "in", "vars", "(", "parent", "[", "'oqparam'", "]", ")", ".", "items", "(", ")", "if", "name", "not", "in", "vars", "(", "self", ".", "oqparam", ")", "}", "self", ".", "save_params", "(", "*", "*", "params", ")", "self", ".", "read_inputs", "(", ")", "oqp", "=", "parent", "[", "'oqparam'", "]", "if", "oqp", ".", "investigation_time", "!=", "oq", ".", "investigation_time", ":", "raise", "ValueError", "(", "'The parent calculation was using investigation_time=%s'", "' != %s'", "%", "(", "oqp", ".", "investigation_time", ",", "oq", ".", "investigation_time", ")", ")", "if", "oqp", ".", "minimum_intensity", "!=", "oq", ".", "minimum_intensity", ":", "raise", "ValueError", "(", "'The parent calculation was using minimum_intensity=%s'", "' != %s'", "%", "(", "oqp", ".", "minimum_intensity", ",", "oq", ".", "minimum_intensity", ")", ")", "missing_imts", "=", "set", "(", "oq", ".", "risk_imtls", ")", "-", "set", "(", "oqp", ".", "imtls", ")", "if", "missing_imts", ":", "raise", "ValueError", "(", "'The parent calculation is missing the IMT(s) %s'", "%", "', '", ".", "join", "(", "missing_imts", ")", ")", "elif", "self", ".", "__class__", ".", "precalc", ":", "calc", "=", "calculators", "[", "self", ".", "__class__", ".", "precalc", "]", "(", "self", ".", "oqparam", ",", "self", ".", "datastore", ".", "calc_id", ")", "calc", ".", "run", "(", ")", "self", ".", "param", "=", "calc", ".", "param", "self", ".", "sitecol", "=", "calc", ".", "sitecol", "self", ".", "assetcol", "=", "calc", ".", "assetcol", "self", ".", "riskmodel", "=", "calc", ".", "riskmodel", "if", "hasattr", "(", "calc", ",", 
"'rlzs_assoc'", ")", ":", "self", ".", "rlzs_assoc", "=", "calc", ".", "rlzs_assoc", "else", ":", "# this happens for instance for a scenario_damage without", "# rupture, gmfs, multi_peril", "raise", "InvalidFile", "(", "'%(job_ini)s: missing gmfs_csv, multi_peril_csv'", "%", "oq", ".", "inputs", ")", "if", "hasattr", "(", "calc", ",", "'csm'", ")", ":", "# no scenario", "self", ".", "csm", "=", "calc", ".", "csm", "else", ":", "self", ".", "read_inputs", "(", ")", "if", "self", ".", "riskmodel", ":", "self", ".", "save_riskmodel", "(", ")" ]
Check if there is a previous calculation ID. If yes, read the inputs by retrieving the previous calculation; if not, read the inputs directly.
[ "Check", "if", "there", "is", "a", "previous", "calculation", "ID", ".", "If", "yes", "read", "the", "inputs", "by", "retrieving", "the", "previous", "calculation", ";", "if", "not", "read", "the", "inputs", "directly", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L422-L496
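One detail worth isolating from the pre_execute record: when a parent calculation is given, every oqparam attribute missing from the child is copied over with a dict comprehension on vars(). A standalone sketch of that "inherit missing attributes" idiom (class and attribute names here are made up for the example):

class Params:
    def __init__(self, **kw):
        vars(self).update(kw)

parent = Params(investigation_time=50, truncation_level=3)
child = Params(truncation_level=5)          # the child overrides one value

# copy only the parameters the child does not define itself
missing = {name: value for name, value in vars(parent).items()
           if name not in vars(child)}
vars(child).update(missing)

assert child.truncation_level == 5          # the child's own value wins
assert child.investigation_time == 50       # inherited from the parent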
gem/oq-engine
openquake/calculators/base.py
HazardCalculator.init
def init(self): """ To be overridden to initialize the datasets needed by the calculation """ oq = self.oqparam if not oq.risk_imtls: if self.datastore.parent: oq.risk_imtls = ( self.datastore.parent['oqparam'].risk_imtls) if 'precalc' in vars(self): self.rlzs_assoc = self.precalc.rlzs_assoc elif 'csm_info' in self.datastore: csm_info = self.datastore['csm_info'] if oq.hazard_calculation_id and 'gsim_logic_tree' in oq.inputs: # redefine the realizations by reading the weights from the # gsim_logic_tree_file that could be different from the parent csm_info.gsim_lt = logictree.GsimLogicTree( oq.inputs['gsim_logic_tree'], set(csm_info.trts)) self.rlzs_assoc = csm_info.get_rlzs_assoc() elif hasattr(self, 'csm'): self.check_floating_spinning() self.rlzs_assoc = self.csm.info.get_rlzs_assoc() else: # build a fake; used by risk-from-file calculators self.datastore['csm_info'] = fake = source.CompositionInfo.fake() self.rlzs_assoc = fake.get_rlzs_assoc()
python
def init(self): oq = self.oqparam if not oq.risk_imtls: if self.datastore.parent: oq.risk_imtls = ( self.datastore.parent['oqparam'].risk_imtls) if 'precalc' in vars(self): self.rlzs_assoc = self.precalc.rlzs_assoc elif 'csm_info' in self.datastore: csm_info = self.datastore['csm_info'] if oq.hazard_calculation_id and 'gsim_logic_tree' in oq.inputs: csm_info.gsim_lt = logictree.GsimLogicTree( oq.inputs['gsim_logic_tree'], set(csm_info.trts)) self.rlzs_assoc = csm_info.get_rlzs_assoc() elif hasattr(self, 'csm'): self.check_floating_spinning() self.rlzs_assoc = self.csm.info.get_rlzs_assoc() else: self.datastore['csm_info'] = fake = source.CompositionInfo.fake() self.rlzs_assoc = fake.get_rlzs_assoc()
[ "def", "init", "(", "self", ")", ":", "oq", "=", "self", ".", "oqparam", "if", "not", "oq", ".", "risk_imtls", ":", "if", "self", ".", "datastore", ".", "parent", ":", "oq", ".", "risk_imtls", "=", "(", "self", ".", "datastore", ".", "parent", "[", "'oqparam'", "]", ".", "risk_imtls", ")", "if", "'precalc'", "in", "vars", "(", "self", ")", ":", "self", ".", "rlzs_assoc", "=", "self", ".", "precalc", ".", "rlzs_assoc", "elif", "'csm_info'", "in", "self", ".", "datastore", ":", "csm_info", "=", "self", ".", "datastore", "[", "'csm_info'", "]", "if", "oq", ".", "hazard_calculation_id", "and", "'gsim_logic_tree'", "in", "oq", ".", "inputs", ":", "# redefine the realizations by reading the weights from the", "# gsim_logic_tree_file that could be different from the parent", "csm_info", ".", "gsim_lt", "=", "logictree", ".", "GsimLogicTree", "(", "oq", ".", "inputs", "[", "'gsim_logic_tree'", "]", ",", "set", "(", "csm_info", ".", "trts", ")", ")", "self", ".", "rlzs_assoc", "=", "csm_info", ".", "get_rlzs_assoc", "(", ")", "elif", "hasattr", "(", "self", ",", "'csm'", ")", ":", "self", ".", "check_floating_spinning", "(", ")", "self", ".", "rlzs_assoc", "=", "self", ".", "csm", ".", "info", ".", "get_rlzs_assoc", "(", ")", "else", ":", "# build a fake; used by risk-from-file calculators", "self", ".", "datastore", "[", "'csm_info'", "]", "=", "fake", "=", "source", ".", "CompositionInfo", ".", "fake", "(", ")", "self", ".", "rlzs_assoc", "=", "fake", ".", "get_rlzs_assoc", "(", ")" ]
To be overridden to initialize the datasets needed by the calculation
[ "To", "be", "overridden", "to", "initialize", "the", "datasets", "needed", "by", "the", "calculation" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L498-L522
gem/oq-engine
openquake/calculators/base.py
HazardCalculator.R
def R(self): """ :returns: the number of realizations """ try: return self.csm.info.get_num_rlzs() except AttributeError: # no self.csm return self.datastore['csm_info'].get_num_rlzs()
python
def R(self): try: return self.csm.info.get_num_rlzs() except AttributeError: return self.datastore['csm_info'].get_num_rlzs()
[ "def", "R", "(", "self", ")", ":", "try", ":", "return", "self", ".", "csm", ".", "info", ".", "get_num_rlzs", "(", ")", "except", "AttributeError", ":", "# no self.csm", "return", "self", ".", "datastore", "[", "'csm_info'", "]", ".", "get_num_rlzs", "(", ")" ]
:returns: the number of realizations
[ ":", "returns", ":", "the", "number", "of", "realizations" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L525-L532
gem/oq-engine
openquake/calculators/base.py
HazardCalculator.read_exposure
def read_exposure(self, haz_sitecol=None): # after load_risk_model """ Read the exposure, the riskmodel and update the attributes .sitecol, .assetcol """ with self.monitor('reading exposure', autoflush=True): self.sitecol, self.assetcol, discarded = ( readinput.get_sitecol_assetcol( self.oqparam, haz_sitecol, self.riskmodel.loss_types)) if len(discarded): self.datastore['discarded'] = discarded if hasattr(self, 'rup'): # this is normal for the case of scenario from rupture logging.info('%d assets were discarded because too far ' 'from the rupture; use `oq show discarded` ' 'to show them and `oq plot_assets` to plot ' 'them' % len(discarded)) elif not self.oqparam.discard_assets: # raise an error self.datastore['sitecol'] = self.sitecol self.datastore['assetcol'] = self.assetcol raise RuntimeError( '%d assets were discarded; use `oq show discarded` to' ' show them and `oq plot_assets` to plot them' % len(discarded)) # reduce the riskmodel to the relevant taxonomies taxonomies = set(taxo for taxo in self.assetcol.tagcol.taxonomy if taxo != '?') if len(self.riskmodel.taxonomies) > len(taxonomies): logging.info('Reducing risk model from %d to %d taxonomies', len(self.riskmodel.taxonomies), len(taxonomies)) self.riskmodel = self.riskmodel.reduce(taxonomies) return readinput.exposure
python
def read_exposure(self, haz_sitecol=None): with self.monitor('reading exposure', autoflush=True): self.sitecol, self.assetcol, discarded = ( readinput.get_sitecol_assetcol( self.oqparam, haz_sitecol, self.riskmodel.loss_types)) if len(discarded): self.datastore['discarded'] = discarded if hasattr(self, 'rup'): logging.info('%d assets were discarded because too far ' 'from the rupture; use `oq show discarded` ' 'to show them and `oq plot_assets` to plot ' 'them' % len(discarded)) elif not self.oqparam.discard_assets: self.datastore['sitecol'] = self.sitecol self.datastore['assetcol'] = self.assetcol raise RuntimeError( '%d assets were discarded; use `oq show discarded` to' ' show them and `oq plot_assets` to plot them' % len(discarded)) taxonomies = set(taxo for taxo in self.assetcol.tagcol.taxonomy if taxo != '?') if len(self.riskmodel.taxonomies) > len(taxonomies): logging.info('Reducing risk model from %d to %d taxonomies', len(self.riskmodel.taxonomies), len(taxonomies)) self.riskmodel = self.riskmodel.reduce(taxonomies) return readinput.exposure
[ "def", "read_exposure", "(", "self", ",", "haz_sitecol", "=", "None", ")", ":", "# after load_risk_model", "with", "self", ".", "monitor", "(", "'reading exposure'", ",", "autoflush", "=", "True", ")", ":", "self", ".", "sitecol", ",", "self", ".", "assetcol", ",", "discarded", "=", "(", "readinput", ".", "get_sitecol_assetcol", "(", "self", ".", "oqparam", ",", "haz_sitecol", ",", "self", ".", "riskmodel", ".", "loss_types", ")", ")", "if", "len", "(", "discarded", ")", ":", "self", ".", "datastore", "[", "'discarded'", "]", "=", "discarded", "if", "hasattr", "(", "self", ",", "'rup'", ")", ":", "# this is normal for the case of scenario from rupture", "logging", ".", "info", "(", "'%d assets were discarded because too far '", "'from the rupture; use `oq show discarded` '", "'to show them and `oq plot_assets` to plot '", "'them'", "%", "len", "(", "discarded", ")", ")", "elif", "not", "self", ".", "oqparam", ".", "discard_assets", ":", "# raise an error", "self", ".", "datastore", "[", "'sitecol'", "]", "=", "self", ".", "sitecol", "self", ".", "datastore", "[", "'assetcol'", "]", "=", "self", ".", "assetcol", "raise", "RuntimeError", "(", "'%d assets were discarded; use `oq show discarded` to'", "' show them and `oq plot_assets` to plot them'", "%", "len", "(", "discarded", ")", ")", "# reduce the riskmodel to the relevant taxonomies", "taxonomies", "=", "set", "(", "taxo", "for", "taxo", "in", "self", ".", "assetcol", ".", "tagcol", ".", "taxonomy", "if", "taxo", "!=", "'?'", ")", "if", "len", "(", "self", ".", "riskmodel", ".", "taxonomies", ")", ">", "len", "(", "taxonomies", ")", ":", "logging", ".", "info", "(", "'Reducing risk model from %d to %d taxonomies'", ",", "len", "(", "self", ".", "riskmodel", ".", "taxonomies", ")", ",", "len", "(", "taxonomies", ")", ")", "self", ".", "riskmodel", "=", "self", ".", "riskmodel", ".", "reduce", "(", "taxonomies", ")", "return", "readinput", ".", "exposure" ]
Read the exposure, the riskmodel and update the attributes .sitecol, .assetcol
[ "Read", "the", "exposure", "the", "riskmodel", "and", "update", "the", "attributes", ".", "sitecol", ".", "assetcol" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L534-L566
gem/oq-engine
openquake/calculators/base.py
HazardCalculator.load_riskmodel
def load_riskmodel(self): # to be called before read_exposure # NB: this is called even if there is no risk model """ Read the risk model and set the attribute .riskmodel. The riskmodel can be empty for hazard calculations. Save the loss ratios (if any) in the datastore. """ logging.info('Reading the risk model if present') self.riskmodel = readinput.get_risk_model(self.oqparam) if not self.riskmodel: parent = self.datastore.parent if 'risk_model' in parent: self.riskmodel = riskinput.CompositeRiskModel.read(parent) return if self.oqparam.ground_motion_fields and not self.oqparam.imtls: raise InvalidFile('No intensity_measure_types specified in %s' % self.oqparam.inputs['job_ini']) self.save_params()
python
def load_riskmodel(self): logging.info('Reading the risk model if present') self.riskmodel = readinput.get_risk_model(self.oqparam) if not self.riskmodel: parent = self.datastore.parent if 'risk_model' in parent: self.riskmodel = riskinput.CompositeRiskModel.read(parent) return if self.oqparam.ground_motion_fields and not self.oqparam.imtls: raise InvalidFile('No intensity_measure_types specified in %s' % self.oqparam.inputs['job_ini']) self.save_params()
[ "def", "load_riskmodel", "(", "self", ")", ":", "# to be called before read_exposure", "# NB: this is called even if there is no risk model", "logging", ".", "info", "(", "'Reading the risk model if present'", ")", "self", ".", "riskmodel", "=", "readinput", ".", "get_risk_model", "(", "self", ".", "oqparam", ")", "if", "not", "self", ".", "riskmodel", ":", "parent", "=", "self", ".", "datastore", ".", "parent", "if", "'risk_model'", "in", "parent", ":", "self", ".", "riskmodel", "=", "riskinput", ".", "CompositeRiskModel", ".", "read", "(", "parent", ")", "return", "if", "self", ".", "oqparam", ".", "ground_motion_fields", "and", "not", "self", ".", "oqparam", ".", "imtls", ":", "raise", "InvalidFile", "(", "'No intensity_measure_types specified in %s'", "%", "self", ".", "oqparam", ".", "inputs", "[", "'job_ini'", "]", ")", "self", ".", "save_params", "(", ")" ]
Read the risk model and set the attribute .riskmodel. The riskmodel can be empty for hazard calculations. Save the loss ratios (if any) in the datastore.
[ "Read", "the", "risk", "model", "and", "set", "the", "attribute", ".", "riskmodel", ".", "The", "riskmodel", "can", "be", "empty", "for", "hazard", "calculations", ".", "Save", "the", "loss", "ratios", "(", "if", "any", ")", "in", "the", "datastore", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L568-L586
gem/oq-engine
openquake/calculators/base.py
HazardCalculator.save_riskmodel
def save_riskmodel(self): """ Save the risk models in the datastore """ self.datastore['risk_model'] = rm = self.riskmodel self.datastore['taxonomy_mapping'] = self.riskmodel.tmap attrs = self.datastore.getitem('risk_model').attrs attrs['min_iml'] = hdf5.array_of_vstr(sorted(rm.min_iml.items())) self.datastore.set_nbytes('risk_model')
python
def save_riskmodel(self): self.datastore['risk_model'] = rm = self.riskmodel self.datastore['taxonomy_mapping'] = self.riskmodel.tmap attrs = self.datastore.getitem('risk_model').attrs attrs['min_iml'] = hdf5.array_of_vstr(sorted(rm.min_iml.items())) self.datastore.set_nbytes('risk_model')
[ "def", "save_riskmodel", "(", "self", ")", ":", "self", ".", "datastore", "[", "'risk_model'", "]", "=", "rm", "=", "self", ".", "riskmodel", "self", ".", "datastore", "[", "'taxonomy_mapping'", "]", "=", "self", ".", "riskmodel", ".", "tmap", "attrs", "=", "self", ".", "datastore", ".", "getitem", "(", "'risk_model'", ")", ".", "attrs", "attrs", "[", "'min_iml'", "]", "=", "hdf5", ".", "array_of_vstr", "(", "sorted", "(", "rm", ".", "min_iml", ".", "items", "(", ")", ")", ")", "self", ".", "datastore", ".", "set_nbytes", "(", "'risk_model'", ")" ]
Save the risk models in the datastore
[ "Save", "the", "risk", "models", "in", "the", "datastore" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L588-L596
gem/oq-engine
openquake/calculators/base.py
HazardCalculator.store_rlz_info
def store_rlz_info(self, eff_ruptures=None): """ Save info about the composite source model inside the csm_info dataset """ if hasattr(self, 'csm'): # no scenario self.csm.info.update_eff_ruptures(eff_ruptures) self.rlzs_assoc = self.csm.info.get_rlzs_assoc( self.oqparam.sm_lt_path) if not self.rlzs_assoc: raise RuntimeError('Empty logic tree: too much filtering?') self.datastore['csm_info'] = self.csm.info R = len(self.rlzs_assoc.realizations) logging.info('There are %d realization(s)', R) if self.oqparam.imtls: self.datastore['weights'] = arr = build_weights( self.rlzs_assoc.realizations, self.oqparam.imt_dt()) self.datastore.set_attrs('weights', nbytes=arr.nbytes) if hasattr(self, 'hdf5cache'): # no scenario with hdf5.File(self.hdf5cache, 'r+') as cache: cache['weights'] = arr if 'event_based' in self.oqparam.calculation_mode and R >= TWO16: # rlzi is 16 bit integer in the GMFs, so there is hard limit or R raise ValueError( 'The logic tree has %d realizations, the maximum ' 'is %d' % (R, TWO16)) elif R > 10000: logging.warning( 'The logic tree has %d realizations(!), please consider ' 'sampling it', R) self.datastore.flush()
python
def store_rlz_info(self, eff_ruptures=None): if hasattr(self, 'csm'): self.csm.info.update_eff_ruptures(eff_ruptures) self.rlzs_assoc = self.csm.info.get_rlzs_assoc( self.oqparam.sm_lt_path) if not self.rlzs_assoc: raise RuntimeError('Empty logic tree: too much filtering?') self.datastore['csm_info'] = self.csm.info R = len(self.rlzs_assoc.realizations) logging.info('There are %d realization(s)', R) if self.oqparam.imtls: self.datastore['weights'] = arr = build_weights( self.rlzs_assoc.realizations, self.oqparam.imt_dt()) self.datastore.set_attrs('weights', nbytes=arr.nbytes) if hasattr(self, 'hdf5cache'): with hdf5.File(self.hdf5cache, 'r+') as cache: cache['weights'] = arr if 'event_based' in self.oqparam.calculation_mode and R >= TWO16: raise ValueError( 'The logic tree has %d realizations, the maximum ' 'is %d' % (R, TWO16)) elif R > 10000: logging.warning( 'The logic tree has %d realizations(!), please consider ' 'sampling it', R) self.datastore.flush()
[ "def", "store_rlz_info", "(", "self", ",", "eff_ruptures", "=", "None", ")", ":", "if", "hasattr", "(", "self", ",", "'csm'", ")", ":", "# no scenario", "self", ".", "csm", ".", "info", ".", "update_eff_ruptures", "(", "eff_ruptures", ")", "self", ".", "rlzs_assoc", "=", "self", ".", "csm", ".", "info", ".", "get_rlzs_assoc", "(", "self", ".", "oqparam", ".", "sm_lt_path", ")", "if", "not", "self", ".", "rlzs_assoc", ":", "raise", "RuntimeError", "(", "'Empty logic tree: too much filtering?'", ")", "self", ".", "datastore", "[", "'csm_info'", "]", "=", "self", ".", "csm", ".", "info", "R", "=", "len", "(", "self", ".", "rlzs_assoc", ".", "realizations", ")", "logging", ".", "info", "(", "'There are %d realization(s)'", ",", "R", ")", "if", "self", ".", "oqparam", ".", "imtls", ":", "self", ".", "datastore", "[", "'weights'", "]", "=", "arr", "=", "build_weights", "(", "self", ".", "rlzs_assoc", ".", "realizations", ",", "self", ".", "oqparam", ".", "imt_dt", "(", ")", ")", "self", ".", "datastore", ".", "set_attrs", "(", "'weights'", ",", "nbytes", "=", "arr", ".", "nbytes", ")", "if", "hasattr", "(", "self", ",", "'hdf5cache'", ")", ":", "# no scenario", "with", "hdf5", ".", "File", "(", "self", ".", "hdf5cache", ",", "'r+'", ")", "as", "cache", ":", "cache", "[", "'weights'", "]", "=", "arr", "if", "'event_based'", "in", "self", ".", "oqparam", ".", "calculation_mode", "and", "R", ">=", "TWO16", ":", "# rlzi is 16 bit integer in the GMFs, so there is hard limit or R", "raise", "ValueError", "(", "'The logic tree has %d realizations, the maximum '", "'is %d'", "%", "(", "R", ",", "TWO16", ")", ")", "elif", "R", ">", "10000", ":", "logging", ".", "warning", "(", "'The logic tree has %d realizations(!), please consider '", "'sampling it'", ",", "R", ")", "self", ".", "datastore", ".", "flush", "(", ")" ]
Save info about the composite source model inside the csm_info dataset
[ "Save", "info", "about", "the", "composite", "source", "model", "inside", "the", "csm_info", "dataset" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L701-L730
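The store_rlz_info record enforces R < 2**16 for event based calculations because the realization index is stored as a 16-bit integer in the GMFs. A quick numeric check of where that ceiling comes from (illustrative only):

import numpy

TWO16 = 2 ** 16                       # 65536
info = numpy.iinfo(numpy.uint16)
assert info.max == TWO16 - 1          # 65535 is the largest index a uint16 can hold
# hence at most 2**16 realizations fit when rlzi is stored as uint16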
gem/oq-engine
openquake/calculators/base.py
HazardCalculator.store_source_info
def store_source_info(self, calc_times): """ Save (weight, num_sites, calc_time) inside the source_info dataset """ if calc_times: source_info = self.datastore['source_info'] arr = numpy.zeros((len(source_info), 3), F32) ids, vals = zip(*sorted(calc_times.items())) arr[numpy.array(ids)] = vals source_info['weight'] += arr[:, 0] source_info['num_sites'] += arr[:, 1] source_info['calc_time'] += arr[:, 2]
python
def store_source_info(self, calc_times): if calc_times: source_info = self.datastore['source_info'] arr = numpy.zeros((len(source_info), 3), F32) ids, vals = zip(*sorted(calc_times.items())) arr[numpy.array(ids)] = vals source_info['weight'] += arr[:, 0] source_info['num_sites'] += arr[:, 1] source_info['calc_time'] += arr[:, 2]
[ "def", "store_source_info", "(", "self", ",", "calc_times", ")", ":", "if", "calc_times", ":", "source_info", "=", "self", ".", "datastore", "[", "'source_info'", "]", "arr", "=", "numpy", ".", "zeros", "(", "(", "len", "(", "source_info", ")", ",", "3", ")", ",", "F32", ")", "ids", ",", "vals", "=", "zip", "(", "*", "sorted", "(", "calc_times", ".", "items", "(", ")", ")", ")", "arr", "[", "numpy", ".", "array", "(", "ids", ")", "]", "=", "vals", "source_info", "[", "'weight'", "]", "+=", "arr", "[", ":", ",", "0", "]", "source_info", "[", "'num_sites'", "]", "+=", "arr", "[", ":", ",", "1", "]", "source_info", "[", "'calc_time'", "]", "+=", "arr", "[", ":", ",", "2", "]" ]
Save (weight, num_sites, calc_time) inside the source_info dataset
[ "Save", "(", "weight", "num_sites", "calc_time", ")", "inside", "the", "source_info", "dataset" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L732-L743
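store_source_info above folds the (weight, num_sites, calc_time) triples into the dataset with a single fancy-indexed assignment. A self-contained numpy sketch of the same accumulation pattern (the array sizes and values are invented for the example):

import numpy

calc_times = {2: (0.5, 10, 0.01), 0: (1.0, 3, 0.20)}    # source id -> triple
arr = numpy.zeros((4, 3), numpy.float32)

ids, vals = zip(*sorted(calc_times.items()))            # ids = (0, 2)
arr[numpy.array(ids)] = vals                            # rows 0 and 2 receive the triples

weight = arr[:, 0]          # column 0: weights
num_sites = arr[:, 1]       # column 1: number of sites
calc_time = arr[:, 2]       # column 2: calculation times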
gem/oq-engine
openquake/calculators/base.py
RiskCalculator.read_shakemap
def read_shakemap(self, haz_sitecol, assetcol): """ Enabled only if there is a shakemap_id parameter in the job.ini. Download, unzip, parse USGS shakemap files and build a corresponding set of GMFs which are then filtered with the hazard site collection and stored in the datastore. """ oq = self.oqparam E = oq.number_of_ground_motion_fields oq.risk_imtls = oq.imtls or self.datastore.parent['oqparam'].imtls extra = self.riskmodel.get_extra_imts(oq.risk_imtls) if extra: logging.warning('There are risk functions for not available IMTs ' 'which will be ignored: %s' % extra) logging.info('Getting/reducing shakemap') with self.monitor('getting/reducing shakemap'): smap = oq.shakemap_id if oq.shakemap_id else numpy.load( oq.inputs['shakemap']) sitecol, shakemap, discarded = get_sitecol_shakemap( smap, oq.imtls, haz_sitecol, oq.asset_hazard_distance['default'], oq.discard_assets) if len(discarded): self.datastore['discarded'] = discarded assetcol = assetcol.reduce_also(sitecol) logging.info('Building GMFs') with self.monitor('building/saving GMFs'): imts, gmfs = to_gmfs( shakemap, oq.spatial_correlation, oq.cross_correlation, oq.site_effects, oq.truncation_level, E, oq.random_seed, oq.imtls) save_gmf_data(self.datastore, sitecol, gmfs, imts) return sitecol, assetcol
python
def read_shakemap(self, haz_sitecol, assetcol): oq = self.oqparam E = oq.number_of_ground_motion_fields oq.risk_imtls = oq.imtls or self.datastore.parent['oqparam'].imtls extra = self.riskmodel.get_extra_imts(oq.risk_imtls) if extra: logging.warning('There are risk functions for not available IMTs ' 'which will be ignored: %s' % extra) logging.info('Getting/reducing shakemap') with self.monitor('getting/reducing shakemap'): smap = oq.shakemap_id if oq.shakemap_id else numpy.load( oq.inputs['shakemap']) sitecol, shakemap, discarded = get_sitecol_shakemap( smap, oq.imtls, haz_sitecol, oq.asset_hazard_distance['default'], oq.discard_assets) if len(discarded): self.datastore['discarded'] = discarded assetcol = assetcol.reduce_also(sitecol) logging.info('Building GMFs') with self.monitor('building/saving GMFs'): imts, gmfs = to_gmfs( shakemap, oq.spatial_correlation, oq.cross_correlation, oq.site_effects, oq.truncation_level, E, oq.random_seed, oq.imtls) save_gmf_data(self.datastore, sitecol, gmfs, imts) return sitecol, assetcol
[ "def", "read_shakemap", "(", "self", ",", "haz_sitecol", ",", "assetcol", ")", ":", "oq", "=", "self", ".", "oqparam", "E", "=", "oq", ".", "number_of_ground_motion_fields", "oq", ".", "risk_imtls", "=", "oq", ".", "imtls", "or", "self", ".", "datastore", ".", "parent", "[", "'oqparam'", "]", ".", "imtls", "extra", "=", "self", ".", "riskmodel", ".", "get_extra_imts", "(", "oq", ".", "risk_imtls", ")", "if", "extra", ":", "logging", ".", "warning", "(", "'There are risk functions for not available IMTs '", "'which will be ignored: %s'", "%", "extra", ")", "logging", ".", "info", "(", "'Getting/reducing shakemap'", ")", "with", "self", ".", "monitor", "(", "'getting/reducing shakemap'", ")", ":", "smap", "=", "oq", ".", "shakemap_id", "if", "oq", ".", "shakemap_id", "else", "numpy", ".", "load", "(", "oq", ".", "inputs", "[", "'shakemap'", "]", ")", "sitecol", ",", "shakemap", ",", "discarded", "=", "get_sitecol_shakemap", "(", "smap", ",", "oq", ".", "imtls", ",", "haz_sitecol", ",", "oq", ".", "asset_hazard_distance", "[", "'default'", "]", ",", "oq", ".", "discard_assets", ")", "if", "len", "(", "discarded", ")", ":", "self", ".", "datastore", "[", "'discarded'", "]", "=", "discarded", "assetcol", "=", "assetcol", ".", "reduce_also", "(", "sitecol", ")", "logging", ".", "info", "(", "'Building GMFs'", ")", "with", "self", ".", "monitor", "(", "'building/saving GMFs'", ")", ":", "imts", ",", "gmfs", "=", "to_gmfs", "(", "shakemap", ",", "oq", ".", "spatial_correlation", ",", "oq", ".", "cross_correlation", ",", "oq", ".", "site_effects", ",", "oq", ".", "truncation_level", ",", "E", ",", "oq", ".", "random_seed", ",", "oq", ".", "imtls", ")", "save_gmf_data", "(", "self", ".", "datastore", ",", "sitecol", ",", "gmfs", ",", "imts", ")", "return", "sitecol", ",", "assetcol" ]
Enabled only if there is a shakemap_id parameter in the job.ini. Download, unzip, parse USGS shakemap files and build a corresponding set of GMFs which are then filtered with the hazard site collection and stored in the datastore.
[ "Enabled", "only", "if", "there", "is", "a", "shakemap_id", "parameter", "in", "the", "job", ".", "ini", ".", "Download", "unzip", "parse", "USGS", "shakemap", "files", "and", "build", "a", "corresponding", "set", "of", "GMFs", "which", "are", "then", "filtered", "with", "the", "hazard", "site", "collection", "and", "stored", "in", "the", "datastore", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L766-L800
gem/oq-engine
openquake/calculators/base.py
RiskCalculator.build_riskinputs
def build_riskinputs(self, kind): """ :param kind: kind of hazard getter, can be 'poe' or 'gmf' :returns: a list of RiskInputs objects, sorted by IMT. """ logging.info('Building risk inputs from %d realization(s)', self.R) imtls = self.oqparam.imtls if not set(self.oqparam.risk_imtls) & set(imtls): rsk = ', '.join(self.oqparam.risk_imtls) haz = ', '.join(imtls) raise ValueError('The IMTs in the risk models (%s) are disjoint ' "from the IMTs in the hazard (%s)" % (rsk, haz)) self.riskmodel.taxonomy = self.assetcol.tagcol.taxonomy with self.monitor('building riskinputs', autoflush=True): riskinputs = list(self._gen_riskinputs(kind)) assert riskinputs logging.info('Built %d risk inputs', len(riskinputs)) return riskinputs
python
def build_riskinputs(self, kind): logging.info('Building risk inputs from %d realization(s)', self.R) imtls = self.oqparam.imtls if not set(self.oqparam.risk_imtls) & set(imtls): rsk = ', '.join(self.oqparam.risk_imtls) haz = ', '.join(imtls) raise ValueError('The IMTs in the risk models (%s) are disjoint ' "from the IMTs in the hazard (%s)" % (rsk, haz)) self.riskmodel.taxonomy = self.assetcol.tagcol.taxonomy with self.monitor('building riskinputs', autoflush=True): riskinputs = list(self._gen_riskinputs(kind)) assert riskinputs logging.info('Built %d risk inputs', len(riskinputs)) return riskinputs
[ "def", "build_riskinputs", "(", "self", ",", "kind", ")", ":", "logging", ".", "info", "(", "'Building risk inputs from %d realization(s)'", ",", "self", ".", "R", ")", "imtls", "=", "self", ".", "oqparam", ".", "imtls", "if", "not", "set", "(", "self", ".", "oqparam", ".", "risk_imtls", ")", "&", "set", "(", "imtls", ")", ":", "rsk", "=", "', '", ".", "join", "(", "self", ".", "oqparam", ".", "risk_imtls", ")", "haz", "=", "', '", ".", "join", "(", "imtls", ")", "raise", "ValueError", "(", "'The IMTs in the risk models (%s) are disjoint '", "\"from the IMTs in the hazard (%s)\"", "%", "(", "rsk", ",", "haz", ")", ")", "self", ".", "riskmodel", ".", "taxonomy", "=", "self", ".", "assetcol", ".", "tagcol", ".", "taxonomy", "with", "self", ".", "monitor", "(", "'building riskinputs'", ",", "autoflush", "=", "True", ")", ":", "riskinputs", "=", "list", "(", "self", ".", "_gen_riskinputs", "(", "kind", ")", ")", "assert", "riskinputs", "logging", ".", "info", "(", "'Built %d risk inputs'", ",", "len", "(", "riskinputs", ")", ")", "return", "riskinputs" ]
:param kind: kind of hazard getter, can be 'poe' or 'gmf' :returns: a list of RiskInputs objects, sorted by IMT.
[ ":", "param", "kind", ":", "kind", "of", "hazard", "getter", "can", "be", "poe", "or", "gmf", ":", "returns", ":", "a", "list", "of", "RiskInputs", "objects", "sorted", "by", "IMT", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L802-L821
gem/oq-engine
openquake/calculators/base.py
RiskCalculator.get_getter
def get_getter(self, kind, sid): """ :param kind: 'poe' or 'gmf' :param sid: a site ID :returns: a PmapGetter or GmfDataGetter """ hdf5cache = getattr(self, 'hdf5cache', None) if hdf5cache: dstore = hdf5cache elif (self.oqparam.hazard_calculation_id and 'gmf_data' not in self.datastore): # 'gmf_data' in self.datastore happens for ShakeMap calculations self.datastore.parent.close() # make sure it is closed dstore = self.datastore.parent else: dstore = self.datastore if kind == 'poe': # hcurves, shape (R, N) getter = getters.PmapGetter(dstore, self.rlzs_assoc, [sid]) else: # gmf getter = getters.GmfDataGetter(dstore, [sid], self.R) if dstore is self.datastore: getter.init() return getter
python
def get_getter(self, kind, sid): hdf5cache = getattr(self, 'hdf5cache', None) if hdf5cache: dstore = hdf5cache elif (self.oqparam.hazard_calculation_id and 'gmf_data' not in self.datastore): self.datastore.parent.close() dstore = self.datastore.parent else: dstore = self.datastore if kind == 'poe': getter = getters.PmapGetter(dstore, self.rlzs_assoc, [sid]) else: getter = getters.GmfDataGetter(dstore, [sid], self.R) if dstore is self.datastore: getter.init() return getter
[ "def", "get_getter", "(", "self", ",", "kind", ",", "sid", ")", ":", "hdf5cache", "=", "getattr", "(", "self", ",", "'hdf5cache'", ",", "None", ")", "if", "hdf5cache", ":", "dstore", "=", "hdf5cache", "elif", "(", "self", ".", "oqparam", ".", "hazard_calculation_id", "and", "'gmf_data'", "not", "in", "self", ".", "datastore", ")", ":", "# 'gmf_data' in self.datastore happens for ShakeMap calculations", "self", ".", "datastore", ".", "parent", ".", "close", "(", ")", "# make sure it is closed", "dstore", "=", "self", ".", "datastore", ".", "parent", "else", ":", "dstore", "=", "self", ".", "datastore", "if", "kind", "==", "'poe'", ":", "# hcurves, shape (R, N)", "getter", "=", "getters", ".", "PmapGetter", "(", "dstore", ",", "self", ".", "rlzs_assoc", ",", "[", "sid", "]", ")", "else", ":", "# gmf", "getter", "=", "getters", ".", "GmfDataGetter", "(", "dstore", ",", "[", "sid", "]", ",", "self", ".", "R", ")", "if", "dstore", "is", "self", ".", "datastore", ":", "getter", ".", "init", "(", ")", "return", "getter" ]
:param kind: 'poe' or 'gmf' :param sid: a site ID :returns: a PmapGetter or GmfDataGetter
[ ":", "param", "kind", ":", "poe", "or", "gmf", ":", "param", "sid", ":", "a", "site", "ID", ":", "returns", ":", "a", "PmapGetter", "or", "GmfDataGetter" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L823-L845
gem/oq-engine
openquake/calculators/base.py
RiskCalculator.execute
def execute(self): """ Parallelize on the riskinputs and returns a dictionary of results. Require a `.core_task` to be defined with signature (riskinputs, riskmodel, rlzs_assoc, monitor). """ if not hasattr(self, 'riskinputs'): # in the reportwriter return res = Starmap.apply( self.core_task.__func__, (self.riskinputs, self.riskmodel, self.param, self.monitor()), concurrent_tasks=self.oqparam.concurrent_tasks or 1, weight=get_weight ).reduce(self.combine) return res
python
def execute(self): if not hasattr(self, 'riskinputs'): return res = Starmap.apply( self.core_task.__func__, (self.riskinputs, self.riskmodel, self.param, self.monitor()), concurrent_tasks=self.oqparam.concurrent_tasks or 1, weight=get_weight ).reduce(self.combine) return res
[ "def", "execute", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'riskinputs'", ")", ":", "# in the reportwriter", "return", "res", "=", "Starmap", ".", "apply", "(", "self", ".", "core_task", ".", "__func__", ",", "(", "self", ".", "riskinputs", ",", "self", ".", "riskmodel", ",", "self", ".", "param", ",", "self", ".", "monitor", "(", ")", ")", ",", "concurrent_tasks", "=", "self", ".", "oqparam", ".", "concurrent_tasks", "or", "1", ",", "weight", "=", "get_weight", ")", ".", "reduce", "(", "self", ".", "combine", ")", "return", "res" ]
Parallelize on the riskinputs and returns a dictionary of results. Require a `.core_task` to be defined with signature (riskinputs, riskmodel, rlzs_assoc, monitor).
[ "Parallelize", "on", "the", "riskinputs", "and", "returns", "a", "dictionary", "of", "results", ".", "Require", "a", ".", "core_task", "to", "be", "defined", "with", "signature", "(", "riskinputs", "riskmodel", "rlzs_assoc", "monitor", ")", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/base.py#L864-L878
gem/oq-engine
openquake/baselib/zeromq.py
bind
def bind(end_point, socket_type): """ Bind to a zmq URL; raise a proper error if the URL is invalid; return a zmq socket. """ sock = context.socket(socket_type) try: sock.bind(end_point) except zmq.error.ZMQError as exc: sock.close() raise exc.__class__('%s: %s' % (exc, end_point)) return sock
python
def bind(end_point, socket_type): sock = context.socket(socket_type) try: sock.bind(end_point) except zmq.error.ZMQError as exc: sock.close() raise exc.__class__('%s: %s' % (exc, end_point)) return sock
[ "def", "bind", "(", "end_point", ",", "socket_type", ")", ":", "sock", "=", "context", ".", "socket", "(", "socket_type", ")", "try", ":", "sock", ".", "bind", "(", "end_point", ")", "except", "zmq", ".", "error", ".", "ZMQError", "as", "exc", ":", "sock", ".", "close", "(", ")", "raise", "exc", ".", "__class__", "(", "'%s: %s'", "%", "(", "exc", ",", "end_point", ")", ")", "return", "sock" ]
Bind to a zmq URL; raise a proper error if the URL is invalid; return a zmq socket.
[ "Bind", "to", "a", "zmq", "URL", ";", "raise", "a", "proper", "error", "if", "the", "URL", "is", "invalid", ";", "return", "a", "zmq", "socket", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/zeromq.py#L30-L41
gem/oq-engine
openquake/baselib/zeromq.py
Socket.send
def send(self, obj): """ Send an object to the remote server; block and return the reply if the socket type is REQ. :param obj: the Python object to send """ self.zsocket.send_pyobj(obj) self.num_sent += 1 if self.socket_type == zmq.REQ: return self.zsocket.recv_pyobj()
python
def send(self, obj): self.zsocket.send_pyobj(obj) self.num_sent += 1 if self.socket_type == zmq.REQ: return self.zsocket.recv_pyobj()
[ "def", "send", "(", "self", ",", "obj", ")", ":", "self", ".", "zsocket", ".", "send_pyobj", "(", "obj", ")", "self", ".", "num_sent", "+=", "1", "if", "self", ".", "socket_type", "==", "zmq", ".", "REQ", ":", "return", "self", ".", "zsocket", ".", "recv_pyobj", "(", ")" ]
Send an object to the remote server; block and return the reply if the socket type is REQ. :param obj: the Python object to send
[ "Send", "an", "object", "to", "the", "remote", "server", ";", "block", "and", "return", "the", "reply", "if", "the", "socket", "type", "is", "REQ", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/zeromq.py#L139-L150
gem/oq-engine
openquake/hazardlib/geo/utils.py
angular_distance
def angular_distance(km, lat, lat2=None): """ Return the angular distance of two points at the given latitude. >>> '%.3f' % angular_distance(100, lat=40) '1.174' >>> '%.3f' % angular_distance(100, lat=80) '5.179' """ if lat2 is not None: # use the largest latitude to compute the angular distance lat = max(abs(lat), abs(lat2)) return km * KM_TO_DEGREES / math.cos(lat * DEGREES_TO_RAD)
python
def angular_distance(km, lat, lat2=None): if lat2 is not None: lat = max(abs(lat), abs(lat2)) return km * KM_TO_DEGREES / math.cos(lat * DEGREES_TO_RAD)
[ "def", "angular_distance", "(", "km", ",", "lat", ",", "lat2", "=", "None", ")", ":", "if", "lat2", "is", "not", "None", ":", "# use the largest latitude to compute the angular distance", "lat", "=", "max", "(", "abs", "(", "lat", ")", ",", "abs", "(", "lat2", ")", ")", "return", "km", "*", "KM_TO_DEGREES", "/", "math", ".", "cos", "(", "lat", "*", "DEGREES_TO_RAD", ")" ]
Return the angular distance of two points at the given latitude. >>> '%.3f' % angular_distance(100, lat=40) '1.174' >>> '%.3f' % angular_distance(100, lat=80) '5.179'
[ "Return", "the", "angular", "distance", "of", "two", "points", "at", "the", "given", "latitude", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/utils.py#L45-L57
gem/oq-engine
openquake/hazardlib/geo/utils.py
assoc
def assoc(objects, sitecol, assoc_dist, mode, asset_refs=()): """ Associate geographic objects to a site collection. :param objects: something with .lons, .lats or ['lon'] ['lat'], or a list of lists of objects with a .location attribute (i.e. assets_by_site) :param assoc_dist: the maximum distance for association :param mode: if 'strict' fail if at least one site is not associated if 'error' fail if all sites are not associated :returns: (filtered site collection, filtered objects) """ if isinstance(objects, numpy.ndarray) or hasattr(objects, 'lons'): # objects is a geo array with lon, lat fields or a mesh-like instance return _GeographicObjects(objects).assoc(sitecol, assoc_dist, mode) else: # objects is the list assets_by_site return _GeographicObjects(sitecol).assoc2( objects, assoc_dist, mode, asset_refs)
python
def assoc(objects, sitecol, assoc_dist, mode, asset_refs=()): if isinstance(objects, numpy.ndarray) or hasattr(objects, 'lons'): return _GeographicObjects(objects).assoc(sitecol, assoc_dist, mode) else: return _GeographicObjects(sitecol).assoc2( objects, assoc_dist, mode, asset_refs)
[ "def", "assoc", "(", "objects", ",", "sitecol", ",", "assoc_dist", ",", "mode", ",", "asset_refs", "=", "(", ")", ")", ":", "if", "isinstance", "(", "objects", ",", "numpy", ".", "ndarray", ")", "or", "hasattr", "(", "objects", ",", "'lons'", ")", ":", "# objects is a geo array with lon, lat fields or a mesh-like instance", "return", "_GeographicObjects", "(", "objects", ")", ".", "assoc", "(", "sitecol", ",", "assoc_dist", ",", "mode", ")", "else", ":", "# objects is the list assets_by_site", "return", "_GeographicObjects", "(", "sitecol", ")", ".", "assoc2", "(", "objects", ",", "assoc_dist", ",", "mode", ",", "asset_refs", ")" ]
Associate geographic objects to a site collection. :param objects: something with .lons, .lats or ['lon'] ['lat'], or a list of lists of objects with a .location attribute (i.e. assets_by_site) :param assoc_dist: the maximum distance for association :param mode: if 'strict' fail if at least one site is not associated if 'error' fail if all sites are not associated :returns: (filtered site collection, filtered objects)
[ "Associate", "geographic", "objects", "to", "a", "site", "collection", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/utils.py#L177-L196
gem/oq-engine
openquake/hazardlib/geo/utils.py
clean_points
def clean_points(points): """ Given a list of :class:`~openquake.hazardlib.geo.point.Point` objects, return a new list with adjacent duplicate points removed. """ if not points: return points result = [points[0]] for point in points: if point != result[-1]: result.append(point) return result
python
def clean_points(points): if not points: return points result = [points[0]] for point in points: if point != result[-1]: result.append(point) return result
[ "def", "clean_points", "(", "points", ")", ":", "if", "not", "points", ":", "return", "points", "result", "=", "[", "points", "[", "0", "]", "]", "for", "point", "in", "points", ":", "if", "point", "!=", "result", "[", "-", "1", "]", ":", "result", ".", "append", "(", "point", ")", "return", "result" ]
Given a list of :class:`~openquake.hazardlib.geo.point.Point` objects, return a new list with adjacent duplicate points removed.
[ "Given", "a", "list", "of", ":", "class", ":", "~openquake", ".", "hazardlib", ".", "geo", ".", "point", ".", "Point", "objects", "return", "a", "new", "list", "with", "adjacent", "duplicate", "points", "removed", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/utils.py#L199-L211
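A minimal usage sketch for clean_points; the Point(longitude, latitude) constructor and its equality semantics are assumptions drawn from the docstring above, not part of this record. Only adjacent duplicates are dropped, so a point that reappears later in the list survives.

from openquake.hazardlib.geo.point import Point
from openquake.hazardlib.geo.utils import clean_points

pts = [Point(0., 0.), Point(0., 0.), Point(1., 1.), Point(0., 0.)]
# the adjacent duplicate of the first point is removed;
# the trailing Point(0., 0.) is kept because it is not adjacent to its twin
assert clean_points(pts) == [Point(0., 0.), Point(1., 1.), Point(0., 0.)]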
gem/oq-engine
openquake/hazardlib/geo/utils.py
line_intersects_itself
def line_intersects_itself(lons, lats, closed_shape=False): """ Return ``True`` if line of points intersects itself. Line with the last point repeating the first one considered intersecting itself. The line is defined by lists (or numpy arrays) of points' longitudes and latitudes (depth is not taken into account). :param closed_shape: If ``True`` the line will be checked twice: first time with its original shape and second time with the points sequence being shifted by one point (the last point becomes first, the first turns second and so on). This is useful for checking that the sequence of points defines a valid :class:`~openquake.hazardlib.geo.polygon.Polygon`. """ assert len(lons) == len(lats) if len(lons) <= 3: # line can not intersect itself unless there are # at least four points return False west, east, north, south = get_spherical_bounding_box(lons, lats) proj = OrthographicProjection(west, east, north, south) xx, yy = proj(lons, lats) if not shapely.geometry.LineString(list(zip(xx, yy))).is_simple: return True if closed_shape: xx, yy = proj(numpy.roll(lons, 1), numpy.roll(lats, 1)) if not shapely.geometry.LineString(list(zip(xx, yy))).is_simple: return True return False
python
def line_intersects_itself(lons, lats, closed_shape=False): assert len(lons) == len(lats) if len(lons) <= 3: return False west, east, north, south = get_spherical_bounding_box(lons, lats) proj = OrthographicProjection(west, east, north, south) xx, yy = proj(lons, lats) if not shapely.geometry.LineString(list(zip(xx, yy))).is_simple: return True if closed_shape: xx, yy = proj(numpy.roll(lons, 1), numpy.roll(lats, 1)) if not shapely.geometry.LineString(list(zip(xx, yy))).is_simple: return True return False
[ "def", "line_intersects_itself", "(", "lons", ",", "lats", ",", "closed_shape", "=", "False", ")", ":", "assert", "len", "(", "lons", ")", "==", "len", "(", "lats", ")", "if", "len", "(", "lons", ")", "<=", "3", ":", "# line can not intersect itself unless there are", "# at least four points", "return", "False", "west", ",", "east", ",", "north", ",", "south", "=", "get_spherical_bounding_box", "(", "lons", ",", "lats", ")", "proj", "=", "OrthographicProjection", "(", "west", ",", "east", ",", "north", ",", "south", ")", "xx", ",", "yy", "=", "proj", "(", "lons", ",", "lats", ")", "if", "not", "shapely", ".", "geometry", ".", "LineString", "(", "list", "(", "zip", "(", "xx", ",", "yy", ")", ")", ")", ".", "is_simple", ":", "return", "True", "if", "closed_shape", ":", "xx", ",", "yy", "=", "proj", "(", "numpy", ".", "roll", "(", "lons", ",", "1", ")", ",", "numpy", ".", "roll", "(", "lats", ",", "1", ")", ")", "if", "not", "shapely", ".", "geometry", ".", "LineString", "(", "list", "(", "zip", "(", "xx", ",", "yy", ")", ")", ")", ".", "is_simple", ":", "return", "True", "return", "False" ]
Return ``True`` if line of points intersects itself. Line with the last point repeating the first one considered intersecting itself. The line is defined by lists (or numpy arrays) of points' longitudes and latitudes (depth is not taken into account). :param closed_shape: If ``True`` the line will be checked twice: first time with its original shape and second time with the points sequence being shifted by one point (the last point becomes first, the first turns second and so on). This is useful for checking that the sequence of points defines a valid :class:`~openquake.hazardlib.geo.polygon.Polygon`.
[ "Return", "True", "if", "line", "of", "points", "intersects", "itself", ".", "Line", "with", "the", "last", "point", "repeating", "the", "first", "one", "considered", "intersecting", "itself", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/utils.py#L214-L250
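A hedged usage sketch for line_intersects_itself (assuming the module path openquake.hazardlib.geo.utils from the record above): a zig-zag whose last segment crosses the first is flagged, a straight polyline is not.

import numpy
from openquake.hazardlib.geo.utils import line_intersects_itself

# segment (0,0)-(1,1) is crossed by segment (0,1)-(1,0)
assert line_intersects_itself(numpy.array([0., 1., 0., 1.]),
                              numpy.array([0., 1., 1., 0.]))
# four points on a straight line do not intersect themselves
assert not line_intersects_itself(numpy.array([0., 1., 2., 3.]),
                                  numpy.array([0., 0., 0., 0.]))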
gem/oq-engine
openquake/hazardlib/geo/utils.py
get_bounding_box
def get_bounding_box(obj, maxdist): """ Return the dilated bounding box of a geometric object. :param obj: an object with method .get_bounding_box, or with an attribute .polygon or a list of locations :param maxdist: maximum distance in km """ if hasattr(obj, 'get_bounding_box'): return obj.get_bounding_box(maxdist) elif hasattr(obj, 'polygon'): bbox = obj.polygon.get_bbox() else: if isinstance(obj, list): # a list of locations lons = numpy.array([loc.longitude for loc in obj]) lats = numpy.array([loc.latitude for loc in obj]) else: # assume an array with fields lon, lat lons, lats = obj['lon'], obj['lat'] min_lon, max_lon = lons.min(), lons.max() if cross_idl(min_lon, max_lon): lons %= 360 bbox = lons.min(), lats.min(), lons.max(), lats.max() a1 = min(maxdist * KM_TO_DEGREES, 90) a2 = min(angular_distance(maxdist, bbox[1], bbox[3]), 180) return bbox[0] - a2, bbox[1] - a1, bbox[2] + a2, bbox[3] + a1
python
def get_bounding_box(obj, maxdist): if hasattr(obj, 'get_bounding_box'): return obj.get_bounding_box(maxdist) elif hasattr(obj, 'polygon'): bbox = obj.polygon.get_bbox() else: if isinstance(obj, list): lons = numpy.array([loc.longitude for loc in obj]) lats = numpy.array([loc.latitude for loc in obj]) else: lons, lats = obj['lon'], obj['lat'] min_lon, max_lon = lons.min(), lons.max() if cross_idl(min_lon, max_lon): lons %= 360 bbox = lons.min(), lats.min(), lons.max(), lats.max() a1 = min(maxdist * KM_TO_DEGREES, 90) a2 = min(angular_distance(maxdist, bbox[1], bbox[3]), 180) return bbox[0] - a2, bbox[1] - a1, bbox[2] + a2, bbox[3] + a1
[ "def", "get_bounding_box", "(", "obj", ",", "maxdist", ")", ":", "if", "hasattr", "(", "obj", ",", "'get_bounding_box'", ")", ":", "return", "obj", ".", "get_bounding_box", "(", "maxdist", ")", "elif", "hasattr", "(", "obj", ",", "'polygon'", ")", ":", "bbox", "=", "obj", ".", "polygon", ".", "get_bbox", "(", ")", "else", ":", "if", "isinstance", "(", "obj", ",", "list", ")", ":", "# a list of locations", "lons", "=", "numpy", ".", "array", "(", "[", "loc", ".", "longitude", "for", "loc", "in", "obj", "]", ")", "lats", "=", "numpy", ".", "array", "(", "[", "loc", ".", "latitude", "for", "loc", "in", "obj", "]", ")", "else", ":", "# assume an array with fields lon, lat", "lons", ",", "lats", "=", "obj", "[", "'lon'", "]", ",", "obj", "[", "'lat'", "]", "min_lon", ",", "max_lon", "=", "lons", ".", "min", "(", ")", ",", "lons", ".", "max", "(", ")", "if", "cross_idl", "(", "min_lon", ",", "max_lon", ")", ":", "lons", "%=", "360", "bbox", "=", "lons", ".", "min", "(", ")", ",", "lats", ".", "min", "(", ")", ",", "lons", ".", "max", "(", ")", ",", "lats", ".", "max", "(", ")", "a1", "=", "min", "(", "maxdist", "*", "KM_TO_DEGREES", ",", "90", ")", "a2", "=", "min", "(", "angular_distance", "(", "maxdist", ",", "bbox", "[", "1", "]", ",", "bbox", "[", "3", "]", ")", ",", "180", ")", "return", "bbox", "[", "0", "]", "-", "a2", ",", "bbox", "[", "1", "]", "-", "a1", ",", "bbox", "[", "2", "]", "+", "a2", ",", "bbox", "[", "3", "]", "+", "a1" ]
Return the dilated bounding box of a geometric object. :param obj: an object with method .get_bounding_box, or with an attribute .polygon or a list of locations :param maxdist: maximum distance in km
[ "Return", "the", "dilated", "bounding", "box", "of", "a", "geometric", "object", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/utils.py#L267-L292
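A small sketch of get_bounding_box with a structured array carrying lon/lat fields, one of the accepted inputs per the docstring; with maxdist=0 the box is not dilated.

import numpy
from openquake.hazardlib.geo.utils import get_bounding_box

sites = numpy.array([(10., 45.), (11., 46.)],
                    dtype=[('lon', float), ('lat', float)])
# returns (min_lon, min_lat, max_lon, max_lat) = (10.0, 45.0, 11.0, 46.0)
print(get_bounding_box(sites, maxdist=0))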
gem/oq-engine
openquake/hazardlib/geo/utils.py
get_spherical_bounding_box
def get_spherical_bounding_box(lons, lats): """ Given a collection of points find and return the bounding box, as a pair of longitudes and a pair of latitudes. Parameters define longitudes and latitudes of a point collection respectively in a form of lists or numpy arrays. :return: A tuple of four items. These items represent western, eastern, northern and southern borders of the bounding box respectively. Values are floats in decimal degrees. :raises ValueError: If points collection has the longitudinal extent of more than 180 degrees (it is impossible to define a single hemisphere bound to poles that would contain the whole collection). """ north, south = numpy.max(lats), numpy.min(lats) west, east = numpy.min(lons), numpy.max(lons) assert (-180 <= west <= 180) and (-180 <= east <= 180), (west, east) if get_longitudinal_extent(west, east) < 0: # points are lying on both sides of the international date line # (meridian 180). the actual west longitude is the lowest positive # longitude and east one is the highest negative. if hasattr(lons, 'flatten'): # fixes test_surface_crossing_international_date_line lons = lons.flatten() west = min(lon for lon in lons if lon > 0) east = max(lon for lon in lons if lon < 0) if not all((get_longitudinal_extent(west, lon) >= 0 and get_longitudinal_extent(lon, east) >= 0) for lon in lons): raise ValueError('points collection has longitudinal extent ' 'wider than 180 deg') return SphericalBB(west, east, north, south)
python
def get_spherical_bounding_box(lons, lats): north, south = numpy.max(lats), numpy.min(lats) west, east = numpy.min(lons), numpy.max(lons) assert (-180 <= west <= 180) and (-180 <= east <= 180), (west, east) if get_longitudinal_extent(west, east) < 0: if hasattr(lons, 'flatten'): lons = lons.flatten() west = min(lon for lon in lons if lon > 0) east = max(lon for lon in lons if lon < 0) if not all((get_longitudinal_extent(west, lon) >= 0 and get_longitudinal_extent(lon, east) >= 0) for lon in lons): raise ValueError('points collection has longitudinal extent ' 'wider than 180 deg') return SphericalBB(west, east, north, south)
[ "def", "get_spherical_bounding_box", "(", "lons", ",", "lats", ")", ":", "north", ",", "south", "=", "numpy", ".", "max", "(", "lats", ")", ",", "numpy", ".", "min", "(", "lats", ")", "west", ",", "east", "=", "numpy", ".", "min", "(", "lons", ")", ",", "numpy", ".", "max", "(", "lons", ")", "assert", "(", "-", "180", "<=", "west", "<=", "180", ")", "and", "(", "-", "180", "<=", "east", "<=", "180", ")", ",", "(", "west", ",", "east", ")", "if", "get_longitudinal_extent", "(", "west", ",", "east", ")", "<", "0", ":", "# points are lying on both sides of the international date line", "# (meridian 180). the actual west longitude is the lowest positive", "# longitude and east one is the highest negative.", "if", "hasattr", "(", "lons", ",", "'flatten'", ")", ":", "# fixes test_surface_crossing_international_date_line", "lons", "=", "lons", ".", "flatten", "(", ")", "west", "=", "min", "(", "lon", "for", "lon", "in", "lons", "if", "lon", ">", "0", ")", "east", "=", "max", "(", "lon", "for", "lon", "in", "lons", "if", "lon", "<", "0", ")", "if", "not", "all", "(", "(", "get_longitudinal_extent", "(", "west", ",", "lon", ")", ">=", "0", "and", "get_longitudinal_extent", "(", "lon", ",", "east", ")", ">=", "0", ")", "for", "lon", "in", "lons", ")", ":", "raise", "ValueError", "(", "'points collection has longitudinal extent '", "'wider than 180 deg'", ")", "return", "SphericalBB", "(", "west", ",", "east", ",", "north", ",", "south", ")" ]
Given a collection of points find and return the bounding box, as a pair of longitudes and a pair of latitudes. Parameters define longitudes and latitudes of a point collection respectively in a form of lists or numpy arrays. :return: A tuple of four items. These items represent western, eastern, northern and southern borders of the bounding box respectively. Values are floats in decimal degrees. :raises ValueError: If points collection has the longitudinal extent of more than 180 degrees (it is impossible to define a single hemisphere bound to poles that would contain the whole collection).
[ "Given", "a", "collection", "of", "points", "find", "and", "return", "the", "bounding", "box", "as", "a", "pair", "of", "longitudes", "and", "a", "pair", "of", "latitudes", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/utils.py#L295-L329
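A quick sketch of get_spherical_bounding_box on three points that do not cross the date line; the result is the (west, east, north, south) tuple described above.

import numpy
from openquake.hazardlib.geo.utils import get_spherical_bounding_box

lons = numpy.array([10., 20., 15.])
lats = numpy.array([40., 50., 45.])
# west=10.0, east=20.0, north=50.0, south=40.0
print(get_spherical_bounding_box(lons, lats))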
gem/oq-engine
openquake/hazardlib/geo/utils.py
get_middle_point
def get_middle_point(lon1, lat1, lon2, lat2): """ Given two points return the point exactly in the middle lying on the same great circle arc. Parameters are point coordinates in degrees. :returns: Tuple of longitude and latitude of the point in the middle. """ if lon1 == lon2 and lat1 == lat2: return lon1, lat1 dist = geodetic.geodetic_distance(lon1, lat1, lon2, lat2) azimuth = geodetic.azimuth(lon1, lat1, lon2, lat2) return geodetic.point_at(lon1, lat1, azimuth, dist / 2.0)
python
def get_middle_point(lon1, lat1, lon2, lat2): if lon1 == lon2 and lat1 == lat2: return lon1, lat1 dist = geodetic.geodetic_distance(lon1, lat1, lon2, lat2) azimuth = geodetic.azimuth(lon1, lat1, lon2, lat2) return geodetic.point_at(lon1, lat1, azimuth, dist / 2.0)
[ "def", "get_middle_point", "(", "lon1", ",", "lat1", ",", "lon2", ",", "lat2", ")", ":", "if", "lon1", "==", "lon2", "and", "lat1", "==", "lat2", ":", "return", "lon1", ",", "lat1", "dist", "=", "geodetic", ".", "geodetic_distance", "(", "lon1", ",", "lat1", ",", "lon2", ",", "lat2", ")", "azimuth", "=", "geodetic", ".", "azimuth", "(", "lon1", ",", "lat1", ",", "lon2", ",", "lat2", ")", "return", "geodetic", ".", "point_at", "(", "lon1", ",", "lat1", ",", "azimuth", ",", "dist", "/", "2.0", ")" ]
Given two points return the point exactly in the middle lying on the same great circle arc. Parameters are point coordinates in degrees. :returns: Tuple of longitude and latitude of the point in the middle.
[ "Given", "two", "points", "return", "the", "point", "exactly", "in", "the", "middle", "lying", "on", "the", "same", "great", "circle", "arc", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/utils.py#L428-L442
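A hedged example for get_middle_point: along a meridian the great-circle midpoint halves the latitude difference (values are approximate, since they come from the geodetic helpers).

from openquake.hazardlib.geo.utils import get_middle_point

# midpoint between (lon=0, lat=0) and (lon=0, lat=10) is roughly (0, 5)
lon, lat = get_middle_point(0., 0., 0., 10.)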
gem/oq-engine
openquake/hazardlib/geo/utils.py
cartesian_to_spherical
def cartesian_to_spherical(vectors): """ Return the spherical coordinates for coordinates in Cartesian space. This function does an opposite to :func:`spherical_to_cartesian`. :param vectors: Array of 3d vectors in Cartesian space of shape (..., 3) :returns: Tuple of three arrays of the same shape as ``vectors`` representing longitude (decimal degrees), latitude (decimal degrees) and depth (km) in specified order. """ rr = numpy.sqrt(numpy.sum(vectors * vectors, axis=-1)) xx, yy, zz = vectors.T lats = numpy.degrees(numpy.arcsin((zz / rr).clip(-1., 1.))) lons = numpy.degrees(numpy.arctan2(yy, xx)) depths = EARTH_RADIUS - rr return lons.T, lats.T, depths
python
def cartesian_to_spherical(vectors): rr = numpy.sqrt(numpy.sum(vectors * vectors, axis=-1)) xx, yy, zz = vectors.T lats = numpy.degrees(numpy.arcsin((zz / rr).clip(-1., 1.))) lons = numpy.degrees(numpy.arctan2(yy, xx)) depths = EARTH_RADIUS - rr return lons.T, lats.T, depths
[ "def", "cartesian_to_spherical", "(", "vectors", ")", ":", "rr", "=", "numpy", ".", "sqrt", "(", "numpy", ".", "sum", "(", "vectors", "*", "vectors", ",", "axis", "=", "-", "1", ")", ")", "xx", ",", "yy", ",", "zz", "=", "vectors", ".", "T", "lats", "=", "numpy", ".", "degrees", "(", "numpy", ".", "arcsin", "(", "(", "zz", "/", "rr", ")", ".", "clip", "(", "-", "1.", ",", "1.", ")", ")", ")", "lons", "=", "numpy", ".", "degrees", "(", "numpy", ".", "arctan2", "(", "yy", ",", "xx", ")", ")", "depths", "=", "EARTH_RADIUS", "-", "rr", "return", "lons", ".", "T", ",", "lats", ".", "T", ",", "depths" ]
Return the spherical coordinates for coordinates in Cartesian space. This function does an opposite to :func:`spherical_to_cartesian`. :param vectors: Array of 3d vectors in Cartesian space of shape (..., 3) :returns: Tuple of three arrays of the same shape as ``vectors`` representing longitude (decimal degrees), latitude (decimal degrees) and depth (km) in specified order.
[ "Return", "the", "spherical", "coordinates", "for", "coordinates", "in", "Cartesian", "space", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/utils.py#L445-L463
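A sketch for cartesian_to_spherical; the 6371 km value below is an assumption standing in for the EARTH_RADIUS constant used by the module, so the recovered depth is only approximately zero.

import numpy
from openquake.hazardlib.geo.utils import cartesian_to_spherical

# a point on the equator at longitude 0, on the Earth's surface
vec = numpy.array([6371., 0., 0.])  # assumes EARTH_RADIUS ~ 6371 km
lon, lat, depth = cartesian_to_spherical(vec)  # lon ~ 0, lat ~ 0, depth ~ 0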
gem/oq-engine
openquake/hazardlib/geo/utils.py
triangle_area
def triangle_area(e1, e2, e3): """ Get the area of triangle formed by three vectors. Parameters are three three-dimensional numpy arrays representing vectors of triangle's edges in Cartesian space. :returns: Float number, the area of the triangle in squared units of coordinates, or numpy array of shape of edges with one dimension less. Uses Heron formula, see http://mathworld.wolfram.com/HeronsFormula.html. """ # calculating edges length e1_length = numpy.sqrt(numpy.sum(e1 * e1, axis=-1)) e2_length = numpy.sqrt(numpy.sum(e2 * e2, axis=-1)) e3_length = numpy.sqrt(numpy.sum(e3 * e3, axis=-1)) # calculating half perimeter s = (e1_length + e2_length + e3_length) / 2.0 # applying Heron's formula return numpy.sqrt(s * (s - e1_length) * (s - e2_length) * (s - e3_length))
python
def triangle_area(e1, e2, e3): e1_length = numpy.sqrt(numpy.sum(e1 * e1, axis=-1)) e2_length = numpy.sqrt(numpy.sum(e2 * e2, axis=-1)) e3_length = numpy.sqrt(numpy.sum(e3 * e3, axis=-1)) s = (e1_length + e2_length + e3_length) / 2.0 return numpy.sqrt(s * (s - e1_length) * (s - e2_length) * (s - e3_length))
[ "def", "triangle_area", "(", "e1", ",", "e2", ",", "e3", ")", ":", "# calculating edges length", "e1_length", "=", "numpy", ".", "sqrt", "(", "numpy", ".", "sum", "(", "e1", "*", "e1", ",", "axis", "=", "-", "1", ")", ")", "e2_length", "=", "numpy", ".", "sqrt", "(", "numpy", ".", "sum", "(", "e2", "*", "e2", ",", "axis", "=", "-", "1", ")", ")", "e3_length", "=", "numpy", ".", "sqrt", "(", "numpy", ".", "sum", "(", "e3", "*", "e3", ",", "axis", "=", "-", "1", ")", ")", "# calculating half perimeter", "s", "=", "(", "e1_length", "+", "e2_length", "+", "e3_length", ")", "/", "2.0", "# applying Heron's formula", "return", "numpy", ".", "sqrt", "(", "s", "*", "(", "s", "-", "e1_length", ")", "*", "(", "s", "-", "e2_length", ")", "*", "(", "s", "-", "e3_length", ")", ")" ]
Get the area of triangle formed by three vectors. Parameters are three three-dimensional numpy arrays representing vectors of triangle's edges in Cartesian space. :returns: Float number, the area of the triangle in squared units of coordinates, or numpy array of shape of edges with one dimension less. Uses Heron formula, see http://mathworld.wolfram.com/HeronsFormula.html.
[ "Get", "the", "area", "of", "triangle", "formed", "by", "three", "vectors", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/utils.py#L466-L486
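A usage sketch for triangle_area with the edge vectors of a 3-4-5 right triangle; Heron's formula gives an area of 6.

import numpy
from openquake.hazardlib.geo.utils import triangle_area

e1 = numpy.array([3., 0., 0.])
e2 = numpy.array([0., 4., 0.])
e3 = numpy.array([-3., -4., 0.])  # closes the triangle
assert abs(triangle_area(e1, e2, e3) - 6.0) < 1e-9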
gem/oq-engine
openquake/hazardlib/geo/utils.py
normalized
def normalized(vector): """ Get unit vector for a given one. :param vector: Numpy vector as coordinates in Cartesian space, or an array of such. :returns: Numpy array of the same shape and structure where all vectors are normalized. That is, each coordinate component is divided by its vector's length. """ length = numpy.sum(vector * vector, axis=-1) length = numpy.sqrt(length.reshape(length.shape + (1, ))) return vector / length
python
def normalized(vector): length = numpy.sum(vector * vector, axis=-1) length = numpy.sqrt(length.reshape(length.shape + (1, ))) return vector / length
[ "def", "normalized", "(", "vector", ")", ":", "length", "=", "numpy", ".", "sum", "(", "vector", "*", "vector", ",", "axis", "=", "-", "1", ")", "length", "=", "numpy", ".", "sqrt", "(", "length", ".", "reshape", "(", "length", ".", "shape", "+", "(", "1", ",", ")", ")", ")", "return", "vector", "/", "length" ]
Get unit vector for a given one. :param vector: Numpy vector as coordinates in Cartesian space, or an array of such. :returns: Numpy array of the same shape and structure where all vectors are normalized. That is, each coordinate component is divided by its vector's length.
[ "Get", "unit", "vector", "for", "a", "given", "one", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/utils.py#L489-L502
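A one-line sketch for normalized, which rescales a vector to unit length.

import numpy
from openquake.hazardlib.geo.utils import normalized

v = numpy.array([3., 4., 0.])
print(normalized(v))  # roughly [0.6, 0.8, 0.0]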
gem/oq-engine
openquake/hazardlib/geo/utils.py
point_to_polygon_distance
def point_to_polygon_distance(polygon, pxx, pyy): """ Calculate the distance to polygon for each point of the collection on the 2d Cartesian plane. :param polygon: Shapely "Polygon" geometry object. :param pxx: List or numpy array of abscissae values of points to calculate the distance from. :param pyy: Same structure as ``pxx``, but with ordinate values. :returns: Numpy array of distances in units of coordinate system. Points that lie inside the polygon have zero distance. """ pxx = numpy.array(pxx) pyy = numpy.array(pyy) assert pxx.shape == pyy.shape if pxx.ndim == 0: pxx = pxx.reshape((1, )) pyy = pyy.reshape((1, )) result = numpy.array([ polygon.distance(shapely.geometry.Point(pxx.item(i), pyy.item(i))) for i in range(pxx.size) ]) return result.reshape(pxx.shape)
python
def point_to_polygon_distance(polygon, pxx, pyy): pxx = numpy.array(pxx) pyy = numpy.array(pyy) assert pxx.shape == pyy.shape if pxx.ndim == 0: pxx = pxx.reshape((1, )) pyy = pyy.reshape((1, )) result = numpy.array([ polygon.distance(shapely.geometry.Point(pxx.item(i), pyy.item(i))) for i in range(pxx.size) ]) return result.reshape(pxx.shape)
[ "def", "point_to_polygon_distance", "(", "polygon", ",", "pxx", ",", "pyy", ")", ":", "pxx", "=", "numpy", ".", "array", "(", "pxx", ")", "pyy", "=", "numpy", ".", "array", "(", "pyy", ")", "assert", "pxx", ".", "shape", "==", "pyy", ".", "shape", "if", "pxx", ".", "ndim", "==", "0", ":", "pxx", "=", "pxx", ".", "reshape", "(", "(", "1", ",", ")", ")", "pyy", "=", "pyy", ".", "reshape", "(", "(", "1", ",", ")", ")", "result", "=", "numpy", ".", "array", "(", "[", "polygon", ".", "distance", "(", "shapely", ".", "geometry", ".", "Point", "(", "pxx", ".", "item", "(", "i", ")", ",", "pyy", ".", "item", "(", "i", ")", ")", ")", "for", "i", "in", "range", "(", "pxx", ".", "size", ")", "]", ")", "return", "result", ".", "reshape", "(", "pxx", ".", "shape", ")" ]
Calculate the distance to polygon for each point of the collection on the 2d Cartesian plane. :param polygon: Shapely "Polygon" geometry object. :param pxx: List or numpy array of abscissae values of points to calculate the distance from. :param pyy: Same structure as ``pxx``, but with ordinate values. :returns: Numpy array of distances in units of coordinate system. Points that lie inside the polygon have zero distance.
[ "Calculate", "the", "distance", "to", "polygon", "for", "each", "point", "of", "the", "collection", "on", "the", "2d", "Cartesian", "plane", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/utils.py#L505-L531
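A sketch for point_to_polygon_distance using a shapely unit square; points inside the polygon get distance 0.

import shapely.geometry
from openquake.hazardlib.geo.utils import point_to_polygon_distance

square = shapely.geometry.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
# first point is inside (distance 0), second is one unit to the right
print(point_to_polygon_distance(square, [0.5, 2.0], [0.5, 0.5]))  # [0. 1.]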
gem/oq-engine
openquake/hazardlib/geo/utils.py
cross_idl
def cross_idl(lon1, lon2, *lons): """ Return True if two longitude values define line crossing international date line. >>> cross_idl(-45, 45) False >>> cross_idl(-180, -179) False >>> cross_idl(180, 179) False >>> cross_idl(45, -45) False >>> cross_idl(0, 0) False >>> cross_idl(-170, 170) True >>> cross_idl(170, -170) True >>> cross_idl(-180, 180) True """ lons = (lon1, lon2) + lons l1, l2 = min(lons), max(lons) # a line crosses the international date line if the end positions # have different sign and they are more than 180 degrees longitude apart return l1 * l2 < 0 and abs(l1 - l2) > 180
python
def cross_idl(lon1, lon2, *lons): lons = (lon1, lon2) + lons l1, l2 = min(lons), max(lons) return l1 * l2 < 0 and abs(l1 - l2) > 180
[ "def", "cross_idl", "(", "lon1", ",", "lon2", ",", "*", "lons", ")", ":", "lons", "=", "(", "lon1", ",", "lon2", ")", "+", "lons", "l1", ",", "l2", "=", "min", "(", "lons", ")", ",", "max", "(", "lons", ")", "# a line crosses the international date line if the end positions", "# have different sign and they are more than 180 degrees longitude apart", "return", "l1", "*", "l2", "<", "0", "and", "abs", "(", "l1", "-", "l2", ")", ">", "180" ]
Return True if two longitude values define line crossing international date line. >>> cross_idl(-45, 45) False >>> cross_idl(-180, -179) False >>> cross_idl(180, 179) False >>> cross_idl(45, -45) False >>> cross_idl(0, 0) False >>> cross_idl(-170, 170) True >>> cross_idl(170, -170) True >>> cross_idl(-180, 180) True
[ "Return", "True", "if", "two", "longitude", "values", "define", "line", "crossing", "international", "date", "line", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/utils.py#L548-L574
gem/oq-engine
openquake/hazardlib/geo/utils.py
normalize_lons
def normalize_lons(l1, l2): """ An international date line safe way of returning a range of longitudes. >>> normalize_lons(20, 30) # no IDL within the range [(20, 30)] >>> normalize_lons(-17, +17) # no IDL within the range [(-17, 17)] >>> normalize_lons(-178, +179) [(-180, -178), (179, 180)] >>> normalize_lons(178, -179) [(-180, -179), (178, 180)] >>> normalize_lons(179, -179) [(-180, -179), (179, 180)] >>> normalize_lons(177, -176) [(-180, -176), (177, 180)] """ if l1 > l2: # exchange lons l1, l2 = l2, l1 delta = l2 - l1 if l1 < 0 and l2 > 0 and delta > 180: return [(-180, l1), (l2, 180)] elif l1 > 0 and l2 > 180 and delta < 180: return [(l1, 180), (-180, l2 - 360)] elif l1 < -180 and l2 < 0 and delta < 180: return [(l1 + 360, 180), (l2, -180)] return [(l1, l2)]
python
def normalize_lons(l1, l2): if l1 > l2: l1, l2 = l2, l1 delta = l2 - l1 if l1 < 0 and l2 > 0 and delta > 180: return [(-180, l1), (l2, 180)] elif l1 > 0 and l2 > 180 and delta < 180: return [(l1, 180), (-180, l2 - 360)] elif l1 < -180 and l2 < 0 and delta < 180: return [(l1 + 360, 180), (l2, -180)] return [(l1, l2)]
[ "def", "normalize_lons", "(", "l1", ",", "l2", ")", ":", "if", "l1", ">", "l2", ":", "# exchange lons", "l1", ",", "l2", "=", "l2", ",", "l1", "delta", "=", "l2", "-", "l1", "if", "l1", "<", "0", "and", "l2", ">", "0", "and", "delta", ">", "180", ":", "return", "[", "(", "-", "180", ",", "l1", ")", ",", "(", "l2", ",", "180", ")", "]", "elif", "l1", ">", "0", "and", "l2", ">", "180", "and", "delta", "<", "180", ":", "return", "[", "(", "l1", ",", "180", ")", ",", "(", "-", "180", ",", "l2", "-", "360", ")", "]", "elif", "l1", "<", "-", "180", "and", "l2", "<", "0", "and", "delta", "<", "180", ":", "return", "[", "(", "l1", "+", "360", ",", "180", ")", ",", "(", "l2", ",", "-", "180", ")", "]", "return", "[", "(", "l1", ",", "l2", ")", "]" ]
An international date line safe way of returning a range of longitudes. >>> normalize_lons(20, 30) # no IDL within the range [(20, 30)] >>> normalize_lons(-17, +17) # no IDL within the range [(-17, 17)] >>> normalize_lons(-178, +179) [(-180, -178), (179, 180)] >>> normalize_lons(178, -179) [(-180, -179), (178, 180)] >>> normalize_lons(179, -179) [(-180, -179), (179, 180)] >>> normalize_lons(177, -176) [(-180, -176), (177, 180)]
[ "An", "international", "date", "line", "safe", "way", "of", "returning", "a", "range", "of", "longitudes", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/utils.py#L577-L603
gem/oq-engine
openquake/hazardlib/geo/utils.py
within
def within(bbox, lonlat_index): """ :param bbox: a bounding box in lon, lat :param lonlat_index: an rtree index in lon, lat :returns: array of indices within the bounding box """ lon1, lat1, lon2, lat2 = bbox set_ = set() for l1, l2 in normalize_lons(lon1, lon2): box = (l1, lat1, l2, lat2) set_ |= set(lonlat_index.intersection(box)) return numpy.array(sorted(set_), numpy.uint32)
python
def within(bbox, lonlat_index): lon1, lat1, lon2, lat2 = bbox set_ = set() for l1, l2 in normalize_lons(lon1, lon2): box = (l1, lat1, l2, lat2) set_ |= set(lonlat_index.intersection(box)) return numpy.array(sorted(set_), numpy.uint32)
[ "def", "within", "(", "bbox", ",", "lonlat_index", ")", ":", "lon1", ",", "lat1", ",", "lon2", ",", "lat2", "=", "bbox", "set_", "=", "set", "(", ")", "for", "l1", ",", "l2", "in", "normalize_lons", "(", "lon1", ",", "lon2", ")", ":", "box", "=", "(", "l1", ",", "lat1", ",", "l2", ",", "lat2", ")", "set_", "|=", "set", "(", "lonlat_index", ".", "intersection", "(", "box", ")", ")", "return", "numpy", ".", "array", "(", "sorted", "(", "set_", ")", ",", "numpy", ".", "uint32", ")" ]
:param bbox: a bounding box in lon, lat :param lonlat_index: an rtree index in lon, lat :returns: array of indices within the bounding box
[ ":", "param", "bbox", ":", "a", "bounding", "box", "in", "lon", "lat", ":", "param", "lonlat_index", ":", "an", "rtree", "index", "in", "lon", "lat", ":", "returns", ":", "array", "of", "indices", "within", "the", "bounding", "box" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/utils.py#L606-L617
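A tentative sketch for within; the rtree index construction below is an assumption about how callers typically build the lon/lat index and is not part of the record above.

import numpy
from rtree import index
from openquake.hazardlib.geo.utils import within

idx = index.Index()
idx.insert(0, (10., 45., 10., 45.))  # a point-like entry at lon=10, lat=45
print(within((5., 40., 15., 50.), idx))  # array of matching ordinals, here [0]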
gem/oq-engine
openquake/hazardlib/geo/utils.py
plane_fit
def plane_fit(points): """ This fits an n-dimensional plane to a set of points. See http://stackoverflow.com/questions/12299540/plane-fitting-to-4-or-more-xyz-points :parameter points: An instance of :class:~numpy.ndarray. The number of columns must be equal to three. :return: A point on the plane and the normal to the plane. """ points = numpy.transpose(points) points = numpy.reshape(points, (numpy.shape(points)[0], -1)) assert points.shape[0] < points.shape[1], points.shape ctr = points.mean(axis=1) x = points - ctr[:, None] M = numpy.dot(x, x.T) return ctr, numpy.linalg.svd(M)[0][:, -1]
python
def plane_fit(points): points = numpy.transpose(points) points = numpy.reshape(points, (numpy.shape(points)[0], -1)) assert points.shape[0] < points.shape[1], points.shape ctr = points.mean(axis=1) x = points - ctr[:, None] M = numpy.dot(x, x.T) return ctr, numpy.linalg.svd(M)[0][:, -1]
[ "def", "plane_fit", "(", "points", ")", ":", "points", "=", "numpy", ".", "transpose", "(", "points", ")", "points", "=", "numpy", ".", "reshape", "(", "points", ",", "(", "numpy", ".", "shape", "(", "points", ")", "[", "0", "]", ",", "-", "1", ")", ")", "assert", "points", ".", "shape", "[", "0", "]", "<", "points", ".", "shape", "[", "1", "]", ",", "points", ".", "shape", "ctr", "=", "points", ".", "mean", "(", "axis", "=", "1", ")", "x", "=", "points", "-", "ctr", "[", ":", ",", "None", "]", "M", "=", "numpy", ".", "dot", "(", "x", ",", "x", ".", "T", ")", "return", "ctr", ",", "numpy", ".", "linalg", ".", "svd", "(", "M", ")", "[", "0", "]", "[", ":", ",", "-", "1", "]" ]
This fits an n-dimensional plane to a set of points. See http://stackoverflow.com/questions/12299540/plane-fitting-to-4-or-more-xyz-points :parameter points: An instance of :class:~numpy.ndarray. The number of columns must be equal to three. :return: A point on the plane and the normal to the plane.
[ "This", "fits", "an", "n", "-", "dimensional", "plane", "to", "a", "set", "of", "points", ".", "See", "http", ":", "//", "stackoverflow", ".", "com", "/", "questions", "/", "12299540", "/", "plane", "-", "fitting", "-", "to", "-", "4", "-", "or", "-", "more", "-", "xyz", "-", "points" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/utils.py#L620-L637
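A sketch for plane_fit on four points lying in the z = 0 plane; the returned normal is parallel to (0, 0, 1) up to sign.

import numpy
from openquake.hazardlib.geo.utils import plane_fit

pts = numpy.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [1., 1., 0.]])
ctr, normal = plane_fit(pts)
# ctr is the centroid (0.5, 0.5, 0.0); abs(normal) is close to (0, 0, 1)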
gem/oq-engine
openquake/hazardlib/geo/utils.py
_GeographicObjects.get_closest
def get_closest(self, lon, lat, depth=0): """ Get the closest object to the given longitude and latitude and its distance. :param lon: longitude in degrees :param lat: latitude in degrees :param depth: depth in km (default 0) :returns: (object, distance) """ xyz = spherical_to_cartesian(lon, lat, depth) min_dist, idx = self.kdtree.query(xyz) return self.objects[idx], min_dist
python
def get_closest(self, lon, lat, depth=0): xyz = spherical_to_cartesian(lon, lat, depth) min_dist, idx = self.kdtree.query(xyz) return self.objects[idx], min_dist
[ "def", "get_closest", "(", "self", ",", "lon", ",", "lat", ",", "depth", "=", "0", ")", ":", "xyz", "=", "spherical_to_cartesian", "(", "lon", ",", "lat", ",", "depth", ")", "min_dist", ",", "idx", "=", "self", ".", "kdtree", ".", "query", "(", "xyz", ")", "return", "self", ".", "objects", "[", "idx", "]", ",", "min_dist" ]
Get the closest object to the given longitude and latitude and its distance. :param lon: longitude in degrees :param lat: latitude in degrees :param depth: depth in km (default 0) :returns: (object, distance)
[ "Get", "the", "closest", "object", "to", "the", "given", "longitude", "and", "latitude", "and", "its", "distance", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/utils.py#L85-L97
gem/oq-engine
openquake/hazardlib/geo/utils.py
_GeographicObjects.assoc
def assoc(self, sitecol, assoc_dist, mode): """ :param sitecol: a (filtered) site collection :param assoc_dist: the maximum distance for association :param mode: 'strict', 'warn' or 'filter' :returns: filtered site collection, filtered objects, discarded """ assert mode in 'strict warn filter', mode dic = {} discarded = [] for sid, lon, lat in zip(sitecol.sids, sitecol.lons, sitecol.lats): obj, distance = self.get_closest(lon, lat) if assoc_dist is None: dic[sid] = obj # associate all elif distance <= assoc_dist: dic[sid] = obj # associate within elif mode == 'warn': dic[sid] = obj # associate outside logging.warning( 'The closest vs30 site (%.1f %.1f) is distant more than %d' ' km from site #%d (%.1f %.1f)', obj['lon'], obj['lat'], int(distance), sid, lon, lat) elif mode == 'filter': discarded.append(obj) elif mode == 'strict': raise SiteAssociationError( 'There is nothing closer than %s km ' 'to site (%s %s)' % (assoc_dist, lon, lat)) if not dic: raise SiteAssociationError( 'No sites could be associated within %s km' % assoc_dist) return (sitecol.filtered(dic), numpy.array([dic[sid] for sid in sorted(dic)]), discarded)
python
def assoc(self, sitecol, assoc_dist, mode):
        assert mode in 'strict warn filter', mode
        dic = {}
        discarded = []
        for sid, lon, lat in zip(sitecol.sids, sitecol.lons, sitecol.lats):
            obj, distance = self.get_closest(lon, lat)
            if assoc_dist is None:
                dic[sid] = obj
            elif distance <= assoc_dist:
                dic[sid] = obj
            elif mode == 'warn':
                dic[sid] = obj
                logging.warning(
                    'The closest vs30 site (%.1f %.1f) is distant more than %d'
                    ' km from site #%d (%.1f %.1f)', obj['lon'], obj['lat'],
                    int(distance), sid, lon, lat)
            elif mode == 'filter':
                discarded.append(obj)
            elif mode == 'strict':
                raise SiteAssociationError(
                    'There is nothing closer than %s km '
                    'to site (%s %s)' % (assoc_dist, lon, lat))
        if not dic:
            raise SiteAssociationError(
                'No sites could be associated within %s km' % assoc_dist)
        return (sitecol.filtered(dic),
                numpy.array([dic[sid] for sid in sorted(dic)]),
                discarded)
[ "def", "assoc", "(", "self", ",", "sitecol", ",", "assoc_dist", ",", "mode", ")", ":", "assert", "mode", "in", "'strict warn filter'", ",", "mode", "dic", "=", "{", "}", "discarded", "=", "[", "]", "for", "sid", ",", "lon", ",", "lat", "in", "zip", "(", "sitecol", ".", "sids", ",", "sitecol", ".", "lons", ",", "sitecol", ".", "lats", ")", ":", "obj", ",", "distance", "=", "self", ".", "get_closest", "(", "lon", ",", "lat", ")", "if", "assoc_dist", "is", "None", ":", "dic", "[", "sid", "]", "=", "obj", "# associate all", "elif", "distance", "<=", "assoc_dist", ":", "dic", "[", "sid", "]", "=", "obj", "# associate within", "elif", "mode", "==", "'warn'", ":", "dic", "[", "sid", "]", "=", "obj", "# associate outside", "logging", ".", "warning", "(", "'The closest vs30 site (%.1f %.1f) is distant more than %d'", "' km from site #%d (%.1f %.1f)'", ",", "obj", "[", "'lon'", "]", ",", "obj", "[", "'lat'", "]", ",", "int", "(", "distance", ")", ",", "sid", ",", "lon", ",", "lat", ")", "elif", "mode", "==", "'filter'", ":", "discarded", ".", "append", "(", "obj", ")", "elif", "mode", "==", "'strict'", ":", "raise", "SiteAssociationError", "(", "'There is nothing closer than %s km '", "'to site (%s %s)'", "%", "(", "assoc_dist", ",", "lon", ",", "lat", ")", ")", "if", "not", "dic", ":", "raise", "SiteAssociationError", "(", "'No sites could be associated within %s km'", "%", "assoc_dist", ")", "return", "(", "sitecol", ".", "filtered", "(", "dic", ")", ",", "numpy", ".", "array", "(", "[", "dic", "[", "sid", "]", "for", "sid", "in", "sorted", "(", "dic", ")", "]", ")", ",", "discarded", ")" ]
:param sitecol: a (filtered) site collection :param assoc_dist: the maximum distance for association :param mode: 'strict', 'warn' or 'filter' :returns: filtered site collection, filtered objects, discarded
[ ":", "param", "sitecol", ":", "a", "(", "filtered", ")", "site", "collection", ":", "param", "assoc_dist", ":", "the", "maximum", "distance", "for", "association", ":", "param", "mode", ":", "strict", "warn", "or", "filter", ":", "returns", ":", "filtered", "site", "collection", "filtered", "objects", "discarded" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/utils.py#L99-L132
gem/oq-engine
openquake/hazardlib/geo/utils.py
_GeographicObjects.assoc2
def assoc2(self, assets_by_site, assoc_dist, mode, asset_refs): """ Associated a list of assets by site to the site collection used to instantiate GeographicObjects. :param assets_by_sites: a list of lists of assets :param assoc_dist: the maximum distance for association :param mode: 'strict', 'warn' or 'filter' :param asset_ref: ID of the assets are a list of strings :returns: filtered site collection, filtered assets by site, discarded """ assert mode in 'strict filter', mode self.objects.filtered # self.objects must be a SiteCollection asset_dt = numpy.dtype( [('asset_ref', vstr), ('lon', F32), ('lat', F32)]) assets_by_sid = collections.defaultdict(list) discarded = [] for assets in assets_by_site: lon, lat = assets[0].location obj, distance = self.get_closest(lon, lat) if distance <= assoc_dist: # keep the assets, otherwise discard them assets_by_sid[obj['sids']].extend(assets) elif mode == 'strict': raise SiteAssociationError( 'There is nothing closer than %s km ' 'to site (%s %s)' % (assoc_dist, lon, lat)) else: discarded.extend(assets) sids = sorted(assets_by_sid) if not sids: raise SiteAssociationError( 'Could not associate any site to any assets within the ' 'asset_hazard_distance of %s km' % assoc_dist) assets_by_site = [ sorted(assets_by_sid[sid], key=operator.attrgetter('ordinal')) for sid in sids] data = [(asset_refs[asset.ordinal],) + asset.location for asset in discarded] discarded = numpy.array(data, asset_dt) return self.objects.filtered(sids), assets_by_site, discarded
python
def assoc2(self, assets_by_site, assoc_dist, mode, asset_refs): assert mode in 'strict filter', mode self.objects.filtered asset_dt = numpy.dtype( [('asset_ref', vstr), ('lon', F32), ('lat', F32)]) assets_by_sid = collections.defaultdict(list) discarded = [] for assets in assets_by_site: lon, lat = assets[0].location obj, distance = self.get_closest(lon, lat) if distance <= assoc_dist: assets_by_sid[obj['sids']].extend(assets) elif mode == 'strict': raise SiteAssociationError( 'There is nothing closer than %s km ' 'to site (%s %s)' % (assoc_dist, lon, lat)) else: discarded.extend(assets) sids = sorted(assets_by_sid) if not sids: raise SiteAssociationError( 'Could not associate any site to any assets within the ' 'asset_hazard_distance of %s km' % assoc_dist) assets_by_site = [ sorted(assets_by_sid[sid], key=operator.attrgetter('ordinal')) for sid in sids] data = [(asset_refs[asset.ordinal],) + asset.location for asset in discarded] discarded = numpy.array(data, asset_dt) return self.objects.filtered(sids), assets_by_site, discarded
[ "def", "assoc2", "(", "self", ",", "assets_by_site", ",", "assoc_dist", ",", "mode", ",", "asset_refs", ")", ":", "assert", "mode", "in", "'strict filter'", ",", "mode", "self", ".", "objects", ".", "filtered", "# self.objects must be a SiteCollection", "asset_dt", "=", "numpy", ".", "dtype", "(", "[", "(", "'asset_ref'", ",", "vstr", ")", ",", "(", "'lon'", ",", "F32", ")", ",", "(", "'lat'", ",", "F32", ")", "]", ")", "assets_by_sid", "=", "collections", ".", "defaultdict", "(", "list", ")", "discarded", "=", "[", "]", "for", "assets", "in", "assets_by_site", ":", "lon", ",", "lat", "=", "assets", "[", "0", "]", ".", "location", "obj", ",", "distance", "=", "self", ".", "get_closest", "(", "lon", ",", "lat", ")", "if", "distance", "<=", "assoc_dist", ":", "# keep the assets, otherwise discard them", "assets_by_sid", "[", "obj", "[", "'sids'", "]", "]", ".", "extend", "(", "assets", ")", "elif", "mode", "==", "'strict'", ":", "raise", "SiteAssociationError", "(", "'There is nothing closer than %s km '", "'to site (%s %s)'", "%", "(", "assoc_dist", ",", "lon", ",", "lat", ")", ")", "else", ":", "discarded", ".", "extend", "(", "assets", ")", "sids", "=", "sorted", "(", "assets_by_sid", ")", "if", "not", "sids", ":", "raise", "SiteAssociationError", "(", "'Could not associate any site to any assets within the '", "'asset_hazard_distance of %s km'", "%", "assoc_dist", ")", "assets_by_site", "=", "[", "sorted", "(", "assets_by_sid", "[", "sid", "]", ",", "key", "=", "operator", ".", "attrgetter", "(", "'ordinal'", ")", ")", "for", "sid", "in", "sids", "]", "data", "=", "[", "(", "asset_refs", "[", "asset", ".", "ordinal", "]", ",", ")", "+", "asset", ".", "location", "for", "asset", "in", "discarded", "]", "discarded", "=", "numpy", ".", "array", "(", "data", ",", "asset_dt", ")", "return", "self", ".", "objects", ".", "filtered", "(", "sids", ")", ",", "assets_by_site", ",", "discarded" ]
Associated a list of assets by site to the site collection used to instantiate GeographicObjects. :param assets_by_sites: a list of lists of assets :param assoc_dist: the maximum distance for association :param mode: 'strict', 'warn' or 'filter' :param asset_ref: ID of the assets are a list of strings :returns: filtered site collection, filtered assets by site, discarded
[ "Associated", "a", "list", "of", "assets", "by", "site", "to", "the", "site", "collection", "used", "to", "instantiate", "GeographicObjects", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/utils.py#L134-L174
gem/oq-engine
openquake/risklib/read_nrml.py
get_vulnerability_functions_04
def get_vulnerability_functions_04(node, fname): """ :param node: a vulnerabilityModel node :param fname: path to the vulnerability file :returns: a dictionary imt, vf_id -> vulnerability function """ logging.warning('Please upgrade %s to NRML 0.5', fname) # NB: the IMTs can be duplicated and with different levels, each # vulnerability function in a set will get its own levels imts = set() vf_ids = set() # imt, vf_id -> vulnerability function vmodel = scientific.VulnerabilityModel(**node.attrib) for vset in node: imt_str = vset.IML['IMT'] imls = ~vset.IML imts.add(imt_str) for vfun in vset.getnodes('discreteVulnerability'): vf_id = vfun['vulnerabilityFunctionID'] if vf_id in vf_ids: raise InvalidFile( 'Duplicated vulnerabilityFunctionID: %s: %s, line %d' % (vf_id, fname, vfun.lineno)) vf_ids.add(vf_id) with context(fname, vfun): loss_ratios = ~vfun.lossRatio coefficients = ~vfun.coefficientsVariation if len(loss_ratios) != len(imls): raise InvalidFile( 'There are %d loss ratios, but %d imls: %s, line %d' % (len(loss_ratios), len(imls), fname, vfun.lossRatio.lineno)) if len(coefficients) != len(imls): raise InvalidFile( 'There are %d coefficients, but %d imls: %s, line %d' % (len(coefficients), len(imls), fname, vfun.coefficientsVariation.lineno)) with context(fname, vfun): vmodel[imt_str, vf_id] = scientific.VulnerabilityFunction( vf_id, imt_str, imls, loss_ratios, coefficients, vfun['probabilisticDistribution']) return vmodel
python
def get_vulnerability_functions_04(node, fname): logging.warning('Please upgrade %s to NRML 0.5', fname) imts = set() vf_ids = set() vmodel = scientific.VulnerabilityModel(**node.attrib) for vset in node: imt_str = vset.IML['IMT'] imls = ~vset.IML imts.add(imt_str) for vfun in vset.getnodes('discreteVulnerability'): vf_id = vfun['vulnerabilityFunctionID'] if vf_id in vf_ids: raise InvalidFile( 'Duplicated vulnerabilityFunctionID: %s: %s, line %d' % (vf_id, fname, vfun.lineno)) vf_ids.add(vf_id) with context(fname, vfun): loss_ratios = ~vfun.lossRatio coefficients = ~vfun.coefficientsVariation if len(loss_ratios) != len(imls): raise InvalidFile( 'There are %d loss ratios, but %d imls: %s, line %d' % (len(loss_ratios), len(imls), fname, vfun.lossRatio.lineno)) if len(coefficients) != len(imls): raise InvalidFile( 'There are %d coefficients, but %d imls: %s, line %d' % (len(coefficients), len(imls), fname, vfun.coefficientsVariation.lineno)) with context(fname, vfun): vmodel[imt_str, vf_id] = scientific.VulnerabilityFunction( vf_id, imt_str, imls, loss_ratios, coefficients, vfun['probabilisticDistribution']) return vmodel
[ "def", "get_vulnerability_functions_04", "(", "node", ",", "fname", ")", ":", "logging", ".", "warning", "(", "'Please upgrade %s to NRML 0.5'", ",", "fname", ")", "# NB: the IMTs can be duplicated and with different levels, each", "# vulnerability function in a set will get its own levels", "imts", "=", "set", "(", ")", "vf_ids", "=", "set", "(", ")", "# imt, vf_id -> vulnerability function", "vmodel", "=", "scientific", ".", "VulnerabilityModel", "(", "*", "*", "node", ".", "attrib", ")", "for", "vset", "in", "node", ":", "imt_str", "=", "vset", ".", "IML", "[", "'IMT'", "]", "imls", "=", "~", "vset", ".", "IML", "imts", ".", "add", "(", "imt_str", ")", "for", "vfun", "in", "vset", ".", "getnodes", "(", "'discreteVulnerability'", ")", ":", "vf_id", "=", "vfun", "[", "'vulnerabilityFunctionID'", "]", "if", "vf_id", "in", "vf_ids", ":", "raise", "InvalidFile", "(", "'Duplicated vulnerabilityFunctionID: %s: %s, line %d'", "%", "(", "vf_id", ",", "fname", ",", "vfun", ".", "lineno", ")", ")", "vf_ids", ".", "add", "(", "vf_id", ")", "with", "context", "(", "fname", ",", "vfun", ")", ":", "loss_ratios", "=", "~", "vfun", ".", "lossRatio", "coefficients", "=", "~", "vfun", ".", "coefficientsVariation", "if", "len", "(", "loss_ratios", ")", "!=", "len", "(", "imls", ")", ":", "raise", "InvalidFile", "(", "'There are %d loss ratios, but %d imls: %s, line %d'", "%", "(", "len", "(", "loss_ratios", ")", ",", "len", "(", "imls", ")", ",", "fname", ",", "vfun", ".", "lossRatio", ".", "lineno", ")", ")", "if", "len", "(", "coefficients", ")", "!=", "len", "(", "imls", ")", ":", "raise", "InvalidFile", "(", "'There are %d coefficients, but %d imls: %s, line %d'", "%", "(", "len", "(", "coefficients", ")", ",", "len", "(", "imls", ")", ",", "fname", ",", "vfun", ".", "coefficientsVariation", ".", "lineno", ")", ")", "with", "context", "(", "fname", ",", "vfun", ")", ":", "vmodel", "[", "imt_str", ",", "vf_id", "]", "=", "scientific", ".", "VulnerabilityFunction", "(", "vf_id", ",", "imt_str", ",", "imls", ",", "loss_ratios", ",", "coefficients", ",", "vfun", "[", "'probabilisticDistribution'", "]", ")", "return", "vmodel" ]
:param node: a vulnerabilityModel node :param fname: path to the vulnerability file :returns: a dictionary imt, vf_id -> vulnerability function
[ ":", "param", "node", ":", "a", "vulnerabilityModel", "node", ":", "param", "fname", ":", "path", "to", "the", "vulnerability", "file", ":", "returns", ":", "a", "dictionary", "imt", "vf_id", "-", ">", "vulnerability", "function" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/read_nrml.py#L34-L78
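get_vulnerability_functions_04 refuses to build a vulnerability function when the loss-ratio or coefficient vector does not line up with the IML vector. A minimal sketch of that consistency check, with made-up lists and a plain print instead of the engine's InvalidFile (which also reports the file name and line number):

```python
imls = [0.05, 0.1, 0.2, 0.4]         # made-up intensity measure levels
loss_ratios = [0.01, 0.04, 0.10]     # deliberately one value short

if len(loss_ratios) != len(imls):
    print('There are %d loss ratios, but %d imls'
          % (len(loss_ratios), len(imls)))
```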
gem/oq-engine
openquake/risklib/read_nrml.py
get_vulnerability_functions_05
def get_vulnerability_functions_05(node, fname): """ :param node: a vulnerabilityModel node :param fname: path of the vulnerability filter :returns: a dictionary imt, vf_id -> vulnerability function """ # NB: the IMTs can be duplicated and with different levels, each # vulnerability function in a set will get its own levels vf_ids = set() vmodel = scientific.VulnerabilityModel(**node.attrib) # imt, vf_id -> vulnerability function for vfun in node.getnodes('vulnerabilityFunction'): with context(fname, vfun): imt = vfun.imls['imt'] imls = numpy.array(~vfun.imls) vf_id = vfun['id'] if vf_id in vf_ids: raise InvalidFile( 'Duplicated vulnerabilityFunctionID: %s: %s, line %d' % (vf_id, fname, vfun.lineno)) vf_ids.add(vf_id) num_probs = None if vfun['dist'] == 'PM': loss_ratios, probs = [], [] for probabilities in vfun[1:]: loss_ratios.append(probabilities['lr']) probs.append(valid.probabilities(~probabilities)) if num_probs is None: num_probs = len(probs[-1]) elif len(probs[-1]) != num_probs: raise ValueError( 'Wrong number of probabilities (expected %d, ' 'got %d) in %s, line %d' % (num_probs, len(probs[-1]), fname, probabilities.lineno)) all_probs = numpy.array(probs) assert all_probs.shape == (len(loss_ratios), len(imls)), ( len(loss_ratios), len(imls)) vmodel[imt, vf_id] = ( scientific.VulnerabilityFunctionWithPMF( vf_id, imt, imls, numpy.array(loss_ratios), all_probs)) # the seed will be set by readinput.get_risk_model else: with context(fname, vfun): loss_ratios = ~vfun.meanLRs coefficients = ~vfun.covLRs if len(loss_ratios) != len(imls): raise InvalidFile( 'There are %d loss ratios, but %d imls: %s, line %d' % (len(loss_ratios), len(imls), fname, vfun.meanLRs.lineno)) if len(coefficients) != len(imls): raise InvalidFile( 'There are %d coefficients, but %d imls: %s, ' 'line %d' % (len(coefficients), len(imls), fname, vfun.covLRs.lineno)) with context(fname, vfun): vmodel[imt, vf_id] = scientific.VulnerabilityFunction( vf_id, imt, imls, loss_ratios, coefficients, vfun['dist']) return vmodel
python
def get_vulnerability_functions_05(node, fname): vf_ids = set() vmodel = scientific.VulnerabilityModel(**node.attrib) for vfun in node.getnodes('vulnerabilityFunction'): with context(fname, vfun): imt = vfun.imls['imt'] imls = numpy.array(~vfun.imls) vf_id = vfun['id'] if vf_id in vf_ids: raise InvalidFile( 'Duplicated vulnerabilityFunctionID: %s: %s, line %d' % (vf_id, fname, vfun.lineno)) vf_ids.add(vf_id) num_probs = None if vfun['dist'] == 'PM': loss_ratios, probs = [], [] for probabilities in vfun[1:]: loss_ratios.append(probabilities['lr']) probs.append(valid.probabilities(~probabilities)) if num_probs is None: num_probs = len(probs[-1]) elif len(probs[-1]) != num_probs: raise ValueError( 'Wrong number of probabilities (expected %d, ' 'got %d) in %s, line %d' % (num_probs, len(probs[-1]), fname, probabilities.lineno)) all_probs = numpy.array(probs) assert all_probs.shape == (len(loss_ratios), len(imls)), ( len(loss_ratios), len(imls)) vmodel[imt, vf_id] = ( scientific.VulnerabilityFunctionWithPMF( vf_id, imt, imls, numpy.array(loss_ratios), all_probs)) else: with context(fname, vfun): loss_ratios = ~vfun.meanLRs coefficients = ~vfun.covLRs if len(loss_ratios) != len(imls): raise InvalidFile( 'There are %d loss ratios, but %d imls: %s, line %d' % (len(loss_ratios), len(imls), fname, vfun.meanLRs.lineno)) if len(coefficients) != len(imls): raise InvalidFile( 'There are %d coefficients, but %d imls: %s, ' 'line %d' % (len(coefficients), len(imls), fname, vfun.covLRs.lineno)) with context(fname, vfun): vmodel[imt, vf_id] = scientific.VulnerabilityFunction( vf_id, imt, imls, loss_ratios, coefficients, vfun['dist']) return vmodel
[ "def", "get_vulnerability_functions_05", "(", "node", ",", "fname", ")", ":", "# NB: the IMTs can be duplicated and with different levels, each", "# vulnerability function in a set will get its own levels", "vf_ids", "=", "set", "(", ")", "vmodel", "=", "scientific", ".", "VulnerabilityModel", "(", "*", "*", "node", ".", "attrib", ")", "# imt, vf_id -> vulnerability function", "for", "vfun", "in", "node", ".", "getnodes", "(", "'vulnerabilityFunction'", ")", ":", "with", "context", "(", "fname", ",", "vfun", ")", ":", "imt", "=", "vfun", ".", "imls", "[", "'imt'", "]", "imls", "=", "numpy", ".", "array", "(", "~", "vfun", ".", "imls", ")", "vf_id", "=", "vfun", "[", "'id'", "]", "if", "vf_id", "in", "vf_ids", ":", "raise", "InvalidFile", "(", "'Duplicated vulnerabilityFunctionID: %s: %s, line %d'", "%", "(", "vf_id", ",", "fname", ",", "vfun", ".", "lineno", ")", ")", "vf_ids", ".", "add", "(", "vf_id", ")", "num_probs", "=", "None", "if", "vfun", "[", "'dist'", "]", "==", "'PM'", ":", "loss_ratios", ",", "probs", "=", "[", "]", ",", "[", "]", "for", "probabilities", "in", "vfun", "[", "1", ":", "]", ":", "loss_ratios", ".", "append", "(", "probabilities", "[", "'lr'", "]", ")", "probs", ".", "append", "(", "valid", ".", "probabilities", "(", "~", "probabilities", ")", ")", "if", "num_probs", "is", "None", ":", "num_probs", "=", "len", "(", "probs", "[", "-", "1", "]", ")", "elif", "len", "(", "probs", "[", "-", "1", "]", ")", "!=", "num_probs", ":", "raise", "ValueError", "(", "'Wrong number of probabilities (expected %d, '", "'got %d) in %s, line %d'", "%", "(", "num_probs", ",", "len", "(", "probs", "[", "-", "1", "]", ")", ",", "fname", ",", "probabilities", ".", "lineno", ")", ")", "all_probs", "=", "numpy", ".", "array", "(", "probs", ")", "assert", "all_probs", ".", "shape", "==", "(", "len", "(", "loss_ratios", ")", ",", "len", "(", "imls", ")", ")", ",", "(", "len", "(", "loss_ratios", ")", ",", "len", "(", "imls", ")", ")", "vmodel", "[", "imt", ",", "vf_id", "]", "=", "(", "scientific", ".", "VulnerabilityFunctionWithPMF", "(", "vf_id", ",", "imt", ",", "imls", ",", "numpy", ".", "array", "(", "loss_ratios", ")", ",", "all_probs", ")", ")", "# the seed will be set by readinput.get_risk_model", "else", ":", "with", "context", "(", "fname", ",", "vfun", ")", ":", "loss_ratios", "=", "~", "vfun", ".", "meanLRs", "coefficients", "=", "~", "vfun", ".", "covLRs", "if", "len", "(", "loss_ratios", ")", "!=", "len", "(", "imls", ")", ":", "raise", "InvalidFile", "(", "'There are %d loss ratios, but %d imls: %s, line %d'", "%", "(", "len", "(", "loss_ratios", ")", ",", "len", "(", "imls", ")", ",", "fname", ",", "vfun", ".", "meanLRs", ".", "lineno", ")", ")", "if", "len", "(", "coefficients", ")", "!=", "len", "(", "imls", ")", ":", "raise", "InvalidFile", "(", "'There are %d coefficients, but %d imls: %s, '", "'line %d'", "%", "(", "len", "(", "coefficients", ")", ",", "len", "(", "imls", ")", ",", "fname", ",", "vfun", ".", "covLRs", ".", "lineno", ")", ")", "with", "context", "(", "fname", ",", "vfun", ")", ":", "vmodel", "[", "imt", ",", "vf_id", "]", "=", "scientific", ".", "VulnerabilityFunction", "(", "vf_id", ",", "imt", ",", "imls", ",", "loss_ratios", ",", "coefficients", ",", "vfun", "[", "'dist'", "]", ")", "return", "vmodel" ]
:param node: a vulnerabilityModel node :param fname: path of the vulnerability file :returns: a dictionary imt, vf_id -> vulnerability function
[ ":", "param", "node", ":", "a", "vulnerabilityModel", "node", ":", "param", "fname", ":", "path", "of", "the", "vulnerability", "file", ":", "returns", ":", "a", "dictionary", "imt", "vf_id", "-", ">", "vulnerability", "function" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/read_nrml.py#L82-L146
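In the 'PM' branch, get_vulnerability_functions_05 collects one row of probabilities per loss ratio and then asserts that the stacked matrix has shape (number of loss ratios, number of IMLs). A self-contained sketch of that shape check, using plain numpy and invented numbers rather than NRML nodes:

```python
import numpy

imls = numpy.array([0.1, 0.2, 0.4, 0.8])   # intensity measure levels
loss_ratios = [0.05, 0.3, 0.6]             # one row of probabilities per loss ratio
rows = [
    [0.90, 0.60, 0.30, 0.10],
    [0.08, 0.30, 0.50, 0.50],
    [0.02, 0.10, 0.20, 0.40],
]

all_probs = numpy.array(rows)
# same check as in the reader: the matrix must be (n_loss_ratios, n_imls)
assert all_probs.shape == (len(loss_ratios), len(imls)), all_probs.shape
print(all_probs.shape)   # (3, 4)
```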
gem/oq-engine
openquake/risklib/read_nrml.py
ffconvert
def ffconvert(fname, limit_states, ff, min_iml=1E-10): """ Convert a fragility function into a numpy array plus a bunch of attributes. :param fname: path to the fragility model file :param limit_states: expected limit states :param ff: fragility function node :returns: a pair (array, dictionary) """ with context(fname, ff): ffs = ff[1:] imls = ff.imls nodamage = imls.attrib.get('noDamageLimit') if nodamage == 0: # use a cutoff to avoid log(0) in GMPE.to_distribution_values logging.warning('Found a noDamageLimit=0 in %s, line %s, ' 'using %g instead', fname, ff.lineno, min_iml) nodamage = min_iml with context(fname, imls): attrs = dict(format=ff['format'], imt=imls['imt'], id=ff['id'], nodamage=nodamage) LS = len(limit_states) if LS != len(ffs): with context(fname, ff): raise InvalidFile('expected %d limit states, found %d' % (LS, len(ffs))) if ff['format'] == 'continuous': minIML = float(imls['minIML']) if minIML == 0: # use a cutoff to avoid log(0) in GMPE.to_distribution_values logging.warning('Found minIML=0 in %s, line %s, using %g instead', fname, imls.lineno, min_iml) minIML = min_iml attrs['minIML'] = minIML attrs['maxIML'] = float(imls['maxIML']) array = numpy.zeros(LS, [('mean', F64), ('stddev', F64)]) for i, ls, node in zip(range(LS), limit_states, ff[1:]): if ls != node['ls']: with context(fname, node): raise InvalidFile('expected %s, found' % (ls, node['ls'])) array['mean'][i] = node['mean'] array['stddev'][i] = node['stddev'] elif ff['format'] == 'discrete': attrs['imls'] = ~imls valid.check_levels(attrs['imls'], attrs['imt'], min_iml) num_poes = len(attrs['imls']) array = numpy.zeros((LS, num_poes)) for i, ls, node in zip(range(LS), limit_states, ff[1:]): with context(fname, node): if ls != node['ls']: raise InvalidFile('expected %s, found' % (ls, node['ls'])) poes = (~node if isinstance(~node, list) else valid.probabilities(~node)) if len(poes) != num_poes: raise InvalidFile('expected %s, found' % (num_poes, len(poes))) array[i, :] = poes # NB: the format is constrained in nrml.FragilityNode to be either # discrete or continuous, there is no third option return array, attrs
python
def ffconvert(fname, limit_states, ff, min_iml=1E-10): with context(fname, ff): ffs = ff[1:] imls = ff.imls nodamage = imls.attrib.get('noDamageLimit') if nodamage == 0: logging.warning('Found a noDamageLimit=0 in %s, line %s, ' 'using %g instead', fname, ff.lineno, min_iml) nodamage = min_iml with context(fname, imls): attrs = dict(format=ff['format'], imt=imls['imt'], id=ff['id'], nodamage=nodamage) LS = len(limit_states) if LS != len(ffs): with context(fname, ff): raise InvalidFile('expected %d limit states, found %d' % (LS, len(ffs))) if ff['format'] == 'continuous': minIML = float(imls['minIML']) if minIML == 0: logging.warning('Found minIML=0 in %s, line %s, using %g instead', fname, imls.lineno, min_iml) minIML = min_iml attrs['minIML'] = minIML attrs['maxIML'] = float(imls['maxIML']) array = numpy.zeros(LS, [('mean', F64), ('stddev', F64)]) for i, ls, node in zip(range(LS), limit_states, ff[1:]): if ls != node['ls']: with context(fname, node): raise InvalidFile('expected %s, found' % (ls, node['ls'])) array['mean'][i] = node['mean'] array['stddev'][i] = node['stddev'] elif ff['format'] == 'discrete': attrs['imls'] = ~imls valid.check_levels(attrs['imls'], attrs['imt'], min_iml) num_poes = len(attrs['imls']) array = numpy.zeros((LS, num_poes)) for i, ls, node in zip(range(LS), limit_states, ff[1:]): with context(fname, node): if ls != node['ls']: raise InvalidFile('expected %s, found' % (ls, node['ls'])) poes = (~node if isinstance(~node, list) else valid.probabilities(~node)) if len(poes) != num_poes: raise InvalidFile('expected %s, found' % (num_poes, len(poes))) array[i, :] = poes return array, attrs
[ "def", "ffconvert", "(", "fname", ",", "limit_states", ",", "ff", ",", "min_iml", "=", "1E-10", ")", ":", "with", "context", "(", "fname", ",", "ff", ")", ":", "ffs", "=", "ff", "[", "1", ":", "]", "imls", "=", "ff", ".", "imls", "nodamage", "=", "imls", ".", "attrib", ".", "get", "(", "'noDamageLimit'", ")", "if", "nodamage", "==", "0", ":", "# use a cutoff to avoid log(0) in GMPE.to_distribution_values", "logging", ".", "warning", "(", "'Found a noDamageLimit=0 in %s, line %s, '", "'using %g instead'", ",", "fname", ",", "ff", ".", "lineno", ",", "min_iml", ")", "nodamage", "=", "min_iml", "with", "context", "(", "fname", ",", "imls", ")", ":", "attrs", "=", "dict", "(", "format", "=", "ff", "[", "'format'", "]", ",", "imt", "=", "imls", "[", "'imt'", "]", ",", "id", "=", "ff", "[", "'id'", "]", ",", "nodamage", "=", "nodamage", ")", "LS", "=", "len", "(", "limit_states", ")", "if", "LS", "!=", "len", "(", "ffs", ")", ":", "with", "context", "(", "fname", ",", "ff", ")", ":", "raise", "InvalidFile", "(", "'expected %d limit states, found %d'", "%", "(", "LS", ",", "len", "(", "ffs", ")", ")", ")", "if", "ff", "[", "'format'", "]", "==", "'continuous'", ":", "minIML", "=", "float", "(", "imls", "[", "'minIML'", "]", ")", "if", "minIML", "==", "0", ":", "# use a cutoff to avoid log(0) in GMPE.to_distribution_values", "logging", ".", "warning", "(", "'Found minIML=0 in %s, line %s, using %g instead'", ",", "fname", ",", "imls", ".", "lineno", ",", "min_iml", ")", "minIML", "=", "min_iml", "attrs", "[", "'minIML'", "]", "=", "minIML", "attrs", "[", "'maxIML'", "]", "=", "float", "(", "imls", "[", "'maxIML'", "]", ")", "array", "=", "numpy", ".", "zeros", "(", "LS", ",", "[", "(", "'mean'", ",", "F64", ")", ",", "(", "'stddev'", ",", "F64", ")", "]", ")", "for", "i", ",", "ls", ",", "node", "in", "zip", "(", "range", "(", "LS", ")", ",", "limit_states", ",", "ff", "[", "1", ":", "]", ")", ":", "if", "ls", "!=", "node", "[", "'ls'", "]", ":", "with", "context", "(", "fname", ",", "node", ")", ":", "raise", "InvalidFile", "(", "'expected %s, found'", "%", "(", "ls", ",", "node", "[", "'ls'", "]", ")", ")", "array", "[", "'mean'", "]", "[", "i", "]", "=", "node", "[", "'mean'", "]", "array", "[", "'stddev'", "]", "[", "i", "]", "=", "node", "[", "'stddev'", "]", "elif", "ff", "[", "'format'", "]", "==", "'discrete'", ":", "attrs", "[", "'imls'", "]", "=", "~", "imls", "valid", ".", "check_levels", "(", "attrs", "[", "'imls'", "]", ",", "attrs", "[", "'imt'", "]", ",", "min_iml", ")", "num_poes", "=", "len", "(", "attrs", "[", "'imls'", "]", ")", "array", "=", "numpy", ".", "zeros", "(", "(", "LS", ",", "num_poes", ")", ")", "for", "i", ",", "ls", ",", "node", "in", "zip", "(", "range", "(", "LS", ")", ",", "limit_states", ",", "ff", "[", "1", ":", "]", ")", ":", "with", "context", "(", "fname", ",", "node", ")", ":", "if", "ls", "!=", "node", "[", "'ls'", "]", ":", "raise", "InvalidFile", "(", "'expected %s, found'", "%", "(", "ls", ",", "node", "[", "'ls'", "]", ")", ")", "poes", "=", "(", "~", "node", "if", "isinstance", "(", "~", "node", ",", "list", ")", "else", "valid", ".", "probabilities", "(", "~", "node", ")", ")", "if", "len", "(", "poes", ")", "!=", "num_poes", ":", "raise", "InvalidFile", "(", "'expected %s, found'", "%", "(", "num_poes", ",", "len", "(", "poes", ")", ")", ")", "array", "[", "i", ",", ":", "]", "=", "poes", "# NB: the format is constrained in nrml.FragilityNode to be either", "# discrete or continuous, there is no third option", "return", "array", ",", "attrs" ]
Convert a fragility function into a numpy array plus a bunch of attributes. :param fname: path to the fragility model file :param limit_states: expected limit states :param ff: fragility function node :returns: a pair (array, dictionary)
[ "Convert", "a", "fragility", "function", "into", "a", "numpy", "array", "plus", "a", "bunch", "of", "attributes", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/read_nrml.py#L152-L217
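A recurring detail in ffconvert is replacing a zero noDamageLimit or minIML with a small positive cutoff (min_iml, 1E-10 by default) so that later log-based conversions do not hit log(0). A toy illustration of why the floor matters, using plain numpy rather than the engine's GMPE code:

```python
import numpy

min_iml = 1e-10
imls = numpy.array([0.0, 0.05, 0.1, 0.2])

# taking logs directly would produce -inf for the zero level
with numpy.errstate(divide='ignore'):
    print(numpy.log(imls)[0])        # -inf

# applying the cutoff first keeps every value finite
floored = numpy.clip(imls, min_iml, None)
print(numpy.log(floored)[0])         # about -23.03
```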
gem/oq-engine
openquake/risklib/read_nrml.py
get_fragility_model
def get_fragility_model(node, fname): """ :param node: a vulnerabilityModel node :param fname: path to the vulnerability file :returns: a dictionary imt, ff_id -> fragility function list """ with context(fname, node): fid = node['id'] asset_category = node['assetCategory'] loss_type = node['lossCategory'] description = ~node.description limit_states = ~node.limitStates ffs = node[2:] fmodel = scientific.FragilityModel( fid, asset_category, loss_type, description, limit_states) for ff in ffs: array, attrs = ffconvert(fname, limit_states, ff) attrs['id'] = ff['id'] ffl = scientific.FragilityFunctionList(array, **attrs) fmodel[ff.imls['imt'], ff['id']] = ffl return fmodel
python
def get_fragility_model(node, fname): with context(fname, node): fid = node['id'] asset_category = node['assetCategory'] loss_type = node['lossCategory'] description = ~node.description limit_states = ~node.limitStates ffs = node[2:] fmodel = scientific.FragilityModel( fid, asset_category, loss_type, description, limit_states) for ff in ffs: array, attrs = ffconvert(fname, limit_states, ff) attrs['id'] = ff['id'] ffl = scientific.FragilityFunctionList(array, **attrs) fmodel[ff.imls['imt'], ff['id']] = ffl return fmodel
[ "def", "get_fragility_model", "(", "node", ",", "fname", ")", ":", "with", "context", "(", "fname", ",", "node", ")", ":", "fid", "=", "node", "[", "'id'", "]", "asset_category", "=", "node", "[", "'assetCategory'", "]", "loss_type", "=", "node", "[", "'lossCategory'", "]", "description", "=", "~", "node", ".", "description", "limit_states", "=", "~", "node", ".", "limitStates", "ffs", "=", "node", "[", "2", ":", "]", "fmodel", "=", "scientific", ".", "FragilityModel", "(", "fid", ",", "asset_category", ",", "loss_type", ",", "description", ",", "limit_states", ")", "for", "ff", "in", "ffs", ":", "array", ",", "attrs", "=", "ffconvert", "(", "fname", ",", "limit_states", ",", "ff", ")", "attrs", "[", "'id'", "]", "=", "ff", "[", "'id'", "]", "ffl", "=", "scientific", ".", "FragilityFunctionList", "(", "array", ",", "*", "*", "attrs", ")", "fmodel", "[", "ff", ".", "imls", "[", "'imt'", "]", ",", "ff", "[", "'id'", "]", "]", "=", "ffl", "return", "fmodel" ]
:param node: a fragilityModel node :param fname: path to the fragility file :returns: a dictionary imt, ff_id -> fragility function list
[ ":", "param", "node", ":", "a", "fragilityModel", "node", ":", "param", "fname", ":", "path", "to", "the", "fragility", "file", ":", "returns", ":", "a", "dictionary", "imt", "ff_id", "-", ">", "fragility", "function", "list" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/read_nrml.py#L221-L244
gem/oq-engine
openquake/risklib/read_nrml.py
convert_fragility_model_04
def convert_fragility_model_04(node, fname, fmcounter=itertools.count(1)): """ :param node: an :class:`openquake.commonib.node.Node` in NRML 0.4 :param fname: path of the fragility file :returns: an :class:`openquake.commonib.node.Node` in NRML 0.5 """ convert_type = {"lognormal": "logncdf"} new = Node('fragilityModel', dict(assetCategory='building', lossCategory='structural', id='fm_%d_converted_from_NRML_04' % next(fmcounter))) with context(fname, node): fmt = node['format'] descr = ~node.description limit_states = ~node.limitStates new.append(Node('description', {}, descr)) new.append((Node('limitStates', {}, ' '.join(limit_states)))) for ffs in node[2:]: IML = ffs.IML # NB: noDamageLimit = None is different than zero nodamage = ffs.attrib.get('noDamageLimit') ff = Node('fragilityFunction', {'format': fmt}) ff['id'] = ~ffs.taxonomy ff['shape'] = convert_type[ffs.attrib.get('type', 'lognormal')] if fmt == 'continuous': with context(fname, IML): attr = dict(imt=IML['IMT'], minIML=IML['minIML'], maxIML=IML['maxIML']) if nodamage is not None: attr['noDamageLimit'] = nodamage ff.append(Node('imls', attr)) for ffc in ffs[2:]: with context(fname, ffc): ls = ffc['ls'] param = ffc.params with context(fname, param): m, s = param['mean'], param['stddev'] ff.append(Node('params', dict(ls=ls, mean=m, stddev=s))) else: # discrete with context(fname, IML): imls = ' '.join(map(str, (~IML)[1])) attr = dict(imt=IML['IMT']) if nodamage is not None: attr['noDamageLimit'] = nodamage ff.append(Node('imls', attr, imls)) for ffd in ffs[2:]: ls = ffd['ls'] with context(fname, ffd): poes = ' '.join(map(str, ~ffd.poEs)) ff.append(Node('poes', dict(ls=ls), poes)) new.append(ff) return new
python
def convert_fragility_model_04(node, fname, fmcounter=itertools.count(1)): convert_type = {"lognormal": "logncdf"} new = Node('fragilityModel', dict(assetCategory='building', lossCategory='structural', id='fm_%d_converted_from_NRML_04' % next(fmcounter))) with context(fname, node): fmt = node['format'] descr = ~node.description limit_states = ~node.limitStates new.append(Node('description', {}, descr)) new.append((Node('limitStates', {}, ' '.join(limit_states)))) for ffs in node[2:]: IML = ffs.IML nodamage = ffs.attrib.get('noDamageLimit') ff = Node('fragilityFunction', {'format': fmt}) ff['id'] = ~ffs.taxonomy ff['shape'] = convert_type[ffs.attrib.get('type', 'lognormal')] if fmt == 'continuous': with context(fname, IML): attr = dict(imt=IML['IMT'], minIML=IML['minIML'], maxIML=IML['maxIML']) if nodamage is not None: attr['noDamageLimit'] = nodamage ff.append(Node('imls', attr)) for ffc in ffs[2:]: with context(fname, ffc): ls = ffc['ls'] param = ffc.params with context(fname, param): m, s = param['mean'], param['stddev'] ff.append(Node('params', dict(ls=ls, mean=m, stddev=s))) else: with context(fname, IML): imls = ' '.join(map(str, (~IML)[1])) attr = dict(imt=IML['IMT']) if nodamage is not None: attr['noDamageLimit'] = nodamage ff.append(Node('imls', attr, imls)) for ffd in ffs[2:]: ls = ffd['ls'] with context(fname, ffd): poes = ' '.join(map(str, ~ffd.poEs)) ff.append(Node('poes', dict(ls=ls), poes)) new.append(ff) return new
[ "def", "convert_fragility_model_04", "(", "node", ",", "fname", ",", "fmcounter", "=", "itertools", ".", "count", "(", "1", ")", ")", ":", "convert_type", "=", "{", "\"lognormal\"", ":", "\"logncdf\"", "}", "new", "=", "Node", "(", "'fragilityModel'", ",", "dict", "(", "assetCategory", "=", "'building'", ",", "lossCategory", "=", "'structural'", ",", "id", "=", "'fm_%d_converted_from_NRML_04'", "%", "next", "(", "fmcounter", ")", ")", ")", "with", "context", "(", "fname", ",", "node", ")", ":", "fmt", "=", "node", "[", "'format'", "]", "descr", "=", "~", "node", ".", "description", "limit_states", "=", "~", "node", ".", "limitStates", "new", ".", "append", "(", "Node", "(", "'description'", ",", "{", "}", ",", "descr", ")", ")", "new", ".", "append", "(", "(", "Node", "(", "'limitStates'", ",", "{", "}", ",", "' '", ".", "join", "(", "limit_states", ")", ")", ")", ")", "for", "ffs", "in", "node", "[", "2", ":", "]", ":", "IML", "=", "ffs", ".", "IML", "# NB: noDamageLimit = None is different than zero", "nodamage", "=", "ffs", ".", "attrib", ".", "get", "(", "'noDamageLimit'", ")", "ff", "=", "Node", "(", "'fragilityFunction'", ",", "{", "'format'", ":", "fmt", "}", ")", "ff", "[", "'id'", "]", "=", "~", "ffs", ".", "taxonomy", "ff", "[", "'shape'", "]", "=", "convert_type", "[", "ffs", ".", "attrib", ".", "get", "(", "'type'", ",", "'lognormal'", ")", "]", "if", "fmt", "==", "'continuous'", ":", "with", "context", "(", "fname", ",", "IML", ")", ":", "attr", "=", "dict", "(", "imt", "=", "IML", "[", "'IMT'", "]", ",", "minIML", "=", "IML", "[", "'minIML'", "]", ",", "maxIML", "=", "IML", "[", "'maxIML'", "]", ")", "if", "nodamage", "is", "not", "None", ":", "attr", "[", "'noDamageLimit'", "]", "=", "nodamage", "ff", ".", "append", "(", "Node", "(", "'imls'", ",", "attr", ")", ")", "for", "ffc", "in", "ffs", "[", "2", ":", "]", ":", "with", "context", "(", "fname", ",", "ffc", ")", ":", "ls", "=", "ffc", "[", "'ls'", "]", "param", "=", "ffc", ".", "params", "with", "context", "(", "fname", ",", "param", ")", ":", "m", ",", "s", "=", "param", "[", "'mean'", "]", ",", "param", "[", "'stddev'", "]", "ff", ".", "append", "(", "Node", "(", "'params'", ",", "dict", "(", "ls", "=", "ls", ",", "mean", "=", "m", ",", "stddev", "=", "s", ")", ")", ")", "else", ":", "# discrete", "with", "context", "(", "fname", ",", "IML", ")", ":", "imls", "=", "' '", ".", "join", "(", "map", "(", "str", ",", "(", "~", "IML", ")", "[", "1", "]", ")", ")", "attr", "=", "dict", "(", "imt", "=", "IML", "[", "'IMT'", "]", ")", "if", "nodamage", "is", "not", "None", ":", "attr", "[", "'noDamageLimit'", "]", "=", "nodamage", "ff", ".", "append", "(", "Node", "(", "'imls'", ",", "attr", ",", "imls", ")", ")", "for", "ffd", "in", "ffs", "[", "2", ":", "]", ":", "ls", "=", "ffd", "[", "'ls'", "]", "with", "context", "(", "fname", ",", "ffd", ")", ":", "poes", "=", "' '", ".", "join", "(", "map", "(", "str", ",", "~", "ffd", ".", "poEs", ")", ")", "ff", ".", "append", "(", "Node", "(", "'poes'", ",", "dict", "(", "ls", "=", "ls", ")", ",", "poes", ")", ")", "new", ".", "append", "(", "ff", ")", "return", "new" ]
:param node: an :class:`openquake.commonib.node.Node` in NRML 0.4 :param fname: path of the fragility file :returns: an :class:`openquake.commonib.node.Node` in NRML 0.5
[ ":", "param", "node", ":", "an", ":", "class", ":", "openquake", ".", "commonib", ".", "node", ".", "Node", "in", "NRML", "0", ".", "4", ":", "param", "fname", ":", "path", "of", "the", "fragility", "file", ":", "returns", ":", "an", ":", "class", ":", "openquake", ".", "commonib", ".", "node", ".", "Node", "in", "NRML", "0", ".", "5" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/read_nrml.py#L282-L338
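convert_fragility_model_04 rebuilds an NRML 0.4 tree into the 0.5 layout node by node. The general "walk the old tree, emit a new tree" pattern can be sketched with the standard library's ElementTree standing in for the engine's Node class; the element names and attributes below are illustrative only:

```python
import xml.etree.ElementTree as ET

old = ET.fromstring(
    '<ffSet type="lognormal"><taxonomy>RC</taxonomy>'
    '<IML IMT="PGA" minIML="0.1" maxIML="1.0"/></ffSet>')

shape_by_type = {"lognormal": "logncdf"}   # same renaming as convert_type above

new = ET.Element('fragilityFunction', {'format': 'continuous'})
new.set('id', old.findtext('taxonomy'))
new.set('shape', shape_by_type[old.get('type', 'lognormal')])
iml = old.find('IML')
ET.SubElement(new, 'imls', dict(imt=iml.get('IMT'),
                                minIML=iml.get('minIML'),
                                maxIML=iml.get('maxIML')))
print(ET.tostring(new, encoding='unicode'))
```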
gem/oq-engine
openquake/risklib/read_nrml.py
get_fragility_model_04
def get_fragility_model_04(fmodel, fname): """ :param fmodel: a fragilityModel node :param fname: path of the fragility file :returns: an :class:`openquake.risklib.scientific.FragilityModel` instance """ logging.warning('Please upgrade %s to NRML 0.5', fname) node05 = convert_fragility_model_04(fmodel, fname) node05.limitStates.text = node05.limitStates.text.split() return get_fragility_model(node05, fname)
python
def get_fragility_model_04(fmodel, fname): logging.warning('Please upgrade %s to NRML 0.5', fname) node05 = convert_fragility_model_04(fmodel, fname) node05.limitStates.text = node05.limitStates.text.split() return get_fragility_model(node05, fname)
[ "def", "get_fragility_model_04", "(", "fmodel", ",", "fname", ")", ":", "logging", ".", "warning", "(", "'Please upgrade %s to NRML 0.5'", ",", "fname", ")", "node05", "=", "convert_fragility_model_04", "(", "fmodel", ",", "fname", ")", "node05", ".", "limitStates", ".", "text", "=", "node05", ".", "limitStates", ".", "text", ".", "split", "(", ")", "return", "get_fragility_model", "(", "node05", ",", "fname", ")" ]
:param fmodel: a fragilityModel node :param fname: path of the fragility file :returns: an :class:`openquake.risklib.scientific.FragilityModel` instance
[ ":", "param", "fmodel", ":", "a", "fragilityModel", "node", ":", "param", "fname", ":", "path", "of", "the", "fragility", "file", ":", "returns", ":", "an", ":", "class", ":", "openquake", ".", "risklib", ".", "scientific", ".", "FragilityModel", "instance" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/read_nrml.py#L342-L354
gem/oq-engine
openquake/risklib/read_nrml.py
taxonomy
def taxonomy(value): """ Any ASCII character goes into a taxonomy, except spaces. """ try: value.encode('ascii') except UnicodeEncodeError: raise ValueError('tag %r is not ASCII' % value) if re.search(r'\s', value): raise ValueError('The taxonomy %r contains whitespace chars' % value) return value
python
def taxonomy(value): try: value.encode('ascii') except UnicodeEncodeError: raise ValueError('tag %r is not ASCII' % value) if re.search(r'\s', value): raise ValueError('The taxonomy %r contains whitespace chars' % value) return value
[ "def", "taxonomy", "(", "value", ")", ":", "try", ":", "value", ".", "encode", "(", "'ascii'", ")", "except", "UnicodeEncodeError", ":", "raise", "ValueError", "(", "'tag %r is not ASCII'", "%", "value", ")", "if", "re", ".", "search", "(", "r'\\s'", ",", "value", ")", ":", "raise", "ValueError", "(", "'The taxonomy %r contains whitespace chars'", "%", "value", ")", "return", "value" ]
Any ASCII character goes into a taxonomy, except spaces.
[ "Any", "ASCII", "character", "goes", "into", "a", "taxonomy", "except", "spaces", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/read_nrml.py#L371-L381
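taxonomy is a small validator: pure ASCII, no whitespace. The sketch below re-implements the same two checks under a hypothetical name (check_taxonomy) so it runs stand-alone, and shows both the accepting and the rejecting paths:

```python
import re

def check_taxonomy(value):
    # mirrors the two checks above: ASCII only, no whitespace
    try:
        value.encode('ascii')
    except UnicodeEncodeError:
        raise ValueError('tag %r is not ASCII' % value)
    if re.search(r'\s', value):
        raise ValueError('The taxonomy %r contains whitespace chars' % value)
    return value

print(check_taxonomy('RC/LFINF-DUH_H2'))   # accepted, returned unchanged
for bad in ('RC low rise', 'adobé'):
    try:
        check_taxonomy(bad)
    except ValueError as exc:
        print(exc)
```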
gem/oq-engine
openquake/risklib/read_nrml.py
update_validators
def update_validators(): """ Call this to updade the global nrml.validators """ validators.update({ 'fragilityFunction.id': valid.utf8, # taxonomy 'vulnerabilityFunction.id': valid.utf8, # taxonomy 'consequenceFunction.id': valid.utf8, # taxonomy 'asset.id': valid.asset_id, 'costType.name': valid.cost_type, 'costType.type': valid.cost_type_type, 'cost.type': valid.cost_type, 'area.type': valid.name, 'isAbsolute': valid.boolean, 'insuranceLimit': valid.positivefloat, 'deductible': valid.positivefloat, 'occupants': valid.positivefloat, 'value': valid.positivefloat, 'retrofitted': valid.positivefloat, 'number': valid.compose(valid.positivefloat, valid.nonzero), 'vulnerabilitySetID': str, # any ASCII string is fine 'vulnerabilityFunctionID': str, # any ASCII string is fine 'lossCategory': valid.utf8, # a description field 'lr': valid.probability, 'lossRatio': valid.positivefloats, 'coefficientsVariation': valid.positivefloats, 'probabilisticDistribution': valid.Choice('LN', 'BT'), 'dist': valid.Choice('LN', 'BT', 'PM'), 'meanLRs': valid.positivefloats, 'covLRs': valid.positivefloats, 'format': valid.ChoiceCI('discrete', 'continuous'), 'mean': valid.positivefloat, 'stddev': valid.positivefloat, 'minIML': valid.positivefloat, 'maxIML': valid.positivefloat, 'limitStates': valid.namelist, 'noDamageLimit': valid.NoneOr(valid.positivefloat), 'loss_type': valid_loss_types, 'losses': valid.positivefloats, 'averageLoss': valid.positivefloat, 'stdDevLoss': valid.positivefloat, 'ffs.type': valid.ChoiceCI('lognormal'), 'assetLifeExpectancy': valid.positivefloat, 'interestRate': valid.positivefloat, 'lossType': valid_loss_types, 'aalOrig': valid.positivefloat, 'aalRetr': valid.positivefloat, 'ratio': valid.positivefloat, 'cf': asset_mean_stddev, 'damage': damage_triple, 'damageStates': valid.namelist, 'taxonomy': taxonomy, 'tagNames': valid.namelist, })
python
def update_validators(): validators.update({ 'fragilityFunction.id': valid.utf8, 'vulnerabilityFunction.id': valid.utf8, 'consequenceFunction.id': valid.utf8, 'asset.id': valid.asset_id, 'costType.name': valid.cost_type, 'costType.type': valid.cost_type_type, 'cost.type': valid.cost_type, 'area.type': valid.name, 'isAbsolute': valid.boolean, 'insuranceLimit': valid.positivefloat, 'deductible': valid.positivefloat, 'occupants': valid.positivefloat, 'value': valid.positivefloat, 'retrofitted': valid.positivefloat, 'number': valid.compose(valid.positivefloat, valid.nonzero), 'vulnerabilitySetID': str, 'vulnerabilityFunctionID': str, 'lossCategory': valid.utf8, 'lr': valid.probability, 'lossRatio': valid.positivefloats, 'coefficientsVariation': valid.positivefloats, 'probabilisticDistribution': valid.Choice('LN', 'BT'), 'dist': valid.Choice('LN', 'BT', 'PM'), 'meanLRs': valid.positivefloats, 'covLRs': valid.positivefloats, 'format': valid.ChoiceCI('discrete', 'continuous'), 'mean': valid.positivefloat, 'stddev': valid.positivefloat, 'minIML': valid.positivefloat, 'maxIML': valid.positivefloat, 'limitStates': valid.namelist, 'noDamageLimit': valid.NoneOr(valid.positivefloat), 'loss_type': valid_loss_types, 'losses': valid.positivefloats, 'averageLoss': valid.positivefloat, 'stdDevLoss': valid.positivefloat, 'ffs.type': valid.ChoiceCI('lognormal'), 'assetLifeExpectancy': valid.positivefloat, 'interestRate': valid.positivefloat, 'lossType': valid_loss_types, 'aalOrig': valid.positivefloat, 'aalRetr': valid.positivefloat, 'ratio': valid.positivefloat, 'cf': asset_mean_stddev, 'damage': damage_triple, 'damageStates': valid.namelist, 'taxonomy': taxonomy, 'tagNames': valid.namelist, })
[ "def", "update_validators", "(", ")", ":", "validators", ".", "update", "(", "{", "'fragilityFunction.id'", ":", "valid", ".", "utf8", ",", "# taxonomy", "'vulnerabilityFunction.id'", ":", "valid", ".", "utf8", ",", "# taxonomy", "'consequenceFunction.id'", ":", "valid", ".", "utf8", ",", "# taxonomy", "'asset.id'", ":", "valid", ".", "asset_id", ",", "'costType.name'", ":", "valid", ".", "cost_type", ",", "'costType.type'", ":", "valid", ".", "cost_type_type", ",", "'cost.type'", ":", "valid", ".", "cost_type", ",", "'area.type'", ":", "valid", ".", "name", ",", "'isAbsolute'", ":", "valid", ".", "boolean", ",", "'insuranceLimit'", ":", "valid", ".", "positivefloat", ",", "'deductible'", ":", "valid", ".", "positivefloat", ",", "'occupants'", ":", "valid", ".", "positivefloat", ",", "'value'", ":", "valid", ".", "positivefloat", ",", "'retrofitted'", ":", "valid", ".", "positivefloat", ",", "'number'", ":", "valid", ".", "compose", "(", "valid", ".", "positivefloat", ",", "valid", ".", "nonzero", ")", ",", "'vulnerabilitySetID'", ":", "str", ",", "# any ASCII string is fine", "'vulnerabilityFunctionID'", ":", "str", ",", "# any ASCII string is fine", "'lossCategory'", ":", "valid", ".", "utf8", ",", "# a description field", "'lr'", ":", "valid", ".", "probability", ",", "'lossRatio'", ":", "valid", ".", "positivefloats", ",", "'coefficientsVariation'", ":", "valid", ".", "positivefloats", ",", "'probabilisticDistribution'", ":", "valid", ".", "Choice", "(", "'LN'", ",", "'BT'", ")", ",", "'dist'", ":", "valid", ".", "Choice", "(", "'LN'", ",", "'BT'", ",", "'PM'", ")", ",", "'meanLRs'", ":", "valid", ".", "positivefloats", ",", "'covLRs'", ":", "valid", ".", "positivefloats", ",", "'format'", ":", "valid", ".", "ChoiceCI", "(", "'discrete'", ",", "'continuous'", ")", ",", "'mean'", ":", "valid", ".", "positivefloat", ",", "'stddev'", ":", "valid", ".", "positivefloat", ",", "'minIML'", ":", "valid", ".", "positivefloat", ",", "'maxIML'", ":", "valid", ".", "positivefloat", ",", "'limitStates'", ":", "valid", ".", "namelist", ",", "'noDamageLimit'", ":", "valid", ".", "NoneOr", "(", "valid", ".", "positivefloat", ")", ",", "'loss_type'", ":", "valid_loss_types", ",", "'losses'", ":", "valid", ".", "positivefloats", ",", "'averageLoss'", ":", "valid", ".", "positivefloat", ",", "'stdDevLoss'", ":", "valid", ".", "positivefloat", ",", "'ffs.type'", ":", "valid", ".", "ChoiceCI", "(", "'lognormal'", ")", ",", "'assetLifeExpectancy'", ":", "valid", ".", "positivefloat", ",", "'interestRate'", ":", "valid", ".", "positivefloat", ",", "'lossType'", ":", "valid_loss_types", ",", "'aalOrig'", ":", "valid", ".", "positivefloat", ",", "'aalRetr'", ":", "valid", ".", "positivefloat", ",", "'ratio'", ":", "valid", ".", "positivefloat", ",", "'cf'", ":", "asset_mean_stddev", ",", "'damage'", ":", "damage_triple", ",", "'damageStates'", ":", "valid", ".", "namelist", ",", "'taxonomy'", ":", "taxonomy", ",", "'tagNames'", ":", "valid", ".", "namelist", ",", "}", ")" ]
Call this to update the global nrml.validators
[ "Call", "this", "to", "update", "the", "global", "nrml", ".", "validators" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/read_nrml.py#L384-L437
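update_validators just extends a global mapping from attribute names to validating callables; each callable either coerces the raw string or raises. The registry below is a toy stand-in for nrml.validators, not the real table:

```python
validators = {}

def positivefloat(text):
    value = float(text)
    if value < 0:
        raise ValueError('float %s < 0' % text)
    return value

def update_toy_validators():
    validators.update({
        'occupants': positivefloat,
        'deductible': positivefloat,
    })

update_toy_validators()
raw = {'occupants': '3', 'deductible': '0.1'}
clean = {k: validators[k](v) for k, v in raw.items()}
print(clean)   # {'occupants': 3.0, 'deductible': 0.1}
```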
gem/oq-engine
openquake/calculators/extract.py
get_info
def get_info(dstore): """ :returns: {'stats': dic, 'loss_types': dic, 'num_rlzs': R} """ oq = dstore['oqparam'] stats = {stat: s for s, stat in enumerate(oq.hazard_stats())} loss_types = {lt: l for l, lt in enumerate(oq.loss_dt().names)} imt = {imt: i for i, imt in enumerate(oq.imtls)} num_rlzs = dstore['csm_info'].get_num_rlzs() return dict(stats=stats, num_rlzs=num_rlzs, loss_types=loss_types, imtls=oq.imtls, investigation_time=oq.investigation_time, poes=oq.poes, imt=imt, uhs_dt=oq.uhs_dt())
python
def get_info(dstore): oq = dstore['oqparam'] stats = {stat: s for s, stat in enumerate(oq.hazard_stats())} loss_types = {lt: l for l, lt in enumerate(oq.loss_dt().names)} imt = {imt: i for i, imt in enumerate(oq.imtls)} num_rlzs = dstore['csm_info'].get_num_rlzs() return dict(stats=stats, num_rlzs=num_rlzs, loss_types=loss_types, imtls=oq.imtls, investigation_time=oq.investigation_time, poes=oq.poes, imt=imt, uhs_dt=oq.uhs_dt())
[ "def", "get_info", "(", "dstore", ")", ":", "oq", "=", "dstore", "[", "'oqparam'", "]", "stats", "=", "{", "stat", ":", "s", "for", "s", ",", "stat", "in", "enumerate", "(", "oq", ".", "hazard_stats", "(", ")", ")", "}", "loss_types", "=", "{", "lt", ":", "l", "for", "l", ",", "lt", "in", "enumerate", "(", "oq", ".", "loss_dt", "(", ")", ".", "names", ")", "}", "imt", "=", "{", "imt", ":", "i", "for", "i", ",", "imt", "in", "enumerate", "(", "oq", ".", "imtls", ")", "}", "num_rlzs", "=", "dstore", "[", "'csm_info'", "]", ".", "get_num_rlzs", "(", ")", "return", "dict", "(", "stats", "=", "stats", ",", "num_rlzs", "=", "num_rlzs", ",", "loss_types", "=", "loss_types", ",", "imtls", "=", "oq", ".", "imtls", ",", "investigation_time", "=", "oq", ".", "investigation_time", ",", "poes", "=", "oq", ".", "poes", ",", "imt", "=", "imt", ",", "uhs_dt", "=", "oq", ".", "uhs_dt", "(", ")", ")" ]
:returns: {'stats': dic, 'loss_types': dic, 'num_rlzs': R}
[ ":", "returns", ":", "{", "stats", ":", "dic", "loss_types", ":", "dic", "num_rlzs", ":", "R", "}" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L56-L67
gem/oq-engine
openquake/calculators/extract.py
parse
def parse(query_string, info={}): """ :returns: a normalized query_dict as in the following examples: >>> parse('kind=stats', {'stats': {'mean': 0, 'max': 1}}) {'kind': ['mean', 'max'], 'k': [0, 1], 'rlzs': False} >>> parse('kind=rlzs', {'stats': {}, 'num_rlzs': 3}) {'kind': ['rlz-000', 'rlz-001', 'rlz-002'], 'k': [0, 1, 2], 'rlzs': True} >>> parse('kind=mean', {'stats': {'mean': 0, 'max': 1}}) {'kind': ['mean'], 'k': [0], 'rlzs': False} >>> parse('kind=rlz-3&imt=PGA&site_id=0', {'stats': {}}) {'kind': ['rlz-3'], 'imt': ['PGA'], 'site_id': [0], 'k': [3], 'rlzs': True} """ qdic = parse_qs(query_string) loss_types = info.get('loss_types', []) for key, val in qdic.items(): # for instance, convert site_id to an int if key == 'loss_type': qdic[key] = [loss_types[k] for k in val] else: qdic[key] = [lit_eval(v) for v in val] if info: qdic['k'], qdic['kind'], qdic['rlzs'] = _normalize(qdic['kind'], info) return qdic
python
def parse(query_string, info={}): qdic = parse_qs(query_string) loss_types = info.get('loss_types', []) for key, val in qdic.items(): if key == 'loss_type': qdic[key] = [loss_types[k] for k in val] else: qdic[key] = [lit_eval(v) for v in val] if info: qdic['k'], qdic['kind'], qdic['rlzs'] = _normalize(qdic['kind'], info) return qdic
[ "def", "parse", "(", "query_string", ",", "info", "=", "{", "}", ")", ":", "qdic", "=", "parse_qs", "(", "query_string", ")", "loss_types", "=", "info", ".", "get", "(", "'loss_types'", ",", "[", "]", ")", "for", "key", ",", "val", "in", "qdic", ".", "items", "(", ")", ":", "# for instance, convert site_id to an int", "if", "key", "==", "'loss_type'", ":", "qdic", "[", "key", "]", "=", "[", "loss_types", "[", "k", "]", "for", "k", "in", "val", "]", "else", ":", "qdic", "[", "key", "]", "=", "[", "lit_eval", "(", "v", ")", "for", "v", "in", "val", "]", "if", "info", ":", "qdic", "[", "'k'", "]", ",", "qdic", "[", "'kind'", "]", ",", "qdic", "[", "'rlzs'", "]", "=", "_normalize", "(", "qdic", "[", "'kind'", "]", ",", "info", ")", "return", "qdic" ]
:returns: a normalized query_dict as in the following examples: >>> parse('kind=stats', {'stats': {'mean': 0, 'max': 1}}) {'kind': ['mean', 'max'], 'k': [0, 1], 'rlzs': False} >>> parse('kind=rlzs', {'stats': {}, 'num_rlzs': 3}) {'kind': ['rlz-000', 'rlz-001', 'rlz-002'], 'k': [0, 1, 2], 'rlzs': True} >>> parse('kind=mean', {'stats': {'mean': 0, 'max': 1}}) {'kind': ['mean'], 'k': [0], 'rlzs': False} >>> parse('kind=rlz-3&imt=PGA&site_id=0', {'stats': {}}) {'kind': ['rlz-3'], 'imt': ['PGA'], 'site_id': [0], 'k': [3], 'rlzs': True}
[ ":", "returns", ":", "a", "normalized", "query_dict", "as", "in", "the", "following", "examples", ":" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L93-L115
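parse builds on urllib's parse_qs, which already returns a dict of lists of strings; the extra work is literal-evaluating each value and normalising the kind key via _normalize. The standard-library part can be shown on its own, with a simplified lit_eval that only mirrors the idea:

```python
from urllib.parse import parse_qs
from ast import literal_eval

def lit_eval(string):
    # turn '0' into 0 and '1.5' into 1.5, leave plain words alone
    try:
        return literal_eval(string)
    except (ValueError, SyntaxError):
        return string

qdic = parse_qs('kind=rlz-3&imt=PGA&site_id=0')
print(qdic)  # {'kind': ['rlz-3'], 'imt': ['PGA'], 'site_id': ['0']}
print({k: [lit_eval(v) for v in vs] for k, vs in qdic.items()})
# {'kind': ['rlz-3'], 'imt': ['PGA'], 'site_id': [0]}
```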
gem/oq-engine
openquake/calculators/extract.py
barray
def barray(iterlines): """ Array of bytes """ lst = [line.encode('utf-8') for line in iterlines] arr = numpy.array(lst) return arr
python
def barray(iterlines): lst = [line.encode('utf-8') for line in iterlines] arr = numpy.array(lst) return arr
[ "def", "barray", "(", "iterlines", ")", ":", "lst", "=", "[", "line", ".", "encode", "(", "'utf-8'", ")", "for", "line", "in", "iterlines", "]", "arr", "=", "numpy", ".", "array", "(", "lst", ")", "return", "arr" ]
Array of bytes
[ "Array", "of", "bytes" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L130-L136
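barray exists because npz/HDF5 storage wants fixed-width byte strings rather than Python str objects. The effect is easy to see with plain numpy:

```python
import numpy

lines = ['taxonomy', 'occupancy', 'región']   # non-ASCII is fine after utf-8 encoding
arr = numpy.array([line.encode('utf-8') for line in lines])
print(arr.dtype)   # a fixed-width bytes dtype, |S9 for these inputs
print(arr[2])      # b'regi\xc3\xb3n'
```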
gem/oq-engine
openquake/calculators/extract.py
extract_
def extract_(dstore, dspath): """ Extracts an HDF5 path object from the datastore, for instance extract(dstore, 'sitecol'). """ obj = dstore[dspath] if isinstance(obj, Dataset): return ArrayWrapper(obj.value, obj.attrs) elif isinstance(obj, Group): return ArrayWrapper(numpy.array(list(obj)), obj.attrs) else: return obj
python
def extract_(dstore, dspath): obj = dstore[dspath] if isinstance(obj, Dataset): return ArrayWrapper(obj.value, obj.attrs) elif isinstance(obj, Group): return ArrayWrapper(numpy.array(list(obj)), obj.attrs) else: return obj
[ "def", "extract_", "(", "dstore", ",", "dspath", ")", ":", "obj", "=", "dstore", "[", "dspath", "]", "if", "isinstance", "(", "obj", ",", "Dataset", ")", ":", "return", "ArrayWrapper", "(", "obj", ".", "value", ",", "obj", ".", "attrs", ")", "elif", "isinstance", "(", "obj", ",", "Group", ")", ":", "return", "ArrayWrapper", "(", "numpy", ".", "array", "(", "list", "(", "obj", ")", ")", ",", "obj", ".", "attrs", ")", "else", ":", "return", "obj" ]
Extracts an HDF5 path object from the datastore, for instance extract(dstore, 'sitecol').
[ "Extracts", "an", "HDF5", "path", "object", "from", "the", "datastore", "for", "instance", "extract", "(", "dstore", "sitecol", ")", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L139-L150
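extract_ dispatches on whether the HDF5 path points at a dataset or at a group. Assuming h5py is installed, the distinction can be reproduced with an in-memory file; ArrayWrapper is the engine's own container, so plain prints stand in for it here:

```python
import numpy
import h5py

# in-memory HDF5 file, nothing is written to disk
with h5py.File('demo.hdf5', 'w', driver='core', backing_store=False) as f:
    f.create_dataset('hcurves', data=numpy.arange(6).reshape(2, 3))
    grp = f.create_group('by_kind')
    grp.create_dataset('mean', data=numpy.zeros(3))

    for path in ('hcurves', 'by_kind'):
        obj = f[path]
        if isinstance(obj, h5py.Dataset):
            print(path, '-> array of shape', obj[()].shape)
        elif isinstance(obj, h5py.Group):
            print(path, '-> group with members', list(obj))
```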
gem/oq-engine
openquake/calculators/extract.py
extract_realizations
def extract_realizations(dstore, dummy): """ Extract an array of realizations. Use it as /extract/realizations """ rlzs = dstore['csm_info'].rlzs dt = [('ordinal', U32), ('weight', F32), ('gsims', '<S64')] arr = numpy.zeros(len(rlzs), dt) arr['ordinal'] = rlzs['ordinal'] arr['weight'] = rlzs['weight'] arr['gsims'] = rlzs['branch_path'] # this is used in scenario by QGIS return arr
python
def extract_realizations(dstore, dummy): rlzs = dstore['csm_info'].rlzs dt = [('ordinal', U32), ('weight', F32), ('gsims', '<S64')] arr = numpy.zeros(len(rlzs), dt) arr['ordinal'] = rlzs['ordinal'] arr['weight'] = rlzs['weight'] arr['gsims'] = rlzs['branch_path'] return arr
[ "def", "extract_realizations", "(", "dstore", ",", "dummy", ")", ":", "rlzs", "=", "dstore", "[", "'csm_info'", "]", ".", "rlzs", "dt", "=", "[", "(", "'ordinal'", ",", "U32", ")", ",", "(", "'weight'", ",", "F32", ")", ",", "(", "'gsims'", ",", "'<S64'", ")", "]", "arr", "=", "numpy", ".", "zeros", "(", "len", "(", "rlzs", ")", ",", "dt", ")", "arr", "[", "'ordinal'", "]", "=", "rlzs", "[", "'ordinal'", "]", "arr", "[", "'weight'", "]", "=", "rlzs", "[", "'weight'", "]", "arr", "[", "'gsims'", "]", "=", "rlzs", "[", "'branch_path'", "]", "# this is used in scenario by QGIS", "return", "arr" ]
Extract an array of realizations. Use it as /extract/realizations
[ "Extract", "an", "array", "of", "realizations", ".", "Use", "it", "as", "/", "extract", "/", "realizations" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L188-L198
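extract_realizations packs three named columns into a numpy structured array so the npz export and QGIS see proper fields (U32 and F32 are the engine's aliases for numpy.uint32 and numpy.float32). The same construction with dummy values:

```python
import numpy

dt = [('ordinal', numpy.uint32), ('weight', numpy.float32), ('gsims', '<S64')]
arr = numpy.zeros(3, dt)
arr['ordinal'] = [0, 1, 2]
arr['weight'] = [0.5, 0.3, 0.2]
arr['gsims'] = [b'gsimA', b'gsimB', b'gsimC']   # placeholder branch paths
print(arr.dtype.names)   # ('ordinal', 'weight', 'gsims')
print(arr[1])            # (1, 0.3, b'gsimB')
```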
gem/oq-engine
openquake/calculators/extract.py
extract_exposure_metadata
def extract_exposure_metadata(dstore, what): """ Extract the loss categories and the tags of the exposure. Use it as /extract/exposure_metadata """ dic = {} dic1, dic2 = dstore['assetcol/tagcol'].__toh5__() dic.update(dic1) dic.update(dic2) if 'asset_risk' in dstore: dic['multi_risk'] = sorted( set(dstore['asset_risk'].dtype.names) - set(dstore['assetcol/array'].dtype.names)) names = [name for name in dstore['assetcol/array'].dtype.names if name.startswith(('value-', 'number', 'occupants_')) and not name.endswith('_None')] return ArrayWrapper(numpy.array(names), dic)
python
def extract_exposure_metadata(dstore, what): dic = {} dic1, dic2 = dstore['assetcol/tagcol'].__toh5__() dic.update(dic1) dic.update(dic2) if 'asset_risk' in dstore: dic['multi_risk'] = sorted( set(dstore['asset_risk'].dtype.names) - set(dstore['assetcol/array'].dtype.names)) names = [name for name in dstore['assetcol/array'].dtype.names if name.startswith(('value-', 'number', 'occupants_')) and not name.endswith('_None')] return ArrayWrapper(numpy.array(names), dic)
[ "def", "extract_exposure_metadata", "(", "dstore", ",", "what", ")", ":", "dic", "=", "{", "}", "dic1", ",", "dic2", "=", "dstore", "[", "'assetcol/tagcol'", "]", ".", "__toh5__", "(", ")", "dic", ".", "update", "(", "dic1", ")", "dic", ".", "update", "(", "dic2", ")", "if", "'asset_risk'", "in", "dstore", ":", "dic", "[", "'multi_risk'", "]", "=", "sorted", "(", "set", "(", "dstore", "[", "'asset_risk'", "]", ".", "dtype", ".", "names", ")", "-", "set", "(", "dstore", "[", "'assetcol/array'", "]", ".", "dtype", ".", "names", ")", ")", "names", "=", "[", "name", "for", "name", "in", "dstore", "[", "'assetcol/array'", "]", ".", "dtype", ".", "names", "if", "name", ".", "startswith", "(", "(", "'value-'", ",", "'number'", ",", "'occupants_'", ")", ")", "and", "not", "name", ".", "endswith", "(", "'_None'", ")", "]", "return", "ArrayWrapper", "(", "numpy", ".", "array", "(", "names", ")", ",", "dic", ")" ]
Extract the loss categories and the tags of the exposure. Use it as /extract/exposure_metadata
[ "Extract", "the", "loss", "categories", "and", "the", "tags", "of", "the", "exposure", ".", "Use", "it", "as", "/", "extract", "/", "exposure_metadata" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L202-L218
gem/oq-engine
openquake/calculators/extract.py
extract_assets
def extract_assets(dstore, what): """ Extract an array of assets, optionally filtered by tag. Use it as /extract/assets?taxonomy=RC&taxonomy=MSBC&occupancy=RES """ qdict = parse(what) dic = {} dic1, dic2 = dstore['assetcol/tagcol'].__toh5__() dic.update(dic1) dic.update(dic2) arr = dstore['assetcol/array'].value for tag, vals in qdict.items(): cond = numpy.zeros(len(arr), bool) for val in vals: tagidx, = numpy.where(dic[tag] == val) cond |= arr[tag] == tagidx arr = arr[cond] return ArrayWrapper(arr, dic)
python
def extract_assets(dstore, what): qdict = parse(what) dic = {} dic1, dic2 = dstore['assetcol/tagcol'].__toh5__() dic.update(dic1) dic.update(dic2) arr = dstore['assetcol/array'].value for tag, vals in qdict.items(): cond = numpy.zeros(len(arr), bool) for val in vals: tagidx, = numpy.where(dic[tag] == val) cond |= arr[tag] == tagidx arr = arr[cond] return ArrayWrapper(arr, dic)
[ "def", "extract_assets", "(", "dstore", ",", "what", ")", ":", "qdict", "=", "parse", "(", "what", ")", "dic", "=", "{", "}", "dic1", ",", "dic2", "=", "dstore", "[", "'assetcol/tagcol'", "]", ".", "__toh5__", "(", ")", "dic", ".", "update", "(", "dic1", ")", "dic", ".", "update", "(", "dic2", ")", "arr", "=", "dstore", "[", "'assetcol/array'", "]", ".", "value", "for", "tag", ",", "vals", "in", "qdict", ".", "items", "(", ")", ":", "cond", "=", "numpy", ".", "zeros", "(", "len", "(", "arr", ")", ",", "bool", ")", "for", "val", "in", "vals", ":", "tagidx", ",", "=", "numpy", ".", "where", "(", "dic", "[", "tag", "]", "==", "val", ")", "cond", "|=", "arr", "[", "tag", "]", "==", "tagidx", "arr", "=", "arr", "[", "cond", "]", "return", "ArrayWrapper", "(", "arr", ",", "dic", ")" ]
Extract an array of assets, optionally filtered by tag. Use it as /extract/assets?taxonomy=RC&taxonomy=MSBC&occupancy=RES
[ "Extract", "an", "array", "of", "assets", "optionally", "filtered", "by", "tag", ".", "Use", "it", "as", "/", "extract", "/", "assets?taxonomy", "=", "RC&taxonomy", "=", "MSBC&occupancy", "=", "RES" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L222-L239
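extract_assets filters the asset array by mapping each requested tag value to its index in the tag collection and OR-ing boolean masks. Stripped of the datastore machinery, the filtering step looks roughly like this (tag names and values are invented):

```python
import numpy

taxonomies = numpy.array(['?', 'RC', 'MSBC', 'W'])   # index 0 is the '?' placeholder
assets = numpy.zeros(5, [('id', 'U8'), ('taxonomy', numpy.uint32)])
assets['id'] = ['a1', 'a2', 'a3', 'a4', 'a5']
assets['taxonomy'] = [1, 2, 1, 3, 2]                 # indices into `taxonomies`

wanted = ['RC', 'MSBC']
cond = numpy.zeros(len(assets), bool)
for val in wanted:
    tagidx, = numpy.where(taxonomies == val)
    cond |= assets['taxonomy'] == tagidx
print(assets[cond]['id'])   # ['a1' 'a2' 'a3' 'a5']
```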
gem/oq-engine
openquake/calculators/extract.py
extract_asset_values
def extract_asset_values(dstore, sid): """ Extract an array of asset values for the given sid. Use it as /extract/asset_values/0 :returns: (aid, loss_type1, ..., loss_typeN) composite array """ if sid: return extract(dstore, 'asset_values')[int(sid)] assetcol = extract(dstore, 'assetcol') asset_refs = assetcol.asset_refs assets_by_site = assetcol.assets_by_site() lts = assetcol.loss_types dt = numpy.dtype([('aref', asset_refs.dtype), ('aid', numpy.uint32)] + [(str(lt), numpy.float32) for lt in lts]) data = [] for assets in assets_by_site: vals = numpy.zeros(len(assets), dt) for a, asset in enumerate(assets): vals[a]['aref'] = asset_refs[a] vals[a]['aid'] = asset['ordinal'] for lt in lts: vals[a][lt] = asset['value-' + lt] data.append(vals) return data
python
def extract_asset_values(dstore, sid): if sid: return extract(dstore, 'asset_values')[int(sid)] assetcol = extract(dstore, 'assetcol') asset_refs = assetcol.asset_refs assets_by_site = assetcol.assets_by_site() lts = assetcol.loss_types dt = numpy.dtype([('aref', asset_refs.dtype), ('aid', numpy.uint32)] + [(str(lt), numpy.float32) for lt in lts]) data = [] for assets in assets_by_site: vals = numpy.zeros(len(assets), dt) for a, asset in enumerate(assets): vals[a]['aref'] = asset_refs[a] vals[a]['aid'] = asset['ordinal'] for lt in lts: vals[a][lt] = asset['value-' + lt] data.append(vals) return data
[ "def", "extract_asset_values", "(", "dstore", ",", "sid", ")", ":", "if", "sid", ":", "return", "extract", "(", "dstore", ",", "'asset_values'", ")", "[", "int", "(", "sid", ")", "]", "assetcol", "=", "extract", "(", "dstore", ",", "'assetcol'", ")", "asset_refs", "=", "assetcol", ".", "asset_refs", "assets_by_site", "=", "assetcol", ".", "assets_by_site", "(", ")", "lts", "=", "assetcol", ".", "loss_types", "dt", "=", "numpy", ".", "dtype", "(", "[", "(", "'aref'", ",", "asset_refs", ".", "dtype", ")", ",", "(", "'aid'", ",", "numpy", ".", "uint32", ")", "]", "+", "[", "(", "str", "(", "lt", ")", ",", "numpy", ".", "float32", ")", "for", "lt", "in", "lts", "]", ")", "data", "=", "[", "]", "for", "assets", "in", "assets_by_site", ":", "vals", "=", "numpy", ".", "zeros", "(", "len", "(", "assets", ")", ",", "dt", ")", "for", "a", ",", "asset", "in", "enumerate", "(", "assets", ")", ":", "vals", "[", "a", "]", "[", "'aref'", "]", "=", "asset_refs", "[", "a", "]", "vals", "[", "a", "]", "[", "'aid'", "]", "=", "asset", "[", "'ordinal'", "]", "for", "lt", "in", "lts", ":", "vals", "[", "a", "]", "[", "lt", "]", "=", "asset", "[", "'value-'", "+", "lt", "]", "data", ".", "append", "(", "vals", ")", "return", "data" ]
Extract an array of asset values for the given sid. Use it as /extract/asset_values/0 :returns: (aid, loss_type1, ..., loss_typeN) composite array
[ "Extract", "an", "array", "of", "asset", "values", "for", "the", "given", "sid", ".", "Use", "it", "as", "/", "extract", "/", "asset_values", "/", "0" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L264-L289
gem/oq-engine
openquake/calculators/extract.py
extract_asset_tags
def extract_asset_tags(dstore, tagname): """ Extract an array of asset tags for the given tagname. Use it as /extract/asset_tags or /extract/asset_tags/taxonomy """ tagcol = dstore['assetcol/tagcol'] if tagname: yield tagname, barray(tagcol.gen_tags(tagname)) for tagname in tagcol.tagnames: yield tagname, barray(tagcol.gen_tags(tagname))
python
def extract_asset_tags(dstore, tagname): tagcol = dstore['assetcol/tagcol'] if tagname: yield tagname, barray(tagcol.gen_tags(tagname)) for tagname in tagcol.tagnames: yield tagname, barray(tagcol.gen_tags(tagname))
[ "def", "extract_asset_tags", "(", "dstore", ",", "tagname", ")", ":", "tagcol", "=", "dstore", "[", "'assetcol/tagcol'", "]", "if", "tagname", ":", "yield", "tagname", ",", "barray", "(", "tagcol", ".", "gen_tags", "(", "tagname", ")", ")", "for", "tagname", "in", "tagcol", ".", "tagnames", ":", "yield", "tagname", ",", "barray", "(", "tagcol", ".", "gen_tags", "(", "tagname", ")", ")" ]
Extract an array of asset tags for the given tagname. Use it as /extract/asset_tags or /extract/asset_tags/taxonomy
[ "Extract", "an", "array", "of", "asset", "tags", "for", "the", "given", "tagname", ".", "Use", "it", "as", "/", "extract", "/", "asset_tags", "or", "/", "extract", "/", "asset_tags", "/", "taxonomy" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L293-L302
gem/oq-engine
openquake/calculators/extract.py
get_mesh
def get_mesh(sitecol, complete=True): """ :returns: a lon-lat or lon-lat-depth array depending if the site collection is at sea level or not """ sc = sitecol.complete if complete else sitecol if sc.at_sea_level(): mesh = numpy.zeros(len(sc), [('lon', F64), ('lat', F64)]) mesh['lon'] = sc.lons mesh['lat'] = sc.lats else: mesh = numpy.zeros(len(sc), [('lon', F64), ('lat', F64), ('depth', F64)]) mesh['lon'] = sc.lons mesh['lat'] = sc.lats mesh['depth'] = sc.depths return mesh
python
def get_mesh(sitecol, complete=True): sc = sitecol.complete if complete else sitecol if sc.at_sea_level(): mesh = numpy.zeros(len(sc), [('lon', F64), ('lat', F64)]) mesh['lon'] = sc.lons mesh['lat'] = sc.lats else: mesh = numpy.zeros(len(sc), [('lon', F64), ('lat', F64), ('depth', F64)]) mesh['lon'] = sc.lons mesh['lat'] = sc.lats mesh['depth'] = sc.depths return mesh
[ "def", "get_mesh", "(", "sitecol", ",", "complete", "=", "True", ")", ":", "sc", "=", "sitecol", ".", "complete", "if", "complete", "else", "sitecol", "if", "sc", ".", "at_sea_level", "(", ")", ":", "mesh", "=", "numpy", ".", "zeros", "(", "len", "(", "sc", ")", ",", "[", "(", "'lon'", ",", "F64", ")", ",", "(", "'lat'", ",", "F64", ")", "]", ")", "mesh", "[", "'lon'", "]", "=", "sc", ".", "lons", "mesh", "[", "'lat'", "]", "=", "sc", ".", "lats", "else", ":", "mesh", "=", "numpy", ".", "zeros", "(", "len", "(", "sc", ")", ",", "[", "(", "'lon'", ",", "F64", ")", ",", "(", "'lat'", ",", "F64", ")", ",", "(", "'depth'", ",", "F64", ")", "]", ")", "mesh", "[", "'lon'", "]", "=", "sc", ".", "lons", "mesh", "[", "'lat'", "]", "=", "sc", ".", "lats", "mesh", "[", "'depth'", "]", "=", "sc", ".", "depths", "return", "mesh" ]
:returns: a lon-lat or lon-lat-depth array depending if the site collection is at sea level or not
[ ":", "returns", ":", "a", "lon", "-", "lat", "or", "lon", "-", "lat", "-", "depth", "array", "depending", "if", "the", "site", "collection", "is", "at", "sea", "level", "or", "not" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L305-L322
gem/oq-engine
openquake/calculators/extract.py
hazard_items
def hazard_items(dic, mesh, *extras, **kw): """ :param dic: dictionary of arrays of the same shape :param mesh: a mesh array with lon, lat fields of the same length :param extras: optional triples (field, dtype, values) :param kw: dictionary of parameters (like investigation_time) :returns: a list of pairs (key, value) suitable for storage in .npz format """ for item in kw.items(): yield item arr = dic[next(iter(dic))] dtlist = [(str(field), arr.dtype) for field in sorted(dic)] for field, dtype, values in extras: dtlist.append((str(field), dtype)) array = numpy.zeros(arr.shape, dtlist) for field in dic: array[field] = dic[field] for field, dtype, values in extras: array[field] = values yield 'all', util.compose_arrays(mesh, array)
python
def hazard_items(dic, mesh, *extras, **kw): for item in kw.items(): yield item arr = dic[next(iter(dic))] dtlist = [(str(field), arr.dtype) for field in sorted(dic)] for field, dtype, values in extras: dtlist.append((str(field), dtype)) array = numpy.zeros(arr.shape, dtlist) for field in dic: array[field] = dic[field] for field, dtype, values in extras: array[field] = values yield 'all', util.compose_arrays(mesh, array)
[ "def", "hazard_items", "(", "dic", ",", "mesh", ",", "*", "extras", ",", "*", "*", "kw", ")", ":", "for", "item", "in", "kw", ".", "items", "(", ")", ":", "yield", "item", "arr", "=", "dic", "[", "next", "(", "iter", "(", "dic", ")", ")", "]", "dtlist", "=", "[", "(", "str", "(", "field", ")", ",", "arr", ".", "dtype", ")", "for", "field", "in", "sorted", "(", "dic", ")", "]", "for", "field", ",", "dtype", ",", "values", "in", "extras", ":", "dtlist", ".", "append", "(", "(", "str", "(", "field", ")", ",", "dtype", ")", ")", "array", "=", "numpy", ".", "zeros", "(", "arr", ".", "shape", ",", "dtlist", ")", "for", "field", "in", "dic", ":", "array", "[", "field", "]", "=", "dic", "[", "field", "]", "for", "field", ",", "dtype", ",", "values", "in", "extras", ":", "array", "[", "field", "]", "=", "values", "yield", "'all'", ",", "util", ".", "compose_arrays", "(", "mesh", ",", "array", ")" ]
:param dic: dictionary of arrays of the same shape :param mesh: a mesh array with lon, lat fields of the same length :param extras: optional triples (field, dtype, values) :param kw: dictionary of parameters (like investigation_time) :returns: a list of pairs (key, value) suitable for storage in .npz format
[ ":", "param", "dic", ":", "dictionary", "of", "arrays", "of", "the", "same", "shape", ":", "param", "mesh", ":", "a", "mesh", "array", "with", "lon", "lat", "fields", "of", "the", "same", "length", ":", "param", "extras", ":", "optional", "triples", "(", "field", "dtype", "values", ")", ":", "param", "kw", ":", "dictionary", "of", "parameters", "(", "like", "investigation_time", ")", ":", "returns", ":", "a", "list", "of", "pairs", "(", "key", "value", ")", "suitable", "for", "storage", "in", ".", "npz", "format" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L325-L344
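A minimal sketch of consuming the (key, value) pairs produced by hazard_items above; the site coordinates, curve keys and PoE values are made-up illustration data, not taken from a real calculation:
import numpy
from openquake.calculators.extract import hazard_items
F64 = numpy.float64
mesh = numpy.zeros(2, [('lon', F64), ('lat', F64)])  # two fake sites at sea level
mesh['lon'] = [81.0, 82.5]
mesh['lat'] = [29.0, 29.4]
dic = {'PGA-0.1': numpy.array([0.02, 0.03]),   # one array per curve key, same shape
       'PGA-0.2': numpy.array([0.01, 0.015])}
arrays = dict(hazard_items(dic, mesh, investigation_time=50.0))
numpy.savez('hazard_curves.npz', **arrays)     # keys: 'investigation_time', 'all'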
gem/oq-engine
openquake/calculators/extract.py
extract_hcurves
def extract_hcurves(dstore, what): """ Extracts hazard curves. Use it as /extract/hcurves?kind=mean or /extract/hcurves?kind=rlz-0, /extract/hcurves?kind=stats, /extract/hcurves?kind=rlzs etc """ info = get_info(dstore) if what == '': # npz exports for QGIS sitecol = dstore['sitecol'] mesh = get_mesh(sitecol, complete=False) dic = _get_dict(dstore, 'hcurves-stats', info['imtls'], info['stats']) yield from hazard_items( dic, mesh, investigation_time=info['investigation_time']) return params = parse(what, info) if 'imt' in params: [imt] = params['imt'] slc = info['imtls'](imt) else: slc = ALL sids = params.get('site_id', ALL) if params['rlzs']: dset = dstore['hcurves-rlzs'] for k in params['k']: yield 'rlz-%03d' % k, hdf5.extract(dset, sids, k, slc)[:, 0] else: dset = dstore['hcurves-stats'] stats = list(info['stats']) for k in params['k']: yield stats[k], hdf5.extract(dset, sids, k, slc)[:, 0] yield from params.items()
python
def extract_hcurves(dstore, what): info = get_info(dstore) if what == '': sitecol = dstore['sitecol'] mesh = get_mesh(sitecol, complete=False) dic = _get_dict(dstore, 'hcurves-stats', info['imtls'], info['stats']) yield from hazard_items( dic, mesh, investigation_time=info['investigation_time']) return params = parse(what, info) if 'imt' in params: [imt] = params['imt'] slc = info['imtls'](imt) else: slc = ALL sids = params.get('site_id', ALL) if params['rlzs']: dset = dstore['hcurves-rlzs'] for k in params['k']: yield 'rlz-%03d' % k, hdf5.extract(dset, sids, k, slc)[:, 0] else: dset = dstore['hcurves-stats'] stats = list(info['stats']) for k in params['k']: yield stats[k], hdf5.extract(dset, sids, k, slc)[:, 0] yield from params.items()
[ "def", "extract_hcurves", "(", "dstore", ",", "what", ")", ":", "info", "=", "get_info", "(", "dstore", ")", "if", "what", "==", "''", ":", "# npz exports for QGIS", "sitecol", "=", "dstore", "[", "'sitecol'", "]", "mesh", "=", "get_mesh", "(", "sitecol", ",", "complete", "=", "False", ")", "dic", "=", "_get_dict", "(", "dstore", ",", "'hcurves-stats'", ",", "info", "[", "'imtls'", "]", ",", "info", "[", "'stats'", "]", ")", "yield", "from", "hazard_items", "(", "dic", ",", "mesh", ",", "investigation_time", "=", "info", "[", "'investigation_time'", "]", ")", "return", "params", "=", "parse", "(", "what", ",", "info", ")", "if", "'imt'", "in", "params", ":", "[", "imt", "]", "=", "params", "[", "'imt'", "]", "slc", "=", "info", "[", "'imtls'", "]", "(", "imt", ")", "else", ":", "slc", "=", "ALL", "sids", "=", "params", ".", "get", "(", "'site_id'", ",", "ALL", ")", "if", "params", "[", "'rlzs'", "]", ":", "dset", "=", "dstore", "[", "'hcurves-rlzs'", "]", "for", "k", "in", "params", "[", "'k'", "]", ":", "yield", "'rlz-%03d'", "%", "k", ",", "hdf5", ".", "extract", "(", "dset", ",", "sids", ",", "k", ",", "slc", ")", "[", ":", ",", "0", "]", "else", ":", "dset", "=", "dstore", "[", "'hcurves-stats'", "]", "stats", "=", "list", "(", "info", "[", "'stats'", "]", ")", "for", "k", "in", "params", "[", "'k'", "]", ":", "yield", "stats", "[", "k", "]", ",", "hdf5", ".", "extract", "(", "dset", ",", "sids", ",", "k", ",", "slc", ")", "[", ":", ",", "0", "]", "yield", "from", "params", ".", "items", "(", ")" ]
Extracts hazard curves. Use it as /extract/hcurves?kind=mean or /extract/hcurves?kind=rlz-0, /extract/hcurves?kind=stats, /extract/hcurves?kind=rlzs etc
[ "Extracts", "hazard", "curves", ".", "Use", "it", "as", "/", "extract", "/", "hcurves?kind", "=", "mean", "or", "/", "extract", "/", "hcurves?kind", "=", "rlz", "-", "0", "/", "extract", "/", "hcurves?kind", "=", "stats", "/", "extract", "/", "hcurves?kind", "=", "rlzs", "etc" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L359-L389
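A hedged sketch of the web form documented in extract_hcurves above, mirroring the WebExtractor.get logic shown at the end of this module; the server URL, calc id 30, query string and the presence of a 'mean' statistic are illustrative assumptions:
import io
import numpy
import requests
url = 'http://127.0.0.1:8800/v1/calc/30/extract/hcurves?kind=mean&imt=PGA'
resp = requests.get(url)                    # assumes a local WebUI without authentication
npz = numpy.load(io.BytesIO(resp.content))  # the endpoint returns an .npz payload
mean_pga = npz['mean']                      # PoEs per site for the PGA levels, as yielded above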
gem/oq-engine
openquake/calculators/extract.py
extract_hmaps
def extract_hmaps(dstore, what): """ Extracts hazard maps. Use it as /extract/hmaps?imt=PGA """ info = get_info(dstore) if what == '': # npz exports for QGIS sitecol = dstore['sitecol'] mesh = get_mesh(sitecol, complete=False) dic = _get_dict(dstore, 'hmaps-stats', {imt: info['poes'] for imt in info['imtls']}, info['stats']) yield from hazard_items( dic, mesh, investigation_time=info['investigation_time']) return params = parse(what, info) if 'imt' in params: [imt] = params['imt'] m = info['imt'][imt] s = slice(m, m + 1) else: s = ALL if params['rlzs']: dset = dstore['hmaps-rlzs'] for k in params['k']: yield 'rlz-%03d' % k, hdf5.extract(dset, ALL, k, s, ALL)[:, 0] else: dset = dstore['hmaps-stats'] stats = list(info['stats']) for k in params['k']: yield stats[k], hdf5.extract(dset, ALL, k, s, ALL)[:, 0] yield from params.items()
python
def extract_hmaps(dstore, what): info = get_info(dstore) if what == '': sitecol = dstore['sitecol'] mesh = get_mesh(sitecol, complete=False) dic = _get_dict(dstore, 'hmaps-stats', {imt: info['poes'] for imt in info['imtls']}, info['stats']) yield from hazard_items( dic, mesh, investigation_time=info['investigation_time']) return params = parse(what, info) if 'imt' in params: [imt] = params['imt'] m = info['imt'][imt] s = slice(m, m + 1) else: s = ALL if params['rlzs']: dset = dstore['hmaps-rlzs'] for k in params['k']: yield 'rlz-%03d' % k, hdf5.extract(dset, ALL, k, s, ALL)[:, 0] else: dset = dstore['hmaps-stats'] stats = list(info['stats']) for k in params['k']: yield stats[k], hdf5.extract(dset, ALL, k, s, ALL)[:, 0] yield from params.items()
[ "def", "extract_hmaps", "(", "dstore", ",", "what", ")", ":", "info", "=", "get_info", "(", "dstore", ")", "if", "what", "==", "''", ":", "# npz exports for QGIS", "sitecol", "=", "dstore", "[", "'sitecol'", "]", "mesh", "=", "get_mesh", "(", "sitecol", ",", "complete", "=", "False", ")", "dic", "=", "_get_dict", "(", "dstore", ",", "'hmaps-stats'", ",", "{", "imt", ":", "info", "[", "'poes'", "]", "for", "imt", "in", "info", "[", "'imtls'", "]", "}", ",", "info", "[", "'stats'", "]", ")", "yield", "from", "hazard_items", "(", "dic", ",", "mesh", ",", "investigation_time", "=", "info", "[", "'investigation_time'", "]", ")", "return", "params", "=", "parse", "(", "what", ",", "info", ")", "if", "'imt'", "in", "params", ":", "[", "imt", "]", "=", "params", "[", "'imt'", "]", "m", "=", "info", "[", "'imt'", "]", "[", "imt", "]", "s", "=", "slice", "(", "m", ",", "m", "+", "1", ")", "else", ":", "s", "=", "ALL", "if", "params", "[", "'rlzs'", "]", ":", "dset", "=", "dstore", "[", "'hmaps-rlzs'", "]", "for", "k", "in", "params", "[", "'k'", "]", ":", "yield", "'rlz-%03d'", "%", "k", ",", "hdf5", ".", "extract", "(", "dset", ",", "ALL", ",", "k", ",", "s", ",", "ALL", ")", "[", ":", ",", "0", "]", "else", ":", "dset", "=", "dstore", "[", "'hmaps-stats'", "]", "stats", "=", "list", "(", "info", "[", "'stats'", "]", ")", "for", "k", "in", "params", "[", "'k'", "]", ":", "yield", "stats", "[", "k", "]", ",", "hdf5", ".", "extract", "(", "dset", ",", "ALL", ",", "k", ",", "s", ",", "ALL", ")", "[", ":", ",", "0", "]", "yield", "from", "params", ".", "items", "(", ")" ]
Extracts hazard maps. Use it as /extract/hmaps?imt=PGA
[ "Extracts", "hazard", "maps", ".", "Use", "it", "as", "/", "extract", "/", "hmaps?imt", "=", "PGA" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L393-L423
gem/oq-engine
openquake/calculators/extract.py
extract_uhs
def extract_uhs(dstore, what): """ Extracts uniform hazard spectra. Use it as /extract/uhs?kind=mean or /extract/uhs?kind=rlz-0, etc """ info = get_info(dstore) if what == '': # npz exports for QGIS sitecol = dstore['sitecol'] mesh = get_mesh(sitecol, complete=False) dic = {} for stat, s in info['stats'].items(): hmap = dstore['hmaps-stats'][:, s] dic[stat] = calc.make_uhs(hmap, info) yield from hazard_items( dic, mesh, investigation_time=info['investigation_time']) return params = parse(what, info) periods = [] for m, imt in enumerate(info['imtls']): if imt == 'PGA' or imt.startswith('SA'): periods.append(m) if 'site_id' in params: sids = params['site_id'] else: sids = ALL if params['rlzs']: dset = dstore['hmaps-rlzs'] for k in params['k']: yield ('rlz-%03d' % k, hdf5.extract(dset, sids, k, periods, ALL)[:, 0]) else: dset = dstore['hmaps-stats'] stats = list(info['stats']) for k in params['k']: yield stats[k], hdf5.extract(dset, sids, k, periods, ALL)[:, 0] yield from params.items()
python
def extract_uhs(dstore, what): info = get_info(dstore) if what == '': sitecol = dstore['sitecol'] mesh = get_mesh(sitecol, complete=False) dic = {} for stat, s in info['stats'].items(): hmap = dstore['hmaps-stats'][:, s] dic[stat] = calc.make_uhs(hmap, info) yield from hazard_items( dic, mesh, investigation_time=info['investigation_time']) return params = parse(what, info) periods = [] for m, imt in enumerate(info['imtls']): if imt == 'PGA' or imt.startswith('SA'): periods.append(m) if 'site_id' in params: sids = params['site_id'] else: sids = ALL if params['rlzs']: dset = dstore['hmaps-rlzs'] for k in params['k']: yield ('rlz-%03d' % k, hdf5.extract(dset, sids, k, periods, ALL)[:, 0]) else: dset = dstore['hmaps-stats'] stats = list(info['stats']) for k in params['k']: yield stats[k], hdf5.extract(dset, sids, k, periods, ALL)[:, 0] yield from params.items()
[ "def", "extract_uhs", "(", "dstore", ",", "what", ")", ":", "info", "=", "get_info", "(", "dstore", ")", "if", "what", "==", "''", ":", "# npz exports for QGIS", "sitecol", "=", "dstore", "[", "'sitecol'", "]", "mesh", "=", "get_mesh", "(", "sitecol", ",", "complete", "=", "False", ")", "dic", "=", "{", "}", "for", "stat", ",", "s", "in", "info", "[", "'stats'", "]", ".", "items", "(", ")", ":", "hmap", "=", "dstore", "[", "'hmaps-stats'", "]", "[", ":", ",", "s", "]", "dic", "[", "stat", "]", "=", "calc", ".", "make_uhs", "(", "hmap", ",", "info", ")", "yield", "from", "hazard_items", "(", "dic", ",", "mesh", ",", "investigation_time", "=", "info", "[", "'investigation_time'", "]", ")", "return", "params", "=", "parse", "(", "what", ",", "info", ")", "periods", "=", "[", "]", "for", "m", ",", "imt", "in", "enumerate", "(", "info", "[", "'imtls'", "]", ")", ":", "if", "imt", "==", "'PGA'", "or", "imt", ".", "startswith", "(", "'SA'", ")", ":", "periods", ".", "append", "(", "m", ")", "if", "'site_id'", "in", "params", ":", "sids", "=", "params", "[", "'site_id'", "]", "else", ":", "sids", "=", "ALL", "if", "params", "[", "'rlzs'", "]", ":", "dset", "=", "dstore", "[", "'hmaps-rlzs'", "]", "for", "k", "in", "params", "[", "'k'", "]", ":", "yield", "(", "'rlz-%03d'", "%", "k", ",", "hdf5", ".", "extract", "(", "dset", ",", "sids", ",", "k", ",", "periods", ",", "ALL", ")", "[", ":", ",", "0", "]", ")", "else", ":", "dset", "=", "dstore", "[", "'hmaps-stats'", "]", "stats", "=", "list", "(", "info", "[", "'stats'", "]", ")", "for", "k", "in", "params", "[", "'k'", "]", ":", "yield", "stats", "[", "k", "]", ",", "hdf5", ".", "extract", "(", "dset", ",", "sids", ",", "k", ",", "periods", ",", "ALL", ")", "[", ":", ",", "0", "]", "yield", "from", "params", ".", "items", "(", ")" ]
Extracts uniform hazard spectra. Use it as /extract/uhs?kind=mean or /extract/uhs?kind=rlz-0, etc
[ "Extracts", "uniform", "hazard", "spectra", ".", "Use", "it", "as", "/", "extract", "/", "uhs?kind", "=", "mean", "or", "/", "extract", "/", "uhs?kind", "=", "rlz", "-", "0", "etc" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L427-L462
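The period selection in extract_uhs keeps only PGA and SA intensity measure types; a tiny self-contained illustration of that filter with a made-up IMT list:
imtls = ['PGA', 'SA(0.1)', 'SA(1.0)', 'PGV']
periods = [m for m, imt in enumerate(imtls)
           if imt == 'PGA' or imt.startswith('SA')]
# periods == [0, 1, 2]; PGV does not enter the uniform hazard spectra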
gem/oq-engine
openquake/calculators/extract.py
extract_agg_curves
def extract_agg_curves(dstore, what): """ Aggregate loss curves of the given loss type and tags for event based risk calculations. Use it as /extract/agg_curves/structural?taxonomy=RC&zipcode=20126 :returns: array of shape (S, P), being P the number of return periods and S the number of statistics """ from openquake.calculators.export.loss_curves import get_loss_builder oq = dstore['oqparam'] loss_type, tags = get_loss_type_tags(what) if 'curves-stats' in dstore: # event_based_risk losses = _get_curves(dstore['curves-stats'], oq.lti[loss_type]) stats = dstore['curves-stats'].attrs['stats'] elif 'curves-rlzs' in dstore: # event_based_risk, 1 rlz losses = _get_curves(dstore['curves-rlzs'], oq.lti[loss_type]) assert losses.shape[1] == 1, 'There must be a single realization' stats = [b'mean'] # suitable to be stored as hdf5 attribute else: raise KeyError('No curves found in %s' % dstore) res = _filter_agg(dstore['assetcol'], losses, tags, stats) cc = dstore['assetcol/cost_calculator'] res.units = cc.get_units(loss_types=[loss_type]) res.return_periods = get_loss_builder(dstore).return_periods return res
python
def extract_agg_curves(dstore, what): from openquake.calculators.export.loss_curves import get_loss_builder oq = dstore['oqparam'] loss_type, tags = get_loss_type_tags(what) if 'curves-stats' in dstore: losses = _get_curves(dstore['curves-stats'], oq.lti[loss_type]) stats = dstore['curves-stats'].attrs['stats'] elif 'curves-rlzs' in dstore: losses = _get_curves(dstore['curves-rlzs'], oq.lti[loss_type]) assert losses.shape[1] == 1, 'There must be a single realization' stats = [b'mean'] else: raise KeyError('No curves found in %s' % dstore) res = _filter_agg(dstore['assetcol'], losses, tags, stats) cc = dstore['assetcol/cost_calculator'] res.units = cc.get_units(loss_types=[loss_type]) res.return_periods = get_loss_builder(dstore).return_periods return res
[ "def", "extract_agg_curves", "(", "dstore", ",", "what", ")", ":", "from", "openquake", ".", "calculators", ".", "export", ".", "loss_curves", "import", "get_loss_builder", "oq", "=", "dstore", "[", "'oqparam'", "]", "loss_type", ",", "tags", "=", "get_loss_type_tags", "(", "what", ")", "if", "'curves-stats'", "in", "dstore", ":", "# event_based_risk", "losses", "=", "_get_curves", "(", "dstore", "[", "'curves-stats'", "]", ",", "oq", ".", "lti", "[", "loss_type", "]", ")", "stats", "=", "dstore", "[", "'curves-stats'", "]", ".", "attrs", "[", "'stats'", "]", "elif", "'curves-rlzs'", "in", "dstore", ":", "# event_based_risk, 1 rlz", "losses", "=", "_get_curves", "(", "dstore", "[", "'curves-rlzs'", "]", ",", "oq", ".", "lti", "[", "loss_type", "]", ")", "assert", "losses", ".", "shape", "[", "1", "]", "==", "1", ",", "'There must be a single realization'", "stats", "=", "[", "b'mean'", "]", "# suitable to be stored as hdf5 attribute", "else", ":", "raise", "KeyError", "(", "'No curves found in %s'", "%", "dstore", ")", "res", "=", "_filter_agg", "(", "dstore", "[", "'assetcol'", "]", ",", "losses", ",", "tags", ",", "stats", ")", "cc", "=", "dstore", "[", "'assetcol/cost_calculator'", "]", "res", ".", "units", "=", "cc", ".", "get_units", "(", "loss_types", "=", "[", "loss_type", "]", ")", "res", ".", "return_periods", "=", "get_loss_builder", "(", "dstore", ")", ".", "return_periods", "return", "res" ]
Aggregate loss curves of the given loss type and tags for event based risk calculations. Use it as /extract/agg_curves/structural?taxonomy=RC&zipcode=20126 :returns: array of shape (S, P), being P the number of return periods and S the number of statistics
[ "Aggregate", "loss", "curves", "of", "the", "given", "loss", "type", "and", "tags", "for", "event", "based", "risk", "calculations", ".", "Use", "it", "as", "/", "extract", "/", "agg_curves", "/", "structural?taxonomy", "=", "RC&zipcode", "=", "20126", ":", "returns", ":", "array", "of", "shape", "(", "S", "P", ")", "being", "P", "the", "number", "of", "return", "periods", "and", "S", "the", "number", "of", "statistics" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L522-L547
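A hedged sketch of calling extract_agg_curves directly on a local datastore; it assumes openquake.baselib.datastore.read is available in this version, that calculation 30 is an event based risk run, and that the taxonomy/zipcode tag values exist in the exposure:
from openquake.baselib import datastore
from openquake.calculators.extract import extract_agg_curves
dstore = datastore.read(30)   # hypothetical calc id
aw = extract_agg_curves(dstore, 'structural?taxonomy=RC&zipcode=20126')
print(aw.return_periods, aw.units)   # the wrapped array has shape (S, P)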
gem/oq-engine
openquake/calculators/extract.py
extract_agg_losses
def extract_agg_losses(dstore, what): """ Aggregate losses of the given loss type and tags. Use it as /extract/agg_losses/structural?taxonomy=RC&zipcode=20126 /extract/agg_losses/structural?taxonomy=RC&zipcode=* :returns: an array of shape (T, R) if one of the tag names has a `*` value an array of shape (R,), being R the number of realizations an array of length 0 if there is no data for the given tags """ loss_type, tags = get_loss_type_tags(what) if not loss_type: raise ValueError('loss_type not passed in agg_losses/<loss_type>') l = dstore['oqparam'].lti[loss_type] if 'losses_by_asset' in dstore: # scenario_risk stats = None losses = dstore['losses_by_asset'][:, :, l]['mean'] elif 'avg_losses-stats' in dstore: # event_based_risk, classical_risk stats = dstore['avg_losses-stats'].attrs['stats'] losses = dstore['avg_losses-stats'][:, :, l] elif 'avg_losses-rlzs' in dstore: # event_based_risk, classical_risk stats = [b'mean'] losses = dstore['avg_losses-rlzs'][:, :, l] else: raise KeyError('No losses found in %s' % dstore) return _filter_agg(dstore['assetcol'], losses, tags, stats)
python
def extract_agg_losses(dstore, what): loss_type, tags = get_loss_type_tags(what) if not loss_type: raise ValueError('loss_type not passed in agg_losses/<loss_type>') l = dstore['oqparam'].lti[loss_type] if 'losses_by_asset' in dstore: stats = None losses = dstore['losses_by_asset'][:, :, l]['mean'] elif 'avg_losses-stats' in dstore: stats = dstore['avg_losses-stats'].attrs['stats'] losses = dstore['avg_losses-stats'][:, :, l] elif 'avg_losses-rlzs' in dstore: stats = [b'mean'] losses = dstore['avg_losses-rlzs'][:, :, l] else: raise KeyError('No losses found in %s' % dstore) return _filter_agg(dstore['assetcol'], losses, tags, stats)
[ "def", "extract_agg_losses", "(", "dstore", ",", "what", ")", ":", "loss_type", ",", "tags", "=", "get_loss_type_tags", "(", "what", ")", "if", "not", "loss_type", ":", "raise", "ValueError", "(", "'loss_type not passed in agg_losses/<loss_type>'", ")", "l", "=", "dstore", "[", "'oqparam'", "]", ".", "lti", "[", "loss_type", "]", "if", "'losses_by_asset'", "in", "dstore", ":", "# scenario_risk", "stats", "=", "None", "losses", "=", "dstore", "[", "'losses_by_asset'", "]", "[", ":", ",", ":", ",", "l", "]", "[", "'mean'", "]", "elif", "'avg_losses-stats'", "in", "dstore", ":", "# event_based_risk, classical_risk", "stats", "=", "dstore", "[", "'avg_losses-stats'", "]", ".", "attrs", "[", "'stats'", "]", "losses", "=", "dstore", "[", "'avg_losses-stats'", "]", "[", ":", ",", ":", ",", "l", "]", "elif", "'avg_losses-rlzs'", "in", "dstore", ":", "# event_based_risk, classical_risk", "stats", "=", "[", "b'mean'", "]", "losses", "=", "dstore", "[", "'avg_losses-rlzs'", "]", "[", ":", ",", ":", ",", "l", "]", "else", ":", "raise", "KeyError", "(", "'No losses found in %s'", "%", "dstore", ")", "return", "_filter_agg", "(", "dstore", "[", "'assetcol'", "]", ",", "losses", ",", "tags", ",", "stats", ")" ]
Aggregate losses of the given loss type and tags. Use it as /extract/agg_losses/structural?taxonomy=RC&zipcode=20126 /extract/agg_losses/structural?taxonomy=RC&zipcode=* :returns: an array of shape (T, R) if one of the tag names has a `*` value an array of shape (R,), being R the number of realizations an array of length 0 if there is no data for the given tags
[ "Aggregate", "losses", "of", "the", "given", "loss", "type", "and", "tags", ".", "Use", "it", "as", "/", "extract", "/", "agg_losses", "/", "structural?taxonomy", "=", "RC&zipcode", "=", "20126", "/", "extract", "/", "agg_losses", "/", "structural?taxonomy", "=", "RC&zipcode", "=", "*" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L551-L577
gem/oq-engine
openquake/calculators/extract.py
extract_agg_damages
def extract_agg_damages(dstore, what): """ Aggregate damages of the given loss type and tags. Use it as /extract/agg_damages/structural?taxonomy=RC&zipcode=20126 :returns: array of shape (R, D), being R the number of realizations and D the number of damage states, or an array of length 0 if there is no data for the given tags """ loss_type, tags = get_loss_type_tags(what) if 'dmg_by_asset' in dstore: # scenario_damage lti = dstore['oqparam'].lti[loss_type] losses = dstore['dmg_by_asset'][:, :, lti, 0] else: raise KeyError('No damages found in %s' % dstore) return _filter_agg(dstore['assetcol'], losses, tags)
python
def extract_agg_damages(dstore, what): loss_type, tags = get_loss_type_tags(what) if 'dmg_by_asset' in dstore: lti = dstore['oqparam'].lti[loss_type] losses = dstore['dmg_by_asset'][:, :, lti, 0] else: raise KeyError('No damages found in %s' % dstore) return _filter_agg(dstore['assetcol'], losses, tags)
[ "def", "extract_agg_damages", "(", "dstore", ",", "what", ")", ":", "loss_type", ",", "tags", "=", "get_loss_type_tags", "(", "what", ")", "if", "'dmg_by_asset'", "in", "dstore", ":", "# scenario_damage", "lti", "=", "dstore", "[", "'oqparam'", "]", ".", "lti", "[", "loss_type", "]", "losses", "=", "dstore", "[", "'dmg_by_asset'", "]", "[", ":", ",", ":", ",", "lti", ",", "0", "]", "else", ":", "raise", "KeyError", "(", "'No damages found in %s'", "%", "dstore", ")", "return", "_filter_agg", "(", "dstore", "[", "'assetcol'", "]", ",", "losses", ",", "tags", ")" ]
Aggregate damages of the given loss type and tags. Use it as /extract/agg_damages/structural?taxonomy=RC&zipcode=20126 :returns: array of shape (R, D), being R the number of realizations and D the number of damage states, or an array of length 0 if there is no data for the given tags
[ "Aggregate", "damages", "of", "the", "given", "loss", "type", "and", "tags", ".", "Use", "it", "as", "/", "extract", "/", "agg_damages", "/", "structural?taxonomy", "=", "RC&zipcode", "=", "20126" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L581-L597
gem/oq-engine
openquake/calculators/extract.py
extract_aggregate
def extract_aggregate(dstore, what): """ /extract/aggregate/avg_losses? kind=mean&loss_type=structural&tag=taxonomy&tag=occupancy """ name, qstring = what.split('?', 1) info = get_info(dstore) qdic = parse(qstring, info) suffix = '-rlzs' if qdic['rlzs'] else '-stats' tagnames = qdic.get('tag', []) assetcol = dstore['assetcol'] ltypes = qdic.get('loss_type', []) if ltypes: array = dstore[name + suffix][:, qdic['k'][0], ltypes[0]] else: array = dstore[name + suffix][:, qdic['k'][0]] aw = ArrayWrapper(assetcol.aggregate_by(tagnames, array), {}) for tagname in tagnames: setattr(aw, tagname, getattr(assetcol.tagcol, tagname)) aw.tagnames = encode(tagnames) if not ltypes: aw.extra = ('loss_type',) + tuple(info['loss_types']) return aw
python
def extract_aggregate(dstore, what): name, qstring = what.split('?', 1) info = get_info(dstore) qdic = parse(qstring, info) suffix = '-rlzs' if qdic['rlzs'] else '-stats' tagnames = qdic.get('tag', []) assetcol = dstore['assetcol'] ltypes = qdic.get('loss_type', []) if ltypes: array = dstore[name + suffix][:, qdic['k'][0], ltypes[0]] else: array = dstore[name + suffix][:, qdic['k'][0]] aw = ArrayWrapper(assetcol.aggregate_by(tagnames, array), {}) for tagname in tagnames: setattr(aw, tagname, getattr(assetcol.tagcol, tagname)) aw.tagnames = encode(tagnames) if not ltypes: aw.extra = ('loss_type',) + tuple(info['loss_types']) return aw
[ "def", "extract_aggregate", "(", "dstore", ",", "what", ")", ":", "name", ",", "qstring", "=", "what", ".", "split", "(", "'?'", ",", "1", ")", "info", "=", "get_info", "(", "dstore", ")", "qdic", "=", "parse", "(", "qstring", ",", "info", ")", "suffix", "=", "'-rlzs'", "if", "qdic", "[", "'rlzs'", "]", "else", "'-stats'", "tagnames", "=", "qdic", ".", "get", "(", "'tag'", ",", "[", "]", ")", "assetcol", "=", "dstore", "[", "'assetcol'", "]", "ltypes", "=", "qdic", ".", "get", "(", "'loss_type'", ",", "[", "]", ")", "if", "ltypes", ":", "array", "=", "dstore", "[", "name", "+", "suffix", "]", "[", ":", ",", "qdic", "[", "'k'", "]", "[", "0", "]", ",", "ltypes", "[", "0", "]", "]", "else", ":", "array", "=", "dstore", "[", "name", "+", "suffix", "]", "[", ":", ",", "qdic", "[", "'k'", "]", "[", "0", "]", "]", "aw", "=", "ArrayWrapper", "(", "assetcol", ".", "aggregate_by", "(", "tagnames", ",", "array", ")", ",", "{", "}", ")", "for", "tagname", "in", "tagnames", ":", "setattr", "(", "aw", ",", "tagname", ",", "getattr", "(", "assetcol", ".", "tagcol", ",", "tagname", ")", ")", "aw", ".", "tagnames", "=", "encode", "(", "tagnames", ")", "if", "not", "ltypes", ":", "aw", ".", "extra", "=", "(", "'loss_type'", ",", ")", "+", "tuple", "(", "info", "[", "'loss_types'", "]", ")", "return", "aw" ]
/extract/aggregate/avg_losses? kind=mean&loss_type=structural&tag=taxonomy&tag=occupancy
[ "/", "extract", "/", "aggregate", "/", "avg_losses?", "kind", "=", "mean&loss_type", "=", "structural&tag", "=", "taxonomy&tag", "=", "occupancy" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L601-L623
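A hedged usage sketch of the aggregate endpoint via the WebExtractor defined at the end of this module; the constructor arguments and the calc id are assumptions for illustration, while the query string is the one given in the docstring above:
from openquake.calculators.extract import WebExtractor
xtr = WebExtractor(30)   # hypothetical calc id; server/credentials assumed to come from the local config
aw = xtr.get('aggregate/avg_losses?kind=mean&loss_type=structural&tag=taxonomy&tag=occupancy')
# aw.array has one mean loss per (taxonomy, occupancy) pair; aw.tagnames lists the tags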
gem/oq-engine
openquake/calculators/extract.py
build_damage_dt
def build_damage_dt(dstore, mean_std=True): """ :param dstore: a datastore instance :param mean_std: a flag (default True) :returns: a composite dtype loss_type -> (mean_ds1, stdv_ds1, ...) or loss_type -> (ds1, ds2, ...) depending on the flag mean_std """ oq = dstore['oqparam'] damage_states = ['no_damage'] + list( dstore.get_attr('risk_model', 'limit_states')) dt_list = [] for ds in damage_states: ds = str(ds) if mean_std: dt_list.append(('%s_mean' % ds, F32)) dt_list.append(('%s_stdv' % ds, F32)) else: dt_list.append((ds, F32)) damage_dt = numpy.dtype(dt_list) loss_types = oq.loss_dt().names return numpy.dtype([(lt, damage_dt) for lt in loss_types])
python
def build_damage_dt(dstore, mean_std=True): oq = dstore['oqparam'] damage_states = ['no_damage'] + list( dstore.get_attr('risk_model', 'limit_states')) dt_list = [] for ds in damage_states: ds = str(ds) if mean_std: dt_list.append(('%s_mean' % ds, F32)) dt_list.append(('%s_stdv' % ds, F32)) else: dt_list.append((ds, F32)) damage_dt = numpy.dtype(dt_list) loss_types = oq.loss_dt().names return numpy.dtype([(lt, damage_dt) for lt in loss_types])
[ "def", "build_damage_dt", "(", "dstore", ",", "mean_std", "=", "True", ")", ":", "oq", "=", "dstore", "[", "'oqparam'", "]", "damage_states", "=", "[", "'no_damage'", "]", "+", "list", "(", "dstore", ".", "get_attr", "(", "'risk_model'", ",", "'limit_states'", ")", ")", "dt_list", "=", "[", "]", "for", "ds", "in", "damage_states", ":", "ds", "=", "str", "(", "ds", ")", "if", "mean_std", ":", "dt_list", ".", "append", "(", "(", "'%s_mean'", "%", "ds", ",", "F32", ")", ")", "dt_list", ".", "append", "(", "(", "'%s_stdv'", "%", "ds", ",", "F32", ")", ")", "else", ":", "dt_list", ".", "append", "(", "(", "ds", ",", "F32", ")", ")", "damage_dt", "=", "numpy", ".", "dtype", "(", "dt_list", ")", "loss_types", "=", "oq", ".", "loss_dt", "(", ")", ".", "names", "return", "numpy", ".", "dtype", "(", "[", "(", "lt", ",", "damage_dt", ")", "for", "lt", "in", "loss_types", "]", ")" ]
:param dstore: a datastore instance :param mean_std: a flag (default True) :returns: a composite dtype loss_type -> (mean_ds1, stdv_ds1, ...) or loss_type -> (ds1, ds2, ...) depending on the flag mean_std
[ ":", "param", "dstore", ":", "a", "datastore", "instance", ":", "param", "mean_std", ":", "a", "flag", "(", "default", "True", ")", ":", "returns", ":", "a", "composite", "dtype", "loss_type", "-", ">", "(", "mean_ds1", "stdv_ds1", "...", ")", "or", "loss_type", "-", ">", "(", "ds1", "ds2", "...", ")", "depending", "on", "the", "flag", "mean_std" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L686-L707
gem/oq-engine
openquake/calculators/extract.py
build_damage_array
def build_damage_array(data, damage_dt): """ :param data: an array of shape (A, L, 1, D) or (A, L, 2, D) :param damage_dt: a damage composite data type loss_type -> states :returns: a composite array of length N and dtype damage_dt """ A, L, MS, D = data.shape dmg = numpy.zeros(A, damage_dt) for a in range(A): for l, lt in enumerate(damage_dt.names): std = any(f for f in damage_dt[lt].names if f.endswith('_stdv')) if MS == 1 or not std: # there is only the mean value dmg[lt][a] = tuple(data[a, l, 0]) else: # there are both mean and stddev # data[a, l].T has shape (D, 2) dmg[lt][a] = tuple(numpy.concatenate(data[a, l].T)) return dmg
python
def build_damage_array(data, damage_dt): A, L, MS, D = data.shape dmg = numpy.zeros(A, damage_dt) for a in range(A): for l, lt in enumerate(damage_dt.names): std = any(f for f in damage_dt[lt].names if f.endswith('_stdv')) if MS == 1 or not std: dmg[lt][a] = tuple(data[a, l, 0]) else: dmg[lt][a] = tuple(numpy.concatenate(data[a, l].T)) return dmg
[ "def", "build_damage_array", "(", "data", ",", "damage_dt", ")", ":", "A", ",", "L", ",", "MS", ",", "D", "=", "data", ".", "shape", "dmg", "=", "numpy", ".", "zeros", "(", "A", ",", "damage_dt", ")", "for", "a", "in", "range", "(", "A", ")", ":", "for", "l", ",", "lt", "in", "enumerate", "(", "damage_dt", ".", "names", ")", ":", "std", "=", "any", "(", "f", "for", "f", "in", "damage_dt", "[", "lt", "]", ".", "names", "if", "f", ".", "endswith", "(", "'_stdv'", ")", ")", "if", "MS", "==", "1", "or", "not", "std", ":", "# there is only the mean value", "dmg", "[", "lt", "]", "[", "a", "]", "=", "tuple", "(", "data", "[", "a", ",", "l", ",", "0", "]", ")", "else", ":", "# there are both mean and stddev", "# data[a, l].T has shape (D, 2)", "dmg", "[", "lt", "]", "[", "a", "]", "=", "tuple", "(", "numpy", ".", "concatenate", "(", "data", "[", "a", ",", "l", "]", ".", "T", ")", ")", "return", "dmg" ]
:param data: an array of shape (A, L, 1, D) or (A, L, 2, D) :param damage_dt: a damage composite data type loss_type -> states :returns: a composite array of length N and dtype damage_dt
[ ":", "param", "data", ":", "an", "array", "of", "shape", "(", "A", "L", "1", "D", ")", "or", "(", "A", "L", "2", "D", ")", ":", "param", "damage_dt", ":", "a", "damage", "composite", "data", "type", "loss_type", "-", ">", "states", ":", "returns", ":", "a", "composite", "array", "of", "length", "N", "and", "dtype", "damage_dt" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L710-L726
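A self-contained example of the mean/stddev interleaving performed by build_damage_array, using a hand-built damage dtype with two damage states (no_damage, complete) and a single 'structural' loss type; the numbers are arbitrary:
import numpy
from openquake.calculators.extract import build_damage_array
F32 = numpy.float32
damage_dt = numpy.dtype([('structural', numpy.dtype(
    [('no_damage_mean', F32), ('no_damage_stdv', F32),
     ('complete_mean', F32), ('complete_stdv', F32)]))])
data = numpy.zeros((1, 1, 2, 2), F32)  # shape (A=1, L=1, MS=2, D=2)
data[0, 0, 0] = [0.7, 0.3]             # means per damage state
data[0, 0, 1] = [0.1, 0.1]             # stddevs per damage state
dmg = build_damage_array(data, damage_dt)
# dmg['structural'][0] == (0.7, 0.1, 0.3, 0.1): mean/stdv pairs per state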
gem/oq-engine
openquake/calculators/extract.py
extract_mfd
def extract_mfd(dstore, what): """ Display num_ruptures by magnitude for event based calculations. Example: http://127.0.0.1:8800/v1/calc/30/extract/event_based_mfd """ dd = collections.defaultdict(int) for rup in dstore['ruptures'].value: dd[rup['mag']] += 1 dt = numpy.dtype([('mag', float), ('freq', int)]) magfreq = numpy.array(sorted(dd.items(), key=operator.itemgetter(0)), dt) return magfreq
python
def extract_mfd(dstore, what): dd = collections.defaultdict(int) for rup in dstore['ruptures'].value: dd[rup['mag']] += 1 dt = numpy.dtype([('mag', float), ('freq', int)]) magfreq = numpy.array(sorted(dd.items(), key=operator.itemgetter(0)), dt) return magfreq
[ "def", "extract_mfd", "(", "dstore", ",", "what", ")", ":", "dd", "=", "collections", ".", "defaultdict", "(", "int", ")", "for", "rup", "in", "dstore", "[", "'ruptures'", "]", ".", "value", ":", "dd", "[", "rup", "[", "'mag'", "]", "]", "+=", "1", "dt", "=", "numpy", ".", "dtype", "(", "[", "(", "'mag'", ",", "float", ")", ",", "(", "'freq'", ",", "int", ")", "]", ")", "magfreq", "=", "numpy", ".", "array", "(", "sorted", "(", "dd", ".", "items", "(", ")", ",", "key", "=", "operator", ".", "itemgetter", "(", "0", ")", ")", ",", "dt", ")", "return", "magfreq" ]
Display num_ruptures by magnitude for event based calculations. Example: http://127.0.0.1:8800/v1/calc/30/extract/event_based_mfd
[ "Display", "num_ruptures", "by", "magnitude", "for", "event", "based", "calculations", ".", "Example", ":", "http", ":", "//", "127", ".", "0", ".", "0", ".", "1", ":", "8800", "/", "v1", "/", "calc", "/", "30", "/", "extract", "/", "event_based_mfd" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L742-L752
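The magnitude-frequency counting in extract_mfd reduces to a few lines of numpy; a self-contained illustration with made-up rupture magnitudes:
import collections
import operator
import numpy
mags = [5.0, 5.0, 5.5, 6.0, 5.5, 5.0]   # synthetic rupture magnitudes
dd = collections.defaultdict(int)
for mag in mags:
    dd[mag] += 1
dt = numpy.dtype([('mag', float), ('freq', int)])
magfreq = numpy.array(sorted(dd.items(), key=operator.itemgetter(0)), dt)
# magfreq -> [(5.0, 3), (5.5, 2), (6.0, 1)]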
gem/oq-engine
openquake/calculators/extract.py
extract_src_loss_table
def extract_src_loss_table(dstore, loss_type): """ Extract the source loss table for a give loss type, ordered in decreasing order. Example: http://127.0.0.1:8800/v1/calc/30/extract/src_loss_table/structural """ oq = dstore['oqparam'] li = oq.lti[loss_type] source_ids = dstore['source_info']['source_id'] idxs = dstore['ruptures'].value[['srcidx', 'grp_id']] losses = dstore['rup_loss_table'][:, li] slt = numpy.zeros(len(source_ids), [('grp_id', U32), (loss_type, F32)]) for loss, (srcidx, grp_id) in zip(losses, idxs): slt[srcidx][loss_type] += loss slt[srcidx]['grp_id'] = grp_id slt = util.compose_arrays(source_ids, slt, 'source_id') slt.sort(order=loss_type) return slt[::-1]
python
def extract_src_loss_table(dstore, loss_type): oq = dstore['oqparam'] li = oq.lti[loss_type] source_ids = dstore['source_info']['source_id'] idxs = dstore['ruptures'].value[['srcidx', 'grp_id']] losses = dstore['rup_loss_table'][:, li] slt = numpy.zeros(len(source_ids), [('grp_id', U32), (loss_type, F32)]) for loss, (srcidx, grp_id) in zip(losses, idxs): slt[srcidx][loss_type] += loss slt[srcidx]['grp_id'] = grp_id slt = util.compose_arrays(source_ids, slt, 'source_id') slt.sort(order=loss_type) return slt[::-1]
[ "def", "extract_src_loss_table", "(", "dstore", ",", "loss_type", ")", ":", "oq", "=", "dstore", "[", "'oqparam'", "]", "li", "=", "oq", ".", "lti", "[", "loss_type", "]", "source_ids", "=", "dstore", "[", "'source_info'", "]", "[", "'source_id'", "]", "idxs", "=", "dstore", "[", "'ruptures'", "]", ".", "value", "[", "[", "'srcidx'", ",", "'grp_id'", "]", "]", "losses", "=", "dstore", "[", "'rup_loss_table'", "]", "[", ":", ",", "li", "]", "slt", "=", "numpy", ".", "zeros", "(", "len", "(", "source_ids", ")", ",", "[", "(", "'grp_id'", ",", "U32", ")", ",", "(", "loss_type", ",", "F32", ")", "]", ")", "for", "loss", ",", "(", "srcidx", ",", "grp_id", ")", "in", "zip", "(", "losses", ",", "idxs", ")", ":", "slt", "[", "srcidx", "]", "[", "loss_type", "]", "+=", "loss", "slt", "[", "srcidx", "]", "[", "'grp_id'", "]", "=", "grp_id", "slt", "=", "util", ".", "compose_arrays", "(", "source_ids", ",", "slt", ",", "'source_id'", ")", "slt", ".", "sort", "(", "order", "=", "loss_type", ")", "return", "slt", "[", ":", ":", "-", "1", "]" ]
Extract the source loss table for a given loss type, ordered in decreasing order. Example: http://127.0.0.1:8800/v1/calc/30/extract/src_loss_table/structural
[ "Extract", "the", "source", "loss", "table", "for", "a", "given", "loss", "type", "ordered", "in", "decreasing", "order", ".", "Example", ":", "http", ":", "//", "127", ".", "0", ".", "0", ".", "1", ":", "8800", "/", "v1", "/", "calc", "/", "30", "/", "extract", "/", "src_loss_table", "/", "structural" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L756-L773
gem/oq-engine
openquake/calculators/extract.py
extract_mean_std_curves
def extract_mean_std_curves(dstore, what): """ Yield imls/IMT and poes/IMT containg mean and stddev for all sites """ getter = getters.PmapGetter(dstore) arr = getter.get_mean().array for imt in getter.imtls: yield 'imls/' + imt, getter.imtls[imt] yield 'poes/' + imt, arr[:, getter.imtls(imt)]
python
def extract_mean_std_curves(dstore, what): getter = getters.PmapGetter(dstore) arr = getter.get_mean().array for imt in getter.imtls: yield 'imls/' + imt, getter.imtls[imt] yield 'poes/' + imt, arr[:, getter.imtls(imt)]
[ "def", "extract_mean_std_curves", "(", "dstore", ",", "what", ")", ":", "getter", "=", "getters", ".", "PmapGetter", "(", "dstore", ")", "arr", "=", "getter", ".", "get_mean", "(", ")", ".", "array", "for", "imt", "in", "getter", ".", "imtls", ":", "yield", "'imls/'", "+", "imt", ",", "getter", ".", "imtls", "[", "imt", "]", "yield", "'poes/'", "+", "imt", ",", "arr", "[", ":", ",", "getter", ".", "imtls", "(", "imt", ")", "]" ]
Yield imls/IMT and poes/IMT containing mean and stddev for all sites
[ "Yield", "imls", "/", "IMT", "and", "poes", "/", "IMT", "containing", "mean", "and", "stddev", "for", "all", "sites" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L777-L785
gem/oq-engine
openquake/calculators/extract.py
losses_by_tag
def losses_by_tag(dstore, tag): """ Statistical average losses by tag. For instance call $ oq extract losses_by_tag/occupancy """ dt = [(tag, vstr)] + dstore['oqparam'].loss_dt_list() aids = dstore['assetcol/array'][tag] dset, stats = _get(dstore, 'avg_losses') arr = dset.value tagvalues = dstore['assetcol/tagcol/' + tag][1:] # except tagvalue="?" for s, stat in enumerate(stats): out = numpy.zeros(len(tagvalues), dt) for li, (lt, lt_dt) in enumerate(dt[1:]): for i, tagvalue in enumerate(tagvalues): out[i][tag] = tagvalue counts = arr[aids == i + 1, s, li].sum() if counts: out[i][lt] = counts yield stat, out
python
def losses_by_tag(dstore, tag): dt = [(tag, vstr)] + dstore['oqparam'].loss_dt_list() aids = dstore['assetcol/array'][tag] dset, stats = _get(dstore, 'avg_losses') arr = dset.value tagvalues = dstore['assetcol/tagcol/' + tag][1:] for s, stat in enumerate(stats): out = numpy.zeros(len(tagvalues), dt) for li, (lt, lt_dt) in enumerate(dt[1:]): for i, tagvalue in enumerate(tagvalues): out[i][tag] = tagvalue counts = arr[aids == i + 1, s, li].sum() if counts: out[i][lt] = counts yield stat, out
[ "def", "losses_by_tag", "(", "dstore", ",", "tag", ")", ":", "dt", "=", "[", "(", "tag", ",", "vstr", ")", "]", "+", "dstore", "[", "'oqparam'", "]", ".", "loss_dt_list", "(", ")", "aids", "=", "dstore", "[", "'assetcol/array'", "]", "[", "tag", "]", "dset", ",", "stats", "=", "_get", "(", "dstore", ",", "'avg_losses'", ")", "arr", "=", "dset", ".", "value", "tagvalues", "=", "dstore", "[", "'assetcol/tagcol/'", "+", "tag", "]", "[", "1", ":", "]", "# except tagvalue=\"?\"", "for", "s", ",", "stat", "in", "enumerate", "(", "stats", ")", ":", "out", "=", "numpy", ".", "zeros", "(", "len", "(", "tagvalues", ")", ",", "dt", ")", "for", "li", ",", "(", "lt", ",", "lt_dt", ")", "in", "enumerate", "(", "dt", "[", "1", ":", "]", ")", ":", "for", "i", ",", "tagvalue", "in", "enumerate", "(", "tagvalues", ")", ":", "out", "[", "i", "]", "[", "tag", "]", "=", "tagvalue", "counts", "=", "arr", "[", "aids", "==", "i", "+", "1", ",", "s", ",", "li", "]", ".", "sum", "(", ")", "if", "counts", ":", "out", "[", "i", "]", "[", "lt", "]", "=", "counts", "yield", "stat", ",", "out" ]
Statistical average losses by tag. For instance call $ oq extract losses_by_tag/occupancy
[ "Statistical", "average", "losses", "by", "tag", ".", "For", "instance", "call" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L808-L827
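A hedged sketch of the Python equivalent of the `oq extract losses_by_tag/occupancy` call mentioned above; the calc id and the availability of openquake.baselib.datastore.read in this version are assumptions:
from openquake.baselib import datastore
from openquake.calculators.extract import losses_by_tag
dstore = datastore.read(30)   # hypothetical calc id
for stat, table in losses_by_tag(dstore, 'occupancy'):
    print(stat)    # e.g. 'mean'
    print(table)   # one row per occupancy value, one column per loss type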
gem/oq-engine
openquake/calculators/extract.py
extract_rupture
def extract_rupture(dstore, serial): """ Extract information about the given event index. Example: http://127.0.0.1:8800/v1/calc/30/extract/rupture/1066 """ ridx = list(dstore['ruptures']['serial']).index(int(serial)) [getter] = getters.gen_rupture_getters(dstore, slice(ridx, ridx + 1)) yield from getter.get_rupdict().items()
python
def extract_rupture(dstore, serial): ridx = list(dstore['ruptures']['serial']).index(int(serial)) [getter] = getters.gen_rupture_getters(dstore, slice(ridx, ridx + 1)) yield from getter.get_rupdict().items()
[ "def", "extract_rupture", "(", "dstore", ",", "serial", ")", ":", "ridx", "=", "list", "(", "dstore", "[", "'ruptures'", "]", "[", "'serial'", "]", ")", ".", "index", "(", "int", "(", "serial", ")", ")", "[", "getter", "]", "=", "getters", ".", "gen_rupture_getters", "(", "dstore", ",", "slice", "(", "ridx", ",", "ridx", "+", "1", ")", ")", "yield", "from", "getter", ".", "get_rupdict", "(", ")", ".", "items", "(", ")" ]
Extract information about the rupture with the given serial number. Example: http://127.0.0.1:8800/v1/calc/30/extract/rupture/1066
[ "Extract", "information", "about", "the", "rupture", "with", "the", "given", "serial", "number", ".", "Example", ":", "http", ":", "//", "127", ".", "0", ".", "0", ".", "1", ":", "8800", "/", "v1", "/", "calc", "/", "30", "/", "extract", "/", "rupture", "/", "1066" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L831-L839
gem/oq-engine
openquake/calculators/extract.py
extract_event_info
def extract_event_info(dstore, eidx): """ Extract information about the given event index. Example: http://127.0.0.1:8800/v1/calc/30/extract/event_info/0 """ event = dstore['events'][int(eidx)] serial = int(event['eid'] // TWO32) ridx = list(dstore['ruptures']['serial']).index(serial) [getter] = getters.gen_rupture_getters(dstore, slice(ridx, ridx + 1)) rupdict = getter.get_rupdict() rlzi = event['rlz'] rlzs_assoc = dstore['csm_info'].get_rlzs_assoc() gsim = rlzs_assoc.gsim_by_trt[rlzi][rupdict['trt']] for key, val in rupdict.items(): yield key, val yield 'rlzi', rlzi yield 'gsim', repr(gsim)
python
def extract_event_info(dstore, eidx): event = dstore['events'][int(eidx)] serial = int(event['eid'] // TWO32) ridx = list(dstore['ruptures']['serial']).index(serial) [getter] = getters.gen_rupture_getters(dstore, slice(ridx, ridx + 1)) rupdict = getter.get_rupdict() rlzi = event['rlz'] rlzs_assoc = dstore['csm_info'].get_rlzs_assoc() gsim = rlzs_assoc.gsim_by_trt[rlzi][rupdict['trt']] for key, val in rupdict.items(): yield key, val yield 'rlzi', rlzi yield 'gsim', repr(gsim)
[ "def", "extract_event_info", "(", "dstore", ",", "eidx", ")", ":", "event", "=", "dstore", "[", "'events'", "]", "[", "int", "(", "eidx", ")", "]", "serial", "=", "int", "(", "event", "[", "'eid'", "]", "//", "TWO32", ")", "ridx", "=", "list", "(", "dstore", "[", "'ruptures'", "]", "[", "'serial'", "]", ")", ".", "index", "(", "serial", ")", "[", "getter", "]", "=", "getters", ".", "gen_rupture_getters", "(", "dstore", ",", "slice", "(", "ridx", ",", "ridx", "+", "1", ")", ")", "rupdict", "=", "getter", ".", "get_rupdict", "(", ")", "rlzi", "=", "event", "[", "'rlz'", "]", "rlzs_assoc", "=", "dstore", "[", "'csm_info'", "]", ".", "get_rlzs_assoc", "(", ")", "gsim", "=", "rlzs_assoc", ".", "gsim_by_trt", "[", "rlzi", "]", "[", "rupdict", "[", "'trt'", "]", "]", "for", "key", ",", "val", "in", "rupdict", ".", "items", "(", ")", ":", "yield", "key", ",", "val", "yield", "'rlzi'", ",", "rlzi", "yield", "'gsim'", ",", "repr", "(", "gsim", ")" ]
Extract information about the given event index. Example: http://127.0.0.1:8800/v1/calc/30/extract/event_info/0
[ "Extract", "information", "about", "the", "given", "event", "index", ".", "Example", ":", "http", ":", "//", "127", ".", "0", ".", "0", ".", "1", ":", "8800", "/", "v1", "/", "calc", "/", "30", "/", "extract", "/", "event_info", "/", "0" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L843-L860
gem/oq-engine
openquake/calculators/extract.py
get_ruptures_within
def get_ruptures_within(dstore, bbox): """ Extract the ruptures within the given bounding box, a string minlon,minlat,maxlon,maxlat. Example: http://127.0.0.1:8800/v1/calc/30/extract/ruptures_with/8,44,10,46 """ minlon, minlat, maxlon, maxlat = map(float, bbox.split(',')) hypo = dstore['ruptures']['hypo'].T # shape (3, N) mask = ((minlon <= hypo[0]) * (minlat <= hypo[1]) * (maxlon >= hypo[0]) * (maxlat >= hypo[1])) return dstore['ruptures'][mask]
python
def get_ruptures_within(dstore, bbox): minlon, minlat, maxlon, maxlat = map(float, bbox.split(',')) hypo = dstore['ruptures']['hypo'].T mask = ((minlon <= hypo[0]) * (minlat <= hypo[1]) * (maxlon >= hypo[0]) * (maxlat >= hypo[1])) return dstore['ruptures'][mask]
[ "def", "get_ruptures_within", "(", "dstore", ",", "bbox", ")", ":", "minlon", ",", "minlat", ",", "maxlon", ",", "maxlat", "=", "map", "(", "float", ",", "bbox", ".", "split", "(", "','", ")", ")", "hypo", "=", "dstore", "[", "'ruptures'", "]", "[", "'hypo'", "]", ".", "T", "# shape (3, N)", "mask", "=", "(", "(", "minlon", "<=", "hypo", "[", "0", "]", ")", "*", "(", "minlat", "<=", "hypo", "[", "1", "]", ")", "*", "(", "maxlon", ">=", "hypo", "[", "0", "]", ")", "*", "(", "maxlat", ">=", "hypo", "[", "1", "]", ")", ")", "return", "dstore", "[", "'ruptures'", "]", "[", "mask", "]" ]
Extract the ruptures within the given bounding box, a string minlon,minlat,maxlon,maxlat. Example: http://127.0.0.1:8800/v1/calc/30/extract/ruptures_with/8,44,10,46
[ "Extract", "the", "ruptures", "within", "the", "given", "bounding", "box", "a", "string", "minlon", "minlat", "maxlon", "maxlat", ".", "Example", ":", "http", ":", "//", "127", ".", "0", ".", "0", ".", "1", ":", "8800", "/", "v1", "/", "calc", "/", "30", "/", "extract", "/", "ruptures_with", "/", "8", "44", "10", "46" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L864-L875
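The bounding-box filter in get_ruptures_within is a plain boolean mask on the hypocenter coordinates; a self-contained illustration with two fake hypocenters and the bbox string from the docstring ('8,44,10,46'):
import numpy
# hypocenters as a (3, N) array: rows are lon, lat, depth
hypo = numpy.array([[9.1, 45.2, 10.0],
                    [12.0, 41.9, 5.0]]).T
minlon, minlat, maxlon, maxlat = 8.0, 44.0, 10.0, 46.0
mask = ((minlon <= hypo[0]) * (minlat <= hypo[1]) *
        (maxlon >= hypo[0]) * (maxlat >= hypo[1]))
# mask -> [ True, False]: only the first hypocenter falls inside the bbox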
gem/oq-engine
openquake/calculators/extract.py
extract_source_geom
def extract_source_geom(dstore, srcidxs): """ Extract the geometry of a given sources Example: http://127.0.0.1:8800/v1/calc/30/extract/source_geom/1,2,3 """ for i in srcidxs.split(','): rec = dstore['source_info'][int(i)] geom = dstore['source_geom'][rec['gidx1']:rec['gidx2']] yield rec['source_id'], geom
python
def extract_source_geom(dstore, srcidxs): for i in srcidxs.split(','): rec = dstore['source_info'][int(i)] geom = dstore['source_geom'][rec['gidx1']:rec['gidx2']] yield rec['source_id'], geom
[ "def", "extract_source_geom", "(", "dstore", ",", "srcidxs", ")", ":", "for", "i", "in", "srcidxs", ".", "split", "(", "','", ")", ":", "rec", "=", "dstore", "[", "'source_info'", "]", "[", "int", "(", "i", ")", "]", "geom", "=", "dstore", "[", "'source_geom'", "]", "[", "rec", "[", "'gidx1'", "]", ":", "rec", "[", "'gidx2'", "]", "]", "yield", "rec", "[", "'source_id'", "]", ",", "geom" ]
Extract the geometry of the given sources. Example: http://127.0.0.1:8800/v1/calc/30/extract/source_geom/1,2,3
[ "Extract", "the", "geometry", "of", "the", "given", "sources", ".", "Example", ":", "http", ":", "//", "127", ".", "0", ".", "0", ".", "1", ":", "8800", "/", "v1", "/", "calc", "/", "30", "/", "extract", "/", "source_geom", "/", "1", "2", "3" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L879-L888
gem/oq-engine
openquake/calculators/extract.py
WebExtractor.get
def get(self, what): """ :param what: what to extract :returns: an ArrayWrapper instance """ url = '%s/v1/calc/%d/extract/%s' % (self.server, self.calc_id, what) logging.info('GET %s', url) resp = self.sess.get(url) if resp.status_code != 200: raise WebAPIError(resp.text) npz = numpy.load(io.BytesIO(resp.content)) attrs = {k: npz[k] for k in npz if k != 'array'} try: arr = npz['array'] except KeyError: arr = () return ArrayWrapper(arr, attrs)
python
def get(self, what): url = '%s/v1/calc/%d/extract/%s' % (self.server, self.calc_id, what) logging.info('GET %s', url) resp = self.sess.get(url) if resp.status_code != 200: raise WebAPIError(resp.text) npz = numpy.load(io.BytesIO(resp.content)) attrs = {k: npz[k] for k in npz if k != 'array'} try: arr = npz['array'] except KeyError: arr = () return ArrayWrapper(arr, attrs)
[ "def", "get", "(", "self", ",", "what", ")", ":", "url", "=", "'%s/v1/calc/%d/extract/%s'", "%", "(", "self", ".", "server", ",", "self", ".", "calc_id", ",", "what", ")", "logging", ".", "info", "(", "'GET %s'", ",", "url", ")", "resp", "=", "self", ".", "sess", ".", "get", "(", "url", ")", "if", "resp", ".", "status_code", "!=", "200", ":", "raise", "WebAPIError", "(", "resp", ".", "text", ")", "npz", "=", "numpy", ".", "load", "(", "io", ".", "BytesIO", "(", "resp", ".", "content", ")", ")", "attrs", "=", "{", "k", ":", "npz", "[", "k", "]", "for", "k", "in", "npz", "if", "k", "!=", "'array'", "}", "try", ":", "arr", "=", "npz", "[", "'array'", "]", "except", "KeyError", ":", "arr", "=", "(", ")", "return", "ArrayWrapper", "(", "arr", ",", "attrs", ")" ]
:param what: what to extract :returns: an ArrayWrapper instance
[ ":", "param", "what", ":", "what", "to", "extract", ":", "returns", ":", "an", "ArrayWrapper", "instance" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L970-L986
gem/oq-engine
openquake/calculators/extract.py
WebExtractor.dump
def dump(self, fname): """ Dump the remote datastore on a local path. """ url = '%s/v1/calc/%d/datastore' % (self.server, self.calc_id) resp = self.sess.get(url, stream=True) down = 0 with open(fname, 'wb') as f: logging.info('Saving %s', fname) for chunk in resp.iter_content(CHUNKSIZE): f.write(chunk) down += len(chunk) println('Downloaded {:,} bytes'.format(down)) print()
python
def dump(self, fname): url = '%s/v1/calc/%d/datastore' % (self.server, self.calc_id) resp = self.sess.get(url, stream=True) down = 0 with open(fname, 'wb') as f: logging.info('Saving %s', fname) for chunk in resp.iter_content(CHUNKSIZE): f.write(chunk) down += len(chunk) println('Downloaded {:,} bytes'.format(down)) print()
[ "def", "dump", "(", "self", ",", "fname", ")", ":", "url", "=", "'%s/v1/calc/%d/datastore'", "%", "(", "self", ".", "server", ",", "self", ".", "calc_id", ")", "resp", "=", "self", ".", "sess", ".", "get", "(", "url", ",", "stream", "=", "True", ")", "down", "=", "0", "with", "open", "(", "fname", ",", "'wb'", ")", "as", "f", ":", "logging", ".", "info", "(", "'Saving %s'", ",", "fname", ")", "for", "chunk", "in", "resp", ".", "iter_content", "(", "CHUNKSIZE", ")", ":", "f", ".", "write", "(", "chunk", ")", "down", "+=", "len", "(", "chunk", ")", "println", "(", "'Downloaded {:,} bytes'", ".", "format", "(", "down", ")", ")", "print", "(", ")" ]
Dump the remote datastore on a local path.
[ "Dump", "the", "remote", "datastore", "on", "a", "local", "path", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/extract.py#L988-L1001
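A hedged end-to-end sketch combining the WebExtractor.get and WebExtractor.dump methods shown above; the constructor signature (calc id first, optional server argument) and the calc id are assumptions based on typical usage and are not shown in this section:
from openquake.calculators.extract import WebExtractor
xtr = WebExtractor(30, server='http://127.0.0.1:8800')  # hypothetical arguments
uhs = xtr.get('uhs?kind=mean')   # ArrayWrapper rebuilt from the returned .npz
xtr.dump('calc_30.hdf5')         # stream the full remote datastore to a local file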