repository_name
stringlengths
5
67
func_path_in_repository
stringlengths
4
234
func_name
stringlengths
0
314
whole_func_string
stringlengths
52
3.87M
language
stringclasses
6 values
func_code_string
stringlengths
39
1.84M
func_code_tokens
sequencelengths
15
672k
func_documentation_string
stringlengths
1
47.2k
func_documentation_tokens
sequencelengths
1
3.92k
split_name
stringclasses
1 value
func_code_url
stringlengths
85
339
gem/oq-engine
openquake/calculators/export/hazard.py
export_ruptures_csv
def export_ruptures_csv(ekey, dstore): """ :param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object """ oq = dstore['oqparam'] if 'scenario' in oq.calculation_mode: return [] dest = dstore.export_path('ruptures.csv') header = ('rupid multiplicity mag centroid_lon centroid_lat ' 'centroid_depth trt strike dip rake boundary').split() rows = [] for rgetter in gen_rupture_getters(dstore): rups = rgetter.get_ruptures() rup_data = calc.RuptureData(rgetter.trt, rgetter.rlzs_by_gsim) for r in rup_data.to_array(rups): rows.append( (r['rup_id'], r['multiplicity'], r['mag'], r['lon'], r['lat'], r['depth'], rgetter.trt, r['strike'], r['dip'], r['rake'], r['boundary'])) rows.sort() # by rupture serial comment = 'investigation_time=%s, ses_per_logic_tree_path=%s' % ( oq.investigation_time, oq.ses_per_logic_tree_path) writers.write_csv(dest, rows, header=header, sep='\t', comment=comment) return [dest]
python
def export_ruptures_csv(ekey, dstore): oq = dstore['oqparam'] if 'scenario' in oq.calculation_mode: return [] dest = dstore.export_path('ruptures.csv') header = ('rupid multiplicity mag centroid_lon centroid_lat ' 'centroid_depth trt strike dip rake boundary').split() rows = [] for rgetter in gen_rupture_getters(dstore): rups = rgetter.get_ruptures() rup_data = calc.RuptureData(rgetter.trt, rgetter.rlzs_by_gsim) for r in rup_data.to_array(rups): rows.append( (r['rup_id'], r['multiplicity'], r['mag'], r['lon'], r['lat'], r['depth'], rgetter.trt, r['strike'], r['dip'], r['rake'], r['boundary'])) rows.sort() comment = 'investigation_time=%s, ses_per_logic_tree_path=%s' % ( oq.investigation_time, oq.ses_per_logic_tree_path) writers.write_csv(dest, rows, header=header, sep='\t', comment=comment) return [dest]
[ "def", "export_ruptures_csv", "(", "ekey", ",", "dstore", ")", ":", "oq", "=", "dstore", "[", "'oqparam'", "]", "if", "'scenario'", "in", "oq", ".", "calculation_mode", ":", "return", "[", "]", "dest", "=", "dstore", ".", "export_path", "(", "'ruptures.csv'", ")", "header", "=", "(", "'rupid multiplicity mag centroid_lon centroid_lat '", "'centroid_depth trt strike dip rake boundary'", ")", ".", "split", "(", ")", "rows", "=", "[", "]", "for", "rgetter", "in", "gen_rupture_getters", "(", "dstore", ")", ":", "rups", "=", "rgetter", ".", "get_ruptures", "(", ")", "rup_data", "=", "calc", ".", "RuptureData", "(", "rgetter", ".", "trt", ",", "rgetter", ".", "rlzs_by_gsim", ")", "for", "r", "in", "rup_data", ".", "to_array", "(", "rups", ")", ":", "rows", ".", "append", "(", "(", "r", "[", "'rup_id'", "]", ",", "r", "[", "'multiplicity'", "]", ",", "r", "[", "'mag'", "]", ",", "r", "[", "'lon'", "]", ",", "r", "[", "'lat'", "]", ",", "r", "[", "'depth'", "]", ",", "rgetter", ".", "trt", ",", "r", "[", "'strike'", "]", ",", "r", "[", "'dip'", "]", ",", "r", "[", "'rake'", "]", ",", "r", "[", "'boundary'", "]", ")", ")", "rows", ".", "sort", "(", ")", "# by rupture serial", "comment", "=", "'investigation_time=%s, ses_per_logic_tree_path=%s'", "%", "(", "oq", ".", "investigation_time", ",", "oq", ".", "ses_per_logic_tree_path", ")", "writers", ".", "write_csv", "(", "dest", ",", "rows", ",", "header", "=", "header", ",", "sep", "=", "'\\t'", ",", "comment", "=", "comment", ")", "return", "[", "dest", "]" ]
:param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object
[ ":", "param", "ekey", ":", "export", "key", "i", ".", "e", ".", "a", "pair", "(", "datastore", "key", "fmt", ")", ":", "param", "dstore", ":", "datastore", "object" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/hazard.py#L81-L106
gem/oq-engine
openquake/calculators/export/hazard.py
export_hmaps_csv
def export_hmaps_csv(key, dest, sitemesh, array, comment): """ Export the hazard maps of the given realization into CSV. :param key: output_type and export_type :param dest: name of the exported file :param sitemesh: site collection :param array: a composite array of dtype hmap_dt :param comment: comment to use as header of the exported CSV file """ curves = util.compose_arrays(sitemesh, array) writers.write_csv(dest, curves, comment=comment) return [dest]
python
def export_hmaps_csv(key, dest, sitemesh, array, comment): curves = util.compose_arrays(sitemesh, array) writers.write_csv(dest, curves, comment=comment) return [dest]
[ "def", "export_hmaps_csv", "(", "key", ",", "dest", ",", "sitemesh", ",", "array", ",", "comment", ")", ":", "curves", "=", "util", ".", "compose_arrays", "(", "sitemesh", ",", "array", ")", "writers", ".", "write_csv", "(", "dest", ",", "curves", ",", "comment", "=", "comment", ")", "return", "[", "dest", "]" ]
Export the hazard maps of the given realization into CSV. :param key: output_type and export_type :param dest: name of the exported file :param sitemesh: site collection :param array: a composite array of dtype hmap_dt :param comment: comment to use as header of the exported CSV file
[ "Export", "the", "hazard", "maps", "of", "the", "given", "realization", "into", "CSV", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/hazard.py#L223-L235
gem/oq-engine
openquake/calculators/export/hazard.py
add_imt
def add_imt(fname, imt): """ >>> add_imt('/path/to/hcurve_23.csv', 'SA(0.1)') '/path/to/hcurve-SA(0.1)_23.csv' """ name = os.path.basename(fname) newname = re.sub(r'(_\d+\.)', '-%s\\1' % imt, name) return os.path.join(os.path.dirname(fname), newname)
python
def add_imt(fname, imt): name = os.path.basename(fname) newname = re.sub(r'(_\d+\.)', '-%s\\1' % imt, name) return os.path.join(os.path.dirname(fname), newname)
[ "def", "add_imt", "(", "fname", ",", "imt", ")", ":", "name", "=", "os", ".", "path", ".", "basename", "(", "fname", ")", "newname", "=", "re", ".", "sub", "(", "r'(_\\d+\\.)'", ",", "'-%s\\\\1'", "%", "imt", ",", "name", ")", "return", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "fname", ")", ",", "newname", ")" ]
>>> add_imt('/path/to/hcurve_23.csv', 'SA(0.1)') '/path/to/hcurve-SA(0.1)_23.csv'
[ ">>>", "add_imt", "(", "/", "path", "/", "to", "/", "hcurve_23", ".", "csv", "SA", "(", "0", ".", "1", ")", ")", "/", "path", "/", "to", "/", "hcurve", "-", "SA", "(", "0", ".", "1", ")", "_23", ".", "csv" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/hazard.py#L238-L245
gem/oq-engine
openquake/calculators/export/hazard.py
export_hcurves_by_imt_csv
def export_hcurves_by_imt_csv( key, kind, rlzs_assoc, fname, sitecol, array, oq, checksum): """ Export the curves of the given realization into CSV. :param key: output_type and export_type :param kind: a string with the kind of output (realization or statistics) :param rlzs_assoc: a :class:`openquake.commonlib.source.RlzsAssoc` instance :param fname: name of the exported file :param sitecol: site collection :param array: an array of shape (N, L) and dtype numpy.float32 :param oq: job.ini parameters """ nsites = len(sitecol) fnames = [] for imt, imls in oq.imtls.items(): slc = oq.imtls(imt) dest = add_imt(fname, imt) lst = [('lon', F32), ('lat', F32), ('depth', F32)] for iml in imls: lst.append(('poe-%s' % iml, F32)) hcurves = numpy.zeros(nsites, lst) for sid, lon, lat, dep in zip( range(nsites), sitecol.lons, sitecol.lats, sitecol.depths): hcurves[sid] = (lon, lat, dep) + tuple(array[sid, slc]) fnames.append(writers.write_csv(dest, hcurves, comment=_comment( rlzs_assoc, kind, oq.investigation_time) + ( ', imt="%s", checksum=%d' % (imt, checksum) ), header=[name for (name, dt) in lst])) return fnames
python
def export_hcurves_by_imt_csv( key, kind, rlzs_assoc, fname, sitecol, array, oq, checksum): nsites = len(sitecol) fnames = [] for imt, imls in oq.imtls.items(): slc = oq.imtls(imt) dest = add_imt(fname, imt) lst = [('lon', F32), ('lat', F32), ('depth', F32)] for iml in imls: lst.append(('poe-%s' % iml, F32)) hcurves = numpy.zeros(nsites, lst) for sid, lon, lat, dep in zip( range(nsites), sitecol.lons, sitecol.lats, sitecol.depths): hcurves[sid] = (lon, lat, dep) + tuple(array[sid, slc]) fnames.append(writers.write_csv(dest, hcurves, comment=_comment( rlzs_assoc, kind, oq.investigation_time) + ( ', imt="%s", checksum=%d' % (imt, checksum) ), header=[name for (name, dt) in lst])) return fnames
[ "def", "export_hcurves_by_imt_csv", "(", "key", ",", "kind", ",", "rlzs_assoc", ",", "fname", ",", "sitecol", ",", "array", ",", "oq", ",", "checksum", ")", ":", "nsites", "=", "len", "(", "sitecol", ")", "fnames", "=", "[", "]", "for", "imt", ",", "imls", "in", "oq", ".", "imtls", ".", "items", "(", ")", ":", "slc", "=", "oq", ".", "imtls", "(", "imt", ")", "dest", "=", "add_imt", "(", "fname", ",", "imt", ")", "lst", "=", "[", "(", "'lon'", ",", "F32", ")", ",", "(", "'lat'", ",", "F32", ")", ",", "(", "'depth'", ",", "F32", ")", "]", "for", "iml", "in", "imls", ":", "lst", ".", "append", "(", "(", "'poe-%s'", "%", "iml", ",", "F32", ")", ")", "hcurves", "=", "numpy", ".", "zeros", "(", "nsites", ",", "lst", ")", "for", "sid", ",", "lon", ",", "lat", ",", "dep", "in", "zip", "(", "range", "(", "nsites", ")", ",", "sitecol", ".", "lons", ",", "sitecol", ".", "lats", ",", "sitecol", ".", "depths", ")", ":", "hcurves", "[", "sid", "]", "=", "(", "lon", ",", "lat", ",", "dep", ")", "+", "tuple", "(", "array", "[", "sid", ",", "slc", "]", ")", "fnames", ".", "append", "(", "writers", ".", "write_csv", "(", "dest", ",", "hcurves", ",", "comment", "=", "_comment", "(", "rlzs_assoc", ",", "kind", ",", "oq", ".", "investigation_time", ")", "+", "(", "', imt=\"%s\", checksum=%d'", "%", "(", "imt", ",", "checksum", ")", ")", ",", "header", "=", "[", "name", "for", "(", "name", ",", "dt", ")", "in", "lst", "]", ")", ")", "return", "fnames" ]
Export the curves of the given realization into CSV. :param key: output_type and export_type :param kind: a string with the kind of output (realization or statistics) :param rlzs_assoc: a :class:`openquake.commonlib.source.RlzsAssoc` instance :param fname: name of the exported file :param sitecol: site collection :param array: an array of shape (N, L) and dtype numpy.float32 :param oq: job.ini parameters
[ "Export", "the", "curves", "of", "the", "given", "realization", "into", "CSV", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/hazard.py#L248-L277
gem/oq-engine
openquake/calculators/export/hazard.py
hazard_curve_name
def hazard_curve_name(dstore, ekey, kind, rlzs_assoc): """ :param calc_id: the calculation ID :param ekey: the export key :param kind: the kind of key :param rlzs_assoc: a RlzsAssoc instance """ key, fmt = ekey prefix = {'hcurves': 'hazard_curve', 'hmaps': 'hazard_map', 'uhs': 'hazard_uhs'}[key] if kind.startswith('quantile-'): # strip the 7 characters 'hazard_' fname = dstore.build_fname('quantile_' + prefix[7:], kind[9:], fmt) else: fname = dstore.build_fname(prefix, kind, fmt) return fname
python
def hazard_curve_name(dstore, ekey, kind, rlzs_assoc): key, fmt = ekey prefix = {'hcurves': 'hazard_curve', 'hmaps': 'hazard_map', 'uhs': 'hazard_uhs'}[key] if kind.startswith('quantile-'): fname = dstore.build_fname('quantile_' + prefix[7:], kind[9:], fmt) else: fname = dstore.build_fname(prefix, kind, fmt) return fname
[ "def", "hazard_curve_name", "(", "dstore", ",", "ekey", ",", "kind", ",", "rlzs_assoc", ")", ":", "key", ",", "fmt", "=", "ekey", "prefix", "=", "{", "'hcurves'", ":", "'hazard_curve'", ",", "'hmaps'", ":", "'hazard_map'", ",", "'uhs'", ":", "'hazard_uhs'", "}", "[", "key", "]", "if", "kind", ".", "startswith", "(", "'quantile-'", ")", ":", "# strip the 7 characters 'hazard_'", "fname", "=", "dstore", ".", "build_fname", "(", "'quantile_'", "+", "prefix", "[", "7", ":", "]", ",", "kind", "[", "9", ":", "]", ",", "fmt", ")", "else", ":", "fname", "=", "dstore", ".", "build_fname", "(", "prefix", ",", "kind", ",", "fmt", ")", "return", "fname" ]
:param calc_id: the calculation ID :param ekey: the export key :param kind: the kind of key :param rlzs_assoc: a RlzsAssoc instance
[ ":", "param", "calc_id", ":", "the", "calculation", "ID", ":", "param", "ekey", ":", "the", "export", "key", ":", "param", "kind", ":", "the", "kind", "of", "key", ":", "param", "rlzs_assoc", ":", "a", "RlzsAssoc", "instance" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/hazard.py#L280-L294
gem/oq-engine
openquake/calculators/export/hazard.py
get_kkf
def get_kkf(ekey): """ :param ekey: export key, for instance ('uhs/rlz-1', 'xml') :returns: key, kind and fmt from the export key, i.e. 'uhs', 'rlz-1', 'xml' """ key, fmt = ekey if '/' in key: key, kind = key.split('/', 1) else: kind = '' return key, kind, fmt
python
def get_kkf(ekey): key, fmt = ekey if '/' in key: key, kind = key.split('/', 1) else: kind = '' return key, kind, fmt
[ "def", "get_kkf", "(", "ekey", ")", ":", "key", ",", "fmt", "=", "ekey", "if", "'/'", "in", "key", ":", "key", ",", "kind", "=", "key", ".", "split", "(", "'/'", ",", "1", ")", "else", ":", "kind", "=", "''", "return", "key", ",", "kind", ",", "fmt" ]
:param ekey: export key, for instance ('uhs/rlz-1', 'xml') :returns: key, kind and fmt from the export key, i.e. 'uhs', 'rlz-1', 'xml'
[ ":", "param", "ekey", ":", "export", "key", "for", "instance", "(", "uhs", "/", "rlz", "-", "1", "xml", ")", ":", "returns", ":", "key", "kind", "and", "fmt", "from", "the", "export", "key", "i", ".", "e", ".", "uhs", "rlz", "-", "1", "xml" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/hazard.py#L319-L329
gem/oq-engine
openquake/calculators/export/hazard.py
export_hcurves_csv
def export_hcurves_csv(ekey, dstore): """ Exports the hazard curves into several .csv files :param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object """ oq = dstore['oqparam'] info = get_info(dstore) rlzs_assoc = dstore['csm_info'].get_rlzs_assoc() R = len(rlzs_assoc.realizations) sitecol = dstore['sitecol'] sitemesh = get_mesh(sitecol) key, kind, fmt = get_kkf(ekey) fnames = [] checksum = dstore.get_attr('/', 'checksum32') hmap_dt = oq.hmap_dt() for kind in oq.get_kinds(kind, R): fname = hazard_curve_name(dstore, (key, fmt), kind, rlzs_assoc) comment = _comment(rlzs_assoc, kind, oq.investigation_time) if (key in ('hmaps', 'uhs') and oq.uniform_hazard_spectra or oq.hazard_maps): hmap = extract(dstore, 'hmaps?kind=' + kind)[kind] if key == 'uhs' and oq.poes and oq.uniform_hazard_spectra: uhs_curves = calc.make_uhs(hmap, info) writers.write_csv( fname, util.compose_arrays(sitemesh, uhs_curves), comment=comment + ', checksum=%d' % checksum) fnames.append(fname) elif key == 'hmaps' and oq.poes and oq.hazard_maps: fnames.extend( export_hmaps_csv(ekey, fname, sitemesh, hmap.flatten().view(hmap_dt), comment + ', checksum=%d' % checksum)) elif key == 'hcurves': hcurves = extract(dstore, 'hcurves?kind=' + kind)[kind] fnames.extend( export_hcurves_by_imt_csv( ekey, kind, rlzs_assoc, fname, sitecol, hcurves, oq, checksum)) return sorted(fnames)
python
def export_hcurves_csv(ekey, dstore): oq = dstore['oqparam'] info = get_info(dstore) rlzs_assoc = dstore['csm_info'].get_rlzs_assoc() R = len(rlzs_assoc.realizations) sitecol = dstore['sitecol'] sitemesh = get_mesh(sitecol) key, kind, fmt = get_kkf(ekey) fnames = [] checksum = dstore.get_attr('/', 'checksum32') hmap_dt = oq.hmap_dt() for kind in oq.get_kinds(kind, R): fname = hazard_curve_name(dstore, (key, fmt), kind, rlzs_assoc) comment = _comment(rlzs_assoc, kind, oq.investigation_time) if (key in ('hmaps', 'uhs') and oq.uniform_hazard_spectra or oq.hazard_maps): hmap = extract(dstore, 'hmaps?kind=' + kind)[kind] if key == 'uhs' and oq.poes and oq.uniform_hazard_spectra: uhs_curves = calc.make_uhs(hmap, info) writers.write_csv( fname, util.compose_arrays(sitemesh, uhs_curves), comment=comment + ', checksum=%d' % checksum) fnames.append(fname) elif key == 'hmaps' and oq.poes and oq.hazard_maps: fnames.extend( export_hmaps_csv(ekey, fname, sitemesh, hmap.flatten().view(hmap_dt), comment + ', checksum=%d' % checksum)) elif key == 'hcurves': hcurves = extract(dstore, 'hcurves?kind=' + kind)[kind] fnames.extend( export_hcurves_by_imt_csv( ekey, kind, rlzs_assoc, fname, sitecol, hcurves, oq, checksum)) return sorted(fnames)
[ "def", "export_hcurves_csv", "(", "ekey", ",", "dstore", ")", ":", "oq", "=", "dstore", "[", "'oqparam'", "]", "info", "=", "get_info", "(", "dstore", ")", "rlzs_assoc", "=", "dstore", "[", "'csm_info'", "]", ".", "get_rlzs_assoc", "(", ")", "R", "=", "len", "(", "rlzs_assoc", ".", "realizations", ")", "sitecol", "=", "dstore", "[", "'sitecol'", "]", "sitemesh", "=", "get_mesh", "(", "sitecol", ")", "key", ",", "kind", ",", "fmt", "=", "get_kkf", "(", "ekey", ")", "fnames", "=", "[", "]", "checksum", "=", "dstore", ".", "get_attr", "(", "'/'", ",", "'checksum32'", ")", "hmap_dt", "=", "oq", ".", "hmap_dt", "(", ")", "for", "kind", "in", "oq", ".", "get_kinds", "(", "kind", ",", "R", ")", ":", "fname", "=", "hazard_curve_name", "(", "dstore", ",", "(", "key", ",", "fmt", ")", ",", "kind", ",", "rlzs_assoc", ")", "comment", "=", "_comment", "(", "rlzs_assoc", ",", "kind", ",", "oq", ".", "investigation_time", ")", "if", "(", "key", "in", "(", "'hmaps'", ",", "'uhs'", ")", "and", "oq", ".", "uniform_hazard_spectra", "or", "oq", ".", "hazard_maps", ")", ":", "hmap", "=", "extract", "(", "dstore", ",", "'hmaps?kind='", "+", "kind", ")", "[", "kind", "]", "if", "key", "==", "'uhs'", "and", "oq", ".", "poes", "and", "oq", ".", "uniform_hazard_spectra", ":", "uhs_curves", "=", "calc", ".", "make_uhs", "(", "hmap", ",", "info", ")", "writers", ".", "write_csv", "(", "fname", ",", "util", ".", "compose_arrays", "(", "sitemesh", ",", "uhs_curves", ")", ",", "comment", "=", "comment", "+", "', checksum=%d'", "%", "checksum", ")", "fnames", ".", "append", "(", "fname", ")", "elif", "key", "==", "'hmaps'", "and", "oq", ".", "poes", "and", "oq", ".", "hazard_maps", ":", "fnames", ".", "extend", "(", "export_hmaps_csv", "(", "ekey", ",", "fname", ",", "sitemesh", ",", "hmap", ".", "flatten", "(", ")", ".", "view", "(", "hmap_dt", ")", ",", "comment", "+", "', checksum=%d'", "%", "checksum", ")", ")", "elif", "key", "==", "'hcurves'", ":", "hcurves", "=", "extract", "(", 
"dstore", ",", "'hcurves?kind='", "+", "kind", ")", "[", "kind", "]", "fnames", ".", "extend", "(", "export_hcurves_by_imt_csv", "(", "ekey", ",", "kind", ",", "rlzs_assoc", ",", "fname", ",", "sitecol", ",", "hcurves", ",", "oq", ",", "checksum", ")", ")", "return", "sorted", "(", "fnames", ")" ]
Exports the hazard curves into several .csv files :param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object
[ "Exports", "the", "hazard", "curves", "into", "several", ".", "csv", "files" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/hazard.py#L333-L373
gem/oq-engine
openquake/calculators/export/hazard.py
get_metadata
def get_metadata(realizations, kind): """ :param list realizations: realization objects :param str kind: kind of data, i.e. a key in the datastore :returns: a dictionary with smlt_path, gsimlt_path, statistics, quantile_value """ metadata = {} if kind.startswith('rlz-'): rlz = realizations[int(kind[4:])] metadata['smlt_path'] = '_'.join(rlz.sm_lt_path) metadata['gsimlt_path'] = rlz.gsim_rlz.uid elif kind.startswith('quantile-'): metadata['statistics'] = 'quantile' metadata['quantile_value'] = float(kind[9:]) elif kind == 'mean': metadata['statistics'] = 'mean' elif kind == 'max': metadata['statistics'] = 'max' elif kind == 'std': metadata['statistics'] = 'std' return metadata
python
def get_metadata(realizations, kind): metadata = {} if kind.startswith('rlz-'): rlz = realizations[int(kind[4:])] metadata['smlt_path'] = '_'.join(rlz.sm_lt_path) metadata['gsimlt_path'] = rlz.gsim_rlz.uid elif kind.startswith('quantile-'): metadata['statistics'] = 'quantile' metadata['quantile_value'] = float(kind[9:]) elif kind == 'mean': metadata['statistics'] = 'mean' elif kind == 'max': metadata['statistics'] = 'max' elif kind == 'std': metadata['statistics'] = 'std' return metadata
[ "def", "get_metadata", "(", "realizations", ",", "kind", ")", ":", "metadata", "=", "{", "}", "if", "kind", ".", "startswith", "(", "'rlz-'", ")", ":", "rlz", "=", "realizations", "[", "int", "(", "kind", "[", "4", ":", "]", ")", "]", "metadata", "[", "'smlt_path'", "]", "=", "'_'", ".", "join", "(", "rlz", ".", "sm_lt_path", ")", "metadata", "[", "'gsimlt_path'", "]", "=", "rlz", ".", "gsim_rlz", ".", "uid", "elif", "kind", ".", "startswith", "(", "'quantile-'", ")", ":", "metadata", "[", "'statistics'", "]", "=", "'quantile'", "metadata", "[", "'quantile_value'", "]", "=", "float", "(", "kind", "[", "9", ":", "]", ")", "elif", "kind", "==", "'mean'", ":", "metadata", "[", "'statistics'", "]", "=", "'mean'", "elif", "kind", "==", "'max'", ":", "metadata", "[", "'statistics'", "]", "=", "'max'", "elif", "kind", "==", "'std'", ":", "metadata", "[", "'statistics'", "]", "=", "'std'", "return", "metadata" ]
:param list realizations: realization objects :param str kind: kind of data, i.e. a key in the datastore :returns: a dictionary with smlt_path, gsimlt_path, statistics, quantile_value
[ ":", "param", "list", "realizations", ":", "realization", "objects", ":", "param", "str", "kind", ":", "kind", "of", "data", "i", ".", "e", ".", "a", "key", "in", "the", "datastore", ":", "returns", ":", "a", "dictionary", "with", "smlt_path", "gsimlt_path", "statistics", "quantile_value" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/hazard.py#L379-L402
gem/oq-engine
openquake/calculators/export/hazard.py
export_gmf
def export_gmf(ekey, dstore): """ :param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object """ oq = dstore['oqparam'] if not oq.calculation_mode.startswith('scenario'): return [] sitecol = dstore['sitecol'] investigation_time = (None if oq.calculation_mode == 'scenario' else oq.investigation_time) fmt = ekey[-1] gmf_data = dstore['gmf_data'] nbytes = gmf_data.attrs['nbytes'] logging.info('Internal size of the GMFs: %s', humansize(nbytes)) if nbytes > GMF_MAX_SIZE: logging.warning(GMF_WARNING, dstore.filename) data = gmf_data['data'].value ses_idx = 1 # for scenario only events = [] for eid, gmfa in group_array(data, 'eid').items(): rup = Event(eid, ses_idx, sorted(set(gmfa['sid'])), gmfa) events.append(rup) fname = dstore.build_fname('gmf', 'scenario', fmt) writer = hazard_writers.EventBasedGMFXMLWriter( fname, sm_lt_path='', gsim_lt_path='') writer.serialize( GmfCollection(sitecol, oq.imtls, events, investigation_time)) return [fname]
python
def export_gmf(ekey, dstore): oq = dstore['oqparam'] if not oq.calculation_mode.startswith('scenario'): return [] sitecol = dstore['sitecol'] investigation_time = (None if oq.calculation_mode == 'scenario' else oq.investigation_time) fmt = ekey[-1] gmf_data = dstore['gmf_data'] nbytes = gmf_data.attrs['nbytes'] logging.info('Internal size of the GMFs: %s', humansize(nbytes)) if nbytes > GMF_MAX_SIZE: logging.warning(GMF_WARNING, dstore.filename) data = gmf_data['data'].value ses_idx = 1 events = [] for eid, gmfa in group_array(data, 'eid').items(): rup = Event(eid, ses_idx, sorted(set(gmfa['sid'])), gmfa) events.append(rup) fname = dstore.build_fname('gmf', 'scenario', fmt) writer = hazard_writers.EventBasedGMFXMLWriter( fname, sm_lt_path='', gsim_lt_path='') writer.serialize( GmfCollection(sitecol, oq.imtls, events, investigation_time)) return [fname]
[ "def", "export_gmf", "(", "ekey", ",", "dstore", ")", ":", "oq", "=", "dstore", "[", "'oqparam'", "]", "if", "not", "oq", ".", "calculation_mode", ".", "startswith", "(", "'scenario'", ")", ":", "return", "[", "]", "sitecol", "=", "dstore", "[", "'sitecol'", "]", "investigation_time", "=", "(", "None", "if", "oq", ".", "calculation_mode", "==", "'scenario'", "else", "oq", ".", "investigation_time", ")", "fmt", "=", "ekey", "[", "-", "1", "]", "gmf_data", "=", "dstore", "[", "'gmf_data'", "]", "nbytes", "=", "gmf_data", ".", "attrs", "[", "'nbytes'", "]", "logging", ".", "info", "(", "'Internal size of the GMFs: %s'", ",", "humansize", "(", "nbytes", ")", ")", "if", "nbytes", ">", "GMF_MAX_SIZE", ":", "logging", ".", "warning", "(", "GMF_WARNING", ",", "dstore", ".", "filename", ")", "data", "=", "gmf_data", "[", "'data'", "]", ".", "value", "ses_idx", "=", "1", "# for scenario only", "events", "=", "[", "]", "for", "eid", ",", "gmfa", "in", "group_array", "(", "data", ",", "'eid'", ")", ".", "items", "(", ")", ":", "rup", "=", "Event", "(", "eid", ",", "ses_idx", ",", "sorted", "(", "set", "(", "gmfa", "[", "'sid'", "]", ")", ")", ",", "gmfa", ")", "events", ".", "append", "(", "rup", ")", "fname", "=", "dstore", ".", "build_fname", "(", "'gmf'", ",", "'scenario'", ",", "fmt", ")", "writer", "=", "hazard_writers", ".", "EventBasedGMFXMLWriter", "(", "fname", ",", "sm_lt_path", "=", "''", ",", "gsim_lt_path", "=", "''", ")", "writer", ".", "serialize", "(", "GmfCollection", "(", "sitecol", ",", "oq", ".", "imtls", ",", "events", ",", "investigation_time", ")", ")", "return", "[", "fname", "]" ]
:param ekey: export key, i.e. a pair (datastore key, fmt) :param dstore: datastore object
[ ":", "param", "ekey", ":", "export", "key", "i", ".", "e", ".", "a", "pair", "(", "datastore", "key", "fmt", ")", ":", "param", "dstore", ":", "datastore", "object" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/hazard.py#L535-L563
gem/oq-engine
openquake/calculators/export/hazard.py
save_disagg_to_csv
def save_disagg_to_csv(metadata, matrices): """ Save disaggregation matrices to multiple .csv files. """ skip_keys = ('Mag', 'Dist', 'Lon', 'Lat', 'Eps', 'TRT') base_header = ','.join( '%s=%s' % (key, value) for key, value in metadata.items() if value is not None and key not in skip_keys) for disag_tup, (poe, iml, matrix, fname) in matrices.items(): header = '%s,poe=%.7f,iml=%.7e\n' % (base_header, poe, iml) if disag_tup == ('Mag', 'Lon', 'Lat'): matrix = numpy.swapaxes(matrix, 0, 1) matrix = numpy.swapaxes(matrix, 1, 2) disag_tup = ('Lon', 'Lat', 'Mag') axis = [metadata[v] for v in disag_tup] header += ','.join(v for v in disag_tup) header += ',poe' # compute axis mid points axis = [(ax[: -1] + ax[1:]) / 2. if ax.dtype == float else ax for ax in axis] values = None if len(axis) == 1: values = numpy.array([axis[0], matrix.flatten()]).T else: grids = numpy.meshgrid(*axis, indexing='ij') values = [g.flatten() for g in grids] values.append(matrix.flatten()) values = numpy.array(values).T writers.write_csv(fname, values, comment=header, fmt='%.5E')
python
def save_disagg_to_csv(metadata, matrices): skip_keys = ('Mag', 'Dist', 'Lon', 'Lat', 'Eps', 'TRT') base_header = ','.join( '%s=%s' % (key, value) for key, value in metadata.items() if value is not None and key not in skip_keys) for disag_tup, (poe, iml, matrix, fname) in matrices.items(): header = '%s,poe=%.7f,iml=%.7e\n' % (base_header, poe, iml) if disag_tup == ('Mag', 'Lon', 'Lat'): matrix = numpy.swapaxes(matrix, 0, 1) matrix = numpy.swapaxes(matrix, 1, 2) disag_tup = ('Lon', 'Lat', 'Mag') axis = [metadata[v] for v in disag_tup] header += ','.join(v for v in disag_tup) header += ',poe' axis = [(ax[: -1] + ax[1:]) / 2. if ax.dtype == float else ax for ax in axis] values = None if len(axis) == 1: values = numpy.array([axis[0], matrix.flatten()]).T else: grids = numpy.meshgrid(*axis, indexing='ij') values = [g.flatten() for g in grids] values.append(matrix.flatten()) values = numpy.array(values).T writers.write_csv(fname, values, comment=header, fmt='%.5E')
[ "def", "save_disagg_to_csv", "(", "metadata", ",", "matrices", ")", ":", "skip_keys", "=", "(", "'Mag'", ",", "'Dist'", ",", "'Lon'", ",", "'Lat'", ",", "'Eps'", ",", "'TRT'", ")", "base_header", "=", "','", ".", "join", "(", "'%s=%s'", "%", "(", "key", ",", "value", ")", "for", "key", ",", "value", "in", "metadata", ".", "items", "(", ")", "if", "value", "is", "not", "None", "and", "key", "not", "in", "skip_keys", ")", "for", "disag_tup", ",", "(", "poe", ",", "iml", ",", "matrix", ",", "fname", ")", "in", "matrices", ".", "items", "(", ")", ":", "header", "=", "'%s,poe=%.7f,iml=%.7e\\n'", "%", "(", "base_header", ",", "poe", ",", "iml", ")", "if", "disag_tup", "==", "(", "'Mag'", ",", "'Lon'", ",", "'Lat'", ")", ":", "matrix", "=", "numpy", ".", "swapaxes", "(", "matrix", ",", "0", ",", "1", ")", "matrix", "=", "numpy", ".", "swapaxes", "(", "matrix", ",", "1", ",", "2", ")", "disag_tup", "=", "(", "'Lon'", ",", "'Lat'", ",", "'Mag'", ")", "axis", "=", "[", "metadata", "[", "v", "]", "for", "v", "in", "disag_tup", "]", "header", "+=", "','", ".", "join", "(", "v", "for", "v", "in", "disag_tup", ")", "header", "+=", "',poe'", "# compute axis mid points", "axis", "=", "[", "(", "ax", "[", ":", "-", "1", "]", "+", "ax", "[", "1", ":", "]", ")", "/", "2.", "if", "ax", ".", "dtype", "==", "float", "else", "ax", "for", "ax", "in", "axis", "]", "values", "=", "None", "if", "len", "(", "axis", ")", "==", "1", ":", "values", "=", "numpy", ".", "array", "(", "[", "axis", "[", "0", "]", ",", "matrix", ".", "flatten", "(", ")", "]", ")", ".", "T", "else", ":", "grids", "=", "numpy", ".", "meshgrid", "(", "*", "axis", ",", "indexing", "=", "'ij'", ")", "values", "=", "[", "g", ".", "flatten", "(", ")", "for", "g", "in", "grids", "]", "values", ".", "append", "(", "matrix", ".", "flatten", "(", ")", ")", "values", "=", "numpy", ".", "array", "(", "values", ")", ".", "T", "writers", ".", "write_csv", "(", "fname", ",", "values", ",", "comment", "=", "header", ",", "fmt", "=", 
"'%.5E'", ")" ]
Save disaggregation matrices to multiple .csv files.
[ "Save", "disaggregation", "matrices", "to", "multiple", ".", "csv", "files", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/export/hazard.py#L743-L776
gem/oq-engine
openquake/hazardlib/gsim/bradley_2013b.py
Bradley2013bChchCBD.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. """ # extracting dictionary of coefficients specific to required # intensity measure type. C = self.COEFFS[imt] if isinstance(imt, PGA): imt_per = 0.0 else: imt_per = imt.period # Fix site parameters for consistent dS2S application. sites.vs30 = np.array([250]) sites.z1pt0 = np.array([330]) # intensity on a reference soil is used for both mean # and stddev calculations. ln_y_ref = self._get_ln_y_ref(rup, dists, C) # exp1 and exp2 are parts of eq. 7 exp1 = np.exp(C['phi3'] * (sites.vs30.clip(-np.inf, 1130) - 360)) exp2 = np.exp(C['phi3'] * (1130 - 360)) # v1 is the period dependent site term. The Vs30 above which, the # amplification is constant v1 = self._get_v1(imt) # Get log-mean from regular unadjusted model b13a_mean = self._get_mean(sites, C, ln_y_ref, exp1, exp2, v1) # Adjust mean and standard deviation mean = b13a_mean + self._get_dL2L(imt_per) + self._get_dS2S(imt_per) mean += convert_to_LHC(imt) stddevs = self._get_adjusted_stddevs(sites, rup, C, stddev_types, ln_y_ref, exp1, exp2, imt_per) return mean, stddevs
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): C = self.COEFFS[imt] if isinstance(imt, PGA): imt_per = 0.0 else: imt_per = imt.period sites.vs30 = np.array([250]) sites.z1pt0 = np.array([330]) ln_y_ref = self._get_ln_y_ref(rup, dists, C) exp1 = np.exp(C['phi3'] * (sites.vs30.clip(-np.inf, 1130) - 360)) exp2 = np.exp(C['phi3'] * (1130 - 360)) v1 = self._get_v1(imt) b13a_mean = self._get_mean(sites, C, ln_y_ref, exp1, exp2, v1) mean = b13a_mean + self._get_dL2L(imt_per) + self._get_dS2S(imt_per) mean += convert_to_LHC(imt) stddevs = self._get_adjusted_stddevs(sites, rup, C, stddev_types, ln_y_ref, exp1, exp2, imt_per) return mean, stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "# extracting dictionary of coefficients specific to required", "# intensity measure type.", "C", "=", "self", ".", "COEFFS", "[", "imt", "]", "if", "isinstance", "(", "imt", ",", "PGA", ")", ":", "imt_per", "=", "0.0", "else", ":", "imt_per", "=", "imt", ".", "period", "# Fix site parameters for consistent dS2S application.", "sites", ".", "vs30", "=", "np", ".", "array", "(", "[", "250", "]", ")", "sites", ".", "z1pt0", "=", "np", ".", "array", "(", "[", "330", "]", ")", "# intensity on a reference soil is used for both mean", "# and stddev calculations.", "ln_y_ref", "=", "self", ".", "_get_ln_y_ref", "(", "rup", ",", "dists", ",", "C", ")", "# exp1 and exp2 are parts of eq. 7", "exp1", "=", "np", ".", "exp", "(", "C", "[", "'phi3'", "]", "*", "(", "sites", ".", "vs30", ".", "clip", "(", "-", "np", ".", "inf", ",", "1130", ")", "-", "360", ")", ")", "exp2", "=", "np", ".", "exp", "(", "C", "[", "'phi3'", "]", "*", "(", "1130", "-", "360", ")", ")", "# v1 is the period dependent site term. The Vs30 above which, the", "# amplification is constant", "v1", "=", "self", ".", "_get_v1", "(", "imt", ")", "# Get log-mean from regular unadjusted model", "b13a_mean", "=", "self", ".", "_get_mean", "(", "sites", ",", "C", ",", "ln_y_ref", ",", "exp1", ",", "exp2", ",", "v1", ")", "# Adjust mean and standard deviation", "mean", "=", "b13a_mean", "+", "self", ".", "_get_dL2L", "(", "imt_per", ")", "+", "self", ".", "_get_dS2S", "(", "imt_per", ")", "mean", "+=", "convert_to_LHC", "(", "imt", ")", "stddevs", "=", "self", ".", "_get_adjusted_stddevs", "(", "sites", ",", "rup", ",", "C", ",", "stddev_types", ",", "ln_y_ref", ",", "exp1", ",", "exp2", ",", "imt_per", ")", "return", "mean", ",", "stddevs" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/bradley_2013b.py#L63-L96
gem/oq-engine
openquake/hazardlib/gsim/bradley_2013b.py
Bradley2013bChchCBD._interp_function
def _interp_function(self, y_ip1, y_i, t_ip1, t_i, imt_per): """ Generic interpolation function used in equation 19 of 2013 report. """ return y_i + (y_ip1 - y_i) / (t_ip1 - t_i) * (imt_per - t_i)
python
def _interp_function(self, y_ip1, y_i, t_ip1, t_i, imt_per): return y_i + (y_ip1 - y_i) / (t_ip1 - t_i) * (imt_per - t_i)
[ "def", "_interp_function", "(", "self", ",", "y_ip1", ",", "y_i", ",", "t_ip1", ",", "t_i", ",", "imt_per", ")", ":", "return", "y_i", "+", "(", "y_ip1", "-", "y_i", ")", "/", "(", "t_ip1", "-", "t_i", ")", "*", "(", "imt_per", "-", "t_i", ")" ]
Generic interpolation function used in equation 19 of 2013 report.
[ "Generic", "interpolation", "function", "used", "in", "equation", "19", "of", "2013", "report", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/bradley_2013b.py#L154-L158
gem/oq-engine
openquake/hazardlib/gsim/bradley_2013b.py
Bradley2013bChchCBD._get_SRF_tau
def _get_SRF_tau(self, imt_per): """ Table 6 and equation 19 of 2013 report. """ if imt_per < 1: srf = 0.87 elif 1 <= imt_per < 5: srf = self._interp_function(0.58, 0.87, 5, 1, imt_per) elif 5 <= imt_per <= 10: srf = 0.58 else: srf = 1 return srf
python
def _get_SRF_tau(self, imt_per): if imt_per < 1: srf = 0.87 elif 1 <= imt_per < 5: srf = self._interp_function(0.58, 0.87, 5, 1, imt_per) elif 5 <= imt_per <= 10: srf = 0.58 else: srf = 1 return srf
[ "def", "_get_SRF_tau", "(", "self", ",", "imt_per", ")", ":", "if", "imt_per", "<", "1", ":", "srf", "=", "0.87", "elif", "1", "<=", "imt_per", "<", "5", ":", "srf", "=", "self", ".", "_interp_function", "(", "0.58", ",", "0.87", ",", "5", ",", "1", ",", "imt_per", ")", "elif", "5", "<=", "imt_per", "<=", "10", ":", "srf", "=", "0.58", "else", ":", "srf", "=", "1", "return", "srf" ]
Table 6 and equation 19 of 2013 report.
[ "Table", "6", "and", "equation", "19", "of", "2013", "report", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/bradley_2013b.py#L160-L173
gem/oq-engine
openquake/hazardlib/gsim/bradley_2013b.py
Bradley2013bChchCBD._get_SRF_phi
def _get_SRF_phi(self, imt_per): """ Table 7 and equation 19 of 2013 report. NB change in notation, 2013 report calls this term 'sigma' but it is referred to here as phi. """ if imt_per < 0.6: srf = 0.8 elif 0.6 <= imt_per < 1: srf = self._interp_function(0.7, 0.8, 1, 0.6, imt_per) elif 1 <= imt_per <= 10: srf = self._interp_function(0.6, 0.7, 10, 1, imt_per) else: srf = 1 return srf
python
def _get_SRF_phi(self, imt_per): if imt_per < 0.6: srf = 0.8 elif 0.6 <= imt_per < 1: srf = self._interp_function(0.7, 0.8, 1, 0.6, imt_per) elif 1 <= imt_per <= 10: srf = self._interp_function(0.6, 0.7, 10, 1, imt_per) else: srf = 1 return srf
[ "def", "_get_SRF_phi", "(", "self", ",", "imt_per", ")", ":", "if", "imt_per", "<", "0.6", ":", "srf", "=", "0.8", "elif", "0.6", "<=", "imt_per", "<", "1", ":", "srf", "=", "self", ".", "_interp_function", "(", "0.7", ",", "0.8", ",", "1", ",", "0.6", ",", "imt_per", ")", "elif", "1", "<=", "imt_per", "<=", "10", ":", "srf", "=", "self", ".", "_interp_function", "(", "0.6", ",", "0.7", ",", "10", ",", "1", ",", "imt_per", ")", "else", ":", "srf", "=", "1", "return", "srf" ]
Table 7 and equation 19 of 2013 report. NB change in notation, 2013 report calls this term 'sigma' but it is referred to here as phi.
[ "Table", "7", "and", "equation", "19", "of", "2013", "report", ".", "NB", "change", "in", "notation", "2013", "report", "calls", "this", "term", "sigma", "but", "it", "is", "referred", "to", "here", "as", "phi", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/bradley_2013b.py#L175-L190
gem/oq-engine
openquake/hazardlib/gsim/bradley_2013b.py
Bradley2013bChchCBD._get_SRF_sigma
def _get_SRF_sigma(self, imt_per): """ Table 8 and equation 19 of 2013 report. NB change in notation, 2013 report calls this term 'sigma_t' but it is referred to here as sigma. Note that Table 8 is identical to Table 7 in the 2013 report. """ if imt_per < 0.6: srf = 0.8 elif 0.6 <= imt_per < 1: srf = self._interp_function(0.7, 0.8, 1, 0.6, imt_per) elif 1 <= imt_per <= 10: srf = self._interp_function(0.6, 0.7, 10, 1, imt_per) else: srf = 1 return srf
python
def _get_SRF_sigma(self, imt_per): if imt_per < 0.6: srf = 0.8 elif 0.6 <= imt_per < 1: srf = self._interp_function(0.7, 0.8, 1, 0.6, imt_per) elif 1 <= imt_per <= 10: srf = self._interp_function(0.6, 0.7, 10, 1, imt_per) else: srf = 1 return srf
[ "def", "_get_SRF_sigma", "(", "self", ",", "imt_per", ")", ":", "if", "imt_per", "<", "0.6", ":", "srf", "=", "0.8", "elif", "0.6", "<=", "imt_per", "<", "1", ":", "srf", "=", "self", ".", "_interp_function", "(", "0.7", ",", "0.8", ",", "1", ",", "0.6", ",", "imt_per", ")", "elif", "1", "<=", "imt_per", "<=", "10", ":", "srf", "=", "self", ".", "_interp_function", "(", "0.6", ",", "0.7", ",", "10", ",", "1", ",", "imt_per", ")", "else", ":", "srf", "=", "1", "return", "srf" ]
Table 8 and equation 19 of 2013 report. NB change in notation, 2013 report calls this term 'sigma_t' but it is referred to here as sigma. Note that Table 8 is identical to Table 7 in the 2013 report.
[ "Table", "8", "and", "equation", "19", "of", "2013", "report", ".", "NB", "change", "in", "notation", "2013", "report", "calls", "this", "term", "sigma_t", "but", "it", "is", "referred", "to", "here", "as", "sigma", ".", "Note", "that", "Table", "8", "is", "identical", "to", "Table", "7", "in", "the", "2013", "report", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/bradley_2013b.py#L192-L208
gem/oq-engine
openquake/hazardlib/gsim/bradley_2013b.py
Bradley2013bChchCBD._get_dL2L
def _get_dL2L(self, imt_per): """ Table 3 and equation 19 of 2013 report. """ if imt_per < 0.18: dL2L = -0.06 elif 0.18 <= imt_per < 0.35: dL2L = self._interp_function(0.12, -0.06, 0.35, 0.18, imt_per) elif 0.35 <= imt_per <= 10: dL2L = self._interp_function(0.65, 0.12, 10, 0.35, imt_per) else: dL2L = 0 return dL2L
python
def _get_dL2L(self, imt_per): if imt_per < 0.18: dL2L = -0.06 elif 0.18 <= imt_per < 0.35: dL2L = self._interp_function(0.12, -0.06, 0.35, 0.18, imt_per) elif 0.35 <= imt_per <= 10: dL2L = self._interp_function(0.65, 0.12, 10, 0.35, imt_per) else: dL2L = 0 return dL2L
[ "def", "_get_dL2L", "(", "self", ",", "imt_per", ")", ":", "if", "imt_per", "<", "0.18", ":", "dL2L", "=", "-", "0.06", "elif", "0.18", "<=", "imt_per", "<", "0.35", ":", "dL2L", "=", "self", ".", "_interp_function", "(", "0.12", ",", "-", "0.06", ",", "0.35", ",", "0.18", ",", "imt_per", ")", "elif", "0.35", "<=", "imt_per", "<=", "10", ":", "dL2L", "=", "self", ".", "_interp_function", "(", "0.65", ",", "0.12", ",", "10", ",", "0.35", ",", "imt_per", ")", "else", ":", "dL2L", "=", "0", "return", "dL2L" ]
Table 3 and equation 19 of 2013 report.
[ "Table", "3", "and", "equation", "19", "of", "2013", "report", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/bradley_2013b.py#L210-L223
gem/oq-engine
openquake/hazardlib/gsim/bradley_2013b.py
Bradley2013bChchCBD._get_dS2S
def _get_dS2S(self, imt_per): """ Table 4 of 2013 report """ if imt_per == 0: dS2S = 0.05 elif 0 < imt_per < 0.15: dS2S = self._interp_function(-0.15, 0.05, 0.15, 0, imt_per) elif 0.15 <= imt_per < 0.45: dS2S = self._interp_function(0.4, -0.15, 0.45, 0.15, imt_per) elif 0.45 <= imt_per < 3.2: dS2S = 0.4 elif 3.2 <= imt_per < 5: dS2S = self._interp_function(0.08, 0.4, 5, 3.2, imt_per) elif 5 <= imt_per <= 10: dS2S = 0.08 else: dS2S = 0 return dS2S
python
def _get_dS2S(self, imt_per): if imt_per == 0: dS2S = 0.05 elif 0 < imt_per < 0.15: dS2S = self._interp_function(-0.15, 0.05, 0.15, 0, imt_per) elif 0.15 <= imt_per < 0.45: dS2S = self._interp_function(0.4, -0.15, 0.45, 0.15, imt_per) elif 0.45 <= imt_per < 3.2: dS2S = 0.4 elif 3.2 <= imt_per < 5: dS2S = self._interp_function(0.08, 0.4, 5, 3.2, imt_per) elif 5 <= imt_per <= 10: dS2S = 0.08 else: dS2S = 0 return dS2S
[ "def", "_get_dS2S", "(", "self", ",", "imt_per", ")", ":", "if", "imt_per", "==", "0", ":", "dS2S", "=", "0.05", "elif", "0", "<", "imt_per", "<", "0.15", ":", "dS2S", "=", "self", ".", "_interp_function", "(", "-", "0.15", ",", "0.05", ",", "0.15", ",", "0", ",", "imt_per", ")", "elif", "0.15", "<=", "imt_per", "<", "0.45", ":", "dS2S", "=", "self", ".", "_interp_function", "(", "0.4", ",", "-", "0.15", ",", "0.45", ",", "0.15", ",", "imt_per", ")", "elif", "0.45", "<=", "imt_per", "<", "3.2", ":", "dS2S", "=", "0.4", "elif", "3.2", "<=", "imt_per", "<", "5", ":", "dS2S", "=", "self", ".", "_interp_function", "(", "0.08", ",", "0.4", ",", "5", ",", "3.2", ",", "imt_per", ")", "elif", "5", "<=", "imt_per", "<=", "10", ":", "dS2S", "=", "0.08", "else", ":", "dS2S", "=", "0", "return", "dS2S" ]
Table 4 of 2013 report
[ "Table", "4", "of", "2013", "report" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/bradley_2013b.py#L225-L244
gem/oq-engine
openquake/hazardlib/calc/filters.py
context
def context(src): """ Used to add the source_id to the error message. To be used as with context(src): operation_with(src) Typically the operation is filtering a source, that can fail for tricky geometries. """ try: yield except Exception: etype, err, tb = sys.exc_info() msg = 'An error occurred with source id=%s. Error: %s' msg %= (src.source_id, err) raise_(etype, msg, tb)
python
def context(src): try: yield except Exception: etype, err, tb = sys.exc_info() msg = 'An error occurred with source id=%s. Error: %s' msg %= (src.source_id, err) raise_(etype, msg, tb)
[ "def", "context", "(", "src", ")", ":", "try", ":", "yield", "except", "Exception", ":", "etype", ",", "err", ",", "tb", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'An error occurred with source id=%s. Error: %s'", "msg", "%=", "(", "src", ".", "source_id", ",", "err", ")", "raise_", "(", "etype", ",", "msg", ",", "tb", ")" ]
Used to add the source_id to the error message. To be used as with context(src): operation_with(src) Typically the operation is filtering a source, that can fail for tricky geometries.
[ "Used", "to", "add", "the", "source_id", "to", "the", "error", "message", ".", "To", "be", "used", "as" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/filters.py#L39-L55
gem/oq-engine
openquake/hazardlib/calc/filters.py
split_sources
def split_sources(srcs): """ :param srcs: sources :returns: a pair (split sources, split time) or just the split_sources """ from openquake.hazardlib.source import splittable sources = [] split_time = {} # src.id -> time for src in srcs: t0 = time.time() mag_a, mag_b = src.get_min_max_mag() min_mag = src.min_mag if mag_b < min_mag: # discard the source completely continue has_serial = hasattr(src, 'serial') if has_serial: src.serial = numpy.arange( src.serial, src.serial + src.num_ruptures) if not splittable(src): sources.append(src) split_time[src.id] = time.time() - t0 continue if min_mag: splits = [] for s in src: s.min_mag = min_mag mag_a, mag_b = s.get_min_max_mag() if mag_b < min_mag: continue s.num_ruptures = s.count_ruptures() if s.num_ruptures: splits.append(s) else: splits = list(src) split_time[src.id] = time.time() - t0 sources.extend(splits) has_samples = hasattr(src, 'samples') if len(splits) > 1: start = 0 for i, split in enumerate(splits): split.source_id = '%s:%s' % (src.source_id, i) split.src_group_id = src.src_group_id split.id = src.id if has_serial: nr = split.num_ruptures split.serial = src.serial[start:start + nr] start += nr if has_samples: split.samples = src.samples elif splits: # single source splits[0].id = src.id if has_serial: splits[0].serial = src.serial if has_samples: splits[0].samples = src.samples return sources, split_time
python
def split_sources(srcs): from openquake.hazardlib.source import splittable sources = [] split_time = {} for src in srcs: t0 = time.time() mag_a, mag_b = src.get_min_max_mag() min_mag = src.min_mag if mag_b < min_mag: continue has_serial = hasattr(src, 'serial') if has_serial: src.serial = numpy.arange( src.serial, src.serial + src.num_ruptures) if not splittable(src): sources.append(src) split_time[src.id] = time.time() - t0 continue if min_mag: splits = [] for s in src: s.min_mag = min_mag mag_a, mag_b = s.get_min_max_mag() if mag_b < min_mag: continue s.num_ruptures = s.count_ruptures() if s.num_ruptures: splits.append(s) else: splits = list(src) split_time[src.id] = time.time() - t0 sources.extend(splits) has_samples = hasattr(src, 'samples') if len(splits) > 1: start = 0 for i, split in enumerate(splits): split.source_id = '%s:%s' % (src.source_id, i) split.src_group_id = src.src_group_id split.id = src.id if has_serial: nr = split.num_ruptures split.serial = src.serial[start:start + nr] start += nr if has_samples: split.samples = src.samples elif splits: splits[0].id = src.id if has_serial: splits[0].serial = src.serial if has_samples: splits[0].samples = src.samples return sources, split_time
[ "def", "split_sources", "(", "srcs", ")", ":", "from", "openquake", ".", "hazardlib", ".", "source", "import", "splittable", "sources", "=", "[", "]", "split_time", "=", "{", "}", "# src.id -> time", "for", "src", "in", "srcs", ":", "t0", "=", "time", ".", "time", "(", ")", "mag_a", ",", "mag_b", "=", "src", ".", "get_min_max_mag", "(", ")", "min_mag", "=", "src", ".", "min_mag", "if", "mag_b", "<", "min_mag", ":", "# discard the source completely", "continue", "has_serial", "=", "hasattr", "(", "src", ",", "'serial'", ")", "if", "has_serial", ":", "src", ".", "serial", "=", "numpy", ".", "arange", "(", "src", ".", "serial", ",", "src", ".", "serial", "+", "src", ".", "num_ruptures", ")", "if", "not", "splittable", "(", "src", ")", ":", "sources", ".", "append", "(", "src", ")", "split_time", "[", "src", ".", "id", "]", "=", "time", ".", "time", "(", ")", "-", "t0", "continue", "if", "min_mag", ":", "splits", "=", "[", "]", "for", "s", "in", "src", ":", "s", ".", "min_mag", "=", "min_mag", "mag_a", ",", "mag_b", "=", "s", ".", "get_min_max_mag", "(", ")", "if", "mag_b", "<", "min_mag", ":", "continue", "s", ".", "num_ruptures", "=", "s", ".", "count_ruptures", "(", ")", "if", "s", ".", "num_ruptures", ":", "splits", ".", "append", "(", "s", ")", "else", ":", "splits", "=", "list", "(", "src", ")", "split_time", "[", "src", ".", "id", "]", "=", "time", ".", "time", "(", ")", "-", "t0", "sources", ".", "extend", "(", "splits", ")", "has_samples", "=", "hasattr", "(", "src", ",", "'samples'", ")", "if", "len", "(", "splits", ")", ">", "1", ":", "start", "=", "0", "for", "i", ",", "split", "in", "enumerate", "(", "splits", ")", ":", "split", ".", "source_id", "=", "'%s:%s'", "%", "(", "src", ".", "source_id", ",", "i", ")", "split", ".", "src_group_id", "=", "src", ".", "src_group_id", "split", ".", "id", "=", "src", ".", "id", "if", "has_serial", ":", "nr", "=", "split", ".", "num_ruptures", "split", ".", "serial", "=", "src", ".", "serial", "[", "start", ":", 
"start", "+", "nr", "]", "start", "+=", "nr", "if", "has_samples", ":", "split", ".", "samples", "=", "src", ".", "samples", "elif", "splits", ":", "# single source", "splits", "[", "0", "]", ".", "id", "=", "src", ".", "id", "if", "has_serial", ":", "splits", "[", "0", "]", ".", "serial", "=", "src", ".", "serial", "if", "has_samples", ":", "splits", "[", "0", "]", ".", "samples", "=", "src", ".", "samples", "return", "sources", ",", "split_time" ]
:param srcs: sources :returns: a pair (split sources, split time) or just the split_sources
[ ":", "param", "srcs", ":", "sources", ":", "returns", ":", "a", "pair", "(", "split", "sources", "split", "time", ")", "or", "just", "the", "split_sources" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/filters.py#L186-L241
gem/oq-engine
openquake/hazardlib/calc/filters.py
IntegrationDistance.get_bounding_box
def get_bounding_box(self, lon, lat, trt=None, mag=None): """ Build a bounding box around the given lon, lat by computing the maximum_distance at the given tectonic region type and magnitude. :param lon: longitude :param lat: latitude :param trt: tectonic region type, possibly None :param mag: magnitude, possibly None :returns: min_lon, min_lat, max_lon, max_lat """ if trt is None: # take the greatest integration distance maxdist = max(self(trt, mag) for trt in self.dic) else: # get the integration distance for the given TRT maxdist = self(trt, mag) a1 = min(maxdist * KM_TO_DEGREES, 90) a2 = min(angular_distance(maxdist, lat), 180) return lon - a2, lat - a1, lon + a2, lat + a1
python
def get_bounding_box(self, lon, lat, trt=None, mag=None): if trt is None: maxdist = max(self(trt, mag) for trt in self.dic) else: maxdist = self(trt, mag) a1 = min(maxdist * KM_TO_DEGREES, 90) a2 = min(angular_distance(maxdist, lat), 180) return lon - a2, lat - a1, lon + a2, lat + a1
[ "def", "get_bounding_box", "(", "self", ",", "lon", ",", "lat", ",", "trt", "=", "None", ",", "mag", "=", "None", ")", ":", "if", "trt", "is", "None", ":", "# take the greatest integration distance", "maxdist", "=", "max", "(", "self", "(", "trt", ",", "mag", ")", "for", "trt", "in", "self", ".", "dic", ")", "else", ":", "# get the integration distance for the given TRT", "maxdist", "=", "self", "(", "trt", ",", "mag", ")", "a1", "=", "min", "(", "maxdist", "*", "KM_TO_DEGREES", ",", "90", ")", "a2", "=", "min", "(", "angular_distance", "(", "maxdist", ",", "lat", ")", ",", "180", ")", "return", "lon", "-", "a2", ",", "lat", "-", "a1", ",", "lon", "+", "a2", ",", "lat", "+", "a1" ]
Build a bounding box around the given lon, lat by computing the maximum_distance at the given tectonic region type and magnitude. :param lon: longitude :param lat: latitude :param trt: tectonic region type, possibly None :param mag: magnitude, possibly None :returns: min_lon, min_lat, max_lon, max_lat
[ "Build", "a", "bounding", "box", "around", "the", "given", "lon", "lat", "by", "computing", "the", "maximum_distance", "at", "the", "given", "tectonic", "region", "type", "and", "magnitude", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/filters.py#L138-L155
gem/oq-engine
openquake/hazardlib/calc/filters.py
IntegrationDistance.get_affected_box
def get_affected_box(self, src): """ Get the enlarged bounding box of a source. :param src: a source object :returns: a bounding box (min_lon, min_lat, max_lon, max_lat) """ mag = src.get_min_max_mag()[1] maxdist = self(src.tectonic_region_type, mag) bbox = get_bounding_box(src, maxdist) return (fix_lon(bbox[0]), bbox[1], fix_lon(bbox[2]), bbox[3])
python
def get_affected_box(self, src): mag = src.get_min_max_mag()[1] maxdist = self(src.tectonic_region_type, mag) bbox = get_bounding_box(src, maxdist) return (fix_lon(bbox[0]), bbox[1], fix_lon(bbox[2]), bbox[3])
[ "def", "get_affected_box", "(", "self", ",", "src", ")", ":", "mag", "=", "src", ".", "get_min_max_mag", "(", ")", "[", "1", "]", "maxdist", "=", "self", "(", "src", ".", "tectonic_region_type", ",", "mag", ")", "bbox", "=", "get_bounding_box", "(", "src", ",", "maxdist", ")", "return", "(", "fix_lon", "(", "bbox", "[", "0", "]", ")", ",", "bbox", "[", "1", "]", ",", "fix_lon", "(", "bbox", "[", "2", "]", ")", ",", "bbox", "[", "3", "]", ")" ]
Get the enlarged bounding box of a source. :param src: a source object :returns: a bounding box (min_lon, min_lat, max_lon, max_lat)
[ "Get", "the", "enlarged", "bounding", "box", "of", "a", "source", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/filters.py#L157-L167
gem/oq-engine
openquake/hazardlib/calc/filters.py
SourceFilter.sitecol
def sitecol(self): """ Read the site collection from .filename and cache it """ if 'sitecol' in vars(self): return self.__dict__['sitecol'] if self.filename is None or not os.path.exists(self.filename): # case of nofilter/None sitecol return with hdf5.File(self.filename, 'r') as h5: self.__dict__['sitecol'] = sc = h5.get('sitecol') return sc
python
def sitecol(self): if 'sitecol' in vars(self): return self.__dict__['sitecol'] if self.filename is None or not os.path.exists(self.filename): return with hdf5.File(self.filename, 'r') as h5: self.__dict__['sitecol'] = sc = h5.get('sitecol') return sc
[ "def", "sitecol", "(", "self", ")", ":", "if", "'sitecol'", "in", "vars", "(", "self", ")", ":", "return", "self", ".", "__dict__", "[", "'sitecol'", "]", "if", "self", ".", "filename", "is", "None", "or", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "filename", ")", ":", "# case of nofilter/None sitecol", "return", "with", "hdf5", ".", "File", "(", "self", ".", "filename", ",", "'r'", ")", "as", "h5", ":", "self", ".", "__dict__", "[", "'sitecol'", "]", "=", "sc", "=", "h5", ".", "get", "(", "'sitecol'", ")", "return", "sc" ]
Read the site collection from .filename and cache it
[ "Read", "the", "site", "collection", "from", ".", "filename", "and", "cache", "it" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/filters.py#L276-L287
gem/oq-engine
openquake/hazardlib/calc/filters.py
SourceFilter.get_rectangle
def get_rectangle(self, src): """ :param src: a source object :returns: ((min_lon, min_lat), width, height), useful for plotting """ min_lon, min_lat, max_lon, max_lat = ( self.integration_distance.get_affected_box(src)) return (min_lon, min_lat), (max_lon - min_lon) % 360, max_lat - min_lat
python
def get_rectangle(self, src): min_lon, min_lat, max_lon, max_lat = ( self.integration_distance.get_affected_box(src)) return (min_lon, min_lat), (max_lon - min_lon) % 360, max_lat - min_lat
[ "def", "get_rectangle", "(", "self", ",", "src", ")", ":", "min_lon", ",", "min_lat", ",", "max_lon", ",", "max_lat", "=", "(", "self", ".", "integration_distance", ".", "get_affected_box", "(", "src", ")", ")", "return", "(", "min_lon", ",", "min_lat", ")", ",", "(", "max_lon", "-", "min_lon", ")", "%", "360", ",", "max_lat", "-", "min_lat" ]
:param src: a source object :returns: ((min_lon, min_lat), width, height), useful for plotting
[ ":", "param", "src", ":", "a", "source", "object", ":", "returns", ":", "((", "min_lon", "min_lat", ")", "width", "height", ")", "useful", "for", "plotting" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/filters.py#L289-L296
gem/oq-engine
openquake/hazardlib/calc/filters.py
SourceFilter.get_bounding_boxes
def get_bounding_boxes(self, trt=None, mag=None): """ :param trt: a tectonic region type (used for the integration distance) :param mag: a magnitude (used for the integration distance) :returns: a list of bounding boxes, one per site """ bbs = [] for site in self.sitecol: bb = self.integration_distance.get_bounding_box( site.location.longitude, site.location.latitude, trt, mag) bbs.append(bb) return bbs
python
def get_bounding_boxes(self, trt=None, mag=None): bbs = [] for site in self.sitecol: bb = self.integration_distance.get_bounding_box( site.location.longitude, site.location.latitude, trt, mag) bbs.append(bb) return bbs
[ "def", "get_bounding_boxes", "(", "self", ",", "trt", "=", "None", ",", "mag", "=", "None", ")", ":", "bbs", "=", "[", "]", "for", "site", "in", "self", ".", "sitecol", ":", "bb", "=", "self", ".", "integration_distance", ".", "get_bounding_box", "(", "site", ".", "location", ".", "longitude", ",", "site", ".", "location", ".", "latitude", ",", "trt", ",", "mag", ")", "bbs", ".", "append", "(", "bb", ")", "return", "bbs" ]
:param trt: a tectonic region type (used for the integration distance) :param mag: a magnitude (used for the integration distance) :returns: a list of bounding boxes, one per site
[ ":", "param", "trt", ":", "a", "tectonic", "region", "type", "(", "used", "for", "the", "integration", "distance", ")", ":", "param", "mag", ":", "a", "magnitude", "(", "used", "for", "the", "integration", "distance", ")", ":", "returns", ":", "a", "list", "of", "bounding", "boxes", "one", "per", "site" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/filters.py#L319-L330
gem/oq-engine
openquake/hazardlib/calc/filters.py
SourceFilter.close_sids
def close_sids(self, rec, trt, mag): """ :param rec: a record with fields minlon, minlat, maxlon, maxlat :param trt: tectonic region type string :param mag: magnitude :returns: the site indices within the bounding box enlarged by the integration distance for the given TRT and magnitude """ if self.sitecol is None: return [] elif not self.integration_distance: # do not filter return self.sitecol.sids if hasattr(rec, 'dtype'): bbox = rec['minlon'], rec['minlat'], rec['maxlon'], rec['maxlat'] else: bbox = rec # assume it is a 4-tuple maxdist = self.integration_distance(trt, mag) a1 = min(maxdist * KM_TO_DEGREES, 90) a2 = min(angular_distance(maxdist, bbox[1], bbox[3]), 180) bb = bbox[0] - a2, bbox[1] - a1, bbox[2] + a2, bbox[3] + a1 if hasattr(self, 'index'): # RtreeFilter return within(bb, self.index) return self.sitecol.within_bbox(bb)
python
def close_sids(self, rec, trt, mag): if self.sitecol is None: return [] elif not self.integration_distance: return self.sitecol.sids if hasattr(rec, 'dtype'): bbox = rec['minlon'], rec['minlat'], rec['maxlon'], rec['maxlat'] else: bbox = rec maxdist = self.integration_distance(trt, mag) a1 = min(maxdist * KM_TO_DEGREES, 90) a2 = min(angular_distance(maxdist, bbox[1], bbox[3]), 180) bb = bbox[0] - a2, bbox[1] - a1, bbox[2] + a2, bbox[3] + a1 if hasattr(self, 'index'): return within(bb, self.index) return self.sitecol.within_bbox(bb)
[ "def", "close_sids", "(", "self", ",", "rec", ",", "trt", ",", "mag", ")", ":", "if", "self", ".", "sitecol", "is", "None", ":", "return", "[", "]", "elif", "not", "self", ".", "integration_distance", ":", "# do not filter", "return", "self", ".", "sitecol", ".", "sids", "if", "hasattr", "(", "rec", ",", "'dtype'", ")", ":", "bbox", "=", "rec", "[", "'minlon'", "]", ",", "rec", "[", "'minlat'", "]", ",", "rec", "[", "'maxlon'", "]", ",", "rec", "[", "'maxlat'", "]", "else", ":", "bbox", "=", "rec", "# assume it is a 4-tuple", "maxdist", "=", "self", ".", "integration_distance", "(", "trt", ",", "mag", ")", "a1", "=", "min", "(", "maxdist", "*", "KM_TO_DEGREES", ",", "90", ")", "a2", "=", "min", "(", "angular_distance", "(", "maxdist", ",", "bbox", "[", "1", "]", ",", "bbox", "[", "3", "]", ")", ",", "180", ")", "bb", "=", "bbox", "[", "0", "]", "-", "a2", ",", "bbox", "[", "1", "]", "-", "a1", ",", "bbox", "[", "2", "]", "+", "a2", ",", "bbox", "[", "3", "]", "+", "a1", "if", "hasattr", "(", "self", ",", "'index'", ")", ":", "# RtreeFilter", "return", "within", "(", "bb", ",", "self", ".", "index", ")", "return", "self", ".", "sitecol", ".", "within_bbox", "(", "bb", ")" ]
:param rec: a record with fields minlon, minlat, maxlon, maxlat :param trt: tectonic region type string :param mag: magnitude :returns: the site indices within the bounding box enlarged by the integration distance for the given TRT and magnitude
[ ":", "param", "rec", ":", "a", "record", "with", "fields", "minlon", "minlat", "maxlon", "maxlat", ":", "param", "trt", ":", "tectonic", "region", "type", "string", ":", "param", "mag", ":", "magnitude", ":", "returns", ":", "the", "site", "indices", "within", "the", "bounding", "box", "enlarged", "by", "the", "integration", "distance", "for", "the", "given", "TRT", "and", "magnitude" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/filters.py#L332-L358
gem/oq-engine
openquake/hazardlib/calc/filters.py
SourceFilter.filter
def filter(self, sources): """ :param sources: a sequence of sources :yields: sources with .indices """ for src in sources: if hasattr(src, 'indices'): # already filtered yield src continue box = self.integration_distance.get_affected_box(src) indices = self.sitecol.within_bbox(box) if len(indices): src.indices = indices yield src
python
def filter(self, sources): for src in sources: if hasattr(src, 'indices'): yield src continue box = self.integration_distance.get_affected_box(src) indices = self.sitecol.within_bbox(box) if len(indices): src.indices = indices yield src
[ "def", "filter", "(", "self", ",", "sources", ")", ":", "for", "src", "in", "sources", ":", "if", "hasattr", "(", "src", ",", "'indices'", ")", ":", "# already filtered", "yield", "src", "continue", "box", "=", "self", ".", "integration_distance", ".", "get_affected_box", "(", "src", ")", "indices", "=", "self", ".", "sitecol", ".", "within_bbox", "(", "box", ")", "if", "len", "(", "indices", ")", ":", "src", ".", "indices", "=", "indices", "yield", "src" ]
:param sources: a sequence of sources :yields: sources with .indices
[ ":", "param", "sources", ":", "a", "sequence", "of", "sources", ":", "yields", ":", "sources", "with", ".", "indices" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/filters.py#L360-L373
gem/oq-engine
openquake/hazardlib/calc/filters.py
RtreeFilter.filter
def filter(self, sources): """ :param sources: a sequence of sources :yields: rtree-filtered sources """ if self.sitecol is None: # do not filter yield from sources return for src in sources: box = self.integration_distance.get_affected_box(src) indices = within(box, self.index) if len(indices): src.indices = indices yield src
python
def filter(self, sources): if self.sitecol is None: yield from sources return for src in sources: box = self.integration_distance.get_affected_box(src) indices = within(box, self.index) if len(indices): src.indices = indices yield src
[ "def", "filter", "(", "self", ",", "sources", ")", ":", "if", "self", ".", "sitecol", "is", "None", ":", "# do not filter", "yield", "from", "sources", "return", "for", "src", "in", "sources", ":", "box", "=", "self", ".", "integration_distance", ".", "get_affected_box", "(", "src", ")", "indices", "=", "within", "(", "box", ",", "self", ".", "index", ")", "if", "len", "(", "indices", ")", ":", "src", ".", "indices", "=", "indices", "yield", "src" ]
:param sources: a sequence of sources :yields: rtree-filtered sources
[ ":", "param", "sources", ":", "a", "sequence", "of", "sources", ":", "yields", ":", "rtree", "-", "filtered", "sources" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/calc/filters.py#L419-L432
gem/oq-engine
openquake/commands/to_shapefile.py
to_shapefile
def to_shapefile(output, input_nrml_file, validate): """ Convert a NRML source model file to ESRI Shapefile(s). For each type of source geometry defined in the NRML file (point, area, simple fault, complex fault, planar) a separate shapefile is created. Each shapefile is differentiated by a specific ending('_point', '_area', '_simple', '_complex', '_planar'). NB: nonparametric sources are not supported. """ input_parser = shapefileparser.SourceModelParser() source_model = input_parser.read(input_nrml_file, validate) if not output: output = os.path.splitext(input_nrml_file)[0] print('Extracting %s_ files' % output) shapefileparser.ShapefileParser().write(output, source_model)
python
def to_shapefile(output, input_nrml_file, validate): input_parser = shapefileparser.SourceModelParser() source_model = input_parser.read(input_nrml_file, validate) if not output: output = os.path.splitext(input_nrml_file)[0] print('Extracting %s_ files' % output) shapefileparser.ShapefileParser().write(output, source_model)
[ "def", "to_shapefile", "(", "output", ",", "input_nrml_file", ",", "validate", ")", ":", "input_parser", "=", "shapefileparser", ".", "SourceModelParser", "(", ")", "source_model", "=", "input_parser", ".", "read", "(", "input_nrml_file", ",", "validate", ")", "if", "not", "output", ":", "output", "=", "os", ".", "path", ".", "splitext", "(", "input_nrml_file", ")", "[", "0", "]", "print", "(", "'Extracting %s_ files'", "%", "output", ")", "shapefileparser", ".", "ShapefileParser", "(", ")", ".", "write", "(", "output", ",", "source_model", ")" ]
Convert a NRML source model file to ESRI Shapefile(s). For each type of source geometry defined in the NRML file (point, area, simple fault, complex fault, planar) a separate shapefile is created. Each shapefile is differentiated by a specific ending('_point', '_area', '_simple', '_complex', '_planar'). NB: nonparametric sources are not supported.
[ "Convert", "a", "NRML", "source", "model", "file", "to", "ESRI", "Shapefile", "(", "s", ")", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/to_shapefile.py#L24-L40
gem/oq-engine
openquake/hazardlib/geo/surface/simple_fault.py
simple_fault_node
def simple_fault_node(fault_trace, dip, upper_depth, lower_depth): """ :param fault_trace: an object with an attribute .points :param dip: dip parameter :param upper_depth: upper seismogenic depth :param lower_depth: lower seismogenic depth :returns: a Node of kind simpleFaultGeometry """ node = Node('simpleFaultGeometry') line = [] for p in fault_trace.points: line.append(p.longitude) line.append(p.latitude) node.append(Node('gml:LineString', nodes=[Node('gml:posList', {}, line)])) node.append(Node('dip', {}, dip)) node.append(Node('upperSeismoDepth', {}, upper_depth)) node.append(Node('lowerSeismoDepth', {}, lower_depth)) return node
python
def simple_fault_node(fault_trace, dip, upper_depth, lower_depth): node = Node('simpleFaultGeometry') line = [] for p in fault_trace.points: line.append(p.longitude) line.append(p.latitude) node.append(Node('gml:LineString', nodes=[Node('gml:posList', {}, line)])) node.append(Node('dip', {}, dip)) node.append(Node('upperSeismoDepth', {}, upper_depth)) node.append(Node('lowerSeismoDepth', {}, lower_depth)) return node
[ "def", "simple_fault_node", "(", "fault_trace", ",", "dip", ",", "upper_depth", ",", "lower_depth", ")", ":", "node", "=", "Node", "(", "'simpleFaultGeometry'", ")", "line", "=", "[", "]", "for", "p", "in", "fault_trace", ".", "points", ":", "line", ".", "append", "(", "p", ".", "longitude", ")", "line", ".", "append", "(", "p", ".", "latitude", ")", "node", ".", "append", "(", "Node", "(", "'gml:LineString'", ",", "nodes", "=", "[", "Node", "(", "'gml:posList'", ",", "{", "}", ",", "line", ")", "]", ")", ")", "node", ".", "append", "(", "Node", "(", "'dip'", ",", "{", "}", ",", "dip", ")", ")", "node", ".", "append", "(", "Node", "(", "'upperSeismoDepth'", ",", "{", "}", ",", "upper_depth", ")", ")", "node", ".", "append", "(", "Node", "(", "'lowerSeismoDepth'", ",", "{", "}", ",", "lower_depth", ")", ")", "return", "node" ]
:param fault_trace: an object with an attribute .points :param dip: dip parameter :param upper_depth: upper seismogenic depth :param lower_depth: lower seismogenic depth :returns: a Node of kind simpleFaultGeometry
[ ":", "param", "fault_trace", ":", "an", "object", "with", "an", "attribute", ".", "points", ":", "param", "dip", ":", "dip", "parameter", ":", "param", "upper_depth", ":", "upper", "seismogenic", "depth", ":", "param", "lower_depth", ":", "lower", "seismogenic", "depth", ":", "returns", ":", "a", "Node", "of", "kind", "simpleFaultGeometry" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/simple_fault.py#L35-L52
gem/oq-engine
openquake/hazardlib/geo/surface/simple_fault.py
SimpleFaultSurface.check_fault_data
def check_fault_data(cls, fault_trace, upper_seismogenic_depth, lower_seismogenic_depth, dip, mesh_spacing): """ Verify the fault data and raise ``ValueError`` if anything is wrong. This method doesn't have to be called by hands before creating the surface object, because it is called from :meth:`from_fault_data`. """ if not len(fault_trace) >= 2: raise ValueError("the fault trace must have at least two points") if not fault_trace.horizontal(): raise ValueError("the fault trace must be horizontal") tlats = [point.latitude for point in fault_trace.points] tlons = [point.longitude for point in fault_trace.points] if geo_utils.line_intersects_itself(tlons, tlats): raise ValueError("fault trace intersects itself") if not 0.0 < dip <= 90.0: raise ValueError("dip must be between 0.0 and 90.0") if not lower_seismogenic_depth > upper_seismogenic_depth: raise ValueError("lower seismogenic depth must be greater than " "upper seismogenic depth") if not upper_seismogenic_depth >= fault_trace[0].depth: raise ValueError("upper seismogenic depth must be greater than " "or equal to depth of fault trace") if not mesh_spacing > 0.0: raise ValueError("mesh spacing must be positive")
python
def check_fault_data(cls, fault_trace, upper_seismogenic_depth, lower_seismogenic_depth, dip, mesh_spacing): if not len(fault_trace) >= 2: raise ValueError("the fault trace must have at least two points") if not fault_trace.horizontal(): raise ValueError("the fault trace must be horizontal") tlats = [point.latitude for point in fault_trace.points] tlons = [point.longitude for point in fault_trace.points] if geo_utils.line_intersects_itself(tlons, tlats): raise ValueError("fault trace intersects itself") if not 0.0 < dip <= 90.0: raise ValueError("dip must be between 0.0 and 90.0") if not lower_seismogenic_depth > upper_seismogenic_depth: raise ValueError("lower seismogenic depth must be greater than " "upper seismogenic depth") if not upper_seismogenic_depth >= fault_trace[0].depth: raise ValueError("upper seismogenic depth must be greater than " "or equal to depth of fault trace") if not mesh_spacing > 0.0: raise ValueError("mesh spacing must be positive")
[ "def", "check_fault_data", "(", "cls", ",", "fault_trace", ",", "upper_seismogenic_depth", ",", "lower_seismogenic_depth", ",", "dip", ",", "mesh_spacing", ")", ":", "if", "not", "len", "(", "fault_trace", ")", ">=", "2", ":", "raise", "ValueError", "(", "\"the fault trace must have at least two points\"", ")", "if", "not", "fault_trace", ".", "horizontal", "(", ")", ":", "raise", "ValueError", "(", "\"the fault trace must be horizontal\"", ")", "tlats", "=", "[", "point", ".", "latitude", "for", "point", "in", "fault_trace", ".", "points", "]", "tlons", "=", "[", "point", ".", "longitude", "for", "point", "in", "fault_trace", ".", "points", "]", "if", "geo_utils", ".", "line_intersects_itself", "(", "tlons", ",", "tlats", ")", ":", "raise", "ValueError", "(", "\"fault trace intersects itself\"", ")", "if", "not", "0.0", "<", "dip", "<=", "90.0", ":", "raise", "ValueError", "(", "\"dip must be between 0.0 and 90.0\"", ")", "if", "not", "lower_seismogenic_depth", ">", "upper_seismogenic_depth", ":", "raise", "ValueError", "(", "\"lower seismogenic depth must be greater than \"", "\"upper seismogenic depth\"", ")", "if", "not", "upper_seismogenic_depth", ">=", "fault_trace", "[", "0", "]", ".", "depth", ":", "raise", "ValueError", "(", "\"upper seismogenic depth must be greater than \"", "\"or equal to depth of fault trace\"", ")", "if", "not", "mesh_spacing", ">", "0.0", ":", "raise", "ValueError", "(", "\"mesh spacing must be positive\"", ")" ]
Verify the fault data and raise ``ValueError`` if anything is wrong. This method doesn't have to be called by hands before creating the surface object, because it is called from :meth:`from_fault_data`.
[ "Verify", "the", "fault", "data", "and", "raise", "ValueError", "if", "anything", "is", "wrong", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/simple_fault.py#L107-L132
gem/oq-engine
openquake/hazardlib/geo/surface/simple_fault.py
SimpleFaultSurface.from_fault_data
def from_fault_data(cls, fault_trace, upper_seismogenic_depth, lower_seismogenic_depth, dip, mesh_spacing): """ Create and return a fault surface using fault source data. :param openquake.hazardlib.geo.line.Line fault_trace: Geographical line representing the intersection between the fault surface and the earth surface. The line must be horizontal (i.e. all depth values must be equal). If the depths are not given, they are assumed to be zero, meaning the trace intersects the surface at sea level, e.g. fault_trace = Line([Point(1, 1), Point(1, 2)]). :param upper_seismo_depth: Minimum depth ruptures can reach, in km (i.e. depth to fault's top edge). :param lower_seismo_depth: Maximum depth ruptures can reach, in km (i.e. depth to fault's bottom edge). :param dip: Dip angle (i.e. angle between fault surface and earth surface), in degrees. :param mesh_spacing: Distance between two subsequent points in a mesh, in km. :returns: An instance of :class:`SimpleFaultSurface` created using that data. Uses :meth:`check_fault_data` for checking parameters. """ cls.check_fault_data(fault_trace, upper_seismogenic_depth, lower_seismogenic_depth, dip, mesh_spacing) # Loops over points in the top edge, for each point # on the top edge compute corresponding point on the bottom edge, then # computes equally spaced points between top and bottom points. 
vdist_top = upper_seismogenic_depth - fault_trace[0].depth vdist_bottom = lower_seismogenic_depth - fault_trace[0].depth hdist_top = vdist_top / math.tan(math.radians(dip)) hdist_bottom = vdist_bottom / math.tan(math.radians(dip)) strike = fault_trace[0].azimuth(fault_trace[-1]) azimuth = (strike + 90.0) % 360 mesh = [] for point in fault_trace.resample(mesh_spacing): top = point.point_at(hdist_top, vdist_top, azimuth) bottom = point.point_at(hdist_bottom, vdist_bottom, azimuth) mesh.append(top.equally_spaced_points(bottom, mesh_spacing)) # number of rows corresponds to number of points along dip # number of columns corresponds to number of points along strike surface_points = numpy.array(mesh).transpose().tolist() mesh = RectangularMesh.from_points_list(surface_points) assert 1 not in mesh.shape, ( "Mesh must have at least 2 nodes along both length and width." " Possible cause: Mesh spacing could be too large with respect to" " the fault length and width." ) self = cls(mesh) self.surface_nodes = [simple_fault_node( fault_trace, dip, upper_seismogenic_depth, lower_seismogenic_depth)] return self
python
def from_fault_data(cls, fault_trace, upper_seismogenic_depth, lower_seismogenic_depth, dip, mesh_spacing): cls.check_fault_data(fault_trace, upper_seismogenic_depth, lower_seismogenic_depth, dip, mesh_spacing) vdist_top = upper_seismogenic_depth - fault_trace[0].depth vdist_bottom = lower_seismogenic_depth - fault_trace[0].depth hdist_top = vdist_top / math.tan(math.radians(dip)) hdist_bottom = vdist_bottom / math.tan(math.radians(dip)) strike = fault_trace[0].azimuth(fault_trace[-1]) azimuth = (strike + 90.0) % 360 mesh = [] for point in fault_trace.resample(mesh_spacing): top = point.point_at(hdist_top, vdist_top, azimuth) bottom = point.point_at(hdist_bottom, vdist_bottom, azimuth) mesh.append(top.equally_spaced_points(bottom, mesh_spacing)) surface_points = numpy.array(mesh).transpose().tolist() mesh = RectangularMesh.from_points_list(surface_points) assert 1 not in mesh.shape, ( "Mesh must have at least 2 nodes along both length and width." " Possible cause: Mesh spacing could be too large with respect to" " the fault length and width." ) self = cls(mesh) self.surface_nodes = [simple_fault_node( fault_trace, dip, upper_seismogenic_depth, lower_seismogenic_depth)] return self
[ "def", "from_fault_data", "(", "cls", ",", "fault_trace", ",", "upper_seismogenic_depth", ",", "lower_seismogenic_depth", ",", "dip", ",", "mesh_spacing", ")", ":", "cls", ".", "check_fault_data", "(", "fault_trace", ",", "upper_seismogenic_depth", ",", "lower_seismogenic_depth", ",", "dip", ",", "mesh_spacing", ")", "# Loops over points in the top edge, for each point", "# on the top edge compute corresponding point on the bottom edge, then", "# computes equally spaced points between top and bottom points.", "vdist_top", "=", "upper_seismogenic_depth", "-", "fault_trace", "[", "0", "]", ".", "depth", "vdist_bottom", "=", "lower_seismogenic_depth", "-", "fault_trace", "[", "0", "]", ".", "depth", "hdist_top", "=", "vdist_top", "/", "math", ".", "tan", "(", "math", ".", "radians", "(", "dip", ")", ")", "hdist_bottom", "=", "vdist_bottom", "/", "math", ".", "tan", "(", "math", ".", "radians", "(", "dip", ")", ")", "strike", "=", "fault_trace", "[", "0", "]", ".", "azimuth", "(", "fault_trace", "[", "-", "1", "]", ")", "azimuth", "=", "(", "strike", "+", "90.0", ")", "%", "360", "mesh", "=", "[", "]", "for", "point", "in", "fault_trace", ".", "resample", "(", "mesh_spacing", ")", ":", "top", "=", "point", ".", "point_at", "(", "hdist_top", ",", "vdist_top", ",", "azimuth", ")", "bottom", "=", "point", ".", "point_at", "(", "hdist_bottom", ",", "vdist_bottom", ",", "azimuth", ")", "mesh", ".", "append", "(", "top", ".", "equally_spaced_points", "(", "bottom", ",", "mesh_spacing", ")", ")", "# number of rows corresponds to number of points along dip", "# number of columns corresponds to number of points along strike", "surface_points", "=", "numpy", ".", "array", "(", "mesh", ")", ".", "transpose", "(", ")", ".", "tolist", "(", ")", "mesh", "=", "RectangularMesh", ".", "from_points_list", "(", "surface_points", ")", "assert", "1", "not", "in", "mesh", ".", "shape", ",", "(", "\"Mesh must have at least 2 nodes along both length and width.\"", "\" Possible cause: Mesh 
spacing could be too large with respect to\"", "\" the fault length and width.\"", ")", "self", "=", "cls", "(", "mesh", ")", "self", ".", "surface_nodes", "=", "[", "simple_fault_node", "(", "fault_trace", ",", "dip", ",", "upper_seismogenic_depth", ",", "lower_seismogenic_depth", ")", "]", "return", "self" ]
Create and return a fault surface using fault source data. :param openquake.hazardlib.geo.line.Line fault_trace: Geographical line representing the intersection between the fault surface and the earth surface. The line must be horizontal (i.e. all depth values must be equal). If the depths are not given, they are assumed to be zero, meaning the trace intersects the surface at sea level, e.g. fault_trace = Line([Point(1, 1), Point(1, 2)]). :param upper_seismo_depth: Minimum depth ruptures can reach, in km (i.e. depth to fault's top edge). :param lower_seismo_depth: Maximum depth ruptures can reach, in km (i.e. depth to fault's bottom edge). :param dip: Dip angle (i.e. angle between fault surface and earth surface), in degrees. :param mesh_spacing: Distance between two subsequent points in a mesh, in km. :returns: An instance of :class:`SimpleFaultSurface` created using that data. Uses :meth:`check_fault_data` for checking parameters.
[ "Create", "and", "return", "a", "fault", "surface", "using", "fault", "source", "data", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/simple_fault.py#L135-L196
gem/oq-engine
openquake/hazardlib/geo/surface/simple_fault.py
SimpleFaultSurface.get_fault_patch_vertices
def get_fault_patch_vertices(cls, rupture_top_edge, upper_seismogenic_depth, lower_seismogenic_depth, dip, index_patch=1): """ Get surface main vertices. Parameters are the same as for :meth:`from_fault_data`, excluding fault_trace, and mesh spacing. :param rupture_top_edge: A instances of :class:`openquake.hazardlib.geo.line.Line` representing the rupture surface's top edge. :param index_patch: Indicate the patch of the fault in order to output the vertices. The fault patch numbering follows the same logic of the right-hand rule i.e. patch with index 1 is the first patch along the trace. :returns: Four :class:~openquake.hazardlib.geo.point.Point objects representing the four vertices of the target patch. """ # Similar to :meth:`from_fault_data`, we just don't resample edges dip_tan = math.tan(math.radians(dip)) hdist_bottom = ( lower_seismogenic_depth - upper_seismogenic_depth) / dip_tan strike = rupture_top_edge[0].azimuth(rupture_top_edge[-1]) azimuth = (strike + 90.0) % 360 # Collect coordinates of vertices on the top and bottom edge lons = [] lats = [] deps = [] t_lon = [] t_lat = [] t_dep = [] for point in rupture_top_edge.points: top_edge_point = point bottom_edge_point = point.point_at(hdist_bottom, 0, azimuth) lons.append(top_edge_point.longitude) lats.append(top_edge_point.latitude) deps.append(upper_seismogenic_depth) t_lon.append(bottom_edge_point.longitude) t_lat.append(bottom_edge_point.latitude) t_dep.append(lower_seismogenic_depth) all_lons = numpy.array(lons + list(reversed(t_lon)), float) all_lats = numpy.array(lats + list(reversed(t_lat)), float) all_deps = numpy.array(deps + list(reversed(t_dep)), float) index1 = int(index_patch - 1) index2 = int(index_patch) index3 = int(2 * len(rupture_top_edge) - (index_patch + 1)) index4 = int(2 * len(rupture_top_edge) - index_patch) p0 = Point(all_lons[index1], all_lats[index1], all_deps[index1]) p1 = Point(all_lons[index2], all_lats[index2], all_deps[index2]) p2 = Point(all_lons[index3], all_lats[index3], 
all_deps[index3]) p3 = Point(all_lons[index4], all_lats[index4], all_deps[index4]) return p0, p1, p2, p3
python
def get_fault_patch_vertices(cls, rupture_top_edge, upper_seismogenic_depth, lower_seismogenic_depth, dip, index_patch=1): dip_tan = math.tan(math.radians(dip)) hdist_bottom = ( lower_seismogenic_depth - upper_seismogenic_depth) / dip_tan strike = rupture_top_edge[0].azimuth(rupture_top_edge[-1]) azimuth = (strike + 90.0) % 360 lons = [] lats = [] deps = [] t_lon = [] t_lat = [] t_dep = [] for point in rupture_top_edge.points: top_edge_point = point bottom_edge_point = point.point_at(hdist_bottom, 0, azimuth) lons.append(top_edge_point.longitude) lats.append(top_edge_point.latitude) deps.append(upper_seismogenic_depth) t_lon.append(bottom_edge_point.longitude) t_lat.append(bottom_edge_point.latitude) t_dep.append(lower_seismogenic_depth) all_lons = numpy.array(lons + list(reversed(t_lon)), float) all_lats = numpy.array(lats + list(reversed(t_lat)), float) all_deps = numpy.array(deps + list(reversed(t_dep)), float) index1 = int(index_patch - 1) index2 = int(index_patch) index3 = int(2 * len(rupture_top_edge) - (index_patch + 1)) index4 = int(2 * len(rupture_top_edge) - index_patch) p0 = Point(all_lons[index1], all_lats[index1], all_deps[index1]) p1 = Point(all_lons[index2], all_lats[index2], all_deps[index2]) p2 = Point(all_lons[index3], all_lats[index3], all_deps[index3]) p3 = Point(all_lons[index4], all_lats[index4], all_deps[index4]) return p0, p1, p2, p3
[ "def", "get_fault_patch_vertices", "(", "cls", ",", "rupture_top_edge", ",", "upper_seismogenic_depth", ",", "lower_seismogenic_depth", ",", "dip", ",", "index_patch", "=", "1", ")", ":", "# Similar to :meth:`from_fault_data`, we just don't resample edges", "dip_tan", "=", "math", ".", "tan", "(", "math", ".", "radians", "(", "dip", ")", ")", "hdist_bottom", "=", "(", "lower_seismogenic_depth", "-", "upper_seismogenic_depth", ")", "/", "dip_tan", "strike", "=", "rupture_top_edge", "[", "0", "]", ".", "azimuth", "(", "rupture_top_edge", "[", "-", "1", "]", ")", "azimuth", "=", "(", "strike", "+", "90.0", ")", "%", "360", "# Collect coordinates of vertices on the top and bottom edge", "lons", "=", "[", "]", "lats", "=", "[", "]", "deps", "=", "[", "]", "t_lon", "=", "[", "]", "t_lat", "=", "[", "]", "t_dep", "=", "[", "]", "for", "point", "in", "rupture_top_edge", ".", "points", ":", "top_edge_point", "=", "point", "bottom_edge_point", "=", "point", ".", "point_at", "(", "hdist_bottom", ",", "0", ",", "azimuth", ")", "lons", ".", "append", "(", "top_edge_point", ".", "longitude", ")", "lats", ".", "append", "(", "top_edge_point", ".", "latitude", ")", "deps", ".", "append", "(", "upper_seismogenic_depth", ")", "t_lon", ".", "append", "(", "bottom_edge_point", ".", "longitude", ")", "t_lat", ".", "append", "(", "bottom_edge_point", ".", "latitude", ")", "t_dep", ".", "append", "(", "lower_seismogenic_depth", ")", "all_lons", "=", "numpy", ".", "array", "(", "lons", "+", "list", "(", "reversed", "(", "t_lon", ")", ")", ",", "float", ")", "all_lats", "=", "numpy", ".", "array", "(", "lats", "+", "list", "(", "reversed", "(", "t_lat", ")", ")", ",", "float", ")", "all_deps", "=", "numpy", ".", "array", "(", "deps", "+", "list", "(", "reversed", "(", "t_dep", ")", ")", ",", "float", ")", "index1", "=", "int", "(", "index_patch", "-", "1", ")", "index2", "=", "int", "(", "index_patch", ")", "index3", "=", "int", "(", "2", "*", "len", "(", "rupture_top_edge", ")", "-", 
"(", "index_patch", "+", "1", ")", ")", "index4", "=", "int", "(", "2", "*", "len", "(", "rupture_top_edge", ")", "-", "index_patch", ")", "p0", "=", "Point", "(", "all_lons", "[", "index1", "]", ",", "all_lats", "[", "index1", "]", ",", "all_deps", "[", "index1", "]", ")", "p1", "=", "Point", "(", "all_lons", "[", "index2", "]", ",", "all_lats", "[", "index2", "]", ",", "all_deps", "[", "index2", "]", ")", "p2", "=", "Point", "(", "all_lons", "[", "index3", "]", ",", "all_lats", "[", "index3", "]", ",", "all_deps", "[", "index3", "]", ")", "p3", "=", "Point", "(", "all_lons", "[", "index4", "]", ",", "all_lats", "[", "index4", "]", ",", "all_deps", "[", "index4", "]", ")", "return", "p0", ",", "p1", ",", "p2", ",", "p3" ]
Get surface main vertices. Parameters are the same as for :meth:`from_fault_data`, excluding fault_trace, and mesh spacing. :param rupture_top_edge: A instances of :class:`openquake.hazardlib.geo.line.Line` representing the rupture surface's top edge. :param index_patch: Indicate the patch of the fault in order to output the vertices. The fault patch numbering follows the same logic of the right-hand rule i.e. patch with index 1 is the first patch along the trace. :returns: Four :class:~openquake.hazardlib.geo.point.Point objects representing the four vertices of the target patch.
[ "Get", "surface", "main", "vertices", ".", "Parameters", "are", "the", "same", "as", "for", ":", "meth", ":", "from_fault_data", "excluding", "fault_trace", "and", "mesh", "spacing", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/simple_fault.py#L199-L253
gem/oq-engine
openquake/hazardlib/geo/surface/simple_fault.py
SimpleFaultSurface.hypocentre_patch_index
def hypocentre_patch_index(cls, hypocentre, rupture_top_edge, upper_seismogenic_depth, lower_seismogenic_depth, dip): """ This methods finds the index of the fault patch including the hypocentre. :param hypocentre: :class:`~openquake.hazardlib.geo.point.Point` object representing the location of hypocentre. :param rupture_top_edge: A instances of :class:`openquake.hazardlib.geo.line.Line` representing the rupture surface's top edge. :param upper_seismo_depth: Minimum depth ruptures can reach, in km (i.e. depth to fault's top edge). :param lower_seismo_depth: Maximum depth ruptures can reach, in km (i.e. depth to fault's bottom edge). :param dip: Dip angle (i.e. angle between fault surface and earth surface), in degrees. :return: An integer corresponding to the index of the fault patch which contains the hypocentre. """ totaln_patch = len(rupture_top_edge) indexlist = [] dist_list = [] for i, index in enumerate(range(1, totaln_patch)): p0, p1, p2, p3 = cls.get_fault_patch_vertices( rupture_top_edge, upper_seismogenic_depth, lower_seismogenic_depth, dip, index_patch=index) [normal, dist_to_plane] = get_plane_equation(p0, p1, p2, hypocentre) indexlist.append(index) dist_list.append(dist_to_plane) if numpy.allclose(dist_to_plane, 0., atol=25., rtol=0.): return index break index = indexlist[numpy.argmin(dist_list)] return index
python
def hypocentre_patch_index(cls, hypocentre, rupture_top_edge, upper_seismogenic_depth, lower_seismogenic_depth, dip): totaln_patch = len(rupture_top_edge) indexlist = [] dist_list = [] for i, index in enumerate(range(1, totaln_patch)): p0, p1, p2, p3 = cls.get_fault_patch_vertices( rupture_top_edge, upper_seismogenic_depth, lower_seismogenic_depth, dip, index_patch=index) [normal, dist_to_plane] = get_plane_equation(p0, p1, p2, hypocentre) indexlist.append(index) dist_list.append(dist_to_plane) if numpy.allclose(dist_to_plane, 0., atol=25., rtol=0.): return index break index = indexlist[numpy.argmin(dist_list)] return index
[ "def", "hypocentre_patch_index", "(", "cls", ",", "hypocentre", ",", "rupture_top_edge", ",", "upper_seismogenic_depth", ",", "lower_seismogenic_depth", ",", "dip", ")", ":", "totaln_patch", "=", "len", "(", "rupture_top_edge", ")", "indexlist", "=", "[", "]", "dist_list", "=", "[", "]", "for", "i", ",", "index", "in", "enumerate", "(", "range", "(", "1", ",", "totaln_patch", ")", ")", ":", "p0", ",", "p1", ",", "p2", ",", "p3", "=", "cls", ".", "get_fault_patch_vertices", "(", "rupture_top_edge", ",", "upper_seismogenic_depth", ",", "lower_seismogenic_depth", ",", "dip", ",", "index_patch", "=", "index", ")", "[", "normal", ",", "dist_to_plane", "]", "=", "get_plane_equation", "(", "p0", ",", "p1", ",", "p2", ",", "hypocentre", ")", "indexlist", ".", "append", "(", "index", ")", "dist_list", ".", "append", "(", "dist_to_plane", ")", "if", "numpy", ".", "allclose", "(", "dist_to_plane", ",", "0.", ",", "atol", "=", "25.", ",", "rtol", "=", "0.", ")", ":", "return", "index", "break", "index", "=", "indexlist", "[", "numpy", ".", "argmin", "(", "dist_list", ")", "]", "return", "index" ]
This methods finds the index of the fault patch including the hypocentre. :param hypocentre: :class:`~openquake.hazardlib.geo.point.Point` object representing the location of hypocentre. :param rupture_top_edge: A instances of :class:`openquake.hazardlib.geo.line.Line` representing the rupture surface's top edge. :param upper_seismo_depth: Minimum depth ruptures can reach, in km (i.e. depth to fault's top edge). :param lower_seismo_depth: Maximum depth ruptures can reach, in km (i.e. depth to fault's bottom edge). :param dip: Dip angle (i.e. angle between fault surface and earth surface), in degrees. :return: An integer corresponding to the index of the fault patch which contains the hypocentre.
[ "This", "methods", "finds", "the", "index", "of", "the", "fault", "patch", "including", "the", "hypocentre", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/simple_fault.py#L256-L298
gem/oq-engine
openquake/hazardlib/geo/surface/simple_fault.py
SimpleFaultSurface.get_surface_vertexes
def get_surface_vertexes(cls, fault_trace, upper_seismogenic_depth, lower_seismogenic_depth, dip): """ Get surface main vertexes. Parameters are the same as for :meth:`from_fault_data`, excluding mesh spacing. :returns: Instance of :class:`~openquake.hazardlib.geo.polygon.Polygon` describing the surface projection of the simple fault with specified parameters. """ # Similar to :meth:`from_fault_data`, we just don't resample edges dip_tan = math.tan(math.radians(dip)) hdist_top = upper_seismogenic_depth / dip_tan hdist_bottom = lower_seismogenic_depth / dip_tan strike = fault_trace[0].azimuth(fault_trace[-1]) azimuth = (strike + 90.0) % 360 # Collect coordinates of vertices on the top and bottom edge lons = [] lats = [] for point in fault_trace.points: top_edge_point = point.point_at(hdist_top, 0, azimuth) bottom_edge_point = point.point_at(hdist_bottom, 0, azimuth) lons.append(top_edge_point.longitude) lats.append(top_edge_point.latitude) lons.append(bottom_edge_point.longitude) lats.append(bottom_edge_point.latitude) lons = numpy.array(lons, float) lats = numpy.array(lats, float) return lons, lats
python
def get_surface_vertexes(cls, fault_trace, upper_seismogenic_depth, lower_seismogenic_depth, dip): dip_tan = math.tan(math.radians(dip)) hdist_top = upper_seismogenic_depth / dip_tan hdist_bottom = lower_seismogenic_depth / dip_tan strike = fault_trace[0].azimuth(fault_trace[-1]) azimuth = (strike + 90.0) % 360 lons = [] lats = [] for point in fault_trace.points: top_edge_point = point.point_at(hdist_top, 0, azimuth) bottom_edge_point = point.point_at(hdist_bottom, 0, azimuth) lons.append(top_edge_point.longitude) lats.append(top_edge_point.latitude) lons.append(bottom_edge_point.longitude) lats.append(bottom_edge_point.latitude) lons = numpy.array(lons, float) lats = numpy.array(lats, float) return lons, lats
[ "def", "get_surface_vertexes", "(", "cls", ",", "fault_trace", ",", "upper_seismogenic_depth", ",", "lower_seismogenic_depth", ",", "dip", ")", ":", "# Similar to :meth:`from_fault_data`, we just don't resample edges", "dip_tan", "=", "math", ".", "tan", "(", "math", ".", "radians", "(", "dip", ")", ")", "hdist_top", "=", "upper_seismogenic_depth", "/", "dip_tan", "hdist_bottom", "=", "lower_seismogenic_depth", "/", "dip_tan", "strike", "=", "fault_trace", "[", "0", "]", ".", "azimuth", "(", "fault_trace", "[", "-", "1", "]", ")", "azimuth", "=", "(", "strike", "+", "90.0", ")", "%", "360", "# Collect coordinates of vertices on the top and bottom edge", "lons", "=", "[", "]", "lats", "=", "[", "]", "for", "point", "in", "fault_trace", ".", "points", ":", "top_edge_point", "=", "point", ".", "point_at", "(", "hdist_top", ",", "0", ",", "azimuth", ")", "bottom_edge_point", "=", "point", ".", "point_at", "(", "hdist_bottom", ",", "0", ",", "azimuth", ")", "lons", ".", "append", "(", "top_edge_point", ".", "longitude", ")", "lats", ".", "append", "(", "top_edge_point", ".", "latitude", ")", "lons", ".", "append", "(", "bottom_edge_point", ".", "longitude", ")", "lats", ".", "append", "(", "bottom_edge_point", ".", "latitude", ")", "lons", "=", "numpy", ".", "array", "(", "lons", ",", "float", ")", "lats", "=", "numpy", ".", "array", "(", "lats", ",", "float", ")", "return", "lons", ",", "lats" ]
Get surface main vertexes. Parameters are the same as for :meth:`from_fault_data`, excluding mesh spacing. :returns: Instance of :class:`~openquake.hazardlib.geo.polygon.Polygon` describing the surface projection of the simple fault with specified parameters.
[ "Get", "surface", "main", "vertexes", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/simple_fault.py#L301-L336
gem/oq-engine
openquake/hazardlib/geo/surface/simple_fault.py
SimpleFaultSurface.surface_projection_from_fault_data
def surface_projection_from_fault_data(cls, fault_trace, upper_seismogenic_depth, lower_seismogenic_depth, dip): """ Get a surface projection of the simple fault surface. Parameters are the same as for :meth:`from_fault_data`, excluding mesh spacing. :returns: Instance of :class:`~openquake.hazardlib.geo.polygon.Polygon` describing the surface projection of the simple fault with specified parameters. """ lons, lats = cls.get_surface_vertexes(fault_trace, upper_seismogenic_depth, lower_seismogenic_depth, dip) return Mesh(lons, lats, depths=None).get_convex_hull()
python
def surface_projection_from_fault_data(cls, fault_trace, upper_seismogenic_depth, lower_seismogenic_depth, dip): lons, lats = cls.get_surface_vertexes(fault_trace, upper_seismogenic_depth, lower_seismogenic_depth, dip) return Mesh(lons, lats, depths=None).get_convex_hull()
[ "def", "surface_projection_from_fault_data", "(", "cls", ",", "fault_trace", ",", "upper_seismogenic_depth", ",", "lower_seismogenic_depth", ",", "dip", ")", ":", "lons", ",", "lats", "=", "cls", ".", "get_surface_vertexes", "(", "fault_trace", ",", "upper_seismogenic_depth", ",", "lower_seismogenic_depth", ",", "dip", ")", "return", "Mesh", "(", "lons", ",", "lats", ",", "depths", "=", "None", ")", ".", "get_convex_hull", "(", ")" ]
Get a surface projection of the simple fault surface. Parameters are the same as for :meth:`from_fault_data`, excluding mesh spacing. :returns: Instance of :class:`~openquake.hazardlib.geo.polygon.Polygon` describing the surface projection of the simple fault with specified parameters.
[ "Get", "a", "surface", "projection", "of", "the", "simple", "fault", "surface", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/simple_fault.py#L339-L356
gem/oq-engine
openquake/hazardlib/gsim/zhao_2006.py
ZhaoEtAl2006Asc.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. """ # extracting dictionary of coefficients specific to required # intensity measure type. C = self.COEFFS_ASC[imt] # mean value as given by equation 1, p. 901, without considering the # interface and intraslab terms (that is SI, SS, SSL = 0) and the # inter and intra event terms, plus the magnitude-squared term # correction factor (equation 5 p. 909). mean = self._compute_magnitude_term(C, rup.mag) +\ self._compute_distance_term(C, rup.mag, dists.rrup) +\ self._compute_focal_depth_term(C, rup.hypo_depth) +\ self._compute_faulting_style_term(C, rup.rake) +\ self._compute_site_class_term(C, sites.vs30) +\ self._compute_magnitude_squared_term(P=0.0, M=6.3, Q=C['QC'], W=C['WC'], mag=rup.mag) # convert from cm/s**2 to g mean = np.log(np.exp(mean) * 1e-2 / g) stddevs = self._get_stddevs(C['sigma'], C['tauC'], stddev_types, num_sites=len(sites.vs30)) return mean, stddevs
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): C = self.COEFFS_ASC[imt] mean = self._compute_magnitude_term(C, rup.mag) +\ self._compute_distance_term(C, rup.mag, dists.rrup) +\ self._compute_focal_depth_term(C, rup.hypo_depth) +\ self._compute_faulting_style_term(C, rup.rake) +\ self._compute_site_class_term(C, sites.vs30) +\ self._compute_magnitude_squared_term(P=0.0, M=6.3, Q=C['QC'], W=C['WC'], mag=rup.mag) mean = np.log(np.exp(mean) * 1e-2 / g) stddevs = self._get_stddevs(C['sigma'], C['tauC'], stddev_types, num_sites=len(sites.vs30)) return mean, stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "# extracting dictionary of coefficients specific to required", "# intensity measure type.", "C", "=", "self", ".", "COEFFS_ASC", "[", "imt", "]", "# mean value as given by equation 1, p. 901, without considering the", "# interface and intraslab terms (that is SI, SS, SSL = 0) and the", "# inter and intra event terms, plus the magnitude-squared term", "# correction factor (equation 5 p. 909).", "mean", "=", "self", ".", "_compute_magnitude_term", "(", "C", ",", "rup", ".", "mag", ")", "+", "self", ".", "_compute_distance_term", "(", "C", ",", "rup", ".", "mag", ",", "dists", ".", "rrup", ")", "+", "self", ".", "_compute_focal_depth_term", "(", "C", ",", "rup", ".", "hypo_depth", ")", "+", "self", ".", "_compute_faulting_style_term", "(", "C", ",", "rup", ".", "rake", ")", "+", "self", ".", "_compute_site_class_term", "(", "C", ",", "sites", ".", "vs30", ")", "+", "self", ".", "_compute_magnitude_squared_term", "(", "P", "=", "0.0", ",", "M", "=", "6.3", ",", "Q", "=", "C", "[", "'QC'", "]", ",", "W", "=", "C", "[", "'WC'", "]", ",", "mag", "=", "rup", ".", "mag", ")", "# convert from cm/s**2 to g", "mean", "=", "np", ".", "log", "(", "np", ".", "exp", "(", "mean", ")", "*", "1e-2", "/", "g", ")", "stddevs", "=", "self", ".", "_get_stddevs", "(", "C", "[", "'sigma'", "]", ",", "C", "[", "'tauC'", "]", ",", "stddev_types", ",", "num_sites", "=", "len", "(", "sites", ".", "vs30", ")", ")", "return", "mean", ",", "stddevs" ]
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
[ "See", ":", "meth", ":", "superclass", "method", "<", ".", "base", ".", "GroundShakingIntensityModel", ".", "get_mean_and_stddevs", ">", "for", "spec", "of", "input", "and", "result", "values", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/zhao_2006.py#L81-L109
gem/oq-engine
openquake/hazardlib/gsim/zhao_2006.py
ZhaoEtAl2006Asc._compute_distance_term
def _compute_distance_term(self, C, mag, rrup): """ Compute second and third terms in equation 1, p. 901. """ term1 = C['b'] * rrup term2 = - np.log(rrup + C['c'] * np.exp(C['d'] * mag)) return term1 + term2
python
def _compute_distance_term(self, C, mag, rrup): term1 = C['b'] * rrup term2 = - np.log(rrup + C['c'] * np.exp(C['d'] * mag)) return term1 + term2
[ "def", "_compute_distance_term", "(", "self", ",", "C", ",", "mag", ",", "rrup", ")", ":", "term1", "=", "C", "[", "'b'", "]", "*", "rrup", "term2", "=", "-", "np", ".", "log", "(", "rrup", "+", "C", "[", "'c'", "]", "*", "np", ".", "exp", "(", "C", "[", "'d'", "]", "*", "mag", ")", ")", "return", "term1", "+", "term2" ]
Compute second and third terms in equation 1, p. 901.
[ "Compute", "second", "and", "third", "terms", "in", "equation", "1", "p", ".", "901", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/zhao_2006.py#L133-L140
gem/oq-engine
openquake/hazardlib/gsim/zhao_2006.py
ZhaoEtAl2006Asc._compute_focal_depth_term
def _compute_focal_depth_term(self, C, hypo_depth): """ Compute fourth term in equation 1, p. 901. """ # p. 901. "(i.e, depth is capped at 125 km)". focal_depth = hypo_depth if focal_depth > 125.0: focal_depth = 125.0 # p. 902. "We used the value of 15 km for the # depth coefficient hc ...". hc = 15.0 # p. 901. "When h is larger than hc, the depth terms takes # effect ...". The next sentence specifies h>=hc. return float(focal_depth >= hc) * C['e'] * (focal_depth - hc)
python
def _compute_focal_depth_term(self, C, hypo_depth): focal_depth = hypo_depth if focal_depth > 125.0: focal_depth = 125.0 hc = 15.0 return float(focal_depth >= hc) * C['e'] * (focal_depth - hc)
[ "def", "_compute_focal_depth_term", "(", "self", ",", "C", ",", "hypo_depth", ")", ":", "# p. 901. \"(i.e, depth is capped at 125 km)\".", "focal_depth", "=", "hypo_depth", "if", "focal_depth", ">", "125.0", ":", "focal_depth", "=", "125.0", "# p. 902. \"We used the value of 15 km for the", "# depth coefficient hc ...\".", "hc", "=", "15.0", "# p. 901. \"When h is larger than hc, the depth terms takes", "# effect ...\". The next sentence specifies h>=hc.", "return", "float", "(", "focal_depth", ">=", "hc", ")", "*", "C", "[", "'e'", "]", "*", "(", "focal_depth", "-", "hc", ")" ]
Compute fourth term in equation 1, p. 901.
[ "Compute", "fourth", "term", "in", "equation", "1", "p", ".", "901", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/zhao_2006.py#L142-L157
gem/oq-engine
openquake/hazardlib/gsim/zhao_2006.py
ZhaoEtAl2006Asc._compute_site_class_term
def _compute_site_class_term(self, C, vs30): """ Compute nine-th term in equation 1, p. 901. """ # map vs30 value to site class, see table 2, p. 901. site_term = np.zeros(len(vs30)) # hard rock site_term[vs30 > 1100.0] = C['CH'] # rock site_term[(vs30 > 600) & (vs30 <= 1100)] = C['C1'] # hard soil site_term[(vs30 > 300) & (vs30 <= 600)] = C['C2'] # medium soil site_term[(vs30 > 200) & (vs30 <= 300)] = C['C3'] # soft soil site_term[vs30 <= 200] = C['C4'] return site_term
python
def _compute_site_class_term(self, C, vs30): site_term = np.zeros(len(vs30)) site_term[vs30 > 1100.0] = C['CH'] site_term[(vs30 > 600) & (vs30 <= 1100)] = C['C1'] site_term[(vs30 > 300) & (vs30 <= 600)] = C['C2'] site_term[(vs30 > 200) & (vs30 <= 300)] = C['C3'] site_term[vs30 <= 200] = C['C4'] return site_term
[ "def", "_compute_site_class_term", "(", "self", ",", "C", ",", "vs30", ")", ":", "# map vs30 value to site class, see table 2, p. 901.", "site_term", "=", "np", ".", "zeros", "(", "len", "(", "vs30", ")", ")", "# hard rock", "site_term", "[", "vs30", ">", "1100.0", "]", "=", "C", "[", "'CH'", "]", "# rock", "site_term", "[", "(", "vs30", ">", "600", ")", "&", "(", "vs30", "<=", "1100", ")", "]", "=", "C", "[", "'C1'", "]", "# hard soil", "site_term", "[", "(", "vs30", ">", "300", ")", "&", "(", "vs30", "<=", "600", ")", "]", "=", "C", "[", "'C2'", "]", "# medium soil", "site_term", "[", "(", "vs30", ">", "200", ")", "&", "(", "vs30", "<=", "300", ")", "]", "=", "C", "[", "'C3'", "]", "# soft soil", "site_term", "[", "vs30", "<=", "200", "]", "=", "C", "[", "'C4'", "]", "return", "site_term" ]
Compute nine-th term in equation 1, p. 901.
[ "Compute", "nine", "-", "th", "term", "in", "equation", "1", "p", ".", "901", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/zhao_2006.py#L168-L190
gem/oq-engine
openquake/hazardlib/gsim/zhao_2006.py
ZhaoEtAl2006Asc._compute_magnitude_squared_term
def _compute_magnitude_squared_term(self, P, M, Q, W, mag): """ Compute magnitude squared term, equation 5, p. 909. """ return P * (mag - M) + Q * (mag - M) ** 2 + W
python
def _compute_magnitude_squared_term(self, P, M, Q, W, mag): return P * (mag - M) + Q * (mag - M) ** 2 + W
[ "def", "_compute_magnitude_squared_term", "(", "self", ",", "P", ",", "M", ",", "Q", ",", "W", ",", "mag", ")", ":", "return", "P", "*", "(", "mag", "-", "M", ")", "+", "Q", "*", "(", "mag", "-", "M", ")", "**", "2", "+", "W" ]
Compute magnitude squared term, equation 5, p. 909.
[ "Compute", "magnitude", "squared", "term", "equation", "5", "p", ".", "909", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/zhao_2006.py#L192-L196
gem/oq-engine
openquake/hazardlib/gsim/zhao_2006.py
ZhaoEtAl2006SSlab._compute_slab_correction_term
def _compute_slab_correction_term(self, C, rrup): """ Compute path modification term for slab events, that is the 8-th term in equation 1, p. 901. """ slab_term = C['SSL'] * np.log(rrup) return slab_term
python
def _compute_slab_correction_term(self, C, rrup): slab_term = C['SSL'] * np.log(rrup) return slab_term
[ "def", "_compute_slab_correction_term", "(", "self", ",", "C", ",", "rrup", ")", ":", "slab_term", "=", "C", "[", "'SSL'", "]", "*", "np", ".", "log", "(", "rrup", ")", "return", "slab_term" ]
Compute path modification term for slab events, that is the 8-th term in equation 1, p. 901.
[ "Compute", "path", "modification", "term", "for", "slab", "events", "that", "is", "the", "8", "-", "th", "term", "in", "equation", "1", "p", ".", "901", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/zhao_2006.py#L369-L376
gem/oq-engine
openquake/hazardlib/gsim/zhao_2006.py
ZhaoEtAl2006AscSGS.get_mean_and_stddevs
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): """ Using a minimum distance of 5km for the calculation. """ dists_mod = copy.deepcopy(dists) dists_mod.rrup[dists.rrup <= 5.] = 5. return super().get_mean_and_stddevs( sites, rup, dists_mod, imt, stddev_types)
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): dists_mod = copy.deepcopy(dists) dists_mod.rrup[dists.rrup <= 5.] = 5. return super().get_mean_and_stddevs( sites, rup, dists_mod, imt, stddev_types)
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "dists_mod", "=", "copy", ".", "deepcopy", "(", "dists", ")", "dists_mod", ".", "rrup", "[", "dists", ".", "rrup", "<=", "5.", "]", "=", "5.", "return", "super", "(", ")", ".", "get_mean_and_stddevs", "(", "sites", ",", "rup", ",", "dists_mod", ",", "imt", ",", "stddev_types", ")" ]
Using a minimum distance of 5km for the calculation.
[ "Using", "a", "minimum", "distance", "of", "5km", "for", "the", "calculation", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/zhao_2006.py#L654-L663
gem/oq-engine
openquake/engine/utils/__init__.py
confirm
def confirm(prompt): """ Ask for confirmation, given a ``prompt`` and return a boolean value. """ while True: try: answer = input(prompt) except KeyboardInterrupt: # the user presses ctrl+c, just say 'no' return False answer = answer.strip().lower() if answer not in ('y', 'n'): print('Please enter y or n') continue return answer == 'y'
python
def confirm(prompt): while True: try: answer = input(prompt) except KeyboardInterrupt: return False answer = answer.strip().lower() if answer not in ('y', 'n'): print('Please enter y or n') continue return answer == 'y'
[ "def", "confirm", "(", "prompt", ")", ":", "while", "True", ":", "try", ":", "answer", "=", "input", "(", "prompt", ")", "except", "KeyboardInterrupt", ":", "# the user presses ctrl+c, just say 'no'", "return", "False", "answer", "=", "answer", ".", "strip", "(", ")", ".", "lower", "(", ")", "if", "answer", "not", "in", "(", "'y'", ",", "'n'", ")", ":", "print", "(", "'Please enter y or n'", ")", "continue", "return", "answer", "==", "'y'" ]
Ask for confirmation, given a ``prompt`` and return a boolean value.
[ "Ask", "for", "confirmation", "given", "a", "prompt", "and", "return", "a", "boolean", "value", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/engine/utils/__init__.py#L20-L34
gem/oq-engine
openquake/risklib/asset.py
build_asset_array
def build_asset_array(assets_by_site, tagnames=(), time_event=None): """ :param assets_by_site: a list of lists of assets :param tagnames: a list of tag names :returns: an array `assetcol` """ for assets in assets_by_site: if len(assets): first_asset = assets[0] break else: # no break raise ValueError('There are no assets!') loss_types = [] occupancy_periods = [] for name in sorted(first_asset.values): if name.startswith('occupants_'): period = name.split('_', 1)[1] if period != 'None': # see scenario_risk test_case_2d occupancy_periods.append(period) loss_types.append(name) # discard occupants for different time periods else: loss_types.append('value-' + name) # loss_types can be ['value-business_interruption', 'value-contents', # 'value-nonstructural', 'occupants_None', 'occupants_day', # 'occupants_night', 'occupants_transit'] deductible_d = first_asset.deductibles or {} limit_d = first_asset.insurance_limits or {} if deductible_d or limit_d: logging.warning('Exposures with insuranceLimit/deductible fields are ' 'deprecated and may be removed in the future') retro = ['retrofitted'] if first_asset._retrofitted else [] float_fields = loss_types + retro int_fields = [(str(name), U16) for name in tagnames] tagi = {str(name): i for i, name in enumerate(tagnames)} asset_dt = numpy.dtype( [('ordinal', U32), ('lon', F32), ('lat', F32), ('site_id', U32), ('number', F32), ('area', F32)] + [ (str(name), float) for name in float_fields] + int_fields) num_assets = sum(len(assets) for assets in assets_by_site) assetcol = numpy.zeros(num_assets, asset_dt) asset_ordinal = 0 fields = set(asset_dt.fields) for sid, assets_ in enumerate(assets_by_site): for asset in assets_: asset.ordinal = asset_ordinal record = assetcol[asset_ordinal] asset_ordinal += 1 for field in fields: if field == 'ordinal': value = asset.ordinal elif field == 'number': value = asset.number elif field == 'area': value = asset.area elif field == 'site_id': value = sid elif field == 'lon': value = 
asset.location[0] elif field == 'lat': value = asset.location[1] elif field.startswith('occupants_'): value = asset.values[field] elif field == 'retrofitted': value = asset.retrofitted() elif field in tagnames: value = asset.tagidxs[tagi[field]] else: name, lt = field.split('-') value = asset.value(lt, time_event) record[field] = value return assetcol, ' '.join(occupancy_periods)
python
def build_asset_array(assets_by_site, tagnames=(), time_event=None): for assets in assets_by_site: if len(assets): first_asset = assets[0] break else: raise ValueError('There are no assets!') loss_types = [] occupancy_periods = [] for name in sorted(first_asset.values): if name.startswith('occupants_'): period = name.split('_', 1)[1] if period != 'None': occupancy_periods.append(period) loss_types.append(name) else: loss_types.append('value-' + name) deductible_d = first_asset.deductibles or {} limit_d = first_asset.insurance_limits or {} if deductible_d or limit_d: logging.warning('Exposures with insuranceLimit/deductible fields are ' 'deprecated and may be removed in the future') retro = ['retrofitted'] if first_asset._retrofitted else [] float_fields = loss_types + retro int_fields = [(str(name), U16) for name in tagnames] tagi = {str(name): i for i, name in enumerate(tagnames)} asset_dt = numpy.dtype( [('ordinal', U32), ('lon', F32), ('lat', F32), ('site_id', U32), ('number', F32), ('area', F32)] + [ (str(name), float) for name in float_fields] + int_fields) num_assets = sum(len(assets) for assets in assets_by_site) assetcol = numpy.zeros(num_assets, asset_dt) asset_ordinal = 0 fields = set(asset_dt.fields) for sid, assets_ in enumerate(assets_by_site): for asset in assets_: asset.ordinal = asset_ordinal record = assetcol[asset_ordinal] asset_ordinal += 1 for field in fields: if field == 'ordinal': value = asset.ordinal elif field == 'number': value = asset.number elif field == 'area': value = asset.area elif field == 'site_id': value = sid elif field == 'lon': value = asset.location[0] elif field == 'lat': value = asset.location[1] elif field.startswith('occupants_'): value = asset.values[field] elif field == 'retrofitted': value = asset.retrofitted() elif field in tagnames: value = asset.tagidxs[tagi[field]] else: name, lt = field.split('-') value = asset.value(lt, time_event) record[field] = value return assetcol, ' '.join(occupancy_periods)
[ "def", "build_asset_array", "(", "assets_by_site", ",", "tagnames", "=", "(", ")", ",", "time_event", "=", "None", ")", ":", "for", "assets", "in", "assets_by_site", ":", "if", "len", "(", "assets", ")", ":", "first_asset", "=", "assets", "[", "0", "]", "break", "else", ":", "# no break", "raise", "ValueError", "(", "'There are no assets!'", ")", "loss_types", "=", "[", "]", "occupancy_periods", "=", "[", "]", "for", "name", "in", "sorted", "(", "first_asset", ".", "values", ")", ":", "if", "name", ".", "startswith", "(", "'occupants_'", ")", ":", "period", "=", "name", ".", "split", "(", "'_'", ",", "1", ")", "[", "1", "]", "if", "period", "!=", "'None'", ":", "# see scenario_risk test_case_2d", "occupancy_periods", ".", "append", "(", "period", ")", "loss_types", ".", "append", "(", "name", ")", "# discard occupants for different time periods", "else", ":", "loss_types", ".", "append", "(", "'value-'", "+", "name", ")", "# loss_types can be ['value-business_interruption', 'value-contents',", "# 'value-nonstructural', 'occupants_None', 'occupants_day',", "# 'occupants_night', 'occupants_transit']", "deductible_d", "=", "first_asset", ".", "deductibles", "or", "{", "}", "limit_d", "=", "first_asset", ".", "insurance_limits", "or", "{", "}", "if", "deductible_d", "or", "limit_d", ":", "logging", ".", "warning", "(", "'Exposures with insuranceLimit/deductible fields are '", "'deprecated and may be removed in the future'", ")", "retro", "=", "[", "'retrofitted'", "]", "if", "first_asset", ".", "_retrofitted", "else", "[", "]", "float_fields", "=", "loss_types", "+", "retro", "int_fields", "=", "[", "(", "str", "(", "name", ")", ",", "U16", ")", "for", "name", "in", "tagnames", "]", "tagi", "=", "{", "str", "(", "name", ")", ":", "i", "for", "i", ",", "name", "in", "enumerate", "(", "tagnames", ")", "}", "asset_dt", "=", "numpy", ".", "dtype", "(", "[", "(", "'ordinal'", ",", "U32", ")", ",", "(", "'lon'", ",", "F32", ")", ",", "(", "'lat'", ",", "F32", ")", ",", 
"(", "'site_id'", ",", "U32", ")", ",", "(", "'number'", ",", "F32", ")", ",", "(", "'area'", ",", "F32", ")", "]", "+", "[", "(", "str", "(", "name", ")", ",", "float", ")", "for", "name", "in", "float_fields", "]", "+", "int_fields", ")", "num_assets", "=", "sum", "(", "len", "(", "assets", ")", "for", "assets", "in", "assets_by_site", ")", "assetcol", "=", "numpy", ".", "zeros", "(", "num_assets", ",", "asset_dt", ")", "asset_ordinal", "=", "0", "fields", "=", "set", "(", "asset_dt", ".", "fields", ")", "for", "sid", ",", "assets_", "in", "enumerate", "(", "assets_by_site", ")", ":", "for", "asset", "in", "assets_", ":", "asset", ".", "ordinal", "=", "asset_ordinal", "record", "=", "assetcol", "[", "asset_ordinal", "]", "asset_ordinal", "+=", "1", "for", "field", "in", "fields", ":", "if", "field", "==", "'ordinal'", ":", "value", "=", "asset", ".", "ordinal", "elif", "field", "==", "'number'", ":", "value", "=", "asset", ".", "number", "elif", "field", "==", "'area'", ":", "value", "=", "asset", ".", "area", "elif", "field", "==", "'site_id'", ":", "value", "=", "sid", "elif", "field", "==", "'lon'", ":", "value", "=", "asset", ".", "location", "[", "0", "]", "elif", "field", "==", "'lat'", ":", "value", "=", "asset", ".", "location", "[", "1", "]", "elif", "field", ".", "startswith", "(", "'occupants_'", ")", ":", "value", "=", "asset", ".", "values", "[", "field", "]", "elif", "field", "==", "'retrofitted'", ":", "value", "=", "asset", ".", "retrofitted", "(", ")", "elif", "field", "in", "tagnames", ":", "value", "=", "asset", ".", "tagidxs", "[", "tagi", "[", "field", "]", "]", "else", ":", "name", ",", "lt", "=", "field", ".", "split", "(", "'-'", ")", "value", "=", "asset", ".", "value", "(", "lt", ",", "time_event", ")", "record", "[", "field", "]", "=", "value", "return", "assetcol", ",", "' '", ".", "join", "(", "occupancy_periods", ")" ]
:param assets_by_site: a list of lists of assets :param tagnames: a list of tag names :returns: an array `assetcol`
[ ":", "param", "assets_by_site", ":", "a", "list", "of", "lists", "of", "assets", ":", "param", "tagnames", ":", "a", "list", "of", "tag", "names", ":", "returns", ":", "an", "array", "assetcol" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L583-L655
gem/oq-engine
openquake/risklib/asset.py
_get_exposure
def _get_exposure(fname, stop=None): """ :param fname: path of the XML file containing the exposure :param stop: node at which to stop parsing (or None) :returns: a pair (Exposure instance, list of asset nodes) """ [exposure] = nrml.read(fname, stop=stop) if not exposure.tag.endswith('exposureModel'): raise InvalidFile('%s: expected exposureModel, got %s' % (fname, exposure.tag)) description = exposure.description try: conversions = exposure.conversions except AttributeError: conversions = Node('conversions', nodes=[Node('costTypes', [])]) try: inslimit = conversions.insuranceLimit except AttributeError: inslimit = Node('insuranceLimit', text=True) try: deductible = conversions.deductible except AttributeError: deductible = Node('deductible', text=True) try: area = conversions.area except AttributeError: # NB: the area type cannot be an empty string because when sending # around the CostCalculator object we would run into this numpy bug # about pickling dictionaries with empty strings: # https://github.com/numpy/numpy/pull/5475 area = Node('area', dict(type='?')) try: occupancy_periods = exposure.occupancyPeriods.text or '' except AttributeError: occupancy_periods = '' try: tagNames = exposure.tagNames except AttributeError: tagNames = Node('tagNames', text='') tagnames = ~tagNames or [] if set(tagnames) & {'taxonomy', 'exposure', 'country'}: raise InvalidFile('taxonomy, exposure and country are reserved names ' 'you cannot use it in <tagNames>: %s' % fname) tagnames.insert(0, 'taxonomy') # read the cost types and make some check cost_types = [] retrofitted = False for ct in conversions.costTypes: with context(fname, ct): ctname = ct['name'] if ctname == 'structural' and 'retrofittedType' in ct.attrib: if ct['retrofittedType'] != ct['type']: raise ValueError( 'The retrofittedType %s is different from the type' '%s' % (ct['retrofittedType'], ct['type'])) if ct['retrofittedUnit'] != ct['unit']: raise ValueError( 'The retrofittedUnit %s is different from the unit' 
'%s' % (ct['retrofittedUnit'], ct['unit'])) retrofitted = True cost_types.append( (ctname, valid.cost_type_type(ct['type']), ct['unit'])) if 'occupants' in cost_types: cost_types.append(('occupants', 'per_area', 'people')) cost_types.sort(key=operator.itemgetter(0)) cost_types = numpy.array(cost_types, cost_type_dt) insurance_limit_is_absolute = il = inslimit.get('isAbsolute') deductible_is_absolute = de = deductible.get('isAbsolute') cc = CostCalculator( {}, {}, {}, True if de is None else de, True if il is None else il, {name: i for i, name in enumerate(tagnames)}, ) for ct in cost_types: name = ct['name'] # structural, nonstructural, ... cc.cost_types[name] = ct['type'] # aggregated, per_asset, per_area cc.area_types[name] = area['type'] cc.units[name] = ct['unit'] assets = [] asset_refs = [] exp = Exposure( exposure['id'], exposure['category'], description.text, cost_types, occupancy_periods, insurance_limit_is_absolute, deductible_is_absolute, retrofitted, area.attrib, assets, asset_refs, cc, TagCollection(tagnames)) assets_text = exposure.assets.text.strip() if assets_text: # the <assets> tag contains a list of file names dirname = os.path.dirname(fname) exp.datafiles = [os.path.join(dirname, f) for f in assets_text.split()] else: exp.datafiles = [] return exp, exposure.assets
python
def _get_exposure(fname, stop=None): [exposure] = nrml.read(fname, stop=stop) if not exposure.tag.endswith('exposureModel'): raise InvalidFile('%s: expected exposureModel, got %s' % (fname, exposure.tag)) description = exposure.description try: conversions = exposure.conversions except AttributeError: conversions = Node('conversions', nodes=[Node('costTypes', [])]) try: inslimit = conversions.insuranceLimit except AttributeError: inslimit = Node('insuranceLimit', text=True) try: deductible = conversions.deductible except AttributeError: deductible = Node('deductible', text=True) try: area = conversions.area except AttributeError: area = Node('area', dict(type='?')) try: occupancy_periods = exposure.occupancyPeriods.text or '' except AttributeError: occupancy_periods = '' try: tagNames = exposure.tagNames except AttributeError: tagNames = Node('tagNames', text='') tagnames = ~tagNames or [] if set(tagnames) & {'taxonomy', 'exposure', 'country'}: raise InvalidFile('taxonomy, exposure and country are reserved names ' 'you cannot use it in <tagNames>: %s' % fname) tagnames.insert(0, 'taxonomy') cost_types = [] retrofitted = False for ct in conversions.costTypes: with context(fname, ct): ctname = ct['name'] if ctname == 'structural' and 'retrofittedType' in ct.attrib: if ct['retrofittedType'] != ct['type']: raise ValueError( 'The retrofittedType %s is different from the type' '%s' % (ct['retrofittedType'], ct['type'])) if ct['retrofittedUnit'] != ct['unit']: raise ValueError( 'The retrofittedUnit %s is different from the unit' '%s' % (ct['retrofittedUnit'], ct['unit'])) retrofitted = True cost_types.append( (ctname, valid.cost_type_type(ct['type']), ct['unit'])) if 'occupants' in cost_types: cost_types.append(('occupants', 'per_area', 'people')) cost_types.sort(key=operator.itemgetter(0)) cost_types = numpy.array(cost_types, cost_type_dt) insurance_limit_is_absolute = il = inslimit.get('isAbsolute') deductible_is_absolute = de = deductible.get('isAbsolute') cc = 
CostCalculator( {}, {}, {}, True if de is None else de, True if il is None else il, {name: i for i, name in enumerate(tagnames)}, ) for ct in cost_types: name = ct['name'] cc.cost_types[name] = ct['type'] cc.area_types[name] = area['type'] cc.units[name] = ct['unit'] assets = [] asset_refs = [] exp = Exposure( exposure['id'], exposure['category'], description.text, cost_types, occupancy_periods, insurance_limit_is_absolute, deductible_is_absolute, retrofitted, area.attrib, assets, asset_refs, cc, TagCollection(tagnames)) assets_text = exposure.assets.text.strip() if assets_text: dirname = os.path.dirname(fname) exp.datafiles = [os.path.join(dirname, f) for f in assets_text.split()] else: exp.datafiles = [] return exp, exposure.assets
[ "def", "_get_exposure", "(", "fname", ",", "stop", "=", "None", ")", ":", "[", "exposure", "]", "=", "nrml", ".", "read", "(", "fname", ",", "stop", "=", "stop", ")", "if", "not", "exposure", ".", "tag", ".", "endswith", "(", "'exposureModel'", ")", ":", "raise", "InvalidFile", "(", "'%s: expected exposureModel, got %s'", "%", "(", "fname", ",", "exposure", ".", "tag", ")", ")", "description", "=", "exposure", ".", "description", "try", ":", "conversions", "=", "exposure", ".", "conversions", "except", "AttributeError", ":", "conversions", "=", "Node", "(", "'conversions'", ",", "nodes", "=", "[", "Node", "(", "'costTypes'", ",", "[", "]", ")", "]", ")", "try", ":", "inslimit", "=", "conversions", ".", "insuranceLimit", "except", "AttributeError", ":", "inslimit", "=", "Node", "(", "'insuranceLimit'", ",", "text", "=", "True", ")", "try", ":", "deductible", "=", "conversions", ".", "deductible", "except", "AttributeError", ":", "deductible", "=", "Node", "(", "'deductible'", ",", "text", "=", "True", ")", "try", ":", "area", "=", "conversions", ".", "area", "except", "AttributeError", ":", "# NB: the area type cannot be an empty string because when sending", "# around the CostCalculator object we would run into this numpy bug", "# about pickling dictionaries with empty strings:", "# https://github.com/numpy/numpy/pull/5475", "area", "=", "Node", "(", "'area'", ",", "dict", "(", "type", "=", "'?'", ")", ")", "try", ":", "occupancy_periods", "=", "exposure", ".", "occupancyPeriods", ".", "text", "or", "''", "except", "AttributeError", ":", "occupancy_periods", "=", "''", "try", ":", "tagNames", "=", "exposure", ".", "tagNames", "except", "AttributeError", ":", "tagNames", "=", "Node", "(", "'tagNames'", ",", "text", "=", "''", ")", "tagnames", "=", "~", "tagNames", "or", "[", "]", "if", "set", "(", "tagnames", ")", "&", "{", "'taxonomy'", ",", "'exposure'", ",", "'country'", "}", ":", "raise", "InvalidFile", "(", "'taxonomy, exposure and country are reserved names 
'", "'you cannot use it in <tagNames>: %s'", "%", "fname", ")", "tagnames", ".", "insert", "(", "0", ",", "'taxonomy'", ")", "# read the cost types and make some check", "cost_types", "=", "[", "]", "retrofitted", "=", "False", "for", "ct", "in", "conversions", ".", "costTypes", ":", "with", "context", "(", "fname", ",", "ct", ")", ":", "ctname", "=", "ct", "[", "'name'", "]", "if", "ctname", "==", "'structural'", "and", "'retrofittedType'", "in", "ct", ".", "attrib", ":", "if", "ct", "[", "'retrofittedType'", "]", "!=", "ct", "[", "'type'", "]", ":", "raise", "ValueError", "(", "'The retrofittedType %s is different from the type'", "'%s'", "%", "(", "ct", "[", "'retrofittedType'", "]", ",", "ct", "[", "'type'", "]", ")", ")", "if", "ct", "[", "'retrofittedUnit'", "]", "!=", "ct", "[", "'unit'", "]", ":", "raise", "ValueError", "(", "'The retrofittedUnit %s is different from the unit'", "'%s'", "%", "(", "ct", "[", "'retrofittedUnit'", "]", ",", "ct", "[", "'unit'", "]", ")", ")", "retrofitted", "=", "True", "cost_types", ".", "append", "(", "(", "ctname", ",", "valid", ".", "cost_type_type", "(", "ct", "[", "'type'", "]", ")", ",", "ct", "[", "'unit'", "]", ")", ")", "if", "'occupants'", "in", "cost_types", ":", "cost_types", ".", "append", "(", "(", "'occupants'", ",", "'per_area'", ",", "'people'", ")", ")", "cost_types", ".", "sort", "(", "key", "=", "operator", ".", "itemgetter", "(", "0", ")", ")", "cost_types", "=", "numpy", ".", "array", "(", "cost_types", ",", "cost_type_dt", ")", "insurance_limit_is_absolute", "=", "il", "=", "inslimit", ".", "get", "(", "'isAbsolute'", ")", "deductible_is_absolute", "=", "de", "=", "deductible", ".", "get", "(", "'isAbsolute'", ")", "cc", "=", "CostCalculator", "(", "{", "}", ",", "{", "}", ",", "{", "}", ",", "True", "if", "de", "is", "None", "else", "de", ",", "True", "if", "il", "is", "None", "else", "il", ",", "{", "name", ":", "i", "for", "i", ",", "name", "in", "enumerate", "(", "tagnames", ")", "}", ",", ")", 
"for", "ct", "in", "cost_types", ":", "name", "=", "ct", "[", "'name'", "]", "# structural, nonstructural, ...", "cc", ".", "cost_types", "[", "name", "]", "=", "ct", "[", "'type'", "]", "# aggregated, per_asset, per_area", "cc", ".", "area_types", "[", "name", "]", "=", "area", "[", "'type'", "]", "cc", ".", "units", "[", "name", "]", "=", "ct", "[", "'unit'", "]", "assets", "=", "[", "]", "asset_refs", "=", "[", "]", "exp", "=", "Exposure", "(", "exposure", "[", "'id'", "]", ",", "exposure", "[", "'category'", "]", ",", "description", ".", "text", ",", "cost_types", ",", "occupancy_periods", ",", "insurance_limit_is_absolute", ",", "deductible_is_absolute", ",", "retrofitted", ",", "area", ".", "attrib", ",", "assets", ",", "asset_refs", ",", "cc", ",", "TagCollection", "(", "tagnames", ")", ")", "assets_text", "=", "exposure", ".", "assets", ".", "text", ".", "strip", "(", ")", "if", "assets_text", ":", "# the <assets> tag contains a list of file names", "dirname", "=", "os", ".", "path", ".", "dirname", "(", "fname", ")", "exp", ".", "datafiles", "=", "[", "os", ".", "path", ".", "join", "(", "dirname", ",", "f", ")", "for", "f", "in", "assets_text", ".", "split", "(", ")", "]", "else", ":", "exp", ".", "datafiles", "=", "[", "]", "return", "exp", ",", "exposure", ".", "assets" ]
:param fname: path of the XML file containing the exposure :param stop: node at which to stop parsing (or None) :returns: a pair (Exposure instance, list of asset nodes)
[ ":", "param", "fname", ":", "path", "of", "the", "XML", "file", "containing", "the", "exposure", ":", "param", "stop", ":", "node", "at", "which", "to", "stop", "parsing", "(", "or", "None", ")", ":", "returns", ":", "a", "pair", "(", "Exposure", "instance", "list", "of", "asset", "nodes", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L665-L762
gem/oq-engine
openquake/risklib/asset.py
CostCalculator.get_units
def get_units(self, loss_types): """ :param: a list of loss types :returns: an array of units as byte strings, suitable for HDF5 """ lst = [] for lt in loss_types: if lt.endswith('_ins'): lt = lt[:-4] if lt == 'occupants': unit = 'people' else: unit = self.units[lt] lst.append(encode(unit)) return numpy.array(lst)
python
def get_units(self, loss_types): lst = [] for lt in loss_types: if lt.endswith('_ins'): lt = lt[:-4] if lt == 'occupants': unit = 'people' else: unit = self.units[lt] lst.append(encode(unit)) return numpy.array(lst)
[ "def", "get_units", "(", "self", ",", "loss_types", ")", ":", "lst", "=", "[", "]", "for", "lt", "in", "loss_types", ":", "if", "lt", ".", "endswith", "(", "'_ins'", ")", ":", "lt", "=", "lt", "[", ":", "-", "4", "]", "if", "lt", "==", "'occupants'", ":", "unit", "=", "'people'", "else", ":", "unit", "=", "self", ".", "units", "[", "lt", "]", "lst", ".", "append", "(", "encode", "(", "unit", ")", ")", "return", "numpy", ".", "array", "(", "lst", ")" ]
:param: a list of loss types :returns: an array of units as byte strings, suitable for HDF5
[ ":", "param", ":", "a", "list", "of", "loss", "types", ":", "returns", ":", "an", "array", "of", "units", "as", "byte", "strings", "suitable", "for", "HDF5" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L82-L96
gem/oq-engine
openquake/risklib/asset.py
Asset.value
def value(self, loss_type, time_event=None): """ :returns: the total asset value for `loss_type` """ if loss_type == 'occupants': return self.values['occupants_' + str(time_event)] try: # extract from the cache val = self._cost[loss_type] except KeyError: # compute val = self.calc(loss_type, self.values, self.area, self.number) self._cost[loss_type] = val return val
python
def value(self, loss_type, time_event=None): if loss_type == 'occupants': return self.values['occupants_' + str(time_event)] try: val = self._cost[loss_type] except KeyError: val = self.calc(loss_type, self.values, self.area, self.number) self._cost[loss_type] = val return val
[ "def", "value", "(", "self", ",", "loss_type", ",", "time_event", "=", "None", ")", ":", "if", "loss_type", "==", "'occupants'", ":", "return", "self", ".", "values", "[", "'occupants_'", "+", "str", "(", "time_event", ")", "]", "try", ":", "# extract from the cache", "val", "=", "self", ".", "_cost", "[", "loss_type", "]", "except", "KeyError", ":", "# compute", "val", "=", "self", ".", "calc", "(", "loss_type", ",", "self", ".", "values", ",", "self", ".", "area", ",", "self", ".", "number", ")", "self", ".", "_cost", "[", "loss_type", "]", "=", "val", "return", "val" ]
:returns: the total asset value for `loss_type`
[ ":", "returns", ":", "the", "total", "asset", "value", "for", "loss_type" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L195-L206
gem/oq-engine
openquake/risklib/asset.py
Asset.deductible
def deductible(self, loss_type, dummy=None): """ :returns: the deductible fraction of the asset cost for `loss_type` """ val = self.calc(loss_type, self.deductibles, self.area, self.number) if self.calc.deduct_abs: # convert to relative value return val / self.calc(loss_type, self.values, self.area, self.number) else: return val
python
def deductible(self, loss_type, dummy=None): val = self.calc(loss_type, self.deductibles, self.area, self.number) if self.calc.deduct_abs: return val / self.calc(loss_type, self.values, self.area, self.number) else: return val
[ "def", "deductible", "(", "self", ",", "loss_type", ",", "dummy", "=", "None", ")", ":", "val", "=", "self", ".", "calc", "(", "loss_type", ",", "self", ".", "deductibles", ",", "self", ".", "area", ",", "self", ".", "number", ")", "if", "self", ".", "calc", ".", "deduct_abs", ":", "# convert to relative value", "return", "val", "/", "self", ".", "calc", "(", "loss_type", ",", "self", ".", "values", ",", "self", ".", "area", ",", "self", ".", "number", ")", "else", ":", "return", "val" ]
:returns: the deductible fraction of the asset cost for `loss_type`
[ ":", "returns", ":", "the", "deductible", "fraction", "of", "the", "asset", "cost", "for", "loss_type" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L208-L217
gem/oq-engine
openquake/risklib/asset.py
Asset.insurance_limit
def insurance_limit(self, loss_type, dummy=None): """ :returns: the limit fraction of the asset cost for `loss_type` """ val = self.calc(loss_type, self.insurance_limits, self.area, self.number) if self.calc.limit_abs: # convert to relative value return val / self.calc(loss_type, self.values, self.area, self.number) else: return val
python
def insurance_limit(self, loss_type, dummy=None): val = self.calc(loss_type, self.insurance_limits, self.area, self.number) if self.calc.limit_abs: return val / self.calc(loss_type, self.values, self.area, self.number) else: return val
[ "def", "insurance_limit", "(", "self", ",", "loss_type", ",", "dummy", "=", "None", ")", ":", "val", "=", "self", ".", "calc", "(", "loss_type", ",", "self", ".", "insurance_limits", ",", "self", ".", "area", ",", "self", ".", "number", ")", "if", "self", ".", "calc", ".", "limit_abs", ":", "# convert to relative value", "return", "val", "/", "self", ".", "calc", "(", "loss_type", ",", "self", ".", "values", ",", "self", ".", "area", ",", "self", ".", "number", ")", "else", ":", "return", "val" ]
:returns: the limit fraction of the asset cost for `loss_type`
[ ":", "returns", ":", "the", "limit", "fraction", "of", "the", "asset", "cost", "for", "loss_type" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L219-L229
gem/oq-engine
openquake/risklib/asset.py
Asset.retrofitted
def retrofitted(self): """ :returns: the asset retrofitted value """ return self.calc('structural', {'structural': self._retrofitted}, self.area, self.number)
python
def retrofitted(self): return self.calc('structural', {'structural': self._retrofitted}, self.area, self.number)
[ "def", "retrofitted", "(", "self", ")", ":", "return", "self", ".", "calc", "(", "'structural'", ",", "{", "'structural'", ":", "self", ".", "_retrofitted", "}", ",", "self", ".", "area", ",", "self", ".", "number", ")" ]
:returns: the asset retrofitted value
[ ":", "returns", ":", "the", "asset", "retrofitted", "value" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L231-L236
gem/oq-engine
openquake/risklib/asset.py
Asset.tagmask
def tagmask(self, tags): """ :returns: a boolean array with True where the assets has tags """ mask = numpy.zeros(len(tags), bool) for t, tag in enumerate(tags): tagname, tagvalue = tag.split('=') mask[t] = self.tagvalue(tagname) == tagvalue return mask
python
def tagmask(self, tags): mask = numpy.zeros(len(tags), bool) for t, tag in enumerate(tags): tagname, tagvalue = tag.split('=') mask[t] = self.tagvalue(tagname) == tagvalue return mask
[ "def", "tagmask", "(", "self", ",", "tags", ")", ":", "mask", "=", "numpy", ".", "zeros", "(", "len", "(", "tags", ")", ",", "bool", ")", "for", "t", ",", "tag", "in", "enumerate", "(", "tags", ")", ":", "tagname", ",", "tagvalue", "=", "tag", ".", "split", "(", "'='", ")", "mask", "[", "t", "]", "=", "self", ".", "tagvalue", "(", "tagname", ")", "==", "tagvalue", "return", "mask" ]
:returns: a boolean array with True where the assets has tags
[ ":", "returns", ":", "a", "boolean", "array", "with", "True", "where", "the", "assets", "has", "tags" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L238-L246
gem/oq-engine
openquake/risklib/asset.py
TagCollection.add
def add(self, tagname, tagvalue): """ :returns: numeric index associated to the tag """ dic = getattr(self, tagname + '_idx') try: return dic[tagvalue] except KeyError: dic[tagvalue] = idx = len(dic) getattr(self, tagname).append(tagvalue) if idx > TWO16: raise InvalidFile('contains more then %d tags' % TWO16) return idx
python
def add(self, tagname, tagvalue): dic = getattr(self, tagname + '_idx') try: return dic[tagvalue] except KeyError: dic[tagvalue] = idx = len(dic) getattr(self, tagname).append(tagvalue) if idx > TWO16: raise InvalidFile('contains more then %d tags' % TWO16) return idx
[ "def", "add", "(", "self", ",", "tagname", ",", "tagvalue", ")", ":", "dic", "=", "getattr", "(", "self", ",", "tagname", "+", "'_idx'", ")", "try", ":", "return", "dic", "[", "tagvalue", "]", "except", "KeyError", ":", "dic", "[", "tagvalue", "]", "=", "idx", "=", "len", "(", "dic", ")", "getattr", "(", "self", ",", "tagname", ")", ".", "append", "(", "tagvalue", ")", "if", "idx", ">", "TWO16", ":", "raise", "InvalidFile", "(", "'contains more then %d tags'", "%", "TWO16", ")", "return", "idx" ]
:returns: numeric index associated to the tag
[ ":", "returns", ":", "numeric", "index", "associated", "to", "the", "tag" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L287-L299
gem/oq-engine
openquake/risklib/asset.py
TagCollection.add_tags
def add_tags(self, dic, prefix): """ :param dic: a dictionary tagname -> tagvalue :returns: a list of tag indices, one per tagname """ # fill missing tagvalues with "?", raise an error for unknown tagnames idxs = [] for tagname in self.tagnames: if tagname in ('exposure', 'country'): idxs.append(self.add(tagname, prefix)) continue try: tagvalue = dic.pop(tagname) except KeyError: tagvalue = '?' else: if tagvalue in '?*': raise ValueError( 'Invalid tagvalue="%s"' % tagvalue) idxs.append(self.add(tagname, tagvalue)) if dic: raise ValueError( 'Unknown tagname %s or <tagNames> not ' 'specified in the exposure' % ', '.join(dic)) return idxs
python
def add_tags(self, dic, prefix): idxs = [] for tagname in self.tagnames: if tagname in ('exposure', 'country'): idxs.append(self.add(tagname, prefix)) continue try: tagvalue = dic.pop(tagname) except KeyError: tagvalue = '?' else: if tagvalue in '?*': raise ValueError( 'Invalid tagvalue="%s"' % tagvalue) idxs.append(self.add(tagname, tagvalue)) if dic: raise ValueError( 'Unknown tagname %s or <tagNames> not ' 'specified in the exposure' % ', '.join(dic)) return idxs
[ "def", "add_tags", "(", "self", ",", "dic", ",", "prefix", ")", ":", "# fill missing tagvalues with \"?\", raise an error for unknown tagnames", "idxs", "=", "[", "]", "for", "tagname", "in", "self", ".", "tagnames", ":", "if", "tagname", "in", "(", "'exposure'", ",", "'country'", ")", ":", "idxs", ".", "append", "(", "self", ".", "add", "(", "tagname", ",", "prefix", ")", ")", "continue", "try", ":", "tagvalue", "=", "dic", ".", "pop", "(", "tagname", ")", "except", "KeyError", ":", "tagvalue", "=", "'?'", "else", ":", "if", "tagvalue", "in", "'?*'", ":", "raise", "ValueError", "(", "'Invalid tagvalue=\"%s\"'", "%", "tagvalue", ")", "idxs", ".", "append", "(", "self", ".", "add", "(", "tagname", ",", "tagvalue", ")", ")", "if", "dic", ":", "raise", "ValueError", "(", "'Unknown tagname %s or <tagNames> not '", "'specified in the exposure'", "%", "', '", ".", "join", "(", "dic", ")", ")", "return", "idxs" ]
:param dic: a dictionary tagname -> tagvalue :returns: a list of tag indices, one per tagname
[ ":", "param", "dic", ":", "a", "dictionary", "tagname", "-", ">", "tagvalue", ":", "returns", ":", "a", "list", "of", "tag", "indices", "one", "per", "tagname" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L301-L325
gem/oq-engine
openquake/risklib/asset.py
TagCollection.get_tag
def get_tag(self, tagname, tagidx): """ :returns: the tag associated to the given tagname and tag index """ return '%s=%s' % (tagname, decode(getattr(self, tagname)[tagidx]))
python
def get_tag(self, tagname, tagidx): return '%s=%s' % (tagname, decode(getattr(self, tagname)[tagidx]))
[ "def", "get_tag", "(", "self", ",", "tagname", ",", "tagidx", ")", ":", "return", "'%s=%s'", "%", "(", "tagname", ",", "decode", "(", "getattr", "(", "self", ",", "tagname", ")", "[", "tagidx", "]", ")", ")" ]
:returns: the tag associated to the given tagname and tag index
[ ":", "returns", ":", "the", "tag", "associated", "to", "the", "given", "tagname", "and", "tag", "index" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L332-L336
gem/oq-engine
openquake/risklib/asset.py
TagCollection.get_tagvalues
def get_tagvalues(self, tagnames, tagidxs): """ :returns: the tag associated to the given tagname and tag index """ values = tuple(getattr(self, tagname)[tagidx + 1] for tagidx, tagname in zip(tagidxs, tagnames)) return values
python
def get_tagvalues(self, tagnames, tagidxs): values = tuple(getattr(self, tagname)[tagidx + 1] for tagidx, tagname in zip(tagidxs, tagnames)) return values
[ "def", "get_tagvalues", "(", "self", ",", "tagnames", ",", "tagidxs", ")", ":", "values", "=", "tuple", "(", "getattr", "(", "self", ",", "tagname", ")", "[", "tagidx", "+", "1", "]", "for", "tagidx", ",", "tagname", "in", "zip", "(", "tagidxs", ",", "tagnames", ")", ")", "return", "values" ]
:returns: the tag associated to the given tagname and tag index
[ ":", "returns", ":", "the", "tag", "associated", "to", "the", "given", "tagname", "and", "tag", "index" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L338-L344
gem/oq-engine
openquake/risklib/asset.py
TagCollection.gen_tags
def gen_tags(self, tagname): """ :yields: the tags associated to the given tagname """ for tagvalue in getattr(self, tagname): yield '%s=%s' % (tagname, decode(tagvalue))
python
def gen_tags(self, tagname): for tagvalue in getattr(self, tagname): yield '%s=%s' % (tagname, decode(tagvalue))
[ "def", "gen_tags", "(", "self", ",", "tagname", ")", ":", "for", "tagvalue", "in", "getattr", "(", "self", ",", "tagname", ")", ":", "yield", "'%s=%s'", "%", "(", "tagname", ",", "decode", "(", "tagvalue", ")", ")" ]
:yields: the tags associated to the given tagname
[ ":", "yields", ":", "the", "tags", "associated", "to", "the", "given", "tagname" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L346-L351
gem/oq-engine
openquake/risklib/asset.py
TagCollection.agg_shape
def agg_shape(self, shp, aggregate_by): """ :returns: a shape shp + (T, ...) depending on the tagnames """ return shp + tuple( len(getattr(self, tagname)) - 1 for tagname in aggregate_by)
python
def agg_shape(self, shp, aggregate_by): return shp + tuple( len(getattr(self, tagname)) - 1 for tagname in aggregate_by)
[ "def", "agg_shape", "(", "self", ",", "shp", ",", "aggregate_by", ")", ":", "return", "shp", "+", "tuple", "(", "len", "(", "getattr", "(", "self", ",", "tagname", ")", ")", "-", "1", "for", "tagname", "in", "aggregate_by", ")" ]
:returns: a shape shp + (T, ...) depending on the tagnames
[ ":", "returns", ":", "a", "shape", "shp", "+", "(", "T", "...", ")", "depending", "on", "the", "tagnames" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L353-L358
gem/oq-engine
openquake/risklib/asset.py
AssetCollection.num_taxonomies_by_site
def num_taxonomies_by_site(self): """ :returns: an array with the number of assets per each site """ dic = general.group_array(self.array, 'site_id') num_taxonomies = numpy.zeros(self.tot_sites, U32) for sid, arr in dic.items(): num_taxonomies[sid] = len(numpy.unique(arr['taxonomy'])) return num_taxonomies
python
def num_taxonomies_by_site(self): dic = general.group_array(self.array, 'site_id') num_taxonomies = numpy.zeros(self.tot_sites, U32) for sid, arr in dic.items(): num_taxonomies[sid] = len(numpy.unique(arr['taxonomy'])) return num_taxonomies
[ "def", "num_taxonomies_by_site", "(", "self", ")", ":", "dic", "=", "general", ".", "group_array", "(", "self", ".", "array", ",", "'site_id'", ")", "num_taxonomies", "=", "numpy", ".", "zeros", "(", "self", ".", "tot_sites", ",", "U32", ")", "for", "sid", ",", "arr", "in", "dic", ".", "items", "(", ")", ":", "num_taxonomies", "[", "sid", "]", "=", "len", "(", "numpy", ".", "unique", "(", "arr", "[", "'taxonomy'", "]", ")", ")", "return", "num_taxonomies" ]
:returns: an array with the number of assets per each site
[ ":", "returns", ":", "an", "array", "with", "the", "number", "of", "assets", "per", "each", "site" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L426-L434
gem/oq-engine
openquake/risklib/asset.py
AssetCollection.get_aids_by_tag
def get_aids_by_tag(self): """ :returns: dict tag -> asset ordinals """ aids_by_tag = general.AccumDict(accum=set()) for aid, ass in enumerate(self): for tagname in self.tagnames: tag = self.tagcol.get_tag(tagname, ass[tagname]) aids_by_tag[tag].add(aid) return aids_by_tag
python
def get_aids_by_tag(self): aids_by_tag = general.AccumDict(accum=set()) for aid, ass in enumerate(self): for tagname in self.tagnames: tag = self.tagcol.get_tag(tagname, ass[tagname]) aids_by_tag[tag].add(aid) return aids_by_tag
[ "def", "get_aids_by_tag", "(", "self", ")", ":", "aids_by_tag", "=", "general", ".", "AccumDict", "(", "accum", "=", "set", "(", ")", ")", "for", "aid", ",", "ass", "in", "enumerate", "(", "self", ")", ":", "for", "tagname", "in", "self", ".", "tagnames", ":", "tag", "=", "self", ".", "tagcol", ".", "get_tag", "(", "tagname", ",", "ass", "[", "tagname", "]", ")", "aids_by_tag", "[", "tag", "]", ".", "add", "(", "aid", ")", "return", "aids_by_tag" ]
:returns: dict tag -> asset ordinals
[ ":", "returns", ":", "dict", "tag", "-", ">", "asset", "ordinals" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L436-L445
gem/oq-engine
openquake/risklib/asset.py
AssetCollection.assets_by_site
def assets_by_site(self): """ :returns: numpy array of lists with the assets by each site """ assets_by_site = [[] for sid in range(self.tot_sites)] for i, ass in enumerate(self.array): assets_by_site[ass['site_id']].append(self[i]) return numpy.array(assets_by_site)
python
def assets_by_site(self): assets_by_site = [[] for sid in range(self.tot_sites)] for i, ass in enumerate(self.array): assets_by_site[ass['site_id']].append(self[i]) return numpy.array(assets_by_site)
[ "def", "assets_by_site", "(", "self", ")", ":", "assets_by_site", "=", "[", "[", "]", "for", "sid", "in", "range", "(", "self", ".", "tot_sites", ")", "]", "for", "i", ",", "ass", "in", "enumerate", "(", "self", ".", "array", ")", ":", "assets_by_site", "[", "ass", "[", "'site_id'", "]", "]", ".", "append", "(", "self", "[", "i", "]", ")", "return", "numpy", ".", "array", "(", "assets_by_site", ")" ]
:returns: numpy array of lists with the assets by each site
[ ":", "returns", ":", "numpy", "array", "of", "lists", "with", "the", "assets", "by", "each", "site" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L454-L461
gem/oq-engine
openquake/risklib/asset.py
AssetCollection.aggregate_by
def aggregate_by(self, tagnames, array): """ :param tagnames: a list of valid tag names :param array: an array with the same length as the asset collection :returns: an array of aggregate values with the proper shape """ missing = set(tagnames) - set(self.tagcol.tagnames) if missing: raise ValueError('Unknown tagname(s) %s' % missing) A, *shp = array.shape if A != len(self): raise ValueError('The array must have length %d, got %d' % (len(self), A)) if not tagnames: return array.sum(axis=0) shape = [len(getattr(self.tagcol, tagname))-1 for tagname in tagnames] acc = numpy.zeros(shape, (F32, shp) if shp else F32) for asset, row in zip(self.array, array): acc[tuple(idx - 1 for idx in asset[tagnames])] += row return acc
python
def aggregate_by(self, tagnames, array): missing = set(tagnames) - set(self.tagcol.tagnames) if missing: raise ValueError('Unknown tagname(s) %s' % missing) A, *shp = array.shape if A != len(self): raise ValueError('The array must have length %d, got %d' % (len(self), A)) if not tagnames: return array.sum(axis=0) shape = [len(getattr(self.tagcol, tagname))-1 for tagname in tagnames] acc = numpy.zeros(shape, (F32, shp) if shp else F32) for asset, row in zip(self.array, array): acc[tuple(idx - 1 for idx in asset[tagnames])] += row return acc
[ "def", "aggregate_by", "(", "self", ",", "tagnames", ",", "array", ")", ":", "missing", "=", "set", "(", "tagnames", ")", "-", "set", "(", "self", ".", "tagcol", ".", "tagnames", ")", "if", "missing", ":", "raise", "ValueError", "(", "'Unknown tagname(s) %s'", "%", "missing", ")", "A", ",", "", "*", "shp", "=", "array", ".", "shape", "if", "A", "!=", "len", "(", "self", ")", ":", "raise", "ValueError", "(", "'The array must have length %d, got %d'", "%", "(", "len", "(", "self", ")", ",", "A", ")", ")", "if", "not", "tagnames", ":", "return", "array", ".", "sum", "(", "axis", "=", "0", ")", "shape", "=", "[", "len", "(", "getattr", "(", "self", ".", "tagcol", ",", "tagname", ")", ")", "-", "1", "for", "tagname", "in", "tagnames", "]", "acc", "=", "numpy", ".", "zeros", "(", "shape", ",", "(", "F32", ",", "shp", ")", "if", "shp", "else", "F32", ")", "for", "asset", ",", "row", "in", "zip", "(", "self", ".", "array", ",", "array", ")", ":", "acc", "[", "tuple", "(", "idx", "-", "1", "for", "idx", "in", "asset", "[", "tagnames", "]", ")", "]", "+=", "row", "return", "acc" ]
:param tagnames: a list of valid tag names :param array: an array with the same length as the asset collection :returns: an array of aggregate values with the proper shape
[ ":", "param", "tagnames", ":", "a", "list", "of", "valid", "tag", "names", ":", "param", "array", ":", "an", "array", "with", "the", "same", "length", "as", "the", "asset", "collection", ":", "returns", ":", "an", "array", "of", "aggregate", "values", "with", "the", "proper", "shape" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L463-L482
gem/oq-engine
openquake/risklib/asset.py
AssetCollection.agg_value
def agg_value(self, *tagnames): """ :param tagnames: tagnames of lengths T1, T2, ... respectively :returns: the values of the exposure aggregated by tagnames as an array of shape (T1, T2, ..., L) """ aval = numpy.zeros((len(self), len(self.loss_types)), F32) # (A, L) for asset in self: for lti, lt in enumerate(self.loss_types): if lt == 'occupants': aval[asset['ordinal'], lti] = asset[lt + '_None'] else: aval[asset['ordinal'], lti] = asset['value-' + lt] return self.aggregate_by(list(tagnames), aval)
python
def agg_value(self, *tagnames): aval = numpy.zeros((len(self), len(self.loss_types)), F32) for asset in self: for lti, lt in enumerate(self.loss_types): if lt == 'occupants': aval[asset['ordinal'], lti] = asset[lt + '_None'] else: aval[asset['ordinal'], lti] = asset['value-' + lt] return self.aggregate_by(list(tagnames), aval)
[ "def", "agg_value", "(", "self", ",", "*", "tagnames", ")", ":", "aval", "=", "numpy", ".", "zeros", "(", "(", "len", "(", "self", ")", ",", "len", "(", "self", ".", "loss_types", ")", ")", ",", "F32", ")", "# (A, L)", "for", "asset", "in", "self", ":", "for", "lti", ",", "lt", "in", "enumerate", "(", "self", ".", "loss_types", ")", ":", "if", "lt", "==", "'occupants'", ":", "aval", "[", "asset", "[", "'ordinal'", "]", ",", "lti", "]", "=", "asset", "[", "lt", "+", "'_None'", "]", "else", ":", "aval", "[", "asset", "[", "'ordinal'", "]", ",", "lti", "]", "=", "asset", "[", "'value-'", "+", "lt", "]", "return", "self", ".", "aggregate_by", "(", "list", "(", "tagnames", ")", ",", "aval", ")" ]
:param tagnames: tagnames of lengths T1, T2, ... respectively :returns: the values of the exposure aggregated by tagnames as an array of shape (T1, T2, ..., L)
[ ":", "param", "tagnames", ":", "tagnames", "of", "lengths", "T1", "T2", "...", "respectively", ":", "returns", ":", "the", "values", "of", "the", "exposure", "aggregated", "by", "tagnames", "as", "an", "array", "of", "shape", "(", "T1", "T2", "...", "L", ")" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L484-L499
gem/oq-engine
openquake/risklib/asset.py
AssetCollection.reduce
def reduce(self, sitecol): """ :returns: a reduced AssetCollection on the given sitecol """ ok_indices = numpy.sum( [self.array['site_id'] == sid for sid in sitecol.sids], axis=0, dtype=bool) new = object.__new__(self.__class__) vars(new).update(vars(self)) new.array = self.array[ok_indices] new.array['ordinal'] = numpy.arange(len(new.array)) new.asset_refs = self.asset_refs[ok_indices] return new
python
def reduce(self, sitecol): ok_indices = numpy.sum( [self.array['site_id'] == sid for sid in sitecol.sids], axis=0, dtype=bool) new = object.__new__(self.__class__) vars(new).update(vars(self)) new.array = self.array[ok_indices] new.array['ordinal'] = numpy.arange(len(new.array)) new.asset_refs = self.asset_refs[ok_indices] return new
[ "def", "reduce", "(", "self", ",", "sitecol", ")", ":", "ok_indices", "=", "numpy", ".", "sum", "(", "[", "self", ".", "array", "[", "'site_id'", "]", "==", "sid", "for", "sid", "in", "sitecol", ".", "sids", "]", ",", "axis", "=", "0", ",", "dtype", "=", "bool", ")", "new", "=", "object", ".", "__new__", "(", "self", ".", "__class__", ")", "vars", "(", "new", ")", ".", "update", "(", "vars", "(", "self", ")", ")", "new", ".", "array", "=", "self", ".", "array", "[", "ok_indices", "]", "new", ".", "array", "[", "'ordinal'", "]", "=", "numpy", ".", "arange", "(", "len", "(", "new", ".", "array", ")", ")", "new", ".", "asset_refs", "=", "self", ".", "asset_refs", "[", "ok_indices", "]", "return", "new" ]
:returns: a reduced AssetCollection on the given sitecol
[ ":", "returns", ":", "a", "reduced", "AssetCollection", "on", "the", "given", "sitecol" ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L501-L513
gem/oq-engine
openquake/risklib/asset.py
AssetCollection.reduce_also
def reduce_also(self, sitecol): """ :returns: a reduced AssetCollection on the given sitecol NB: diffently from .reduce, also the SiteCollection is reduced and turned into a complete site collection. """ array = [] asset_refs = [] for idx, sid in enumerate(sitecol.sids): mask = self.array['site_id'] == sid arr = self.array[mask] arr['site_id'] = idx array.append(arr) asset_refs.append(self.asset_refs[mask]) new = object.__new__(self.__class__) vars(new).update(vars(self)) new.tot_sites = len(sitecol) new.array = numpy.concatenate(array) new.array['ordinal'] = numpy.arange(len(new.array)) new.asset_refs = numpy.concatenate(asset_refs) sitecol.make_complete() return new
python
def reduce_also(self, sitecol): array = [] asset_refs = [] for idx, sid in enumerate(sitecol.sids): mask = self.array['site_id'] == sid arr = self.array[mask] arr['site_id'] = idx array.append(arr) asset_refs.append(self.asset_refs[mask]) new = object.__new__(self.__class__) vars(new).update(vars(self)) new.tot_sites = len(sitecol) new.array = numpy.concatenate(array) new.array['ordinal'] = numpy.arange(len(new.array)) new.asset_refs = numpy.concatenate(asset_refs) sitecol.make_complete() return new
[ "def", "reduce_also", "(", "self", ",", "sitecol", ")", ":", "array", "=", "[", "]", "asset_refs", "=", "[", "]", "for", "idx", ",", "sid", "in", "enumerate", "(", "sitecol", ".", "sids", ")", ":", "mask", "=", "self", ".", "array", "[", "'site_id'", "]", "==", "sid", "arr", "=", "self", ".", "array", "[", "mask", "]", "arr", "[", "'site_id'", "]", "=", "idx", "array", ".", "append", "(", "arr", ")", "asset_refs", ".", "append", "(", "self", ".", "asset_refs", "[", "mask", "]", ")", "new", "=", "object", ".", "__new__", "(", "self", ".", "__class__", ")", "vars", "(", "new", ")", ".", "update", "(", "vars", "(", "self", ")", ")", "new", ".", "tot_sites", "=", "len", "(", "sitecol", ")", "new", ".", "array", "=", "numpy", ".", "concatenate", "(", "array", ")", "new", ".", "array", "[", "'ordinal'", "]", "=", "numpy", ".", "arange", "(", "len", "(", "new", ".", "array", ")", ")", "new", ".", "asset_refs", "=", "numpy", ".", "concatenate", "(", "asset_refs", ")", "sitecol", ".", "make_complete", "(", ")", "return", "new" ]
:returns: a reduced AssetCollection on the given sitecol NB: diffently from .reduce, also the SiteCollection is reduced and turned into a complete site collection.
[ ":", "returns", ":", "a", "reduced", "AssetCollection", "on", "the", "given", "sitecol", "NB", ":", "diffently", "from", ".", "reduce", "also", "the", "SiteCollection", "is", "reduced", "and", "turned", "into", "a", "complete", "site", "collection", "." ]
train
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/risklib/asset.py#L515-L536
YAML Metadata Warning: empty or missing yaml metadata in repo card (https://huggingface.co/docs/hub/datasets-cards)

Cleaning

Unlike the original dataset, the `func_code_string` column has been updated to remove all comments and keep just the code. The original version is still available in the `whole_func_string` column.

import re

def remove_comments_docstrings(code, language):
    """Strip comments (and, for Python, docstrings) from a code snippet.

    :param code: source code as a single string
    :param language: one of 'python', 'java', 'javascript', 'go', 'ruby'
        or 'php'; any other value returns the code unchanged (stripped)
    :returns: the code with comments removed and leading/trailing
        whitespace stripped

    NOTE: this is a regex-based heuristic, not a real parser -- comment
    markers that appear inside string literals (e.g. the ``//`` in a URL
    such as ``"http://..."``) are stripped as well.
    """
    if language == 'python':
        # Remove triple-quoted docstrings, then line comments
        code = re.sub(r'"""(.*?)"""', '', code, flags=re.DOTALL)
        code = re.sub(r"'''(.*?)'''", '', code, flags=re.DOTALL)
        code = re.sub(r'#.*', '', code)
    elif language in ('java', 'javascript', 'go', 'php'):
        # These languages share C-style /* ... */ and // comments, so
        # handle them in one branch instead of four identical copies.
        code = re.sub(r'/\*.*?\*/', '', code, flags=re.DOTALL)
        code = re.sub(r'//.*', '', code)
        if language == 'php':
            # PHP additionally supports hash comments
            code = re.sub(r'#.*', '', code)
    elif language == 'ruby':
        # Remove =begin/=end block comments, then hash comments
        code = re.sub(r'=begin.*?=end', '', code, flags=re.DOTALL)
        code = re.sub(r'#.*', '', code)
    return code.strip()

The snippet can be validated with the following example:

# Example DataFrame exercising the cleaner on every supported language.
import pandas as pd

samples = [
    ('python', '"""Example docstring""" def foo(): # This is a comment\n    return 1'),
    ('java', '/** Java doc */ public class Test { // Comment\n public void method() {} }'),
    ('javascript', '/* JS doc */ function test() { // Comment\n return true; }'),
    ('go', '/* Go doc */ package main // Import comment\nimport "fmt"'),
    ('ruby', '=begin Ruby doc =end def foo # Comment\n 1 + 1 end'),
    ('php', '<?php /* PHP doc */ // Comment\necho "Hello"; # Another comment ?>'),
]
example_df = pd.DataFrame(samples, columns=['language', 'func_code_string'])

# Clean each snippet with the language it was written in.
example_df['cleaned_code'] = [
    remove_comments_docstrings(code, lang) for lang, code in samples
]
print(example_df[['language', 'cleaned_code']])
Downloads last month
2
Edit dataset card