Dataset schema (column → type / value range):

| column | type | range |
|---|---|---|
| id | int32 | 0 – 252k |
| repo | string | 7 – 55 chars |
| path | string | 4 – 127 chars |
| func_name | string | 1 – 88 chars |
| original_string | string | 75 – 19.8k chars |
| language | string | 1 class |
| code | string | 75 – 19.8k chars |
| code_tokens | sequence | |
| docstring | string | 3 – 17.3k chars |
| docstring_tokens | sequence | |
| sha | string | 40 chars |
| url | string | 87 – 242 chars |

Each record below lists its fields in this order, separated by `|`:
id | repo | path | func_name | original_string | language | code | code_tokens | docstring | docstring_tokens | sha | url
200 | gem/oq-engine | openquake/hazardlib/scalerel/wc1994.py | WC1994.get_std_dev_area | def get_std_dev_area(self, mag, rake):
"""
Standard deviation for WC1994. Magnitude is ignored.
"""
assert rake is None or -180 <= rake <= 180
if rake is None:
# their "All" case
return 0.24
elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135):
# strike slip
return 0.22
elif rake > 0:
# thrust/reverse
return 0.26
else:
# normal
return 0.22 | python | def get_std_dev_area(self, mag, rake):
"""
Standard deviation for WC1994. Magnitude is ignored.
"""
assert rake is None or -180 <= rake <= 180
if rake is None:
# their "All" case
return 0.24
elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135):
# strike slip
return 0.22
elif rake > 0:
# thrust/reverse
return 0.26
else:
# normal
return 0.22 | [
"def",
"get_std_dev_area",
"(",
"self",
",",
"mag",
",",
"rake",
")",
":",
"assert",
"rake",
"is",
"None",
"or",
"-",
"180",
"<=",
"rake",
"<=",
"180",
"if",
"rake",
"is",
"None",
":",
"# their \"All\" case",
"return",
"0.24",
"elif",
"(",
"-",
"45",
"<=",
"rake",
"<=",
"45",
")",
"or",
"(",
"rake",
">=",
"135",
")",
"or",
"(",
"rake",
"<=",
"-",
"135",
")",
":",
"# strike slip",
"return",
"0.22",
"elif",
"rake",
">",
"0",
":",
"# thrust/reverse",
"return",
"0.26",
"else",
":",
"# normal",
"return",
"0.22"
] | Standard deviation for WC1994. Magnitude is ignored. | [
"Standard",
"deviation",
"for",
"WC1994",
".",
"Magnitude",
"is",
"ignored",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/scalerel/wc1994.py#L54-L70 |
201 | gem/oq-engine | openquake/hazardlib/scalerel/wc1994.py | WC1994.get_std_dev_mag | def get_std_dev_mag(self, rake):
"""
Standard deviation on the magnitude for the WC1994 area relation.
"""
assert rake is None or -180 <= rake <= 180
if rake is None:
# their "All" case
return 0.24
elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135):
# strike slip
return 0.23
elif rake > 0:
# thrust/reverse
return 0.25
else:
# normal
return 0.25 | python | def get_std_dev_mag(self, rake):
"""
Standard deviation on the magnitude for the WC1994 area relation.
"""
assert rake is None or -180 <= rake <= 180
if rake is None:
# their "All" case
return 0.24
elif (-45 <= rake <= 45) or (rake >= 135) or (rake <= -135):
# strike slip
return 0.23
elif rake > 0:
# thrust/reverse
return 0.25
else:
# normal
return 0.25 | [
"def",
"get_std_dev_mag",
"(",
"self",
",",
"rake",
")",
":",
"assert",
"rake",
"is",
"None",
"or",
"-",
"180",
"<=",
"rake",
"<=",
"180",
"if",
"rake",
"is",
"None",
":",
"# their \"All\" case",
"return",
"0.24",
"elif",
"(",
"-",
"45",
"<=",
"rake",
"<=",
"45",
")",
"or",
"(",
"rake",
">=",
"135",
")",
"or",
"(",
"rake",
"<=",
"-",
"135",
")",
":",
"# strike slip",
"return",
"0.23",
"elif",
"rake",
">",
"0",
":",
"# thrust/reverse",
"return",
"0.25",
"else",
":",
"# normal",
"return",
"0.25"
] | Standard deviation on the magnitude for the WC1994 area relation. | [
"Standard",
"deviation",
"on",
"the",
"magnitude",
"for",
"the",
"WC1994",
"area",
"relation",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/scalerel/wc1994.py#L72-L88 |
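Both `WC1994` methods in records 200–201 branch only on the rake convention (strike slip, reverse, normal, or the "All" case when `rake` is `None`); `get_std_dev_area` additionally ignores its magnitude argument. A minimal usage sketch, assuming `openquake.hazardlib` is installed and that the import path follows the `path` column:

```python
# Sketch only: the import path is inferred from the `path` column
# (openquake/hazardlib/scalerel/wc1994.py); expected values follow the code above.
from openquake.hazardlib.scalerel.wc1994 import WC1994

msr = WC1994()
print(msr.get_std_dev_area(mag=6.5, rake=None))   # "All" case   -> 0.24
print(msr.get_std_dev_area(mag=6.5, rake=0.0))    # strike slip  -> 0.22
print(msr.get_std_dev_mag(rake=90.0))             # reverse      -> 0.25
print(msr.get_std_dev_mag(rake=-90.0))            # normal       -> 0.25
```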
202 | gem/oq-engine | openquake/hazardlib/gsim/mgmpe/generic_gmpe_avgsa.py | GenericGmpeAvgSA.set_parameters | def set_parameters(self):
"""
Combines the parameters of the GMPE provided at the construction
level with the ones assigned to the average GMPE.
"""
for key in dir(self):
if key.startswith('REQUIRES_'):
setattr(self, key, getattr(self.gmpe, key))
if key.startswith('DEFINED_'):
if not key.endswith('FOR_INTENSITY_MEASURE_TYPES'):
setattr(self, key, getattr(self.gmpe, key)) | python | def set_parameters(self):
"""
Combines the parameters of the GMPE provided at the construction
level with the ones assigned to the average GMPE.
"""
for key in dir(self):
if key.startswith('REQUIRES_'):
setattr(self, key, getattr(self.gmpe, key))
if key.startswith('DEFINED_'):
if not key.endswith('FOR_INTENSITY_MEASURE_TYPES'):
setattr(self, key, getattr(self.gmpe, key)) | [
"def",
"set_parameters",
"(",
"self",
")",
":",
"for",
"key",
"in",
"dir",
"(",
"self",
")",
":",
"if",
"key",
".",
"startswith",
"(",
"'REQUIRES_'",
")",
":",
"setattr",
"(",
"self",
",",
"key",
",",
"getattr",
"(",
"self",
".",
"gmpe",
",",
"key",
")",
")",
"if",
"key",
".",
"startswith",
"(",
"'DEFINED_'",
")",
":",
"if",
"not",
"key",
".",
"endswith",
"(",
"'FOR_INTENSITY_MEASURE_TYPES'",
")",
":",
"setattr",
"(",
"self",
",",
"key",
",",
"getattr",
"(",
"self",
".",
"gmpe",
",",
"key",
")",
")"
] | Combines the parameters of the GMPE provided at the construction
level with the ones assigned to the average GMPE. | [
"Combines",
"the",
"parameters",
"of",
"the",
"GMPE",
"provided",
"at",
"the",
"construction",
"level",
"with",
"the",
"ones",
"assigned",
"to",
"the",
"average",
"GMPE",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/mgmpe/generic_gmpe_avgsa.py#L87-L97 |
203 | gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.from_points_list | def from_points_list(cls, points):
"""
Create a mesh object from a collection of points.
:param point:
List of :class:`~openquake.hazardlib.geo.point.Point` objects.
:returns:
An instance of :class:`Mesh` with one-dimensional arrays
of coordinates from ``points``.
"""
lons = numpy.zeros(len(points), dtype=float)
lats = lons.copy()
depths = lons.copy()
for i in range(len(points)):
lons[i] = points[i].longitude
lats[i] = points[i].latitude
depths[i] = points[i].depth
if not depths.any():
# all points have zero depth, no need to waste memory
depths = None
return cls(lons, lats, depths) | python | def from_points_list(cls, points):
"""
Create a mesh object from a collection of points.
:param point:
List of :class:`~openquake.hazardlib.geo.point.Point` objects.
:returns:
An instance of :class:`Mesh` with one-dimensional arrays
of coordinates from ``points``.
"""
lons = numpy.zeros(len(points), dtype=float)
lats = lons.copy()
depths = lons.copy()
for i in range(len(points)):
lons[i] = points[i].longitude
lats[i] = points[i].latitude
depths[i] = points[i].depth
if not depths.any():
# all points have zero depth, no need to waste memory
depths = None
return cls(lons, lats, depths) | [
"def",
"from_points_list",
"(",
"cls",
",",
"points",
")",
":",
"lons",
"=",
"numpy",
".",
"zeros",
"(",
"len",
"(",
"points",
")",
",",
"dtype",
"=",
"float",
")",
"lats",
"=",
"lons",
".",
"copy",
"(",
")",
"depths",
"=",
"lons",
".",
"copy",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"points",
")",
")",
":",
"lons",
"[",
"i",
"]",
"=",
"points",
"[",
"i",
"]",
".",
"longitude",
"lats",
"[",
"i",
"]",
"=",
"points",
"[",
"i",
"]",
".",
"latitude",
"depths",
"[",
"i",
"]",
"=",
"points",
"[",
"i",
"]",
".",
"depth",
"if",
"not",
"depths",
".",
"any",
"(",
")",
":",
"# all points have zero depth, no need to waste memory",
"depths",
"=",
"None",
"return",
"cls",
"(",
"lons",
",",
"lats",
",",
"depths",
")"
] | Create a mesh object from a collection of points.
:param point:
List of :class:`~openquake.hazardlib.geo.point.Point` objects.
:returns:
An instance of :class:`Mesh` with one-dimensional arrays
of coordinates from ``points``. | [
"Create",
"a",
"mesh",
"object",
"from",
"a",
"collection",
"of",
"points",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L134-L154 |
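A short usage sketch for `Mesh.from_points_list` (record 203), assuming the `Point` class from `openquake.hazardlib.geo.point` referenced in the docstring takes longitude, latitude and an optional depth:

```python
# Sketch only: Point(longitude, latitude, depth=0.0) is assumed per the
# docstring reference to openquake.hazardlib.geo.point.Point.
from openquake.hazardlib.geo.point import Point
from openquake.hazardlib.geo.mesh import Mesh

mesh = Mesh.from_points_list([Point(0.0, 0.0), Point(0.1, 0.0), Point(0.1, 0.1)])
print(mesh.lons)    # array([0. , 0.1, 0.1])
print(mesh.depths)  # None: every depth is zero, so no depth array is kept
```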
204 | gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.get_min_distance | def get_min_distance(self, mesh):
"""
Compute and return the minimum distance from the mesh to each point
in another mesh.
:returns:
numpy array of distances in km of shape (self.size, mesh.size)
Method doesn't make any assumptions on arrangement of the points
in either mesh and instead calculates the distance from each point of
this mesh to each point of the target mesh and returns the lowest found
for each.
"""
return cdist(self.xyz, mesh.xyz).min(axis=0) | python | def get_min_distance(self, mesh):
"""
Compute and return the minimum distance from the mesh to each point
in another mesh.
:returns:
numpy array of distances in km of shape (self.size, mesh.size)
Method doesn't make any assumptions on arrangement of the points
in either mesh and instead calculates the distance from each point of
this mesh to each point of the target mesh and returns the lowest found
for each.
"""
return cdist(self.xyz, mesh.xyz).min(axis=0) | [
"def",
"get_min_distance",
"(",
"self",
",",
"mesh",
")",
":",
"return",
"cdist",
"(",
"self",
".",
"xyz",
",",
"mesh",
".",
"xyz",
")",
".",
"min",
"(",
"axis",
"=",
"0",
")"
] | Compute and return the minimum distance from the mesh to each point
in another mesh.
:returns:
numpy array of distances in km of shape (self.size, mesh.size)
Method doesn't make any assumptions on arrangement of the points
in either mesh and instead calculates the distance from each point of
this mesh to each point of the target mesh and returns the lowest found
for each. | [
"Compute",
"and",
"return",
"the",
"minimum",
"distance",
"from",
"the",
"mesh",
"to",
"each",
"point",
"in",
"another",
"mesh",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L236-L249 |
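The heart of `get_min_distance` (record 204) is a single `scipy.spatial.distance.cdist` call followed by a minimum over this mesh's points; an illustrative sketch of the same axis semantics on plain 2-D coordinates (the real method uses the meshes' Cartesian `xyz` arrays):

```python
# Illustrative only: plain 2-D points stand in for the meshes' xyz arrays.
import numpy
from scipy.spatial.distance import cdist

this_mesh = numpy.array([[0.0, 0.0], [1.0, 0.0], [2.0, 0.0]])  # "self" points
other = numpy.array([[0.2, 0.1], [1.9, -0.1]])                 # target points

dists = cdist(this_mesh, other)   # shape (self.size, mesh.size)
print(dists.min(axis=0))          # one minimum distance per target point
```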
205 | gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.get_closest_points | def get_closest_points(self, mesh):
"""
Find closest point of this mesh for each point in the other mesh
:returns:
:class:`Mesh` object of the same shape as `mesh` with closest
points from this one at respective indices.
"""
min_idx = cdist(self.xyz, mesh.xyz).argmin(axis=0) # lose shape
if hasattr(mesh, 'shape'):
min_idx = min_idx.reshape(mesh.shape)
lons = self.lons.take(min_idx)
lats = self.lats.take(min_idx)
deps = self.depths.take(min_idx)
return Mesh(lons, lats, deps) | python | def get_closest_points(self, mesh):
"""
Find closest point of this mesh for each point in the other mesh
:returns:
:class:`Mesh` object of the same shape as `mesh` with closest
points from this one at respective indices.
"""
min_idx = cdist(self.xyz, mesh.xyz).argmin(axis=0) # lose shape
if hasattr(mesh, 'shape'):
min_idx = min_idx.reshape(mesh.shape)
lons = self.lons.take(min_idx)
lats = self.lats.take(min_idx)
deps = self.depths.take(min_idx)
return Mesh(lons, lats, deps) | [
"def",
"get_closest_points",
"(",
"self",
",",
"mesh",
")",
":",
"min_idx",
"=",
"cdist",
"(",
"self",
".",
"xyz",
",",
"mesh",
".",
"xyz",
")",
".",
"argmin",
"(",
"axis",
"=",
"0",
")",
"# lose shape",
"if",
"hasattr",
"(",
"mesh",
",",
"'shape'",
")",
":",
"min_idx",
"=",
"min_idx",
".",
"reshape",
"(",
"mesh",
".",
"shape",
")",
"lons",
"=",
"self",
".",
"lons",
".",
"take",
"(",
"min_idx",
")",
"lats",
"=",
"self",
".",
"lats",
".",
"take",
"(",
"min_idx",
")",
"deps",
"=",
"self",
".",
"depths",
".",
"take",
"(",
"min_idx",
")",
"return",
"Mesh",
"(",
"lons",
",",
"lats",
",",
"deps",
")"
] | Find closest point of this mesh for each point in the other mesh
:returns:
:class:`Mesh` object of the same shape as `mesh` with closest
points from this one at respective indices. | [
"Find",
"closest",
"point",
"of",
"this",
"mesh",
"for",
"each",
"point",
"in",
"the",
"other",
"mesh"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L251-L265 |
206 | gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.get_distance_matrix | def get_distance_matrix(self):
"""
Compute and return distances between each pairs of points in the mesh.
This method requires that the coordinate arrays are one-dimensional.
NB: the depth of the points is ignored
.. warning::
Because of its quadratic space and time complexity this method
is safe to use for meshes of up to several thousand points. For
mesh of 10k points it needs ~800 Mb for just the resulting matrix
and four times that much for intermediate storage.
:returns:
Two-dimensional numpy array, square matrix of distances. The matrix
has zeros on main diagonal and positive distances in kilometers
on all other cells. That is, value in cell (3, 5) is the distance
between mesh's points 3 and 5 in km, and it is equal to value
in cell (5, 3).
Uses :func:`openquake.hazardlib.geo.geodetic.geodetic_distance`.
"""
assert self.lons.ndim == 1
distances = geodetic.geodetic_distance(
self.lons.reshape(self.lons.shape + (1, )),
self.lats.reshape(self.lats.shape + (1, )),
self.lons,
self.lats)
return numpy.matrix(distances, copy=False) | python | def get_distance_matrix(self):
"""
Compute and return distances between each pairs of points in the mesh.
This method requires that the coordinate arrays are one-dimensional.
NB: the depth of the points is ignored
.. warning::
Because of its quadratic space and time complexity this method
is safe to use for meshes of up to several thousand points. For
mesh of 10k points it needs ~800 Mb for just the resulting matrix
and four times that much for intermediate storage.
:returns:
Two-dimensional numpy array, square matrix of distances. The matrix
has zeros on main diagonal and positive distances in kilometers
on all other cells. That is, value in cell (3, 5) is the distance
between mesh's points 3 and 5 in km, and it is equal to value
in cell (5, 3).
Uses :func:`openquake.hazardlib.geo.geodetic.geodetic_distance`.
"""
assert self.lons.ndim == 1
distances = geodetic.geodetic_distance(
self.lons.reshape(self.lons.shape + (1, )),
self.lats.reshape(self.lats.shape + (1, )),
self.lons,
self.lats)
return numpy.matrix(distances, copy=False) | [
"def",
"get_distance_matrix",
"(",
"self",
")",
":",
"assert",
"self",
".",
"lons",
".",
"ndim",
"==",
"1",
"distances",
"=",
"geodetic",
".",
"geodetic_distance",
"(",
"self",
".",
"lons",
".",
"reshape",
"(",
"self",
".",
"lons",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
",",
"self",
".",
"lats",
".",
"reshape",
"(",
"self",
".",
"lats",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
",",
"self",
".",
"lons",
",",
"self",
".",
"lats",
")",
"return",
"numpy",
".",
"matrix",
"(",
"distances",
",",
"copy",
"=",
"False",
")"
] | Compute and return distances between each pairs of points in the mesh.
This method requires that the coordinate arrays are one-dimensional.
NB: the depth of the points is ignored
.. warning::
Because of its quadratic space and time complexity this method
is safe to use for meshes of up to several thousand points. For
mesh of 10k points it needs ~800 Mb for just the resulting matrix
and four times that much for intermediate storage.
:returns:
Two-dimensional numpy array, square matrix of distances. The matrix
has zeros on main diagonal and positive distances in kilometers
on all other cells. That is, value in cell (3, 5) is the distance
between mesh's points 3 and 5 in km, and it is equal to value
in cell (5, 3).
Uses :func:`openquake.hazardlib.geo.geodetic.geodetic_distance`. | [
"Compute",
"and",
"return",
"distances",
"between",
"each",
"pairs",
"of",
"points",
"in",
"the",
"mesh",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L267-L295 |
207 | gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh._get_proj_convex_hull | def _get_proj_convex_hull(self):
"""
Create a projection centered in the center of this mesh and define
a convex polygon in that projection, enveloping all the points
of the mesh.
:returns:
Tuple of two items: projection function and shapely 2d polygon.
Note that the result geometry can be line or point depending
on number of points in the mesh and their arrangement.
"""
# create a projection centered in the center of points collection
proj = geo_utils.OrthographicProjection(
*geo_utils.get_spherical_bounding_box(self.lons, self.lats))
# project all the points and create a shapely multipoint object.
# need to copy an array because otherwise shapely misinterprets it
coords = numpy.transpose(proj(self.lons.flat, self.lats.flat)).copy()
multipoint = shapely.geometry.MultiPoint(coords)
# create a 2d polygon from a convex hull around that multipoint
return proj, multipoint.convex_hull | python | def _get_proj_convex_hull(self):
"""
Create a projection centered in the center of this mesh and define
a convex polygon in that projection, enveloping all the points
of the mesh.
:returns:
Tuple of two items: projection function and shapely 2d polygon.
Note that the result geometry can be line or point depending
on number of points in the mesh and their arrangement.
"""
# create a projection centered in the center of points collection
proj = geo_utils.OrthographicProjection(
*geo_utils.get_spherical_bounding_box(self.lons, self.lats))
# project all the points and create a shapely multipoint object.
# need to copy an array because otherwise shapely misinterprets it
coords = numpy.transpose(proj(self.lons.flat, self.lats.flat)).copy()
multipoint = shapely.geometry.MultiPoint(coords)
# create a 2d polygon from a convex hull around that multipoint
return proj, multipoint.convex_hull | [
"def",
"_get_proj_convex_hull",
"(",
"self",
")",
":",
"# create a projection centered in the center of points collection",
"proj",
"=",
"geo_utils",
".",
"OrthographicProjection",
"(",
"*",
"geo_utils",
".",
"get_spherical_bounding_box",
"(",
"self",
".",
"lons",
",",
"self",
".",
"lats",
")",
")",
"# project all the points and create a shapely multipoint object.",
"# need to copy an array because otherwise shapely misinterprets it",
"coords",
"=",
"numpy",
".",
"transpose",
"(",
"proj",
"(",
"self",
".",
"lons",
".",
"flat",
",",
"self",
".",
"lats",
".",
"flat",
")",
")",
".",
"copy",
"(",
")",
"multipoint",
"=",
"shapely",
".",
"geometry",
".",
"MultiPoint",
"(",
"coords",
")",
"# create a 2d polygon from a convex hull around that multipoint",
"return",
"proj",
",",
"multipoint",
".",
"convex_hull"
] | Create a projection centered in the center of this mesh and define
a convex polygon in that projection, enveloping all the points
of the mesh.
:returns:
Tuple of two items: projection function and shapely 2d polygon.
Note that the result geometry can be line or point depending
on number of points in the mesh and their arrangement. | [
"Create",
"a",
"projection",
"centered",
"in",
"the",
"center",
"of",
"this",
"mesh",
"and",
"define",
"a",
"convex",
"polygon",
"in",
"that",
"projection",
"enveloping",
"all",
"the",
"points",
"of",
"the",
"mesh",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L297-L317 |
208 | gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.get_joyner_boore_distance | def get_joyner_boore_distance(self, mesh):
"""
Compute and return Joyner-Boore distance to each point of ``mesh``.
Point's depth is ignored.
See
:meth:`openquake.hazardlib.geo.surface.base.BaseSurface.get_joyner_boore_distance`
for definition of this distance.
:returns:
numpy array of distances in km of the same shape as ``mesh``.
Distance value is considered to be zero if a point
lies inside the polygon enveloping the projection of the mesh
or on one of its edges.
"""
# we perform a hybrid calculation (geodetic mesh-to-mesh distance
# and distance on the projection plane for close points). first,
# we find the closest geodetic distance for each point of target
# mesh to this one. in general that distance is greater than
# the exact distance to enclosing polygon of this mesh and it
# depends on mesh spacing. but the difference can be neglected
# if calculated geodetic distance is over some threshold.
# get the highest slice from the 3D mesh
distances = geodetic.min_geodetic_distance(
(self.lons, self.lats), (mesh.lons, mesh.lats))
# here we find the points for which calculated mesh-to-mesh
# distance is below a threshold. this threshold is arbitrary:
# lower values increase the maximum possible error, higher
# values reduce the efficiency of that filtering. the maximum
# error is equal to the maximum difference between a distance
# from site to two adjacent points of the mesh and distance
# from site to the line connecting them. thus the error is
# a function of distance threshold and mesh spacing. the error
# is maximum when the site lies on a perpendicular to the line
# connecting points of the mesh and that passes the middle
# point between them. the error then can be calculated as
# ``err = trsh - d = trsh - \sqrt(trsh^2 - (ms/2)^2)``, where
# ``trsh`` and ``d`` are distance to mesh points (the one
# we found on the previous step) and distance to the line
# connecting them (the actual distance) and ``ms`` is mesh
# spacing. the threshold of 40 km gives maximum error of 314
# meters for meshes with spacing of 10 km and 5.36 km for
# meshes with spacing of 40 km. if mesh spacing is over
# ``(trsh / \sqrt(2)) * 2`` then points lying in the middle
# of mesh cells (that is inside the polygon) will be filtered
# out by the threshold and have positive distance instead of 0.
# so for threshold of 40 km mesh spacing should not be more
# than 56 km (typical values are 5 to 10 km).
idxs = (distances < 40).nonzero()[0] # indices on the first dimension
if not len(idxs):
# no point is close enough, return distances as they are
return distances
# for all the points that are closer than the threshold we need
# to recalculate the distance and set it to zero, if point falls
# inside the enclosing polygon of the mesh. for doing that we
# project both this mesh and the points of the second mesh--selected
# by distance threshold--to the same Cartesian space, define
# minimum shapely polygon enclosing the mesh and calculate point
# to polygon distance, which gives the most accurate value
# of distance in km (and that value is zero for points inside
# the polygon).
proj, polygon = self._get_proj_enclosing_polygon()
if not isinstance(polygon, shapely.geometry.Polygon):
# either line or point is our enclosing polygon. draw
# a square with side of 10 m around in order to have
# a proper polygon instead.
polygon = polygon.buffer(self.DIST_TOLERANCE, 1)
mesh_xx, mesh_yy = proj(mesh.lons[idxs], mesh.lats[idxs])
# replace geodetic distance values for points-closer-than-the-threshold
# by more accurate point-to-polygon distance values.
distances[idxs] = geo_utils.point_to_polygon_distance(
polygon, mesh_xx, mesh_yy)
return distances | python | def get_joyner_boore_distance(self, mesh):
"""
Compute and return Joyner-Boore distance to each point of ``mesh``.
Point's depth is ignored.
See
:meth:`openquake.hazardlib.geo.surface.base.BaseSurface.get_joyner_boore_distance`
for definition of this distance.
:returns:
numpy array of distances in km of the same shape as ``mesh``.
Distance value is considered to be zero if a point
lies inside the polygon enveloping the projection of the mesh
or on one of its edges.
"""
# we perform a hybrid calculation (geodetic mesh-to-mesh distance
# and distance on the projection plane for close points). first,
# we find the closest geodetic distance for each point of target
# mesh to this one. in general that distance is greater than
# the exact distance to enclosing polygon of this mesh and it
# depends on mesh spacing. but the difference can be neglected
# if calculated geodetic distance is over some threshold.
# get the highest slice from the 3D mesh
distances = geodetic.min_geodetic_distance(
(self.lons, self.lats), (mesh.lons, mesh.lats))
# here we find the points for which calculated mesh-to-mesh
# distance is below a threshold. this threshold is arbitrary:
# lower values increase the maximum possible error, higher
# values reduce the efficiency of that filtering. the maximum
# error is equal to the maximum difference between a distance
# from site to two adjacent points of the mesh and distance
# from site to the line connecting them. thus the error is
# a function of distance threshold and mesh spacing. the error
# is maximum when the site lies on a perpendicular to the line
# connecting points of the mesh and that passes the middle
# point between them. the error then can be calculated as
# ``err = trsh - d = trsh - \sqrt(trsh^2 - (ms/2)^2)``, where
# ``trsh`` and ``d`` are distance to mesh points (the one
# we found on the previous step) and distance to the line
# connecting them (the actual distance) and ``ms`` is mesh
# spacing. the threshold of 40 km gives maximum error of 314
# meters for meshes with spacing of 10 km and 5.36 km for
# meshes with spacing of 40 km. if mesh spacing is over
# ``(trsh / \sqrt(2)) * 2`` then points lying in the middle
# of mesh cells (that is inside the polygon) will be filtered
# out by the threshold and have positive distance instead of 0.
# so for threshold of 40 km mesh spacing should not be more
# than 56 km (typical values are 5 to 10 km).
idxs = (distances < 40).nonzero()[0] # indices on the first dimension
if not len(idxs):
# no point is close enough, return distances as they are
return distances
# for all the points that are closer than the threshold we need
# to recalculate the distance and set it to zero, if point falls
# inside the enclosing polygon of the mesh. for doing that we
# project both this mesh and the points of the second mesh--selected
# by distance threshold--to the same Cartesian space, define
# minimum shapely polygon enclosing the mesh and calculate point
# to polygon distance, which gives the most accurate value
# of distance in km (and that value is zero for points inside
# the polygon).
proj, polygon = self._get_proj_enclosing_polygon()
if not isinstance(polygon, shapely.geometry.Polygon):
# either line or point is our enclosing polygon. draw
# a square with side of 10 m around in order to have
# a proper polygon instead.
polygon = polygon.buffer(self.DIST_TOLERANCE, 1)
mesh_xx, mesh_yy = proj(mesh.lons[idxs], mesh.lats[idxs])
# replace geodetic distance values for points-closer-than-the-threshold
# by more accurate point-to-polygon distance values.
distances[idxs] = geo_utils.point_to_polygon_distance(
polygon, mesh_xx, mesh_yy)
return distances | [
"def",
"get_joyner_boore_distance",
"(",
"self",
",",
"mesh",
")",
":",
"# we perform a hybrid calculation (geodetic mesh-to-mesh distance",
"# and distance on the projection plane for close points). first,",
"# we find the closest geodetic distance for each point of target",
"# mesh to this one. in general that distance is greater than",
"# the exact distance to enclosing polygon of this mesh and it",
"# depends on mesh spacing. but the difference can be neglected",
"# if calculated geodetic distance is over some threshold.",
"# get the highest slice from the 3D mesh",
"distances",
"=",
"geodetic",
".",
"min_geodetic_distance",
"(",
"(",
"self",
".",
"lons",
",",
"self",
".",
"lats",
")",
",",
"(",
"mesh",
".",
"lons",
",",
"mesh",
".",
"lats",
")",
")",
"# here we find the points for which calculated mesh-to-mesh",
"# distance is below a threshold. this threshold is arbitrary:",
"# lower values increase the maximum possible error, higher",
"# values reduce the efficiency of that filtering. the maximum",
"# error is equal to the maximum difference between a distance",
"# from site to two adjacent points of the mesh and distance",
"# from site to the line connecting them. thus the error is",
"# a function of distance threshold and mesh spacing. the error",
"# is maximum when the site lies on a perpendicular to the line",
"# connecting points of the mesh and that passes the middle",
"# point between them. the error then can be calculated as",
"# ``err = trsh - d = trsh - \\sqrt(trsh^2 - (ms/2)^2)``, where",
"# ``trsh`` and ``d`` are distance to mesh points (the one",
"# we found on the previous step) and distance to the line",
"# connecting them (the actual distance) and ``ms`` is mesh",
"# spacing. the threshold of 40 km gives maximum error of 314",
"# meters for meshes with spacing of 10 km and 5.36 km for",
"# meshes with spacing of 40 km. if mesh spacing is over",
"# ``(trsh / \\sqrt(2)) * 2`` then points lying in the middle",
"# of mesh cells (that is inside the polygon) will be filtered",
"# out by the threshold and have positive distance instead of 0.",
"# so for threshold of 40 km mesh spacing should not be more",
"# than 56 km (typical values are 5 to 10 km).",
"idxs",
"=",
"(",
"distances",
"<",
"40",
")",
".",
"nonzero",
"(",
")",
"[",
"0",
"]",
"# indices on the first dimension",
"if",
"not",
"len",
"(",
"idxs",
")",
":",
"# no point is close enough, return distances as they are",
"return",
"distances",
"# for all the points that are closer than the threshold we need",
"# to recalculate the distance and set it to zero, if point falls",
"# inside the enclosing polygon of the mesh. for doing that we",
"# project both this mesh and the points of the second mesh--selected",
"# by distance threshold--to the same Cartesian space, define",
"# minimum shapely polygon enclosing the mesh and calculate point",
"# to polygon distance, which gives the most accurate value",
"# of distance in km (and that value is zero for points inside",
"# the polygon).",
"proj",
",",
"polygon",
"=",
"self",
".",
"_get_proj_enclosing_polygon",
"(",
")",
"if",
"not",
"isinstance",
"(",
"polygon",
",",
"shapely",
".",
"geometry",
".",
"Polygon",
")",
":",
"# either line or point is our enclosing polygon. draw",
"# a square with side of 10 m around in order to have",
"# a proper polygon instead.",
"polygon",
"=",
"polygon",
".",
"buffer",
"(",
"self",
".",
"DIST_TOLERANCE",
",",
"1",
")",
"mesh_xx",
",",
"mesh_yy",
"=",
"proj",
"(",
"mesh",
".",
"lons",
"[",
"idxs",
"]",
",",
"mesh",
".",
"lats",
"[",
"idxs",
"]",
")",
"# replace geodetic distance values for points-closer-than-the-threshold",
"# by more accurate point-to-polygon distance values.",
"distances",
"[",
"idxs",
"]",
"=",
"geo_utils",
".",
"point_to_polygon_distance",
"(",
"polygon",
",",
"mesh_xx",
",",
"mesh_yy",
")",
"return",
"distances"
] | Compute and return Joyner-Boore distance to each point of ``mesh``.
Point's depth is ignored.
See
:meth:`openquake.hazardlib.geo.surface.base.BaseSurface.get_joyner_boore_distance`
for definition of this distance.
:returns:
numpy array of distances in km of the same shape as ``mesh``.
Distance value is considered to be zero if a point
lies inside the polygon enveloping the projection of the mesh
or on one of its edges. | [
"Compute",
"and",
"return",
"Joyner",
"-",
"Boore",
"distance",
"to",
"each",
"point",
"of",
"mesh",
".",
"Point",
"s",
"depth",
"is",
"ignored",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L319-L393 |
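The error bound quoted in the comments of `get_joyner_boore_distance` (record 208), `err = trsh - sqrt(trsh^2 - (ms/2)^2)`, is easy to verify numerically for the hard-coded 40 km threshold:

```python
# Numerical check of the distance-threshold error bound from the comments above.
from math import sqrt

def max_error(threshold_km, mesh_spacing_km):
    return threshold_km - sqrt(threshold_km ** 2 - (mesh_spacing_km / 2) ** 2)

print(max_error(40, 10))   # ~0.314 km, the "314 meters" quoted for 10 km spacing
print(max_error(40, 40))   # ~5.36 km for 40 km spacing
print(2 * 40 / sqrt(2))    # ~56.6 km, the largest mesh spacing the filter tolerates
```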
209 | gem/oq-engine | openquake/hazardlib/geo/mesh.py | Mesh.get_convex_hull | def get_convex_hull(self):
"""
Get a convex polygon object that contains projections of all the points
of the mesh.
:returns:
Instance of :class:`openquake.hazardlib.geo.polygon.Polygon` that
is a convex hull around all the points in this mesh. If the
original mesh had only one point, the resulting polygon has a
square shape with a side length of 10 meters. If there were only
two points, resulting polygon is a stripe 10 meters wide.
"""
proj, polygon2d = self._get_proj_convex_hull()
# if mesh had only one point, the convex hull is a point. if there
# were two, it is a line string. we need to return a convex polygon
# object, so extend that area-less geometries by some arbitrarily
# small distance.
if isinstance(polygon2d, (shapely.geometry.LineString,
shapely.geometry.Point)):
polygon2d = polygon2d.buffer(self.DIST_TOLERANCE, 1)
# avoid circular imports
from openquake.hazardlib.geo.polygon import Polygon
return Polygon._from_2d(polygon2d, proj) | python | def get_convex_hull(self):
"""
Get a convex polygon object that contains projections of all the points
of the mesh.
:returns:
Instance of :class:`openquake.hazardlib.geo.polygon.Polygon` that
is a convex hull around all the points in this mesh. If the
original mesh had only one point, the resulting polygon has a
square shape with a side length of 10 meters. If there were only
two points, resulting polygon is a stripe 10 meters wide.
"""
proj, polygon2d = self._get_proj_convex_hull()
# if mesh had only one point, the convex hull is a point. if there
# were two, it is a line string. we need to return a convex polygon
# object, so extend that area-less geometries by some arbitrarily
# small distance.
if isinstance(polygon2d, (shapely.geometry.LineString,
shapely.geometry.Point)):
polygon2d = polygon2d.buffer(self.DIST_TOLERANCE, 1)
# avoid circular imports
from openquake.hazardlib.geo.polygon import Polygon
return Polygon._from_2d(polygon2d, proj) | [
"def",
"get_convex_hull",
"(",
"self",
")",
":",
"proj",
",",
"polygon2d",
"=",
"self",
".",
"_get_proj_convex_hull",
"(",
")",
"# if mesh had only one point, the convex hull is a point. if there",
"# were two, it is a line string. we need to return a convex polygon",
"# object, so extend that area-less geometries by some arbitrarily",
"# small distance.",
"if",
"isinstance",
"(",
"polygon2d",
",",
"(",
"shapely",
".",
"geometry",
".",
"LineString",
",",
"shapely",
".",
"geometry",
".",
"Point",
")",
")",
":",
"polygon2d",
"=",
"polygon2d",
".",
"buffer",
"(",
"self",
".",
"DIST_TOLERANCE",
",",
"1",
")",
"# avoid circular imports",
"from",
"openquake",
".",
"hazardlib",
".",
"geo",
".",
"polygon",
"import",
"Polygon",
"return",
"Polygon",
".",
"_from_2d",
"(",
"polygon2d",
",",
"proj",
")"
] | Get a convex polygon object that contains projections of all the points
of the mesh.
:returns:
Instance of :class:`openquake.hazardlib.geo.polygon.Polygon` that
is a convex hull around all the points in this mesh. If the
original mesh had only one point, the resulting polygon has a
square shape with a side length of 10 meters. If there were only
two points, resulting polygon is a stripe 10 meters wide. | [
"Get",
"a",
"convex",
"polygon",
"object",
"that",
"contains",
"projections",
"of",
"all",
"the",
"points",
"of",
"the",
"mesh",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L457-L480 |
210 | gem/oq-engine | openquake/hazardlib/geo/mesh.py | RectangularMesh.from_points_list | def from_points_list(cls, points):
"""
Create a rectangular mesh object from a list of lists of points.
Lists in a list are supposed to have the same length.
:param point:
List of lists of :class:`~openquake.hazardlib.geo.point.Point`
objects.
"""
assert points is not None and len(points) > 0 and len(points[0]) > 0, \
'list of at least one non-empty list of points is required'
lons = numpy.zeros((len(points), len(points[0])), dtype=float)
lats = lons.copy()
depths = lons.copy()
num_cols = len(points[0])
for i, row in enumerate(points):
assert len(row) == num_cols, \
'lists of points are not of uniform length'
for j, point in enumerate(row):
lons[i, j] = point.longitude
lats[i, j] = point.latitude
depths[i, j] = point.depth
if not depths.any():
depths = None
return cls(lons, lats, depths) | python | def from_points_list(cls, points):
"""
Create a rectangular mesh object from a list of lists of points.
Lists in a list are supposed to have the same length.
:param point:
List of lists of :class:`~openquake.hazardlib.geo.point.Point`
objects.
"""
assert points is not None and len(points) > 0 and len(points[0]) > 0, \
'list of at least one non-empty list of points is required'
lons = numpy.zeros((len(points), len(points[0])), dtype=float)
lats = lons.copy()
depths = lons.copy()
num_cols = len(points[0])
for i, row in enumerate(points):
assert len(row) == num_cols, \
'lists of points are not of uniform length'
for j, point in enumerate(row):
lons[i, j] = point.longitude
lats[i, j] = point.latitude
depths[i, j] = point.depth
if not depths.any():
depths = None
return cls(lons, lats, depths) | [
"def",
"from_points_list",
"(",
"cls",
",",
"points",
")",
":",
"assert",
"points",
"is",
"not",
"None",
"and",
"len",
"(",
"points",
")",
">",
"0",
"and",
"len",
"(",
"points",
"[",
"0",
"]",
")",
">",
"0",
",",
"'list of at least one non-empty list of points is required'",
"lons",
"=",
"numpy",
".",
"zeros",
"(",
"(",
"len",
"(",
"points",
")",
",",
"len",
"(",
"points",
"[",
"0",
"]",
")",
")",
",",
"dtype",
"=",
"float",
")",
"lats",
"=",
"lons",
".",
"copy",
"(",
")",
"depths",
"=",
"lons",
".",
"copy",
"(",
")",
"num_cols",
"=",
"len",
"(",
"points",
"[",
"0",
"]",
")",
"for",
"i",
",",
"row",
"in",
"enumerate",
"(",
"points",
")",
":",
"assert",
"len",
"(",
"row",
")",
"==",
"num_cols",
",",
"'lists of points are not of uniform length'",
"for",
"j",
",",
"point",
"in",
"enumerate",
"(",
"row",
")",
":",
"lons",
"[",
"i",
",",
"j",
"]",
"=",
"point",
".",
"longitude",
"lats",
"[",
"i",
",",
"j",
"]",
"=",
"point",
".",
"latitude",
"depths",
"[",
"i",
",",
"j",
"]",
"=",
"point",
".",
"depth",
"if",
"not",
"depths",
".",
"any",
"(",
")",
":",
"depths",
"=",
"None",
"return",
"cls",
"(",
"lons",
",",
"lats",
",",
"depths",
")"
] | Create a rectangular mesh object from a list of lists of points.
Lists in a list are supposed to have the same length.
:param point:
List of lists of :class:`~openquake.hazardlib.geo.point.Point`
objects. | [
"Create",
"a",
"rectangular",
"mesh",
"object",
"from",
"a",
"list",
"of",
"lists",
"of",
"points",
".",
"Lists",
"in",
"a",
"list",
"are",
"supposed",
"to",
"have",
"the",
"same",
"length",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L497-L521 |
211 | gem/oq-engine | openquake/hazardlib/geo/mesh.py | RectangularMesh.get_middle_point | def get_middle_point(self):
"""
Return the middle point of the mesh.
:returns:
An instance of :class:`~openquake.hazardlib.geo.point.Point`.
The middle point is taken from the middle row and a middle column
of the mesh if there are odd number of both. Otherwise the geometric
mean point of two or four middle points.
"""
num_rows, num_cols = self.lons.shape
mid_row = num_rows // 2
depth = 0
if num_rows & 1 == 1:
# there are odd number of rows
mid_col = num_cols // 2
if num_cols & 1 == 1:
# odd number of columns, we can easily take
# the middle point
depth = self.depths[mid_row, mid_col]
return Point(self.lons[mid_row, mid_col],
self.lats[mid_row, mid_col], depth)
else:
# even number of columns, need to take two middle
# points on the middle row
lon1, lon2 = self.lons[mid_row, mid_col - 1: mid_col + 1]
lat1, lat2 = self.lats[mid_row, mid_col - 1: mid_col + 1]
depth1 = self.depths[mid_row, mid_col - 1]
depth2 = self.depths[mid_row, mid_col]
else:
# there are even number of rows. take the row just above
# and the one just below the middle and find middle point
# of each
submesh1 = self[mid_row - 1: mid_row]
submesh2 = self[mid_row: mid_row + 1]
p1, p2 = submesh1.get_middle_point(), submesh2.get_middle_point()
lon1, lat1, depth1 = p1.longitude, p1.latitude, p1.depth
lon2, lat2, depth2 = p2.longitude, p2.latitude, p2.depth
# we need to find the middle between two points
depth = (depth1 + depth2) / 2.0
lon, lat = geo_utils.get_middle_point(lon1, lat1, lon2, lat2)
return Point(lon, lat, depth) | python | def get_middle_point(self):
"""
Return the middle point of the mesh.
:returns:
An instance of :class:`~openquake.hazardlib.geo.point.Point`.
The middle point is taken from the middle row and a middle column
of the mesh if there are odd number of both. Otherwise the geometric
mean point of two or four middle points.
"""
num_rows, num_cols = self.lons.shape
mid_row = num_rows // 2
depth = 0
if num_rows & 1 == 1:
# there are odd number of rows
mid_col = num_cols // 2
if num_cols & 1 == 1:
# odd number of columns, we can easily take
# the middle point
depth = self.depths[mid_row, mid_col]
return Point(self.lons[mid_row, mid_col],
self.lats[mid_row, mid_col], depth)
else:
# even number of columns, need to take two middle
# points on the middle row
lon1, lon2 = self.lons[mid_row, mid_col - 1: mid_col + 1]
lat1, lat2 = self.lats[mid_row, mid_col - 1: mid_col + 1]
depth1 = self.depths[mid_row, mid_col - 1]
depth2 = self.depths[mid_row, mid_col]
else:
# there are even number of rows. take the row just above
# and the one just below the middle and find middle point
# of each
submesh1 = self[mid_row - 1: mid_row]
submesh2 = self[mid_row: mid_row + 1]
p1, p2 = submesh1.get_middle_point(), submesh2.get_middle_point()
lon1, lat1, depth1 = p1.longitude, p1.latitude, p1.depth
lon2, lat2, depth2 = p2.longitude, p2.latitude, p2.depth
# we need to find the middle between two points
depth = (depth1 + depth2) / 2.0
lon, lat = geo_utils.get_middle_point(lon1, lat1, lon2, lat2)
return Point(lon, lat, depth) | [
"def",
"get_middle_point",
"(",
"self",
")",
":",
"num_rows",
",",
"num_cols",
"=",
"self",
".",
"lons",
".",
"shape",
"mid_row",
"=",
"num_rows",
"//",
"2",
"depth",
"=",
"0",
"if",
"num_rows",
"&",
"1",
"==",
"1",
":",
"# there are odd number of rows",
"mid_col",
"=",
"num_cols",
"//",
"2",
"if",
"num_cols",
"&",
"1",
"==",
"1",
":",
"# odd number of columns, we can easily take",
"# the middle point",
"depth",
"=",
"self",
".",
"depths",
"[",
"mid_row",
",",
"mid_col",
"]",
"return",
"Point",
"(",
"self",
".",
"lons",
"[",
"mid_row",
",",
"mid_col",
"]",
",",
"self",
".",
"lats",
"[",
"mid_row",
",",
"mid_col",
"]",
",",
"depth",
")",
"else",
":",
"# even number of columns, need to take two middle",
"# points on the middle row",
"lon1",
",",
"lon2",
"=",
"self",
".",
"lons",
"[",
"mid_row",
",",
"mid_col",
"-",
"1",
":",
"mid_col",
"+",
"1",
"]",
"lat1",
",",
"lat2",
"=",
"self",
".",
"lats",
"[",
"mid_row",
",",
"mid_col",
"-",
"1",
":",
"mid_col",
"+",
"1",
"]",
"depth1",
"=",
"self",
".",
"depths",
"[",
"mid_row",
",",
"mid_col",
"-",
"1",
"]",
"depth2",
"=",
"self",
".",
"depths",
"[",
"mid_row",
",",
"mid_col",
"]",
"else",
":",
"# there are even number of rows. take the row just above",
"# and the one just below the middle and find middle point",
"# of each",
"submesh1",
"=",
"self",
"[",
"mid_row",
"-",
"1",
":",
"mid_row",
"]",
"submesh2",
"=",
"self",
"[",
"mid_row",
":",
"mid_row",
"+",
"1",
"]",
"p1",
",",
"p2",
"=",
"submesh1",
".",
"get_middle_point",
"(",
")",
",",
"submesh2",
".",
"get_middle_point",
"(",
")",
"lon1",
",",
"lat1",
",",
"depth1",
"=",
"p1",
".",
"longitude",
",",
"p1",
".",
"latitude",
",",
"p1",
".",
"depth",
"lon2",
",",
"lat2",
",",
"depth2",
"=",
"p2",
".",
"longitude",
",",
"p2",
".",
"latitude",
",",
"p2",
".",
"depth",
"# we need to find the middle between two points",
"depth",
"=",
"(",
"depth1",
"+",
"depth2",
")",
"/",
"2.0",
"lon",
",",
"lat",
"=",
"geo_utils",
".",
"get_middle_point",
"(",
"lon1",
",",
"lat1",
",",
"lon2",
",",
"lat2",
")",
"return",
"Point",
"(",
"lon",
",",
"lat",
",",
"depth",
")"
] | Return the middle point of the mesh.
:returns:
An instance of :class:`~openquake.hazardlib.geo.point.Point`.
The middle point is taken from the middle row and a middle column
of the mesh if there are odd number of both. Otherwise the geometric
mean point of two or four middle points. | [
"Return",
"the",
"middle",
"point",
"of",
"the",
"mesh",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L523-L566 |
212 | gem/oq-engine | openquake/hazardlib/geo/mesh.py | RectangularMesh.get_cell_dimensions | def get_cell_dimensions(self):
"""
Calculate centroid, width, length and area of each mesh cell.
:returns:
Tuple of four elements, each being 2d numpy array.
Each array has both dimensions less by one the dimensions
of the mesh, since they represent cells, not vertices.
Arrays contain the following cell information:
#. centroids, 3d vectors in a Cartesian space,
#. length (size along row of points) in km,
#. width (size along column of points) in km,
#. area in square km.
"""
points, along_azimuth, updip, diag = self.triangulate()
top = along_azimuth[:-1]
left = updip[:, :-1]
tl_area = geo_utils.triangle_area(top, left, diag)
top_length = numpy.sqrt(numpy.sum(top * top, axis=-1))
left_length = numpy.sqrt(numpy.sum(left * left, axis=-1))
bottom = along_azimuth[1:]
right = updip[:, 1:]
br_area = geo_utils.triangle_area(bottom, right, diag)
bottom_length = numpy.sqrt(numpy.sum(bottom * bottom, axis=-1))
right_length = numpy.sqrt(numpy.sum(right * right, axis=-1))
cell_area = tl_area + br_area
tl_center = (points[:-1, :-1] + points[:-1, 1:] + points[1:, :-1]) / 3
br_center = (points[:-1, 1:] + points[1:, :-1] + points[1:, 1:]) / 3
cell_center = ((tl_center * tl_area.reshape(tl_area.shape + (1, ))
+ br_center * br_area.reshape(br_area.shape + (1, )))
/ cell_area.reshape(cell_area.shape + (1, )))
cell_length = ((top_length * tl_area + bottom_length * br_area)
/ cell_area)
cell_width = ((left_length * tl_area + right_length * br_area)
/ cell_area)
return cell_center, cell_length, cell_width, cell_area | python | def get_cell_dimensions(self):
"""
Calculate centroid, width, length and area of each mesh cell.
:returns:
Tuple of four elements, each being 2d numpy array.
Each array has both dimensions less by one the dimensions
of the mesh, since they represent cells, not vertices.
Arrays contain the following cell information:
#. centroids, 3d vectors in a Cartesian space,
#. length (size along row of points) in km,
#. width (size along column of points) in km,
#. area in square km.
"""
points, along_azimuth, updip, diag = self.triangulate()
top = along_azimuth[:-1]
left = updip[:, :-1]
tl_area = geo_utils.triangle_area(top, left, diag)
top_length = numpy.sqrt(numpy.sum(top * top, axis=-1))
left_length = numpy.sqrt(numpy.sum(left * left, axis=-1))
bottom = along_azimuth[1:]
right = updip[:, 1:]
br_area = geo_utils.triangle_area(bottom, right, diag)
bottom_length = numpy.sqrt(numpy.sum(bottom * bottom, axis=-1))
right_length = numpy.sqrt(numpy.sum(right * right, axis=-1))
cell_area = tl_area + br_area
tl_center = (points[:-1, :-1] + points[:-1, 1:] + points[1:, :-1]) / 3
br_center = (points[:-1, 1:] + points[1:, :-1] + points[1:, 1:]) / 3
cell_center = ((tl_center * tl_area.reshape(tl_area.shape + (1, ))
+ br_center * br_area.reshape(br_area.shape + (1, )))
/ cell_area.reshape(cell_area.shape + (1, )))
cell_length = ((top_length * tl_area + bottom_length * br_area)
/ cell_area)
cell_width = ((left_length * tl_area + right_length * br_area)
/ cell_area)
return cell_center, cell_length, cell_width, cell_area | [
"def",
"get_cell_dimensions",
"(",
"self",
")",
":",
"points",
",",
"along_azimuth",
",",
"updip",
",",
"diag",
"=",
"self",
".",
"triangulate",
"(",
")",
"top",
"=",
"along_azimuth",
"[",
":",
"-",
"1",
"]",
"left",
"=",
"updip",
"[",
":",
",",
":",
"-",
"1",
"]",
"tl_area",
"=",
"geo_utils",
".",
"triangle_area",
"(",
"top",
",",
"left",
",",
"diag",
")",
"top_length",
"=",
"numpy",
".",
"sqrt",
"(",
"numpy",
".",
"sum",
"(",
"top",
"*",
"top",
",",
"axis",
"=",
"-",
"1",
")",
")",
"left_length",
"=",
"numpy",
".",
"sqrt",
"(",
"numpy",
".",
"sum",
"(",
"left",
"*",
"left",
",",
"axis",
"=",
"-",
"1",
")",
")",
"bottom",
"=",
"along_azimuth",
"[",
"1",
":",
"]",
"right",
"=",
"updip",
"[",
":",
",",
"1",
":",
"]",
"br_area",
"=",
"geo_utils",
".",
"triangle_area",
"(",
"bottom",
",",
"right",
",",
"diag",
")",
"bottom_length",
"=",
"numpy",
".",
"sqrt",
"(",
"numpy",
".",
"sum",
"(",
"bottom",
"*",
"bottom",
",",
"axis",
"=",
"-",
"1",
")",
")",
"right_length",
"=",
"numpy",
".",
"sqrt",
"(",
"numpy",
".",
"sum",
"(",
"right",
"*",
"right",
",",
"axis",
"=",
"-",
"1",
")",
")",
"cell_area",
"=",
"tl_area",
"+",
"br_area",
"tl_center",
"=",
"(",
"points",
"[",
":",
"-",
"1",
",",
":",
"-",
"1",
"]",
"+",
"points",
"[",
":",
"-",
"1",
",",
"1",
":",
"]",
"+",
"points",
"[",
"1",
":",
",",
":",
"-",
"1",
"]",
")",
"/",
"3",
"br_center",
"=",
"(",
"points",
"[",
":",
"-",
"1",
",",
"1",
":",
"]",
"+",
"points",
"[",
"1",
":",
",",
":",
"-",
"1",
"]",
"+",
"points",
"[",
"1",
":",
",",
"1",
":",
"]",
")",
"/",
"3",
"cell_center",
"=",
"(",
"(",
"tl_center",
"*",
"tl_area",
".",
"reshape",
"(",
"tl_area",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
"+",
"br_center",
"*",
"br_area",
".",
"reshape",
"(",
"br_area",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
")",
"/",
"cell_area",
".",
"reshape",
"(",
"cell_area",
".",
"shape",
"+",
"(",
"1",
",",
")",
")",
")",
"cell_length",
"=",
"(",
"(",
"top_length",
"*",
"tl_area",
"+",
"bottom_length",
"*",
"br_area",
")",
"/",
"cell_area",
")",
"cell_width",
"=",
"(",
"(",
"left_length",
"*",
"tl_area",
"+",
"right_length",
"*",
"br_area",
")",
"/",
"cell_area",
")",
"return",
"cell_center",
",",
"cell_length",
",",
"cell_width",
",",
"cell_area"
] | Calculate centroid, width, length and area of each mesh cell.
:returns:
Tuple of four elements, each being 2d numpy array.
Each array has both dimensions less by one the dimensions
of the mesh, since they represent cells, not vertices.
Arrays contain the following cell information:
#. centroids, 3d vectors in a Cartesian space,
#. length (size along row of points) in km,
#. width (size along column of points) in km,
#. area in square km. | [
"Calculate",
"centroid",
"width",
"length",
"and",
"area",
"of",
"each",
"mesh",
"cell",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L704-L746 |
213 | gem/oq-engine | openquake/hazardlib/geo/mesh.py | RectangularMesh.triangulate | def triangulate(self):
"""
Convert mesh points to vectors in Cartesian space.
:returns:
Tuple of four elements, each being 2d numpy array of 3d vectors
(the same structure and shape as the mesh itself). Those arrays
are:
#. points vectors,
#. vectors directed from each point (excluding the last column)
to the next one in a same row →,
#. vectors directed from each point (excluding the first row)
to the previous one in a same column ↑,
#. vectors pointing from a bottom left point of each mesh cell
to top right one ↗.
So the last three arrays of vectors allow to construct triangles
covering the whole mesh.
"""
points = geo_utils.spherical_to_cartesian(self.lons, self.lats,
self.depths)
# triangulate the mesh by defining vectors of triangles edges:
# →
along_azimuth = points[:, 1:] - points[:, :-1]
# ↑
updip = points[:-1] - points[1:]
# ↗
diag = points[:-1, 1:] - points[1:, :-1]
return points, along_azimuth, updip, diag | python | def triangulate(self):
"""
Convert mesh points to vectors in Cartesian space.
:returns:
Tuple of four elements, each being 2d numpy array of 3d vectors
(the same structure and shape as the mesh itself). Those arrays
are:
#. points vectors,
#. vectors directed from each point (excluding the last column)
to the next one in a same row →,
#. vectors directed from each point (excluding the first row)
to the previous one in a same column ↑,
#. vectors pointing from a bottom left point of each mesh cell
to top right one ↗.
So the last three arrays of vectors allow to construct triangles
covering the whole mesh.
"""
points = geo_utils.spherical_to_cartesian(self.lons, self.lats,
self.depths)
# triangulate the mesh by defining vectors of triangles edges:
# →
along_azimuth = points[:, 1:] - points[:, :-1]
# ↑
updip = points[:-1] - points[1:]
# ↗
diag = points[:-1, 1:] - points[1:, :-1]
return points, along_azimuth, updip, diag | [
"def",
"triangulate",
"(",
"self",
")",
":",
"points",
"=",
"geo_utils",
".",
"spherical_to_cartesian",
"(",
"self",
".",
"lons",
",",
"self",
".",
"lats",
",",
"self",
".",
"depths",
")",
"# triangulate the mesh by defining vectors of triangles edges:",
"# →",
"along_azimuth",
"=",
"points",
"[",
":",
",",
"1",
":",
"]",
"-",
"points",
"[",
":",
",",
":",
"-",
"1",
"]",
"# ↑",
"updip",
"=",
"points",
"[",
":",
"-",
"1",
"]",
"-",
"points",
"[",
"1",
":",
"]",
"# ↗",
"diag",
"=",
"points",
"[",
":",
"-",
"1",
",",
"1",
":",
"]",
"-",
"points",
"[",
"1",
":",
",",
":",
"-",
"1",
"]",
"return",
"points",
",",
"along_azimuth",
",",
"updip",
",",
"diag"
] | Convert mesh points to vectors in Cartesian space.
:returns:
Tuple of four elements, each being 2d numpy array of 3d vectors
(the same structure and shape as the mesh itself). Those arrays
are:
#. points vectors,
#. vectors directed from each point (excluding the last column)
to the next one in a same row →,
#. vectors directed from each point (excluding the first row)
to the previous one in a same column ↑,
#. vectors pointing from a bottom left point of each mesh cell
to top right one ↗.
So the last three arrays of vectors allow to construct triangles
covering the whole mesh. | [
"Convert",
"mesh",
"points",
"to",
"vectors",
"in",
"Cartesian",
"space",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/mesh.py#L748-L778 |
214 | gem/oq-engine | openquake/hmtk/seismicity/smoothing/kernels/isotropic_gaussian.py | IsotropicGaussian.smooth_data | def smooth_data(self, data, config, is_3d=False):
'''
Applies the smoothing kernel to the data
:param np.ndarray data:
Raw earthquake count in the form [Longitude, Latitude, Depth,
Count]
:param dict config:
Configuration parameters must contain:
* BandWidth: The bandwidth of the kernel (in km) (float)
* Length_Limit: Maximum number of standard deviations
:returns:
* smoothed_value: np.ndarray vector of smoothed values
* Total (summed) rate of the original values
* Total (summed) rate of the smoothed values
'''
max_dist = config['Length_Limit'] * config['BandWidth']
smoothed_value = np.zeros(len(data), dtype=float)
for iloc in range(0, len(data)):
dist_val = haversine(data[:, 0], data[:, 1],
data[iloc, 0], data[iloc, 1])
if is_3d:
dist_val = np.sqrt(dist_val.flatten() ** 2.0 +
(data[:, 2] - data[iloc, 2]) ** 2.0)
id0 = np.where(dist_val <= max_dist)[0]
w_val = (np.exp(-(dist_val[id0] ** 2.0) /
(config['BandWidth'] ** 2.))).flatten()
smoothed_value[iloc] = np.sum(w_val * data[id0, 3]) / np.sum(w_val)
return smoothed_value, np.sum(data[:, -1]), np.sum(smoothed_value) | python | def smooth_data(self, data, config, is_3d=False):
'''
Applies the smoothing kernel to the data
:param np.ndarray data:
Raw earthquake count in the form [Longitude, Latitude, Depth,
Count]
:param dict config:
Configuration parameters must contain:
* BandWidth: The bandwidth of the kernel (in km) (float)
* Length_Limit: Maximum number of standard deviations
:returns:
* smoothed_value: np.ndarray vector of smoothed values
* Total (summed) rate of the original values
* Total (summed) rate of the smoothed values
'''
max_dist = config['Length_Limit'] * config['BandWidth']
smoothed_value = np.zeros(len(data), dtype=float)
for iloc in range(0, len(data)):
dist_val = haversine(data[:, 0], data[:, 1],
data[iloc, 0], data[iloc, 1])
if is_3d:
dist_val = np.sqrt(dist_val.flatten() ** 2.0 +
(data[:, 2] - data[iloc, 2]) ** 2.0)
id0 = np.where(dist_val <= max_dist)[0]
w_val = (np.exp(-(dist_val[id0] ** 2.0) /
(config['BandWidth'] ** 2.))).flatten()
smoothed_value[iloc] = np.sum(w_val * data[id0, 3]) / np.sum(w_val)
return smoothed_value, np.sum(data[:, -1]), np.sum(smoothed_value) | [
"def",
"smooth_data",
"(",
"self",
",",
"data",
",",
"config",
",",
"is_3d",
"=",
"False",
")",
":",
"max_dist",
"=",
"config",
"[",
"'Length_Limit'",
"]",
"*",
"config",
"[",
"'BandWidth'",
"]",
"smoothed_value",
"=",
"np",
".",
"zeros",
"(",
"len",
"(",
"data",
")",
",",
"dtype",
"=",
"float",
")",
"for",
"iloc",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"data",
")",
")",
":",
"dist_val",
"=",
"haversine",
"(",
"data",
"[",
":",
",",
"0",
"]",
",",
"data",
"[",
":",
",",
"1",
"]",
",",
"data",
"[",
"iloc",
",",
"0",
"]",
",",
"data",
"[",
"iloc",
",",
"1",
"]",
")",
"if",
"is_3d",
":",
"dist_val",
"=",
"np",
".",
"sqrt",
"(",
"dist_val",
".",
"flatten",
"(",
")",
"**",
"2.0",
"+",
"(",
"data",
"[",
":",
",",
"2",
"]",
"-",
"data",
"[",
"iloc",
",",
"2",
"]",
")",
"**",
"2.0",
")",
"id0",
"=",
"np",
".",
"where",
"(",
"dist_val",
"<=",
"max_dist",
")",
"[",
"0",
"]",
"w_val",
"=",
"(",
"np",
".",
"exp",
"(",
"-",
"(",
"dist_val",
"[",
"id0",
"]",
"**",
"2.0",
")",
"/",
"(",
"config",
"[",
"'BandWidth'",
"]",
"**",
"2.",
")",
")",
")",
".",
"flatten",
"(",
")",
"smoothed_value",
"[",
"iloc",
"]",
"=",
"np",
".",
"sum",
"(",
"w_val",
"*",
"data",
"[",
"id0",
",",
"3",
"]",
")",
"/",
"np",
".",
"sum",
"(",
"w_val",
")",
"return",
"smoothed_value",
",",
"np",
".",
"sum",
"(",
"data",
"[",
":",
",",
"-",
"1",
"]",
")",
",",
"np",
".",
"sum",
"(",
"smoothed_value",
")"
] | Applies the smoothing kernel to the data
:param np.ndarray data:
Raw earthquake count in the form [Longitude, Latitude, Depth,
Count]
:param dict config:
Configuration parameters must contain:
* BandWidth: The bandwidth of the kernel (in km) (float)
* Length_Limit: Maximum number of standard deviations
:returns:
* smoothed_value: np.ndarray vector of smoothed values
* Total (summed) rate of the original values
* Total (summed) rate of the smoothed values | [
"Applies",
"the",
"smoothing",
"kernel",
"to",
"the",
"data"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/seismicity/smoothing/kernels/isotropic_gaussian.py#L69-L99 |
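A minimal usage sketch for the kernel above (not taken from the recorded source): it assumes openquake.hmtk is installed, that IsotropicGaussian can be instantiated without arguments, and that the catalogue has already been binned into [longitude, latitude, depth, count] rows; the numbers are invented.

import numpy as np
from openquake.hmtk.seismicity.smoothing.kernels.isotropic_gaussian import (
    IsotropicGaussian)

# Invented binned counts: [longitude, latitude, depth (km), count]
data = np.array([[10.0, 45.0, 10.0, 5.0],
                 [10.1, 45.0, 10.0, 3.0],
                 [10.3, 45.2, 10.0, 1.0]])
# BandWidth in km, Length_Limit in numbers of bandwidths
config = {'BandWidth': 30.0, 'Length_Limit': 3.0}
kernel = IsotropicGaussian()
smoothed, rate_in, rate_out = kernel.smooth_data(data, config, is_3d=False)
print(smoothed, rate_in, rate_out)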
215 | gem/oq-engine | openquake/commands/purge.py | purge_one | def purge_one(calc_id, user):
"""
Remove one calculation ID from the database and remove its datastore
"""
filename = os.path.join(datadir, 'calc_%s.hdf5' % calc_id)
err = dbcmd('del_calc', calc_id, user)
if err:
print(err)
elif os.path.exists(filename): # not removed yet
os.remove(filename)
print('Removed %s' % filename) | python | def purge_one(calc_id, user):
"""
Remove one calculation ID from the database and remove its datastore
"""
filename = os.path.join(datadir, 'calc_%s.hdf5' % calc_id)
err = dbcmd('del_calc', calc_id, user)
if err:
print(err)
elif os.path.exists(filename): # not removed yet
os.remove(filename)
print('Removed %s' % filename) | [
"def",
"purge_one",
"(",
"calc_id",
",",
"user",
")",
":",
"filename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"datadir",
",",
"'calc_%s.hdf5'",
"%",
"calc_id",
")",
"err",
"=",
"dbcmd",
"(",
"'del_calc'",
",",
"calc_id",
",",
"user",
")",
"if",
"err",
":",
"print",
"(",
"err",
")",
"elif",
"os",
".",
"path",
".",
"exists",
"(",
"filename",
")",
":",
"# not removed yet",
"os",
".",
"remove",
"(",
"filename",
")",
"print",
"(",
"'Removed %s'",
"%",
"filename",
")"
] | Remove one calculation ID from the database and remove its datastore | [
"Remove",
"one",
"calculation",
"ID",
"from",
"the",
"database",
"and",
"remove",
"its",
"datastore"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/purge.py#L28-L38 |
216 | gem/oq-engine | openquake/commands/purge.py | purge_all | def purge_all(user=None, fast=False):
"""
Remove all calculations of the given user
"""
user = user or getpass.getuser()
if os.path.exists(datadir):
if fast:
shutil.rmtree(datadir)
print('Removed %s' % datadir)
else:
for fname in os.listdir(datadir):
mo = re.match('calc_(\d+)\.hdf5', fname)
if mo is not None:
calc_id = int(mo.group(1))
purge_one(calc_id, user) | python | def purge_all(user=None, fast=False):
"""
Remove all calculations of the given user
"""
user = user or getpass.getuser()
if os.path.exists(datadir):
if fast:
shutil.rmtree(datadir)
print('Removed %s' % datadir)
else:
for fname in os.listdir(datadir):
mo = re.match('calc_(\d+)\.hdf5', fname)
if mo is not None:
calc_id = int(mo.group(1))
purge_one(calc_id, user) | [
"def",
"purge_all",
"(",
"user",
"=",
"None",
",",
"fast",
"=",
"False",
")",
":",
"user",
"=",
"user",
"or",
"getpass",
".",
"getuser",
"(",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"datadir",
")",
":",
"if",
"fast",
":",
"shutil",
".",
"rmtree",
"(",
"datadir",
")",
"print",
"(",
"'Removed %s'",
"%",
"datadir",
")",
"else",
":",
"for",
"fname",
"in",
"os",
".",
"listdir",
"(",
"datadir",
")",
":",
"mo",
"=",
"re",
".",
"match",
"(",
"'calc_(\\d+)\\.hdf5'",
",",
"fname",
")",
"if",
"mo",
"is",
"not",
"None",
":",
"calc_id",
"=",
"int",
"(",
"mo",
".",
"group",
"(",
"1",
")",
")",
"purge_one",
"(",
"calc_id",
",",
"user",
")"
] | Remove all calculations of the given user | [
"Remove",
"all",
"calculations",
"of",
"the",
"given",
"user"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/purge.py#L42-L56 |
217 | gem/oq-engine | openquake/commands/purge.py | purge | def purge(calc_id):
"""
Remove the given calculation. If you want to remove all calculations,
use oq reset.
"""
if calc_id < 0:
try:
calc_id = datastore.get_calc_ids(datadir)[calc_id]
except IndexError:
print('Calculation %d not found' % calc_id)
return
purge_one(calc_id, getpass.getuser()) | python | def purge(calc_id):
"""
Remove the given calculation. If you want to remove all calculations,
use oq reset.
"""
if calc_id < 0:
try:
calc_id = datastore.get_calc_ids(datadir)[calc_id]
except IndexError:
print('Calculation %d not found' % calc_id)
return
purge_one(calc_id, getpass.getuser()) | [
"def",
"purge",
"(",
"calc_id",
")",
":",
"if",
"calc_id",
"<",
"0",
":",
"try",
":",
"calc_id",
"=",
"datastore",
".",
"get_calc_ids",
"(",
"datadir",
")",
"[",
"calc_id",
"]",
"except",
"IndexError",
":",
"print",
"(",
"'Calculation %d not found'",
"%",
"calc_id",
")",
"return",
"purge_one",
"(",
"calc_id",
",",
"getpass",
".",
"getuser",
"(",
")",
")"
] | Remove the given calculation. If you want to remove all calculations,
use oq reset. | [
"Remove",
"the",
"given",
"calculation",
".",
"If",
"you",
"want",
"to",
"remove",
"all",
"calculations",
"use",
"oq",
"reset",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/purge.py#L60-L71 |
218 | gem/oq-engine | openquake/hmtk/plotting/patch.py | PolygonPatch | def PolygonPatch(polygon, **kwargs):
"""Constructs a matplotlib patch from a geometric object
The `polygon` may be a Shapely or GeoJSON-like object possibly with holes.
The `kwargs` are those supported by the matplotlib.patches.Polygon class
constructor. Returns an instance of matplotlib.patches.PathPatch.
Example (using Shapely Point and a matplotlib axes):
>> b = Point(0, 0).buffer(1.0)
>> patch = PolygonPatch(b, fc='blue', ec='blue', alpha=0.5)
>> axis.add_patch(patch)
"""
def coding(ob):
# The codes will be all "LINETO" commands, except for "MOVETO"s at the
# beginning of each subpath
n = len(getattr(ob, 'coords', None) or ob)
vals = ones(n, dtype=Path.code_type) * Path.LINETO
vals[0] = Path.MOVETO
return vals
if hasattr(polygon, 'geom_type'): # Shapely
ptype = polygon.geom_type
if ptype == 'Polygon':
polygon = [Polygon(polygon)]
elif ptype == 'MultiPolygon':
polygon = [Polygon(p) for p in polygon]
else:
raise ValueError(
"A polygon or multi-polygon representation is required")
else: # GeoJSON
polygon = getattr(polygon, '__geo_interface__', polygon)
ptype = polygon["type"]
if ptype == 'Polygon':
polygon = [Polygon(polygon)]
elif ptype == 'MultiPolygon':
polygon = [Polygon(p) for p in polygon['coordinates']]
else:
raise ValueError(
"A polygon or multi-polygon representation is required")
vertices = concatenate([
concatenate([asarray(t.exterior)[:, :2]] +
[asarray(r)[:, :2] for r in t.interiors])
for t in polygon])
codes = concatenate([
concatenate([coding(t.exterior)] +
[coding(r) for r in t.interiors]) for t in polygon])
return PathPatch(Path(vertices, codes), **kwargs) | python | def PolygonPatch(polygon, **kwargs):
"""Constructs a matplotlib patch from a geometric object
The `polygon` may be a Shapely or GeoJSON-like object possibly with holes.
The `kwargs` are those supported by the matplotlib.patches.Polygon class
constructor. Returns an instance of matplotlib.patches.PathPatch.
Example (using Shapely Point and a matplotlib axes):
>> b = Point(0, 0).buffer(1.0)
>> patch = PolygonPatch(b, fc='blue', ec='blue', alpha=0.5)
>> axis.add_patch(patch)
"""
def coding(ob):
# The codes will be all "LINETO" commands, except for "MOVETO"s at the
# beginning of each subpath
n = len(getattr(ob, 'coords', None) or ob)
vals = ones(n, dtype=Path.code_type) * Path.LINETO
vals[0] = Path.MOVETO
return vals
if hasattr(polygon, 'geom_type'): # Shapely
ptype = polygon.geom_type
if ptype == 'Polygon':
polygon = [Polygon(polygon)]
elif ptype == 'MultiPolygon':
polygon = [Polygon(p) for p in polygon]
else:
raise ValueError(
"A polygon or multi-polygon representation is required")
else: # GeoJSON
polygon = getattr(polygon, '__geo_interface__', polygon)
ptype = polygon["type"]
if ptype == 'Polygon':
polygon = [Polygon(polygon)]
elif ptype == 'MultiPolygon':
polygon = [Polygon(p) for p in polygon['coordinates']]
else:
raise ValueError(
"A polygon or multi-polygon representation is required")
vertices = concatenate([
concatenate([asarray(t.exterior)[:, :2]] +
[asarray(r)[:, :2] for r in t.interiors])
for t in polygon])
codes = concatenate([
concatenate([coding(t.exterior)] +
[coding(r) for r in t.interiors]) for t in polygon])
return PathPatch(Path(vertices, codes), **kwargs) | [
"def",
"PolygonPatch",
"(",
"polygon",
",",
"*",
"*",
"kwargs",
")",
":",
"def",
"coding",
"(",
"ob",
")",
":",
"# The codes will be all \"LINETO\" commands, except for \"MOVETO\"s at the",
"# beginning of each subpath",
"n",
"=",
"len",
"(",
"getattr",
"(",
"ob",
",",
"'coords'",
",",
"None",
")",
"or",
"ob",
")",
"vals",
"=",
"ones",
"(",
"n",
",",
"dtype",
"=",
"Path",
".",
"code_type",
")",
"*",
"Path",
".",
"LINETO",
"vals",
"[",
"0",
"]",
"=",
"Path",
".",
"MOVETO",
"return",
"vals",
"if",
"hasattr",
"(",
"polygon",
",",
"'geom_type'",
")",
":",
"# Shapely",
"ptype",
"=",
"polygon",
".",
"geom_type",
"if",
"ptype",
"==",
"'Polygon'",
":",
"polygon",
"=",
"[",
"Polygon",
"(",
"polygon",
")",
"]",
"elif",
"ptype",
"==",
"'MultiPolygon'",
":",
"polygon",
"=",
"[",
"Polygon",
"(",
"p",
")",
"for",
"p",
"in",
"polygon",
"]",
"else",
":",
"raise",
"ValueError",
"(",
"\"A polygon or multi-polygon representation is required\"",
")",
"else",
":",
"# GeoJSON",
"polygon",
"=",
"getattr",
"(",
"polygon",
",",
"'__geo_interface__'",
",",
"polygon",
")",
"ptype",
"=",
"polygon",
"[",
"\"type\"",
"]",
"if",
"ptype",
"==",
"'Polygon'",
":",
"polygon",
"=",
"[",
"Polygon",
"(",
"polygon",
")",
"]",
"elif",
"ptype",
"==",
"'MultiPolygon'",
":",
"polygon",
"=",
"[",
"Polygon",
"(",
"p",
")",
"for",
"p",
"in",
"polygon",
"[",
"'coordinates'",
"]",
"]",
"else",
":",
"raise",
"ValueError",
"(",
"\"A polygon or multi-polygon representation is required\"",
")",
"vertices",
"=",
"concatenate",
"(",
"[",
"concatenate",
"(",
"[",
"asarray",
"(",
"t",
".",
"exterior",
")",
"[",
":",
",",
":",
"2",
"]",
"]",
"+",
"[",
"asarray",
"(",
"r",
")",
"[",
":",
",",
":",
"2",
"]",
"for",
"r",
"in",
"t",
".",
"interiors",
"]",
")",
"for",
"t",
"in",
"polygon",
"]",
")",
"codes",
"=",
"concatenate",
"(",
"[",
"concatenate",
"(",
"[",
"coding",
"(",
"t",
".",
"exterior",
")",
"]",
"+",
"[",
"coding",
"(",
"r",
")",
"for",
"r",
"in",
"t",
".",
"interiors",
"]",
")",
"for",
"t",
"in",
"polygon",
"]",
")",
"return",
"PathPatch",
"(",
"Path",
"(",
"vertices",
",",
"codes",
")",
",",
"*",
"*",
"kwargs",
")"
] | Constructs a matplotlib patch from a geometric object
The `polygon` may be a Shapely or GeoJSON-like object possibly with holes.
The `kwargs` are those supported by the matplotlib.patches.Polygon class
constructor. Returns an instance of matplotlib.patches.PathPatch.
Example (using Shapely Point and a matplotlib axes):
>> b = Point(0, 0).buffer(1.0)
>> patch = PolygonPatch(b, fc='blue', ec='blue', alpha=0.5)
>> axis.add_patch(patch) | [
"Constructs",
"a",
"matplotlib",
"patch",
"from",
"a",
"geometric",
"object"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/patch.py#L43-L93 |
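A self-contained version of the docstring example above (an illustrative sketch, assuming shapely and matplotlib are installed in versions compatible with this function; the buffer radius and colours are arbitrary):

import matplotlib.pyplot as plt
from shapely.geometry import Point
from openquake.hmtk.plotting.patch import PolygonPatch

# A disc of radius 1 centred on the origin, rendered as a filled patch
disc = Point(0, 0).buffer(1.0)
fig, axis = plt.subplots()
axis.add_patch(PolygonPatch(disc, fc='blue', ec='blue', alpha=0.5))
axis.set_xlim(-2, 2)
axis.set_ylim(-2, 2)
axis.set_aspect('equal')
plt.show()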
219 | gem/oq-engine | openquake/hazardlib/gsim/kotha_2019.py | KothaEtAl2019.retreive_sigma_mu_data | def retreive_sigma_mu_data(self):
"""
For the general form of the GMPE this retrieves the sigma mu
values from the hdf5 file using the "general" model, i.e. sigma mu
factors that are independent of the choice of region or depth
"""
fle = h5py.File(os.path.join(BASE_PATH,
"KothaEtAl2019_SigmaMu_Fixed.hdf5"), "r")
self.mags = fle["M"][:]
self.dists = fle["R"][:]
self.periods = fle["T"][:]
self.pga = fle["PGA"][:]
self.pgv = fle["PGV"][:]
self.s_a = fle["SA"][:]
fle.close() | python | def retreive_sigma_mu_data(self):
"""
For the general form of the GMPE this retrieves the sigma mu
values from the hdf5 file using the "general" model, i.e. sigma mu
factors that are independent of the choice of region or depth
"""
fle = h5py.File(os.path.join(BASE_PATH,
"KothaEtAl2019_SigmaMu_Fixed.hdf5"), "r")
self.mags = fle["M"][:]
self.dists = fle["R"][:]
self.periods = fle["T"][:]
self.pga = fle["PGA"][:]
self.pgv = fle["PGV"][:]
self.s_a = fle["SA"][:]
fle.close() | [
"def",
"retreive_sigma_mu_data",
"(",
"self",
")",
":",
"fle",
"=",
"h5py",
".",
"File",
"(",
"os",
".",
"path",
".",
"join",
"(",
"BASE_PATH",
",",
"\"KothaEtAl2019_SigmaMu_Fixed.hdf5\"",
")",
",",
"\"r\"",
")",
"self",
".",
"mags",
"=",
"fle",
"[",
"\"M\"",
"]",
"[",
":",
"]",
"self",
".",
"dists",
"=",
"fle",
"[",
"\"R\"",
"]",
"[",
":",
"]",
"self",
".",
"periods",
"=",
"fle",
"[",
"\"T\"",
"]",
"[",
":",
"]",
"self",
".",
"pga",
"=",
"fle",
"[",
"\"PGA\"",
"]",
"[",
":",
"]",
"self",
".",
"pgv",
"=",
"fle",
"[",
"\"PGV\"",
"]",
"[",
":",
"]",
"self",
".",
"s_a",
"=",
"fle",
"[",
"\"SA\"",
"]",
"[",
":",
"]",
"fle",
".",
"close",
"(",
")"
] | For the general form of the GMPE this retrieves the sigma mu
values from the hdf5 file using the "general" model, i.e. sigma mu
factors that are independent of the choice of region or depth | [
"For",
"the",
"general",
"form",
"of",
"the",
"GMPE",
"this",
"retrieves",
"the",
"sigma",
"mu",
"values",
"from",
"the",
"hdf5",
"file",
"using",
"the",
"general",
"model",
"i",
".",
"e",
".",
"sigma",
"mu",
"factors",
"that",
"are",
"independent",
"of",
"the",
"choice",
"of",
"region",
"or",
"depth"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L129-L143 |
220 | gem/oq-engine | openquake/hazardlib/gsim/kotha_2019.py | KothaEtAl2019.get_magnitude_scaling | def get_magnitude_scaling(self, C, mag):
"""
Returns the magnitude scaling term
"""
d_m = mag - self.CONSTANTS["Mh"]
if mag < self.CONSTANTS["Mh"]:
return C["e1"] + C["b1"] * d_m + C["b2"] * (d_m ** 2.0)
else:
return C["e1"] + C["b3"] * d_m | python | def get_magnitude_scaling(self, C, mag):
"""
Returns the magnitude scaling term
"""
d_m = mag - self.CONSTANTS["Mh"]
if mag < self.CONSTANTS["Mh"]:
return C["e1"] + C["b1"] * d_m + C["b2"] * (d_m ** 2.0)
else:
return C["e1"] + C["b3"] * d_m | [
"def",
"get_magnitude_scaling",
"(",
"self",
",",
"C",
",",
"mag",
")",
":",
"d_m",
"=",
"mag",
"-",
"self",
".",
"CONSTANTS",
"[",
"\"Mh\"",
"]",
"if",
"mag",
"<",
"self",
".",
"CONSTANTS",
"[",
"\"Mh\"",
"]",
":",
"return",
"C",
"[",
"\"e1\"",
"]",
"+",
"C",
"[",
"\"b1\"",
"]",
"*",
"d_m",
"+",
"C",
"[",
"\"b2\"",
"]",
"*",
"(",
"d_m",
"**",
"2.0",
")",
"else",
":",
"return",
"C",
"[",
"\"e1\"",
"]",
"+",
"C",
"[",
"\"b3\"",
"]",
"*",
"d_m"
] | Returns the magnitude scaling term | [
"Returns",
"the",
"magnitude",
"scaling",
"term"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L174-L182 |
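The hinged magnitude scaling can be sketched in isolation; the hinge magnitude and coefficients below are placeholders chosen purely to show the shape of the function, not the published values:

import numpy as np

def magnitude_scaling(mag, e1, b1, b2, b3, m_h=6.2):
    # Quadratic in (M - Mh) below the hinge magnitude, linear above it
    d_m = mag - m_h
    return np.where(mag < m_h,
                    e1 + b1 * d_m + b2 * d_m ** 2.0,
                    e1 + b3 * d_m)

# Placeholder coefficients for illustration only
print(magnitude_scaling(np.array([4.5, 6.2, 7.5]),
                        e1=1.0, b1=2.0, b2=-0.2, b3=1.3))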
221 | gem/oq-engine | openquake/hazardlib/gsim/kotha_2019.py | KothaEtAl2019.get_distance_term | def get_distance_term(self, C, rup, rjb, imt):
"""
Returns the distance attenuation factor
"""
h = self._get_h(C, rup.hypo_depth)
rval = np.sqrt(rjb ** 2. + h ** 2.)
c3 = self.get_distance_coefficients(C, imt)
f_r = (C["c1"] + C["c2"] * (rup.mag - self.CONSTANTS["Mref"])) *\
np.log(rval / self.CONSTANTS["Rref"]) +\
c3 * (rval - self.CONSTANTS["Rref"])
return f_r | python | def get_distance_term(self, C, rup, rjb, imt):
"""
Returns the distance attenuation factor
"""
h = self._get_h(C, rup.hypo_depth)
rval = np.sqrt(rjb ** 2. + h ** 2.)
c3 = self.get_distance_coefficients(C, imt)
f_r = (C["c1"] + C["c2"] * (rup.mag - self.CONSTANTS["Mref"])) *\
np.log(rval / self.CONSTANTS["Rref"]) +\
c3 * (rval - self.CONSTANTS["Rref"])
return f_r | [
"def",
"get_distance_term",
"(",
"self",
",",
"C",
",",
"rup",
",",
"rjb",
",",
"imt",
")",
":",
"h",
"=",
"self",
".",
"_get_h",
"(",
"C",
",",
"rup",
".",
"hypo_depth",
")",
"rval",
"=",
"np",
".",
"sqrt",
"(",
"rjb",
"**",
"2.",
"+",
"h",
"**",
"2.",
")",
"c3",
"=",
"self",
".",
"get_distance_coefficients",
"(",
"C",
",",
"imt",
")",
"f_r",
"=",
"(",
"C",
"[",
"\"c1\"",
"]",
"+",
"C",
"[",
"\"c2\"",
"]",
"*",
"(",
"rup",
".",
"mag",
"-",
"self",
".",
"CONSTANTS",
"[",
"\"Mref\"",
"]",
")",
")",
"*",
"np",
".",
"log",
"(",
"rval",
"/",
"self",
".",
"CONSTANTS",
"[",
"\"Rref\"",
"]",
")",
"+",
"c3",
"*",
"(",
"rval",
"-",
"self",
".",
"CONSTANTS",
"[",
"\"Rref\"",
"]",
")",
"return",
"f_r"
] | Returns the distance attenuation factor | [
"Returns",
"the",
"distance",
"attenuation",
"factor"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L184-L195 |
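The attenuation term reduces to a single expression once the effective depth h is fixed; the sketch below uses placeholder coefficients and reference values (Mref, Rref) purely to show its form:

import numpy as np

def distance_term(rjb, mag, c1, c2, c3, h, m_ref=4.5, r_ref=30.0):
    # Effective distance combines Joyner-Boore distance with an effective depth
    rval = np.sqrt(rjb ** 2.0 + h ** 2.0)
    # Magnitude-dependent geometric spreading plus a linear-in-distance term
    return (c1 + c2 * (mag - m_ref)) * np.log(rval / r_ref) + c3 * (rval - r_ref)

# Placeholder values for illustration only
print(distance_term(np.array([10.0, 50.0, 200.0]), mag=6.0,
                    c1=-1.3, c2=0.3, c3=-0.002, h=6.0))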
222 | gem/oq-engine | openquake/hazardlib/gsim/kotha_2019.py | KothaEtAl2019.get_distance_coefficients | def get_distance_coefficients(self, C, imt):
"""
Returns the c3 term
"""
c3 = self.c3[imt]["c3"] if self.c3 else C["c3"]
return c3 | python | def get_distance_coefficients(self, C, imt):
"""
Returns the c3 term
"""
c3 = self.c3[imt]["c3"] if self.c3 else C["c3"]
return c3 | [
"def",
"get_distance_coefficients",
"(",
"self",
",",
"C",
",",
"imt",
")",
":",
"c3",
"=",
"self",
".",
"c3",
"[",
"imt",
"]",
"[",
"\"c3\"",
"]",
"if",
"self",
".",
"c3",
"else",
"C",
"[",
"\"c3\"",
"]",
"return",
"c3"
] | Returns the c3 term | [
"Returns",
"the",
"c3",
"term"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L208-L213 |
223 | gem/oq-engine | openquake/hazardlib/gsim/kotha_2019.py | KothaEtAl2019.get_sigma_mu_adjustment | def get_sigma_mu_adjustment(self, C, imt, rup, dists):
"""
Returns the sigma mu adjustment factor
"""
if imt.name in "PGA PGV":
# PGA and PGV are 2D arrays of dimension [nmags, ndists]
sigma_mu = getattr(self, imt.name.lower())
if rup.mag <= self.mags[0]:
sigma_mu_m = sigma_mu[0, :]
elif rup.mag >= self.mags[-1]:
sigma_mu_m = sigma_mu[-1, :]
else:
intpl1 = interp1d(self.mags, sigma_mu, axis=0)
sigma_mu_m = intpl1(rup.mag)
# Linear interpolation with distance
intpl2 = interp1d(self.dists, sigma_mu_m, bounds_error=False,
fill_value=(sigma_mu_m[0], sigma_mu_m[-1]))
return intpl2(dists.rjb)
# In the case of SA the array is of dimension [nmags, ndists, nperiods]
# Get values for given magnitude
if rup.mag <= self.mags[0]:
sigma_mu_m = self.s_a[0, :, :]
elif rup.mag >= self.mags[-1]:
sigma_mu_m = self.s_a[-1, :, :]
else:
intpl1 = interp1d(self.mags, self.s_a, axis=0)
sigma_mu_m = intpl1(rup.mag)
# Get values for period - N.B. ln T, linear sigma mu interpolation
if imt.period <= self.periods[0]:
sigma_mu_t = sigma_mu_m[:, 0]
elif imt.period >= self.periods[-1]:
sigma_mu_t = sigma_mu_m[:, -1]
else:
intpl2 = interp1d(np.log(self.periods), sigma_mu_m, axis=1)
sigma_mu_t = intpl2(np.log(imt.period))
intpl3 = interp1d(self.dists, sigma_mu_t, bounds_error=False,
fill_value=(sigma_mu_t[0], sigma_mu_t[-1]))
return intpl3(dists.rjb) | python | def get_sigma_mu_adjustment(self, C, imt, rup, dists):
"""
Returns the sigma mu adjustment factor
"""
if imt.name in "PGA PGV":
# PGA and PGV are 2D arrays of dimension [nmags, ndists]
sigma_mu = getattr(self, imt.name.lower())
if rup.mag <= self.mags[0]:
sigma_mu_m = sigma_mu[0, :]
elif rup.mag >= self.mags[-1]:
sigma_mu_m = sigma_mu[-1, :]
else:
intpl1 = interp1d(self.mags, sigma_mu, axis=0)
sigma_mu_m = intpl1(rup.mag)
# Linear interpolation with distance
intpl2 = interp1d(self.dists, sigma_mu_m, bounds_error=False,
fill_value=(sigma_mu_m[0], sigma_mu_m[-1]))
return intpl2(dists.rjb)
# In the case of SA the array is of dimension [nmags, ndists, nperiods]
# Get values for given magnitude
if rup.mag <= self.mags[0]:
sigma_mu_m = self.s_a[0, :, :]
elif rup.mag >= self.mags[-1]:
sigma_mu_m = self.s_a[-1, :, :]
else:
intpl1 = interp1d(self.mags, self.s_a, axis=0)
sigma_mu_m = intpl1(rup.mag)
# Get values for period - N.B. ln T, linear sigma mu interpolation
if imt.period <= self.periods[0]:
sigma_mu_t = sigma_mu_m[:, 0]
elif imt.period >= self.periods[-1]:
sigma_mu_t = sigma_mu_m[:, -1]
else:
intpl2 = interp1d(np.log(self.periods), sigma_mu_m, axis=1)
sigma_mu_t = intpl2(np.log(imt.period))
intpl3 = interp1d(self.dists, sigma_mu_t, bounds_error=False,
fill_value=(sigma_mu_t[0], sigma_mu_t[-1]))
return intpl3(dists.rjb) | [
"def",
"get_sigma_mu_adjustment",
"(",
"self",
",",
"C",
",",
"imt",
",",
"rup",
",",
"dists",
")",
":",
"if",
"imt",
".",
"name",
"in",
"\"PGA PGV\"",
":",
"# PGA and PGV are 2D arrays of dimension [nmags, ndists]",
"sigma_mu",
"=",
"getattr",
"(",
"self",
",",
"imt",
".",
"name",
".",
"lower",
"(",
")",
")",
"if",
"rup",
".",
"mag",
"<=",
"self",
".",
"mags",
"[",
"0",
"]",
":",
"sigma_mu_m",
"=",
"sigma_mu",
"[",
"0",
",",
":",
"]",
"elif",
"rup",
".",
"mag",
">=",
"self",
".",
"mags",
"[",
"-",
"1",
"]",
":",
"sigma_mu_m",
"=",
"sigma_mu",
"[",
"-",
"1",
",",
":",
"]",
"else",
":",
"intpl1",
"=",
"interp1d",
"(",
"self",
".",
"mags",
",",
"sigma_mu",
",",
"axis",
"=",
"0",
")",
"sigma_mu_m",
"=",
"intpl1",
"(",
"rup",
".",
"mag",
")",
"# Linear interpolation with distance",
"intpl2",
"=",
"interp1d",
"(",
"self",
".",
"dists",
",",
"sigma_mu_m",
",",
"bounds_error",
"=",
"False",
",",
"fill_value",
"=",
"(",
"sigma_mu_m",
"[",
"0",
"]",
",",
"sigma_mu_m",
"[",
"-",
"1",
"]",
")",
")",
"return",
"intpl2",
"(",
"dists",
".",
"rjb",
")",
"# In the case of SA the array is of dimension [nmags, ndists, nperiods]",
"# Get values for given magnitude",
"if",
"rup",
".",
"mag",
"<=",
"self",
".",
"mags",
"[",
"0",
"]",
":",
"sigma_mu_m",
"=",
"self",
".",
"s_a",
"[",
"0",
",",
":",
",",
":",
"]",
"elif",
"rup",
".",
"mag",
">=",
"self",
".",
"mags",
"[",
"-",
"1",
"]",
":",
"sigma_mu_m",
"=",
"self",
".",
"s_a",
"[",
"-",
"1",
",",
":",
",",
":",
"]",
"else",
":",
"intpl1",
"=",
"interp1d",
"(",
"self",
".",
"mags",
",",
"self",
".",
"s_a",
",",
"axis",
"=",
"0",
")",
"sigma_mu_m",
"=",
"intpl1",
"(",
"rup",
".",
"mag",
")",
"# Get values for period - N.B. ln T, linear sigma mu interpolation",
"if",
"imt",
".",
"period",
"<=",
"self",
".",
"periods",
"[",
"0",
"]",
":",
"sigma_mu_t",
"=",
"sigma_mu_m",
"[",
":",
",",
"0",
"]",
"elif",
"imt",
".",
"period",
">=",
"self",
".",
"periods",
"[",
"-",
"1",
"]",
":",
"sigma_mu_t",
"=",
"sigma_mu_m",
"[",
":",
",",
"-",
"1",
"]",
"else",
":",
"intpl2",
"=",
"interp1d",
"(",
"np",
".",
"log",
"(",
"self",
".",
"periods",
")",
",",
"sigma_mu_m",
",",
"axis",
"=",
"1",
")",
"sigma_mu_t",
"=",
"intpl2",
"(",
"np",
".",
"log",
"(",
"imt",
".",
"period",
")",
")",
"intpl3",
"=",
"interp1d",
"(",
"self",
".",
"dists",
",",
"sigma_mu_t",
",",
"bounds_error",
"=",
"False",
",",
"fill_value",
"=",
"(",
"sigma_mu_t",
"[",
"0",
"]",
",",
"sigma_mu_t",
"[",
"-",
"1",
"]",
")",
")",
"return",
"intpl3",
"(",
"dists",
".",
"rjb",
")"
] | Returns the sigma mu adjustment factor | [
"Returns",
"the",
"sigma",
"mu",
"adjustment",
"factor"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L221-L258 |
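The clamped-interpolation pattern used repeatedly above can be isolated in a few lines; the table values below are invented for illustration:

import numpy as np
from scipy.interpolate import interp1d

# Invented sigma-mu values tabulated against distance
dists = np.array([10.0, 50.0, 100.0, 200.0])
sigma_mu = np.array([0.30, 0.25, 0.22, 0.20])

# bounds_error=False with fill_value=(first, last) clamps to the edge values
# instead of raising when a requested distance falls outside the table
intpl = interp1d(dists, sigma_mu, bounds_error=False,
                 fill_value=(sigma_mu[0], sigma_mu[-1]))
print(intpl(np.array([5.0, 75.0, 500.0])))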
224 | gem/oq-engine | openquake/hazardlib/gsim/kotha_2019.py | KothaEtAl2019SERA.get_site_amplification | def get_site_amplification(self, C, sites):
"""
Returns the linear site amplification term depending on whether the
Vs30 is observed of inferred
"""
ampl = np.zeros(sites.vs30.shape)
# For observed vs30 sites
ampl[sites.vs30measured] = (C["d0_obs"] + C["d1_obs"] *
np.log(sites.vs30[sites.vs30measured]))
# For inferred Vs30 sites
idx = np.logical_not(sites.vs30measured)
ampl[idx] = (C["d0_inf"] + C["d1_inf"] * np.log(sites.vs30[idx]))
return ampl | python | def get_site_amplification(self, C, sites):
"""
Returns the linear site amplification term depending on whether the
Vs30 is observed of inferred
"""
ampl = np.zeros(sites.vs30.shape)
# For observed vs30 sites
ampl[sites.vs30measured] = (C["d0_obs"] + C["d1_obs"] *
np.log(sites.vs30[sites.vs30measured]))
# For inferred Vs30 sites
idx = np.logical_not(sites.vs30measured)
ampl[idx] = (C["d0_inf"] + C["d1_inf"] * np.log(sites.vs30[idx]))
return ampl | [
"def",
"get_site_amplification",
"(",
"self",
",",
"C",
",",
"sites",
")",
":",
"ampl",
"=",
"np",
".",
"zeros",
"(",
"sites",
".",
"vs30",
".",
"shape",
")",
"# For observed vs30 sites",
"ampl",
"[",
"sites",
".",
"vs30measured",
"]",
"=",
"(",
"C",
"[",
"\"d0_obs\"",
"]",
"+",
"C",
"[",
"\"d1_obs\"",
"]",
"*",
"np",
".",
"log",
"(",
"sites",
".",
"vs30",
"[",
"sites",
".",
"vs30measured",
"]",
")",
")",
"# For inferred Vs30 sites",
"idx",
"=",
"np",
".",
"logical_not",
"(",
"sites",
".",
"vs30measured",
")",
"ampl",
"[",
"idx",
"]",
"=",
"(",
"C",
"[",
"\"d0_inf\"",
"]",
"+",
"C",
"[",
"\"d1_inf\"",
"]",
"*",
"np",
".",
"log",
"(",
"sites",
".",
"vs30",
"[",
"idx",
"]",
")",
")",
"return",
"ampl"
] | Returns the linear site amplification term depending on whether the
Vs30 is observed of inferred | [
"Returns",
"the",
"linear",
"site",
"amplification",
"term",
"depending",
"on",
"whether",
"the",
"Vs30",
"is",
"observed",
"of",
"inferred"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L332-L344 |

225 | gem/oq-engine | openquake/hazardlib/gsim/kotha_2019.py | KothaEtAl2019SERA.get_stddevs | def get_stddevs(self, C, stddev_shape, stddev_types, sites):
"""
Returns the standard deviations, with different site standard
deviation for inferred vs. observed vs30 sites.
"""
stddevs = []
tau = C["tau_event"]
sigma_s = np.zeros(sites.vs30measured.shape, dtype=float)
sigma_s[sites.vs30measured] += C["sigma_s_obs"]
sigma_s[np.logical_not(sites.vs30measured)] += C["sigma_s_inf"]
phi = np.sqrt(C["phi0"] ** 2.0 + sigma_s ** 2.)
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(np.sqrt(tau ** 2. + phi ** 2.) +
np.zeros(stddev_shape))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(phi + np.zeros(stddev_shape))
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(tau + np.zeros(stddev_shape))
return stddevs | python | def get_stddevs(self, C, stddev_shape, stddev_types, sites):
"""
Returns the standard deviations, with different site standard
deviation for inferred vs. observed vs30 sites.
"""
stddevs = []
tau = C["tau_event"]
sigma_s = np.zeros(sites.vs30measured.shape, dtype=float)
sigma_s[sites.vs30measured] += C["sigma_s_obs"]
sigma_s[np.logical_not(sites.vs30measured)] += C["sigma_s_inf"]
phi = np.sqrt(C["phi0"] ** 2.0 + sigma_s ** 2.)
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
if stddev_type == const.StdDev.TOTAL:
stddevs.append(np.sqrt(tau ** 2. + phi ** 2.) +
np.zeros(stddev_shape))
elif stddev_type == const.StdDev.INTRA_EVENT:
stddevs.append(phi + np.zeros(stddev_shape))
elif stddev_type == const.StdDev.INTER_EVENT:
stddevs.append(tau + np.zeros(stddev_shape))
return stddevs | [
"def",
"get_stddevs",
"(",
"self",
",",
"C",
",",
"stddev_shape",
",",
"stddev_types",
",",
"sites",
")",
":",
"stddevs",
"=",
"[",
"]",
"tau",
"=",
"C",
"[",
"\"tau_event\"",
"]",
"sigma_s",
"=",
"np",
".",
"zeros",
"(",
"sites",
".",
"vs30measured",
".",
"shape",
",",
"dtype",
"=",
"float",
")",
"sigma_s",
"[",
"sites",
".",
"vs30measured",
"]",
"+=",
"C",
"[",
"\"sigma_s_obs\"",
"]",
"sigma_s",
"[",
"np",
".",
"logical_not",
"(",
"sites",
".",
"vs30measured",
")",
"]",
"+=",
"C",
"[",
"\"sigma_s_inf\"",
"]",
"phi",
"=",
"np",
".",
"sqrt",
"(",
"C",
"[",
"\"phi0\"",
"]",
"**",
"2.0",
"+",
"sigma_s",
"**",
"2.",
")",
"for",
"stddev_type",
"in",
"stddev_types",
":",
"assert",
"stddev_type",
"in",
"self",
".",
"DEFINED_FOR_STANDARD_DEVIATION_TYPES",
"if",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"TOTAL",
":",
"stddevs",
".",
"append",
"(",
"np",
".",
"sqrt",
"(",
"tau",
"**",
"2.",
"+",
"phi",
"**",
"2.",
")",
"+",
"np",
".",
"zeros",
"(",
"stddev_shape",
")",
")",
"elif",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"INTRA_EVENT",
":",
"stddevs",
".",
"append",
"(",
"phi",
"+",
"np",
".",
"zeros",
"(",
"stddev_shape",
")",
")",
"elif",
"stddev_type",
"==",
"const",
".",
"StdDev",
".",
"INTER_EVENT",
":",
"stddevs",
".",
"append",
"(",
"tau",
"+",
"np",
".",
"zeros",
"(",
"stddev_shape",
")",
")",
"return",
"stddevs"
] | Returns the standard deviations, with different site standard
deviation for inferred vs. observed vs30 sites. | [
"Returns",
"the",
"standard",
"deviations",
"with",
"different",
"site",
"standard",
"deviation",
"for",
"inferred",
"vs",
".",
"observed",
"vs30",
"sites",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/kotha_2019.py#L346-L366 |
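The variance decomposition behind the method is easy to check by hand; the numbers below are placeholders, not fitted values:

import numpy as np

tau = 0.35       # between-event (inter-event) standard deviation
phi0 = 0.45      # event- and site-corrected within-event term
sigma_s = 0.40   # site-to-site term (typically larger for inferred vs30)

phi = np.sqrt(phi0 ** 2.0 + sigma_s ** 2.0)   # within-event (intra-event)
total = np.sqrt(tau ** 2.0 + phi ** 2.0)      # total standard deviation
print(phi, total)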
226 | gem/oq-engine | openquake/hazardlib/geo/geodetic.py | geodetic_distance | def geodetic_distance(lons1, lats1, lons2, lats2, diameter=2*EARTH_RADIUS):
"""
Calculate the geodetic distance between two points or two collections
of points.
Parameters are coordinates in decimal degrees. They could be scalar
float numbers or numpy arrays, in which case they should "broadcast
together".
Implements http://williams.best.vwh.net/avform.htm#Dist
:returns:
Distance in km, floating point scalar or numpy array of such.
"""
lons1, lats1, lons2, lats2 = _prepare_coords(lons1, lats1, lons2, lats2)
distance = numpy.arcsin(numpy.sqrt(
numpy.sin((lats1 - lats2) / 2.0) ** 2.0
+ numpy.cos(lats1) * numpy.cos(lats2)
* numpy.sin((lons1 - lons2) / 2.0) ** 2.0
))
return diameter * distance | python | def geodetic_distance(lons1, lats1, lons2, lats2, diameter=2*EARTH_RADIUS):
"""
Calculate the geodetic distance between two points or two collections
of points.
Parameters are coordinates in decimal degrees. They could be scalar
float numbers or numpy arrays, in which case they should "broadcast
together".
Implements http://williams.best.vwh.net/avform.htm#Dist
:returns:
Distance in km, floating point scalar or numpy array of such.
"""
lons1, lats1, lons2, lats2 = _prepare_coords(lons1, lats1, lons2, lats2)
distance = numpy.arcsin(numpy.sqrt(
numpy.sin((lats1 - lats2) / 2.0) ** 2.0
+ numpy.cos(lats1) * numpy.cos(lats2)
* numpy.sin((lons1 - lons2) / 2.0) ** 2.0
))
return diameter * distance | [
"def",
"geodetic_distance",
"(",
"lons1",
",",
"lats1",
",",
"lons2",
",",
"lats2",
",",
"diameter",
"=",
"2",
"*",
"EARTH_RADIUS",
")",
":",
"lons1",
",",
"lats1",
",",
"lons2",
",",
"lats2",
"=",
"_prepare_coords",
"(",
"lons1",
",",
"lats1",
",",
"lons2",
",",
"lats2",
")",
"distance",
"=",
"numpy",
".",
"arcsin",
"(",
"numpy",
".",
"sqrt",
"(",
"numpy",
".",
"sin",
"(",
"(",
"lats1",
"-",
"lats2",
")",
"/",
"2.0",
")",
"**",
"2.0",
"+",
"numpy",
".",
"cos",
"(",
"lats1",
")",
"*",
"numpy",
".",
"cos",
"(",
"lats2",
")",
"*",
"numpy",
".",
"sin",
"(",
"(",
"lons1",
"-",
"lons2",
")",
"/",
"2.0",
")",
"**",
"2.0",
")",
")",
"return",
"diameter",
"*",
"distance"
] | Calculate the geodetic distance between two points or two collections
of points.
Parameters are coordinates in decimal degrees. They could be scalar
float numbers or numpy arrays, in which case they should "broadcast
together".
Implements http://williams.best.vwh.net/avform.htm#Dist
:returns:
Distance in km, floating point scalar or numpy array of such. | [
"Calculate",
"the",
"geodetic",
"distance",
"between",
"two",
"points",
"or",
"two",
"collections",
"of",
"points",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L34-L54 |
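The same haversine formula can be exercised stand-alone; the sketch assumes a mean Earth radius of 6371 km:

import numpy as np

def haversine_km(lon1, lat1, lon2, lat2, radius=6371.0):
    # Convert to radians, then apply the haversine formula
    lon1, lat1, lon2, lat2 = map(np.radians, (lon1, lat1, lon2, lat2))
    a = (np.sin((lat1 - lat2) / 2.0) ** 2.0 +
         np.cos(lat1) * np.cos(lat2) * np.sin((lon1 - lon2) / 2.0) ** 2.0)
    return 2.0 * radius * np.arcsin(np.sqrt(a))

# One degree of latitude along a meridian is roughly 111 km
print(haversine_km(0.0, 0.0, 0.0, 1.0))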
227 | gem/oq-engine | openquake/hazardlib/geo/geodetic.py | azimuth | def azimuth(lons1, lats1, lons2, lats2):
"""
Calculate the azimuth between two points or two collections of points.
Parameters are the same as for :func:`geodetic_distance`.
Implements an "alternative formula" from
http://williams.best.vwh.net/avform.htm#Crs
:returns:
Azimuth as an angle between direction to north from first point and
direction to the second point measured clockwise in decimal degrees.
"""
lons1, lats1, lons2, lats2 = _prepare_coords(lons1, lats1, lons2, lats2)
cos_lat2 = numpy.cos(lats2)
true_course = numpy.degrees(numpy.arctan2(
numpy.sin(lons1 - lons2) * cos_lat2,
numpy.cos(lats1) * numpy.sin(lats2)
- numpy.sin(lats1) * cos_lat2 * numpy.cos(lons1 - lons2)
))
return (360 - true_course) % 360 | python | def azimuth(lons1, lats1, lons2, lats2):
"""
Calculate the azimuth between two points or two collections of points.
Parameters are the same as for :func:`geodetic_distance`.
Implements an "alternative formula" from
http://williams.best.vwh.net/avform.htm#Crs
:returns:
Azimuth as an angle between direction to north from first point and
direction to the second point measured clockwise in decimal degrees.
"""
lons1, lats1, lons2, lats2 = _prepare_coords(lons1, lats1, lons2, lats2)
cos_lat2 = numpy.cos(lats2)
true_course = numpy.degrees(numpy.arctan2(
numpy.sin(lons1 - lons2) * cos_lat2,
numpy.cos(lats1) * numpy.sin(lats2)
- numpy.sin(lats1) * cos_lat2 * numpy.cos(lons1 - lons2)
))
return (360 - true_course) % 360 | [
"def",
"azimuth",
"(",
"lons1",
",",
"lats1",
",",
"lons2",
",",
"lats2",
")",
":",
"lons1",
",",
"lats1",
",",
"lons2",
",",
"lats2",
"=",
"_prepare_coords",
"(",
"lons1",
",",
"lats1",
",",
"lons2",
",",
"lats2",
")",
"cos_lat2",
"=",
"numpy",
".",
"cos",
"(",
"lats2",
")",
"true_course",
"=",
"numpy",
".",
"degrees",
"(",
"numpy",
".",
"arctan2",
"(",
"numpy",
".",
"sin",
"(",
"lons1",
"-",
"lons2",
")",
"*",
"cos_lat2",
",",
"numpy",
".",
"cos",
"(",
"lats1",
")",
"*",
"numpy",
".",
"sin",
"(",
"lats2",
")",
"-",
"numpy",
".",
"sin",
"(",
"lats1",
")",
"*",
"cos_lat2",
"*",
"numpy",
".",
"cos",
"(",
"lons1",
"-",
"lons2",
")",
")",
")",
"return",
"(",
"360",
"-",
"true_course",
")",
"%",
"360"
] | Calculate the azimuth between two points or two collections of points.
Parameters are the same as for :func:`geodetic_distance`.
Implements an "alternative formula" from
http://williams.best.vwh.net/avform.htm#Crs
:returns:
Azimuth as an angle between direction to north from first point and
direction to the second point measured clockwise in decimal degrees. | [
"Calculate",
"the",
"azimuth",
"between",
"two",
"points",
"or",
"two",
"collections",
"of",
"points",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L57-L77 |
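The bearing calculation can be checked in isolation with the standard forward-azimuth form, which for ordinary point pairs is algebraically equivalent to the "alternative formula" used above:

import numpy as np

def azimuth_deg(lon1, lat1, lon2, lat2):
    # Bearing from point 1 to point 2, clockwise from north, in degrees
    lon1, lat1, lon2, lat2 = map(np.radians, (lon1, lat1, lon2, lat2))
    course = np.degrees(np.arctan2(
        np.sin(lon2 - lon1) * np.cos(lat2),
        np.cos(lat1) * np.sin(lat2) -
        np.sin(lat1) * np.cos(lat2) * np.cos(lon2 - lon1)))
    return course % 360.0

print(azimuth_deg(0.0, 0.0, 1.0, 0.0))  # -> 90.0, i.e. due east along the equator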
228 | gem/oq-engine | openquake/hazardlib/geo/geodetic.py | min_distance_to_segment | def min_distance_to_segment(seglons, seglats, lons, lats):
"""
This function computes the shortest distance to a segment in a 2D reference
system.
:parameter seglons:
A list or an array of floats specifying the longitude values of the two
vertexes delimiting the segment.
:parameter seglats:
A list or an array of floats specifying the latitude values of the two
vertexes delimiting the segment.
:parameter lons:
A list or a 1D array of floats specifying the longitude values of the
points for which the calculation of the shortest distance is requested.
:parameter lats:
A list or a 1D array of floats specifying the latitude values of the
points for which the calculation of the shortest distance is requested.
:returns:
An array of the same shape as lons which contains for each point
defined by (lons, lats) the shortest distance to the segment.
Distances are negative for those points that stay on the 'left side'
of the segment direction and whose projection lies within the segment
edges. For all other points, distance is positive.
"""
# Check the size of the seglons, seglats arrays
assert len(seglons) == len(seglats) == 2
# Compute the azimuth of the segment
seg_azim = azimuth(seglons[0], seglats[0], seglons[1], seglats[1])
# Compute the azimuth of the direction obtained
# connecting the first point defining the segment and each site
azimuth1 = azimuth(seglons[0], seglats[0], lons, lats)
# Compute the azimuth of the direction obtained
# connecting the second point defining the segment and each site
azimuth2 = azimuth(seglons[1], seglats[1], lons, lats)
# Find the points inside the band defined by the two lines perpendicular
# to the segment direction passing through the two vertexes of the segment.
# For these points the closest distance is the distance from the great arc.
idx_in = numpy.nonzero(
(numpy.cos(numpy.radians(seg_azim-azimuth1)) >= 0.0) &
(numpy.cos(numpy.radians(seg_azim-azimuth2)) <= 0.0))
# Find the points outside the band defined by the two lines perpendicular
# to the segment direction passing through the two vertexes of the segment.
# For these points the closest distance is the minimum of the distance from
# the two point vertexes.
idx_out = numpy.nonzero(
(numpy.cos(numpy.radians(seg_azim-azimuth1)) < 0.0) |
(numpy.cos(numpy.radians(seg_azim-azimuth2)) > 0.0))
# Find the indexes of points 'on the left of the segment'
idx_neg = numpy.nonzero(numpy.sin(numpy.radians(
(azimuth1-seg_azim))) < 0.0)
# Now let's compute the distances for the two cases.
dists = numpy.zeros_like(lons)
if len(idx_in[0]):
dists[idx_in] = distance_to_arc(
seglons[0], seglats[0], seg_azim, lons[idx_in], lats[idx_in])
if len(idx_out[0]):
dists[idx_out] = min_geodetic_distance(
(seglons, seglats), (lons[idx_out], lats[idx_out]))
# Finally we correct the sign of the distances in order to make sure that
# the points on the right semispace defined using as a reference the
# direction defined by the segment (i.e. the direction defined by going
# from the first point to the second one) have a positive distance and
# the others a negative one.
dists = abs(dists)
dists[idx_neg] = - dists[idx_neg]
return dists | python | def min_distance_to_segment(seglons, seglats, lons, lats):
"""
This function computes the shortest distance to a segment in a 2D reference
system.
:parameter seglons:
A list or an array of floats specifying the longitude values of the two
vertexes delimiting the segment.
:parameter seglats:
A list or an array of floats specifying the latitude values of the two
vertexes delimiting the segment.
:parameter lons:
A list or a 1D array of floats specifying the longitude values of the
points for which the calculation of the shortest distance is requested.
:parameter lats:
A list or a 1D array of floats specifying the latitude values of the
points for which the calculation of the shortest distance is requested.
:returns:
An array of the same shape as lons which contains for each point
defined by (lons, lats) the shortest distance to the segment.
Distances are negative for those points that stay on the 'left side'
of the segment direction and whose projection lies within the segment
edges. For all other points, distance is positive.
"""
# Check the size of the seglons, seglats arrays
assert len(seglons) == len(seglats) == 2
# Compute the azimuth of the segment
seg_azim = azimuth(seglons[0], seglats[0], seglons[1], seglats[1])
# Compute the azimuth of the direction obtained
# connecting the first point defining the segment and each site
azimuth1 = azimuth(seglons[0], seglats[0], lons, lats)
# Compute the azimuth of the direction obtained
# connecting the second point defining the segment and each site
azimuth2 = azimuth(seglons[1], seglats[1], lons, lats)
# Find the points inside the band defined by the two lines perpendicular
# to the segment direction passing through the two vertexes of the segment.
# For these points the closest distance is the distance from the great arc.
idx_in = numpy.nonzero(
(numpy.cos(numpy.radians(seg_azim-azimuth1)) >= 0.0) &
(numpy.cos(numpy.radians(seg_azim-azimuth2)) <= 0.0))
# Find the points outside the band defined by the two lines perpendicular
# to the segment direction passing through the two vertexes of the segment.
# For these points the closest distance is the minimum of the distance from
# the two point vertexes.
idx_out = numpy.nonzero(
(numpy.cos(numpy.radians(seg_azim-azimuth1)) < 0.0) |
(numpy.cos(numpy.radians(seg_azim-azimuth2)) > 0.0))
# Find the indexes of points 'on the left of the segment'
idx_neg = numpy.nonzero(numpy.sin(numpy.radians(
(azimuth1-seg_azim))) < 0.0)
# Now let's compute the distances for the two cases.
dists = numpy.zeros_like(lons)
if len(idx_in[0]):
dists[idx_in] = distance_to_arc(
seglons[0], seglats[0], seg_azim, lons[idx_in], lats[idx_in])
if len(idx_out[0]):
dists[idx_out] = min_geodetic_distance(
(seglons, seglats), (lons[idx_out], lats[idx_out]))
# Finally we correct the sign of the distances in order to make sure that
# the points on the right semispace defined using as a reference the
# direction defined by the segment (i.e. the direction defined by going
# from the first point to the second one) have a positive distance and
# the others a negative one.
dists = abs(dists)
dists[idx_neg] = - dists[idx_neg]
return dists | [
"def",
"min_distance_to_segment",
"(",
"seglons",
",",
"seglats",
",",
"lons",
",",
"lats",
")",
":",
"# Check the size of the seglons, seglats arrays",
"assert",
"len",
"(",
"seglons",
")",
"==",
"len",
"(",
"seglats",
")",
"==",
"2",
"# Compute the azimuth of the segment",
"seg_azim",
"=",
"azimuth",
"(",
"seglons",
"[",
"0",
"]",
",",
"seglats",
"[",
"0",
"]",
",",
"seglons",
"[",
"1",
"]",
",",
"seglats",
"[",
"1",
"]",
")",
"# Compute the azimuth of the direction obtained",
"# connecting the first point defining the segment and each site",
"azimuth1",
"=",
"azimuth",
"(",
"seglons",
"[",
"0",
"]",
",",
"seglats",
"[",
"0",
"]",
",",
"lons",
",",
"lats",
")",
"# Compute the azimuth of the direction obtained",
"# connecting the second point defining the segment and each site",
"azimuth2",
"=",
"azimuth",
"(",
"seglons",
"[",
"1",
"]",
",",
"seglats",
"[",
"1",
"]",
",",
"lons",
",",
"lats",
")",
"# Find the points inside the band defined by the two lines perpendicular",
"# to the segment direction passing through the two vertexes of the segment.",
"# For these points the closest distance is the distance from the great arc.",
"idx_in",
"=",
"numpy",
".",
"nonzero",
"(",
"(",
"numpy",
".",
"cos",
"(",
"numpy",
".",
"radians",
"(",
"seg_azim",
"-",
"azimuth1",
")",
")",
">=",
"0.0",
")",
"&",
"(",
"numpy",
".",
"cos",
"(",
"numpy",
".",
"radians",
"(",
"seg_azim",
"-",
"azimuth2",
")",
")",
"<=",
"0.0",
")",
")",
"# Find the points outside the band defined by the two line perpendicular",
"# to the segment direction passing through the two vertexes of the segment.",
"# For these points the closest distance is the minimum of the distance from",
"# the two point vertexes.",
"idx_out",
"=",
"numpy",
".",
"nonzero",
"(",
"(",
"numpy",
".",
"cos",
"(",
"numpy",
".",
"radians",
"(",
"seg_azim",
"-",
"azimuth1",
")",
")",
"<",
"0.0",
")",
"|",
"(",
"numpy",
".",
"cos",
"(",
"numpy",
".",
"radians",
"(",
"seg_azim",
"-",
"azimuth2",
")",
")",
">",
"0.0",
")",
")",
"# Find the indexes of points 'on the left of the segment'",
"idx_neg",
"=",
"numpy",
".",
"nonzero",
"(",
"numpy",
".",
"sin",
"(",
"numpy",
".",
"radians",
"(",
"(",
"azimuth1",
"-",
"seg_azim",
")",
")",
")",
"<",
"0.0",
")",
"# Now let's compute the distances for the two cases.",
"dists",
"=",
"numpy",
".",
"zeros_like",
"(",
"lons",
")",
"if",
"len",
"(",
"idx_in",
"[",
"0",
"]",
")",
":",
"dists",
"[",
"idx_in",
"]",
"=",
"distance_to_arc",
"(",
"seglons",
"[",
"0",
"]",
",",
"seglats",
"[",
"0",
"]",
",",
"seg_azim",
",",
"lons",
"[",
"idx_in",
"]",
",",
"lats",
"[",
"idx_in",
"]",
")",
"if",
"len",
"(",
"idx_out",
"[",
"0",
"]",
")",
":",
"dists",
"[",
"idx_out",
"]",
"=",
"min_geodetic_distance",
"(",
"(",
"seglons",
",",
"seglats",
")",
",",
"(",
"lons",
"[",
"idx_out",
"]",
",",
"lats",
"[",
"idx_out",
"]",
")",
")",
"# Finally we correct the sign of the distances in order to make sure that",
"# the points on the right semispace defined using as a reference the",
"# direction defined by the segment (i.e. the direction defined by going",
"# from the first point to the second one) have a positive distance and",
"# the others a negative one.",
"dists",
"=",
"abs",
"(",
"dists",
")",
"dists",
"[",
"idx_neg",
"]",
"=",
"-",
"dists",
"[",
"idx_neg",
"]",
"return",
"dists"
] | This function computes the shortest distance to a segment in a 2D reference
system.
:parameter seglons:
A list or an array of floats specifying the longitude values of the two
vertexes delimiting the segment.
:parameter seglats:
A list or an array of floats specifying the latitude values of the two
vertexes delimiting the segment.
:parameter lons:
A list or a 1D array of floats specifying the longitude values of the
points for which the calculation of the shortest distance is requested.
:parameter lats:
A list or a 1D array of floats specifying the latitude values of the
points for which the calculation of the shortest distance is requested.
:returns:
An array of the same shape as lons which contains for each point
defined by (lons, lats) the shortest distance to the segment.
Distances are negative for those points that stay on the 'left side'
of the segment direction and whose projection lies within the segment
edges. For all other points, distance is positive. | [
"This",
"function",
"computes",
"the",
"shortest",
"distance",
"to",
"a",
"segment",
"in",
"a",
"2D",
"reference",
"system",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L99-L174 |
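A small usage sketch of the sign convention (assuming openquake.hazardlib is installed): the segment runs due east along the equator, so a site to the north falls on its "left side" and should come back with a negative distance.

import numpy as np
from openquake.hazardlib.geo import geodetic

seglons = np.array([0.0, 1.0])   # segment from (0E, 0N) to (1E, 0N)
seglats = np.array([0.0, 0.0])
lons = np.array([0.5, 0.5])      # two sites projecting inside the segment,
lats = np.array([0.5, -0.5])     # one north and one south of it
print(geodetic.min_distance_to_segment(seglons, seglats, lons, lats))
# expected to be roughly [-55.6, +55.6] km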
229 | gem/oq-engine | openquake/hazardlib/geo/geodetic.py | min_geodetic_distance | def min_geodetic_distance(a, b):
"""
Compute the minimum distance between first mesh and each point
of the second mesh when both are defined on the earth surface.
:param a: a pair of (lons, lats) or an array of cartesian coordinates
:param b: a pair of (lons, lats) or an array of cartesian coordinates
"""
if isinstance(a, tuple):
a = spherical_to_cartesian(a[0].flatten(), a[1].flatten())
if isinstance(b, tuple):
b = spherical_to_cartesian(b[0].flatten(), b[1].flatten())
return cdist(a, b).min(axis=0) | python | def min_geodetic_distance(a, b):
"""
Compute the minimum distance between first mesh and each point
of the second mesh when both are defined on the earth surface.
:param a: a pair of (lons, lats) or an array of cartesian coordinates
:param b: a pair of (lons, lats) or an array of cartesian coordinates
"""
if isinstance(a, tuple):
a = spherical_to_cartesian(a[0].flatten(), a[1].flatten())
if isinstance(b, tuple):
b = spherical_to_cartesian(b[0].flatten(), b[1].flatten())
return cdist(a, b).min(axis=0) | [
"def",
"min_geodetic_distance",
"(",
"a",
",",
"b",
")",
":",
"if",
"isinstance",
"(",
"a",
",",
"tuple",
")",
":",
"a",
"=",
"spherical_to_cartesian",
"(",
"a",
"[",
"0",
"]",
".",
"flatten",
"(",
")",
",",
"a",
"[",
"1",
"]",
".",
"flatten",
"(",
")",
")",
"if",
"isinstance",
"(",
"b",
",",
"tuple",
")",
":",
"b",
"=",
"spherical_to_cartesian",
"(",
"b",
"[",
"0",
"]",
".",
"flatten",
"(",
")",
",",
"b",
"[",
"1",
"]",
".",
"flatten",
"(",
")",
")",
"return",
"cdist",
"(",
"a",
",",
"b",
")",
".",
"min",
"(",
"axis",
"=",
"0",
")"
] | Compute the minimum distance between first mesh and each point
of the second mesh when both are defined on the earth surface.
:param a: a pair of (lons, lats) or an array of cartesian coordinates
:param b: a pair of (lons, lats) or an array of cartesian coordinates | [
"Compute",
"the",
"minimum",
"distance",
"between",
"first",
"mesh",
"and",
"each",
"point",
"of",
"the",
"second",
"mesh",
"when",
"both",
"are",
"defined",
"on",
"the",
"earth",
"surface",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L224-L236 |
230 | gem/oq-engine | openquake/hazardlib/geo/geodetic.py | intervals_between | def intervals_between(lon1, lat1, depth1, lon2, lat2, depth2, length):
"""
Find a list of points between two given ones that lie on the same
great circle arc and are equally spaced by ``length`` km.
:param float lon1, lat1, depth1:
Coordinates of a point to start placing intervals from. The first
point in the resulting list has these coordinates.
:param float lon2, lat2, depth2:
Coordinates of the other end of the great circle arc segment
to put intervals on. The last resulting point might be closer
to the first reference point than the second one or further,
since the number of segments is taken as rounded division of
length between two reference points and ``length``.
:param length:
Required distance between two subsequent resulting points, in km.
:returns:
Tuple of three 1d numpy arrays: longitudes, latitudes and depths
of resulting points respectively.
Rounds the distance between two reference points with respect
to ``length`` and calls :func:`npoints_towards`.
"""
assert length > 0
hdist = geodetic_distance(lon1, lat1, lon2, lat2)
vdist = depth2 - depth1
# if this method is called multiple times with coordinates that are
# separated by the same distance, because of floating point imprecisions
# the total distance may have slightly different values (for instance if
# the distance between two set of points is 65 km, total distance can be
# 64.9999999999989910 and 65.0000000000020322). These two values bring to
# two different values of num_intervals (32 in the first case and 33 in
# the second), and this is a problem because for the same distance we
# should have the same number of intervals. To reduce potential differences
# due to floating point errors, we therefore round total_distance to a
# fixed precision (7)
total_distance = round(numpy.sqrt(hdist ** 2 + vdist ** 2), 7)
num_intervals = int(round(total_distance / length))
if num_intervals == 0:
return numpy.array([lon1]), numpy.array([lat1]), numpy.array([depth1])
dist_factor = (length * num_intervals) / total_distance
return npoints_towards(
lon1, lat1, depth1, azimuth(lon1, lat1, lon2, lat2),
hdist * dist_factor, vdist * dist_factor, num_intervals + 1) | python | def intervals_between(lon1, lat1, depth1, lon2, lat2, depth2, length):
"""
Find a list of points between two given ones that lie on the same
great circle arc and are equally spaced by ``length`` km.
:param float lon1, lat1, depth1:
Coordinates of a point to start placing intervals from. The first
point in the resulting list has these coordinates.
:param float lon2, lat2, depth2:
Coordinates of the other end of the great circle arc segment
to put intervals on. The last resulting point might be closer
to the first reference point than the second one or further,
since the number of segments is taken as rounded division of
length between two reference points and ``length``.
:param length:
Required distance between two subsequent resulting points, in km.
:returns:
Tuple of three 1d numpy arrays: longitudes, latitudes and depths
of resulting points respectively.
Rounds the distance between two reference points with respect
to ``length`` and calls :func:`npoints_towards`.
"""
assert length > 0
hdist = geodetic_distance(lon1, lat1, lon2, lat2)
vdist = depth2 - depth1
# if this method is called multiple times with coordinates that are
# separated by the same distance, because of floating point imprecisions
# the total distance may have slightly different values (for instance if
# the distance between two set of points is 65 km, total distance can be
# 64.9999999999989910 and 65.0000000000020322). These two values bring to
# two different values of num_intervals (32 in the first case and 33 in
# the second), and this is a problem because for the same distance we
# should have the same number of intervals. To reduce potential differences
# due to floating point errors, we therefore round total_distance to a
# fixed precision (7)
total_distance = round(numpy.sqrt(hdist ** 2 + vdist ** 2), 7)
num_intervals = int(round(total_distance / length))
if num_intervals == 0:
return numpy.array([lon1]), numpy.array([lat1]), numpy.array([depth1])
dist_factor = (length * num_intervals) / total_distance
return npoints_towards(
lon1, lat1, depth1, azimuth(lon1, lat1, lon2, lat2),
hdist * dist_factor, vdist * dist_factor, num_intervals + 1) | [
"def",
"intervals_between",
"(",
"lon1",
",",
"lat1",
",",
"depth1",
",",
"lon2",
",",
"lat2",
",",
"depth2",
",",
"length",
")",
":",
"assert",
"length",
">",
"0",
"hdist",
"=",
"geodetic_distance",
"(",
"lon1",
",",
"lat1",
",",
"lon2",
",",
"lat2",
")",
"vdist",
"=",
"depth2",
"-",
"depth1",
"# if this method is called multiple times with coordinates that are",
"# separated by the same distance, because of floating point imprecisions",
"# the total distance may have slightly different values (for instance if",
"# the distance between two set of points is 65 km, total distance can be",
"# 64.9999999999989910 and 65.0000000000020322). These two values bring to",
"# two different values of num_intervals (32 in the first case and 33 in",
"# the second), and this is a problem because for the same distance we",
"# should have the same number of intervals. To reduce potential differences",
"# due to floating point errors, we therefore round total_distance to a",
"# fixed precision (7)",
"total_distance",
"=",
"round",
"(",
"numpy",
".",
"sqrt",
"(",
"hdist",
"**",
"2",
"+",
"vdist",
"**",
"2",
")",
",",
"7",
")",
"num_intervals",
"=",
"int",
"(",
"round",
"(",
"total_distance",
"/",
"length",
")",
")",
"if",
"num_intervals",
"==",
"0",
":",
"return",
"numpy",
".",
"array",
"(",
"[",
"lon1",
"]",
")",
",",
"numpy",
".",
"array",
"(",
"[",
"lat1",
"]",
")",
",",
"numpy",
".",
"array",
"(",
"[",
"depth1",
"]",
")",
"dist_factor",
"=",
"(",
"length",
"*",
"num_intervals",
")",
"/",
"total_distance",
"return",
"npoints_towards",
"(",
"lon1",
",",
"lat1",
",",
"depth1",
",",
"azimuth",
"(",
"lon1",
",",
"lat1",
",",
"lon2",
",",
"lat2",
")",
",",
"hdist",
"*",
"dist_factor",
",",
"vdist",
"*",
"dist_factor",
",",
"num_intervals",
"+",
"1",
")"
] | Find a list of points between two given ones that lie on the same
great circle arc and are equally spaced by ``length`` km.
:param float lon1, lat1, depth1:
Coordinates of a point to start placing intervals from. The first
point in the resulting list has these coordinates.
:param float lon2, lat2, depth2:
Coordinates of the other end of the great circle arc segment
to put intervals on. The last resulting point might be closer
to the first reference point than the second one or further,
since the number of segments is taken as rounded division of
length between two reference points and ``length``.
:param length:
Required distance between two subsequent resulting points, in km.
:returns:
Tuple of three 1d numpy arrays: longitudes, latitudes and depths
of resulting points respectively.
Rounds the distance between two reference points with respect
to ``length`` and calls :func:`npoints_towards`. | [
"Find",
"a",
"list",
"of",
"points",
"between",
"two",
"given",
"ones",
"that",
"lie",
"on",
"the",
"same",
"great",
"circle",
"arc",
"and",
"are",
"equally",
"spaced",
"by",
"length",
"km",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L259-L302 |
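A usage sketch (assuming openquake.hazardlib is installed) that places points roughly every 10 km along a meridian, with depth increasing linearly between the two end points:

from openquake.hazardlib.geo import geodetic

# From (0E, 0N, surface) to (0E, 1N, 5 km depth), one point every ~10 km
lons, lats, depths = geodetic.intervals_between(0.0, 0.0, 0.0,
                                                0.0, 1.0, 5.0,
                                                length=10.0)
print(len(lons), lats[-1], depths[-1])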
231 | gem/oq-engine | openquake/hazardlib/geo/geodetic.py | npoints_between | def npoints_between(lon1, lat1, depth1, lon2, lat2, depth2, npoints):
"""
Find a list of specified number of points between two given ones that are
equally spaced along the great circle arc connecting given points.
:param float lon1, lat1, depth1:
Coordinates of a point to start from. The first point in a resulting
list has these coordinates.
:param float lon2, lat2, depth2:
Coordinates of a point to finish at. The last point in a resulting
list has these coordinates.
:param npoints:
Integer number of points to return. First and last points count,
so if there have to be two intervals, ``npoints`` should be 3.
:returns:
Tuple of three 1d numpy arrays: longitudes, latitudes and depths
of resulting points respectively.
Finds distance between two reference points and calls
:func:`npoints_towards`.
"""
hdist = geodetic_distance(lon1, lat1, lon2, lat2)
vdist = depth2 - depth1
rlons, rlats, rdepths = npoints_towards(
lon1, lat1, depth1, azimuth(lon1, lat1, lon2, lat2),
hdist, vdist, npoints
)
# the last point should be left intact
rlons[-1] = lon2
rlats[-1] = lat2
rdepths[-1] = depth2
return rlons, rlats, rdepths | python | def npoints_between(lon1, lat1, depth1, lon2, lat2, depth2, npoints):
"""
Find a list of specified number of points between two given ones that are
equally spaced along the great circle arc connecting given points.
:param float lon1, lat1, depth1:
Coordinates of a point to start from. The first point in a resulting
list has these coordinates.
:param float lon2, lat2, depth2:
Coordinates of a point to finish at. The last point in a resulting
list has these coordinates.
:param npoints:
Integer number of points to return. First and last points count,
so if there have to be two intervals, ``npoints`` should be 3.
:returns:
Tuple of three 1d numpy arrays: longitudes, latitudes and depths
of resulting points respectively.
Finds distance between two reference points and calls
:func:`npoints_towards`.
"""
hdist = geodetic_distance(lon1, lat1, lon2, lat2)
vdist = depth2 - depth1
rlons, rlats, rdepths = npoints_towards(
lon1, lat1, depth1, azimuth(lon1, lat1, lon2, lat2),
hdist, vdist, npoints
)
# the last point should be left intact
rlons[-1] = lon2
rlats[-1] = lat2
rdepths[-1] = depth2
return rlons, rlats, rdepths | [
"def",
"npoints_between",
"(",
"lon1",
",",
"lat1",
",",
"depth1",
",",
"lon2",
",",
"lat2",
",",
"depth2",
",",
"npoints",
")",
":",
"hdist",
"=",
"geodetic_distance",
"(",
"lon1",
",",
"lat1",
",",
"lon2",
",",
"lat2",
")",
"vdist",
"=",
"depth2",
"-",
"depth1",
"rlons",
",",
"rlats",
",",
"rdepths",
"=",
"npoints_towards",
"(",
"lon1",
",",
"lat1",
",",
"depth1",
",",
"azimuth",
"(",
"lon1",
",",
"lat1",
",",
"lon2",
",",
"lat2",
")",
",",
"hdist",
",",
"vdist",
",",
"npoints",
")",
"# the last point should be left intact",
"rlons",
"[",
"-",
"1",
"]",
"=",
"lon2",
"rlats",
"[",
"-",
"1",
"]",
"=",
"lat2",
"rdepths",
"[",
"-",
"1",
"]",
"=",
"depth2",
"return",
"rlons",
",",
"rlats",
",",
"rdepths"
] | Find a list of specified number of points between two given ones that are
equally spaced along the great circle arc connecting given points.
:param float lon1, lat1, depth1:
Coordinates of a point to start from. The first point in a resulting
list has these coordinates.
:param float lon2, lat2, depth2:
Coordinates of a point to finish at. The last point in a resulting
list has these coordinates.
:param npoints:
Integer number of points to return. First and last points count,
so if there have to be two intervals, ``npoints`` should be 3.
:returns:
Tuple of three 1d numpy arrays: longitudes, latitudes and depths
of resulting points respectively.
Finds distance between two reference points and calls
:func:`npoints_towards`. | [
"Find",
"a",
"list",
"of",
"specified",
"number",
"of",
"points",
"between",
"two",
"given",
"ones",
"that",
"are",
"equally",
"spaced",
"along",
"the",
"great",
"circle",
"arc",
"connecting",
"given",
"points",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L305-L336 |
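A minimal usage sketch for npoints_between, assuming the oq-engine package is importable; the coordinates and point count below are illustrative values, not taken from the record above.

from openquake.hazardlib.geo.geodetic import npoints_between

# five equally spaced points from (0E, 0N, 0 km) to (0.9E, 0N, 10 km);
# the two end points are returned exactly as given
lons, lats, depths = npoints_between(0.0, 0.0, 0.0, 0.9, 0.0, 10.0, npoints=5)
print(lons[0], lons[-1])      # 0.0 0.9
print(depths[0], depths[-1])  # 0.0 10.0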
232 | gem/oq-engine | openquake/hazardlib/geo/geodetic.py | npoints_towards | def npoints_towards(lon, lat, depth, azimuth, hdist, vdist, npoints):
"""
Find a list of specified number of points starting from a given one
along a great circle arc with a given azimuth measured in a given point.
:param float lon, lat, depth:
Coordinates of a point to start from. The first point in a resulting
list has these coordinates.
:param azimuth:
A direction representing a great circle arc together with a reference
point.
:param hdist:
Horizontal (geodetic) distance from reference point to the last point
of the resulting list, in km.
:param vdist:
Vertical (depth) distance between reference and the last point, in km.
:param npoints:
Integer number of points to return. First and last points count,
so if there have to be two intervals, ``npoints`` should be 3.
:returns:
Tuple of three 1d numpy arrays: longitudes, latitudes and depths
of resulting points respectively.
Implements "completely general but more complicated algorithm" from
http://williams.best.vwh.net/avform.htm#LL
"""
assert npoints > 1
rlon, rlat = numpy.radians(lon), numpy.radians(lat)
tc = numpy.radians(360 - azimuth)
hdists = numpy.arange(npoints, dtype=float)
hdists *= (hdist / EARTH_RADIUS) / (npoints - 1)
vdists = numpy.arange(npoints, dtype=float)
vdists *= vdist / (npoints - 1)
sin_dists = numpy.sin(hdists)
cos_dists = numpy.cos(hdists)
sin_lat = numpy.sin(rlat)
cos_lat = numpy.cos(rlat)
sin_lats = sin_lat * cos_dists + cos_lat * sin_dists * numpy.cos(tc)
lats = numpy.degrees(numpy.arcsin(sin_lats))
dlon = numpy.arctan2(numpy.sin(tc) * sin_dists * cos_lat,
cos_dists - sin_lat * sin_lats)
lons = numpy.mod(rlon - dlon + numpy.pi, 2 * numpy.pi) - numpy.pi
lons = numpy.degrees(lons)
depths = vdists + depth
# the first point should be left intact
lons[0] = lon
lats[0] = lat
depths[0] = depth
return lons, lats, depths | python | def npoints_towards(lon, lat, depth, azimuth, hdist, vdist, npoints):
"""
Find a list of specified number of points starting from a given one
along a great circle arc with a given azimuth measured in a given point.
:param float lon, lat, depth:
Coordinates of a point to start from. The first point in a resulting
list has these coordinates.
:param azimuth:
A direction representing a great circle arc together with a reference
point.
:param hdist:
Horizontal (geodetic) distance from reference point to the last point
of the resulting list, in km.
:param vdist:
Vertical (depth) distance between reference and the last point, in km.
:param npoints:
Integer number of points to return. First and last points count,
so if there have to be two intervals, ``npoints`` should be 3.
:returns:
Tuple of three 1d numpy arrays: longitudes, latitudes and depths
of resulting points respectively.
Implements "completely general but more complicated algorithm" from
http://williams.best.vwh.net/avform.htm#LL
"""
assert npoints > 1
rlon, rlat = numpy.radians(lon), numpy.radians(lat)
tc = numpy.radians(360 - azimuth)
hdists = numpy.arange(npoints, dtype=float)
hdists *= (hdist / EARTH_RADIUS) / (npoints - 1)
vdists = numpy.arange(npoints, dtype=float)
vdists *= vdist / (npoints - 1)
sin_dists = numpy.sin(hdists)
cos_dists = numpy.cos(hdists)
sin_lat = numpy.sin(rlat)
cos_lat = numpy.cos(rlat)
sin_lats = sin_lat * cos_dists + cos_lat * sin_dists * numpy.cos(tc)
lats = numpy.degrees(numpy.arcsin(sin_lats))
dlon = numpy.arctan2(numpy.sin(tc) * sin_dists * cos_lat,
cos_dists - sin_lat * sin_lats)
lons = numpy.mod(rlon - dlon + numpy.pi, 2 * numpy.pi) - numpy.pi
lons = numpy.degrees(lons)
depths = vdists + depth
# the first point should be left intact
lons[0] = lon
lats[0] = lat
depths[0] = depth
return lons, lats, depths | [
"def",
"npoints_towards",
"(",
"lon",
",",
"lat",
",",
"depth",
",",
"azimuth",
",",
"hdist",
",",
"vdist",
",",
"npoints",
")",
":",
"assert",
"npoints",
">",
"1",
"rlon",
",",
"rlat",
"=",
"numpy",
".",
"radians",
"(",
"lon",
")",
",",
"numpy",
".",
"radians",
"(",
"lat",
")",
"tc",
"=",
"numpy",
".",
"radians",
"(",
"360",
"-",
"azimuth",
")",
"hdists",
"=",
"numpy",
".",
"arange",
"(",
"npoints",
",",
"dtype",
"=",
"float",
")",
"hdists",
"*=",
"(",
"hdist",
"/",
"EARTH_RADIUS",
")",
"/",
"(",
"npoints",
"-",
"1",
")",
"vdists",
"=",
"numpy",
".",
"arange",
"(",
"npoints",
",",
"dtype",
"=",
"float",
")",
"vdists",
"*=",
"vdist",
"/",
"(",
"npoints",
"-",
"1",
")",
"sin_dists",
"=",
"numpy",
".",
"sin",
"(",
"hdists",
")",
"cos_dists",
"=",
"numpy",
".",
"cos",
"(",
"hdists",
")",
"sin_lat",
"=",
"numpy",
".",
"sin",
"(",
"rlat",
")",
"cos_lat",
"=",
"numpy",
".",
"cos",
"(",
"rlat",
")",
"sin_lats",
"=",
"sin_lat",
"*",
"cos_dists",
"+",
"cos_lat",
"*",
"sin_dists",
"*",
"numpy",
".",
"cos",
"(",
"tc",
")",
"lats",
"=",
"numpy",
".",
"degrees",
"(",
"numpy",
".",
"arcsin",
"(",
"sin_lats",
")",
")",
"dlon",
"=",
"numpy",
".",
"arctan2",
"(",
"numpy",
".",
"sin",
"(",
"tc",
")",
"*",
"sin_dists",
"*",
"cos_lat",
",",
"cos_dists",
"-",
"sin_lat",
"*",
"sin_lats",
")",
"lons",
"=",
"numpy",
".",
"mod",
"(",
"rlon",
"-",
"dlon",
"+",
"numpy",
".",
"pi",
",",
"2",
"*",
"numpy",
".",
"pi",
")",
"-",
"numpy",
".",
"pi",
"lons",
"=",
"numpy",
".",
"degrees",
"(",
"lons",
")",
"depths",
"=",
"vdists",
"+",
"depth",
"# the first point should be left intact",
"lons",
"[",
"0",
"]",
"=",
"lon",
"lats",
"[",
"0",
"]",
"=",
"lat",
"depths",
"[",
"0",
"]",
"=",
"depth",
"return",
"lons",
",",
"lats",
",",
"depths"
] | Find a list of specified number of points starting from a given one
along a great circle arc with a given azimuth measured in a given point.
:param float lon, lat, depth:
Coordinates of a point to start from. The first point in a resulting
list has these coordinates.
:param azimuth:
A direction representing a great circle arc together with a reference
point.
:param hdist:
Horizontal (geodetic) distance from reference point to the last point
of the resulting list, in km.
:param vdist:
Vertical (depth) distance between reference and the last point, in km.
:param npoints:
Integer number of points to return. First and last points count,
so if there have to be two intervals, ``npoints`` should be 3.
:returns:
Tuple of three 1d numpy arrays: longitudes, latitudes and depths
of resulting points respectively.
Implements "completely general but more complicated algorithm" from
http://williams.best.vwh.net/avform.htm#LL | [
"Find",
"a",
"list",
"of",
"specified",
"number",
"of",
"points",
"starting",
"from",
"a",
"given",
"one",
"along",
"a",
"great",
"circle",
"arc",
"with",
"a",
"given",
"azimuth",
"measured",
"in",
"a",
"given",
"point",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L339-L393 |
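A hedged usage sketch for npoints_towards, again assuming oq-engine is importable; the azimuth and distances are made-up values.

from openquake.hazardlib.geo.geodetic import npoints_towards

# walk 100 km horizontally and 10 km down from (30E, 40N, 5 km) due east,
# returning 5 points that include the starting point itself
lons, lats, depths = npoints_towards(30.0, 40.0, 5.0, 90.0, 100.0, 10.0, 5)
print(lons[0], lats[0], depths[0])  # exactly the input point: 30.0 40.0 5.0
print(depths[-1])                   # 15.0 = start depth + vdist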
233 | gem/oq-engine | openquake/hazardlib/geo/geodetic.py | _prepare_coords | def _prepare_coords(lons1, lats1, lons2, lats2):
"""
Convert two pairs of spherical coordinates in decimal degrees
to numpy arrays of radians. Makes sure that respective coordinates
in pairs have the same shape.
"""
lons1 = numpy.radians(lons1)
lats1 = numpy.radians(lats1)
assert lons1.shape == lats1.shape
lons2 = numpy.radians(lons2)
lats2 = numpy.radians(lats2)
assert lons2.shape == lats2.shape
return lons1, lats1, lons2, lats2 | python | def _prepare_coords(lons1, lats1, lons2, lats2):
"""
Convert two pairs of spherical coordinates in decimal degrees
to numpy arrays of radians. Makes sure that respective coordinates
in pairs have the same shape.
"""
lons1 = numpy.radians(lons1)
lats1 = numpy.radians(lats1)
assert lons1.shape == lats1.shape
lons2 = numpy.radians(lons2)
lats2 = numpy.radians(lats2)
assert lons2.shape == lats2.shape
return lons1, lats1, lons2, lats2 | [
"def",
"_prepare_coords",
"(",
"lons1",
",",
"lats1",
",",
"lons2",
",",
"lats2",
")",
":",
"lons1",
"=",
"numpy",
".",
"radians",
"(",
"lons1",
")",
"lats1",
"=",
"numpy",
".",
"radians",
"(",
"lats1",
")",
"assert",
"lons1",
".",
"shape",
"==",
"lats1",
".",
"shape",
"lons2",
"=",
"numpy",
".",
"radians",
"(",
"lons2",
")",
"lats2",
"=",
"numpy",
".",
"radians",
"(",
"lats2",
")",
"assert",
"lons2",
".",
"shape",
"==",
"lats2",
".",
"shape",
"return",
"lons1",
",",
"lats1",
",",
"lons2",
",",
"lats2"
] | Convert two pairs of spherical coordinates in decimal degrees
to numpy arrays of radians. Makes sure that respective coordinates
in pairs have the same shape. | [
"Convert",
"two",
"pairs",
"of",
"spherical",
"coordinates",
"in",
"decimal",
"degrees",
"to",
"numpy",
"arrays",
"of",
"radians",
".",
"Makes",
"sure",
"that",
"respective",
"coordinates",
"in",
"pairs",
"have",
"the",
"same",
"shape",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/geodetic.py#L528-L540 |
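A standalone sketch of the conversion this private helper performs (degrees to radians plus a shape check); the arrays are illustrative and the helper itself is not imported.

import numpy

lons = numpy.array([10.0, 20.0, 30.0])
lats = numpy.array([45.0, 46.0, 47.0])
# same steps as _prepare_coords: convert to radians and make sure each
# longitude array is paired with a latitude array of the same shape
rlons, rlats = numpy.radians(lons), numpy.radians(lats)
assert rlons.shape == rlats.shape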
234 | gem/oq-engine | openquake/hmtk/sources/simple_fault_source.py | mtkSimpleFaultSource.select_catalogue | def select_catalogue(self, selector, distance,
distance_metric='joyner-boore', upper_eq_depth=None,
lower_eq_depth=None):
'''
Selects earthquakes within a distance of the fault
:param selector:
Populated instance of :class:
`openquake.hmtk.seismicity.selector.CatalogueSelector`
:param distance:
Distance from point (km) for selection
:param str distance_metric:
Choice of fault source distance metric 'joyner-boore' or 'rupture'
:param float upper_eq_depth:
Upper hypocentral depth of hypocentres to be selected
:param float lower_eq_depth:
Lower hypocentral depth of hypocentres to be selected
'''
if selector.catalogue.get_number_events() < 1:
raise ValueError('No events found in catalogue!')
# rupture metric is selected and dip != 90 or 'rupture'
if ('rupture' in distance_metric) and (fabs(self.dip - 90) > 1E-5):
# Use rupture distance
self.catalogue = selector.within_rupture_distance(
self.geometry,
distance,
upper_depth=upper_eq_depth,
lower_depth=lower_eq_depth)
else:
# Use Joyner-Boore distance
self.catalogue = selector.within_joyner_boore_distance(
self.geometry,
distance,
upper_depth=upper_eq_depth,
lower_depth=lower_eq_depth)
if self.catalogue.get_number_events() < 5:
# Throw a warning regarding the small number of earthquakes in
# the source!
warnings.warn('Source %s (%s) has fewer than 5 events'
% (self.id, self.name)) | python | def select_catalogue(self, selector, distance,
distance_metric='joyner-boore', upper_eq_depth=None,
lower_eq_depth=None):
'''
Selects earthquakes within a distance of the fault
:param selector:
Populated instance of :class:
`openquake.hmtk.seismicity.selector.CatalogueSelector`
:param distance:
Distance from point (km) for selection
:param str distance_metric:
Choice of fault source distance metric 'joyner-boore' or 'rupture'
:param float upper_eq_depth:
Upper hypocentral depth of hypocentres to be selected
:param float lower_eq_depth:
Lower hypocentral depth of hypocentres to be selected
'''
if selector.catalogue.get_number_events() < 1:
raise ValueError('No events found in catalogue!')
# rupture metric is selected and dip != 90 or 'rupture'
if ('rupture' in distance_metric) and (fabs(self.dip - 90) > 1E-5):
# Use rupture distance
self.catalogue = selector.within_rupture_distance(
self.geometry,
distance,
upper_depth=upper_eq_depth,
lower_depth=lower_eq_depth)
else:
# Use Joyner-Boore distance
self.catalogue = selector.within_joyner_boore_distance(
self.geometry,
distance,
upper_depth=upper_eq_depth,
lower_depth=lower_eq_depth)
if self.catalogue.get_number_events() < 5:
# Throw a warning regarding the small number of earthquakes in
# the source!
warnings.warn('Source %s (%s) has fewer than 5 events'
% (self.id, self.name)) | [
"def",
"select_catalogue",
"(",
"self",
",",
"selector",
",",
"distance",
",",
"distance_metric",
"=",
"'joyner-boore'",
",",
"upper_eq_depth",
"=",
"None",
",",
"lower_eq_depth",
"=",
"None",
")",
":",
"if",
"selector",
".",
"catalogue",
".",
"get_number_events",
"(",
")",
"<",
"1",
":",
"raise",
"ValueError",
"(",
"'No events found in catalogue!'",
")",
"# rupture metric is selected and dip != 90 or 'rupture'",
"if",
"(",
"'rupture'",
"in",
"distance_metric",
")",
"and",
"(",
"fabs",
"(",
"self",
".",
"dip",
"-",
"90",
")",
">",
"1E-5",
")",
":",
"# Use rupture distance",
"self",
".",
"catalogue",
"=",
"selector",
".",
"within_rupture_distance",
"(",
"self",
".",
"geometry",
",",
"distance",
",",
"upper_depth",
"=",
"upper_eq_depth",
",",
"lower_depth",
"=",
"lower_eq_depth",
")",
"else",
":",
"# Use Joyner-Boore distance",
"self",
".",
"catalogue",
"=",
"selector",
".",
"within_joyner_boore_distance",
"(",
"self",
".",
"geometry",
",",
"distance",
",",
"upper_depth",
"=",
"upper_eq_depth",
",",
"lower_depth",
"=",
"lower_eq_depth",
")",
"if",
"self",
".",
"catalogue",
".",
"get_number_events",
"(",
")",
"<",
"5",
":",
"# Throw a warning regarding the small number of earthquakes in",
"# the source!",
"warnings",
".",
"warn",
"(",
"'Source %s (%s) has fewer than 5 events'",
"%",
"(",
"self",
".",
"id",
",",
"self",
".",
"name",
")",
")"
] | Selects earthquakes within a distance of the fault
:param selector:
Populated instance of :class:
`openquake.hmtk.seismicity.selector.CatalogueSelector`
:param distance:
Distance from point (km) for selection
:param str distance_metric
Choice of fault source distance metric 'joyner-boore' or 'rupture'
:param float upper_eq_depth:
Upper hypocentral depth of hypocentres to be selected
:param float lower_eq_depth:
Lower hypocentral depth of hypocentres to be selected | [
"Selects",
"earthquakes",
"within",
"a",
"distance",
"of",
"the",
"fault"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/sources/simple_fault_source.py#L191-L237 |
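A hedged sketch of calling select_catalogue; the fault source and catalogue selector are assumed to be pre-built objects supplied by the caller, so the call is wrapped in a function rather than executed directly.

def select_events_near_fault(fault, selector, distance_km=50.0):
    # 'fault' is assumed to be an mtkSimpleFaultSource and 'selector' a
    # CatalogueSelector already populated with an earthquake catalogue
    fault.select_catalogue(selector, distance_km, distance_metric='joyner-boore')
    return fault.catalogue.get_number_events()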
235 | gem/oq-engine | openquake/hmtk/plotting/faults/geology_mfd_plot.py | plot_recurrence_models | def plot_recurrence_models(
configs, area, slip, msr, rake,
shear_modulus=30.0, disp_length_ratio=1.25E-5, msr_sigma=0.,
figure_size=(8, 6), filename=None, filetype='png', dpi=300, ax=None):
"""
Plots a set of recurrence models
:param list configs:
List of configuration dictionaries
"""
if ax is None:
fig, ax = plt.subplots(figsize=figure_size)
else:
fig = ax.get_figure()
for config in configs:
model = RecurrenceBranch(area, slip, msr, rake, shear_modulus,
disp_length_ratio, msr_sigma, weight=1.0)
model.get_recurrence(config)
occurrence = model.recurrence.occur_rates
cumulative = np.array([np.sum(occurrence[iloc:])
for iloc in range(0, len(occurrence))])
if 'AndersonLuco' in config['Model_Name']:
flt_label = config['Model_Name'] + ' - ' + config['Model_Type'] +\
' Type'
else:
flt_label = config['Model_Name']
flt_color = np.random.uniform(0.1, 1.0, 3)
ax.semilogy(model.magnitudes, cumulative, '-', label=flt_label,
color=flt_color, linewidth=2.)
ax.semilogy(model.magnitudes, model.recurrence.occur_rates, '--',
color=flt_color, linewidth=2.)
ax.set_xlabel('Magnitude')
ax.set_ylabel('Annual Rate')
ax.legend(bbox_to_anchor=(1.1, 1.0))
_save_image(fig, filename, filetype, dpi) | python | def plot_recurrence_models(
configs, area, slip, msr, rake,
shear_modulus=30.0, disp_length_ratio=1.25E-5, msr_sigma=0.,
figure_size=(8, 6), filename=None, filetype='png', dpi=300, ax=None):
"""
Plots a set of recurrence models
:param list configs:
List of configuration dictionaries
"""
if ax is None:
fig, ax = plt.subplots(figsize=figure_size)
else:
fig = ax.get_figure()
for config in configs:
model = RecurrenceBranch(area, slip, msr, rake, shear_modulus,
disp_length_ratio, msr_sigma, weight=1.0)
model.get_recurrence(config)
occurrence = model.recurrence.occur_rates
cumulative = np.array([np.sum(occurrence[iloc:])
for iloc in range(0, len(occurrence))])
if 'AndersonLuco' in config['Model_Name']:
flt_label = config['Model_Name'] + ' - ' + config['Model_Type'] +\
' Type'
else:
flt_label = config['Model_Name']
flt_color = np.random.uniform(0.1, 1.0, 3)
ax.semilogy(model.magnitudes, cumulative, '-', label=flt_label,
color=flt_color, linewidth=2.)
ax.semilogy(model.magnitudes, model.recurrence.occur_rates, '--',
color=flt_color, linewidth=2.)
ax.set_xlabel('Magnitude')
ax.set_ylabel('Annual Rate')
ax.legend(bbox_to_anchor=(1.1, 1.0))
_save_image(fig, filename, filetype, dpi) | [
"def",
"plot_recurrence_models",
"(",
"configs",
",",
"area",
",",
"slip",
",",
"msr",
",",
"rake",
",",
"shear_modulus",
"=",
"30.0",
",",
"disp_length_ratio",
"=",
"1.25E-5",
",",
"msr_sigma",
"=",
"0.",
",",
"figure_size",
"=",
"(",
"8",
",",
"6",
")",
",",
"filename",
"=",
"None",
",",
"filetype",
"=",
"'png'",
",",
"dpi",
"=",
"300",
",",
"ax",
"=",
"None",
")",
":",
"if",
"ax",
"is",
"None",
":",
"fig",
",",
"ax",
"=",
"plt",
".",
"subplots",
"(",
"figsize",
"=",
"figure_size",
")",
"else",
":",
"fig",
"=",
"ax",
".",
"get_figure",
"(",
")",
"for",
"config",
"in",
"configs",
":",
"model",
"=",
"RecurrenceBranch",
"(",
"area",
",",
"slip",
",",
"msr",
",",
"rake",
",",
"shear_modulus",
",",
"disp_length_ratio",
",",
"msr_sigma",
",",
"weight",
"=",
"1.0",
")",
"model",
".",
"get_recurrence",
"(",
"config",
")",
"occurrence",
"=",
"model",
".",
"recurrence",
".",
"occur_rates",
"cumulative",
"=",
"np",
".",
"array",
"(",
"[",
"np",
".",
"sum",
"(",
"occurrence",
"[",
"iloc",
":",
"]",
")",
"for",
"iloc",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"occurrence",
")",
")",
"]",
")",
"if",
"'AndersonLuco'",
"in",
"config",
"[",
"'Model_Name'",
"]",
":",
"flt_label",
"=",
"config",
"[",
"'Model_Name'",
"]",
"+",
"' - '",
"+",
"config",
"[",
"'Model_Type'",
"]",
"+",
"' Type'",
"else",
":",
"flt_label",
"=",
"config",
"[",
"'Model_Name'",
"]",
"flt_color",
"=",
"np",
".",
"random",
".",
"uniform",
"(",
"0.1",
",",
"1.0",
",",
"3",
")",
"ax",
".",
"semilogy",
"(",
"model",
".",
"magnitudes",
",",
"cumulative",
",",
"'-'",
",",
"label",
"=",
"flt_label",
",",
"color",
"=",
"flt_color",
",",
"linewidth",
"=",
"2.",
")",
"ax",
".",
"semilogy",
"(",
"model",
".",
"magnitudes",
",",
"model",
".",
"recurrence",
".",
"occur_rates",
",",
"'--'",
",",
"color",
"=",
"flt_color",
",",
"linewidth",
"=",
"2.",
")",
"ax",
".",
"set_xlabel",
"(",
"'Magnitude'",
")",
"ax",
".",
"set_ylabel",
"(",
"'Annual Rate'",
")",
"ax",
".",
"legend",
"(",
"bbox_to_anchor",
"=",
"(",
"1.1",
",",
"1.0",
")",
")",
"_save_image",
"(",
"fig",
",",
"filename",
",",
"filetype",
",",
"dpi",
")"
] | Plots a set of recurrence models
:param list configs:
List of configuration dictionaries | [
"Plots",
"a",
"set",
"of",
"recurrence",
"models"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/plotting/faults/geology_mfd_plot.py#L69-L105 |
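The incremental-to-cumulative conversion used inside the plotting loop above, shown as a standalone numpy sketch with made-up occurrence rates.

import numpy as np

occurrence = np.array([1e-2, 5e-3, 2e-3, 1e-3])  # made-up incremental rates
cumulative = np.array([np.sum(occurrence[iloc:])
                       for iloc in range(0, len(occurrence))])
print(cumulative)  # [0.018 0.008 0.003 0.001]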
236 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_area_source_geometry | def build_area_source_geometry(area_source):
"""
Returns the area source geometry as a Node
:param area_source:
Area source model as an instance of the :class:
`openquake.hazardlib.source.area.AreaSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
geom = []
for lon_lat in zip(area_source.polygon.lons, area_source.polygon.lats):
geom.extend(lon_lat)
poslist_node = Node("gml:posList", text=geom)
linear_ring_node = Node("gml:LinearRing", nodes=[poslist_node])
exterior_node = Node("gml:exterior", nodes=[linear_ring_node])
polygon_node = Node("gml:Polygon", nodes=[exterior_node])
upper_depth_node = Node(
"upperSeismoDepth", text=area_source.upper_seismogenic_depth)
lower_depth_node = Node(
"lowerSeismoDepth", text=area_source.lower_seismogenic_depth)
return Node(
"areaGeometry", {'discretization': area_source.area_discretization},
nodes=[polygon_node, upper_depth_node, lower_depth_node]) | python | def build_area_source_geometry(area_source):
"""
Returns the area source geometry as a Node
:param area_source:
Area source model as an instance of the :class:
`openquake.hazardlib.source.area.AreaSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
geom = []
for lon_lat in zip(area_source.polygon.lons, area_source.polygon.lats):
geom.extend(lon_lat)
poslist_node = Node("gml:posList", text=geom)
linear_ring_node = Node("gml:LinearRing", nodes=[poslist_node])
exterior_node = Node("gml:exterior", nodes=[linear_ring_node])
polygon_node = Node("gml:Polygon", nodes=[exterior_node])
upper_depth_node = Node(
"upperSeismoDepth", text=area_source.upper_seismogenic_depth)
lower_depth_node = Node(
"lowerSeismoDepth", text=area_source.lower_seismogenic_depth)
return Node(
"areaGeometry", {'discretization': area_source.area_discretization},
nodes=[polygon_node, upper_depth_node, lower_depth_node]) | [
"def",
"build_area_source_geometry",
"(",
"area_source",
")",
":",
"geom",
"=",
"[",
"]",
"for",
"lon_lat",
"in",
"zip",
"(",
"area_source",
".",
"polygon",
".",
"lons",
",",
"area_source",
".",
"polygon",
".",
"lats",
")",
":",
"geom",
".",
"extend",
"(",
"lon_lat",
")",
"poslist_node",
"=",
"Node",
"(",
"\"gml:posList\"",
",",
"text",
"=",
"geom",
")",
"linear_ring_node",
"=",
"Node",
"(",
"\"gml:LinearRing\"",
",",
"nodes",
"=",
"[",
"poslist_node",
"]",
")",
"exterior_node",
"=",
"Node",
"(",
"\"gml:exterior\"",
",",
"nodes",
"=",
"[",
"linear_ring_node",
"]",
")",
"polygon_node",
"=",
"Node",
"(",
"\"gml:Polygon\"",
",",
"nodes",
"=",
"[",
"exterior_node",
"]",
")",
"upper_depth_node",
"=",
"Node",
"(",
"\"upperSeismoDepth\"",
",",
"text",
"=",
"area_source",
".",
"upper_seismogenic_depth",
")",
"lower_depth_node",
"=",
"Node",
"(",
"\"lowerSeismoDepth\"",
",",
"text",
"=",
"area_source",
".",
"lower_seismogenic_depth",
")",
"return",
"Node",
"(",
"\"areaGeometry\"",
",",
"{",
"'discretization'",
":",
"area_source",
".",
"area_discretization",
"}",
",",
"nodes",
"=",
"[",
"polygon_node",
",",
"upper_depth_node",
",",
"lower_depth_node",
"]",
")"
] | Returns the area source geometry as a Node
:param area_source:
Area source model as an instance of the :class:
`openquake.hazardlib.source.area.AreaSource`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Returns",
"the",
"area",
"source",
"geometry",
"as",
"a",
"Node"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L35-L58 |
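A standalone sketch of how the polygon vertices end up interleaved in the gml:posList text; the coordinates are made up.

lons = [10.0, 10.5, 10.5, 10.0]
lats = [45.0, 45.0, 45.5, 45.5]
geom = []
for lon_lat in zip(lons, lats):
    geom.extend(lon_lat)  # lon, lat, lon, lat, ... as a single flat list
print(geom)  # [10.0, 45.0, 10.5, 45.0, 10.5, 45.5, 10.0, 45.5]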
237 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_point_source_geometry | def build_point_source_geometry(point_source):
"""
Returns the point source geometry as a Node
:param point_source:
Point source model as an instance of the :class:
`openquake.hazardlib.source.point.PointSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
xy = point_source.location.x, point_source.location.y
pos_node = Node("gml:pos", text=xy)
point_node = Node("gml:Point", nodes=[pos_node])
upper_depth_node = Node(
"upperSeismoDepth", text=point_source.upper_seismogenic_depth)
lower_depth_node = Node(
"lowerSeismoDepth", text=point_source.lower_seismogenic_depth)
return Node(
"pointGeometry",
nodes=[point_node, upper_depth_node, lower_depth_node]) | python | def build_point_source_geometry(point_source):
"""
Returns the point source geometry as a Node
:param point_source:
Point source model as an instance of the :class:
`openquake.hazardlib.source.point.PointSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
xy = point_source.location.x, point_source.location.y
pos_node = Node("gml:pos", text=xy)
point_node = Node("gml:Point", nodes=[pos_node])
upper_depth_node = Node(
"upperSeismoDepth", text=point_source.upper_seismogenic_depth)
lower_depth_node = Node(
"lowerSeismoDepth", text=point_source.lower_seismogenic_depth)
return Node(
"pointGeometry",
nodes=[point_node, upper_depth_node, lower_depth_node]) | [
"def",
"build_point_source_geometry",
"(",
"point_source",
")",
":",
"xy",
"=",
"point_source",
".",
"location",
".",
"x",
",",
"point_source",
".",
"location",
".",
"y",
"pos_node",
"=",
"Node",
"(",
"\"gml:pos\"",
",",
"text",
"=",
"xy",
")",
"point_node",
"=",
"Node",
"(",
"\"gml:Point\"",
",",
"nodes",
"=",
"[",
"pos_node",
"]",
")",
"upper_depth_node",
"=",
"Node",
"(",
"\"upperSeismoDepth\"",
",",
"text",
"=",
"point_source",
".",
"upper_seismogenic_depth",
")",
"lower_depth_node",
"=",
"Node",
"(",
"\"lowerSeismoDepth\"",
",",
"text",
"=",
"point_source",
".",
"lower_seismogenic_depth",
")",
"return",
"Node",
"(",
"\"pointGeometry\"",
",",
"nodes",
"=",
"[",
"point_node",
",",
"upper_depth_node",
",",
"lower_depth_node",
"]",
")"
] | Returns the point source geometry as a Node
:param point_source:
Point source model as an instance of the :class:
`openquake.hazardlib.source.point.PointSource`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Returns",
"the",
"poing",
"source",
"geometry",
"as",
"a",
"Node"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L61-L80 |
238 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_linestring_node | def build_linestring_node(line, with_depth=False):
"""
Parses a line to a Node class
:param line:
Line as instance of :class:`openquake.hazardlib.geo.line.Line`
:param bool with_depth:
Include the depth values (True) or not (False):
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
geom = []
for p in line.points:
if with_depth:
geom.extend((p.x, p.y, p.z))
else:
geom.extend((p.x, p.y))
poslist_node = Node("gml:posList", text=geom)
return Node("gml:LineString", nodes=[poslist_node]) | python | def build_linestring_node(line, with_depth=False):
"""
Parses a line to a Node class
:param line:
Line as instance of :class:`openquake.hazardlib.geo.line.Line`
:param bool with_depth:
Include the depth values (True) or not (False):
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
geom = []
for p in line.points:
if with_depth:
geom.extend((p.x, p.y, p.z))
else:
geom.extend((p.x, p.y))
poslist_node = Node("gml:posList", text=geom)
return Node("gml:LineString", nodes=[poslist_node]) | [
"def",
"build_linestring_node",
"(",
"line",
",",
"with_depth",
"=",
"False",
")",
":",
"geom",
"=",
"[",
"]",
"for",
"p",
"in",
"line",
".",
"points",
":",
"if",
"with_depth",
":",
"geom",
".",
"extend",
"(",
"(",
"p",
".",
"x",
",",
"p",
".",
"y",
",",
"p",
".",
"z",
")",
")",
"else",
":",
"geom",
".",
"extend",
"(",
"(",
"p",
".",
"x",
",",
"p",
".",
"y",
")",
")",
"poslist_node",
"=",
"Node",
"(",
"\"gml:posList\"",
",",
"text",
"=",
"geom",
")",
"return",
"Node",
"(",
"\"gml:LineString\"",
",",
"nodes",
"=",
"[",
"poslist_node",
"]",
")"
] | Parses a line to a Node class
:param line:
Line as instance of :class:`openquake.hazardlib.geo.line.Line`
:param bool with_depth:
Include the depth values (True) or not (False):
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Parses",
"a",
"line",
"to",
"a",
"Node",
"class"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L83-L101 |
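A hedged usage sketch for build_linestring_node; the stand-in objects below expose only the attributes the writer reads (.points and .x/.y/.z), whereas a real call would pass an openquake.hazardlib.geo.line.Line instance.

from types import SimpleNamespace
from openquake.hazardlib.sourcewriter import build_linestring_node

points = [SimpleNamespace(x=10.0, y=45.0, z=0.0),
          SimpleNamespace(x=10.2, y=45.1, z=2.0)]
line = SimpleNamespace(points=points)
node = build_linestring_node(line, with_depth=True)
print(node)  # a gml:LineString node wrapping the flattened coordinate list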
239 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_simple_fault_geometry | def build_simple_fault_geometry(fault_source):
"""
Returns the simple fault source geometry as a Node
:param fault_source:
Simple fault source model as an instance of the :class:
`openquake.hazardlib.source.simple_fault.SimpleFaultSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
linestring_node = build_linestring_node(fault_source.fault_trace,
with_depth=False)
dip_node = Node("dip", text=fault_source.dip)
upper_depth_node = Node(
"upperSeismoDepth", text=fault_source.upper_seismogenic_depth)
lower_depth_node = Node(
"lowerSeismoDepth", text=fault_source.lower_seismogenic_depth)
return Node("simpleFaultGeometry",
nodes=[linestring_node, dip_node, upper_depth_node,
lower_depth_node]) | python | def build_simple_fault_geometry(fault_source):
"""
Returns the simple fault source geometry as a Node
:param fault_source:
Simple fault source model as an instance of the :class:
`openquake.hazardlib.source.simple_fault.SimpleFaultSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
linestring_node = build_linestring_node(fault_source.fault_trace,
with_depth=False)
dip_node = Node("dip", text=fault_source.dip)
upper_depth_node = Node(
"upperSeismoDepth", text=fault_source.upper_seismogenic_depth)
lower_depth_node = Node(
"lowerSeismoDepth", text=fault_source.lower_seismogenic_depth)
return Node("simpleFaultGeometry",
nodes=[linestring_node, dip_node, upper_depth_node,
lower_depth_node]) | [
"def",
"build_simple_fault_geometry",
"(",
"fault_source",
")",
":",
"linestring_node",
"=",
"build_linestring_node",
"(",
"fault_source",
".",
"fault_trace",
",",
"with_depth",
"=",
"False",
")",
"dip_node",
"=",
"Node",
"(",
"\"dip\"",
",",
"text",
"=",
"fault_source",
".",
"dip",
")",
"upper_depth_node",
"=",
"Node",
"(",
"\"upperSeismoDepth\"",
",",
"text",
"=",
"fault_source",
".",
"upper_seismogenic_depth",
")",
"lower_depth_node",
"=",
"Node",
"(",
"\"lowerSeismoDepth\"",
",",
"text",
"=",
"fault_source",
".",
"lower_seismogenic_depth",
")",
"return",
"Node",
"(",
"\"simpleFaultGeometry\"",
",",
"nodes",
"=",
"[",
"linestring_node",
",",
"dip_node",
",",
"upper_depth_node",
",",
"lower_depth_node",
"]",
")"
] | Returns the simple fault source geometry as a Node
:param fault_source:
Simple fault source model as an instance of the :class:
`openquake.hazardlib.source.simple_fault.SimpleFaultSource`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Returns",
"the",
"simple",
"fault",
"source",
"geometry",
"as",
"a",
"Node"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L104-L123 |
240 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_complex_fault_geometry | def build_complex_fault_geometry(fault_source):
"""
Returns the complex fault source geometry as a Node
:param fault_source:
Complex fault source model as an instance of the :class:
`openquake.hazardlib.source.complex_fault.ComplexFaultSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
num_edges = len(fault_source.edges)
edge_nodes = []
for iloc, edge in enumerate(fault_source.edges):
if iloc == 0:
# Top Edge
node_name = "faultTopEdge"
elif iloc == (num_edges - 1):
# Bottom edge
node_name = "faultBottomEdge"
else:
# Intermediate edge
node_name = "intermediateEdge"
edge_nodes.append(
Node(node_name,
nodes=[build_linestring_node(edge, with_depth=True)]))
return Node("complexFaultGeometry", nodes=edge_nodes) | python | def build_complex_fault_geometry(fault_source):
"""
Returns the complex fault source geometry as a Node
:param fault_source:
Complex fault source model as an instance of the :class:
`openquake.hazardlib.source.complex_fault.ComplexFaultSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
num_edges = len(fault_source.edges)
edge_nodes = []
for iloc, edge in enumerate(fault_source.edges):
if iloc == 0:
# Top Edge
node_name = "faultTopEdge"
elif iloc == (num_edges - 1):
# Bottom edge
node_name = "faultBottomEdge"
else:
# Intermediate edge
node_name = "intermediateEdge"
edge_nodes.append(
Node(node_name,
nodes=[build_linestring_node(edge, with_depth=True)]))
return Node("complexFaultGeometry", nodes=edge_nodes) | [
"def",
"build_complex_fault_geometry",
"(",
"fault_source",
")",
":",
"num_edges",
"=",
"len",
"(",
"fault_source",
".",
"edges",
")",
"edge_nodes",
"=",
"[",
"]",
"for",
"iloc",
",",
"edge",
"in",
"enumerate",
"(",
"fault_source",
".",
"edges",
")",
":",
"if",
"iloc",
"==",
"0",
":",
"# Top Edge",
"node_name",
"=",
"\"faultTopEdge\"",
"elif",
"iloc",
"==",
"(",
"num_edges",
"-",
"1",
")",
":",
"# Bottom edge",
"node_name",
"=",
"\"faultBottomEdge\"",
"else",
":",
"# Intermediate edge",
"node_name",
"=",
"\"intermediateEdge\"",
"edge_nodes",
".",
"append",
"(",
"Node",
"(",
"node_name",
",",
"nodes",
"=",
"[",
"build_linestring_node",
"(",
"edge",
",",
"with_depth",
"=",
"True",
")",
"]",
")",
")",
"return",
"Node",
"(",
"\"complexFaultGeometry\"",
",",
"nodes",
"=",
"edge_nodes",
")"
] | Returns the complex fault source geometry as a Node
:param fault_source:
Complex fault source model as an instance of the :class:
`openquake.hazardlib.source.complex_fault.ComplexFaultSource`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Returns",
"the",
"complex",
"fault",
"source",
"geometry",
"as",
"a",
"Node"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L126-L152 |
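A standalone sketch of the edge-naming rule applied above: the first edge becomes faultTopEdge, the last faultBottomEdge, and anything in between intermediateEdge; the edge labels are placeholders.

edges = ['top', 'mid1', 'mid2', 'bottom']  # placeholder edge objects
names = []
for iloc in range(len(edges)):
    if iloc == 0:
        names.append('faultTopEdge')
    elif iloc == len(edges) - 1:
        names.append('faultBottomEdge')
    else:
        names.append('intermediateEdge')
print(names)  # ['faultTopEdge', 'intermediateEdge', 'intermediateEdge', 'faultBottomEdge']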
241 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_evenly_discretised_mfd | def build_evenly_discretised_mfd(mfd):
"""
Returns the evenly discretized MFD as a Node
:param mfd:
MFD as instance of :class:
`openquake.hazardlib.mfd.evenly_discretized.EvenlyDiscretizedMFD`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
occur_rates = Node("occurRates", text=mfd.occurrence_rates)
return Node("incrementalMFD",
{"binWidth": mfd.bin_width, "minMag": mfd.min_mag},
nodes=[occur_rates]) | python | def build_evenly_discretised_mfd(mfd):
"""
Returns the evenly discretized MFD as a Node
:param mfd:
MFD as instance of :class:
`openquake.hazardlib.mfd.evenly_discretized.EvenlyDiscretizedMFD`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
occur_rates = Node("occurRates", text=mfd.occurrence_rates)
return Node("incrementalMFD",
{"binWidth": mfd.bin_width, "minMag": mfd.min_mag},
nodes=[occur_rates]) | [
"def",
"build_evenly_discretised_mfd",
"(",
"mfd",
")",
":",
"occur_rates",
"=",
"Node",
"(",
"\"occurRates\"",
",",
"text",
"=",
"mfd",
".",
"occurrence_rates",
")",
"return",
"Node",
"(",
"\"incrementalMFD\"",
",",
"{",
"\"binWidth\"",
":",
"mfd",
".",
"bin_width",
",",
"\"minMag\"",
":",
"mfd",
".",
"min_mag",
"}",
",",
"nodes",
"=",
"[",
"occur_rates",
"]",
")"
] | Returns the evenly discretized MFD as a Node
:param mfd:
MFD as instance of :class:
`openquake.hazardlib.mfd.evenly_discretized.EvenlyDiscretizedMFD`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Returns",
"the",
"evenly",
"discretized",
"MFD",
"as",
"a",
"Node"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L156-L169 |
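A hedged usage sketch for build_evenly_discretised_mfd; the stand-in MFD exposes just the three attributes the writer reads, while a real run would pass an openquake.hazardlib.mfd.evenly_discretized.EvenlyDiscretizedMFD.

from types import SimpleNamespace
from openquake.hazardlib.sourcewriter import build_evenly_discretised_mfd

mfd = SimpleNamespace(min_mag=5.0, bin_width=0.1,
                      occurrence_rates=[1e-2, 5e-3, 2e-3])
node = build_evenly_discretised_mfd(mfd)
print(node)  # an incrementalMFD node with binWidth/minMag attributes and an occurRates child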
242 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_truncated_gr_mfd | def build_truncated_gr_mfd(mfd):
"""
Parses the truncated Gutenberg Richter MFD as a Node
:param mfd:
MFD as instance of :class:
`openquake.hazardlib.mfd.truncated_gr.TruncatedGRMFD`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
return Node("truncGutenbergRichterMFD",
{"aValue": mfd.a_val, "bValue": mfd.b_val,
"minMag": mfd.min_mag, "maxMag": mfd.max_mag}) | python | def build_truncated_gr_mfd(mfd):
"""
Parses the truncated Gutenberg Richter MFD as a Node
:param mfd:
MFD as instance of :class:
`openquake.hazardlib.mfd.truncated_gr.TruncatedGRMFD`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
return Node("truncGutenbergRichterMFD",
{"aValue": mfd.a_val, "bValue": mfd.b_val,
"minMag": mfd.min_mag, "maxMag": mfd.max_mag}) | [
"def",
"build_truncated_gr_mfd",
"(",
"mfd",
")",
":",
"return",
"Node",
"(",
"\"truncGutenbergRichterMFD\"",
",",
"{",
"\"aValue\"",
":",
"mfd",
".",
"a_val",
",",
"\"bValue\"",
":",
"mfd",
".",
"b_val",
",",
"\"minMag\"",
":",
"mfd",
".",
"min_mag",
",",
"\"maxMag\"",
":",
"mfd",
".",
"max_mag",
"}",
")"
] | Parses the truncated Gutenberg Richter MFD as a Node
:param mfd:
MFD as instance of :class:
`openquake.hazardlib.mfd.truncated_gr.TruncatedGRMFD`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Parses",
"the",
"truncated",
"Gutenberg",
"Richter",
"MFD",
"as",
"a",
"Node"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L173-L185 |
243 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_arbitrary_mfd | def build_arbitrary_mfd(mfd):
"""
Parses the arbitrary MFD as a Node
:param mfd:
MFD as instance of :class:
`openquake.hazardlib.mfd.arbitrary.ArbitraryMFD`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
magnitudes = Node("magnitudes", text=mfd.magnitudes)
occur_rates = Node("occurRates", text=mfd.occurrence_rates)
return Node("arbitraryMFD", nodes=[magnitudes, occur_rates]) | python | def build_arbitrary_mfd(mfd):
"""
Parses the arbitrary MFD as a Node
:param mfd:
MFD as instance of :class:
`openquake.hazardlib.mfd.arbitrary.ArbitraryMFD`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
magnitudes = Node("magnitudes", text=mfd.magnitudes)
occur_rates = Node("occurRates", text=mfd.occurrence_rates)
return Node("arbitraryMFD", nodes=[magnitudes, occur_rates]) | [
"def",
"build_arbitrary_mfd",
"(",
"mfd",
")",
":",
"magnitudes",
"=",
"Node",
"(",
"\"magnitudes\"",
",",
"text",
"=",
"mfd",
".",
"magnitudes",
")",
"occur_rates",
"=",
"Node",
"(",
"\"occurRates\"",
",",
"text",
"=",
"mfd",
".",
"occurrence_rates",
")",
"return",
"Node",
"(",
"\"arbitraryMFD\"",
",",
"nodes",
"=",
"[",
"magnitudes",
",",
"occur_rates",
"]",
")"
] | Parses the arbitrary MFD as a Node
:param mfd:
MFD as instance of :class:
`openquake.hazardlib.mfd.arbitrary.ArbitraryMFD`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Parses",
"the",
"arbitrary",
"MFD",
"as",
"a",
"Node"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L189-L201 |
244 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_youngs_coppersmith_mfd | def build_youngs_coppersmith_mfd(mfd):
"""
Parses the Youngs & Coppersmith MFD as a node. Note that the MFD does
not hold the total moment rate, but only the characteristic rate. Therefore
the node is written to the characteristic rate version regardless of
whether or not it was originally created from total moment rate
:param mfd:
MFD as instance of :class:
`openquake.hazardlib.mfd.youngs_coppersmith_1985.
YoungsCoppersmith1985MFD`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
return Node("YoungsCoppersmithMFD",
{"minMag": mfd.min_mag, "bValue": mfd.b_val,
"characteristicMag": mfd.char_mag,
"characteristicRate": mfd.char_rate,
"binWidth": mfd.bin_width}) | python | def build_youngs_coppersmith_mfd(mfd):
"""
Parses the Youngs & Coppersmith MFD as a node. Note that the MFD does
not hold the total moment rate, but only the characteristic rate. Therefore
the node is written to the characteristic rate version regardless of
whether or not it was originally created from total moment rate
:param mfd:
MFD as instance of :class:
`openquake.hazardlib.mfd.youngs_coppersmith_1985.
YoungsCoppersmith1985MFD`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
return Node("YoungsCoppersmithMFD",
{"minMag": mfd.min_mag, "bValue": mfd.b_val,
"characteristicMag": mfd.char_mag,
"characteristicRate": mfd.char_rate,
"binWidth": mfd.bin_width}) | [
"def",
"build_youngs_coppersmith_mfd",
"(",
"mfd",
")",
":",
"return",
"Node",
"(",
"\"YoungsCoppersmithMFD\"",
",",
"{",
"\"minMag\"",
":",
"mfd",
".",
"min_mag",
",",
"\"bValue\"",
":",
"mfd",
".",
"b_val",
",",
"\"characteristicMag\"",
":",
"mfd",
".",
"char_mag",
",",
"\"characteristicRate\"",
":",
"mfd",
".",
"char_rate",
",",
"\"binWidth\"",
":",
"mfd",
".",
"bin_width",
"}",
")"
] | Parses the Youngs & Coppersmith MFD as a node. Note that the MFD does
not hold the total moment rate, but only the characteristic rate. Therefore
the node is written to the characteristic rate version regardless of
whether or not it was originally created from total moment rate
:param mfd:
MFD as instance of :class:
`openquake.hazardlib.mfd.youngs_coppersmith_1985.
YoungsCoppersmith1985MFD`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Parses",
"the",
"Youngs",
"&",
"Coppersmith",
"MFD",
"as",
"a",
"node",
".",
"Note",
"that",
"the",
"MFD",
"does",
"not",
"hold",
"the",
"total",
"moment",
"rate",
"but",
"only",
"the",
"characteristic",
"rate",
".",
"Therefore",
"the",
"node",
"is",
"written",
"to",
"the",
"characteristic",
"rate",
"version",
"regardless",
"of",
"whether",
"or",
"not",
"it",
"was",
"originally",
"created",
"from",
"total",
"moment",
"rate"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L205-L223 |
245 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_multi_mfd | def build_multi_mfd(mfd):
"""
Parses the MultiMFD as a Node
:param mfd:
MFD as instance of :class:
`openquake.hazardlib.mfd.multi_mfd.MultiMFD`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
node = Node("multiMFD", dict(kind=mfd.kind, size=mfd.size))
for name in sorted(mfd.kwargs):
values = mfd.kwargs[name]
if name in ('magnitudes', 'occurRates'):
if len(values[0]) > 1: # tested in multipoint_test.py
values = list(numpy.concatenate(values))
else:
values = sum(values, [])
node.append(Node(name, text=values))
if 'occurRates' in mfd.kwargs:
lengths = [len(rates) for rates in mfd.kwargs['occurRates']]
node.append(Node('lengths', text=lengths))
return node | python | def build_multi_mfd(mfd):
"""
Parses the MultiMFD as a Node
:param mfd:
MFD as instance of :class:
`openquake.hazardlib.mfd.multi_mfd.MultiMFD`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
node = Node("multiMFD", dict(kind=mfd.kind, size=mfd.size))
for name in sorted(mfd.kwargs):
values = mfd.kwargs[name]
if name in ('magnitudes', 'occurRates'):
if len(values[0]) > 1: # tested in multipoint_test.py
values = list(numpy.concatenate(values))
else:
values = sum(values, [])
node.append(Node(name, text=values))
if 'occurRates' in mfd.kwargs:
lengths = [len(rates) for rates in mfd.kwargs['occurRates']]
node.append(Node('lengths', text=lengths))
return node | [
"def",
"build_multi_mfd",
"(",
"mfd",
")",
":",
"node",
"=",
"Node",
"(",
"\"multiMFD\"",
",",
"dict",
"(",
"kind",
"=",
"mfd",
".",
"kind",
",",
"size",
"=",
"mfd",
".",
"size",
")",
")",
"for",
"name",
"in",
"sorted",
"(",
"mfd",
".",
"kwargs",
")",
":",
"values",
"=",
"mfd",
".",
"kwargs",
"[",
"name",
"]",
"if",
"name",
"in",
"(",
"'magnitudes'",
",",
"'occurRates'",
")",
":",
"if",
"len",
"(",
"values",
"[",
"0",
"]",
")",
">",
"1",
":",
"# tested in multipoint_test.py",
"values",
"=",
"list",
"(",
"numpy",
".",
"concatenate",
"(",
"values",
")",
")",
"else",
":",
"values",
"=",
"sum",
"(",
"values",
",",
"[",
"]",
")",
"node",
".",
"append",
"(",
"Node",
"(",
"name",
",",
"text",
"=",
"values",
")",
")",
"if",
"'occurRates'",
"in",
"mfd",
".",
"kwargs",
":",
"lengths",
"=",
"[",
"len",
"(",
"rates",
")",
"for",
"rates",
"in",
"mfd",
".",
"kwargs",
"[",
"'occurRates'",
"]",
"]",
"node",
".",
"append",
"(",
"Node",
"(",
"'lengths'",
",",
"text",
"=",
"lengths",
")",
")",
"return",
"node"
] | Parses the MultiMFD as a Node
:param mfd:
MFD as instance of :class:
`openquake.hazardlib.mfd.multi_mfd.MultiMFD`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Parses",
"the",
"MultiMFD",
"as",
"a",
"Node"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L227-L249 |
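A standalone sketch of how per-point occurRates are flattened and their lengths recorded by the multiMFD writer above; the rates are made up and cover the concatenate branch (more than one rate per point).

import numpy

occur_rates = [[1e-2, 5e-3], [2e-2, 1e-2, 5e-3], [3e-2, 1e-2]]
values = list(numpy.concatenate(occur_rates))    # one flat list of rates
lengths = [len(rates) for rates in occur_rates]  # [2, 3, 2], needed to re-split it
print(values, lengths)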
246 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_nodal_plane_dist | def build_nodal_plane_dist(npd):
"""
Returns the nodal plane distribution as a Node instance
:param npd:
Nodal plane distribution as instance of :class:
`openquake.hazardlib.pmf.PMF`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
npds = []
for prob, npd in npd.data:
nodal_plane = Node(
"nodalPlane", {"dip": npd.dip, "probability": prob,
"strike": npd.strike, "rake": npd.rake})
npds.append(nodal_plane)
return Node("nodalPlaneDist", nodes=npds) | python | def build_nodal_plane_dist(npd):
"""
Returns the nodal plane distribution as a Node instance
:param npd:
Nodal plane distribution as instance of :class:
`openquake.hazardlib.pmf.PMF`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
npds = []
for prob, npd in npd.data:
nodal_plane = Node(
"nodalPlane", {"dip": npd.dip, "probability": prob,
"strike": npd.strike, "rake": npd.rake})
npds.append(nodal_plane)
return Node("nodalPlaneDist", nodes=npds) | [
"def",
"build_nodal_plane_dist",
"(",
"npd",
")",
":",
"npds",
"=",
"[",
"]",
"for",
"prob",
",",
"npd",
"in",
"npd",
".",
"data",
":",
"nodal_plane",
"=",
"Node",
"(",
"\"nodalPlane\"",
",",
"{",
"\"dip\"",
":",
"npd",
".",
"dip",
",",
"\"probability\"",
":",
"prob",
",",
"\"strike\"",
":",
"npd",
".",
"strike",
",",
"\"rake\"",
":",
"npd",
".",
"rake",
"}",
")",
"npds",
".",
"append",
"(",
"nodal_plane",
")",
"return",
"Node",
"(",
"\"nodalPlaneDist\"",
",",
"nodes",
"=",
"npds",
")"
] | Returns the nodal plane distribution as a Node instance
:param npd:
Nodal plane distribution as instance of :class:
`openquake.hazardlib.pmf.PMF`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Returns",
"the",
"nodal",
"plane",
"distribution",
"as",
"a",
"Node",
"instance"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L252-L268 |
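A hedged usage sketch for build_nodal_plane_dist; the stand-in distribution mimics the (probability, nodal plane) pairs of a hazardlib PMF, exposing only the attributes the writer reads.

from types import SimpleNamespace
from openquake.hazardlib.sourcewriter import build_nodal_plane_dist

pairs = [(0.6, SimpleNamespace(strike=0.0, dip=90.0, rake=0.0)),
         (0.4, SimpleNamespace(strike=45.0, dip=60.0, rake=90.0))]
npd = SimpleNamespace(data=pairs)
node = build_nodal_plane_dist(npd)
print(node)  # a nodalPlaneDist node with two nodalPlane children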
247 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_hypo_depth_dist | def build_hypo_depth_dist(hdd):
"""
Returns the hypocentral depth distribution as a Node instance
:param hdd:
Hypocentral depth distribution as an instance of :class:
`openquake.hazardlib.pmf.PMF`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
hdds = []
for (prob, depth) in hdd.data:
hdds.append(
Node("hypoDepth", {"depth": depth, "probability": prob}))
return Node("hypoDepthDist", nodes=hdds) | python | def build_hypo_depth_dist(hdd):
"""
Returns the hypocentral depth distribution as a Node instance
:param hdd:
Hypocentral depth distribution as an instance of :class:
`openquake.hazardlib.pmf.PMF`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
hdds = []
for (prob, depth) in hdd.data:
hdds.append(
Node("hypoDepth", {"depth": depth, "probability": prob}))
return Node("hypoDepthDist", nodes=hdds) | [
"def",
"build_hypo_depth_dist",
"(",
"hdd",
")",
":",
"hdds",
"=",
"[",
"]",
"for",
"(",
"prob",
",",
"depth",
")",
"in",
"hdd",
".",
"data",
":",
"hdds",
".",
"append",
"(",
"Node",
"(",
"\"hypoDepth\"",
",",
"{",
"\"depth\"",
":",
"depth",
",",
"\"probability\"",
":",
"prob",
"}",
")",
")",
"return",
"Node",
"(",
"\"hypoDepthDist\"",
",",
"nodes",
"=",
"hdds",
")"
] | Returns the hypocentral depth distribution as a Node instance
:param hdd:
Hypocentral depth distribution as an instance of :class:
`openquake.hazardlib.pmf.PMF`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Returns",
"the",
"hypocentral",
"depth",
"distribution",
"as",
"a",
"Node",
"instance"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L271-L285 |
248 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | get_distributed_seismicity_source_nodes | def get_distributed_seismicity_source_nodes(source):
"""
Returns list of nodes of attributes common to all distributed seismicity
source classes
:param source:
Seismic source as instance of :class:
`openquake.hazardlib.source.area.AreaSource` or :class:
`openquake.hazardlib.source.point.PointSource`
:returns:
List of instances of :class:`openquake.baselib.node.Node`
"""
source_nodes = []
# parse msr
source_nodes.append(
Node("magScaleRel",
text=source.magnitude_scaling_relationship.__class__.__name__))
# Parse aspect ratio
source_nodes.append(
Node("ruptAspectRatio", text=source.rupture_aspect_ratio))
# Parse MFD
source_nodes.append(obj_to_node(source.mfd))
# Parse nodal plane distribution
source_nodes.append(
build_nodal_plane_dist(source.nodal_plane_distribution))
# Parse hypocentral depth distribution
source_nodes.append(
build_hypo_depth_dist(source.hypocenter_distribution))
return source_nodes | python | def get_distributed_seismicity_source_nodes(source):
"""
Returns list of nodes of attributes common to all distributed seismicity
source classes
:param source:
Seismic source as instance of :class:
`openquake.hazardlib.source.area.AreaSource` or :class:
`openquake.hazardlib.source.point.PointSource`
:returns:
List of instances of :class:`openquake.baselib.node.Node`
"""
source_nodes = []
# parse msr
source_nodes.append(
Node("magScaleRel",
text=source.magnitude_scaling_relationship.__class__.__name__))
# Parse aspect ratio
source_nodes.append(
Node("ruptAspectRatio", text=source.rupture_aspect_ratio))
# Parse MFD
source_nodes.append(obj_to_node(source.mfd))
# Parse nodal plane distribution
source_nodes.append(
build_nodal_plane_dist(source.nodal_plane_distribution))
# Parse hypocentral depth distribution
source_nodes.append(
build_hypo_depth_dist(source.hypocenter_distribution))
return source_nodes | [
"def",
"get_distributed_seismicity_source_nodes",
"(",
"source",
")",
":",
"source_nodes",
"=",
"[",
"]",
"# parse msr",
"source_nodes",
".",
"append",
"(",
"Node",
"(",
"\"magScaleRel\"",
",",
"text",
"=",
"source",
".",
"magnitude_scaling_relationship",
".",
"__class__",
".",
"__name__",
")",
")",
"# Parse aspect ratio",
"source_nodes",
".",
"append",
"(",
"Node",
"(",
"\"ruptAspectRatio\"",
",",
"text",
"=",
"source",
".",
"rupture_aspect_ratio",
")",
")",
"# Parse MFD",
"source_nodes",
".",
"append",
"(",
"obj_to_node",
"(",
"source",
".",
"mfd",
")",
")",
"# Parse nodal plane distribution",
"source_nodes",
".",
"append",
"(",
"build_nodal_plane_dist",
"(",
"source",
".",
"nodal_plane_distribution",
")",
")",
"# Parse hypocentral depth distribution",
"source_nodes",
".",
"append",
"(",
"build_hypo_depth_dist",
"(",
"source",
".",
"hypocenter_distribution",
")",
")",
"return",
"source_nodes"
] | Returns list of nodes of attributes common to all distributed seismicity
source classes
:param source:
Seismic source as instance of :class:
`openquake.hazardlib.source.area.AreaSource` or :class:
`openquake.hazardlib.source.point.PointSource`
:returns:
List of instances of :class:`openquake.baselib.node.Node` | [
"Returns",
"list",
"of",
"nodes",
"of",
"attributes",
"common",
"to",
"all",
"distributed",
"seismicity",
"source",
"classes"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L288-L316 |
249 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | get_fault_source_nodes | def get_fault_source_nodes(source):
"""
Returns list of nodes of attributes common to all fault source classes
:param source:
Fault source as instance of :class:
`openquake.hazardlib.source.simple_fault.SimpleFaultSource` or :class:
`openquake.hazardlib.source.complex_fault.ComplexFaultSource`
:returns:
List of instances of :class:`openquake.baselib.node.Node`
"""
source_nodes = []
# parse msr
source_nodes.append(
Node(
"magScaleRel",
text=source.magnitude_scaling_relationship.__class__.__name__))
# Parse aspect ratio
source_nodes.append(
Node("ruptAspectRatio", text=source.rupture_aspect_ratio))
# Parse MFD
source_nodes.append(obj_to_node(source.mfd))
# Parse Rake
source_nodes.append(Node("rake", text=source.rake))
if len(getattr(source, 'hypo_list', [])):
source_nodes.append(build_hypo_list_node(source.hypo_list))
if len(getattr(source, 'slip_list', [])):
source_nodes.append(build_slip_list_node(source.slip_list))
return source_nodes | python | def get_fault_source_nodes(source):
"""
Returns list of nodes of attributes common to all fault source classes
:param source:
Fault source as instance of :class:
`openquake.hazardlib.source.simple_fault.SimpleFaultSource` or :class:
`openquake.hazardlib.source.complex_fault.ComplexFaultSource`
:returns:
List of instances of :class:`openquake.baselib.node.Node`
"""
source_nodes = []
# parse msr
source_nodes.append(
Node(
"magScaleRel",
text=source.magnitude_scaling_relationship.__class__.__name__))
# Parse aspect ratio
source_nodes.append(
Node("ruptAspectRatio", text=source.rupture_aspect_ratio))
# Parse MFD
source_nodes.append(obj_to_node(source.mfd))
# Parse Rake
source_nodes.append(Node("rake", text=source.rake))
if len(getattr(source, 'hypo_list', [])):
source_nodes.append(build_hypo_list_node(source.hypo_list))
if len(getattr(source, 'slip_list', [])):
source_nodes.append(build_slip_list_node(source.slip_list))
return source_nodes | [
"def",
"get_fault_source_nodes",
"(",
"source",
")",
":",
"source_nodes",
"=",
"[",
"]",
"# parse msr",
"source_nodes",
".",
"append",
"(",
"Node",
"(",
"\"magScaleRel\"",
",",
"text",
"=",
"source",
".",
"magnitude_scaling_relationship",
".",
"__class__",
".",
"__name__",
")",
")",
"# Parse aspect ratio",
"source_nodes",
".",
"append",
"(",
"Node",
"(",
"\"ruptAspectRatio\"",
",",
"text",
"=",
"source",
".",
"rupture_aspect_ratio",
")",
")",
"# Parse MFD",
"source_nodes",
".",
"append",
"(",
"obj_to_node",
"(",
"source",
".",
"mfd",
")",
")",
"# Parse Rake",
"source_nodes",
".",
"append",
"(",
"Node",
"(",
"\"rake\"",
",",
"text",
"=",
"source",
".",
"rake",
")",
")",
"if",
"len",
"(",
"getattr",
"(",
"source",
",",
"'hypo_list'",
",",
"[",
"]",
")",
")",
":",
"source_nodes",
".",
"append",
"(",
"build_hypo_list_node",
"(",
"source",
".",
"hypo_list",
")",
")",
"if",
"len",
"(",
"getattr",
"(",
"source",
",",
"'slip_list'",
",",
"[",
"]",
")",
")",
":",
"source_nodes",
".",
"append",
"(",
"build_slip_list_node",
"(",
"source",
".",
"slip_list",
")",
")",
"return",
"source_nodes"
] | Returns list of nodes of attributes common to all fault source classes
:param source:
Fault source as instance of :class:
`openquake.hazardlib.source.simple_fault.SimpleFaultSource` or :class:
`openquake.hazardlib.source.complex_fault.ComplexFaultSource`
:returns:
List of instances of :class:`openquake.baselib.node.Node` | [
"Returns",
"list",
"of",
"nodes",
"of",
"attributes",
"common",
"to",
"all",
"fault",
"source",
"classes"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L348-L376 |
250 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | get_source_attributes | def get_source_attributes(source):
"""
Retrieves a dictionary of source attributes from the source class
:param source:
Seismic source as instance of :class:
`openquake.hazardlib.source.base.BaseSeismicSource`
:returns:
Dictionary of source attributes
"""
attrs = {"id": source.source_id,
"name": source.name,
"tectonicRegion": source.tectonic_region_type}
if isinstance(source, NonParametricSeismicSource):
if source.data[0][0].weight is not None:
weights = []
for data in source.data:
weights.append(data[0].weight)
attrs['rup_weights'] = numpy.array(weights)
print(attrs)
return attrs | python | def get_source_attributes(source):
"""
Retrieves a dictionary of source attributes from the source class
:param source:
Seismic source as instance of :class:
`openquake.hazardlib.source.base.BaseSeismicSource`
:returns:
Dictionary of source attributes
"""
attrs = {"id": source.source_id,
"name": source.name,
"tectonicRegion": source.tectonic_region_type}
if isinstance(source, NonParametricSeismicSource):
if source.data[0][0].weight is not None:
weights = []
for data in source.data:
weights.append(data[0].weight)
attrs['rup_weights'] = numpy.array(weights)
print(attrs)
return attrs | [
"def",
"get_source_attributes",
"(",
"source",
")",
":",
"attrs",
"=",
"{",
"\"id\"",
":",
"source",
".",
"source_id",
",",
"\"name\"",
":",
"source",
".",
"name",
",",
"\"tectonicRegion\"",
":",
"source",
".",
"tectonic_region_type",
"}",
"if",
"isinstance",
"(",
"source",
",",
"NonParametricSeismicSource",
")",
":",
"if",
"source",
".",
"data",
"[",
"0",
"]",
"[",
"0",
"]",
".",
"weight",
"is",
"not",
"None",
":",
"weights",
"=",
"[",
"]",
"for",
"data",
"in",
"source",
".",
"data",
":",
"weights",
".",
"append",
"(",
"data",
"[",
"0",
"]",
".",
"weight",
")",
"attrs",
"[",
"'rup_weights'",
"]",
"=",
"numpy",
".",
"array",
"(",
"weights",
")",
"print",
"(",
"attrs",
")",
"return",
"attrs"
] | Retrieves a dictionary of source attributes from the source class
:param source:
Seismic source as instance of :class:
`openquake.hazardlib.source.base.BaseSeismicSource`
:returns:
Dictionary of source attributes | [
"Retreives",
"a",
"dictionary",
"of",
"source",
"attributes",
"from",
"the",
"source",
"class"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L379-L399 |
251 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_area_source_node | def build_area_source_node(area_source):
"""
Parses an area source to a Node class
:param area_source:
Area source as instance of :class:
`openquake.hazardlib.source.area.AreaSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
# parse geometry
source_nodes = [build_area_source_geometry(area_source)]
# parse common distributed attributes
source_nodes.extend(get_distributed_seismicity_source_nodes(area_source))
return Node(
"areaSource", get_source_attributes(area_source), nodes=source_nodes) | python | def build_area_source_node(area_source):
"""
Parses an area source to a Node class
:param area_source:
Area source as instance of :class:
`openquake.hazardlib.source.area.AreaSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
# parse geometry
source_nodes = [build_area_source_geometry(area_source)]
# parse common distributed attributes
source_nodes.extend(get_distributed_seismicity_source_nodes(area_source))
return Node(
"areaSource", get_source_attributes(area_source), nodes=source_nodes) | [
"def",
"build_area_source_node",
"(",
"area_source",
")",
":",
"# parse geometry",
"source_nodes",
"=",
"[",
"build_area_source_geometry",
"(",
"area_source",
")",
"]",
"# parse common distributed attributes",
"source_nodes",
".",
"extend",
"(",
"get_distributed_seismicity_source_nodes",
"(",
"area_source",
")",
")",
"return",
"Node",
"(",
"\"areaSource\"",
",",
"get_source_attributes",
"(",
"area_source",
")",
",",
"nodes",
"=",
"source_nodes",
")"
] | Parses an area source to a Node class
:param area_source:
Area source as instance of :class:
`openquake.hazardlib.source.area.AreaSource`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Parses",
"an",
"area",
"source",
"to",
"a",
"Node",
"class"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L403-L418 |
252 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_simple_fault_source_node | def build_simple_fault_source_node(fault_source):
"""
Parses a simple fault source to a Node class
:param fault_source:
Simple fault source as instance of :class:
`openquake.hazardlib.source.simple_fault.SimpleFaultSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
# Parse geometry
source_nodes = [build_simple_fault_geometry(fault_source)]
# Parse common fault source attributes
source_nodes.extend(get_fault_source_nodes(fault_source))
return Node("simpleFaultSource",
get_source_attributes(fault_source),
nodes=source_nodes) | python | def build_simple_fault_source_node(fault_source):
"""
Parses a simple fault source to a Node class
:param fault_source:
Simple fault source as instance of :class:
`openquake.hazardlib.source.simple_fault.SimpleFaultSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
# Parse geometry
source_nodes = [build_simple_fault_geometry(fault_source)]
# Parse common fault source attributes
source_nodes.extend(get_fault_source_nodes(fault_source))
return Node("simpleFaultSource",
get_source_attributes(fault_source),
nodes=source_nodes) | [
"def",
"build_simple_fault_source_node",
"(",
"fault_source",
")",
":",
"# Parse geometry",
"source_nodes",
"=",
"[",
"build_simple_fault_geometry",
"(",
"fault_source",
")",
"]",
"# Parse common fault source attributes",
"source_nodes",
".",
"extend",
"(",
"get_fault_source_nodes",
"(",
"fault_source",
")",
")",
"return",
"Node",
"(",
"\"simpleFaultSource\"",
",",
"get_source_attributes",
"(",
"fault_source",
")",
",",
"nodes",
"=",
"source_nodes",
")"
] | Parses a simple fault source to a Node class
:param fault_source:
Simple fault source as instance of :class:
`openquake.hazardlib.source.simple_fault.SimpleFaultSource`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Parses",
"a",
"simple",
"fault",
"source",
"to",
"a",
"Node",
"class"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L524-L540 |
253 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | build_complex_fault_source_node | def build_complex_fault_source_node(fault_source):
"""
Parses a complex fault source to a Node class
:param fault_source:
Simple fault source as instance of :class:
`openquake.hazardlib.source.complex_fault.ComplexFaultSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
# Parse geometry
source_nodes = [build_complex_fault_geometry(fault_source)]
# Parse common fault source attributes
source_nodes.extend(get_fault_source_nodes(fault_source))
return Node("complexFaultSource",
get_source_attributes(fault_source),
nodes=source_nodes) | python | def build_complex_fault_source_node(fault_source):
"""
Parses a complex fault source to a Node class
:param fault_source:
Simple fault source as instance of :class:
`openquake.hazardlib.source.complex_fault.ComplexFaultSource`
:returns:
Instance of :class:`openquake.baselib.node.Node`
"""
# Parse geometry
source_nodes = [build_complex_fault_geometry(fault_source)]
# Parse common fault source attributes
source_nodes.extend(get_fault_source_nodes(fault_source))
return Node("complexFaultSource",
get_source_attributes(fault_source),
nodes=source_nodes) | [
"def",
"build_complex_fault_source_node",
"(",
"fault_source",
")",
":",
"# Parse geometry",
"source_nodes",
"=",
"[",
"build_complex_fault_geometry",
"(",
"fault_source",
")",
"]",
"# Parse common fault source attributes",
"source_nodes",
".",
"extend",
"(",
"get_fault_source_nodes",
"(",
"fault_source",
")",
")",
"return",
"Node",
"(",
"\"complexFaultSource\"",
",",
"get_source_attributes",
"(",
"fault_source",
")",
",",
"nodes",
"=",
"source_nodes",
")"
] | Parses a complex fault source to a Node class
:param fault_source:
Simple fault source as instance of :class:
`openquake.hazardlib.source.complex_fault.ComplexFaultSource`
:returns:
Instance of :class:`openquake.baselib.node.Node` | [
"Parses",
"a",
"complex",
"fault",
"source",
"to",
"a",
"Node",
"class"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L544-L560 |
254 | gem/oq-engine | openquake/hazardlib/sourcewriter.py | write_source_model | def write_source_model(dest, sources_or_groups, name=None,
investigation_time=None):
"""
Writes a source model to XML.
:param dest:
Destination path
:param sources_or_groups:
Source model in different formats
:param name:
Name of the source model (if missing, extracted from the filename)
"""
if isinstance(sources_or_groups, nrml.SourceModel):
with open(dest, 'wb') as f:
nrml.write([obj_to_node(sources_or_groups)], f, '%s')
return
if isinstance(sources_or_groups[0], sourceconverter.SourceGroup):
groups = sources_or_groups
else: # passed a list of sources
srcs_by_trt = groupby(
sources_or_groups, operator.attrgetter('tectonic_region_type'))
groups = [sourceconverter.SourceGroup(trt, srcs_by_trt[trt])
for trt in srcs_by_trt]
name = name or os.path.splitext(os.path.basename(dest))[0]
nodes = list(map(obj_to_node, sorted(groups)))
attrs = {"name": name}
if investigation_time is not None:
attrs['investigation_time'] = investigation_time
source_model = Node("sourceModel", attrs, nodes=nodes)
with open(dest, 'wb') as f:
nrml.write([source_model], f, '%s')
return dest | python | def write_source_model(dest, sources_or_groups, name=None,
investigation_time=None):
"""
Writes a source model to XML.
:param dest:
Destination path
:param sources_or_groups:
Source model in different formats
:param name:
Name of the source model (if missing, extracted from the filename)
"""
if isinstance(sources_or_groups, nrml.SourceModel):
with open(dest, 'wb') as f:
nrml.write([obj_to_node(sources_or_groups)], f, '%s')
return
if isinstance(sources_or_groups[0], sourceconverter.SourceGroup):
groups = sources_or_groups
else: # passed a list of sources
srcs_by_trt = groupby(
sources_or_groups, operator.attrgetter('tectonic_region_type'))
groups = [sourceconverter.SourceGroup(trt, srcs_by_trt[trt])
for trt in srcs_by_trt]
name = name or os.path.splitext(os.path.basename(dest))[0]
nodes = list(map(obj_to_node, sorted(groups)))
attrs = {"name": name}
if investigation_time is not None:
attrs['investigation_time'] = investigation_time
source_model = Node("sourceModel", attrs, nodes=nodes)
with open(dest, 'wb') as f:
nrml.write([source_model], f, '%s')
return dest | [
"def",
"write_source_model",
"(",
"dest",
",",
"sources_or_groups",
",",
"name",
"=",
"None",
",",
"investigation_time",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"sources_or_groups",
",",
"nrml",
".",
"SourceModel",
")",
":",
"with",
"open",
"(",
"dest",
",",
"'wb'",
")",
"as",
"f",
":",
"nrml",
".",
"write",
"(",
"[",
"obj_to_node",
"(",
"sources_or_groups",
")",
"]",
",",
"f",
",",
"'%s'",
")",
"return",
"if",
"isinstance",
"(",
"sources_or_groups",
"[",
"0",
"]",
",",
"sourceconverter",
".",
"SourceGroup",
")",
":",
"groups",
"=",
"sources_or_groups",
"else",
":",
"# passed a list of sources",
"srcs_by_trt",
"=",
"groupby",
"(",
"sources_or_groups",
",",
"operator",
".",
"attrgetter",
"(",
"'tectonic_region_type'",
")",
")",
"groups",
"=",
"[",
"sourceconverter",
".",
"SourceGroup",
"(",
"trt",
",",
"srcs_by_trt",
"[",
"trt",
"]",
")",
"for",
"trt",
"in",
"srcs_by_trt",
"]",
"name",
"=",
"name",
"or",
"os",
".",
"path",
".",
"splitext",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"dest",
")",
")",
"[",
"0",
"]",
"nodes",
"=",
"list",
"(",
"map",
"(",
"obj_to_node",
",",
"sorted",
"(",
"groups",
")",
")",
")",
"attrs",
"=",
"{",
"\"name\"",
":",
"name",
"}",
"if",
"investigation_time",
"is",
"not",
"None",
":",
"attrs",
"[",
"'investigation_time'",
"]",
"=",
"investigation_time",
"source_model",
"=",
"Node",
"(",
"\"sourceModel\"",
",",
"attrs",
",",
"nodes",
"=",
"nodes",
")",
"with",
"open",
"(",
"dest",
",",
"'wb'",
")",
"as",
"f",
":",
"nrml",
".",
"write",
"(",
"[",
"source_model",
"]",
",",
"f",
",",
"'%s'",
")",
"return",
"dest"
] | Writes a source model to XML.
:param dest:
Destination path
:param sources_or_groups:
Source model in different formats
:param name:
Name of the source model (if missing, extracted from the filename) | [
"Writes",
"a",
"source",
"model",
"to",
"XML",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/sourcewriter.py#L610-L641 |
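For reference, a minimal standalone sketch of the grouping step in `write_source_model` above: a flat list of sources is bucketed by `tectonic_region_type` before one group node is written per tectonic region. The engine uses its own `groupby` helper; this sketch uses `itertools.groupby` and an invented stand-in class instead, so it runs without openquake installed.

import operator
from itertools import groupby


class FakeSource(object):
    """Illustrative stand-in for a hazardlib source (not an engine class)."""
    def __init__(self, source_id, trt):
        self.source_id = source_id
        self.tectonic_region_type = trt


sources = [FakeSource('src1', 'Active Shallow Crust'),
           FakeSource('src2', 'Stable Continental'),
           FakeSource('src3', 'Active Shallow Crust')]

# bucket sources by TRT, as write_source_model does before building
# one source group per tectonic region type
key = operator.attrgetter('tectonic_region_type')
srcs_by_trt = {trt: list(grp)
               for trt, grp in groupby(sorted(sources, key=key), key)}

for trt, srcs in sorted(srcs_by_trt.items()):
    print(trt, [s.source_id for s in srcs])
# Active Shallow Crust ['src1', 'src3']
# Stable Continental ['src2']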
255 | gem/oq-engine | openquake/hazardlib/gsim/sharma_2009.py | SharmaEtAl2009._get_stddevs | def _get_stddevs(self, coeffs, stddev_types, num_sites):
"""
Return total sigma as reported in Table 2, p. 1202.
"""
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
stddevs.append(coeffs['sigma'] + np.zeros(num_sites))
return np.array(stddevs) | python | def _get_stddevs(self, coeffs, stddev_types, num_sites):
"""
Return total sigma as reported in Table 2, p. 1202.
"""
stddevs = []
for stddev_type in stddev_types:
assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
stddevs.append(coeffs['sigma'] + np.zeros(num_sites))
return np.array(stddevs) | [
"def",
"_get_stddevs",
"(",
"self",
",",
"coeffs",
",",
"stddev_types",
",",
"num_sites",
")",
":",
"stddevs",
"=",
"[",
"]",
"for",
"stddev_type",
"in",
"stddev_types",
":",
"assert",
"stddev_type",
"in",
"self",
".",
"DEFINED_FOR_STANDARD_DEVIATION_TYPES",
"stddevs",
".",
"append",
"(",
"coeffs",
"[",
"'sigma'",
"]",
"+",
"np",
".",
"zeros",
"(",
"num_sites",
")",
")",
"return",
"np",
".",
"array",
"(",
"stddevs",
")"
] | Return total sigma as reported in Table 2, p. 1202. | [
"Return",
"total",
"sigma",
"as",
"reported",
"in",
"Table",
"2",
"p",
".",
"1202",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sharma_2009.py#L121-L129 |
256 | gem/oq-engine | openquake/hazardlib/gsim/sharma_2009.py | SharmaEtAl2009.get_fault_type_dummy_variables | def get_fault_type_dummy_variables(self, rup):
"""
Fault-type classification dummy variable based on rup.rake.
"``H`` is 1 for a strike-slip mechanism and 0 for a reverse mechanism"
(p. 1201).
Note:
UserWarning is raised if mechanism is determined to be normal
faulting, since as summarized in Table 2 on p. 1197 the data used
for regression included only reverse and strike-slip events.
"""
# normal faulting
is_normal = np.array(
self.RAKE_THRESH < -rup.rake < (180. - self.RAKE_THRESH))
# reverse faulting
is_reverse = np.array(
self.RAKE_THRESH < rup.rake < (180. - self.RAKE_THRESH))
if not self.ALREADY_WARNED and is_normal.any():
# make sure that the warning is printed only once to avoid
# flooding the terminal
msg = ('Normal faulting not supported by %s; '
'treating as strike-slip' % type(self).__name__)
warnings.warn(msg, UserWarning)
self.ALREADY_WARNED = True
is_strike_slip = ~is_reverse | is_normal
is_strike_slip = is_strike_slip.astype(float)
return is_strike_slip | python | def get_fault_type_dummy_variables(self, rup):
"""
Fault-type classification dummy variable based on rup.rake.
"``H`` is 1 for a strike-slip mechanism and 0 for a reverse mechanism"
(p. 1201).
Note:
UserWarning is raised if mechanism is determined to be normal
faulting, since as summarized in Table 2 on p. 1197 the data used
for regression included only reverse and strike-slip events.
"""
# normal faulting
is_normal = np.array(
self.RAKE_THRESH < -rup.rake < (180. - self.RAKE_THRESH))
# reverse faulting
is_reverse = np.array(
self.RAKE_THRESH < rup.rake < (180. - self.RAKE_THRESH))
if not self.ALREADY_WARNED and is_normal.any():
# make sure that the warning is printed only once to avoid
# flooding the terminal
msg = ('Normal faulting not supported by %s; '
'treating as strike-slip' % type(self).__name__)
warnings.warn(msg, UserWarning)
self.ALREADY_WARNED = True
is_strike_slip = ~is_reverse | is_normal
is_strike_slip = is_strike_slip.astype(float)
return is_strike_slip | [
"def",
"get_fault_type_dummy_variables",
"(",
"self",
",",
"rup",
")",
":",
"# normal faulting",
"is_normal",
"=",
"np",
".",
"array",
"(",
"self",
".",
"RAKE_THRESH",
"<",
"-",
"rup",
".",
"rake",
"<",
"(",
"180.",
"-",
"self",
".",
"RAKE_THRESH",
")",
")",
"# reverse raulting",
"is_reverse",
"=",
"np",
".",
"array",
"(",
"self",
".",
"RAKE_THRESH",
"<",
"rup",
".",
"rake",
"<",
"(",
"180.",
"-",
"self",
".",
"RAKE_THRESH",
")",
")",
"if",
"not",
"self",
".",
"ALREADY_WARNED",
"and",
"is_normal",
".",
"any",
"(",
")",
":",
"# make sure that the warning is printed only once to avoid",
"# flooding the terminal",
"msg",
"=",
"(",
"'Normal faulting not supported by %s; '",
"'treating as strike-slip'",
"%",
"type",
"(",
"self",
")",
".",
"__name__",
")",
"warnings",
".",
"warn",
"(",
"msg",
",",
"UserWarning",
")",
"self",
".",
"ALREADY_WARNED",
"=",
"True",
"is_strike_slip",
"=",
"~",
"is_reverse",
"|",
"is_normal",
"is_strike_slip",
"=",
"is_strike_slip",
".",
"astype",
"(",
"float",
")",
"return",
"is_strike_slip"
] | Fault-type classification dummy variable based on rup.rake.
"``H`` is 1 for a strike-slip mechanism and 0 for a reverse mechanism"
(p. 1201).
Note:
UserWarning is raised if mechanism is determined to be normal
faulting, since as summarized in Table 2 on p. 1197 the data used
for regression included only reverse and strike-slip events. | [
"Fault",
"-",
"type",
"classification",
"dummy",
"variable",
"based",
"on",
"rup",
".",
"rake",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sharma_2009.py#L176-L208 |
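A standalone sketch of the rake windows used in `get_fault_type_dummy_variables` above, assuming `RAKE_THRESH` is 30 degrees (the actual threshold is a class attribute not shown in this record). For a scalar rake the chained comparisons reduce to simple interval tests:

import numpy as np

RAKE_THRESH = 30.0  # assumed value, for illustration only


def strike_slip_dummy(rake):
    """Return 1.0 for strike-slip/normal mechanisms, 0.0 for reverse."""
    is_normal = np.array(RAKE_THRESH < -rake < (180.0 - RAKE_THRESH))
    is_reverse = np.array(RAKE_THRESH < rake < (180.0 - RAKE_THRESH))
    return float(~is_reverse | is_normal)


for rake in (0.0, 90.0, -90.0, 180.0):
    print(rake, strike_slip_dummy(rake))
# 0.0 -> 1.0 (strike-slip), 90.0 -> 0.0 (reverse),
# -90.0 -> 1.0 (normal, treated as strike-slip with a warning in the GSIM),
# 180.0 -> 1.0 (strike-slip)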
257 | gem/oq-engine | openquake/hmtk/parsers/strain/strain_csv_parser.py | ReadStrainCsv.read_data | def read_data(self, scaling_factor=1E-9, strain_headers=None):
'''
Reads the data from the csv file
:param float scaling_factor:
Scaling factor used for all strain values (default 1E-9 for
nanostrain)
:param list strain_headers:
List of the variables in the file that correspond to strain
parameters
:returns:
strain - Strain model as an instance of the :class:
openquake.hmtk.strain.geodetic_strain.GeodeticStrain
'''
if strain_headers:
self.strain.data_variables = strain_headers
else:
self.strain.data_variables = STRAIN_VARIABLES
datafile = open(self.filename, 'r')
reader = csv.DictReader(datafile)
self.strain.data = dict([(name, []) for name in reader.fieldnames])
for row in reader:
for name in row.keys():
if 'region' in name.lower():
self.strain.data[name].append(row[name])
elif name in self.strain.data_variables:
self.strain.data[name].append(
scaling_factor * float(row[name]))
else:
self.strain.data[name].append(float(row[name]))
for key in self.strain.data.keys():
if 'region' in key:
self.strain.data[key] = np.array(self.strain.data[key],
dtype='S13')
else:
self.strain.data[key] = np.array(self.strain.data[key])
self._check_invalid_longitudes()
if 'region' not in self.strain.data:
print('No tectonic regionalisation found in input file!')
self.strain.data_variables = self.strain.data.keys()
# Update data with secondary data (i.e. 2nd invariant, e1h, e2h etc.)
self.strain.get_secondary_strain_data()
return self.strain | python | def read_data(self, scaling_factor=1E-9, strain_headers=None):
'''
Reads the data from the csv file
:param float scaling_factor:
Scaling factor used for all strain values (default 1E-9 for
nanostrain)
:param list strain_headers:
List of the variables in the file that correspond to strain
parameters
:returns:
strain - Strain model as an instance of the :class:
openquake.hmtk.strain.geodetic_strain.GeodeticStrain
'''
if strain_headers:
self.strain.data_variables = strain_headers
else:
self.strain.data_variables = STRAIN_VARIABLES
datafile = open(self.filename, 'r')
reader = csv.DictReader(datafile)
self.strain.data = dict([(name, []) for name in reader.fieldnames])
for row in reader:
for name in row.keys():
if 'region' in name.lower():
self.strain.data[name].append(row[name])
elif name in self.strain.data_variables:
self.strain.data[name].append(
scaling_factor * float(row[name]))
else:
self.strain.data[name].append(float(row[name]))
for key in self.strain.data.keys():
if 'region' in key:
self.strain.data[key] = np.array(self.strain.data[key],
dtype='S13')
else:
self.strain.data[key] = np.array(self.strain.data[key])
self._check_invalid_longitudes()
if 'region' not in self.strain.data:
print('No tectonic regionalisation found in input file!')
self.strain.data_variables = self.strain.data.keys()
# Update data with secondary data (i.e. 2nd invariant, e1h, e2h etc.)
self.strain.get_secondary_strain_data()
return self.strain | [
"def",
"read_data",
"(",
"self",
",",
"scaling_factor",
"=",
"1E-9",
",",
"strain_headers",
"=",
"None",
")",
":",
"if",
"strain_headers",
":",
"self",
".",
"strain",
".",
"data_variables",
"=",
"strain_headers",
"else",
":",
"self",
".",
"strain",
".",
"data_variables",
"=",
"STRAIN_VARIABLES",
"datafile",
"=",
"open",
"(",
"self",
".",
"filename",
",",
"'r'",
")",
"reader",
"=",
"csv",
".",
"DictReader",
"(",
"datafile",
")",
"self",
".",
"strain",
".",
"data",
"=",
"dict",
"(",
"[",
"(",
"name",
",",
"[",
"]",
")",
"for",
"name",
"in",
"reader",
".",
"fieldnames",
"]",
")",
"for",
"row",
"in",
"reader",
":",
"for",
"name",
"in",
"row",
".",
"keys",
"(",
")",
":",
"if",
"'region'",
"in",
"name",
".",
"lower",
"(",
")",
":",
"self",
".",
"strain",
".",
"data",
"[",
"name",
"]",
".",
"append",
"(",
"row",
"[",
"name",
"]",
")",
"elif",
"name",
"in",
"self",
".",
"strain",
".",
"data_variables",
":",
"self",
".",
"strain",
".",
"data",
"[",
"name",
"]",
".",
"append",
"(",
"scaling_factor",
"*",
"float",
"(",
"row",
"[",
"name",
"]",
")",
")",
"else",
":",
"self",
".",
"strain",
".",
"data",
"[",
"name",
"]",
".",
"append",
"(",
"float",
"(",
"row",
"[",
"name",
"]",
")",
")",
"for",
"key",
"in",
"self",
".",
"strain",
".",
"data",
".",
"keys",
"(",
")",
":",
"if",
"'region'",
"in",
"key",
":",
"self",
".",
"strain",
".",
"data",
"[",
"key",
"]",
"=",
"np",
".",
"array",
"(",
"self",
".",
"strain",
".",
"data",
"[",
"key",
"]",
",",
"dtype",
"=",
"'S13'",
")",
"else",
":",
"self",
".",
"strain",
".",
"data",
"[",
"key",
"]",
"=",
"np",
".",
"array",
"(",
"self",
".",
"strain",
".",
"data",
"[",
"key",
"]",
")",
"self",
".",
"_check_invalid_longitudes",
"(",
")",
"if",
"'region'",
"not",
"in",
"self",
".",
"strain",
".",
"data",
":",
"print",
"(",
"'No tectonic regionalisation found in input file!'",
")",
"self",
".",
"strain",
".",
"data_variables",
"=",
"self",
".",
"strain",
".",
"data",
".",
"keys",
"(",
")",
"# Update data with secondary data (i.e. 2nd invariant, e1h, e2h etc.",
"self",
".",
"strain",
".",
"get_secondary_strain_data",
"(",
")",
"return",
"self",
".",
"strain"
] | Reads the data from the csv file
:param float scaling_factor:
Scaling factor used for all strain values (default 1E-9 for
nanostrain)
:param list strain_headers:
List of the variables in the file that correspond to strain
parameters
:returns:
strain - Strain model as an instance of the :class:
openquake.hmtk.strain.geodetic_strain.GeodeticStrain | [
"Reads",
"the",
"data",
"from",
"the",
"csv",
"file"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/parsers/strain/strain_csv_parser.py#L82-L132 |
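A hedged usage sketch for the reader above; the constructor argument (a CSV path) and the input file name are assumptions, and the expected strain column names come from the module-level `STRAIN_VARIABLES` list, which is not shown in this record.

# minimal sketch, assuming ReadStrainCsv(filename) is the constructor signature
from openquake.hmtk.parsers.strain.strain_csv_parser import ReadStrainCsv

reader = ReadStrainCsv('average_strain.csv')     # hypothetical input file
strain = reader.read_data(scaling_factor=1E-9)   # nanostrain -> strain
print(sorted(strain.data))                       # columns found in the file
print(strain.get_number_observations())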
258 | gem/oq-engine | openquake/hmtk/parsers/strain/strain_csv_parser.py | ReadStrainCsv._check_invalid_longitudes | def _check_invalid_longitudes(self):
'''
Checks to ensure that all longitudes are in the range -180. to 180
'''
idlon = self.strain.data['longitude'] > 180.
if np.any(idlon):
self.strain.data['longitude'][idlon] = \
self.strain.data['longitude'][idlon] - 360. | python | def _check_invalid_longitudes(self):
'''
Checks to ensure that all longitudes are in the range -180. to 180
'''
idlon = self.strain.data['longitude'] > 180.
if np.any(idlon):
self.strain.data['longitude'][idlon] = \
self.strain.data['longitude'][idlon] - 360. | [
"def",
"_check_invalid_longitudes",
"(",
"self",
")",
":",
"idlon",
"=",
"self",
".",
"strain",
".",
"data",
"[",
"'longitude'",
"]",
">",
"180.",
"if",
"np",
".",
"any",
"(",
"idlon",
")",
":",
"self",
".",
"strain",
".",
"data",
"[",
"'longitude'",
"]",
"[",
"idlon",
"]",
"=",
"self",
".",
"strain",
".",
"data",
"[",
"'longitude'",
"]",
"[",
"idlon",
"]",
"-",
"360."
] | Checks to ensure that all longitudes are in the range -180. to 180 | [
"Checks",
"to",
"ensure",
"that",
"all",
"longitudes",
"are",
"in",
"the",
"range",
"-",
"180",
".",
"to",
"180"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/parsers/strain/strain_csv_parser.py#L134-L141 |
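The wrap-around in `_check_invalid_longitudes` is the usual 0..360 to -180..180 conversion, for example:

import numpy as np

lon = np.array([10.0, 185.5, 270.0, 359.9])
lon[lon > 180.0] -= 360.0    # same operation as _check_invalid_longitudes
print(lon)                   # [10., -174.5, -90., ~-0.1]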
259 | gem/oq-engine | openquake/hmtk/parsers/strain/strain_csv_parser.py | WriteStrainCsv.write_file | def write_file(self, strain, scaling_factor=1E-9):
'''
Main writer function for the csv file
:param strain:
Instance of :class: openquake.hmtk.strain.geodetic_strain.GeodeticStrain
:param float scaling_factor:
Scaling factor used for all strain values (default 1E-9 for
nanostrain)
'''
if not isinstance(strain, GeodeticStrain):
raise ValueError('Strain data must be instance of GeodeticStrain')
for key in strain.data.keys():
if key in strain.data_variables:
# Return strain value back to original scaling
if key in ['longitude', 'latitude']:
continue
strain.data[key] = strain.data[key] / scaling_factor
# Slice seismicity rates into separate dictionary vectors
strain, output_variables = self.slice_rates_to_data(strain)
outfile = open(self.filename, 'wt')
print('Writing strain data to file %s' % self.filename)
writer = csv.DictWriter(outfile,
fieldnames=output_variables)
writer.writeheader()
for iloc in range(0, strain.get_number_observations()):
row_dict = {}
for key in output_variables:
if len(strain.data[key]) > 0:
# Ignores empty dictionary attributes
row_dict[key] = strain.data[key][iloc]
writer.writerow(row_dict)
outfile.close()
print('done!') | python | def write_file(self, strain, scaling_factor=1E-9):
'''
Main writer function for the csv file
:param strain:
Instance of :class: openquake.hmtk.strain.geodetic_strain.GeodeticStrain
:param float scaling_factor:
Scaling factor used for all strain values (default 1E-9 for
nanostrain)
'''
if not isinstance(strain, GeodeticStrain):
raise ValueError('Strain data must be instance of GeodeticStrain')
for key in strain.data.keys():
if key in strain.data_variables:
# Return strain value back to original scaling
if key in ['longitude', 'latitude']:
continue
strain.data[key] = strain.data[key] / scaling_factor
# Slice seismicity rates into separate dictionary vectors
strain, output_variables = self.slice_rates_to_data(strain)
outfile = open(self.filename, 'wt')
print('Writing strain data to file %s' % self.filename)
writer = csv.DictWriter(outfile,
fieldnames=output_variables)
writer.writeheader()
for iloc in range(0, strain.get_number_observations()):
row_dict = {}
for key in output_variables:
if len(strain.data[key]) > 0:
# Ignores empty dictionary attributes
row_dict[key] = strain.data[key][iloc]
writer.writerow(row_dict)
outfile.close()
print('done!') | [
"def",
"write_file",
"(",
"self",
",",
"strain",
",",
"scaling_factor",
"=",
"1E-9",
")",
":",
"if",
"not",
"isinstance",
"(",
"strain",
",",
"GeodeticStrain",
")",
":",
"raise",
"ValueError",
"(",
"'Strain data must be instance of GeodeticStrain'",
")",
"for",
"key",
"in",
"strain",
".",
"data",
".",
"keys",
"(",
")",
":",
"if",
"key",
"in",
"strain",
".",
"data_variables",
":",
"# Return strain value back to original scaling",
"if",
"key",
"in",
"[",
"'longitude'",
",",
"'latitude'",
"]",
":",
"continue",
"strain",
".",
"data",
"[",
"key",
"]",
"=",
"strain",
".",
"data",
"[",
"key",
"]",
"/",
"scaling_factor",
"# Slice seismicity rates into separate dictionary vectors",
"strain",
",",
"output_variables",
"=",
"self",
".",
"slice_rates_to_data",
"(",
"strain",
")",
"outfile",
"=",
"open",
"(",
"self",
".",
"filename",
",",
"'wt'",
")",
"print",
"(",
"'Writing strain data to file %s'",
"%",
"self",
".",
"filename",
")",
"writer",
"=",
"csv",
".",
"DictWriter",
"(",
"outfile",
",",
"fieldnames",
"=",
"output_variables",
")",
"writer",
".",
"writeheader",
"(",
")",
"for",
"iloc",
"in",
"range",
"(",
"0",
",",
"strain",
".",
"get_number_observations",
"(",
")",
")",
":",
"row_dict",
"=",
"{",
"}",
"for",
"key",
"in",
"output_variables",
":",
"if",
"len",
"(",
"strain",
".",
"data",
"[",
"key",
"]",
")",
">",
"0",
":",
"# Ignores empty dictionary attributes",
"row_dict",
"[",
"key",
"]",
"=",
"strain",
".",
"data",
"[",
"key",
"]",
"[",
"iloc",
"]",
"writer",
".",
"writerow",
"(",
"row_dict",
")",
"outfile",
".",
"close",
"(",
")",
"print",
"(",
"'done!'",
")"
] | Main writer function for the csv file
:param strain:
Instance of :class: openquake.hmtk.strain.geodetic_strain.GeodeticStrain
:param float scaling_factor:
Scaling factor used for all strain values (default 1E-9 for
nanostrain) | [
"Main",
"writer",
"function",
"for",
"the",
"csv",
"file"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/parsers/strain/strain_csv_parser.py#L160-L196 |
260 | gem/oq-engine | openquake/hmtk/parsers/strain/strain_csv_parser.py | WriteStrainCsv.slice_rates_to_data | def slice_rates_to_data(self, strain):
'''
For the strain data, checks to see if seismicity rates have been
calculated. If so, each column in the array is sliced and stored as a
single vector in the strain.data dictionary with the corresponding
magnitude as a key.
:param strain:
Instance of :class: openquake.hmtk.strain.geodetic_strain.GeodeticStrain
:returns:
strain - Instance of strain class with updated data dictionary
output_variables - Updated list of headers
'''
output_variables = list(strain.data)
cond = (isinstance(strain.target_magnitudes, np.ndarray) or
isinstance(strain.target_magnitudes, list))
if cond:
magnitude_list = ['%.3f' % mag for mag in strain.target_magnitudes]
else:
return strain, output_variables
# Ensure that the number of rows in the rate array corresponds to the
# number of observations
assert np.shape(strain.seismicity_rate)[0] == \
strain.get_number_observations()
for iloc, magnitude in enumerate(magnitude_list):
strain.data[magnitude] = strain.seismicity_rate[:, iloc]
output_variables.extend(magnitude_list)
return strain, output_variables | python | def slice_rates_to_data(self, strain):
'''
For the strain data, checks to see if seismicity rates have been
calculated. If so, each column in the array is sliced and stored as a
single vector in the strain.data dictionary with the corresponding
magnitude as a key.
:param strain:
Instance of :class: openquake.hmtk.strain.geodetic_strain.GeodeticStrain
:returns:
strain - Instance of strain class with updated data dictionary
output_variables - Updated list of headers
'''
output_variables = list(strain.data)
cond = (isinstance(strain.target_magnitudes, np.ndarray) or
isinstance(strain.target_magnitudes, list))
if cond:
magnitude_list = ['%.3f' % mag for mag in strain.target_magnitudes]
else:
return strain, output_variables
# Ensure that the number of rows in the rate array corresponds to the
# number of observations
assert np.shape(strain.seismicity_rate)[0] == \
strain.get_number_observations()
for iloc, magnitude in enumerate(magnitude_list):
strain.data[magnitude] = strain.seismicity_rate[:, iloc]
output_variables.extend(magnitude_list)
return strain, output_variables | [
"def",
"slice_rates_to_data",
"(",
"self",
",",
"strain",
")",
":",
"output_variables",
"=",
"list",
"(",
"strain",
".",
"data",
")",
"cond",
"=",
"(",
"isinstance",
"(",
"strain",
".",
"target_magnitudes",
",",
"np",
".",
"ndarray",
")",
"or",
"isinstance",
"(",
"strain",
".",
"target_magnitudes",
",",
"list",
")",
")",
"if",
"cond",
":",
"magnitude_list",
"=",
"[",
"'%.3f'",
"%",
"mag",
"for",
"mag",
"in",
"strain",
".",
"target_magnitudes",
"]",
"else",
":",
"return",
"strain",
",",
"output_variables",
"# Ensure that the number of rows in the rate array corresponds to the",
"# number of observations",
"assert",
"np",
".",
"shape",
"(",
"strain",
".",
"seismicity_rate",
")",
"[",
"0",
"]",
"==",
"strain",
".",
"get_number_observations",
"(",
")",
"for",
"iloc",
",",
"magnitude",
"in",
"enumerate",
"(",
"magnitude_list",
")",
":",
"strain",
".",
"data",
"[",
"magnitude",
"]",
"=",
"strain",
".",
"seismicity_rate",
"[",
":",
",",
"iloc",
"]",
"output_variables",
".",
"extend",
"(",
"magnitude_list",
")",
"return",
"strain",
",",
"output_variables"
] | For the strain data, checks to see if seismicity rates have been
calculated. If so, each column in the array is sliced and stored as a
single vector in the strain.data dictionary with the corresponding
magnitude as a key.
:param strain:
Instance of :class: openquake.hmtk.strain.geodetic_strain.GeodeticStrain
:returns:
strain - Instance of strain class with updated data dictionary
output_variables - Updated list of headers | [
"For",
"the",
"strain",
"data",
"checks",
"to",
"see",
"if",
"seismicity",
"rates",
"have",
"been",
"calculated",
".",
"If",
"so",
"each",
"column",
"in",
"the",
"array",
"is",
"sliced",
"and",
"stored",
"as",
"a",
"single",
"vector",
"in",
"the",
"strain",
".",
"data",
"dictionary",
"with",
"the",
"corresponding",
"magnitude",
"as",
"a",
"key",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/parsers/strain/strain_csv_parser.py#L198-L228 |
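A standalone illustration of the slicing step in `slice_rates_to_data` above: a seismicity-rate array of shape (n_observations, n_magnitudes) becomes one vector per magnitude, keyed by the magnitude string formatted with '%.3f'. The rate values below are made up.

import numpy as np

target_magnitudes = [5.0, 5.5, 6.0]
# 4 observations x 3 magnitudes of (made-up) annual rates
seismicity_rate = np.array([[1e-2, 3e-3, 8e-4],
                            [2e-2, 5e-3, 1e-3],
                            [9e-3, 2e-3, 6e-4],
                            [1e-2, 4e-3, 9e-4]])

data = {}
for iloc, mag in enumerate(['%.3f' % m for m in target_magnitudes]):
    data[mag] = seismicity_rate[:, iloc]

print(sorted(data))     # ['5.000', '5.500', '6.000']
print(data['5.500'])    # rate column for Mw 5.5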
261 | gem/oq-engine | openquake/baselib/__init__.py | read | def read(*paths, **validators):
"""
Load the configuration, make each section available in a separate dict.
The configuration location can be specified via an environment variable:
- OQ_CONFIG_FILE
In the absence of this environment variable the following paths will be
used:
- sys.prefix + /openquake.cfg when in a virtualenv
- /etc/openquake/openquake.cfg outside of a virtualenv
If those files are missing, the fallback is the source code:
- openquake/engine/openquake.cfg
Please note: settings in the site configuration file are overridden
by settings with the same key names in the OQ_CONFIG_FILE openquake.cfg.
"""
paths = config.paths + list(paths)
parser = configparser.ConfigParser()
found = parser.read(os.path.normpath(os.path.expanduser(p)) for p in paths)
if not found:
raise IOError('No configuration file found in %s' % str(paths))
config.found = found
config.clear()
for section in parser.sections():
config[section] = sec = DotDict(parser.items(section))
for k, v in sec.items():
sec[k] = validators.get(k, lambda x: x)(v) | python | def read(*paths, **validators):
"""
Load the configuration, make each section available in a separate dict.
The configuration location can be specified via an environment variable:
- OQ_CONFIG_FILE
In the absence of this environment variable the following paths will be
used:
- sys.prefix + /openquake.cfg when in a virtualenv
- /etc/openquake/openquake.cfg outside of a virtualenv
If those files are missing, the fallback is the source code:
- openquake/engine/openquake.cfg
Please note: settings in the site configuration file are overridden
by settings with the same key names in the OQ_CONFIG_FILE openquake.cfg.
"""
paths = config.paths + list(paths)
parser = configparser.ConfigParser()
found = parser.read(os.path.normpath(os.path.expanduser(p)) for p in paths)
if not found:
raise IOError('No configuration file found in %s' % str(paths))
config.found = found
config.clear()
for section in parser.sections():
config[section] = sec = DotDict(parser.items(section))
for k, v in sec.items():
sec[k] = validators.get(k, lambda x: x)(v) | [
"def",
"read",
"(",
"*",
"paths",
",",
"*",
"*",
"validators",
")",
":",
"paths",
"=",
"config",
".",
"paths",
"+",
"list",
"(",
"paths",
")",
"parser",
"=",
"configparser",
".",
"ConfigParser",
"(",
")",
"found",
"=",
"parser",
".",
"read",
"(",
"os",
".",
"path",
".",
"normpath",
"(",
"os",
".",
"path",
".",
"expanduser",
"(",
"p",
")",
")",
"for",
"p",
"in",
"paths",
")",
"if",
"not",
"found",
":",
"raise",
"IOError",
"(",
"'No configuration file found in %s'",
"%",
"str",
"(",
"paths",
")",
")",
"config",
".",
"found",
"=",
"found",
"config",
".",
"clear",
"(",
")",
"for",
"section",
"in",
"parser",
".",
"sections",
"(",
")",
":",
"config",
"[",
"section",
"]",
"=",
"sec",
"=",
"DotDict",
"(",
"parser",
".",
"items",
"(",
"section",
")",
")",
"for",
"k",
",",
"v",
"in",
"sec",
".",
"items",
"(",
")",
":",
"sec",
"[",
"k",
"]",
"=",
"validators",
".",
"get",
"(",
"k",
",",
"lambda",
"x",
":",
"x",
")",
"(",
"v",
")"
] | Load the configuration, make each section available in a separate dict.
The configuration location can be specified via an environment variable:
- OQ_CONFIG_FILE
In the absence of this environment variable the following paths will be
used:
- sys.prefix + /openquake.cfg when in a virtualenv
- /etc/openquake/openquake.cfg outside of a virtualenv
If those files are missing, the fallback is the source code:
- openquake/engine/openquake.cfg
Please note: settings in the site configuration file are overridden
by settings with the same key names in the OQ_CONFIG_FILE openquake.cfg. | [
"Load",
"the",
"configuration",
"make",
"each",
"section",
"available",
"in",
"a",
"separate",
"dict",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/__init__.py#L56-L84 |
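The `validators` keyword arguments of `read` map option names to conversion callables; any key without a validator is kept as a string. A self-contained sketch of that mechanism using only the standard library (the section and option names below are invented for illustration):

import configparser

raw = """
[distribution]
oq_distribute = processpool
num_cores = 4

[dbserver]
multi_user = false
"""


def to_bool(flag):
    # same idea as the boolean() helper documented below
    return flag.lower() in ('1', 'yes', 'true')


validators = {'num_cores': int, 'multi_user': to_bool}

parser = configparser.ConfigParser()
parser.read_string(raw)

cfg = {}
for section in parser.sections():
    cfg[section] = {k: validators.get(k, lambda x: x)(v)
                    for k, v in parser.items(section)}

print(cfg['distribution']['num_cores'] + 1)   # 5 -- already an int
print(cfg['dbserver']['multi_user'])          # False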
262 | gem/oq-engine | openquake/baselib/__init__.py | boolean | def boolean(flag):
"""
Convert string to boolean
"""
s = flag.lower()
if s in ('1', 'yes', 'true'):
return True
elif s in ('0', 'no', 'false'):
return False
raise ValueError('Unknown flag %r' % s) | python | def boolean(flag):
"""
Convert string to boolean
"""
s = flag.lower()
if s in ('1', 'yes', 'true'):
return True
elif s in ('0', 'no', 'false'):
return False
raise ValueError('Unknown flag %r' % s) | [
"def",
"boolean",
"(",
"flag",
")",
":",
"s",
"=",
"flag",
".",
"lower",
"(",
")",
"if",
"s",
"in",
"(",
"'1'",
",",
"'yes'",
",",
"'true'",
")",
":",
"return",
"True",
"elif",
"s",
"in",
"(",
"'0'",
",",
"'no'",
",",
"'false'",
")",
":",
"return",
"False",
"raise",
"ValueError",
"(",
"'Unknown flag %r'",
"%",
"s",
")"
] | Convert string to boolean | [
"Convert",
"string",
"in",
"boolean"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/__init__.py#L90-L99 |
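Usage of `boolean` is straightforward; anything outside the six recognised strings raises ValueError:

from openquake.baselib import boolean  # defined in the record above

assert boolean('Yes') is True
assert boolean('0') is False
try:
    boolean('maybe')
except ValueError as exc:
    print(exc)   # Unknown flag 'maybe'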
263 | gem/oq-engine | openquake/hazardlib/gsim/atkinson_boore_2006.py | AtkinsonBoore2006._get_mean | def _get_mean(self, vs30, mag, rrup, imt, scale_fac):
"""
Compute and return mean
"""
C_HR, C_BC, C_SR, SC = self._extract_coeffs(imt)
rrup = self._clip_distances(rrup)
f0 = self._compute_f0_factor(rrup)
f1 = self._compute_f1_factor(rrup)
f2 = self._compute_f2_factor(rrup)
pga_bc = self._get_pga_bc(
f0, f1, f2, SC, mag, rrup, vs30, scale_fac
)
# compute mean values for hard-rock sites (vs30 >= 2000),
# and non-hard-rock sites (vs30 < 2000) and add soil amplification
# term
mean = np.zeros_like(vs30)
self._compute_mean(C_HR, f0, f1, f2, SC, mag, rrup,
vs30 >= 2000.0, mean, scale_fac)
self._compute_mean(C_BC, f0, f1, f2, SC, mag, rrup,
vs30 < 2000.0, mean, scale_fac)
self._compute_soil_amplification(C_SR, vs30, pga_bc, mean)
# convert from base 10 to base e
if imt == PGV():
mean = np.log(10 ** mean)
else:
# convert from cm/s**2 to g
mean = np.log((10 ** mean) * 1e-2 / g)
return mean | python | def _get_mean(self, vs30, mag, rrup, imt, scale_fac):
"""
Compute and return mean
"""
C_HR, C_BC, C_SR, SC = self._extract_coeffs(imt)
rrup = self._clip_distances(rrup)
f0 = self._compute_f0_factor(rrup)
f1 = self._compute_f1_factor(rrup)
f2 = self._compute_f2_factor(rrup)
pga_bc = self._get_pga_bc(
f0, f1, f2, SC, mag, rrup, vs30, scale_fac
)
# compute mean values for hard-rock sites (vs30 >= 2000),
# and non-hard-rock sites (vs30 < 2000) and add soil amplification
# term
mean = np.zeros_like(vs30)
self._compute_mean(C_HR, f0, f1, f2, SC, mag, rrup,
vs30 >= 2000.0, mean, scale_fac)
self._compute_mean(C_BC, f0, f1, f2, SC, mag, rrup,
vs30 < 2000.0, mean, scale_fac)
self._compute_soil_amplification(C_SR, vs30, pga_bc, mean)
# convert from base 10 to base e
if imt == PGV():
mean = np.log(10 ** mean)
else:
# convert from cm/s**2 to g
mean = np.log((10 ** mean) * 1e-2 / g)
return mean | [
"def",
"_get_mean",
"(",
"self",
",",
"vs30",
",",
"mag",
",",
"rrup",
",",
"imt",
",",
"scale_fac",
")",
":",
"C_HR",
",",
"C_BC",
",",
"C_SR",
",",
"SC",
"=",
"self",
".",
"_extract_coeffs",
"(",
"imt",
")",
"rrup",
"=",
"self",
".",
"_clip_distances",
"(",
"rrup",
")",
"f0",
"=",
"self",
".",
"_compute_f0_factor",
"(",
"rrup",
")",
"f1",
"=",
"self",
".",
"_compute_f1_factor",
"(",
"rrup",
")",
"f2",
"=",
"self",
".",
"_compute_f2_factor",
"(",
"rrup",
")",
"pga_bc",
"=",
"self",
".",
"_get_pga_bc",
"(",
"f0",
",",
"f1",
",",
"f2",
",",
"SC",
",",
"mag",
",",
"rrup",
",",
"vs30",
",",
"scale_fac",
")",
"# compute mean values for hard-rock sites (vs30 >= 2000),",
"# and non-hard-rock sites (vs30 < 2000) and add soil amplification",
"# term",
"mean",
"=",
"np",
".",
"zeros_like",
"(",
"vs30",
")",
"self",
".",
"_compute_mean",
"(",
"C_HR",
",",
"f0",
",",
"f1",
",",
"f2",
",",
"SC",
",",
"mag",
",",
"rrup",
",",
"vs30",
">=",
"2000.0",
",",
"mean",
",",
"scale_fac",
")",
"self",
".",
"_compute_mean",
"(",
"C_BC",
",",
"f0",
",",
"f1",
",",
"f2",
",",
"SC",
",",
"mag",
",",
"rrup",
",",
"vs30",
"<",
"2000.0",
",",
"mean",
",",
"scale_fac",
")",
"self",
".",
"_compute_soil_amplification",
"(",
"C_SR",
",",
"vs30",
",",
"pga_bc",
",",
"mean",
")",
"# convert from base 10 to base e",
"if",
"imt",
"==",
"PGV",
"(",
")",
":",
"mean",
"=",
"np",
".",
"log",
"(",
"10",
"**",
"mean",
")",
"else",
":",
"# convert from cm/s**2 to g",
"mean",
"=",
"np",
".",
"log",
"(",
"(",
"10",
"**",
"mean",
")",
"*",
"1e-2",
"/",
"g",
")",
"return",
"mean"
] | Compute and return mean | [
"Compute",
"and",
"return",
"mean"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/atkinson_boore_2006.py#L109-L142 |
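The last step of `_get_mean` above changes both the logarithm base and the physical units: the regression works with log10 of cm/s**2 (or cm/s for PGV), while hazardlib expects the natural log of g. A small numeric check of that conversion (the sample value is arbitrary):

import numpy as np
from scipy.constants import g   # 9.80665 m/s^2

log10_psa_cm_s2 = 2.5                        # arbitrary log10(PSA) in cm/s^2
psa_g = (10 ** log10_psa_cm_s2) * 1e-2 / g   # cm/s^2 -> m/s^2 -> g
ln_psa_g = np.log(psa_g)                     # what _get_mean returns

print(psa_g)      # ~0.322 g
print(ln_psa_g)   # ~-1.13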
264 | gem/oq-engine | openquake/hazardlib/gsim/atkinson_boore_2006.py | AtkinsonBoore2006._get_pga_bc | def _get_pga_bc(self, f0, f1, f2, SC, mag, rrup, vs30, scale_fac):
"""
Compute and return PGA on BC boundary
"""
pga_bc = np.zeros_like(vs30)
self._compute_mean(self.COEFFS_BC[PGA()], f0, f1, f2, SC, mag,
rrup, vs30 < 2000.0, pga_bc, scale_fac)
return (10 ** pga_bc) * 1e-2 / g | python | def _get_pga_bc(self, f0, f1, f2, SC, mag, rrup, vs30, scale_fac):
"""
Compute and return PGA on BC boundary
"""
pga_bc = np.zeros_like(vs30)
self._compute_mean(self.COEFFS_BC[PGA()], f0, f1, f2, SC, mag,
rrup, vs30 < 2000.0, pga_bc, scale_fac)
return (10 ** pga_bc) * 1e-2 / g | [
"def",
"_get_pga_bc",
"(",
"self",
",",
"f0",
",",
"f1",
",",
"f2",
",",
"SC",
",",
"mag",
",",
"rrup",
",",
"vs30",
",",
"scale_fac",
")",
":",
"pga_bc",
"=",
"np",
".",
"zeros_like",
"(",
"vs30",
")",
"self",
".",
"_compute_mean",
"(",
"self",
".",
"COEFFS_BC",
"[",
"PGA",
"(",
")",
"]",
",",
"f0",
",",
"f1",
",",
"f2",
",",
"SC",
",",
"mag",
",",
"rrup",
",",
"vs30",
"<",
"2000.0",
",",
"pga_bc",
",",
"scale_fac",
")",
"return",
"(",
"10",
"**",
"pga_bc",
")",
"*",
"1e-2",
"/",
"g"
] | Compute and return PGA on BC boundary | [
"Compute",
"and",
"return",
"PGA",
"on",
"BC",
"boundary"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/atkinson_boore_2006.py#L144-L152 |
265 | gem/oq-engine | openquake/hazardlib/gsim/atkinson_boore_2006.py | AtkinsonBoore2006._extract_coeffs | def _extract_coeffs(self, imt):
"""
Extract dictionaries of coefficients specific to required
intensity measure type.
"""
C_HR = self.COEFFS_HARD_ROCK[imt]
C_BC = self.COEFFS_BC[imt]
C_SR = self.COEFFS_SOIL_RESPONSE[imt]
SC = self.COEFFS_STRESS[imt]
return C_HR, C_BC, C_SR, SC | python | def _extract_coeffs(self, imt):
"""
Extract dictionaries of coefficients specific to required
intensity measure type.
"""
C_HR = self.COEFFS_HARD_ROCK[imt]
C_BC = self.COEFFS_BC[imt]
C_SR = self.COEFFS_SOIL_RESPONSE[imt]
SC = self.COEFFS_STRESS[imt]
return C_HR, C_BC, C_SR, SC | [
"def",
"_extract_coeffs",
"(",
"self",
",",
"imt",
")",
":",
"C_HR",
"=",
"self",
".",
"COEFFS_HARD_ROCK",
"[",
"imt",
"]",
"C_BC",
"=",
"self",
".",
"COEFFS_BC",
"[",
"imt",
"]",
"C_SR",
"=",
"self",
".",
"COEFFS_SOIL_RESPONSE",
"[",
"imt",
"]",
"SC",
"=",
"self",
".",
"COEFFS_STRESS",
"[",
"imt",
"]",
"return",
"C_HR",
",",
"C_BC",
",",
"C_SR",
",",
"SC"
] | Extract dictionaries of coefficients specific to required
intensity measure type. | [
"Extract",
"dictionaries",
"of",
"coefficients",
"specific",
"to",
"required",
"intensity",
"measure",
"type",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/atkinson_boore_2006.py#L154-L164 |
266 | gem/oq-engine | openquake/calculators/getters.py | PmapGetter.init | def init(self):
"""
Read the poes and set the .data attribute with the hazard curves
"""
if hasattr(self, 'data'): # already initialized
return
if isinstance(self.dstore, str):
self.dstore = hdf5.File(self.dstore, 'r')
else:
self.dstore.open('r') # if not
if self.sids is None:
self.sids = self.dstore['sitecol'].sids
oq = self.dstore['oqparam']
self.imtls = oq.imtls
self.poes = self.poes or oq.poes
self.data = {}
try:
hcurves = self.get_hcurves(self.imtls) # shape (R, N)
except IndexError: # no data
return
for sid, hcurve_by_rlz in zip(self.sids, hcurves.T):
self.data[sid] = datadict = {}
for rlzi, hcurve in enumerate(hcurve_by_rlz):
datadict[rlzi] = lst = [None for imt in self.imtls]
for imti, imt in enumerate(self.imtls):
lst[imti] = hcurve[imt] | python | def init(self):
"""
Read the poes and set the .data attribute with the hazard curves
"""
if hasattr(self, 'data'): # already initialized
return
if isinstance(self.dstore, str):
self.dstore = hdf5.File(self.dstore, 'r')
else:
self.dstore.open('r') # if not
if self.sids is None:
self.sids = self.dstore['sitecol'].sids
oq = self.dstore['oqparam']
self.imtls = oq.imtls
self.poes = self.poes or oq.poes
self.data = {}
try:
hcurves = self.get_hcurves(self.imtls) # shape (R, N)
except IndexError: # no data
return
for sid, hcurve_by_rlz in zip(self.sids, hcurves.T):
self.data[sid] = datadict = {}
for rlzi, hcurve in enumerate(hcurve_by_rlz):
datadict[rlzi] = lst = [None for imt in self.imtls]
for imti, imt in enumerate(self.imtls):
lst[imti] = hcurve[imt] | [
"def",
"init",
"(",
"self",
")",
":",
"if",
"hasattr",
"(",
"self",
",",
"'data'",
")",
":",
"# already initialized",
"return",
"if",
"isinstance",
"(",
"self",
".",
"dstore",
",",
"str",
")",
":",
"self",
".",
"dstore",
"=",
"hdf5",
".",
"File",
"(",
"self",
".",
"dstore",
",",
"'r'",
")",
"else",
":",
"self",
".",
"dstore",
".",
"open",
"(",
"'r'",
")",
"# if not",
"if",
"self",
".",
"sids",
"is",
"None",
":",
"self",
".",
"sids",
"=",
"self",
".",
"dstore",
"[",
"'sitecol'",
"]",
".",
"sids",
"oq",
"=",
"self",
".",
"dstore",
"[",
"'oqparam'",
"]",
"self",
".",
"imtls",
"=",
"oq",
".",
"imtls",
"self",
".",
"poes",
"=",
"self",
".",
"poes",
"or",
"oq",
".",
"poes",
"self",
".",
"data",
"=",
"{",
"}",
"try",
":",
"hcurves",
"=",
"self",
".",
"get_hcurves",
"(",
"self",
".",
"imtls",
")",
"# shape (R, N)",
"except",
"IndexError",
":",
"# no data",
"return",
"for",
"sid",
",",
"hcurve_by_rlz",
"in",
"zip",
"(",
"self",
".",
"sids",
",",
"hcurves",
".",
"T",
")",
":",
"self",
".",
"data",
"[",
"sid",
"]",
"=",
"datadict",
"=",
"{",
"}",
"for",
"rlzi",
",",
"hcurve",
"in",
"enumerate",
"(",
"hcurve_by_rlz",
")",
":",
"datadict",
"[",
"rlzi",
"]",
"=",
"lst",
"=",
"[",
"None",
"for",
"imt",
"in",
"self",
".",
"imtls",
"]",
"for",
"imti",
",",
"imt",
"in",
"enumerate",
"(",
"self",
".",
"imtls",
")",
":",
"lst",
"[",
"imti",
"]",
"=",
"hcurve",
"[",
"imt",
"]"
] | Read the poes and set the .data attribute with the hazard curves | [
"Read",
"the",
"poes",
"and",
"set",
"the",
".",
"data",
"attribute",
"with",
"the",
"hazard",
"curves"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L66-L91 |
267 | gem/oq-engine | openquake/calculators/getters.py | PmapGetter.get_mean | def get_mean(self, grp=None):
"""
Compute the mean curve as a ProbabilityMap
:param grp:
if not None must be a string of the form "grp-XX"; in that case
returns the mean considering only the contribution for group XX
"""
self.init()
if len(self.weights) == 1: # one realization
# the standard deviation is zero
pmap = self.get(0, grp)
for sid, pcurve in pmap.items():
array = numpy.zeros(pcurve.array.shape[:-1] + (2,))
array[:, 0] = pcurve.array[:, 0]
pcurve.array = array
return pmap
else: # multiple realizations
dic = ({g: self.dstore['poes/' + g] for g in self.dstore['poes']}
if grp is None else {grp: self.dstore['poes/' + grp]})
pmaps = self.rlzs_assoc.combine_pmaps(dic)
return stats.compute_pmap_stats(
pmaps, [stats.mean_curve, stats.std_curve],
self.weights, self.imtls) | python | def get_mean(self, grp=None):
"""
Compute the mean curve as a ProbabilityMap
:param grp:
if not None must be a string of the form "grp-XX"; in that case
returns the mean considering only the contribution for group XX
"""
self.init()
if len(self.weights) == 1: # one realization
# the standard deviation is zero
pmap = self.get(0, grp)
for sid, pcurve in pmap.items():
array = numpy.zeros(pcurve.array.shape[:-1] + (2,))
array[:, 0] = pcurve.array[:, 0]
pcurve.array = array
return pmap
else: # multiple realizations
dic = ({g: self.dstore['poes/' + g] for g in self.dstore['poes']}
if grp is None else {grp: self.dstore['poes/' + grp]})
pmaps = self.rlzs_assoc.combine_pmaps(dic)
return stats.compute_pmap_stats(
pmaps, [stats.mean_curve, stats.std_curve],
self.weights, self.imtls) | [
"def",
"get_mean",
"(",
"self",
",",
"grp",
"=",
"None",
")",
":",
"self",
".",
"init",
"(",
")",
"if",
"len",
"(",
"self",
".",
"weights",
")",
"==",
"1",
":",
"# one realization",
"# the standard deviation is zero",
"pmap",
"=",
"self",
".",
"get",
"(",
"0",
",",
"grp",
")",
"for",
"sid",
",",
"pcurve",
"in",
"pmap",
".",
"items",
"(",
")",
":",
"array",
"=",
"numpy",
".",
"zeros",
"(",
"pcurve",
".",
"array",
".",
"shape",
"[",
":",
"-",
"1",
"]",
"+",
"(",
"2",
",",
")",
")",
"array",
"[",
":",
",",
"0",
"]",
"=",
"pcurve",
".",
"array",
"[",
":",
",",
"0",
"]",
"pcurve",
".",
"array",
"=",
"array",
"return",
"pmap",
"else",
":",
"# multiple realizations",
"dic",
"=",
"(",
"{",
"g",
":",
"self",
".",
"dstore",
"[",
"'poes/'",
"+",
"g",
"]",
"for",
"g",
"in",
"self",
".",
"dstore",
"[",
"'poes'",
"]",
"}",
"if",
"grp",
"is",
"None",
"else",
"{",
"grp",
":",
"self",
".",
"dstore",
"[",
"'poes/'",
"+",
"grp",
"]",
"}",
")",
"pmaps",
"=",
"self",
".",
"rlzs_assoc",
".",
"combine_pmaps",
"(",
"dic",
")",
"return",
"stats",
".",
"compute_pmap_stats",
"(",
"pmaps",
",",
"[",
"stats",
".",
"mean_curve",
",",
"stats",
".",
"std_curve",
"]",
",",
"self",
".",
"weights",
",",
"self",
".",
"imtls",
")"
] | Compute the mean curve as a ProbabilityMap
:param grp:
if not None must be a string of the form "grp-XX"; in that case
returns the mean considering only the contribution for group XX | [
"Compute",
"the",
"mean",
"curve",
"as",
"a",
"ProbabilityMap"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L189-L212 |
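In the multi-realization branch of `get_mean` above the statistics reduce to a weighted mean (and standard deviation) across realizations, site by site and intensity level by intensity level. A simplified standalone illustration with made-up probabilities of exceedance; the real code works on ProbabilityMap objects through `compute_pmap_stats`, whose exact estimators may differ.

import numpy as np

# PoEs for one site: 3 realizations x 4 intensity levels (made-up numbers)
poes = np.array([[0.12, 0.05, 0.010, 0.002],
                 [0.10, 0.04, 0.008, 0.001],
                 [0.15, 0.07, 0.020, 0.004]])
weights = np.array([0.5, 0.3, 0.2])   # realization weights, summing to 1

mean_curve = weights @ poes
# weighted standard deviation around the weighted mean
std_curve = np.sqrt(weights @ (poes - mean_curve) ** 2)

print(mean_curve)
print(std_curve)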
268 | gem/oq-engine | openquake/calculators/getters.py | GmfGetter.init | def init(self):
"""
Initialize the computers. Should be called on the workers
"""
if hasattr(self, 'computers'): # init already called
return
with hdf5.File(self.rupgetter.filename, 'r') as parent:
self.weights = parent['weights'].value
self.computers = []
for ebr in self.rupgetter.get_ruptures(self.srcfilter):
sitecol = self.sitecol.filtered(ebr.sids)
try:
computer = calc.gmf.GmfComputer(
ebr, sitecol, self.oqparam.imtls, self.cmaker,
self.oqparam.truncation_level, self.correl_model)
except FarAwayRupture:
# due to numeric errors, ruptures within the maximum_distance
# when written, can be outside when read; I found a case with
# a distance of 99.9996936 km over a maximum distance of 100 km
continue
self.computers.append(computer) | python | def init(self):
"""
Initialize the computers. Should be called on the workers
"""
if hasattr(self, 'computers'): # init already called
return
with hdf5.File(self.rupgetter.filename, 'r') as parent:
self.weights = parent['weights'].value
self.computers = []
for ebr in self.rupgetter.get_ruptures(self.srcfilter):
sitecol = self.sitecol.filtered(ebr.sids)
try:
computer = calc.gmf.GmfComputer(
ebr, sitecol, self.oqparam.imtls, self.cmaker,
self.oqparam.truncation_level, self.correl_model)
except FarAwayRupture:
# due to numeric errors, ruptures within the maximum_distance
# when written, can be outside when read; I found a case with
# a distance of 99.9996936 km over a maximum distance of 100 km
continue
self.computers.append(computer) | [
"def",
"init",
"(",
"self",
")",
":",
"if",
"hasattr",
"(",
"self",
",",
"'computers'",
")",
":",
"# init already called",
"return",
"with",
"hdf5",
".",
"File",
"(",
"self",
".",
"rupgetter",
".",
"filename",
",",
"'r'",
")",
"as",
"parent",
":",
"self",
".",
"weights",
"=",
"parent",
"[",
"'weights'",
"]",
".",
"value",
"self",
".",
"computers",
"=",
"[",
"]",
"for",
"ebr",
"in",
"self",
".",
"rupgetter",
".",
"get_ruptures",
"(",
"self",
".",
"srcfilter",
")",
":",
"sitecol",
"=",
"self",
".",
"sitecol",
".",
"filtered",
"(",
"ebr",
".",
"sids",
")",
"try",
":",
"computer",
"=",
"calc",
".",
"gmf",
".",
"GmfComputer",
"(",
"ebr",
",",
"sitecol",
",",
"self",
".",
"oqparam",
".",
"imtls",
",",
"self",
".",
"cmaker",
",",
"self",
".",
"oqparam",
".",
"truncation_level",
",",
"self",
".",
"correl_model",
")",
"except",
"FarAwayRupture",
":",
"# due to numeric errors, ruptures within the maximum_distance",
"# when written, can be outside when read; I found a case with",
"# a distance of 99.9996936 km over a maximum distance of 100 km",
"continue",
"self",
".",
"computers",
".",
"append",
"(",
"computer",
")"
] | Initialize the computers. Should be called on the workers | [
"Initialize",
"the",
"computers",
".",
"Should",
"be",
"called",
"on",
"the",
"workers"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/calculators/getters.py#L306-L326 |
269 | gem/oq-engine | openquake/hazardlib/gsim/skarlatoudis_2013.py | SkarlatoudisEtAlSSlab2013._compute_forearc_backarc_term | def _compute_forearc_backarc_term(self, C, sites, dists, rup):
"""
Compute back-arc term of Equation 3
"""
# flag 1 (R < 335 & R >= 205)
flag1 = np.zeros(len(dists.rhypo))
ind1 = np.logical_and((dists.rhypo < 335), (dists.rhypo >= 205))
flag1[ind1] = 1.0
# flag 2 (R >= 335)
flag2 = np.zeros(len(dists.rhypo))
ind2 = (dists.rhypo >= 335)
flag2[ind2] = 1.0
# flag 3 (R < 240 & R >= 140)
flag3 = np.zeros(len(dists.rhypo))
ind3 = np.logical_and((dists.rhypo < 240), (dists.rhypo >= 140))
flag3[ind3] = 1.0
# flag 4 (R >= 240)
flag4 = np.zeros(len(dists.rhypo))
ind4 = (dists.rhypo >= 240)
flag4[ind4] = 1.0
A = flag1 * ((205 - dists.rhypo)/150) + flag2
B = flag3 * ((140 - dists.rhypo)/100) + flag4
if (rup.hypo_depth < 80):
FHR = A
else:
FHR = B
H0 = 100
# Heaviside function
if (rup.hypo_depth >= H0):
H = 1
else:
H = 0
# ARC = 0 for back-arc - ARC = 1 for forearc
ARC = np.zeros(len(sites.backarc))
idxarc = (sites.backarc == 1)
ARC[idxarc] = 1.0
return ((C['c41'] * (1 - ARC) * H) + (C['c42'] * (1 - ARC) * H * FHR) +
(C['c51'] * ARC * H) + (C['c52'] * ARC * H * FHR)) | python | def _compute_forearc_backarc_term(self, C, sites, dists, rup):
"""
Compute back-arc term of Equation 3
"""
# flag 1 (R < 335 & R >= 205)
flag1 = np.zeros(len(dists.rhypo))
ind1 = np.logical_and((dists.rhypo < 335), (dists.rhypo >= 205))
flag1[ind1] = 1.0
# flag 2 (R >= 335)
flag2 = np.zeros(len(dists.rhypo))
ind2 = (dists.rhypo >= 335)
flag2[ind2] = 1.0
# flag 3 (R < 240 & R >= 140)
flag3 = np.zeros(len(dists.rhypo))
ind3 = np.logical_and((dists.rhypo < 240), (dists.rhypo >= 140))
flag3[ind3] = 1.0
# flag 4 (R >= 240)
flag4 = np.zeros(len(dists.rhypo))
ind4 = (dists.rhypo >= 240)
flag4[ind4] = 1.0
A = flag1 * ((205 - dists.rhypo)/150) + flag2
B = flag3 * ((140 - dists.rhypo)/100) + flag4
if (rup.hypo_depth < 80):
FHR = A
else:
FHR = B
H0 = 100
# Heaviside function
if (rup.hypo_depth >= H0):
H = 1
else:
H = 0
# ARC = 0 for back-arc - ARC = 1 for forearc
ARC = np.zeros(len(sites.backarc))
idxarc = (sites.backarc == 1)
ARC[idxarc] = 1.0
return ((C['c41'] * (1 - ARC) * H) + (C['c42'] * (1 - ARC) * H * FHR) +
(C['c51'] * ARC * H) + (C['c52'] * ARC * H * FHR)) | [
"def",
"_compute_forearc_backarc_term",
"(",
"self",
",",
"C",
",",
"sites",
",",
"dists",
",",
"rup",
")",
":",
"# flag 1 (R < 335 & R >= 205)",
"flag1",
"=",
"np",
".",
"zeros",
"(",
"len",
"(",
"dists",
".",
"rhypo",
")",
")",
"ind1",
"=",
"np",
".",
"logical_and",
"(",
"(",
"dists",
".",
"rhypo",
"<",
"335",
")",
",",
"(",
"dists",
".",
"rhypo",
">=",
"205",
")",
")",
"flag1",
"[",
"ind1",
"]",
"=",
"1.0",
"# flag 2 (R >= 335)",
"flag2",
"=",
"np",
".",
"zeros",
"(",
"len",
"(",
"dists",
".",
"rhypo",
")",
")",
"ind2",
"=",
"(",
"dists",
".",
"rhypo",
">=",
"335",
")",
"flag2",
"[",
"ind2",
"]",
"=",
"1.0",
"# flag 3 (R < 240 & R >= 140)",
"flag3",
"=",
"np",
".",
"zeros",
"(",
"len",
"(",
"dists",
".",
"rhypo",
")",
")",
"ind3",
"=",
"np",
".",
"logical_and",
"(",
"(",
"dists",
".",
"rhypo",
"<",
"240",
")",
",",
"(",
"dists",
".",
"rhypo",
">=",
"140",
")",
")",
"flag3",
"[",
"ind3",
"]",
"=",
"1.0",
"# flag 4 (R >= 240)",
"flag4",
"=",
"np",
".",
"zeros",
"(",
"len",
"(",
"dists",
".",
"rhypo",
")",
")",
"ind4",
"=",
"(",
"dists",
".",
"rhypo",
">=",
"240",
")",
"flag4",
"[",
"ind4",
"]",
"=",
"1.0",
"A",
"=",
"flag1",
"*",
"(",
"(",
"205",
"-",
"dists",
".",
"rhypo",
")",
"/",
"150",
")",
"+",
"flag2",
"B",
"=",
"flag3",
"*",
"(",
"(",
"140",
"-",
"dists",
".",
"rhypo",
")",
"/",
"100",
")",
"+",
"flag4",
"if",
"(",
"rup",
".",
"hypo_depth",
"<",
"80",
")",
":",
"FHR",
"=",
"A",
"else",
":",
"FHR",
"=",
"B",
"H0",
"=",
"100",
"# Heaviside function",
"if",
"(",
"rup",
".",
"hypo_depth",
">=",
"H0",
")",
":",
"H",
"=",
"1",
"else",
":",
"H",
"=",
"0",
"# ARC = 0 for back-arc - ARC = 1 for forearc",
"ARC",
"=",
"np",
".",
"zeros",
"(",
"len",
"(",
"sites",
".",
"backarc",
")",
")",
"idxarc",
"=",
"(",
"sites",
".",
"backarc",
"==",
"1",
")",
"ARC",
"[",
"idxarc",
"]",
"=",
"1.0",
"return",
"(",
"(",
"C",
"[",
"'c41'",
"]",
"*",
"(",
"1",
"-",
"ARC",
")",
"*",
"H",
")",
"+",
"(",
"C",
"[",
"'c42'",
"]",
"*",
"(",
"1",
"-",
"ARC",
")",
"*",
"H",
"*",
"FHR",
")",
"+",
"(",
"C",
"[",
"'c51'",
"]",
"*",
"ARC",
"*",
"H",
")",
"+",
"(",
"C",
"[",
"'c52'",
"]",
"*",
"ARC",
"*",
"H",
"*",
"FHR",
")",
")"
] | Compute back-arc term of Equation 3 | [
"Compute",
"back",
"-",
"arc",
"term",
"of",
"Equation",
"3"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/skarlatoudis_2013.py#L177-L219 |
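The `_compute_forearc_backarc_term` entry above builds its distance dependence from four indicator arrays. Below is a minimal numpy-only sketch of just that flag/taper arithmetic, evaluated on hypothetical hypocentral distances; the c41-c52 coefficients, the backarc flags and the Heaviside depth term are deliberately omitted.

```python
import numpy as np

# Hypothetical hypocentral distances in km (illustrative values only).
rhypo = np.array([100., 150., 220., 300., 400.])

# Shallow-event factors (hypo_depth < 80 km): flag1 selects 205-335 km,
# flag2 selects >= 335 km, as in the entry above.
flag1 = np.where((rhypo >= 205) & (rhypo < 335), 1.0, 0.0)
flag2 = np.where(rhypo >= 335, 1.0, 0.0)
A = flag1 * ((205 - rhypo) / 150) + flag2

# Deep-event factors (hypo_depth >= 80 km): flag3 selects 140-240 km,
# flag4 selects >= 240 km.
flag3 = np.where((rhypo >= 140) & (rhypo < 240), 1.0, 0.0)
flag4 = np.where(rhypo >= 240, 1.0, 0.0)
B = flag3 * ((140 - rhypo) / 100) + flag4

print(A, B)  # piecewise factors that later scale the c42/c52 terms
```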
270 | gem/oq-engine | openquake/hazardlib/gsim/gmpe_table.py | AmplificationTable._build_data | def _build_data(self, amplification_group):
"""
Creates the numpy array tables from the hdf5 tables
"""
# Determine shape of the tables
n_levels = len(amplification_group)
# Checks the first group in the amplification group and returns the
# shape of the SA array - implicitly assumes the SA array in all
# amplification groups is the same shape
level = next(iter(amplification_group))
n_d, n_p, n_m = amplification_group[level]["IMLs/SA"].shape
assert n_d == len(self.distances), (n_d, len(self.distances))
assert n_m == len(self.magnitudes), (n_m, len(self.magnitudes))
# Instantiate the arrays with ones
self.mean = {"SA": numpy.ones([n_d, n_p, n_m, n_levels]),
"PGA": numpy.ones([n_d, 1, n_m, n_levels]),
"PGV": numpy.ones([n_d, 1, n_m, n_levels])}
self.sigma = {}
for stddev_type in [const.StdDev.TOTAL, const.StdDev.INTER_EVENT,
const.StdDev.INTRA_EVENT]:
level = next(iter(amplification_group))
if stddev_type in amplification_group[level]:
self.sigma[stddev_type] = deepcopy(self.mean)
for iloc, (level, amp_model) in enumerate(amplification_group.items()):
if "SA" in amp_model["IMLs"]:
if iloc == 0:
self.periods = amp_model["IMLs/T"][:]
else:
assert numpy.allclose(self.periods, amp_model["IMLs/T"][:])
for imt in ["SA", "PGA", "PGV"]:
if imt in amp_model["IMLs"]:
self.mean[imt][:, :, :, self.argidx[iloc]] = \
amp_model["IMLs/" + imt][:]
for stddev_type in self.sigma:
self.sigma[stddev_type][imt][
:, :, :, self.argidx[iloc]] = \
amp_model["/".join([stddev_type, imt])][:]
self.shape = (n_d, n_p, n_m, n_levels) | python | def _build_data(self, amplification_group):
"""
Creates the numpy array tables from the hdf5 tables
"""
# Determine shape of the tables
n_levels = len(amplification_group)
# Checks the first group in the amplification group and returns the
# shape of the SA array - implicitly assumes the SA array in all
# amplification groups is the same shape
level = next(iter(amplification_group))
n_d, n_p, n_m = amplification_group[level]["IMLs/SA"].shape
assert n_d == len(self.distances), (n_d, len(self.distances))
assert n_m == len(self.magnitudes), (n_m, len(self.magnitudes))
# Instantiate the arrays with ones
self.mean = {"SA": numpy.ones([n_d, n_p, n_m, n_levels]),
"PGA": numpy.ones([n_d, 1, n_m, n_levels]),
"PGV": numpy.ones([n_d, 1, n_m, n_levels])}
self.sigma = {}
for stddev_type in [const.StdDev.TOTAL, const.StdDev.INTER_EVENT,
const.StdDev.INTRA_EVENT]:
level = next(iter(amplification_group))
if stddev_type in amplification_group[level]:
self.sigma[stddev_type] = deepcopy(self.mean)
for iloc, (level, amp_model) in enumerate(amplification_group.items()):
if "SA" in amp_model["IMLs"]:
if iloc == 0:
self.periods = amp_model["IMLs/T"][:]
else:
assert numpy.allclose(self.periods, amp_model["IMLs/T"][:])
for imt in ["SA", "PGA", "PGV"]:
if imt in amp_model["IMLs"]:
self.mean[imt][:, :, :, self.argidx[iloc]] = \
amp_model["IMLs/" + imt][:]
for stddev_type in self.sigma:
self.sigma[stddev_type][imt][
:, :, :, self.argidx[iloc]] = \
amp_model["/".join([stddev_type, imt])][:]
self.shape = (n_d, n_p, n_m, n_levels) | [
"def",
"_build_data",
"(",
"self",
",",
"amplification_group",
")",
":",
"# Determine shape of the tables",
"n_levels",
"=",
"len",
"(",
"amplification_group",
")",
"# Checks the first group in the amplification group and returns the",
"# shape of the SA array - implicitly assumes the SA array in all",
"# amplification groups is the same shape",
"level",
"=",
"next",
"(",
"iter",
"(",
"amplification_group",
")",
")",
"n_d",
",",
"n_p",
",",
"n_m",
"=",
"amplification_group",
"[",
"level",
"]",
"[",
"\"IMLs/SA\"",
"]",
".",
"shape",
"assert",
"n_d",
"==",
"len",
"(",
"self",
".",
"distances",
")",
",",
"(",
"n_d",
",",
"len",
"(",
"self",
".",
"distances",
")",
")",
"assert",
"n_m",
"==",
"len",
"(",
"self",
".",
"magnitudes",
")",
",",
"(",
"n_m",
",",
"len",
"(",
"self",
".",
"magnitudes",
")",
")",
"# Instantiate the arrays with ones",
"self",
".",
"mean",
"=",
"{",
"\"SA\"",
":",
"numpy",
".",
"ones",
"(",
"[",
"n_d",
",",
"n_p",
",",
"n_m",
",",
"n_levels",
"]",
")",
",",
"\"PGA\"",
":",
"numpy",
".",
"ones",
"(",
"[",
"n_d",
",",
"1",
",",
"n_m",
",",
"n_levels",
"]",
")",
",",
"\"PGV\"",
":",
"numpy",
".",
"ones",
"(",
"[",
"n_d",
",",
"1",
",",
"n_m",
",",
"n_levels",
"]",
")",
"}",
"self",
".",
"sigma",
"=",
"{",
"}",
"for",
"stddev_type",
"in",
"[",
"const",
".",
"StdDev",
".",
"TOTAL",
",",
"const",
".",
"StdDev",
".",
"INTER_EVENT",
",",
"const",
".",
"StdDev",
".",
"INTRA_EVENT",
"]",
":",
"level",
"=",
"next",
"(",
"iter",
"(",
"amplification_group",
")",
")",
"if",
"stddev_type",
"in",
"amplification_group",
"[",
"level",
"]",
":",
"self",
".",
"sigma",
"[",
"stddev_type",
"]",
"=",
"deepcopy",
"(",
"self",
".",
"mean",
")",
"for",
"iloc",
",",
"(",
"level",
",",
"amp_model",
")",
"in",
"enumerate",
"(",
"amplification_group",
".",
"items",
"(",
")",
")",
":",
"if",
"\"SA\"",
"in",
"amp_model",
"[",
"\"IMLs\"",
"]",
":",
"if",
"iloc",
"==",
"0",
":",
"self",
".",
"periods",
"=",
"amp_model",
"[",
"\"IMLs/T\"",
"]",
"[",
":",
"]",
"else",
":",
"assert",
"numpy",
".",
"allclose",
"(",
"self",
".",
"periods",
",",
"amp_model",
"[",
"\"IMLs/T\"",
"]",
"[",
":",
"]",
")",
"for",
"imt",
"in",
"[",
"\"SA\"",
",",
"\"PGA\"",
",",
"\"PGV\"",
"]",
":",
"if",
"imt",
"in",
"amp_model",
"[",
"\"IMLs\"",
"]",
":",
"self",
".",
"mean",
"[",
"imt",
"]",
"[",
":",
",",
":",
",",
":",
",",
"self",
".",
"argidx",
"[",
"iloc",
"]",
"]",
"=",
"amp_model",
"[",
"\"IMLs/\"",
"+",
"imt",
"]",
"[",
":",
"]",
"for",
"stddev_type",
"in",
"self",
".",
"sigma",
":",
"self",
".",
"sigma",
"[",
"stddev_type",
"]",
"[",
"imt",
"]",
"[",
":",
",",
":",
",",
":",
",",
"self",
".",
"argidx",
"[",
"iloc",
"]",
"]",
"=",
"amp_model",
"[",
"\"/\"",
".",
"join",
"(",
"[",
"stddev_type",
",",
"imt",
"]",
")",
"]",
"[",
":",
"]",
"self",
".",
"shape",
"=",
"(",
"n_d",
",",
"n_p",
",",
"n_m",
",",
"n_levels",
")"
] | Creates the numpy array tables from the hdf5 tables | [
"Creates",
"the",
"numpy",
"array",
"tables",
"from",
"the",
"hdf5",
"tables"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L112-L150 |
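For the `_build_data` entry above, the essential operation is stacking one 3-D table per amplification level into a 4-D array indexed [distance, period, magnitude, level]. A toy sketch of that assignment pattern with hypothetical shapes and values (no HDF5 involved):

```python
import numpy as np

# Hypothetical dimensions (not from the dataset): 3 distances, 2 periods,
# 4 magnitudes, 2 amplification levels.
n_d, n_p, n_m, n_levels = 3, 2, 4, 2
mean_sa = np.ones([n_d, n_p, n_m, n_levels])

# Per-level tables as they would come out of the per-level "IMLs/SA" datasets.
level_tables = [np.full((n_d, n_p, n_m), 1.2),
                np.full((n_d, n_p, n_m), 0.8)]
for iloc, table in enumerate(level_tables):
    mean_sa[:, :, :, iloc] = table  # same slot-assignment pattern as above
```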
271 | gem/oq-engine | openquake/hazardlib/gsim/gmpe_table.py | AmplificationTable.get_amplification_factors | def get_amplification_factors(self, imt, sctx, rctx, dists, stddev_types):
"""
Returns the amplification factors for the given rupture and site
conditions.
:param imt:
Intensity measure type as an instance of the :class:
`openquake.hazardlib.imt`
:param sctx:
SiteCollection instance
:param rctx:
Rupture instance
:param dists:
Source to site distances (km)
:param stddev_types:
List of required standard deviation types
:returns:
* mean_amp - Amplification factors applied to the median ground
motion
* sigma_amps - List of modification factors applied to the
standard deviations of ground motion
"""
dist_level_table = self.get_mean_table(imt, rctx)
sigma_tables = self.get_sigma_tables(imt, rctx, stddev_types)
mean_interpolator = interp1d(self.values,
numpy.log10(dist_level_table),
axis=1)
sigma_interpolators = [interp1d(self.values, sigma_table, axis=1)
for sigma_table in sigma_tables]
if self.element == "Rupture":
mean_amp = 10.0 ** mean_interpolator(
getattr(rctx, self.parameter))[0] * numpy.ones_like(dists)
sigma_amps = []
for sig_interpolator in sigma_interpolators:
sigma_amps.append(sig_interpolator(
getattr(rctx, self.parameter))[0] * numpy.ones_like(dists))
else:
mean_amp = 10.0 ** mean_interpolator(
getattr(sctx, self.parameter))[0, :]
sigma_amps = []
for sig_interpolator in sigma_interpolators:
sigma_amps.append(sig_interpolator(
getattr(sctx, self.parameter))[0, :] *
numpy.ones_like(dists))
return mean_amp, sigma_amps | python | def get_amplification_factors(self, imt, sctx, rctx, dists, stddev_types):
"""
Returns the amplification factors for the given rupture and site
conditions.
:param imt:
Intensity measure type as an instance of the :class:
`openquake.hazardlib.imt`
:param sctx:
SiteCollection instance
:param rctx:
Rupture instance
:param dists:
Source to site distances (km)
:param stddev_types:
List of required standard deviation types
:returns:
* mean_amp - Amplification factors applied to the median ground
motion
* sigma_amps - List of modification factors applied to the
standard deviations of ground motion
"""
dist_level_table = self.get_mean_table(imt, rctx)
sigma_tables = self.get_sigma_tables(imt, rctx, stddev_types)
mean_interpolator = interp1d(self.values,
numpy.log10(dist_level_table),
axis=1)
sigma_interpolators = [interp1d(self.values, sigma_table, axis=1)
for sigma_table in sigma_tables]
if self.element == "Rupture":
mean_amp = 10.0 ** mean_interpolator(
getattr(rctx, self.parameter))[0] * numpy.ones_like(dists)
sigma_amps = []
for sig_interpolator in sigma_interpolators:
sigma_amps.append(sig_interpolator(
getattr(rctx, self.parameter))[0] * numpy.ones_like(dists))
else:
mean_amp = 10.0 ** mean_interpolator(
getattr(sctx, self.parameter))[0, :]
sigma_amps = []
for sig_interpolator in sigma_interpolators:
sigma_amps.append(sig_interpolator(
getattr(sctx, self.parameter))[0, :] *
numpy.ones_like(dists))
return mean_amp, sigma_amps | [
"def",
"get_amplification_factors",
"(",
"self",
",",
"imt",
",",
"sctx",
",",
"rctx",
",",
"dists",
",",
"stddev_types",
")",
":",
"dist_level_table",
"=",
"self",
".",
"get_mean_table",
"(",
"imt",
",",
"rctx",
")",
"sigma_tables",
"=",
"self",
".",
"get_sigma_tables",
"(",
"imt",
",",
"rctx",
",",
"stddev_types",
")",
"mean_interpolator",
"=",
"interp1d",
"(",
"self",
".",
"values",
",",
"numpy",
".",
"log10",
"(",
"dist_level_table",
")",
",",
"axis",
"=",
"1",
")",
"sigma_interpolators",
"=",
"[",
"interp1d",
"(",
"self",
".",
"values",
",",
"sigma_table",
",",
"axis",
"=",
"1",
")",
"for",
"sigma_table",
"in",
"sigma_tables",
"]",
"if",
"self",
".",
"element",
"==",
"\"Rupture\"",
":",
"mean_amp",
"=",
"10.0",
"**",
"mean_interpolator",
"(",
"getattr",
"(",
"rctx",
",",
"self",
".",
"parameter",
")",
")",
"[",
"0",
"]",
"*",
"numpy",
".",
"ones_like",
"(",
"dists",
")",
"sigma_amps",
"=",
"[",
"]",
"for",
"sig_interpolator",
"in",
"sigma_interpolators",
":",
"sigma_amps",
".",
"append",
"(",
"sig_interpolator",
"(",
"getattr",
"(",
"rctx",
",",
"self",
".",
"parameter",
")",
")",
"[",
"0",
"]",
"*",
"numpy",
".",
"ones_like",
"(",
"dists",
")",
")",
"else",
":",
"mean_amp",
"=",
"10.0",
"**",
"mean_interpolator",
"(",
"getattr",
"(",
"sctx",
",",
"self",
".",
"parameter",
")",
")",
"[",
"0",
",",
":",
"]",
"sigma_amps",
"=",
"[",
"]",
"for",
"sig_interpolator",
"in",
"sigma_interpolators",
":",
"sigma_amps",
".",
"append",
"(",
"sig_interpolator",
"(",
"getattr",
"(",
"sctx",
",",
"self",
".",
"parameter",
")",
")",
"[",
"0",
",",
":",
"]",
"*",
"numpy",
".",
"ones_like",
"(",
"dists",
")",
")",
"return",
"mean_amp",
",",
"sigma_amps"
] | Returns the amplification factors for the given rupture and site
conditions.
:param imt:
Intensity measure type as an instance of the :class:
`openquake.hazardlib.imt`
:param sctx:
SiteCollection instance
:param rctx:
Rupture instance
:param dists:
Source to site distances (km)
:param stddev_types:
List of required standard deviation types
:returns:
* mean_amp - Amplification factors applied to the median ground
motion
* sigma_amps - List of modification factors applied to the
standard deviations of ground motion | [
"Returns",
"the",
"amplification",
"factors",
"for",
"the",
"given",
"rupture",
"and",
"site",
"conditions",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L158-L202 |
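The `get_amplification_factors` entry above interpolates the amplification tables in log10 space over the controlling site or rupture parameter and then transforms back. A self-contained sketch of that single step, using made-up level values and a made-up [distances, levels] table:

```python
import numpy as np
from scipy.interpolate import interp1d

# Hypothetical amplification levels (e.g. vs30 values) and a
# [n_distances, n_levels] table of median amplification factors.
values = np.array([200., 400., 800.])
dist_level_table = np.array([[2.0, 1.4, 1.0],
                             [1.8, 1.3, 1.0]])

# Interpolate log10(amplification) along the level axis, then back-transform.
mean_interp = interp1d(values, np.log10(dist_level_table), axis=1)
site_value = 300.0                        # hypothetical site parameter
mean_amp = 10.0 ** mean_interp(site_value)
print(mean_amp)                           # one factor per distance row
```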
272 | gem/oq-engine | openquake/hazardlib/gsim/gmpe_table.py | AmplificationTable.get_mean_table | def get_mean_table(self, imt, rctx):
"""
Returns amplification factors for the mean, given the rupture and
intensity measure type.
:returns:
amplification table as an array of [Number Distances,
Number Levels]
"""
# Levels by Distances
if imt.name in 'PGA PGV':
interpolator = interp1d(self.magnitudes,
numpy.log10(self.mean[imt.name]), axis=2)
output_table = 10.0 ** (
interpolator(rctx.mag).reshape(self.shape[0], self.shape[3]))
else:
# For spectral accelerations - need two step process
# Interpolate period - log-log space
interpolator = interp1d(numpy.log10(self.periods),
numpy.log10(self.mean["SA"]),
axis=1)
period_table = interpolator(numpy.log10(imt.period))
# Interpolate magnitude - linear-log space
mag_interpolator = interp1d(self.magnitudes, period_table, axis=1)
output_table = 10.0 ** mag_interpolator(rctx.mag)
return output_table | python | def get_mean_table(self, imt, rctx):
"""
Returns amplification factors for the mean, given the rupture and
intensity measure type.
:returns:
amplification table as an array of [Number Distances,
Number Levels]
"""
# Levels by Distances
if imt.name in 'PGA PGV':
interpolator = interp1d(self.magnitudes,
numpy.log10(self.mean[imt.name]), axis=2)
output_table = 10.0 ** (
interpolator(rctx.mag).reshape(self.shape[0], self.shape[3]))
else:
# For spectral accelerations - need two step process
# Interpolate period - log-log space
interpolator = interp1d(numpy.log10(self.periods),
numpy.log10(self.mean["SA"]),
axis=1)
period_table = interpolator(numpy.log10(imt.period))
# Interpolate magnitude - linear-log space
mag_interpolator = interp1d(self.magnitudes, period_table, axis=1)
output_table = 10.0 ** mag_interpolator(rctx.mag)
return output_table | [
"def",
"get_mean_table",
"(",
"self",
",",
"imt",
",",
"rctx",
")",
":",
"# Levels by Distances",
"if",
"imt",
".",
"name",
"in",
"'PGA PGV'",
":",
"interpolator",
"=",
"interp1d",
"(",
"self",
".",
"magnitudes",
",",
"numpy",
".",
"log10",
"(",
"self",
".",
"mean",
"[",
"imt",
".",
"name",
"]",
")",
",",
"axis",
"=",
"2",
")",
"output_table",
"=",
"10.0",
"**",
"(",
"interpolator",
"(",
"rctx",
".",
"mag",
")",
".",
"reshape",
"(",
"self",
".",
"shape",
"[",
"0",
"]",
",",
"self",
".",
"shape",
"[",
"3",
"]",
")",
")",
"else",
":",
"# For spectral accelerations - need two step process",
"# Interpolate period - log-log space",
"interpolator",
"=",
"interp1d",
"(",
"numpy",
".",
"log10",
"(",
"self",
".",
"periods",
")",
",",
"numpy",
".",
"log10",
"(",
"self",
".",
"mean",
"[",
"\"SA\"",
"]",
")",
",",
"axis",
"=",
"1",
")",
"period_table",
"=",
"interpolator",
"(",
"numpy",
".",
"log10",
"(",
"imt",
".",
"period",
")",
")",
"# Interpolate magnitude - linear-log space",
"mag_interpolator",
"=",
"interp1d",
"(",
"self",
".",
"magnitudes",
",",
"period_table",
",",
"axis",
"=",
"1",
")",
"output_table",
"=",
"10.0",
"**",
"mag_interpolator",
"(",
"rctx",
".",
"mag",
")",
"return",
"output_table"
] | Returns amplification factors for the mean, given the rupture and
intensity measure type.
:returns:
amplification table as an array of [Number Distances,
Number Levels] | [
"Returns",
"amplification",
"factors",
"for",
"the",
"mean",
"given",
"the",
"rupture",
"and",
"intensity",
"measure",
"type",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L204-L229 |
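The `get_mean_table` entry above uses a two-step interpolation for SA: log-log over period, then linear in magnitude over the log10 amplitudes, with the back-transform applied only at the end. A standalone sketch with hypothetical periods, magnitudes and a random table:

```python
import numpy as np
from scipy.interpolate import interp1d

# Hypothetical SA table shaped [n_distances, n_periods, n_magnitudes].
periods = np.array([0.1, 0.5, 1.0])
mags = np.array([5.0, 6.0, 7.0])
sa = np.random.uniform(0.5, 2.0, size=(4, 3, 3))

# Step 1: log-log interpolation over period (axis 1); the result stays in
# log10 units, exactly as period_table does above.
per_interp = interp1d(np.log10(periods), np.log10(sa), axis=1)
period_table = per_interp(np.log10(0.3))      # -> [n_distances, n_magnitudes]

# Step 2: linear(magnitude) vs log10(amplitude) interpolation, then 10**.
mag_interp = interp1d(mags, period_table, axis=1)
output_table = 10.0 ** mag_interp(6.3)        # -> one value per distance
print(output_table)
```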
273 | gem/oq-engine | openquake/hazardlib/gsim/gmpe_table.py | AmplificationTable.get_sigma_tables | def get_sigma_tables(self, imt, rctx, stddev_types):
"""
Returns modification factors for the standard deviations, given the
rupture and intensity measure type.
:returns:
List of standard deviation modification tables, each as an array
of [Number Distances, Number Levels]
"""
output_tables = []
for stddev_type in stddev_types:
# For PGA and PGV only needs to apply magnitude interpolation
if imt.name in 'PGA PGV':
interpolator = interp1d(self.magnitudes,
self.sigma[stddev_type][imt.name],
axis=2)
output_tables.append(
interpolator(rctx.mag).reshape(self.shape[0],
self.shape[3]))
else:
# For spectral accelerations - need two step process
# Interpolate period
interpolator = interp1d(numpy.log10(self.periods),
self.sigma[stddev_type]["SA"],
axis=1)
period_table = interpolator(numpy.log10(imt.period))
mag_interpolator = interp1d(self.magnitudes,
period_table,
axis=1)
output_tables.append(mag_interpolator(rctx.mag))
return output_tables | python | def get_sigma_tables(self, imt, rctx, stddev_types):
"""
Returns modification factors for the standard deviations, given the
rupture and intensity measure type.
:returns:
List of standard deviation modification tables, each as an array
of [Number Distances, Number Levels]
"""
output_tables = []
for stddev_type in stddev_types:
# For PGA and PGV only needs to apply magnitude interpolation
if imt.name in 'PGA PGV':
interpolator = interp1d(self.magnitudes,
self.sigma[stddev_type][imt.name],
axis=2)
output_tables.append(
interpolator(rctx.mag).reshape(self.shape[0],
self.shape[3]))
else:
# For spectral accelerations - need two step process
# Interpolate period
interpolator = interp1d(numpy.log10(self.periods),
self.sigma[stddev_type]["SA"],
axis=1)
period_table = interpolator(numpy.log10(imt.period))
mag_interpolator = interp1d(self.magnitudes,
period_table,
axis=1)
output_tables.append(mag_interpolator(rctx.mag))
return output_tables | [
"def",
"get_sigma_tables",
"(",
"self",
",",
"imt",
",",
"rctx",
",",
"stddev_types",
")",
":",
"output_tables",
"=",
"[",
"]",
"for",
"stddev_type",
"in",
"stddev_types",
":",
"# For PGA and PGV only needs to apply magnitude interpolation",
"if",
"imt",
".",
"name",
"in",
"'PGA PGV'",
":",
"interpolator",
"=",
"interp1d",
"(",
"self",
".",
"magnitudes",
",",
"self",
".",
"sigma",
"[",
"stddev_type",
"]",
"[",
"imt",
".",
"name",
"]",
",",
"axis",
"=",
"2",
")",
"output_tables",
".",
"append",
"(",
"interpolator",
"(",
"rctx",
".",
"mag",
")",
".",
"reshape",
"(",
"self",
".",
"shape",
"[",
"0",
"]",
",",
"self",
".",
"shape",
"[",
"3",
"]",
")",
")",
"else",
":",
"# For spectral accelerations - need two step process",
"# Interpolate period",
"interpolator",
"=",
"interp1d",
"(",
"numpy",
".",
"log10",
"(",
"self",
".",
"periods",
")",
",",
"self",
".",
"sigma",
"[",
"stddev_type",
"]",
"[",
"\"SA\"",
"]",
",",
"axis",
"=",
"1",
")",
"period_table",
"=",
"interpolator",
"(",
"numpy",
".",
"log10",
"(",
"imt",
".",
"period",
")",
")",
"mag_interpolator",
"=",
"interp1d",
"(",
"self",
".",
"magnitudes",
",",
"period_table",
",",
"axis",
"=",
"1",
")",
"output_tables",
".",
"append",
"(",
"mag_interpolator",
"(",
"rctx",
".",
"mag",
")",
")",
"return",
"output_tables"
] | Returns modification factors for the standard deviations, given the
rupture and intensity measure type.
:returns:
List of standard deviation modification tables, each as an array
of [Number Distances, Number Levels] | [
"Returns",
"modification",
"factors",
"for",
"the",
"standard",
"deviations",
"given",
"the",
"rupture",
"and",
"intensity",
"measure",
"type",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L231-L263 |
274 | gem/oq-engine | openquake/hazardlib/gsim/gmpe_table.py | GMPETable.init | def init(self, fle=None):
"""
Executes the preprocessing steps at the instantiation stage to read in
the tables from hdf5 and hold them in memory.
"""
if fle is None:
fname = self.kwargs.get('gmpe_table', self.GMPE_TABLE)
if fname is None:
raise ValueError('You forgot to set GMPETable.GMPE_TABLE!')
elif os.path.isabs(fname):
self.GMPE_TABLE = fname
else:
# NB: (hackish) GMPE_DIR must be set externally
self.GMPE_TABLE = os.path.abspath(
os.path.join(self.GMPE_DIR, fname))
fle = h5py.File(self.GMPE_TABLE, "r")
try:
# this is the format inside the datastore
self.distance_type = fle["distance_type"].value
except KeyError:
# this is the original format outside the datastore
self.distance_type = decode(fle["Distances"].attrs["metric"])
self.REQUIRES_DISTANCES = set([self.distance_type])
# Load in magnitude
self.m_w = fle["Mw"][:]
# Load in distances
self.distances = fle["Distances"][:]
# Load intensity measure types and levels
self.imls = hdf_arrays_to_dict(fle["IMLs"])
self.DEFINED_FOR_INTENSITY_MEASURE_TYPES = set(self._supported_imts())
if "SA" in self.imls and "T" not in self.imls:
raise ValueError("Spectral Acceleration must be accompanied by "
"periods")
# Get the standard deviations
self._setup_standard_deviations(fle)
if "Amplification" in fle:
self._setup_amplification(fle) | python | def init(self, fle=None):
"""
Executes the preprocessing steps at the instantiation stage to read in
the tables from hdf5 and hold them in memory.
"""
if fle is None:
fname = self.kwargs.get('gmpe_table', self.GMPE_TABLE)
if fname is None:
raise ValueError('You forgot to set GMPETable.GMPE_TABLE!')
elif os.path.isabs(fname):
self.GMPE_TABLE = fname
else:
# NB: (hackish) GMPE_DIR must be set externally
self.GMPE_TABLE = os.path.abspath(
os.path.join(self.GMPE_DIR, fname))
fle = h5py.File(self.GMPE_TABLE, "r")
try:
# this is the format inside the datastore
self.distance_type = fle["distance_type"].value
except KeyError:
# this is the original format outside the datastore
self.distance_type = decode(fle["Distances"].attrs["metric"])
self.REQUIRES_DISTANCES = set([self.distance_type])
# Load in magnitude
self.m_w = fle["Mw"][:]
# Load in distances
self.distances = fle["Distances"][:]
# Load intensity measure types and levels
self.imls = hdf_arrays_to_dict(fle["IMLs"])
self.DEFINED_FOR_INTENSITY_MEASURE_TYPES = set(self._supported_imts())
if "SA" in self.imls and "T" not in self.imls:
raise ValueError("Spectral Acceleration must be accompanied by "
"periods")
# Get the standard deviations
self._setup_standard_deviations(fle)
if "Amplification" in fle:
self._setup_amplification(fle) | [
"def",
"init",
"(",
"self",
",",
"fle",
"=",
"None",
")",
":",
"if",
"fle",
"is",
"None",
":",
"fname",
"=",
"self",
".",
"kwargs",
".",
"get",
"(",
"'gmpe_table'",
",",
"self",
".",
"GMPE_TABLE",
")",
"if",
"fname",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'You forgot to set GMPETable.GMPE_TABLE!'",
")",
"elif",
"os",
".",
"path",
".",
"isabs",
"(",
"fname",
")",
":",
"self",
".",
"GMPE_TABLE",
"=",
"fname",
"else",
":",
"# NB: (hackish) GMPE_DIR must be set externally",
"self",
".",
"GMPE_TABLE",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"GMPE_DIR",
",",
"fname",
")",
")",
"fle",
"=",
"h5py",
".",
"File",
"(",
"self",
".",
"GMPE_TABLE",
",",
"\"r\"",
")",
"try",
":",
"# this is the format inside the datastore",
"self",
".",
"distance_type",
"=",
"fle",
"[",
"\"distance_type\"",
"]",
".",
"value",
"except",
"KeyError",
":",
"# this is the original format outside the datastore",
"self",
".",
"distance_type",
"=",
"decode",
"(",
"fle",
"[",
"\"Distances\"",
"]",
".",
"attrs",
"[",
"\"metric\"",
"]",
")",
"self",
".",
"REQUIRES_DISTANCES",
"=",
"set",
"(",
"[",
"self",
".",
"distance_type",
"]",
")",
"# Load in magnitude",
"self",
".",
"m_w",
"=",
"fle",
"[",
"\"Mw\"",
"]",
"[",
":",
"]",
"# Load in distances",
"self",
".",
"distances",
"=",
"fle",
"[",
"\"Distances\"",
"]",
"[",
":",
"]",
"# Load intensity measure types and levels",
"self",
".",
"imls",
"=",
"hdf_arrays_to_dict",
"(",
"fle",
"[",
"\"IMLs\"",
"]",
")",
"self",
".",
"DEFINED_FOR_INTENSITY_MEASURE_TYPES",
"=",
"set",
"(",
"self",
".",
"_supported_imts",
"(",
")",
")",
"if",
"\"SA\"",
"in",
"self",
".",
"imls",
"and",
"\"T\"",
"not",
"in",
"self",
".",
"imls",
":",
"raise",
"ValueError",
"(",
"\"Spectral Acceleration must be accompanied by \"",
"\"periods\"",
")",
"# Get the standard deviations",
"self",
".",
"_setup_standard_deviations",
"(",
"fle",
")",
"if",
"\"Amplification\"",
"in",
"fle",
":",
"self",
".",
"_setup_amplification",
"(",
"fle",
")"
] | Executes the preprocessing steps at the instantiation stage to read in
the tables from hdf5 and hold them in memory. | [
"Executes",
"the",
"preprocessing",
"steps",
"at",
"the",
"instantiation",
"stage",
"to",
"read",
"in",
"the",
"tables",
"from",
"hdf5",
"and",
"hold",
"them",
"in",
"memory",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L306-L342 |
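The `init` entry above supports two HDF5 layouts and distinguishes them with a try/except on a missing dataset. A minimal sketch of that probing logic; the file name and its contents are hypothetical, and the sketch uses the `[()]` scalar read instead of the older `.value` accessor that appears in the entry:

```python
import h5py

# Illustrative only: probe the two table layouts accepted above.
with h5py.File("table.hdf5", "r") as fle:
    try:
        # datastore-style layout: the metric is a scalar dataset
        distance_type = fle["distance_type"][()]
    except KeyError:
        # original layout: the metric is an attribute of "Distances"
        distance_type = fle["Distances"].attrs["metric"]
    m_w = fle["Mw"][:]
    distances = fle["Distances"][:]
```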
275 | gem/oq-engine | openquake/hazardlib/gsim/gmpe_table.py | GMPETable._setup_amplification | def _setup_amplification(self, fle):
"""
If amplification data is specified then reads into memory and updates
the required rupture and site parameters
"""
self.amplification = AmplificationTable(fle["Amplification"],
self.m_w,
self.distances)
if self.amplification.element == "Sites":
self.REQUIRES_SITES_PARAMETERS = set(
[self.amplification.parameter])
elif self.amplification.element == "Rupture":
# set the site and rupture parameters on the instance
self.REQUIRES_SITES_PARAMETERS = set()
self.REQUIRES_RUPTURE_PARAMETERS = (
self.REQUIRES_RUPTURE_PARAMETERS |
{self.amplification.parameter}) | python | def _setup_amplification(self, fle):
"""
If amplification data is specified then reads into memory and updates
the required rupture and site parameters
"""
self.amplification = AmplificationTable(fle["Amplification"],
self.m_w,
self.distances)
if self.amplification.element == "Sites":
self.REQUIRES_SITES_PARAMETERS = set(
[self.amplification.parameter])
elif self.amplification.element == "Rupture":
# set the site and rupture parameters on the instance
self.REQUIRES_SITES_PARAMETERS = set()
self.REQUIRES_RUPTURE_PARAMETERS = (
self.REQUIRES_RUPTURE_PARAMETERS |
{self.amplification.parameter}) | [
"def",
"_setup_amplification",
"(",
"self",
",",
"fle",
")",
":",
"self",
".",
"amplification",
"=",
"AmplificationTable",
"(",
"fle",
"[",
"\"Amplification\"",
"]",
",",
"self",
".",
"m_w",
",",
"self",
".",
"distances",
")",
"if",
"self",
".",
"amplification",
".",
"element",
"==",
"\"Sites\"",
":",
"self",
".",
"REQUIRES_SITES_PARAMETERS",
"=",
"set",
"(",
"[",
"self",
".",
"amplification",
".",
"parameter",
"]",
")",
"elif",
"self",
".",
"amplification",
".",
"element",
"==",
"\"Rupture\"",
":",
"# set the site and rupture parameters on the instance",
"self",
".",
"REQUIRES_SITES_PARAMETERS",
"=",
"set",
"(",
")",
"self",
".",
"REQUIRES_RUPTURE_PARAMETERS",
"=",
"(",
"self",
".",
"REQUIRES_RUPTURE_PARAMETERS",
"|",
"{",
"self",
".",
"amplification",
".",
"parameter",
"}",
")"
] | If amplification data is specified then reads into memory and updates
the required rupture and site parameters | [
"If",
"amplification",
"data",
"is",
"specified",
"then",
"reads",
"into",
"memory",
"and",
"updates",
"the",
"required",
"rupture",
"and",
"site",
"parameters"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L364-L380 |
276 | gem/oq-engine | openquake/hazardlib/gsim/gmpe_table.py | GMPETable._supported_imts | def _supported_imts(self):
"""
Updates the list of supported IMTs from the tables
"""
imt_list = []
for key in self.imls:
if "SA" in key:
imt_list.append(imt_module.SA)
elif key == "T":
continue
else:
try:
factory = getattr(imt_module, key)
except Exception:
continue
imt_list.append(factory)
return imt_list | python | def _supported_imts(self):
"""
Updates the list of supported IMTs from the tables
"""
imt_list = []
for key in self.imls:
if "SA" in key:
imt_list.append(imt_module.SA)
elif key == "T":
continue
else:
try:
factory = getattr(imt_module, key)
except Exception:
continue
imt_list.append(factory)
return imt_list | [
"def",
"_supported_imts",
"(",
"self",
")",
":",
"imt_list",
"=",
"[",
"]",
"for",
"key",
"in",
"self",
".",
"imls",
":",
"if",
"\"SA\"",
"in",
"key",
":",
"imt_list",
".",
"append",
"(",
"imt_module",
".",
"SA",
")",
"elif",
"key",
"==",
"\"T\"",
":",
"continue",
"else",
":",
"try",
":",
"factory",
"=",
"getattr",
"(",
"imt_module",
",",
"key",
")",
"except",
"Exception",
":",
"continue",
"imt_list",
".",
"append",
"(",
"factory",
")",
"return",
"imt_list"
] | Updates the list of supported IMTs from the tables | [
"Updates",
"the",
"list",
"of",
"supported",
"IMTs",
"from",
"the",
"tables"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L382-L398 |
277 | gem/oq-engine | openquake/hazardlib/gsim/gmpe_table.py | GMPETable.get_mean_and_stddevs | def get_mean_and_stddevs(self, sctx, rctx, dctx, imt, stddev_types):
"""
Returns the mean and standard deviations
"""
# Return Distance Tables
imls = self._return_tables(rctx.mag, imt, "IMLs")
# Get distance vector for the given magnitude
idx = numpy.searchsorted(self.m_w, rctx.mag)
dists = self.distances[:, 0, idx - 1]
# Get mean and standard deviations
mean = self._get_mean(imls, dctx, dists)
stddevs = self._get_stddevs(dists, rctx.mag, dctx, imt, stddev_types)
if self.amplification:
# Apply amplification
mean_amp, sigma_amp = self.amplification.get_amplification_factors(
imt,
sctx,
rctx,
getattr(dctx, self.distance_type),
stddev_types)
mean = numpy.log(mean) + numpy.log(mean_amp)
for iloc in range(len(stddev_types)):
stddevs[iloc] *= sigma_amp[iloc]
return mean, stddevs
else:
return numpy.log(mean), stddevs | python | def get_mean_and_stddevs(self, sctx, rctx, dctx, imt, stddev_types):
"""
Returns the mean and standard deviations
"""
# Return Distance Tables
imls = self._return_tables(rctx.mag, imt, "IMLs")
# Get distance vector for the given magnitude
idx = numpy.searchsorted(self.m_w, rctx.mag)
dists = self.distances[:, 0, idx - 1]
# Get mean and standard deviations
mean = self._get_mean(imls, dctx, dists)
stddevs = self._get_stddevs(dists, rctx.mag, dctx, imt, stddev_types)
if self.amplification:
# Apply amplification
mean_amp, sigma_amp = self.amplification.get_amplification_factors(
imt,
sctx,
rctx,
getattr(dctx, self.distance_type),
stddev_types)
mean = numpy.log(mean) + numpy.log(mean_amp)
for iloc in range(len(stddev_types)):
stddevs[iloc] *= sigma_amp[iloc]
return mean, stddevs
else:
return numpy.log(mean), stddevs | [
"def",
"get_mean_and_stddevs",
"(",
"self",
",",
"sctx",
",",
"rctx",
",",
"dctx",
",",
"imt",
",",
"stddev_types",
")",
":",
"# Return Distance Tables",
"imls",
"=",
"self",
".",
"_return_tables",
"(",
"rctx",
".",
"mag",
",",
"imt",
",",
"\"IMLs\"",
")",
"# Get distance vector for the given magnitude",
"idx",
"=",
"numpy",
".",
"searchsorted",
"(",
"self",
".",
"m_w",
",",
"rctx",
".",
"mag",
")",
"dists",
"=",
"self",
".",
"distances",
"[",
":",
",",
"0",
",",
"idx",
"-",
"1",
"]",
"# Get mean and standard deviations",
"mean",
"=",
"self",
".",
"_get_mean",
"(",
"imls",
",",
"dctx",
",",
"dists",
")",
"stddevs",
"=",
"self",
".",
"_get_stddevs",
"(",
"dists",
",",
"rctx",
".",
"mag",
",",
"dctx",
",",
"imt",
",",
"stddev_types",
")",
"if",
"self",
".",
"amplification",
":",
"# Apply amplification",
"mean_amp",
",",
"sigma_amp",
"=",
"self",
".",
"amplification",
".",
"get_amplification_factors",
"(",
"imt",
",",
"sctx",
",",
"rctx",
",",
"getattr",
"(",
"dctx",
",",
"self",
".",
"distance_type",
")",
",",
"stddev_types",
")",
"mean",
"=",
"numpy",
".",
"log",
"(",
"mean",
")",
"+",
"numpy",
".",
"log",
"(",
"mean_amp",
")",
"for",
"iloc",
"in",
"range",
"(",
"len",
"(",
"stddev_types",
")",
")",
":",
"stddevs",
"[",
"iloc",
"]",
"*=",
"sigma_amp",
"[",
"iloc",
"]",
"return",
"mean",
",",
"stddevs",
"else",
":",
"return",
"numpy",
".",
"log",
"(",
"mean",
")",
",",
"stddevs"
] | Returns the mean and standard deviations | [
"Returns",
"the",
"mean",
"and",
"standard",
"deviations"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L400-L425 |
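Note that `get_mean_and_stddevs` above returns the mean in natural-log units and, when amplification tables are present, combines median and amplification factor in log space. A tiny sketch of that combination with made-up numbers:

```python
import numpy as np

# Hypothetical medians (linear units) and amplification factors per site.
mean = np.array([0.10, 0.05, 0.02])
mean_amp = np.array([1.5, 1.2, 1.0])

# Combination in natural-log space, as in the amplified branch above.
ln_mean = np.log(mean) + np.log(mean_amp)
print(np.exp(ln_mean))  # identical to mean * mean_amp
```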
278 | gem/oq-engine | openquake/hazardlib/gsim/gmpe_table.py | GMPETable._get_stddevs | def _get_stddevs(self, dists, mag, dctx, imt, stddev_types):
"""
Returns the total standard deviation of the intensity measure level
from the tables.
:param fle:
HDF5 data stream as instance of :class:`h5py.File`
:param distances:
The distance vector for the given magnitude and IMT
:param key:
The distance type
:param mag:
The rupture magnitude
"""
stddevs = []
for stddev_type in stddev_types:
if stddev_type not in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES:
raise ValueError("Standard Deviation type %s not supported"
% stddev_type)
sigma = self._return_tables(mag, imt, stddev_type)
interpolator_std = interp1d(dists, sigma,
bounds_error=False)
stddev = interpolator_std(getattr(dctx, self.distance_type))
stddev[getattr(dctx, self.distance_type) < dists[0]] = sigma[0]
stddev[getattr(dctx, self.distance_type) > dists[-1]] = sigma[-1]
stddevs.append(stddev)
return stddevs | python | def _get_stddevs(self, dists, mag, dctx, imt, stddev_types):
"""
Returns the total standard deviation of the intensity measure level
from the tables.
:param fle:
HDF5 data stream as instance of :class:`h5py.File`
:param distances:
The distance vector for the given magnitude and IMT
:param key:
The distance type
:param mag:
The rupture magnitude
"""
stddevs = []
for stddev_type in stddev_types:
if stddev_type not in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES:
raise ValueError("Standard Deviation type %s not supported"
% stddev_type)
sigma = self._return_tables(mag, imt, stddev_type)
interpolator_std = interp1d(dists, sigma,
bounds_error=False)
stddev = interpolator_std(getattr(dctx, self.distance_type))
stddev[getattr(dctx, self.distance_type) < dists[0]] = sigma[0]
stddev[getattr(dctx, self.distance_type) > dists[-1]] = sigma[-1]
stddevs.append(stddev)
return stddevs | [
"def",
"_get_stddevs",
"(",
"self",
",",
"dists",
",",
"mag",
",",
"dctx",
",",
"imt",
",",
"stddev_types",
")",
":",
"stddevs",
"=",
"[",
"]",
"for",
"stddev_type",
"in",
"stddev_types",
":",
"if",
"stddev_type",
"not",
"in",
"self",
".",
"DEFINED_FOR_STANDARD_DEVIATION_TYPES",
":",
"raise",
"ValueError",
"(",
"\"Standard Deviation type %s not supported\"",
"%",
"stddev_type",
")",
"sigma",
"=",
"self",
".",
"_return_tables",
"(",
"mag",
",",
"imt",
",",
"stddev_type",
")",
"interpolator_std",
"=",
"interp1d",
"(",
"dists",
",",
"sigma",
",",
"bounds_error",
"=",
"False",
")",
"stddev",
"=",
"interpolator_std",
"(",
"getattr",
"(",
"dctx",
",",
"self",
".",
"distance_type",
")",
")",
"stddev",
"[",
"getattr",
"(",
"dctx",
",",
"self",
".",
"distance_type",
")",
"<",
"dists",
"[",
"0",
"]",
"]",
"=",
"sigma",
"[",
"0",
"]",
"stddev",
"[",
"getattr",
"(",
"dctx",
",",
"self",
".",
"distance_type",
")",
">",
"dists",
"[",
"-",
"1",
"]",
"]",
"=",
"sigma",
"[",
"-",
"1",
"]",
"stddevs",
".",
"append",
"(",
"stddev",
")",
"return",
"stddevs"
] | Returns the total standard deviation of the intensity measure level
from the tables.
:param fle:
HDF5 data stream as instance of :class:`h5py.File`
:param distances:
The distance vector for the given magnitude and IMT
:param key:
The distance type
:param mag:
The rupture magnitude | [
"Returns",
"the",
"total",
"standard",
"deviation",
"of",
"the",
"intensity",
"measure",
"level",
"from",
"the",
"tables",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L454-L480 |
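The `_get_stddevs` entry above interpolates sigma over distance with `bounds_error=False` and then clamps out-of-range points to the end values of the table. A self-contained sketch of that clamping pattern with hypothetical nodes and distances:

```python
import numpy as np
from scipy.interpolate import interp1d

# Hypothetical distance nodes, sigma values and requested distances.
dists = np.array([10., 50., 100., 200.])
sigma = np.array([0.60, 0.65, 0.70, 0.75])
rrup = np.array([5., 30., 150., 400.])      # includes out-of-range points

interp = interp1d(dists, sigma, bounds_error=False)
stddev = interp(rrup)                        # NaN outside [10, 200] km
stddev[rrup < dists[0]] = sigma[0]           # clamp below the first node
stddev[rrup > dists[-1]] = sigma[-1]         # clamp above the last node
print(stddev)                                # [0.6, 0.625, 0.725, 0.75]
```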
279 | gem/oq-engine | openquake/hazardlib/gsim/gmpe_table.py | GMPETable._return_tables | def _return_tables(self, mag, imt, val_type):
"""
Returns the vector of ground motions or standard deviations
corresponding to the specific magnitude and intensity measure type.
:param val_type:
String indicating the type of data {"IMLs", "Total", "Inter" etc}
"""
if imt.name in 'PGA PGV':
# Get scalar imt
if val_type == "IMLs":
iml_table = self.imls[imt.name][:]
else:
iml_table = self.stddevs[val_type][imt.name][:]
n_d, n_s, n_m = iml_table.shape
iml_table = iml_table.reshape([n_d, n_m])
else:
if val_type == "IMLs":
periods = self.imls["T"][:]
iml_table = self.imls["SA"][:]
else:
periods = self.stddevs[val_type]["T"][:]
iml_table = self.stddevs[val_type]["SA"][:]
low_period = round(periods[0], 7)
high_period = round(periods[-1], 7)
if (round(imt.period, 7) < low_period) or (
round(imt.period, 7) > high_period):
raise ValueError("Spectral period %.3f outside of valid range "
"(%.3f to %.3f)" % (imt.period, periods[0],
periods[-1]))
# Apply log-log interpolation for spectral period
interpolator = interp1d(numpy.log10(periods),
numpy.log10(iml_table),
axis=1)
iml_table = 10. ** interpolator(numpy.log10(imt.period))
return self.apply_magnitude_interpolation(mag, iml_table) | python | def _return_tables(self, mag, imt, val_type):
"""
Returns the vector of ground motions or standard deviations
corresponding to the specific magnitude and intensity measure type.
:param val_type:
String indicating the type of data {"IMLs", "Total", "Inter" etc}
"""
if imt.name in 'PGA PGV':
# Get scalar imt
if val_type == "IMLs":
iml_table = self.imls[imt.name][:]
else:
iml_table = self.stddevs[val_type][imt.name][:]
n_d, n_s, n_m = iml_table.shape
iml_table = iml_table.reshape([n_d, n_m])
else:
if val_type == "IMLs":
periods = self.imls["T"][:]
iml_table = self.imls["SA"][:]
else:
periods = self.stddevs[val_type]["T"][:]
iml_table = self.stddevs[val_type]["SA"][:]
low_period = round(periods[0], 7)
high_period = round(periods[-1], 7)
if (round(imt.period, 7) < low_period) or (
round(imt.period, 7) > high_period):
raise ValueError("Spectral period %.3f outside of valid range "
"(%.3f to %.3f)" % (imt.period, periods[0],
periods[-1]))
# Apply log-log interpolation for spectral period
interpolator = interp1d(numpy.log10(periods),
numpy.log10(iml_table),
axis=1)
iml_table = 10. ** interpolator(numpy.log10(imt.period))
return self.apply_magnitude_interpolation(mag, iml_table) | [
"def",
"_return_tables",
"(",
"self",
",",
"mag",
",",
"imt",
",",
"val_type",
")",
":",
"if",
"imt",
".",
"name",
"in",
"'PGA PGV'",
":",
"# Get scalar imt",
"if",
"val_type",
"==",
"\"IMLs\"",
":",
"iml_table",
"=",
"self",
".",
"imls",
"[",
"imt",
".",
"name",
"]",
"[",
":",
"]",
"else",
":",
"iml_table",
"=",
"self",
".",
"stddevs",
"[",
"val_type",
"]",
"[",
"imt",
".",
"name",
"]",
"[",
":",
"]",
"n_d",
",",
"n_s",
",",
"n_m",
"=",
"iml_table",
".",
"shape",
"iml_table",
"=",
"iml_table",
".",
"reshape",
"(",
"[",
"n_d",
",",
"n_m",
"]",
")",
"else",
":",
"if",
"val_type",
"==",
"\"IMLs\"",
":",
"periods",
"=",
"self",
".",
"imls",
"[",
"\"T\"",
"]",
"[",
":",
"]",
"iml_table",
"=",
"self",
".",
"imls",
"[",
"\"SA\"",
"]",
"[",
":",
"]",
"else",
":",
"periods",
"=",
"self",
".",
"stddevs",
"[",
"val_type",
"]",
"[",
"\"T\"",
"]",
"[",
":",
"]",
"iml_table",
"=",
"self",
".",
"stddevs",
"[",
"val_type",
"]",
"[",
"\"SA\"",
"]",
"[",
":",
"]",
"low_period",
"=",
"round",
"(",
"periods",
"[",
"0",
"]",
",",
"7",
")",
"high_period",
"=",
"round",
"(",
"periods",
"[",
"-",
"1",
"]",
",",
"7",
")",
"if",
"(",
"round",
"(",
"imt",
".",
"period",
",",
"7",
")",
"<",
"low_period",
")",
"or",
"(",
"round",
"(",
"imt",
".",
"period",
",",
"7",
")",
">",
"high_period",
")",
":",
"raise",
"ValueError",
"(",
"\"Spectral period %.3f outside of valid range \"",
"\"(%.3f to %.3f)\"",
"%",
"(",
"imt",
".",
"period",
",",
"periods",
"[",
"0",
"]",
",",
"periods",
"[",
"-",
"1",
"]",
")",
")",
"# Apply log-log interpolation for spectral period",
"interpolator",
"=",
"interp1d",
"(",
"numpy",
".",
"log10",
"(",
"periods",
")",
",",
"numpy",
".",
"log10",
"(",
"iml_table",
")",
",",
"axis",
"=",
"1",
")",
"iml_table",
"=",
"10.",
"**",
"interpolator",
"(",
"numpy",
".",
"log10",
"(",
"imt",
".",
"period",
")",
")",
"return",
"self",
".",
"apply_magnitude_interpolation",
"(",
"mag",
",",
"iml_table",
")"
] | Returns the vector of ground motions or standard deviations
corresponding to the specific magnitude and intensity measure type.
:param val_type:
String indicating the type of data {"IMLs", "Total", "Inter" etc} | [
"Returns",
"the",
"vector",
"of",
"ground",
"motions",
"or",
"standard",
"deviations",
"corresponding",
"to",
"the",
"specific",
"magnitude",
"and",
"intensity",
"measure",
"type",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L482-L518 |
280 | gem/oq-engine | openquake/hazardlib/gsim/gmpe_table.py | GMPETable.apply_magnitude_interpolation | def apply_magnitude_interpolation(self, mag, iml_table):
"""
Interpolates the tables to the required magnitude level
:param float mag:
Magnitude
:param iml_table:
Intensity measure level table
"""
# do not allow "mag" to exceed maximum table magnitude
if mag > self.m_w[-1]:
mag = self.m_w[-1]
# Get magnitude values
if mag < self.m_w[0] or mag > self.m_w[-1]:
raise ValueError("Magnitude %.2f outside of supported range "
"(%.2f to %.2f)" % (mag,
self.m_w[0],
self.m_w[-1]))
# It is assumed that log10 of the spectral acceleration scales
# linearly (or approximately linearly) with magnitude
m_interpolator = interp1d(self.m_w, numpy.log10(iml_table), axis=1)
return 10.0 ** m_interpolator(mag) | python | def apply_magnitude_interpolation(self, mag, iml_table):
"""
Interpolates the tables to the required magnitude level
:param float mag:
Magnitude
:param iml_table:
Intensity measure level table
"""
# do not allow "mag" to exceed maximum table magnitude
if mag > self.m_w[-1]:
mag = self.m_w[-1]
# Get magnitude values
if mag < self.m_w[0] or mag > self.m_w[-1]:
raise ValueError("Magnitude %.2f outside of supported range "
"(%.2f to %.2f)" % (mag,
self.m_w[0],
self.m_w[-1]))
# It is assumed that log10 of the spectral acceleration scales
# linearly (or approximately linearly) with magnitude
m_interpolator = interp1d(self.m_w, numpy.log10(iml_table), axis=1)
return 10.0 ** m_interpolator(mag) | [
"def",
"apply_magnitude_interpolation",
"(",
"self",
",",
"mag",
",",
"iml_table",
")",
":",
"# do not allow \"mag\" to exceed maximum table magnitude",
"if",
"mag",
">",
"self",
".",
"m_w",
"[",
"-",
"1",
"]",
":",
"mag",
"=",
"self",
".",
"m_w",
"[",
"-",
"1",
"]",
"# Get magnitude values",
"if",
"mag",
"<",
"self",
".",
"m_w",
"[",
"0",
"]",
"or",
"mag",
">",
"self",
".",
"m_w",
"[",
"-",
"1",
"]",
":",
"raise",
"ValueError",
"(",
"\"Magnitude %.2f outside of supported range \"",
"\"(%.2f to %.2f)\"",
"%",
"(",
"mag",
",",
"self",
".",
"m_w",
"[",
"0",
"]",
",",
"self",
".",
"m_w",
"[",
"-",
"1",
"]",
")",
")",
"# It is assumed that log10 of the spectral acceleration scales",
"# linearly (or approximately linearly) with magnitude",
"m_interpolator",
"=",
"interp1d",
"(",
"self",
".",
"m_w",
",",
"numpy",
".",
"log10",
"(",
"iml_table",
")",
",",
"axis",
"=",
"1",
")",
"return",
"10.0",
"**",
"m_interpolator",
"(",
"mag",
")"
] | Interpolates the tables to the required magnitude level
:param float mag:
Magnitude
:param iml_table:
Intensity measure level table | [
"Interpolates",
"the",
"tables",
"to",
"the",
"required",
"magnitude",
"level"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/gmpe_table.py#L520-L542 |
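The `apply_magnitude_interpolation` entry above clips the magnitude to the last table node and interpolates log10 of the IML table linearly in magnitude. A minimal sketch with a hypothetical two-row table:

```python
import numpy as np
from scipy.interpolate import interp1d

m_w = np.array([5.0, 6.0, 7.0, 8.0])               # hypothetical magnitude nodes
iml_table = np.array([[0.010, 0.030, 0.080, 0.200],  # [n_distances, n_mags]
                      [0.005, 0.015, 0.040, 0.100]])

mag = min(8.3, m_w[-1])                             # clip to the last table magnitude
m_interp = interp1d(m_w, np.log10(iml_table), axis=1)
print(10.0 ** m_interp(mag))                        # one IML per distance row
```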
281 | gem/oq-engine | openquake/hazardlib/gsim/sadigh_1997.py | SadighEtAl1997._get_mean_deep_soil | def _get_mean_deep_soil(self, mag, rake, rrup, is_reverse, imt):
"""
Calculate and return the mean intensity for deep soil sites.
Implements an equation from table 4.
"""
if mag <= self.NEAR_FIELD_SATURATION_MAG:
c4 = self.COEFFS_SOIL_IMT_INDEPENDENT['c4lowmag']
c5 = self.COEFFS_SOIL_IMT_INDEPENDENT['c5lowmag']
else:
c4 = self.COEFFS_SOIL_IMT_INDEPENDENT['c4himag']
c5 = self.COEFFS_SOIL_IMT_INDEPENDENT['c5himag']
c2 = self.COEFFS_SOIL_IMT_INDEPENDENT['c2']
c3 = self.COEFFS_SOIL_IMT_INDEPENDENT['c3']
C = self.COEFFS_SOIL[imt]
if is_reverse:
c1 = self.COEFFS_SOIL_IMT_INDEPENDENT['c1r']
c6 = C['c6r']
else:
c1 = self.COEFFS_SOIL_IMT_INDEPENDENT['c1ss']
c6 = C['c6ss']
# clip mag if greater than 8.5. This is to avoid
# ValueError: negative number cannot be raised to a fractional power
mag = 8.5 if mag > 8.5 else mag
return (c1 + c2 * mag + c6 + C['c7'] * ((8.5 - mag) ** 2.5)
- c3 * numpy.log(rrup + c4 * numpy.exp(c5 * mag))) | python | def _get_mean_deep_soil(self, mag, rake, rrup, is_reverse, imt):
"""
Calculate and return the mean intensity for deep soil sites.
Implements an equation from table 4.
"""
if mag <= self.NEAR_FIELD_SATURATION_MAG:
c4 = self.COEFFS_SOIL_IMT_INDEPENDENT['c4lowmag']
c5 = self.COEFFS_SOIL_IMT_INDEPENDENT['c5lowmag']
else:
c4 = self.COEFFS_SOIL_IMT_INDEPENDENT['c4himag']
c5 = self.COEFFS_SOIL_IMT_INDEPENDENT['c5himag']
c2 = self.COEFFS_SOIL_IMT_INDEPENDENT['c2']
c3 = self.COEFFS_SOIL_IMT_INDEPENDENT['c3']
C = self.COEFFS_SOIL[imt]
if is_reverse:
c1 = self.COEFFS_SOIL_IMT_INDEPENDENT['c1r']
c6 = C['c6r']
else:
c1 = self.COEFFS_SOIL_IMT_INDEPENDENT['c1ss']
c6 = C['c6ss']
# clip mag if greater than 8.5. This is to avoid
# ValueError: negative number cannot be raised to a fractional power
mag = 8.5 if mag > 8.5 else mag
return (c1 + c2 * mag + c6 + C['c7'] * ((8.5 - mag) ** 2.5)
- c3 * numpy.log(rrup + c4 * numpy.exp(c5 * mag))) | [
"def",
"_get_mean_deep_soil",
"(",
"self",
",",
"mag",
",",
"rake",
",",
"rrup",
",",
"is_reverse",
",",
"imt",
")",
":",
"if",
"mag",
"<=",
"self",
".",
"NEAR_FIELD_SATURATION_MAG",
":",
"c4",
"=",
"self",
".",
"COEFFS_SOIL_IMT_INDEPENDENT",
"[",
"'c4lowmag'",
"]",
"c5",
"=",
"self",
".",
"COEFFS_SOIL_IMT_INDEPENDENT",
"[",
"'c5lowmag'",
"]",
"else",
":",
"c4",
"=",
"self",
".",
"COEFFS_SOIL_IMT_INDEPENDENT",
"[",
"'c4himag'",
"]",
"c5",
"=",
"self",
".",
"COEFFS_SOIL_IMT_INDEPENDENT",
"[",
"'c5himag'",
"]",
"c2",
"=",
"self",
".",
"COEFFS_SOIL_IMT_INDEPENDENT",
"[",
"'c2'",
"]",
"c3",
"=",
"self",
".",
"COEFFS_SOIL_IMT_INDEPENDENT",
"[",
"'c3'",
"]",
"C",
"=",
"self",
".",
"COEFFS_SOIL",
"[",
"imt",
"]",
"if",
"is_reverse",
":",
"c1",
"=",
"self",
".",
"COEFFS_SOIL_IMT_INDEPENDENT",
"[",
"'c1r'",
"]",
"c6",
"=",
"C",
"[",
"'c6r'",
"]",
"else",
":",
"c1",
"=",
"self",
".",
"COEFFS_SOIL_IMT_INDEPENDENT",
"[",
"'c1ss'",
"]",
"c6",
"=",
"C",
"[",
"'c6ss'",
"]",
"# clip mag if greater than 8.5. This is to avoid",
"# ValueError: negative number cannot be raised to a fractional power",
"mag",
"=",
"8.5",
"if",
"mag",
">",
"8.5",
"else",
"mag",
"return",
"(",
"c1",
"+",
"c2",
"*",
"mag",
"+",
"c6",
"+",
"C",
"[",
"'c7'",
"]",
"*",
"(",
"(",
"8.5",
"-",
"mag",
")",
"**",
"2.5",
")",
"-",
"c3",
"*",
"numpy",
".",
"log",
"(",
"rrup",
"+",
"c4",
"*",
"numpy",
".",
"exp",
"(",
"c5",
"*",
"mag",
")",
")",
")"
] | Calculate and return the mean intensity for deep soil sites.
Implements an equation from table 4. | [
"Calculate",
"and",
"return",
"the",
"mean",
"intensity",
"for",
"deep",
"soil",
"sites",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sadigh_1997.py#L114-L139 |
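The `_get_mean_deep_soil` entry above evaluates a closed-form attenuation equation. The sketch below re-expresses the same functional form on a distance vector; the coefficients are placeholders for illustration only and are not the published Sadigh et al. (1997) values.

```python
import numpy as np

# Dummy coefficients for illustration only (NOT the published values).
c1, c2, c3, c4, c5, c6, c7 = -2.2, 1.0, 1.7, 2.1, 0.32, 0.0, 0.0
mag = 6.5
rrup = np.array([10., 50., 100.])

# Same functional form as the deep-soil equation above (result in ln units).
mean = (c1 + c2 * mag + c6 + c7 * (8.5 - mag) ** 2.5
        - c3 * np.log(rrup + c4 * np.exp(c5 * mag)))
print(mean)
```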
282 | gem/oq-engine | openquake/hazardlib/gsim/sadigh_1997.py | SadighEtAl1997._get_mean_rock | def _get_mean_rock(self, mag, _rake, rrup, is_reverse, imt):
"""
Calculate and return the mean intensity for rock sites.
Implements an equation from table 2.
"""
if mag <= self.NEAR_FIELD_SATURATION_MAG:
C = self.COEFFS_ROCK_LOWMAG[imt]
else:
C = self.COEFFS_ROCK_HIMAG[imt]
# clip mag if greater than 8.5. This is to avoid
# ValueError: negative number cannot be raised to a fractional power
mag = 8.5 if mag > 8.5 else mag
mean = (
C['c1'] + C['c2'] * mag + C['c3'] * ((8.5 - mag) ** 2.5)
+ C['c4'] * numpy.log(rrup + numpy.exp(C['c5'] + C['c6'] * mag))
+ C['c7'] * numpy.log(rrup + 2)
)
if is_reverse:
# footnote in table 2 says that for reverse ruptures
# the mean amplitude value should be multiplied by 1.2
mean += 0.1823215567939546 # == log(1.2)
return mean | python | def _get_mean_rock(self, mag, _rake, rrup, is_reverse, imt):
"""
Calculate and return the mean intensity for rock sites.
Implements an equation from table 2.
"""
if mag <= self.NEAR_FIELD_SATURATION_MAG:
C = self.COEFFS_ROCK_LOWMAG[imt]
else:
C = self.COEFFS_ROCK_HIMAG[imt]
# clip mag if greater than 8.5. This is to avoid
# ValueError: negative number cannot be raised to a fractional power
mag = 8.5 if mag > 8.5 else mag
mean = (
C['c1'] + C['c2'] * mag + C['c3'] * ((8.5 - mag) ** 2.5)
+ C['c4'] * numpy.log(rrup + numpy.exp(C['c5'] + C['c6'] * mag))
+ C['c7'] * numpy.log(rrup + 2)
)
if is_reverse:
# footnote in table 2 says that for reverse ruptures
# the mean amplitude value should be multiplied by 1.2
mean += 0.1823215567939546 # == log(1.2)
return mean | [
"def",
"_get_mean_rock",
"(",
"self",
",",
"mag",
",",
"_rake",
",",
"rrup",
",",
"is_reverse",
",",
"imt",
")",
":",
"if",
"mag",
"<=",
"self",
".",
"NEAR_FIELD_SATURATION_MAG",
":",
"C",
"=",
"self",
".",
"COEFFS_ROCK_LOWMAG",
"[",
"imt",
"]",
"else",
":",
"C",
"=",
"self",
".",
"COEFFS_ROCK_HIMAG",
"[",
"imt",
"]",
"# clip mag if greater than 8.5. This is to avoid",
"# ValueError: negative number cannot be raised to a fractional power",
"mag",
"=",
"8.5",
"if",
"mag",
">",
"8.5",
"else",
"mag",
"mean",
"=",
"(",
"C",
"[",
"'c1'",
"]",
"+",
"C",
"[",
"'c2'",
"]",
"*",
"mag",
"+",
"C",
"[",
"'c3'",
"]",
"*",
"(",
"(",
"8.5",
"-",
"mag",
")",
"**",
"2.5",
")",
"+",
"C",
"[",
"'c4'",
"]",
"*",
"numpy",
".",
"log",
"(",
"rrup",
"+",
"numpy",
".",
"exp",
"(",
"C",
"[",
"'c5'",
"]",
"+",
"C",
"[",
"'c6'",
"]",
"*",
"mag",
")",
")",
"+",
"C",
"[",
"'c7'",
"]",
"*",
"numpy",
".",
"log",
"(",
"rrup",
"+",
"2",
")",
")",
"if",
"is_reverse",
":",
"# footnote in table 2 says that for reverse ruptures",
"# the mean amplitude value should be multiplied by 1.2",
"mean",
"+=",
"0.1823215567939546",
"# == log(1.2)",
"return",
"mean"
] | Calculate and return the mean intensity for rock sites.
Implements an equation from table 2. | [
"Calculate",
"and",
"return",
"the",
"mean",
"intensity",
"for",
"rock",
"sites",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sadigh_1997.py#L141-L163 |
283 | gem/oq-engine | openquake/hazardlib/gsim/sadigh_1997.py | SadighEtAl1997._get_stddev_rock | def _get_stddev_rock(self, mag, imt):
"""
Calculate and return total standard deviation for rock sites.
Implements formulae from table 3.
"""
C = self.COEFFS_ROCK_STDDERR[imt]
if mag > C['maxmag']:
return C['maxsigma']
else:
return C['sigma0'] + C['magfactor'] * mag | python | def _get_stddev_rock(self, mag, imt):
"""
Calculate and return total standard deviation for rock sites.
Implements formulae from table 3.
"""
C = self.COEFFS_ROCK_STDDERR[imt]
if mag > C['maxmag']:
return C['maxsigma']
else:
return C['sigma0'] + C['magfactor'] * mag | [
"def",
"_get_stddev_rock",
"(",
"self",
",",
"mag",
",",
"imt",
")",
":",
"C",
"=",
"self",
".",
"COEFFS_ROCK_STDDERR",
"[",
"imt",
"]",
"if",
"mag",
">",
"C",
"[",
"'maxmag'",
"]",
":",
"return",
"C",
"[",
"'maxsigma'",
"]",
"else",
":",
"return",
"C",
"[",
"'sigma0'",
"]",
"+",
"C",
"[",
"'magfactor'",
"]",
"*",
"mag"
] | Calculate and return total standard deviation for rock sites.
Implements formulae from table 3. | [
"Calculate",
"and",
"return",
"total",
"standard",
"deviation",
"for",
"rock",
"sites",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sadigh_1997.py#L165-L175 |
284 | gem/oq-engine | openquake/hazardlib/gsim/sadigh_1997.py | SadighEtAl1997._get_stddev_deep_soil | def _get_stddev_deep_soil(self, mag, imt):
"""
Calculate and return total standard deviation for deep soil sites.
Implements formulae from the last column of table 4.
"""
# footnote from table 4 says that stderr for magnitudes over 7
# is equal to one of magnitude 7.
if mag > 7:
mag = 7
C = self.COEFFS_SOIL[imt]
return C['sigma0'] + C['magfactor'] * mag | python | def _get_stddev_deep_soil(self, mag, imt):
"""
Calculate and return total standard deviation for deep soil sites.
Implements formulae from the last column of table 4.
"""
# footnote from table 4 says that stderr for magnitudes over 7
# is equal to one of magnitude 7.
if mag > 7:
mag = 7
C = self.COEFFS_SOIL[imt]
return C['sigma0'] + C['magfactor'] * mag | [
"def",
"_get_stddev_deep_soil",
"(",
"self",
",",
"mag",
",",
"imt",
")",
":",
"# footnote from table 4 says that stderr for magnitudes over 7",
"# is equal to one of magnitude 7.",
"if",
"mag",
">",
"7",
":",
"mag",
"=",
"7",
"C",
"=",
"self",
".",
"COEFFS_SOIL",
"[",
"imt",
"]",
"return",
"C",
"[",
"'sigma0'",
"]",
"+",
"C",
"[",
"'magfactor'",
"]",
"*",
"mag"
] | Calculate and return total standard deviation for deep soil sites.
Implements formulae from the last column of table 4. | [
"Calculate",
"and",
"return",
"total",
"standard",
"deviation",
"for",
"deep",
"soil",
"sites",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sadigh_1997.py#L177-L188 |
285 | gem/oq-engine | openquake/commands/zip.py | zip | def zip(what, archive_zip='', risk_file=''):
"""
Zip into an archive one or two job.ini files with all related files
"""
if os.path.isdir(what):
oqzip.zip_all(what)
elif what.endswith('.xml') and '<logicTree' in open(what).read(512):
# hack to see if the NRML file is of kind logicTree
oqzip.zip_source_model(what, archive_zip)
elif what.endswith('.xml') and '<exposureModel' in open(what).read(512):
# hack to see if the NRML file is of kind exposureModel
oqzip.zip_exposure(what, archive_zip)
elif what.endswith('.ini'): # a job.ini
oqzip.zip_job(what, archive_zip, risk_file)
else:
sys.exit('Cannot zip %s' % what) | python | def zip(what, archive_zip='', risk_file=''):
"""
Zip into an archive one or two job.ini files with all related files
"""
if os.path.isdir(what):
oqzip.zip_all(what)
elif what.endswith('.xml') and '<logicTree' in open(what).read(512):
# hack to see if the NRML file is of kind logicTree
oqzip.zip_source_model(what, archive_zip)
elif what.endswith('.xml') and '<exposureModel' in open(what).read(512):
# hack to see if the NRML file is of kind exposureModel
oqzip.zip_exposure(what, archive_zip)
elif what.endswith('.ini'): # a job.ini
oqzip.zip_job(what, archive_zip, risk_file)
else:
sys.exit('Cannot zip %s' % what) | [
"def",
"zip",
"(",
"what",
",",
"archive_zip",
"=",
"''",
",",
"risk_file",
"=",
"''",
")",
":",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"what",
")",
":",
"oqzip",
".",
"zip_all",
"(",
"what",
")",
"elif",
"what",
".",
"endswith",
"(",
"'.xml'",
")",
"and",
"'<logicTree'",
"in",
"open",
"(",
"what",
")",
".",
"read",
"(",
"512",
")",
":",
"# hack to see if the NRML file is of kind logicTree",
"oqzip",
".",
"zip_source_model",
"(",
"what",
",",
"archive_zip",
")",
"elif",
"what",
".",
"endswith",
"(",
"'.xml'",
")",
"and",
"'<exposureModel'",
"in",
"open",
"(",
"what",
")",
".",
"read",
"(",
"512",
")",
":",
"# hack to see if the NRML file is of kind exposureModel",
"oqzip",
".",
"zip_exposure",
"(",
"what",
",",
"archive_zip",
")",
"elif",
"what",
".",
"endswith",
"(",
"'.ini'",
")",
":",
"# a job.ini",
"oqzip",
".",
"zip_job",
"(",
"what",
",",
"archive_zip",
",",
"risk_file",
")",
"else",
":",
"sys",
".",
"exit",
"(",
"'Cannot zip %s'",
"%",
"what",
")"
] | Zip into an archive one or two job.ini files with all related files | [
"Zip",
"into",
"an",
"archive",
"one",
"or",
"two",
"job",
".",
"ini",
"files",
"with",
"all",
"related",
"files"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/zip.py#L25-L40 |
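
The dispatch in `zip` hinges on sniffing the first 512 bytes of an XML file to guess its NRML kind. A stripped-down sketch of only that classification step, using a hypothetical `classify` helper instead of the real `oqzip` calls:

```python
import os

def classify(path):
    # mirror the checks used above: directories, logic-tree NRML files,
    # exposure-model NRML files and job.ini files take different branches
    if os.path.isdir(path):
        return 'directory'
    if path.endswith('.xml'):
        with open(path) as f:
            head = f.read(512)  # cheap sniff, avoids parsing the whole file
        if '<logicTree' in head:
            return 'source model logic tree'
        if '<exposureModel' in head:
            return 'exposure model'
        return 'other NRML'
    if path.endswith('.ini'):
        return 'job configuration'
    return 'unknown'

print(classify('job_hazard.ini'))  # -> 'job configuration'
```
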
286 | gem/oq-engine | openquake/commands/reduce.py | reduce | def reduce(fname, reduction_factor):
"""
Produce a submodel from `fname` by sampling the nodes randomly.
Supports source models, site models and exposure models. As a special
case, it is also able to reduce .csv files by sampling the lines.
This is a debugging utility to reduce large computations to small ones.
"""
if fname.endswith('.csv'):
with open(fname) as f:
line = f.readline() # read the first line
if csv.Sniffer().has_header(line):
header = line
all_lines = f.readlines()
else:
header = None
f.seek(0)
all_lines = f.readlines()
lines = general.random_filter(all_lines, reduction_factor)
shutil.copy(fname, fname + '.bak')
print('Copied the original file in %s.bak' % fname)
_save_csv(fname, lines, header)
print('Extracted %d lines out of %d' % (len(lines), len(all_lines)))
return
elif fname.endswith('.npy'):
array = numpy.load(fname)
shutil.copy(fname, fname + '.bak')
print('Copied the original file in %s.bak' % fname)
arr = numpy.array(general.random_filter(array, reduction_factor))
numpy.save(fname, arr)
print('Extracted %d rows out of %d' % (len(arr), len(array)))
return
node = nrml.read(fname)
model = node[0]
if model.tag.endswith('exposureModel'):
total = len(model.assets)
model.assets.nodes = general.random_filter(
model.assets, reduction_factor)
num_nodes = len(model.assets)
elif model.tag.endswith('siteModel'):
total = len(model)
model.nodes = general.random_filter(model, reduction_factor)
num_nodes = len(model)
elif model.tag.endswith('sourceModel'):
reduce_source_model(fname, reduction_factor)
return
elif model.tag.endswith('logicTree'):
for smpath in logictree.collect_info(fname).smpaths:
reduce_source_model(smpath, reduction_factor)
return
else:
raise RuntimeError('Unknown model tag: %s' % model.tag)
save_bak(fname, node, num_nodes, total) | python | def reduce(fname, reduction_factor):
"""
Produce a submodel from `fname` by sampling the nodes randomly.
Supports source models, site models and exposure models. As a special
case, it is also able to reduce .csv files by sampling the lines.
This is a debugging utility to reduce large computations to small ones.
"""
if fname.endswith('.csv'):
with open(fname) as f:
line = f.readline() # read the first line
if csv.Sniffer().has_header(line):
header = line
all_lines = f.readlines()
else:
header = None
f.seek(0)
all_lines = f.readlines()
lines = general.random_filter(all_lines, reduction_factor)
shutil.copy(fname, fname + '.bak')
print('Copied the original file in %s.bak' % fname)
_save_csv(fname, lines, header)
print('Extracted %d lines out of %d' % (len(lines), len(all_lines)))
return
elif fname.endswith('.npy'):
array = numpy.load(fname)
shutil.copy(fname, fname + '.bak')
print('Copied the original file in %s.bak' % fname)
arr = numpy.array(general.random_filter(array, reduction_factor))
numpy.save(fname, arr)
print('Extracted %d rows out of %d' % (len(arr), len(array)))
return
node = nrml.read(fname)
model = node[0]
if model.tag.endswith('exposureModel'):
total = len(model.assets)
model.assets.nodes = general.random_filter(
model.assets, reduction_factor)
num_nodes = len(model.assets)
elif model.tag.endswith('siteModel'):
total = len(model)
model.nodes = general.random_filter(model, reduction_factor)
num_nodes = len(model)
elif model.tag.endswith('sourceModel'):
reduce_source_model(fname, reduction_factor)
return
elif model.tag.endswith('logicTree'):
for smpath in logictree.collect_info(fname).smpaths:
reduce_source_model(smpath, reduction_factor)
return
else:
raise RuntimeError('Unknown model tag: %s' % model.tag)
save_bak(fname, node, num_nodes, total) | [
"def",
"reduce",
"(",
"fname",
",",
"reduction_factor",
")",
":",
"if",
"fname",
".",
"endswith",
"(",
"'.csv'",
")",
":",
"with",
"open",
"(",
"fname",
")",
"as",
"f",
":",
"line",
"=",
"f",
".",
"readline",
"(",
")",
"# read the first line",
"if",
"csv",
".",
"Sniffer",
"(",
")",
".",
"has_header",
"(",
"line",
")",
":",
"header",
"=",
"line",
"all_lines",
"=",
"f",
".",
"readlines",
"(",
")",
"else",
":",
"header",
"=",
"None",
"f",
".",
"seek",
"(",
"0",
")",
"all_lines",
"=",
"f",
".",
"readlines",
"(",
")",
"lines",
"=",
"general",
".",
"random_filter",
"(",
"all_lines",
",",
"reduction_factor",
")",
"shutil",
".",
"copy",
"(",
"fname",
",",
"fname",
"+",
"'.bak'",
")",
"print",
"(",
"'Copied the original file in %s.bak'",
"%",
"fname",
")",
"_save_csv",
"(",
"fname",
",",
"lines",
",",
"header",
")",
"print",
"(",
"'Extracted %d lines out of %d'",
"%",
"(",
"len",
"(",
"lines",
")",
",",
"len",
"(",
"all_lines",
")",
")",
")",
"return",
"elif",
"fname",
".",
"endswith",
"(",
"'.npy'",
")",
":",
"array",
"=",
"numpy",
".",
"load",
"(",
"fname",
")",
"shutil",
".",
"copy",
"(",
"fname",
",",
"fname",
"+",
"'.bak'",
")",
"print",
"(",
"'Copied the original file in %s.bak'",
"%",
"fname",
")",
"arr",
"=",
"numpy",
".",
"array",
"(",
"general",
".",
"random_filter",
"(",
"array",
",",
"reduction_factor",
")",
")",
"numpy",
".",
"save",
"(",
"fname",
",",
"arr",
")",
"print",
"(",
"'Extracted %d rows out of %d'",
"%",
"(",
"len",
"(",
"arr",
")",
",",
"len",
"(",
"array",
")",
")",
")",
"return",
"node",
"=",
"nrml",
".",
"read",
"(",
"fname",
")",
"model",
"=",
"node",
"[",
"0",
"]",
"if",
"model",
".",
"tag",
".",
"endswith",
"(",
"'exposureModel'",
")",
":",
"total",
"=",
"len",
"(",
"model",
".",
"assets",
")",
"model",
".",
"assets",
".",
"nodes",
"=",
"general",
".",
"random_filter",
"(",
"model",
".",
"assets",
",",
"reduction_factor",
")",
"num_nodes",
"=",
"len",
"(",
"model",
".",
"assets",
")",
"elif",
"model",
".",
"tag",
".",
"endswith",
"(",
"'siteModel'",
")",
":",
"total",
"=",
"len",
"(",
"model",
")",
"model",
".",
"nodes",
"=",
"general",
".",
"random_filter",
"(",
"model",
",",
"reduction_factor",
")",
"num_nodes",
"=",
"len",
"(",
"model",
")",
"elif",
"model",
".",
"tag",
".",
"endswith",
"(",
"'sourceModel'",
")",
":",
"reduce_source_model",
"(",
"fname",
",",
"reduction_factor",
")",
"return",
"elif",
"model",
".",
"tag",
".",
"endswith",
"(",
"'logicTree'",
")",
":",
"for",
"smpath",
"in",
"logictree",
".",
"collect_info",
"(",
"fname",
")",
".",
"smpaths",
":",
"reduce_source_model",
"(",
"smpath",
",",
"reduction_factor",
")",
"return",
"else",
":",
"raise",
"RuntimeError",
"(",
"'Unknown model tag: %s'",
"%",
"model",
".",
"tag",
")",
"save_bak",
"(",
"fname",
",",
"node",
",",
"num_nodes",
",",
"total",
")"
] | Produce a submodel from `fname` by sampling the nodes randomly.
Supports source models, site models and exposure models. As a special
case, it is also able to reduce .csv files by sampling the lines.
This is a debugging utility to reduce large computations to small ones. | [
"Produce",
"a",
"submodel",
"from",
"fname",
"by",
"sampling",
"the",
"nodes",
"randomly",
".",
"Supports",
"source",
"models",
"site",
"models",
"and",
"exposure",
"models",
".",
"As",
"a",
"special",
"case",
"it",
"is",
"also",
"able",
"to",
"reduce",
".",
"csv",
"files",
"by",
"sampling",
"the",
"lines",
".",
"This",
"is",
"a",
"debugging",
"utility",
"to",
"reduce",
"large",
"computations",
"to",
"small",
"ones",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/reduce.py#L60-L111 |
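
The `.csv` branch of `reduce` boils down to keeping each line with a fixed probability. A minimal stand-in for `general.random_filter` (the real helper may differ in details such as how the seed is handled), enough to show why the extracted count is roughly `reduction_factor` times the original:

```python
import random

def random_filter(items, reduction_factor, seed=42):
    # keep each item independently with probability `reduction_factor`
    rng = random.Random(seed)
    return [item for item in items if rng.random() <= reduction_factor]

lines = ['row%d\n' % i for i in range(1000)]
sample = random_filter(lines, 0.1)
print('Extracted %d lines out of %d' % (len(sample), len(lines)))  # ~100 lines kept
```
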
287 | gem/oq-engine | openquake/hazardlib/geo/surface/base.py | downsample_mesh | def downsample_mesh(mesh, tol=1.0):
"""
Returns a mesh sampled at a lower resolution - if the difference
in azimuth is larger than the specified tolerance a turn is assumed
:returns:
Downsampled mesh as instance of :class:
openquake.hazardlib.geo.mesh.RectangularMesh
"""
idx = _find_turning_points(mesh, tol)
if mesh.depths is not None:
return RectangularMesh(lons=mesh.lons[:, idx],
lats=mesh.lats[:, idx],
depths=mesh.depths[:, idx])
else:
return RectangularMesh(lons=mesh.lons[:, idx],
lats=mesh.lats[:, idx]) | python | def downsample_mesh(mesh, tol=1.0):
"""
Returns a mesh sampled at a lower resolution - if the difference
in azimuth is larger than the specified tolerance a turn is assumed
:returns:
Downsampled mesh as instance of :class:
openquake.hazardlib.geo.mesh.RectangularMesh
"""
idx = _find_turning_points(mesh, tol)
if mesh.depths is not None:
return RectangularMesh(lons=mesh.lons[:, idx],
lats=mesh.lats[:, idx],
depths=mesh.depths[:, idx])
else:
return RectangularMesh(lons=mesh.lons[:, idx],
lats=mesh.lats[:, idx]) | [
"def",
"downsample_mesh",
"(",
"mesh",
",",
"tol",
"=",
"1.0",
")",
":",
"idx",
"=",
"_find_turning_points",
"(",
"mesh",
",",
"tol",
")",
"if",
"mesh",
".",
"depths",
"is",
"not",
"None",
":",
"return",
"RectangularMesh",
"(",
"lons",
"=",
"mesh",
".",
"lons",
"[",
":",
",",
"idx",
"]",
",",
"lats",
"=",
"mesh",
".",
"lats",
"[",
":",
",",
"idx",
"]",
",",
"depths",
"=",
"mesh",
".",
"depths",
"[",
":",
",",
"idx",
"]",
")",
"else",
":",
"return",
"RectangularMesh",
"(",
"lons",
"=",
"mesh",
".",
"lons",
"[",
":",
",",
"idx",
"]",
",",
"lats",
"=",
"mesh",
".",
"lats",
"[",
":",
",",
"idx",
"]",
")"
] | Returns a mesh sampled at a lower resolution - if the difference
in azimuth is larger than the specified tolerance a turn is assumed
:returns:
Downsampled mesh as instance of :class:
openquake.hazardlib.geo.mesh.RectangularMesh | [
"Returns",
"a",
"mesh",
"sampled",
"at",
"a",
"lower",
"resolution",
"-",
"if",
"the",
"difference",
"in",
"azimuth",
"is",
"larger",
"than",
"the",
"specified",
"tolerance",
"a",
"turn",
"is",
"assumed"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L64-L80 |
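
`downsample_mesh` keeps only the columns flagged by `_find_turning_points`, i.e. the places where the along-strike direction changes by more than `tol` degrees. That helper is not shown in this record, so the sketch below is a rough flat-earth approximation of the turning-point idea, not the library implementation:

```python
import numpy

def turning_points(lons, lats, tol=1.0):
    # flat-earth azimuth of each segment of the polyline
    azimuths = numpy.degrees(numpy.arctan2(numpy.diff(lons), numpy.diff(lats)))
    keep = [0]                       # always keep the first vertex
    ref = azimuths[0]                # direction of the last retained segment
    for i, azi in enumerate(azimuths[1:], start=1):
        if abs(azi - ref) > tol:     # direction changed: keep the bend vertex
            keep.append(i)
            ref = azi
    keep.append(len(lons) - 1)       # always keep the last vertex
    return keep

lons = numpy.array([0.0, 0.1, 0.2, 0.3, 0.3, 0.3])
lats = numpy.array([0.0, 0.0, 0.0, 0.0, 0.1, 0.2])
print(turning_points(lons, lats))    # -> [0, 3, 5]; the bend at index 3 survives
```
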
288 | gem/oq-engine | openquake/hazardlib/geo/surface/base.py | downsample_trace | def downsample_trace(mesh, tol=1.0):
"""
Downsamples the upper edge of a fault within a rectangular mesh, retaining
node points only if changes in direction on the order of tol are found
:returns:
Downsampled edge as a numpy array of [long, lat, depth]
"""
idx = _find_turning_points(mesh, tol)
if mesh.depths is not None:
return numpy.column_stack([mesh.lons[0, idx],
mesh.lats[0, idx],
mesh.depths[0, idx]])
else:
return numpy.column_stack([mesh.lons[0, idx], mesh.lats[0, idx]]) | python | def downsample_trace(mesh, tol=1.0):
"""
Downsamples the upper edge of a fault within a rectangular mesh, retaining
node points only if changes in direction on the order of tol are found
:returns:
Downsampled edge as a numpy array of [long, lat, depth]
"""
idx = _find_turning_points(mesh, tol)
if mesh.depths is not None:
return numpy.column_stack([mesh.lons[0, idx],
mesh.lats[0, idx],
mesh.depths[0, idx]])
else:
return numpy.column_stack([mesh.lons[0, idx], mesh.lats[0, idx]]) | [
"def",
"downsample_trace",
"(",
"mesh",
",",
"tol",
"=",
"1.0",
")",
":",
"idx",
"=",
"_find_turning_points",
"(",
"mesh",
",",
"tol",
")",
"if",
"mesh",
".",
"depths",
"is",
"not",
"None",
":",
"return",
"numpy",
".",
"column_stack",
"(",
"[",
"mesh",
".",
"lons",
"[",
"0",
",",
"idx",
"]",
",",
"mesh",
".",
"lats",
"[",
"0",
",",
"idx",
"]",
",",
"mesh",
".",
"depths",
"[",
"0",
",",
"idx",
"]",
"]",
")",
"else",
":",
"return",
"numpy",
".",
"column_stack",
"(",
"[",
"mesh",
".",
"lons",
"[",
"0",
",",
"idx",
"]",
",",
"mesh",
".",
"lats",
"[",
"0",
",",
"idx",
"]",
"]",
")"
] | Downsamples the upper edge of a fault within a rectangular mesh, retaining
node points only if changes in direction on the order of tol are found
:returns:
Downsampled edge as a numpy array of [long, lat, depth] | [
"Downsamples",
"the",
"upper",
"edge",
"of",
"a",
"fault",
"within",
"a",
"rectangular",
"mesh",
"retaining",
"node",
"points",
"only",
"if",
"changes",
"in",
"direction",
"on",
"the",
"order",
"of",
"tol",
"are",
"found"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L83-L97 |
289 | gem/oq-engine | openquake/hazardlib/geo/surface/base.py | BaseSurface.get_ry0_distance | def get_ry0_distance(self, mesh):
"""
Compute the minimum distance between each point of a mesh and the great
circle arcs perpendicular to the average strike direction of the
fault trace and passing through the end-points of the trace.
:param mesh:
:class:`~openquake.hazardlib.geo.mesh.Mesh` of points to calculate
Ry0-distance to.
:returns:
Numpy array of distances in km.
"""
# This computes ry0 by using an average strike direction
top_edge = self.mesh[0:1]
mean_strike = self.get_strike()
dst1 = geodetic.distance_to_arc(top_edge.lons[0, 0],
top_edge.lats[0, 0],
(mean_strike + 90.) % 360,
mesh.lons, mesh.lats)
dst2 = geodetic.distance_to_arc(top_edge.lons[0, -1],
top_edge.lats[0, -1],
(mean_strike + 90.) % 360,
mesh.lons, mesh.lats)
# Find the points on the rupture
# Get the shortest distance from the two lines
idx = numpy.sign(dst1) == numpy.sign(dst2)
dst = numpy.zeros_like(dst1)
dst[idx] = numpy.fmin(numpy.abs(dst1[idx]), numpy.abs(dst2[idx]))
return dst | python | def get_ry0_distance(self, mesh):
"""
Compute the minimum distance between each point of a mesh and the great
circle arcs perpendicular to the average strike direction of the
fault trace and passing through the end-points of the trace.
:param mesh:
:class:`~openquake.hazardlib.geo.mesh.Mesh` of points to calculate
Ry0-distance to.
:returns:
Numpy array of distances in km.
"""
# This computes ry0 by using an average strike direction
top_edge = self.mesh[0:1]
mean_strike = self.get_strike()
dst1 = geodetic.distance_to_arc(top_edge.lons[0, 0],
top_edge.lats[0, 0],
(mean_strike + 90.) % 360,
mesh.lons, mesh.lats)
dst2 = geodetic.distance_to_arc(top_edge.lons[0, -1],
top_edge.lats[0, -1],
(mean_strike + 90.) % 360,
mesh.lons, mesh.lats)
# Find the points on the rupture
# Get the shortest distance from the two lines
idx = numpy.sign(dst1) == numpy.sign(dst2)
dst = numpy.zeros_like(dst1)
dst[idx] = numpy.fmin(numpy.abs(dst1[idx]), numpy.abs(dst2[idx]))
return dst | [
"def",
"get_ry0_distance",
"(",
"self",
",",
"mesh",
")",
":",
"# This computes ry0 by using an average strike direction",
"top_edge",
"=",
"self",
".",
"mesh",
"[",
"0",
":",
"1",
"]",
"mean_strike",
"=",
"self",
".",
"get_strike",
"(",
")",
"dst1",
"=",
"geodetic",
".",
"distance_to_arc",
"(",
"top_edge",
".",
"lons",
"[",
"0",
",",
"0",
"]",
",",
"top_edge",
".",
"lats",
"[",
"0",
",",
"0",
"]",
",",
"(",
"mean_strike",
"+",
"90.",
")",
"%",
"360",
",",
"mesh",
".",
"lons",
",",
"mesh",
".",
"lats",
")",
"dst2",
"=",
"geodetic",
".",
"distance_to_arc",
"(",
"top_edge",
".",
"lons",
"[",
"0",
",",
"-",
"1",
"]",
",",
"top_edge",
".",
"lats",
"[",
"0",
",",
"-",
"1",
"]",
",",
"(",
"mean_strike",
"+",
"90.",
")",
"%",
"360",
",",
"mesh",
".",
"lons",
",",
"mesh",
".",
"lats",
")",
"# Find the points on the rupture",
"# Get the shortest distance from the two lines",
"idx",
"=",
"numpy",
".",
"sign",
"(",
"dst1",
")",
"==",
"numpy",
".",
"sign",
"(",
"dst2",
")",
"dst",
"=",
"numpy",
".",
"zeros_like",
"(",
"dst1",
")",
"dst",
"[",
"idx",
"]",
"=",
"numpy",
".",
"fmin",
"(",
"numpy",
".",
"abs",
"(",
"dst1",
"[",
"idx",
"]",
")",
",",
"numpy",
".",
"abs",
"(",
"dst2",
"[",
"idx",
"]",
")",
")",
"return",
"dst"
] | Compute the minimum distance between each point of a mesh and the great
circle arcs perpendicular to the average strike direction of the
fault trace and passing through the end-points of the trace.
:param mesh:
:class:`~openquake.hazardlib.geo.mesh.Mesh` of points to calculate
Ry0-distance to.
:returns:
Numpy array of distances in km. | [
"Compute",
"the",
"minimum",
"distance",
"between",
"each",
"point",
"of",
"a",
"mesh",
"and",
"the",
"great",
"circle",
"arcs",
"perpendicular",
"to",
"the",
"average",
"strike",
"direction",
"of",
"the",
"fault",
"trace",
"and",
"passing",
"through",
"the",
"end",
"-",
"points",
"of",
"the",
"trace",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L148-L180 |
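
The closing lines of `get_ry0_distance` encode "zero between the two end arcs, shortest distance beyond them": sites whose two signed distances agree in sign lie past one rupture end. The sign trick can be exercised on plain arrays (toy distances, not geodetic ones):

```python
import numpy

# signed distances of four sites from the two end arcs (toy numbers):
# equal signs -> the site lies beyond one of the rupture ends,
# opposite signs -> it sits between them, where ry0 is zero
dst1 = numpy.array([5.0, -3.0, 2.0, -8.0])
dst2 = numpy.array([9.0,  4.0, 6.0, -1.0])

idx = numpy.sign(dst1) == numpy.sign(dst2)
ry0 = numpy.zeros_like(dst1)
ry0[idx] = numpy.fmin(numpy.abs(dst1[idx]), numpy.abs(dst2[idx]))
print(ry0)  # -> [5. 0. 2. 1.]
```
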
290 | gem/oq-engine | openquake/hazardlib/geo/surface/base.py | BaseSurface.get_rx_distance | def get_rx_distance(self, mesh):
"""
Compute distance between each point of mesh and surface's great circle
arc.
Distance is measured perpendicular to the rupture strike, from
the surface projection of the updip edge of the rupture, with
the down dip direction being positive (this distance is usually
called ``Rx``).
In other words, is the horizontal distance to top edge of rupture
measured perpendicular to the strike. Values on the hanging wall
are positive, values on the footwall are negative.
:param mesh:
:class:`~openquake.hazardlib.geo.mesh.Mesh` of points to calculate
Rx-distance to.
:returns:
Numpy array of distances in km.
"""
top_edge = self.mesh[0:1]
dists = []
if top_edge.lons.shape[1] < 3:
i = 0
p1 = Point(
top_edge.lons[0, i],
top_edge.lats[0, i],
top_edge.depths[0, i]
)
p2 = Point(
top_edge.lons[0, i + 1], top_edge.lats[0, i + 1],
top_edge.depths[0, i + 1]
)
azimuth = p1.azimuth(p2)
dists.append(
geodetic.distance_to_arc(
p1.longitude, p1.latitude, azimuth,
mesh.lons, mesh.lats
)
)
else:
for i in range(top_edge.lons.shape[1] - 1):
p1 = Point(
top_edge.lons[0, i],
top_edge.lats[0, i],
top_edge.depths[0, i]
)
p2 = Point(
top_edge.lons[0, i + 1],
top_edge.lats[0, i + 1],
top_edge.depths[0, i + 1]
)
# Swapping
if i == 0:
pt = p1
p1 = p2
p2 = pt
# Computing azimuth and distance
if i == 0 or i == top_edge.lons.shape[1] - 2:
azimuth = p1.azimuth(p2)
tmp = geodetic.distance_to_semi_arc(p1.longitude,
p1.latitude,
azimuth,
mesh.lons, mesh.lats)
else:
tmp = geodetic.min_distance_to_segment(
numpy.array([p1.longitude, p2.longitude]),
numpy.array([p1.latitude, p2.latitude]),
mesh.lons, mesh.lats)
# Correcting the sign of the distance
if i == 0:
tmp *= -1
dists.append(tmp)
# Computing distances
dists = numpy.array(dists)
iii = abs(dists).argmin(axis=0)
dst = dists[iii, list(range(dists.shape[1]))]
return dst | python | def get_rx_distance(self, mesh):
"""
Compute distance between each point of mesh and surface's great circle
arc.
Distance is measured perpendicular to the rupture strike, from
the surface projection of the updip edge of the rupture, with
the down dip direction being positive (this distance is usually
called ``Rx``).
In other words, is the horizontal distance to top edge of rupture
measured perpendicular to the strike. Values on the hanging wall
are positive, values on the footwall are negative.
:param mesh:
:class:`~openquake.hazardlib.geo.mesh.Mesh` of points to calculate
Rx-distance to.
:returns:
Numpy array of distances in km.
"""
top_edge = self.mesh[0:1]
dists = []
if top_edge.lons.shape[1] < 3:
i = 0
p1 = Point(
top_edge.lons[0, i],
top_edge.lats[0, i],
top_edge.depths[0, i]
)
p2 = Point(
top_edge.lons[0, i + 1], top_edge.lats[0, i + 1],
top_edge.depths[0, i + 1]
)
azimuth = p1.azimuth(p2)
dists.append(
geodetic.distance_to_arc(
p1.longitude, p1.latitude, azimuth,
mesh.lons, mesh.lats
)
)
else:
for i in range(top_edge.lons.shape[1] - 1):
p1 = Point(
top_edge.lons[0, i],
top_edge.lats[0, i],
top_edge.depths[0, i]
)
p2 = Point(
top_edge.lons[0, i + 1],
top_edge.lats[0, i + 1],
top_edge.depths[0, i + 1]
)
# Swapping
if i == 0:
pt = p1
p1 = p2
p2 = pt
# Computing azimuth and distance
if i == 0 or i == top_edge.lons.shape[1] - 2:
azimuth = p1.azimuth(p2)
tmp = geodetic.distance_to_semi_arc(p1.longitude,
p1.latitude,
azimuth,
mesh.lons, mesh.lats)
else:
tmp = geodetic.min_distance_to_segment(
numpy.array([p1.longitude, p2.longitude]),
numpy.array([p1.latitude, p2.latitude]),
mesh.lons, mesh.lats)
# Correcting the sign of the distance
if i == 0:
tmp *= -1
dists.append(tmp)
# Computing distances
dists = numpy.array(dists)
iii = abs(dists).argmin(axis=0)
dst = dists[iii, list(range(dists.shape[1]))]
return dst | [
"def",
"get_rx_distance",
"(",
"self",
",",
"mesh",
")",
":",
"top_edge",
"=",
"self",
".",
"mesh",
"[",
"0",
":",
"1",
"]",
"dists",
"=",
"[",
"]",
"if",
"top_edge",
".",
"lons",
".",
"shape",
"[",
"1",
"]",
"<",
"3",
":",
"i",
"=",
"0",
"p1",
"=",
"Point",
"(",
"top_edge",
".",
"lons",
"[",
"0",
",",
"i",
"]",
",",
"top_edge",
".",
"lats",
"[",
"0",
",",
"i",
"]",
",",
"top_edge",
".",
"depths",
"[",
"0",
",",
"i",
"]",
")",
"p2",
"=",
"Point",
"(",
"top_edge",
".",
"lons",
"[",
"0",
",",
"i",
"+",
"1",
"]",
",",
"top_edge",
".",
"lats",
"[",
"0",
",",
"i",
"+",
"1",
"]",
",",
"top_edge",
".",
"depths",
"[",
"0",
",",
"i",
"+",
"1",
"]",
")",
"azimuth",
"=",
"p1",
".",
"azimuth",
"(",
"p2",
")",
"dists",
".",
"append",
"(",
"geodetic",
".",
"distance_to_arc",
"(",
"p1",
".",
"longitude",
",",
"p1",
".",
"latitude",
",",
"azimuth",
",",
"mesh",
".",
"lons",
",",
"mesh",
".",
"lats",
")",
")",
"else",
":",
"for",
"i",
"in",
"range",
"(",
"top_edge",
".",
"lons",
".",
"shape",
"[",
"1",
"]",
"-",
"1",
")",
":",
"p1",
"=",
"Point",
"(",
"top_edge",
".",
"lons",
"[",
"0",
",",
"i",
"]",
",",
"top_edge",
".",
"lats",
"[",
"0",
",",
"i",
"]",
",",
"top_edge",
".",
"depths",
"[",
"0",
",",
"i",
"]",
")",
"p2",
"=",
"Point",
"(",
"top_edge",
".",
"lons",
"[",
"0",
",",
"i",
"+",
"1",
"]",
",",
"top_edge",
".",
"lats",
"[",
"0",
",",
"i",
"+",
"1",
"]",
",",
"top_edge",
".",
"depths",
"[",
"0",
",",
"i",
"+",
"1",
"]",
")",
"# Swapping",
"if",
"i",
"==",
"0",
":",
"pt",
"=",
"p1",
"p1",
"=",
"p2",
"p2",
"=",
"pt",
"# Computing azimuth and distance",
"if",
"i",
"==",
"0",
"or",
"i",
"==",
"top_edge",
".",
"lons",
".",
"shape",
"[",
"1",
"]",
"-",
"2",
":",
"azimuth",
"=",
"p1",
".",
"azimuth",
"(",
"p2",
")",
"tmp",
"=",
"geodetic",
".",
"distance_to_semi_arc",
"(",
"p1",
".",
"longitude",
",",
"p1",
".",
"latitude",
",",
"azimuth",
",",
"mesh",
".",
"lons",
",",
"mesh",
".",
"lats",
")",
"else",
":",
"tmp",
"=",
"geodetic",
".",
"min_distance_to_segment",
"(",
"numpy",
".",
"array",
"(",
"[",
"p1",
".",
"longitude",
",",
"p2",
".",
"longitude",
"]",
")",
",",
"numpy",
".",
"array",
"(",
"[",
"p1",
".",
"latitude",
",",
"p2",
".",
"latitude",
"]",
")",
",",
"mesh",
".",
"lons",
",",
"mesh",
".",
"lats",
")",
"# Correcting the sign of the distance",
"if",
"i",
"==",
"0",
":",
"tmp",
"*=",
"-",
"1",
"dists",
".",
"append",
"(",
"tmp",
")",
"# Computing distances",
"dists",
"=",
"numpy",
".",
"array",
"(",
"dists",
")",
"iii",
"=",
"abs",
"(",
"dists",
")",
".",
"argmin",
"(",
"axis",
"=",
"0",
")",
"dst",
"=",
"dists",
"[",
"iii",
",",
"list",
"(",
"range",
"(",
"dists",
".",
"shape",
"[",
"1",
"]",
")",
")",
"]",
"return",
"dst"
] | Compute distance between each point of mesh and surface's great circle
arc.
Distance is measured perpendicular to the rupture strike, from
the surface projection of the updip edge of the rupture, with
the down dip direction being positive (this distance is usually
called ``Rx``).
In other words, is the horizontal distance to top edge of rupture
measured perpendicular to the strike. Values on the hanging wall
are positive, values on the footwall are negative.
:param mesh:
:class:`~openquake.hazardlib.geo.mesh.Mesh` of points to calculate
Rx-distance to.
:returns:
Numpy array of distances in km. | [
"Compute",
"distance",
"between",
"each",
"point",
"of",
"mesh",
"and",
"surface",
"s",
"great",
"circle",
"arc",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L182-L266 |
291 | gem/oq-engine | openquake/hazardlib/geo/surface/base.py | BaseSurface.get_top_edge_depth | def get_top_edge_depth(self):
"""
Return minimum depth of surface's top edge.
:returns:
Float value, the vertical distance between the earth surface
and the shallowest point in surface's top edge in km.
"""
top_edge = self.mesh[0:1]
if top_edge.depths is None:
return 0
else:
return numpy.min(top_edge.depths) | python | def get_top_edge_depth(self):
"""
Return minimum depth of surface's top edge.
:returns:
Float value, the vertical distance between the earth surface
and the shallowest point in surface's top edge in km.
"""
top_edge = self.mesh[0:1]
if top_edge.depths is None:
return 0
else:
return numpy.min(top_edge.depths) | [
"def",
"get_top_edge_depth",
"(",
"self",
")",
":",
"top_edge",
"=",
"self",
".",
"mesh",
"[",
"0",
":",
"1",
"]",
"if",
"top_edge",
".",
"depths",
"is",
"None",
":",
"return",
"0",
"else",
":",
"return",
"numpy",
".",
"min",
"(",
"top_edge",
".",
"depths",
")"
] | Return minimum depth of surface's top edge.
:returns:
Float value, the vertical distance between the earth surface
and the shallowest point in surface's top edge in km. | [
"Return",
"minimum",
"depth",
"of",
"surface",
"s",
"top",
"edge",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L268-L280 |
292 | gem/oq-engine | openquake/hazardlib/geo/surface/base.py | BaseSurface.get_area | def get_area(self):
"""
Compute area as the sum of the mesh cells area values.
"""
mesh = self.mesh
_, _, _, area = mesh.get_cell_dimensions()
return numpy.sum(area) | python | def get_area(self):
"""
Compute area as the sum of the mesh cells area values.
"""
mesh = self.mesh
_, _, _, area = mesh.get_cell_dimensions()
return numpy.sum(area) | [
"def",
"get_area",
"(",
"self",
")",
":",
"mesh",
"=",
"self",
".",
"mesh",
"_",
",",
"_",
",",
"_",
",",
"area",
"=",
"mesh",
".",
"get_cell_dimensions",
"(",
")",
"return",
"numpy",
".",
"sum",
"(",
"area",
")"
] | Compute area as the sum of the mesh cells area values. | [
"Compute",
"area",
"as",
"the",
"sum",
"of",
"the",
"mesh",
"cells",
"area",
"values",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L290-L297 |
293 | gem/oq-engine | openquake/hazardlib/geo/surface/base.py | BaseSurface.get_surface_boundaries | def get_surface_boundaries(self):
"""
Returns the boundaries in the same format as a multiplanar
surface, with two one-element lists of lons and lats
"""
mesh = self.mesh
lons = numpy.concatenate((mesh.lons[0, :],
mesh.lons[1:, -1],
mesh.lons[-1, :-1][::-1],
mesh.lons[:-1, 0][::-1]))
lats = numpy.concatenate((mesh.lats[0, :],
mesh.lats[1:, -1],
mesh.lats[-1, :-1][::-1],
mesh.lats[:-1, 0][::-1]))
return [lons], [lats] | python | def get_surface_boundaries(self):
"""
Returns the boundaries in the same format as a multiplanar
surface, with two one-element lists of lons and lats
"""
mesh = self.mesh
lons = numpy.concatenate((mesh.lons[0, :],
mesh.lons[1:, -1],
mesh.lons[-1, :-1][::-1],
mesh.lons[:-1, 0][::-1]))
lats = numpy.concatenate((mesh.lats[0, :],
mesh.lats[1:, -1],
mesh.lats[-1, :-1][::-1],
mesh.lats[:-1, 0][::-1]))
return [lons], [lats] | [
"def",
"get_surface_boundaries",
"(",
"self",
")",
":",
"mesh",
"=",
"self",
".",
"mesh",
"lons",
"=",
"numpy",
".",
"concatenate",
"(",
"(",
"mesh",
".",
"lons",
"[",
"0",
",",
":",
"]",
",",
"mesh",
".",
"lons",
"[",
"1",
":",
",",
"-",
"1",
"]",
",",
"mesh",
".",
"lons",
"[",
"-",
"1",
",",
":",
"-",
"1",
"]",
"[",
":",
":",
"-",
"1",
"]",
",",
"mesh",
".",
"lons",
"[",
":",
"-",
"1",
",",
"0",
"]",
"[",
":",
":",
"-",
"1",
"]",
")",
")",
"lats",
"=",
"numpy",
".",
"concatenate",
"(",
"(",
"mesh",
".",
"lats",
"[",
"0",
",",
":",
"]",
",",
"mesh",
".",
"lats",
"[",
"1",
":",
",",
"-",
"1",
"]",
",",
"mesh",
".",
"lats",
"[",
"-",
"1",
",",
":",
"-",
"1",
"]",
"[",
":",
":",
"-",
"1",
"]",
",",
"mesh",
".",
"lats",
"[",
":",
"-",
"1",
",",
"0",
"]",
"[",
":",
":",
"-",
"1",
"]",
")",
")",
"return",
"[",
"lons",
"]",
",",
"[",
"lats",
"]"
] | Returns the boundaries in the same format as a multiplanar
surface, with two one-element lists of lons and lats | [
"Returns",
"the",
"boundaries",
"in",
"the",
"same",
"format",
"as",
"a",
"multiplanar",
"surface",
"with",
"two",
"one",
"-",
"element",
"lists",
"of",
"lons",
"and",
"lats"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L326-L340 |
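
The concatenation order in `get_surface_boundaries` walks the mesh perimeter: top row, right column below the top, reversed bottom row, reversed left column. The same slicing on a tiny index grid makes the walk order visible (synthetic values, no geography involved):

```python
import numpy

# a 3x4 grid of fake longitudes, numbered so the perimeter walk is obvious
lons = numpy.arange(12.0).reshape(3, 4)

boundary = numpy.concatenate((lons[0, :],           # top edge, left to right
                              lons[1:, -1],          # right edge, going down
                              lons[-1, :-1][::-1],   # bottom edge, right to left
                              lons[:-1, 0][::-1]))   # left edge, going back up
print(boundary)  # -> [0. 1. 2. 3. 7. 11. 10. 9. 8. 4. 0.], a ring that closes on the start corner
```
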
294 | gem/oq-engine | openquake/hazardlib/geo/surface/base.py | BaseSurface.get_resampled_top_edge | def get_resampled_top_edge(self, angle_var=0.1):
"""
This methods computes a simplified representation of a fault top edge
by removing the points that are not describing a change of direction,
provided a certain tolerance angle.
:param float angle_var:
Number representing the maximum deviation (in degrees) admitted
without the creation of a new segment
:returns:
A :class:`~openquake.hazardlib.geo.line.Line` representing the
rupture surface's top edge.
"""
mesh = self.mesh
top_edge = [Point(mesh.lons[0][0], mesh.lats[0][0], mesh.depths[0][0])]
for i in range(len(mesh.triangulate()[1][0]) - 1):
v1 = numpy.asarray(mesh.triangulate()[1][0][i])
v2 = numpy.asarray(mesh.triangulate()[1][0][i + 1])
cosang = numpy.dot(v1, v2)
sinang = numpy.linalg.norm(numpy.cross(v1, v2))
angle = math.degrees(numpy.arctan2(sinang, cosang))
if abs(angle) > angle_var:
top_edge.append(Point(mesh.lons[0][i + 1],
mesh.lats[0][i + 1],
mesh.depths[0][i + 1]))
top_edge.append(Point(mesh.lons[0][-1],
mesh.lats[0][-1], mesh.depths[0][-1]))
line_top_edge = Line(top_edge)
return line_top_edge | python | def get_resampled_top_edge(self, angle_var=0.1):
"""
This methods computes a simplified representation of a fault top edge
by removing the points that are not describing a change of direction,
provided a certain tolerance angle.
:param float angle_var:
Number representing the maximum deviation (in degrees) admitted
without the creation of a new segment
:returns:
A :class:`~openquake.hazardlib.geo.line.Line` representing the
rupture surface's top edge.
"""
mesh = self.mesh
top_edge = [Point(mesh.lons[0][0], mesh.lats[0][0], mesh.depths[0][0])]
for i in range(len(mesh.triangulate()[1][0]) - 1):
v1 = numpy.asarray(mesh.triangulate()[1][0][i])
v2 = numpy.asarray(mesh.triangulate()[1][0][i + 1])
cosang = numpy.dot(v1, v2)
sinang = numpy.linalg.norm(numpy.cross(v1, v2))
angle = math.degrees(numpy.arctan2(sinang, cosang))
if abs(angle) > angle_var:
top_edge.append(Point(mesh.lons[0][i + 1],
mesh.lats[0][i + 1],
mesh.depths[0][i + 1]))
top_edge.append(Point(mesh.lons[0][-1],
mesh.lats[0][-1], mesh.depths[0][-1]))
line_top_edge = Line(top_edge)
return line_top_edge | [
"def",
"get_resampled_top_edge",
"(",
"self",
",",
"angle_var",
"=",
"0.1",
")",
":",
"mesh",
"=",
"self",
".",
"mesh",
"top_edge",
"=",
"[",
"Point",
"(",
"mesh",
".",
"lons",
"[",
"0",
"]",
"[",
"0",
"]",
",",
"mesh",
".",
"lats",
"[",
"0",
"]",
"[",
"0",
"]",
",",
"mesh",
".",
"depths",
"[",
"0",
"]",
"[",
"0",
"]",
")",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"mesh",
".",
"triangulate",
"(",
")",
"[",
"1",
"]",
"[",
"0",
"]",
")",
"-",
"1",
")",
":",
"v1",
"=",
"numpy",
".",
"asarray",
"(",
"mesh",
".",
"triangulate",
"(",
")",
"[",
"1",
"]",
"[",
"0",
"]",
"[",
"i",
"]",
")",
"v2",
"=",
"numpy",
".",
"asarray",
"(",
"mesh",
".",
"triangulate",
"(",
")",
"[",
"1",
"]",
"[",
"0",
"]",
"[",
"i",
"+",
"1",
"]",
")",
"cosang",
"=",
"numpy",
".",
"dot",
"(",
"v1",
",",
"v2",
")",
"sinang",
"=",
"numpy",
".",
"linalg",
".",
"norm",
"(",
"numpy",
".",
"cross",
"(",
"v1",
",",
"v2",
")",
")",
"angle",
"=",
"math",
".",
"degrees",
"(",
"numpy",
".",
"arctan2",
"(",
"sinang",
",",
"cosang",
")",
")",
"if",
"abs",
"(",
"angle",
")",
">",
"angle_var",
":",
"top_edge",
".",
"append",
"(",
"Point",
"(",
"mesh",
".",
"lons",
"[",
"0",
"]",
"[",
"i",
"+",
"1",
"]",
",",
"mesh",
".",
"lats",
"[",
"0",
"]",
"[",
"i",
"+",
"1",
"]",
",",
"mesh",
".",
"depths",
"[",
"0",
"]",
"[",
"i",
"+",
"1",
"]",
")",
")",
"top_edge",
".",
"append",
"(",
"Point",
"(",
"mesh",
".",
"lons",
"[",
"0",
"]",
"[",
"-",
"1",
"]",
",",
"mesh",
".",
"lats",
"[",
"0",
"]",
"[",
"-",
"1",
"]",
",",
"mesh",
".",
"depths",
"[",
"0",
"]",
"[",
"-",
"1",
"]",
")",
")",
"line_top_edge",
"=",
"Line",
"(",
"top_edge",
")",
"return",
"line_top_edge"
] | This methods computes a simplified representation of a fault top edge
by removing the points that are not describing a change of direction,
provided a certain tolerance angle.
:param float angle_var:
Number representing the maximum deviation (in degrees) admitted
without the creation of a new segment
:returns:
A :class:`~openquake.hazardlib.geo.line.Line` representing the
rupture surface's top edge. | [
"This",
"methods",
"computes",
"a",
"simplified",
"representation",
"of",
"a",
"fault",
"top",
"edge",
"by",
"removing",
"the",
"points",
"that",
"are",
"not",
"describing",
"a",
"change",
"of",
"direction",
"provided",
"a",
"certain",
"tolerance",
"angle",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L342-L375 |
295 | gem/oq-engine | openquake/hazardlib/geo/surface/base.py | BaseSurface.get_hypo_location | def get_hypo_location(self, mesh_spacing, hypo_loc=None):
"""
The method determines the location of the hypocentre within the rupture
:param mesh:
:class:`~openquake.hazardlib.geo.mesh.Mesh` of points
:param mesh_spacing:
The desired distance between two adjacent points in source's
ruptures' mesh, in km. Mainly this parameter allows to balance
the trade-off between time needed to compute the distance
between the rupture surface and a site and the precision of that
computation.
:param hypo_loc:
Hypocentre location as fraction of rupture plane, as a tuple of
(Along Strike, Down Dip), e.g. a hypocentre located in the centroid
of the rupture would be input as (0.5, 0.5), whereas a
hypocentre located in a position 3/4 along the length, and 1/4 of
the way down dip of the rupture plane would be entered as
(0.75, 0.25).
:returns:
Hypocentre location as instance of
:class:`~openquake.hazardlib.geo.point.Point`
"""
mesh = self.mesh
centroid = mesh.get_middle_point()
if hypo_loc is None:
return centroid
total_len_y = (len(mesh.depths) - 1) * mesh_spacing
y_distance = hypo_loc[1] * total_len_y
y_node = int(numpy.round(y_distance / mesh_spacing))
total_len_x = (len(mesh.lons[y_node]) - 1) * mesh_spacing
x_distance = hypo_loc[0] * total_len_x
x_node = int(numpy.round(x_distance / mesh_spacing))
hypocentre = Point(mesh.lons[y_node][x_node],
mesh.lats[y_node][x_node],
mesh.depths[y_node][x_node])
return hypocentre | python | def get_hypo_location(self, mesh_spacing, hypo_loc=None):
"""
The method determines the location of the hypocentre within the rupture
:param mesh:
:class:`~openquake.hazardlib.geo.mesh.Mesh` of points
:param mesh_spacing:
The desired distance between two adjacent points in source's
ruptures' mesh, in km. Mainly this parameter allows to balance
the trade-off between time needed to compute the distance
between the rupture surface and a site and the precision of that
computation.
:param hypo_loc:
Hypocentre location as fraction of rupture plane, as a tuple of
(Along Strike, Down Dip), e.g. a hypocentre located in the centroid
of the rupture would be input as (0.5, 0.5), whereas a
hypocentre located in a position 3/4 along the length, and 1/4 of
the way down dip of the rupture plane would be entered as
(0.75, 0.25).
:returns:
Hypocentre location as instance of
:class:`~openquake.hazardlib.geo.point.Point`
"""
mesh = self.mesh
centroid = mesh.get_middle_point()
if hypo_loc is None:
return centroid
total_len_y = (len(mesh.depths) - 1) * mesh_spacing
y_distance = hypo_loc[1] * total_len_y
y_node = int(numpy.round(y_distance / mesh_spacing))
total_len_x = (len(mesh.lons[y_node]) - 1) * mesh_spacing
x_distance = hypo_loc[0] * total_len_x
x_node = int(numpy.round(x_distance / mesh_spacing))
hypocentre = Point(mesh.lons[y_node][x_node],
mesh.lats[y_node][x_node],
mesh.depths[y_node][x_node])
return hypocentre | [
"def",
"get_hypo_location",
"(",
"self",
",",
"mesh_spacing",
",",
"hypo_loc",
"=",
"None",
")",
":",
"mesh",
"=",
"self",
".",
"mesh",
"centroid",
"=",
"mesh",
".",
"get_middle_point",
"(",
")",
"if",
"hypo_loc",
"is",
"None",
":",
"return",
"centroid",
"total_len_y",
"=",
"(",
"len",
"(",
"mesh",
".",
"depths",
")",
"-",
"1",
")",
"*",
"mesh_spacing",
"y_distance",
"=",
"hypo_loc",
"[",
"1",
"]",
"*",
"total_len_y",
"y_node",
"=",
"int",
"(",
"numpy",
".",
"round",
"(",
"y_distance",
"/",
"mesh_spacing",
")",
")",
"total_len_x",
"=",
"(",
"len",
"(",
"mesh",
".",
"lons",
"[",
"y_node",
"]",
")",
"-",
"1",
")",
"*",
"mesh_spacing",
"x_distance",
"=",
"hypo_loc",
"[",
"0",
"]",
"*",
"total_len_x",
"x_node",
"=",
"int",
"(",
"numpy",
".",
"round",
"(",
"x_distance",
"/",
"mesh_spacing",
")",
")",
"hypocentre",
"=",
"Point",
"(",
"mesh",
".",
"lons",
"[",
"y_node",
"]",
"[",
"x_node",
"]",
",",
"mesh",
".",
"lats",
"[",
"y_node",
"]",
"[",
"x_node",
"]",
",",
"mesh",
".",
"depths",
"[",
"y_node",
"]",
"[",
"x_node",
"]",
")",
"return",
"hypocentre"
] | The method determines the location of the hypocentre within the rupture
:param mesh:
:class:`~openquake.hazardlib.geo.mesh.Mesh` of points
:param mesh_spacing:
The desired distance between two adjacent points in source's
ruptures' mesh, in km. Mainly this parameter allows to balance
the trade-off between time needed to compute the distance
between the rupture surface and a site and the precision of that
computation.
:param hypo_loc:
Hypocentre location as fraction of rupture plane, as a tuple of
(Along Strike, Down Dip), e.g. a hypocentre located in the centroid
of the rupture would be input as (0.5, 0.5), whereas a
hypocentre located in a position 3/4 along the length, and 1/4 of
the way down dip of the rupture plane would be entered as
(0.75, 0.25).
:returns:
Hypocentre location as instance of
:class:`~openquake.hazardlib.geo.point.Point` | [
"The",
"method",
"determines",
"the",
"location",
"of",
"the",
"hypocentre",
"within",
"the",
"rupture"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/base.py#L377-L414 |
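
`get_hypo_location` turns the fractional hypocentre position into mesh indices by rounding over the along-strike and down-dip lengths. A self-contained sketch of just that index arithmetic, with a hypothetical helper name and no geodesy:

```python
import numpy

def hypo_indices(n_rows, n_cols, mesh_spacing, hypo_loc):
    # hypo_loc is (along-strike fraction, down-dip fraction), e.g. (0.75, 0.25)
    total_len_y = (n_rows - 1) * mesh_spacing           # down-dip length in km
    y_node = int(numpy.round(hypo_loc[1] * total_len_y / mesh_spacing))
    total_len_x = (n_cols - 1) * mesh_spacing           # along-strike length in km
    x_node = int(numpy.round(hypo_loc[0] * total_len_x / mesh_spacing))
    return y_node, x_node

# a 5 x 9 node mesh with 2 km spacing; hypocentre 3/4 along strike, 1/4 down dip
print(hypo_indices(5, 9, 2.0, (0.75, 0.25)))  # -> (1, 6)
```
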
296 | gem/oq-engine | openquake/engine/tools/viewlog.py | viewlog | def viewlog(calc_id, host='localhost', port=8000):
"""
Extract the log of the given calculation ID from the WebUI
"""
base_url = 'http://%s:%s/v1/calc/' % (host, port)
start = 0
psize = 10 # page size
try:
while True:
url = base_url + '%d/log/%d:%d' % (calc_id, start, start + psize)
rows = json.load(urlopen(url))
for row in rows:
print(' '.join(row))
start += len(rows)
time.sleep(1)
except:
pass | python | def viewlog(calc_id, host='localhost', port=8000):
"""
Extract the log of the given calculation ID from the WebUI
"""
base_url = 'http://%s:%s/v1/calc/' % (host, port)
start = 0
psize = 10 # page size
try:
while True:
url = base_url + '%d/log/%d:%d' % (calc_id, start, start + psize)
rows = json.load(urlopen(url))
for row in rows:
print(' '.join(row))
start += len(rows)
time.sleep(1)
except:
pass | [
"def",
"viewlog",
"(",
"calc_id",
",",
"host",
"=",
"'localhost'",
",",
"port",
"=",
"8000",
")",
":",
"base_url",
"=",
"'http://%s:%s/v1/calc/'",
"%",
"(",
"host",
",",
"port",
")",
"start",
"=",
"0",
"psize",
"=",
"10",
"# page size",
"try",
":",
"while",
"True",
":",
"url",
"=",
"base_url",
"+",
"'%d/log/%d:%d'",
"%",
"(",
"calc_id",
",",
"start",
",",
"start",
"+",
"psize",
")",
"rows",
"=",
"json",
".",
"load",
"(",
"urlopen",
"(",
"url",
")",
")",
"for",
"row",
"in",
"rows",
":",
"print",
"(",
"' '",
".",
"join",
"(",
"row",
")",
")",
"start",
"+=",
"len",
"(",
"rows",
")",
"time",
".",
"sleep",
"(",
"1",
")",
"except",
":",
"pass"
] | Extract the log of the given calculation ID from the WebUI | [
"Extract",
"the",
"log",
"of",
"the",
"given",
"calculation",
"ID",
"from",
"the",
"WebUI"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/engine/tools/viewlog.py#L33-L49 |
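
`viewlog` is a polling loop over a paginated REST endpoint, fetching slices of ten rows until the request fails. A trimmed sketch of the same pattern as a reusable function; the route format string is the one shown in the record, everything else (function name, error handling) is an assumption:

```python
import json
import time
from urllib.request import urlopen

def follow_log(base_url, calc_id, page_size=10, poll_seconds=1):
    # fetch log rows in slices of `page_size`, printing new rows as they appear;
    # stop when the server refuses the request (e.g. the calculation is gone)
    start = 0
    while True:
        url = '%s%d/log/%d:%d' % (base_url, calc_id, start, start + page_size)
        try:
            rows = json.load(urlopen(url))
        except OSError:
            break
        for row in rows:
            print(' '.join(row))
        start += len(rows)
        time.sleep(poll_seconds)

# usage (requires a running WebUI):
# follow_log('http://localhost:8000/v1/calc/', 42)
```
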
297 | gem/oq-engine | openquake/baselib/parallel.py | pickle_sequence | def pickle_sequence(objects):
"""
Convert an iterable of objects into a list of pickled objects.
If the iterable contains copies, the pickling will be done only once.
If the iterable contains objects already pickled, they will not be
pickled again.
:param objects: a sequence of objects to pickle
"""
cache = {}
out = []
for obj in objects:
obj_id = id(obj)
if obj_id not in cache:
if isinstance(obj, Pickled): # already pickled
cache[obj_id] = obj
else: # pickle the object
cache[obj_id] = Pickled(obj)
out.append(cache[obj_id])
return out | python | def pickle_sequence(objects):
"""
Convert an iterable of objects into a list of pickled objects.
If the iterable contains copies, the pickling will be done only once.
If the iterable contains objects already pickled, they will not be
pickled again.
:param objects: a sequence of objects to pickle
"""
cache = {}
out = []
for obj in objects:
obj_id = id(obj)
if obj_id not in cache:
if isinstance(obj, Pickled): # already pickled
cache[obj_id] = obj
else: # pickle the object
cache[obj_id] = Pickled(obj)
out.append(cache[obj_id])
return out | [
"def",
"pickle_sequence",
"(",
"objects",
")",
":",
"cache",
"=",
"{",
"}",
"out",
"=",
"[",
"]",
"for",
"obj",
"in",
"objects",
":",
"obj_id",
"=",
"id",
"(",
"obj",
")",
"if",
"obj_id",
"not",
"in",
"cache",
":",
"if",
"isinstance",
"(",
"obj",
",",
"Pickled",
")",
":",
"# already pickled",
"cache",
"[",
"obj_id",
"]",
"=",
"obj",
"else",
":",
"# pickle the object",
"cache",
"[",
"obj_id",
"]",
"=",
"Pickled",
"(",
"obj",
")",
"out",
".",
"append",
"(",
"cache",
"[",
"obj_id",
"]",
")",
"return",
"out"
] | Convert an iterable of objects into a list of pickled objects.
If the iterable contains copies, the pickling will be done only once.
If the iterable contains objects already pickled, they will not be
pickled again.
:param objects: a sequence of objects to pickle | [
"Convert",
"an",
"iterable",
"of",
"objects",
"into",
"a",
"list",
"of",
"pickled",
"objects",
".",
"If",
"the",
"iterable",
"contains",
"copies",
"the",
"pickling",
"will",
"be",
"done",
"only",
"once",
".",
"If",
"the",
"iterable",
"contains",
"objects",
"already",
"pickled",
"they",
"will",
"not",
"be",
"pickled",
"again",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L294-L313 |
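
The core of `pickle_sequence` is caching by object identity so that a shared object is serialised only once. The same idea in miniature, using raw `pickle.dumps` instead of the `Pickled` wrapper:

```python
import pickle

def dumps_sequence(objects):
    # pickle each distinct object (by identity) only once; duplicates reuse
    # the cached byte string instead of being serialised again
    cache = {}
    out = []
    for obj in objects:
        key = id(obj)
        if key not in cache:
            cache[key] = pickle.dumps(obj)
        out.append(cache[key])
    return out

big = list(range(10000))
blobs = dumps_sequence([big, big, big, 'x'])
print(len(blobs), blobs[0] is blobs[1])  # -> 4 True: one pickling, three references
```
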
298 | gem/oq-engine | openquake/baselib/parallel.py | check_mem_usage | def check_mem_usage(soft_percent=None, hard_percent=None):
"""
Display a warning if we are running out of memory
"""
soft_percent = soft_percent or config.memory.soft_mem_limit
hard_percent = hard_percent or config.memory.hard_mem_limit
used_mem_percent = psutil.virtual_memory().percent
if used_mem_percent > hard_percent:
raise MemoryError('Using more memory than allowed by configuration '
'(Used: %d%% / Allowed: %d%%)! Shutting down.' %
(used_mem_percent, hard_percent))
elif used_mem_percent > soft_percent:
msg = 'Using over %d%% of the memory in %s!'
return msg % (used_mem_percent, socket.gethostname()) | python | def check_mem_usage(soft_percent=None, hard_percent=None):
"""
Display a warning if we are running out of memory
"""
soft_percent = soft_percent or config.memory.soft_mem_limit
hard_percent = hard_percent or config.memory.hard_mem_limit
used_mem_percent = psutil.virtual_memory().percent
if used_mem_percent > hard_percent:
raise MemoryError('Using more memory than allowed by configuration '
'(Used: %d%% / Allowed: %d%%)! Shutting down.' %
(used_mem_percent, hard_percent))
elif used_mem_percent > soft_percent:
msg = 'Using over %d%% of the memory in %s!'
return msg % (used_mem_percent, socket.gethostname()) | [
"def",
"check_mem_usage",
"(",
"soft_percent",
"=",
"None",
",",
"hard_percent",
"=",
"None",
")",
":",
"soft_percent",
"=",
"soft_percent",
"or",
"config",
".",
"memory",
".",
"soft_mem_limit",
"hard_percent",
"=",
"hard_percent",
"or",
"config",
".",
"memory",
".",
"hard_mem_limit",
"used_mem_percent",
"=",
"psutil",
".",
"virtual_memory",
"(",
")",
".",
"percent",
"if",
"used_mem_percent",
">",
"hard_percent",
":",
"raise",
"MemoryError",
"(",
"'Using more memory than allowed by configuration '",
"'(Used: %d%% / Allowed: %d%%)! Shutting down.'",
"%",
"(",
"used_mem_percent",
",",
"hard_percent",
")",
")",
"elif",
"used_mem_percent",
">",
"soft_percent",
":",
"msg",
"=",
"'Using over %d%% of the memory in %s!'",
"return",
"msg",
"%",
"(",
"used_mem_percent",
",",
"socket",
".",
"gethostname",
"(",
")",
")"
] | Display a warning if we are running out of memory | [
"Display",
"a",
"warning",
"if",
"we",
"are",
"running",
"out",
"of",
"memory"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L370-L383 |
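
`check_mem_usage` compares the system-wide memory percentage reported by `psutil` against a soft and a hard threshold. A standalone version with explicit threshold arguments instead of the engine's `config` object (requires `psutil`; the default percentages below are made up):

```python
import socket
import psutil

def memory_status(soft_percent=90, hard_percent=98):
    # hard limit -> exception, soft limit -> warning string, otherwise None
    used = psutil.virtual_memory().percent
    if used > hard_percent:
        raise MemoryError('Using %d%% of memory, above the hard limit of %d%%'
                          % (used, hard_percent))
    if used > soft_percent:
        return 'Using over %d%% of the memory in %s!' % (
            used, socket.gethostname())
    return None

print(memory_status(soft_percent=0))  # warns on any usage, just to show the message
```
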
299 | gem/oq-engine | openquake/baselib/parallel.py | init_workers | def init_workers():
"""Waiting function, used to wake up the process pool"""
setproctitle('oq-worker')
# unregister raiseMasterKilled in oq-workers to avoid deadlock
# since processes are terminated via pool.terminate()
signal.signal(signal.SIGTERM, signal.SIG_DFL)
# prctl is still useful (on Linux) to terminate all spawned processes
# when master is killed via SIGKILL
try:
import prctl
except ImportError:
pass
else:
# if the parent dies, the children die
prctl.set_pdeathsig(signal.SIGKILL) | python | def init_workers():
"""Waiting function, used to wake up the process pool"""
setproctitle('oq-worker')
# unregister raiseMasterKilled in oq-workers to avoid deadlock
# since processes are terminated via pool.terminate()
signal.signal(signal.SIGTERM, signal.SIG_DFL)
# prctl is still useful (on Linux) to terminate all spawned processes
# when master is killed via SIGKILL
try:
import prctl
except ImportError:
pass
else:
# if the parent dies, the children die
prctl.set_pdeathsig(signal.SIGKILL) | [
"def",
"init_workers",
"(",
")",
":",
"setproctitle",
"(",
"'oq-worker'",
")",
"# unregister raiseMasterKilled in oq-workers to avoid deadlock",
"# since processes are terminated via pool.terminate()",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGTERM",
",",
"signal",
".",
"SIG_DFL",
")",
"# prctl is still useful (on Linux) to terminate all spawned processes",
"# when master is killed via SIGKILL",
"try",
":",
"import",
"prctl",
"except",
"ImportError",
":",
"pass",
"else",
":",
"# if the parent dies, the children die",
"prctl",
".",
"set_pdeathsig",
"(",
"signal",
".",
"SIGKILL",
")"
] | Waiting function, used to wake up the process pool | [
"Waiting",
"function",
"used",
"to",
"wake",
"up",
"the",
"process",
"pool"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/baselib/parallel.py#L567-L581 |
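
The signal handling in `init_workers` matters because pool workers inherit the master's SIGTERM handler; resetting it to the default is what lets `pool.terminate()` stop them cleanly. A minimal pool initializer doing only that step (the Linux-specific `prctl` part is omitted here):

```python
import signal
from multiprocessing import Pool

def worker_init():
    # workers inherit the parent's signal handlers; restore the default
    # SIGTERM behaviour so that pool.terminate() can kill them cleanly
    signal.signal(signal.SIGTERM, signal.SIG_DFL)

if __name__ == '__main__':
    with Pool(2, initializer=worker_init) as pool:
        print(pool.map(abs, [-1, 2, -3]))  # -> [1, 2, 3]
```
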