code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
dists = set() for gsim in gsims: dists.update(gsim.REQUIRES_DISTANCES) return len(dists)
def get_num_distances(gsims)
:returns: the number of distances required for the given GSIMs
3.354091
2.918461
1.149267
distances = get_distances(rupture, sites, self.filter_distance) if self.maximum_distance: mask = distances <= self.maximum_distance( rupture.tectonic_region_type, rupture.mag) if mask.any(): sites, distances = sites.filter(mask), distances...
def filter(self, sites, rupture)
Filter the site collection with respect to the rupture. :param sites: Instance of :class:`openquake.hazardlib.site.SiteCollection`. :param rupture: Instance of :class:`openquake.hazardlib.source.rupture.BaseRupture` :returns: (filtered sites, dist...
5.474553
4.879865
1.121866
for param in self.REQUIRES_RUPTURE_PARAMETERS: if param == 'mag': value = rupture.mag elif param == 'strike': value = rupture.surface.get_strike() elif param == 'dip': value = rupture.surface.get_dip() elif ...
def add_rup_params(self, rupture)
Add .REQUIRES_RUPTURE_PARAMETERS to the rupture
1.790154
1.684157
1.062938
sites, dctx = self.filter(sites, rupture) for param in self.REQUIRES_DISTANCES - set([self.filter_distance]): distances = get_distances(rupture, sites, param) setattr(dctx, param, distances) reqv_obj = (self.reqv.get(rupture.tectonic_region_type) ...
def make_contexts(self, sites, rupture)
Filter the site collection with respect to the rupture and create context objects. :param sites: Instance of :class:`openquake.hazardlib.site.SiteCollection`. :param rupture: Instance of :class:`openquake.hazardlib.source.rupture.BaseRupture` :retur...
4.939309
4.968355
0.994154
sitecol = sites.complete N = len(sitecol) fewsites = N <= FEWSITES rupdata = [] # rupture data for rup, sites in self._gen_rup_sites(src, sites): try: with self.ctx_mon: sctx, dctx = self.make_contexts(sites, rup) ...
def gen_rup_contexts(self, src, sites)
:param src: a hazardlib source :param sites: the sites affected by it :yields: (rup, sctx, dctx)
3.518441
3.427898
1.026414
pmap = ProbabilityMap.build( len(imtls.array), len(self.gsims), s_sites.sids, initvalue=rup_indep) eff_ruptures = 0 for rup, sctx, dctx in self.gen_rup_contexts(src, s_sites): eff_ruptures += 1 with self.poe_mon: pnes = sel...
def poe_map(self, src, s_sites, imtls, trunclevel, rup_indep=True)
:param src: a source object :param s_sites: a filtered SiteCollection of sites around the source :param imtls: intensity measure and levels :param trunclevel: truncation level :param rup_indep: True if the ruptures are independent :returns: a ProbabilityMap instance
4.885836
4.949327
0.987172
acc = AccumDict(accum=[]) ctx_mon = monitor('disagg_contexts', measuremem=False) pne_mon = monitor('disaggregate_pne', measuremem=False) clo_mon = monitor('get_closest', measuremem=False) for rupture in ruptures: with ctx_mon: orig_dctx = Dist...
def disaggregate(self, sitecol, ruptures, iml4, truncnorm, epsilons, monitor=Monitor())
Disaggregate (separate) PoE in different contributions. :param sitecol: a SiteCollection with N sites :param ruptures: an iterator over ruptures with the same TRT :param iml4: a 4d array of IMLs of shape (N, R, M, P) :param truncnorm: an instance of scipy.stats.truncnorm :param ...
4.728471
4.179511
1.131345
if not minimum_distance: return self ctx = DistancesContext() for dist, array in vars(self).items(): small_distances = array < minimum_distance if small_distances.any(): array = array[:] # make a copy first array[small...
def roundup(self, minimum_distance)
If the minimum_distance is nonzero, returns a copy of the DistancesContext with updated distances, i.e. the ones below minimum_distance are rounded up to the minimum_distance. Otherwise, returns the original DistancesContext unchanged.
5.555187
3.81957
1.454401
if numpy.isnan(self.occurrence_rate): # nonparametric rupture # Uses the formula # # ∑ p(k|T) * p(X<x|rup)^k # # where `p(k|T)` is the probability that the rupture occurs k times # in the time span `T`, `p(X<x|rup)` is the prob...
def get_probability_no_exceedance(self, poes)
Compute and return the probability that in the time span for which the rupture is defined, the rupture itself never generates a ground motion value higher than a given level at a given site. Such calculation is performed starting from the conditional probability that an occurrence of th...
3.895436
3.902757
0.998124
return (src.__class__.__iter__ is not BaseSeismicSource.__iter__ and getattr(src, 'mutex_weight', 1) == 1 and src.splittable)
def splittable(src)
:returns: True if the source is splittable, False otherwise
13.669891
11.640126
1.174377
'''Checks if value is valid float, appends to array if valid, appends nan if not''' value = value.strip(' ') try: if value: attribute_array = np.hstack([attribute_array, float(value)]) else: attribute_array = np.hstack([attribute_ar...
def _float_check(self, attribute_array, value, irow, key)
Checks if value is valid float, appends to array if valid, appends nan if not
3.622687
2.739333
1.322471
'''Checks if value is valid integer, appends to array if valid, appends nan if not''' value = value.strip(' ') try: if value: attribute_array = np.hstack([attribute_array, int(value)]) else: attribute_array = np.hstack([attribute_ar...
def _int_check(self, attribute_array, value, irow, key)
Checks if value is valid integer, appends to array if valid, appends nan if not
3.684794
2.714265
1.357566
''' Writes the catalogue to file, purging events if necessary. :param catalogue: Earthquake catalogue as instance of :class: openquake.hmtk.seismicity.catalogue.Catalogue :param numpy.array flag_vector: Boolean vector specifying whether each event is ...
def write_file(self, catalogue, flag_vector=None, magnitude_table=None)
Writes the catalogue to file, purging events if necessary. :param catalogue: Earthquake catalogue as instance of :class: openquake.hmtk.seismicity.catalogue.Catalogue :param numpy.array flag_vector: Boolean vector specifying whether each event is valid (therefore ...
3.290839
2.03571
1.616556
''' Apply all the various purging conditions, if specified. :param catalogue: Earthquake catalogue as instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue` :param numpy.array flag_vector: Boolean vector specifying whether each event is valid (therefo...
def apply_purging(self, catalogue, flag_vector, magnitude_table)
Apply all the various purging conditions, if specified. :param catalogue: Earthquake catalogue as instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue` :param numpy.array flag_vector: Boolean vector specifying whether each event is valid (therefore writt...
3.768693
1.694016
2.224709
azims = geod.azimuth(reference.longitude, reference.latitude, projected.longitude, projected.latitude) depths = np.subtract(reference.depth, projected.depth) dists = geod.geodetic_distance(reference.longitude, reference.latitude, ...
def get_xyz_from_ll(projected, reference)
This method computes the x, y and z coordinates of a set of points provided a reference point :param projected: :class:`~openquake.hazardlib.geo.point.Point` object representing the coordinates of target point to be projected :param reference: :class:`~openquake.hazardlib.geo.point....
2.869705
2.81632
1.018956
''' Define the equation of target fault plane passing through 3 given points which includes two points on the fault trace and one point on the fault plane but away from the fault trace. Note: in order to remain the consistency of the fault normal vector direction definition, the order of the thr...
def get_plane_equation(p0, p1, p2, reference)
Define the equation of target fault plane passing through 3 given points which includes two points on the fault trace and one point on the fault plane but away from the fault trace. Note: in order to remain the consistency of the fault normal vector direction definition, the order of the three given poi...
3.653659
1.359489
2.687524
''' This method finds the projection of the site onto the plane containing the slipped area, defined as the Pp(i.e. 'perpendicular projection of site location onto the fault plane' Spudich et al. (2013) - page 88) given a site. :param site: Location of the site, [lon, lat, dep] :par...
def projection_pp(site, normal, dist_to_plane, reference)
This method finds the projection of the site onto the plane containing the slipped area, defined as the Pp(i.e. 'perpendicular projection of site location onto the fault plane' Spudich et al. (2013) - page 88) given a site. :param site: Location of the site, [lon, lat, dep] :param normal: ...
4.779685
1.463771
3.265324
cosang = np.dot(v1, v2) sinang = np.linalg.norm(np.cross(v1, v2)) return np.arctan2(sinang, cosang)
def vectors2angle(v1, v2)
Returns the angle in radians between vectors 'v1' and 'v2'. :param v1: vector, a numpy array :param v2: vector, a numpy array :returns: the angle in radians between the two vetors
1.801986
2.402128
0.750162
if e == 0.: c_prime = 0.8 elif e > 0.: c_prime = 1. / ((1. / 0.8) - ((r_hyp - rd) / e)) return c_prime
def isochone_ratio(e, rd, r_hyp)
Get the isochone ratio as described in Spudich et al. (2013) PEER report, page 88. :param e: a float defining the E-path length, which is the distance from Pd(direction) point to hypocentre. In km. :param rd: float, distance from the site to the direct point. :param r_hyp: ...
4.599393
4.168624
1.103336
pa = np.array([seg1_start, seg2_start]) pb = np.array([seg1_end, seg2_end]) si = pb - pa ni = si / np.power( np.dot(np.sum(si ** 2, axis=1).reshape(2, 1), np.ones((1, 3))), 0.5) nx = ni[:, 0].reshape(2, 1) ny = ni[:, 1].reshape(2, 1) nz = ni[:, 2].reshape(2, 1...
def _intersection(seg1_start, seg1_end, seg2_start, seg2_end)
Get the intersection point between two segments. The calculation is in Catestian coordinate system. :param seg1_start: A numpy array, representing one end point of a segment(e.g. segment1) segment. :param seg1_end: A numpy array, representing the other end point of t...
1.704862
1.625085
1.049091
# extracting dictionary of coefficients specific to required # intensity measure type. C = self.COEFFS[imt] C_SITE = self.SITE_COEFFS[imt] s_c, idx = self._get_site_classification(sites.vs30) sa_rock = (self.get_magnitude_scaling_term(C, rup) + ...
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types)
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
4.077331
4.082936
0.998627
if rup.mag <= self.CONSTANTS["m_c"]: return C["ccr"] * rup.mag else: return (C["ccr"] * self.CONSTANTS["m_c"]) +\ (C["dcr"] * (rup.mag - self.CONSTANTS["m_c"]))
def get_magnitude_scaling_term(self, C, rup)
Returns the magnitude scaling term in equations 1 and 2
3.816835
3.517609
1.085065
n_sites = sites.vs30.shape # Convert from reference rock to hard rock hard_rock_sa = sa_rock - C["lnSC1AM"] # Gets the elastic site amplification ratio ln_a_n_max = self._get_ln_a_n_max(C, n_sites, idx, rup) # Retrieves coefficients needed to determine smr ...
def add_site_amplification(self, C, C_SITE, sites, sa_rock, idx, rup)
Applies the site amplification scaling defined in equations from 10 to 15
3.642815
3.62858
1.003923
# Get SR sreff = np.zeros(n_sites) sreffc = np.zeros(n_sites) f_sr = np.zeros(n_sites) for i in range(1, 5): sreff[idx[i]] += (np.exp(sa_rock[idx[i]]) * self.IMF[i]) sreffc[idx[i]] += (C_SITE["Src1D{:g}".format(i)] * self.IMF[i]) # Get...
def _get_smr_coeffs(self, C, C_SITE, idx, n_sites, sa_rock)
Returns the SReff and SReffC terms needed for equation 14 and 15
3.788602
3.485745
1.086884
ln_a_n_max = C["lnSC1AM"] * np.ones(n_sites) for i in [2, 3, 4]: if np.any(idx[i]): ln_a_n_max[idx[i]] += C["S{:g}".format(i)] return ln_a_n_max
def _get_ln_a_n_max(self, C, n_sites, idx, rup)
Defines the rock site amplification defined in equations 10a and 10b
4.304117
3.933938
1.094099
ln_sf = np.zeros(n_sites) for i in range(1, 5): ln_sf_i = (C["lnSC1AM"] - C_SITE["LnAmax1D{:g}".format(i)]) if i > 1: ln_sf_i += C["S{:g}".format(i)] ln_sf[idx[i]] += ln_sf_i return ln_sf
def _get_ln_sf(self, C, C_SITE, idx, n_sites, rup)
Returns the log SF term required for equation 12
5.237435
4.999738
1.047542
site_class = np.ones(vs30.shape, dtype=int) idx = {} idx[1] = vs30 > 600. idx[2] = np.logical_and(vs30 > 300., vs30 <= 600.) idx[3] = np.logical_and(vs30 > 200., vs30 <= 300.) idx[4] = vs30 <= 200. for i in [2, 3, 4]: site_class[idx[i]] = i ...
def _get_site_classification(self, vs30)
Define the site class categories based on Vs30. Returns a vector of site class values and a dictionary containing logical vectors for each of the site classes
1.924298
1.904981
1.01014
if rup.rake <= -45.0 and rup.rake >= -135.0: # Normal faulting return C["FN_UM"] elif rup.rake > 45.0 and rup.rake < 135.0: # Reverse faulting return C["FRV_UM"] else: # No adjustment for strike-slip faulting return...
def get_sof_term(self, C, rup)
In the case of the upper mantle events separate coefficients are considered for normal, reverse and strike-slip
3.849852
3.508081
1.097424
x_ij = dists.rrup gn_exp = np.exp(C["c1"] + 6.5 * C["c2"]) g_n = C["gcrN"] * np.log(self.CONSTANTS["xcro"] + 30. + gn_exp) *\ np.ones_like(x_ij) idx = x_ij <= 30.0 if np.any(idx): g_n[idx] = C["gcrN"] * np.log(self.CONSTANTS["xcro"] + ...
def get_distance_term(self, C, dists, rup)
Returns the distance attenuation term
5.518758
5.448793
1.012841
if rup.ztor > 25.0: # Deep interface events c_int = C["cint"] else: c_int = C["cintS"] if rup.mag <= self.CONSTANTS["m_c"]: return c_int * rup.mag else: return (c_int * self.CONSTANTS["m_c"]) +\ (C["din...
def get_magnitude_scaling_term(self, C, rup)
Returns magnitude scaling term, which is dependent on top of rupture depth - as described in equations 1 and 2
4.704389
4.617753
1.018761
m_c = self.CONSTANTS["m_c"] if rup.mag <= m_c: return C["cSL"] * rup.mag +\ C["cSL2"] * ((rup.mag - self.CONSTANTS["m_sc"]) ** 2.) else: return C["cSL"] * m_c +\ C["cSL2"] * ((m_c - self.CONSTANTS["m_sc"]) ** 2.) +\ C...
def get_magnitude_scaling_term(self, C, rup)
Returns the magnitude scaling defined in equation 1
3.364179
3.234898
1.039964
if rup.ztor > 100.0: return C["bSLH"] * 100.0 else: return C["bSLH"] * rup.ztor
def get_depth_term(self, C, rup)
Returns depth term (dependent on top of rupture depth) as given in equations 1 Note that there is a ztor cap of 100 km that is introduced in the Fortran code but not mentioned in the original paper!
6.060206
4.396674
1.378362
x_ij = dists.rrup # Get anelastic scaling term in quation 5 if rup.ztor >= 50.: qslh = C["eSLH"] * (0.02 * rup.ztor - 1.0) else: qslh = 0.0 # r_volc = np.copy(dists.rvolc) # r_volc[np.logical_and(r_volc > 0.0, r_volc <= 12.0)] = 12.0 ...
def get_distance_term(self, C, dists, rup)
Returns the distance scaling term in equation 2a Note that the paper describes a lower and upper cap on Rvolc that is not found in the Fortran code, and is thus neglected here.
5.430401
5.273985
1.029658
COEFFS = self.COEFFS[imt] R = self._compute_term_r(COEFFS, rup.mag, dists.rrup) mean = 10 ** (self._compute_mean(COEFFS, rup.mag, R)) # Convert units to g, # but only for PGA and SA (not PGV): if imt.name in "SA PGA": mean = np.log(mean / (g*100.))...
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types)
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
4.309418
4.318864
0.997813
phi_ss = _compute_phi_ss(C, mag, c1_rrup, log_phi_ss, mean_phi_ss) stddevs = [] for stddev_type in stddev_types: assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const.StdDev.TOTAL: stddevs.append(np.sqrt( ...
def _get_stddevs(self, C, stddev_types, num_sites, mag, c1_rrup, log_phi_ss, mean_phi_ss)
Return standard deviations
1.951847
1.967799
0.991893
if mag > self.M1: rrup_min = 0.55 elif mag > self.M2: rrup_min = -2.80 * mag + 14.55 else: rrup_min = -0.295 * mag + 2.65 R = np.maximum(rrup, rrup_min) return np.log10(R)
def _compute_term_r(self, C, mag, rrup)
Compute distance term d = log10(max(R,rmin));
4.147234
4.015028
1.032928
return ( C['a1'] + C['a2'] * mag + C['a3'] * np.power(mag, 2) + C['a4'] * np.power(mag, 3) + C['a5'] * np.power(mag, 4) + C['a6'] * np.power(mag, 5) + C['a7'] * np.power(mag, 6) )
def _compute_term_1(self, C, mag)
Compute term 1 a1 + a2.*M + a3.*M.^2 + a4.*M.^3 + a5.*M.^4 + a6.*M.^5 + a7.*M.^6
1.765017
1.532203
1.151947
return ( (C['a8'] + C['a9'] * mag + C['a10'] * np.power(mag, 2) + C['a11'] * np.power(mag, 3)) * R )
def _compute_term_2(self, C, mag, R)
(a8 + a9.*M + a10.*M.*M + a11.*M.*M.*M).*d(r)
2.931237
2.299416
1.274774
return ( (C['a12'] + C['a13'] * mag + C['a14'] * np.power(mag, 2) + C['a15'] * np.power(mag, 3)) * np.power(R, 2) )
def _compute_term_3(self, C, mag, R)
(a12 + a13.*M + a14.*M.*M + a15.*M.*M.*M).*(d(r).^2)
2.696907
2.068153
1.304017
return ( (C['a16'] + C['a17'] * mag + C['a18'] * np.power(mag, 2) + C['a19'] * np.power(mag, 3)) * np.power(R, 3) )
def _compute_term_4(self, C, mag, R)
(a16 + a17.*M + a18.*M.*M + a19.*M.*M.*M).*(d(r).^3)
2.771748
2.058332
1.346599
return ( (C['a20'] + C['a21'] * mag + C['a22'] * np.power(mag, 2) + C['a23'] * np.power(mag, 3)) * np.power(R, 4) )
def _compute_term_5(self, C, mag, R)
(a20 + a21.*M + a22.*M.*M + a23.*M.*M.*M).*(d(r).^4)
2.603524
2.063747
1.261552
return (self._compute_term_1(C, mag) + self._compute_term_2(C, mag, term_dist_r) + self._compute_term_3(C, mag, term_dist_r) + self._compute_term_4(C, mag, term_dist_r) + self._compute_term_5(C, mag, term_dist_r))
def _compute_mean(self, C, mag, term_dist_r)
compute mean
1.588893
1.570391
1.011781
if isinstance(calc_id, str) or calc_id < 0 and not username: # get the last calculation in the datastore of the current user return datastore.read(calc_id) job = logs.dbcmd('get_job', calc_id, username) if job: return datastore.read(job.ds_calc_dir + '.hdf5') else: #...
def read(calc_id, username=None)
:param calc_id: a calculation ID :param username: if given, restrict the search to the user's calculations :returns: the associated DataStore instance
7.569753
7.472431
1.013024
assert len(curve_ref) == len(curve), (len(curve_ref), len(curve)) assert len(curve), 'The curves are empty!' max_diff = 0 for c1, c2 in zip(curve_ref, curve): if c1 >= min_value: max_diff = max(max_diff, abs(c1 - c2) / c1) return max_diff
def max_rel_diff(curve_ref, curve, min_value=0.01)
Compute the maximum relative difference between two curves. Only values greather or equal than the min_value are considered. >>> curve_ref = [0.01, 0.02, 0.03, 0.05, 1.0] >>> curve = [0.011, 0.021, 0.031, 0.051, 1.0] >>> round(max_rel_diff(curve_ref, curve), 2) 0.1
2.419513
2.786503
0.868297
assert len(curve_ref) == len(curve), (len(curve_ref), len(curve)) assert len(curve), 'The curves are empty!' diffs = [max_rel_diff(c1, c2, min_value) for c1, c2 in zip(curve_ref, curve)] maxdiff = max(diffs) maxindex = diffs.index(maxdiff) return maxdiff, maxindex
def max_rel_diff_index(curve_ref, curve, min_value=0.01)
Compute the maximum relative difference between two sets of curves. Only values greather or equal than the min_value are considered. Return both the maximum difference and its location (array index). >>> curve_refs = [[0.01, 0.02, 0.03, 0.05], [0.01, 0.02, 0.04, 0.06]] >>> curves = [[0.011, 0.021, 0.03...
2.493991
2.976554
0.837879
bigvalues = array_ref > min_value reldiffsquare = (1. - array[bigvalues] / array_ref[bigvalues]) ** 2 return numpy.sqrt(reldiffsquare.mean())
def rmsep(array_ref, array, min_value=0)
Root Mean Square Error Percentage for two arrays. :param array_ref: reference array :param array: another array :param min_value: compare only the elements larger than min_value :returns: the relative distance between the arrays >>> curve_ref = numpy.array([[0.01, 0.02, 0.03, 0.05], ... [0.01,...
5.5839
6.190811
0.901966
arr = numpy.copy(array) arr[arr < cutoff] = cutoff return numpy.log(arr)
def log(array, cutoff)
Compute the logarithm of an array with a cutoff on the small values
3.686871
3.028515
1.217386
dist = numpy.zeros(len(arrays)) logref = log(ref, cutoff) for rlz, array in enumerate(arrays): diff = log(array, cutoff) - logref dist[rlz] = numpy.sqrt((diff * diff).sum()) rlz = dist.argmin() closest = dict(rlz=rlz, value=arrays[rlz], dist=dist[rlz]) return closest
def closest_to_ref(arrays, ref, cutoff=1E-12)
:param arrays: a sequence of R arrays :param ref: the reference array :returns: a dictionary with keys rlz, value, and dist
3.509706
2.902738
1.209102
assert len(a1) == len(a2), (len(a1), len(a2)) if a1.dtype.names is None and len(a1.shape) == 1: # the first array is not composite, but it is one-dimensional a1 = numpy.array(a1, numpy.dtype([(firstfield, a1.dtype)])) fields1 = [(f, a1.dtype.fields[f][0]) for f in a1.dtype.names] ...
def compose_arrays(a1, a2, firstfield='etag')
Compose composite arrays by generating an extended datatype containing all the fields. The two arrays must have the same length.
1.943182
1.899675
1.022902
assetcol = dstore['assetcol'] tagnames = sorted(assetcol.tagnames) tag = {t: getattr(assetcol.tagcol, t) for t in tagnames} dtlist = [('asset_ref', (bytes, 100))] for tagname in tagnames: dtlist.append((tagname, (bytes, 100))) dtlist.extend([('lon', F32), ('lat', F32)]) asset_da...
def get_assets(dstore)
:param dstore: a datastore with keys 'assetcol' :returns: an array of records (asset_ref, tag1, ..., tagN, lon, lat)
3.884249
3.300399
1.176903
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types) # Compute SA with primed coeffs and PGA with both unprimed and # primed coeffs C = self.COEFFS_PRIMED[imt] C_PGA = self.COEFFS_PRIMED[PGA()] C...
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types)
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
4.43037
4.449192
0.99577
lats = np.ravel(rup.surface.mesh.array[1]) lons = np.ravel(rup.surface.mesh.array[0]) # These coordinates are provided by M Gerstenberger (personal # communication, 10 August 2018) polygon = shapely.geometry.Polygon([(171.6, -43.3), (173.2, -43.3), ...
def _check_in_cshm_polygon(self, rup)
Checks if any part of the rupture surface mesh is located within the intended boundaries of the Canterbury Seismic Hazard Model in Gerstenberger et al. (2014), Seismic hazard modelling for the recovery of Christchurch, Earthquake Spectra, 30(1), 17-29.
3.29136
2.990503
1.100604
conn = WrappedConnection(conn, debug=debug) try: upgrade(conn) except Exception: conn.rollback() raise else: if dry_run: conn.rollback() else: conn.commit()
def check_script(upgrade, conn, dry_run=True, debug=True)
An utility to debug upgrade scripts written in Python :param upgrade: upgrade procedure :param conn: a DB API 2 connection :param dry_run: if True, do not change the database :param debug: if True, print the queries which are executed
2.824935
3.82553
0.738443
sql = open(fname).read() try: # we cannot use conn.executescript which is non transactional for query in sql.split('\n\n'): conn.execute(query) except Exception: logging.error('Error executing %s' % fname) raise
def apply_sql_script(conn, fname)
Apply the given SQL script to the database :param conn: a DB API 2 connection :param fname: full path to the creation script
4.248095
5.032029
0.844211
upgrader = UpgradeManager.instance(conn, pkg_name) t0 = time.time() # run the upgrade scripts try: versions_applied = upgrader.upgrade(conn, skip_versions) except: conn.rollback() raise else: conn.commit() dt = time.time() - t0 logging.info('Upgrade c...
def upgrade_db(conn, pkg_name='openquake.server.db.schema.upgrades', skip_versions=())
Upgrade a database by running several scripts in a single transaction. :param conn: a DB API 2 connection :param str pkg_name: the name of the package with the upgrade scripts :param list skip_versions: the versions to skip :returns: the version numbers of the new scripts applied the database
3.414392
3.486271
0.979382
upgrader = UpgradeManager.instance(conn, pkg_name) return max(upgrader.get_db_versions(conn))
def db_version(conn, pkg_name='openquake.server.db.schema.upgrades')
:param conn: a DB API 2 connection :param str pkg_name: the name of the package with the upgrade scripts :returns: the current version of the database
8.134212
9.657683
0.842253
msg_safe_ = '\nThe following scripts can be applied safely:\n%s' msg_slow_ = '\nPlease note that the following scripts could be slow:\n%s' msg_danger_ = ('\nPlease note that the following scripts are potentially ' 'dangerous and could destroy your data:\n%s') upgrader = UpgradeMa...
def what_if_I_upgrade(conn, pkg_name='openquake.server.db.schema.upgrades', extract_scripts='extract_upgrade_scripts')
:param conn: a DB API 2 connection :param str pkg_name: the name of the package with the upgrade scripts :param extract_scripts: name of the method to extract the scripts
4.044836
4.093392
0.988138
curs = self._conn.cursor() query = curs.mogrify(templ, args) if self.debug: print(query) curs.execute(query) return curs
def run(self, templ, *args)
A simple utility to run SQL queries. :param templ: a query or query template :param args: the arguments (or the empty tuple) :returns: the DB API 2 cursor used to run the query
3.361985
3.243192
1.036628
logging.info('Creating the versioning table %s', self.version_table) conn.executescript(CREATE_VERSIONING % self.version_table) self._insert_script(self.read_scripts()[0], conn)
def install_versioning(self, conn)
Create the version table into an already populated database and insert the base script. :param conn: a DB API 2 connection
6.425457
5.817914
1.104426
base = self.read_scripts()[0]['fname'] logging.info('Creating the initial schema from %s', base) apply_sql_script(conn, os.path.join(self.upgrade_dir, base)) self.install_versioning(conn)
def init(self, conn)
Create the version table and run the base script on an empty database. :param conn: a DB API 2 connection
10.22494
9.418513
1.085621
''' Upgrade the database from the current version to the maximum version in the upgrade scripts. :param conn: a DBAPI 2 connection :param skip_versions: the versions to skip ''' db_versions = self.get_db_versions(conn) self.starting_version = max(db_versi...
def upgrade(self, conn, skip_versions=())
Upgrade the database from the current version to the maximum version in the upgrade scripts. :param conn: a DBAPI 2 connection :param skip_versions: the versions to skip
4.333464
3.234548
1.339743
scripts = self.read_scripts(skip_versions=self.get_db_versions(conn)) versions = [s['version'] for s in scripts] if versions: return ('Your database is not updated. You can update it by ' 'running oq engine --upgrade-db which will process the ' ...
def check_versions(self, conn)
:param conn: a DB API 2 connection :returns: a message with the versions that will be applied or None
9.939926
8.925241
1.113687
curs = conn.cursor() query = 'select version from {}'.format(self.version_table) try: curs.execute(query) return set(version for version, in curs.fetchall()) except: raise VersioningNotInstalled('Run oq engine --upgrade-db')
def get_db_versions(self, conn)
Get all the versions stored in the database as a set. :param conn: a DB API 2 connection
7.592251
7.577163
1.001991
''' Parse a script name and return a dictionary with fields fname, name, version and ext (or None if the name does not match). :param name: name of the script ''' match = re.match(self.pattern, script_name) if not match: return version, flag, ...
def parse_script_name(self, script_name)
Parse a script name and return a dictionary with fields fname, name, version and ext (or None if the name does not match). :param name: name of the script
4.799796
2.776664
1.728619
scripts = [] versions = {} # a script is unique per version for scriptname in sorted(os.listdir(self.upgrade_dir)): match = self.parse_script_name(scriptname) if match: version = match['version'] if version in skip_versions: ...
def read_scripts(self, minversion=None, maxversion=None, skip_versions=())
Extract the upgrade scripts from a directory as a list of dictionaries, ordered by version. :param minversion: the minimum version to consider :param maxversion: the maximum version to consider :param skipversions: the versions to skip
3.71169
3.743056
0.99162
link_pattern = '>\s*{0}\s*<'.format(self.pattern[1:-1]) page = urllib.request.urlopen(self.upgrades_url).read() for mo in re.finditer(link_pattern, page): scriptname = mo.group(0)[1:-1].strip() yield self.parse_script_name(scriptname)
def extract_upgrade_scripts(self)
Extract the OpenQuake upgrade scripts from the links in the GitHub page
4.796794
4.313412
1.112065
try: # upgrader is an UpgradeManager instance defined in the __init__.py upgrader = importlib.import_module(pkg_name).upgrader except ImportError: raise SystemExit( 'Could not import %s (not in the PYTHONPATH?)' % pkg_name) if not upgr...
def instance(cls, conn, pkg_name='openquake.server.db.schema.upgrades')
Return an :class:`UpgradeManager` instance. :param conn: a DB API 2 connection :param str pkg_name: the name of the package with the upgrade scripts
4.456738
4.395094
1.014026
if len(self.points) == 2: return self.points[0].azimuth(self.points[1]) lons = numpy.array([point.longitude for point in self.points]) lats = numpy.array([point.latitude for point in self.points]) azimuths = geodetic.azimuth(lons[:-1], lats[:-1], lons[1:], lats[1:]) ...
def average_azimuth(self)
Calculate and return weighted average azimuth of all line's segments in decimal degrees. Uses formula from http://en.wikipedia.org/wiki/Mean_of_circular_quantities >>> from openquake.hazardlib.geo.point import Point as P >>> '%.1f' % Line([P(0, 0), P(1e-5, 1e-5)]).average_azimu...
2.351805
2.395485
0.981766
if len(self.points) < 2: return Line(self.points) resampled_points = [] # 1. Resample the first section. 2. Loop over the remaining points # in the line and resample the remaining sections. # 3. Extend the list with the resampled points, except the first o...
def resample(self, section_length)
Resample this line into sections. The first point in the resampled line corresponds to the first point in the original line. Starting from the first point in the original line, a line segment is defined as the line connecting the last point in the resampled line and the next po...
3.33936
3.075461
1.085808
length = 0 for i, point in enumerate(self.points): if i != 0: length += point.distance(self.points[i - 1]) return length
def get_length(self)
Calculate and return the length of the line as a sum of lengths of all its segments. :returns: Total length in km.
2.958276
3.009645
0.982932
assert len(self.points) > 1, "can not resample the line of one point" section_length = self.get_length() / (num_points - 1) resampled_points = [self.points[0]] segment = 0 acc_length = 0 last_segment_length = 0 for i in range(num_points - 1): ...
def resample_to_num_points(self, num_points)
Resample the line to a specified number of points. :param num_points: Integer number of points the resulting line should have. :returns: A new line with that many points as requested.
3.568122
3.588871
0.994218
assert (percentile >= 0.0) and (percentile <= 1.0) c_val = _scaling(tau, var_tau) k_val = _dof(tau, var_tau) return np.sqrt(c_val * chi2.ppf(percentile, df=k_val))
def _at_percentile(tau, var_tau, percentile)
Returns the value of the inverse chi-2 distribution at the given percentile from the mean and variance of the uncertainty model, as reported in equations 5.1 - 5.3 of Al Atik (2015)
3.773633
3.765177
1.002246
if imt.name == "PGV": C = params["PGV"] else: C = params["SA"] if mag > 6.5: return C["tau4"] elif (mag > 5.5) and (mag <= 6.5): return ITPL(mag, C["tau4"], C["tau3"], 5.5, 1.0) elif (mag > 5.0) and (mag <= 5.5): return ITPL(mag, C["tau3"], C["tau2"], 5.0...
def global_tau(imt, mag, params)
'Global' model of inter-event variability, as presented in equation 5.6 (p103)
2.185115
2.217818
0.985255
if imt.name == "PGV": C = params["PGV"] else: C = params["SA"] if mag > 6.5: return C["tau3"] elif (mag > 5.5) and (mag <= 6.5): return ITPL(mag, C["tau3"], C["tau2"], 5.5, 1.0) elif (mag > 5.0) and (mag <= 5.5): return ITPL(mag, C["tau2"], C["tau1"], 5.0...
def cena_tau(imt, mag, params)
Returns the inter-event standard deviation, tau, for the CENA case
2.666757
2.659894
1.00258
tau_model = {} for imt in mean: tau_model[imt] = {} for key in mean[imt]: if quantile is None: tau_model[imt][key] = mean[imt][key] else: tau_model[imt][key] = _at_percentile(mean[imt][key], ...
def get_tau_at_quantile(mean, stddev, quantile)
Returns the value of tau at a given quantile in the form of a dictionary organised by intensity measure
2.442457
2.329742
1.048381
# Setup SA coeffs - the backward compatible Python 2.7 way coeffs = deepcopy(phi_model.sa_coeffs) coeffs.update(phi_model.non_sa_coeffs) for imt in coeffs: if quantile is None: coeffs[imt] = {"a": phi_model[imt]["mean_a"], "b": phi_model[imt]["mean_b"]...
def get_phi_ss_at_quantile(phi_model, quantile)
Returns the phi_ss values at the specified quantile as an instance of `class`:: openquake.hazardlib.gsim.base.CoeffsTable - applies to the magnitude-dependent cases
3.552452
3.376824
1.05201
def get_phi_ss(imt, mag, params):
    """
    Returns the single station phi (or its variance) for a given
    magnitude and intensity measure type according to equation 5.14
    of Al Atik (2015)
    """
    coeffs = params[imt]
    if mag <= 5.0:
        return coeffs["a"]
    if mag > 6.5:
        return coeffs["b"]
    # Linear interpolation between the magnitude 5.0 and 6.5 anchors
    return coeffs["a"] + (mag - 5.0) * ((coeffs["b"] - coeffs["a"]) / 1.5)
2.964587
3.0313
0.977992
tau = self._get_tau(imt, mag) phi = self._get_phi(imt, mag) sigma = np.sqrt(tau ** 2. + phi ** 2.) stddevs = [] for stddev_type in stddev_types: assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const.StdDev.TOTAL: ...
def get_stddevs(self, mag, imt, stddev_types, num_sites)
Returns the standard deviations for either the ergodic or non-ergodic models
1.723263
1.717205
1.003528
def _get_tau(self, imt, mag):
    """
    Returns the inter-event standard deviation (tau)
    """
    # Dispatch to the tau function registered for the configured model
    tau_function = TAU_EXECUTION[self.tau_model]
    return tau_function(imt, mag, self.TAU)
12.082797
12.433733
0.971775
def _get_phi(self, imt, mag):
    """
    Returns the within-event standard deviation (phi)
    """
    phi = get_phi_ss(imt, mag, self.PHI_SS)
    if not self.ergodic:
        return phi
    # Ergodic case: add the site-to-site variability in quadrature
    C = self.PHI_S2SS[imt]
    return np.sqrt(phi ** 2. + C["phi_s2ss"] ** 2.)
6.36195
6.355749
1.000976
stddevs = [] for stddev_type in stddev_types: assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev_type == const.StdDev.TOTAL: sigma = self._get_total_sigma(imt, mag) stddevs.append(sigma + np.zeros(num_sites)) ...
def get_stddevs(self, mag, imt, stddev_types, num_sites)
Returns the total standard deviation
2.301807
2.332937
0.986656
# Mean mean is found in self.TAU. Get the variance in tau tau_std = TAU_SETUP[self.tau_model]["STD"] # Mean phiss is found in self.PHI_SS. Get the variance in phi phi_std = deepcopy(self.PHI_SS.sa_coeffs) phi_std.update(self.PHI_SS.non_sa_coeffs) for key in phi_s...
def _get_sigma_at_quantile(self, sigma_quantile)
Calculates the total standard deviation at the specified quantile
3.208453
3.220248
0.996338
self.magnitude_limits = MAG_LIMS_KEYS[self.tau_model]["mag"] self.tau_keys = MAG_LIMS_KEYS[self.tau_model]["keys"] t_bar = {} t_std = {} for imt in imt_list: t_bar[imt] = [] t_std[imt] = [] for mag, key in zip(self.magnitude_limits, se...
def _get_tau_vector(self, tau_mean, tau_std, imt_list)
Gets the vector of mean and variance of tau values corresponding to the specific model and returns them as dictionaries
2.75873
2.709749
1.018076
p_bar = {} p_std = {} for imt in imt_list: p_bar[imt] = [] p_std[imt] = [] for mag in self.magnitude_limits: phi_ss_mean = get_phi_ss(imt, mag, phi_mean) phi_ss_std = get_phi_ss(imt, mag, phi_std) if se...
def _get_phi_vector(self, phi_mean, phi_std, imt_list)
Gets the vector of mean and variance of phi values corresponding to the specific model and returns them as dictionaries
2.705737
2.662756
1.016142
C = self.SIGMA[imt] if mag <= self.magnitude_limits[0]: # The CENA constant model is always returned here return C[self.tau_keys[0]] elif mag > self.magnitude_limits[-1]: return C[self.tau_keys[-1]] else: # Needs interpolation ...
def _get_total_sigma(self, imt, mag)
Returns the estimated total standard deviation for a given intensity measure type and magnitude
3.446367
3.456414
0.997093
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types) F, HW = self._get_fault_type_hanging_wall(rup.rake) S = self._get_site_class(sites.vs30) # compute pga on rock (used then to compute site amplification facto...
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types)
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
3.302916
3.300642
1.000689
f1 = self._compute_f1(C, mag, rrup) f3 = self._compute_f3(C, mag) f4 = self._compute_f4(C, mag, rrup) return f1 + F * f3 + HW * f4
def _compute_mean_on_rock(self, C, mag, rrup, F, HW)
Compute mean value on rock (that is eq.1, page 105 with S = 0)
2.475111
2.33572
1.059678
F, HW = 0, 0 if 45 <= rake <= 135: F, HW = 1, 1 return F, HW
def _get_fault_type_hanging_wall(self, rake)
Return fault type (F) and hanging wall (HW) flags depending on rake angle. The method assumes 'reverse' (F = 1) if 45 <= rake <= 135, 'other' (F = 0) if otherwise. Hanging-wall flag is set to 1 if 'reverse', and 0 if 'other'.
5.404798
3.352057
1.612383
r = np.sqrt(rrup ** 2 + C['c4'] ** 2) f1 = ( C['a1'] + C['a12'] * (8.5 - mag) ** C['n'] + (C['a3'] + C['a13'] * (mag - C['c1'])) * np.log(r) ) if mag <= C['c1']: f1 += C['a2'] * (mag - C['c1']) else: f1 += C['...
def _compute_f1(self, C, mag, rrup)
Compute f1 term (eq.4, page 105)
2.81521
2.624242
1.072771
if mag <= 5.8: return C['a5'] elif 5.8 < mag < C['c1']: return ( C['a5'] + (C['a6'] - C['a5']) * (mag - 5.8) / (C['c1'] - 5.8) ) else: return C['a6']
def _compute_f3(self, C, mag)
Compute f3 term (eq.6, page 106) NOTE: In the original manuscript, for the case 5.8 < mag < c1, the term in the numerator '(mag - 5.8)' is missing, while is present in the software used for creating the verification tables
2.698677
2.200328
1.226488
fhw_m = 0 fhw_r = np.zeros_like(rrup) if mag <= 5.5: fhw_m = 0 elif 5.5 < mag < 6.5: fhw_m = mag - 5.5 else: fhw_m = 1 idx = (rrup > 4) & (rrup <= 8) fhw_r[idx] = C['a9'] * (rrup[idx] - 4.) / 4. idx = (rrup >...
def _compute_f4(self, C, mag, rrup)
Compute f4 term (eq. 7, 8, and 9, page 106)
2.290489
2.249535
1.018205
return C['a10'] + C['a11'] * np.log(pga_rock + C['c5'])
def _compute_f5(self, C, pga_rock)
Compute f5 term (non-linear soil response)
6.470146
5.684942
1.13812
def get_hcurves_and_means(dstore):
    """
    Extract hcurves from the datastore and compute their means.

    :returns: curves_by_rlz, mean_curves
    """
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    pgetter = getters.PmapGetter(dstore, rlzs_assoc)
    pmaps = pgetter.get_pmaps()
    # Map each realization to its probability map
    curves_by_rlz = dict(zip(pgetter.rlzs, pmaps))
    return curves_by_rlz, dstore['hcurves/mean']
7.823336
6.925987
1.129563
datadir = datastore.get_datadir() if what == 'all': # show all if not os.path.exists(datadir): return rows = [] for calc_id in datastore.get_calc_ids(datadir): try: ds = util.read(calc_id) oq = ds['oqparam'] cm...
def show(what='contents', calc_id=-1, extra=())
Show the content of a datastore (by default the last one).
5.070341
5.002836
1.013493
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types) mean = np.zeros_like(sites.vs30) stddevs = [np.zeros_like(sites.vs30) for _ in stddev_types] idx_rock = sites.vs30 >= self.ROCK_VS30 idx_soil = sites...
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types)
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
1.797541
1.80697
0.994782
mean[idx] = (C['C1'] + C['C2'] * mag + C['C3'] * np.log(rhypo[idx] + C['C4'] * np.exp(C['C5'] * mag)) + C['C6'] * hypo_depth)
def _compute_mean(self, C, mag, rhypo, hypo_depth, mean, idx)
Compute mean value according to equations 10 and 11 page 226.
2.737281
2.565454
1.066977
for stddev in stddevs: stddev[idx] += C['sigma']
def _compute_std(self, C, stddevs, idx)
Compute total standard deviation, see tables 3 and 4, pages 227 and 228.
8.750524
9.469751
0.92405
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    """
    See :meth:`superclass method
    <.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
    for spec of input and result values.
    """
    mean, stddevs = super().get_mean_and_stddevs(
        sites, rup, dists, imt, stddev_types)
    # Constant site-condition adjustments applied on top of the parent
    # model; both masks are computed explicitly from vs30
    on_rock = sites.vs30 >= self.ROCK_VS30
    on_soil = sites.vs30 < self.ROCK_VS30
    mean[on_rock] += 0.275
    mean[on_soil] += 0.31
    return mean, stddevs
2.437112
2.671508
0.912261
'''Check config file inputs and overwrite bad values with the defaults''' essential_keys = ['number_earthquakes'] for key in essential_keys: if key not in config: raise ValueError('For Kijko Nonparametric Gaussian the key %s ' 'needs to be set in the configu...
def check_config(config)
Check config file inputs and overwrite bad values with the defaults
4.920372
4.076246
1.207084
''' Function to return a set of exponentially spaced values between mmin and mmax :param float mmin: Minimum value :param float mmax: Maximum value :param float number_samples: Number of exponentially spaced samples :return np.ndarray: Set of 'number_samples'...
def _get_exponential_spaced_values(mmin, mmax, number_samples)
Function to return a set of exponentially spaced values between mmin and mmax :param float mmin: Minimum value :param float mmax: Maximum value :param float number_samples: Number of exponentially spaced samples :return np.ndarray: Set of 'number_samples' exponential...
3.216212
2.274605
1.413965