code: string
signature: string
docstring: string
loss_without_docstring: float64
loss_with_docstring: float64
factor: float64
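Each record below stores a function body, its signature and docstring, the model loss measured without and with the docstring, and their ratio (factor). A minimal sketch of that relationship in Python, using the loss values from the first record; the record dict itself is illustrative only:

# Sketch: `factor` appears to be the ratio of the two loss columns.
# The numeric values are copied from the first record below; the dict is illustrative.
record = {
    "loss_without_docstring": 3.292773,
    "loss_with_docstring": 3.094443,
}
factor = record["loss_without_docstring"] / record["loss_with_docstring"]
# factor ≈ 1.064092, which matches the stored `factor` value for that record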
for obj in args: if obj is not None and hasattr(obj, 'cleanup'): try: obj.cleanup() except NotImplementedError: pass except Exception: logger.exception("Unable to cleanup %s object", obj)
def cleanup(logger, *args)
Environment's cleanup routine.
3.292773
3.094443
1.064092
self.logger.debug("Allocating environment.") self._allocate() self.logger.debug("Environment successfully allocated.")
def allocate(self)
Builds the context and the Hooks.
6.433357
5.417366
1.187543
self.logger.debug("Deallocating environment.") self._deallocate() self.logger.debug("Environment successfully deallocated.")
def deallocate(self)
Cleans up the context and the Hooks.
4.968068
4.524926
1.097934
domain = etree.fromstring(xml) subelement(domain, './/name', 'name', identifier) subelement(domain, './/uuid', 'uuid', identifier) devices = subelement(domain, './/devices', 'devices', None) for mount in mounts: filesystem = etree.SubElement(devices, 'filesystem', type='mount') ...
def domain_xml(identifier, xml, mounts, network_name=None)
Fills the XML file with the required fields. @param identifier: (str) UUID of the Environment. @param xml: (str) XML configuration of the domain. @param filesystem: (tuple) ((source, target), (source, target)) * name * uuid * devices * network * filesystem
2.437577
2.699233
0.903063
mounts = [] with open(configuration['configuration']) as config_file: domain_config = config_file.read() if 'filesystem' in configuration: if isinstance(configuration['filesystem'], (list, tuple)): for mount in configuration['filesystem']: mounts.append(mou...
def domain_create(hypervisor, identifier, configuration, network_name=None)
libvirt Domain definition. @raise: ConfigError, IOError, libvirt.libvirtError.
2.995647
2.996699
0.999649
if domain is not None: try: if domain.isActive(): domain.destroy() except libvirt.libvirtError: logger.exception("Unable to destroy the domain.") try: domain.undefine() except libvirt.libvirtError: logger.exception(...
def domain_delete(domain, logger, filesystem)
libvirt domain undefinition. @raise: libvirt.libvirtError.
2.410115
2.204248
1.093395
network_name = None self._hypervisor = libvirt.open( self.configuration.get('hypervisor', 'lxc:///')) if 'network' in self.configuration: self._network = network.create(self._hypervisor, self.identifier, self.configura...
def allocate(self)
Initializes libvirt resources.
3.878826
3.407068
1.138465
if self._domain is not None: self._domain_delete() if self._network is not None and 'network' in self.configuration: self._network_delete() if self._hypervisor is not None: self._hypervisor_delete()
def deallocate(self)
Releases all resources.
4.152195
3.878409
1.070593
manager = HookManager(identifier, configuration) manager.load_hooks(context) return manager
def hooks_factory(identifier, configuration, context)
Returns the initialized hooks.
5.521143
5.089566
1.084796
for hook in self.configuration.get('hooks', ()): config = hook.get('configuration', {}) config.update(self.configuration.get('configuration', {})) try: self._load_hook(hook['name'], config, context) except KeyError: self.l...
def load_hooks(self, context)
Initializes the Hooks and loads them within the Environment.
3.959594
3.817969
1.037094
subelm = element.find(xpath) if subelm is None: subelm = etree.SubElement(element, tag) else: subelm.tag = tag subelm.text = text for attr, value in kwargs.items(): subelm.set(attr, value) return subelm
def subelement(element, xpath, tag, text, **kwargs)
Searches for the element matching the *xpath* in *parent* and replaces its *tag*, *text* and *kwargs* attributes. If the element at *xpath* is not found, a new child element is created with *kwargs* attributes and added. Returns the found/created element.
2.003437
2.617007
0.765545
group = group if group else cmod.PairingGroup(PAIRING_GROUP) h_challenge = sha256() serialedArgs = [group.serialize(arg) if isGroupElement(arg) else cmod.Conversion.IP2OS(arg) for arg in args] for arg in sorted(serialedArgs): h_challenge.update(arg...
def get_hash_as_int(*args, group: cmod.PairingGroup = None)
Enumerate over the input tuple and generate a hash using the tuple values :param args: sequence of either group or integer elements :param group: pairing group if an element is a group element :return:
6.663049
5.95246
1.119377
return ''.join(sample(chars, size))
def randomString(size: int = 20, chars: str = string.ascii_letters + string.digits) -> str
Generate a random string of the specified size. Ensure that the size is less than the length of chars, as this function uses random.sample, which samples without replacement. :param size: size of the random string to generate :param chars: the set of characters to use to generate the random...
15.234615
18.286118
0.833125
prime = cmod.randomPrime(LARGE_PRIME) i = 0 while not cmod.isPrime(2 * prime + 1): prime = cmod.randomPrime(LARGE_PRIME) i += 1 return prime
def genPrime()
Generate 2 large primes `p_prime` and `q_prime` and use them to generate another 2 primes `p` and `q` of 1024 bits
3.618021
4.035852
0.89647
encoded = {} for i in range(len(self.credType.names)): self.credType.names[i] attr_types = self.credType.attrTypes[i] for at in attr_types: attrName = at.name if attrName in self._vals: if at.encode: ...
def encoded(self)
This function will encode all the attributes to 256 bit integers :return:
4.037981
3.917125
1.030853
schema = Schema(name, version, attrNames, self.issuerId) return await self.wallet.submitSchema(schema)
async def genSchema(self, name, version, attrNames) -> Schema
Generates and submits Schema. :param name: schema name :param version: schema version :param attrNames: a list of attributes the schema contains :return: submitted Schema
8.006314
8.859216
0.903727
pk, sk = await self._primaryIssuer.genKeys(schemaId, p_prime, q_prime) pkR, skR = await self._nonRevocationIssuer.genRevocationKeys() pk = await self.wallet.submitPublicKeys(schemaId=schemaId, pk=pk, pkR=pkR) pkR = await self.walle...
async def genKeys(self, schemaId: ID, p_prime=None, q_prime=None) -> ( PublicKey, RevocationPublicKey)
Generates and submits keys (both public and secret, primary and non-revocation). :param schemaId: The schema ID (reference to claim definition schema) :param p_prime: optional p_prime parameter :param q_prime: optional q_prime parameter :return: Submitted Public keys (bo...
3.806715
3.126334
1.217629
accum, tails, accPK, accSK = await self._nonRevocationIssuer.issueAccumulator( schemaId, iA, L) accPK = await self.wallet.submitAccumPublic(schemaId=schemaId, accumPK=accPK, accum...
async def issueAccumulator(self, schemaId: ID, iA, L) -> AccumulatorPublicKey
Issues and submits an accumulator used for non-revocation proof. :param schemaId: The schema ID (reference to claim definition schema) :param iA: accumulator ID :param L: maximum number of claims within accumulator. :return: Submitted accumulator public key
5.371327
5.215873
1.029804
acc, ts = await self._nonRevocationIssuer.revoke(schemaId, i) await self.wallet.submitAccumUpdate(schemaId=schemaId, accum=acc, timestampMs=ts)
async def revoke(self, schemaId: ID, i)
Performs revocation of a Claim. :param schemaId: The schema ID (reference to claim definition schema) :param i: claim's sequence number within accumulator
17.687687
15.526985
1.139158
schemaKey = (await self.wallet.getSchema(schemaId)).getKey() attributes = self._attrRepo.getAttributes(schemaKey, claimRequest.userId) # TODO re-enable when revocation registry is implemented # iA = iA if iA else (await self.wa...
async def issueClaim(self, schemaId: ID, claimRequest: ClaimRequest, iA=None, i=None) -> (Claims, Dict[str, ClaimAttributeValues])
Issue a claim for the given user and schema. :param schemaId: The schema ID (reference to claim definition schema) :param claimRequest: A claim request containing prover ID and prover-generated values :param iA: accumulator ID :param i: claim's sequence number within acc...
6.670876
6.039009
1.104631
res = {} for schemaId, claimReq in allClaimRequest.items(): res[schemaId] = await self.issueClaim(schemaId, claimReq) return res
async def issueClaims(self, allClaimRequest: Dict[ID, ClaimRequest]) -> \ Dict[ID, Claims]
Issue claims for the given users and schemas. :param allClaimRequest: a map of schema ID to a claim request containing prover ID and prover-generated values :return: The claims (both primary and non-revocation)
2.996399
2.965393
1.010456
if proofRequest.verifiableAttributes.keys() != proof.requestedProof.revealed_attrs.keys(): raise ValueError('Received attributes ={} do not correspond to requested={}'.format( proof.requestedProof.revealed_attrs.keys(), proofRequest.verifiableAttributes.keys())) if...
async def verify(self, proofRequest: ProofRequest, proof: FullProof)
Verifies a proof from the prover. :param proofRequest: description of a proof to be presented (revealed attributes, predicates, timestamps for non-revocation) :param proof: a proof :return: True if verified successfully and false otherwise.
4.017373
3.728319
1.077529
await self._genMasterSecret(schemaId) U = await self._genU(schemaId) Ur = None if not reqNonRevoc else await self._genUr(schemaId) proverId = proverId if proverId else self.proverId return ClaimRequest(userId=proverId, U=U, Ur=Ur)
async def createClaimRequest(self, schemaId: ID, proverId=None, reqNonRevoc=True) -> ClaimRequest
Creates a claim request to the issuer. :param schemaId: The schema ID (reference to claim definition schema) :param proverId: a prover ID request a claim for (if None then the current prover default ID is used) :param reqNonRevoc: whether to request non-revocation claim ...
4.370374
4.301028
1.016123
res = {} for schemaId in schemaIds: res[schemaId] = await self.createClaimRequest(schemaId, proverId, reqNonRevoc) return res
async def createClaimRequests(self, schemaIds: Sequence[ID], proverId=None, reqNonRevoc=True) -> Dict[ID, ClaimRequest]
Creates a claim request to the issuer. :param schemaIds: The schema IDs (references to claim definition schema) :param proverId: a prover ID request a claim for (if None then the current prover default ID is used) :param reqNonRevoc: whether to request non-revocation claim ...
2.164969
2.55313
0.847966
await self.wallet.submitContextAttr(schemaId, signature.primaryClaim.m2) await self.wallet.submitClaimAttributes(schemaId, claimAttributes) await self._initPrimaryClaim(schemaId, signature.primaryClaim) if signature.nonRevocClaim: await self._initNonRevocationClaim(...
async def processClaim(self, schemaId: ID, claimAttributes: Dict[str, ClaimAttributeValues], signature: Claims)
Processes and saves a received Claim for the given Schema. :param schemaId: The schema ID (reference to claim definition schema) :param claims: claims to be processed and saved
5.381391
5.631796
0.955537
res = [] for schemaId, (claim_signature, claim_attributes) in allClaims.items(): res.append(await self.processClaim(schemaId, claim_attributes, claim_signature)) return res
async def processClaims(self, allClaims: Dict[ID, Claims])
Processes and saves received Claims. :param claims: claims to be processed and saved for each claim definition.
5.527894
5.497355
1.005555
claims, requestedProof = await self._findClaims(proofRequest) proof = await self._prepareProof(claims, proofRequest.nonce, requestedProof) return proof
async def presentProof(self, proofRequest: ProofRequest) -> FullProof
Presents a proof to the verifier. :param proofRequest: description of a proof to be presented (revealed attributes, predicates, timestamps for non-revocation) :return: a proof (both primary and non-revocation) and revealed attributes (initial non-encoded values)
6.369228
7.991984
0.796952
v = struct.unpack( 'q', struct.pack('Q', int(hex_string, 16)))[0] # type: int return v
def unsigned_hex_to_signed_int(hex_string: str) -> int
Converts a 64-bit hex string to a signed int value. This is due to the fact that Apache Thrift only has signed values. Examples: '17133d482ba4f605' => 1662740067609015813 'b6dbb1c2b362bf51' => -5270423489115668655 :param hex_string: the string representation of a zipkin ID :returns: s...
5.980719
5.950161
1.005136
hex_string = hex(struct.unpack('Q', struct.pack('q', signed_int))[0])[2:] if hex_string.endswith('L'): return hex_string[:-1] return hex_string
def signed_int_to_unsigned_hex(signed_int: int) -> str
Converts a signed int value to a 64-bit hex string. Examples: 1662740067609015813 => '17133d482ba4f605' -5270423489115668655 => 'b6dbb1c2b362bf51' :param signed_int: an int to convert :returns: unsigned hex string
3.067513
3.604851
0.85094
app[tracer_key] = tracer m = middleware_maker(skip_routes=skip_routes, tracer_key=tracer_key, request_key=request_key) app.middlewares.append(m) # register cleanup signal to close zipkin transport connections async def close_aiozipkin(app: Appl...
def setup(app: Application, tracer: Tracer, *, skip_routes: Optional[AbstractRoute] = None, tracer_key: str = APP_AIOZIPKIN_KEY, request_key: str = REQUEST_AIOZIPKIN_KEY) -> Application
Sets required parameters in aiohttp applications for aiozipkin. The tracer is added to the application context and cleaned up after application shutdown. You can provide a custom tracer_key if the default name is not suitable.
3.136774
3.369557
0.930916
return cast(Tracer, app[tracer_key])
def get_tracer( app: Application, tracer_key: str = APP_AIOZIPKIN_KEY) -> Tracer
Returns the tracer object from the application context. By default the tracer is stored under APP_AIOZIPKIN_KEY in the aiohttp application context; you can provide your own key if for some reason the default one is not suitable.
11.965389
12.937226
0.924881
return cast(SpanAbc, request[request_key])
def request_span(request: Request, request_key: str = REQUEST_AIOZIPKIN_KEY) -> SpanAbc
Returns the span created by the middleware from the request context; you can use it as the parent of the next child span.
8.546787
8.985882
0.951135
trace_config = aiohttp.TraceConfig() zipkin = ZipkinClientSignals(tracer) trace_config.on_request_start.append(zipkin.on_request_start) trace_config.on_request_end.append(zipkin.on_request_end) trace_config.on_request_exception.append(zipkin.on_request_exception) return trace_config
def make_trace_config(tracer: Tracer) -> aiohttp.TraceConfig
Creates aiohttp.TraceConfig with aiozipkin instrumentation enabled for the aiohttp client.
2.206369
1.974051
1.117686
return Endpoint(service_name, ipv4, ipv6, port)
def create_endpoint(service_name: str, *, ipv4: OptStr = None, ipv6: OptStr = None, port: OptInt = None) -> Endpoint
Factory function to create an Endpoint object.
4.178916
3.892404
1.073608
ts = ts if ts is not None else time.time() return int(ts * 1000 * 1000)
def make_timestamp(ts: OptTs = None) -> int
Create a zipkin timestamp in microseconds, or convert an existing one from seconds. Useful when the user supplies a ts from a time.time() call.
3.053584
2.226197
1.371659
headers = { TRACE_ID_HEADER: context.trace_id, SPAN_ID_HEADER: context.span_id, FLAGS_HEADER: '0', SAMPLED_ID_HEADER: '1' if context.sampled else '0', } if context.parent_id is not None: headers[PARENT_ID_HEADER] = context.parent_id return headers
def make_headers(context: TraceContext) -> Headers
Creates dict with zipkin headers from supplied trace context.
2.115369
1.977107
1.069932
# b3={TraceId}-{SpanId}-{SamplingState}-{ParentSpanId} c = context # encode sampled flag if c.debug: sampled = 'd' elif c.sampled: sampled = '1' else: sampled = '0' params = [c.trace_id, c.span_id, sampled] # type: List[str] if c.parent_id is not None: ...
def make_single_header(context: TraceContext) -> Headers
Creates dict with zipkin single header format.
4.188604
3.990451
1.049657
# TODO: add validation for trace_id/span_id/parent_id # normalize header names just in case someone passed regular dict # instead dict with case insensitive keys headers = {k.lower(): v for k, v in headers.items()} required = (TRACE_ID_HEADER.lower(), SPAN_ID_HEADER.lower()) has_b3 = all(...
def make_context(headers: Headers) -> Optional[TraceContext]
Converts available headers to a TraceContext; if the headers mapping does not contain zipkin headers, the function returns None.
3.053705
3.138375
0.973021
def limited_filter(k: str, v: Any) -> bool: return k not in keys or v is not None # type: ignore def full_filter(k: str, v: Any) -> bool: return v is not None f = limited_filter if keys is not None else full_filter return {k: v for k, v in data.items() if f(k, v)}
def filter_none(data: Dict[str, Any], keys: OptKeys = None) -> Dict[str, Any]
Filter out keys with None values from a dict. The check occurs only at the root level. If a list of keys is specified, the filter applies only to those keys.
2.696413
2.67354
1.008555
d = rhypo d[d <= 15.0] = 15.0 return C['a3'] * np.log10(d)
def _compute_term_3(self, C, rhypo)
Compute term 3 in equation 2, page 462. Distances are clipped at 15 km (as per Ezio Faccioli's personal communication).
7.046542
4.836496
1.456952
# for rock values the site term is zero site_term = np.zeros_like(vs30) # hard soil site_term[(vs30 >= 360) & (vs30 < 800)] = C['aB'] # medium soil site_term[(vs30 >= 180) & (vs30 < 360)] = C['aC'] # soft soil site_term[vs30 < 180] = C['aD'] ...
def _compute_site_term(self, C, vs30)
Compute site term as a function of vs30: 4th, 5th and 6th terms in equation 2 page 462.
2.911491
2.922841
0.996117
if rake > -120.0 and rake <= -60.0: return C['aN'] elif rake > 30.0 and rake <= 150.0: return C['aR'] else: return C['aS']
def _compute_faulting_style_term(self, C, rake)
Compute faulting style term as a function of rake angle value as given in equation 5 page 465.
3.686196
3.693901
0.997914
mean = (self._compute_term_1_2(C, mag) + self._compute_term_3(C, dists.rhypo) + self._compute_site_term(C, vs30) + self._compute_faulting_style_term(C, rake)) # convert from cm/s**2 to g for SA and from m/s**2 to g for PGA (PGV # is alrea...
def _compute_mean(self, C, mag, dists, vs30, rake, imt)
Compute mean value for PGV, PGA and displacement response spectrum, as given in equation 2, page 462, with the addition of the faulting style term as given in equation 5, page 465. Also converts displacement response spectrum values to SA.
3.85306
3.586135
1.074433
stddevs = [] for stddev_type in stddev_types: assert stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES stddevs.append(np.log(10 ** C['sigma']) + np.zeros(num_sites)) return stddevs
def _get_stddevs(self, C, stddev_types, num_sites)
Return total standard deviation.
3.235297
3.267241
0.990223
cmaker = ContextMaker(rupture.tectonic_region_type, [gsim]) gc = GmfComputer(rupture, sites, [str(imt) for imt in imts], cmaker, truncation_level, correlation_model) res, _sig, _eps = gc.compute(gsim, realizations, seed) return {imt: res[imti] for imti, imt in enumerate(gc.imts...
def ground_motion_fields(rupture, sites, imts, gsim, truncation_level, realizations, correlation_model=None, seed=None)
Given an earthquake rupture, the ground motion field calculator computes ground shaking over a set of sites, by randomly sampling a ground shaking intensity model. A ground motion field represents a possible 'realization' of the ground shaking due to an earthquake rupture. .. note:: This calculat...
5.306162
5.995108
0.885082
try: # read the seed from self.rupture.serial seed = seed or self.rupture.serial except AttributeError: pass if seed is not None: numpy.random.seed(seed) result = numpy.zeros((len(self.imts), len(self.sids), num_events), F32) sig = nu...
def compute(self, gsim, num_events, seed=None)
:param gsim: a GSIM instance :param num_events: the number of seismic events :param seed: a random seed or None :returns: a 32 bit array of shape (num_imts, num_sites, num_events) and two arrays with shape (num_imts, num_events): sig for stddev_inter and eps f...
3.34285
2.946965
1.134337
rctx = getattr(self.rupture, 'rupture', self.rupture) if seed is not None: numpy.random.seed(seed) dctx = self.dctx.roundup(gsim.minimum_distance) if self.truncation_level == 0: assert self.correlation_model is None mean, _stddevs = gsim.get_m...
def _compute(self, seed, gsim, num_events, imt)
:param seed: a random seed or None if the seed is already set :param gsim: a GSIM instance :param num_events: the number of seismic events :param imt: an IMT instance :returns: (gmf(num_sites, num_events), stddev_inter(num_events), epsilons(num_events))
2.822967
2.692907
1.048297
if os.path.exists(oqdata): sys.exit('%s exists already' % oqdata) if '://' in archive: # get the zip archive from an URL resp = requests.get(archive) _, archive = archive.rsplit('/', 1) with open(archive, 'wb') as f: f.write(resp.content) if not os.pa...
def restore(archive, oqdata)
Build a new oqdata directory from the data contained in the zip archive
3.225163
3.14822
1.02444
return (C["c2"] * rhypo) + (C["c3"] * np.log10(rhypo))
def _compute_distance_term(self, C, rhypo)
Returns the distance scaling term
5.40274
5.098066
1.059763
epsilon = rhypo - (4.853 + 1.347E-6 * (mag ** 8.163)) rjb = np.zeros_like(rhypo) idx = epsilon >= 3. rjb[idx] = np.sqrt((epsilon[idx] ** 2.) - 9.0) rjb[rjb < 0.0] = 0.0 return rjb
def rhypo_to_rjb(rhypo, mag)
Converts hypocentral distance to an equivalent Joyner-Boore distance dependent on the magnitude
4.577103
4.881083
0.937723
# Convert rhypo to rrup rrup = rhypo_to_rrup(dists.rhypo, rup.mag) mean = (self._get_magnitude_scaling_term(C, rup.mag) + self._get_distance_scaling_term(C, rup.mag, rrup) + self._get_style_of_faulting_term(C, rup.rake) + self._get_site_am...
def _compute_mean(self, C, rup, dists, sites, imt)
Returns the mean ground motion acceleration and velocity
3.45773
3.499865
0.987961
# List must be in following order p_n = [] # Rjb # Note that Rjb must be clipped at 0.1 km rjb = rhypo_to_rjb(dists.rhypo, rup.mag) rjb[rjb < 0.1] = 0.1 p_n.append(self._get_normalised_term(np.log10(rjb), self....
def get_pn(self, rup, sites, dists, sof)
Normalise the input parameters within their upper and lower defined range
2.643449
2.587291
1.021705
dtlist = [(imt, numpy.float32) for imt in sorted_imts] imt_dt = numpy.dtype(dtlist) return numpy.dtype([(str(gsim), imt_dt) for gsim in sorted_gsims])
def gsim_imt_dt(sorted_gsims, sorted_imts)
Build a numpy dtype as a nested record with keys 'idx' and nested (gsim, imt). :param sorted_gsims: a list of GSIM instances, sorted lexicographically :param sorted_imts: a list of intensity measure type strings
2.992019
3.973702
0.752955
# notation from http://en.wikipedia.org/wiki/Truncated_normal_distribution. # given that mu = 0 and sigma = 1, we have alpha = a and beta = b. # "CDF" in comments refers to cumulative distribution function # of non-truncated distribution with that mu and sigma values. # assume symmetric trunc...
def _truncnorm_sf(truncation_level, values)
Survival function for truncated normal distribution. Assumes zero mean, standard deviation equal to one and symmetric truncation. :param truncation_level: Positive float number representing the truncation on both sides around the mean, in units of sigma. :param values: Numpy ar...
5.294886
5.302577
0.998549
with warnings.catch_warnings(): warnings.simplefilter("ignore") # avoid RuntimeWarning: divide by zero encountered in log return numpy.log(values)
def to_distribution_values(self, values)
Returns numpy array of natural logarithms of ``values``.
4.40253
3.576656
1.230907
for key in (ADMITTED_STR_PARAMETERS + ADMITTED_FLOAT_PARAMETERS + ADMITTED_SET_PARAMETERS): try: val = getattr(self.gmpe, key) except AttributeError: pass else: setattr(self, key, val)
def set_parameters(self)
Combines the parameters of the GMPE provided at the construction level with the ones originally assigned to the backbone modified GMPE.
5.15016
4.331088
1.189115
table = table.strip().splitlines() header = table.pop(0).split() if not header[0].upper() == "IMT": raise ValueError('first column in a table must be IMT') coeff_names = header[1:] for row in table: row = row.split() imt_name = row[0]....
def _setup_table_from_str(self, table, sa_damping)
Builds the input tables from a string definition
3.103283
3.08874
1.004708
return (C["r1"] + C["r2"] * mag) *\ np.log(np.sqrt(rrup ** 2. + C["h1"] ** 2.))
def get_distance_term(self, C, rrup, mag)
Returns distance scaling term
4.900184
4.884715
1.003167
stddevs = [] zeros_array = np.zeros(nsites) for stddev in stddev_types: assert stddev in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev == const.StdDev.TOTAL: stddevs.append(np.sqrt(C["tau"] ** 2. + C["phi"] ** 2.) + ...
def get_stddevs(self, C, nsites, stddev_types)
Returns the standard deviations
2.208638
2.244793
0.983894
if imt.period < 0.2: return np.log(10**0.23) elif imt.period > 1.0: return np.log(10**0.27) else: return np.log(10**(0.23 + (imt.period - 0.2)/0.8 * 0.04))
def get_sigma(imt)
Return the value of the total sigma :param float imt: An :class:`openquake.hazardlib.imt.IMT` instance :returns: A float representing the total sigma value
2.899735
3.179808
0.911921
for field, type_info in fields_spec.items(): has_default = not isinstance(type_info, type) if field not in config and not has_default: raise RuntimeError( "Configuration not complete. %s missing" % field)
def check_config(self, config, fields_spec)
Check that `config` has each field in `fields_spec` if a default has not been provided.
4.711301
4.303918
1.094654
defaults = dict([(f, d) for f, d in fields_spec.items() if not isinstance(d, type)]) for field, default_value in defaults.items(): if field not in config: config[field] = default_value
def set_defaults(self, config, fields_spec)
Set default values taken from `fields_spec` into the `config` dictionary
3.001233
3.015491
0.995272
def class_decorator(class_obj): original_method = getattr(class_obj, method_name) if sys.version[0] == '2': # Python 2 original_method = original_method.im_func def caller(fn, obj, catalogue, config=None, *args, **kwargs): config = c...
def add(self, method_name, completeness=False, **fields)
Class decorator. Decorate `method_name` by adding a call to `set_defaults` and `check_config`. Then, save into the registry a callable function with the same signature as the original method. :param str method_name: the method to decorate :param bool completeness: ...
3.322329
3.111795
1.067657
def dec(fn): if completeness: def fn_with_config_and_c( catalogue, config, completeness_table=None): return fn(catalogue, completeness_table, **config) fn_with_config = fn_with_config_and_c else: ...
def add_function(self, completeness=False, **fields)
Function decorator. Decorate a function by adding a call to `set_defaults` and `check_config`. Then, save into the registry a callable function with the same signature as the original method. :param fields: a dictionary of field spec, e.g. time_bin=numpy.float, ...
2.636609
2.695635
0.978103
C = self.COEFFS[imt] mean = (np.log(self.get_magnitude_term(C, rup) + self.get_distance_term(C, dists.rrup)) + self.get_site_amplification(C, sites)) stddevs = self.get_stddevs(C, sites.vs30.shape, rup.mag, stddev_types) return mean, stddev...
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types)
See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values.
3.139919
3.177233
0.988256
b0, stress_drop = self._get_sof_terms(C, rup.rake) if rup.mag <= C["m1"]: return b0 else: # Calculate moment (equation 5) m_0 = 10.0 ** (1.5 * rup.mag + 16.05) # Get stress-drop scaling (equation 6) if rup.mag > C["m2"]: ...
def get_magnitude_term(self, C, rup)
Returns the magnitude scaling term in equation 3
4.569621
4.376541
1.044117
f_p = C["c1"] * rrup idx = np.logical_and(rrup > self.CONSTANTS["r1"], rrup <= self.CONSTANTS["r2"]) f_p[idx] = (C["c1"] * self.CONSTANTS["r1"]) +\ C["c2"] * (rrup[idx] - self.CONSTANTS["r1"]) idx = rrup > self.CONSTANTS["r2"] f_p...
def get_distance_term(self, C, rrup)
Returns the distance scaling term in equation 7
2.016462
1.985432
1.015629
if rake >= 45.0 and rake <= 135.0: # Reverse faulting return C["b0R"], C["b1R"] elif rake <= -45. and rake >= -135.0: # Normal faulting return C["b0N"], C["b1N"] else: # Strike slip return C["b0SS"], C["b1SS"]
def _get_sof_terms(self, C, rake)
Returns the style-of-faulting scaling parameters
3.212344
2.922731
1.09909
# Gets delta normalised z1 dz1 = sites.z1pt0 - np.exp(self._get_lnmu_z1(sites.vs30)) f_s = C["c5"] * dz1 # Calculates site amplification term f_s[dz1 > self.CONSTANTS["dz1ref"]] = (C["c5"] * self.CONSTANTS["dz1ref"]) ...
def get_site_amplification(self, C, sites)
Returns the site amplification term
4.469441
4.34065
1.029671
tau = self._get_tau(C, mag) + np.zeros(nsites) phi = self._get_phi(C, mag) + np.zeros(nsites) stddevs = [] for stddev in stddev_types: assert stddev in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES if stddev == const.StdDev.TOTAL: stddevs.appe...
def get_stddevs(self, C, nsites, mag, stddev_types)
Returns the standard deviations
1.958251
1.97443
0.991806
if mag < 6.5: return C["tau1"] elif mag < 7.: return C["tau1"] + (C["tau2"] - C["tau1"]) * ((mag - 6.5) / 0.5) else: return C["tau2"]
def _get_tau(self, C, mag)
Returns magnitude dependent inter-event standard deviation (tau) (equation 14)
2.42675
2.277428
1.065566
if mag < 5.5: return C["phi1"] elif mag < 5.75: return C["phi1"] + (C["phi2"] - C["phi1"]) * ((mag - 5.5) / 0.25) else: return C["phi2"]
def _get_phi(self, C, mag)
Returns the magnitude dependent intra-event standard deviation (phi) (equation 15)
2.250347
2.155106
1.044193
delta = 0.00750 * 10 ** (0.507 * mag) # computing R for different values of mag if mag < 6.5: R = np.sqrt(dists.rhypo ** 2 + delta ** 2) else: R = np.sqrt(dists.rrup ** 2 + delta ** 2) mean = ( # 1st term C['c1'] + C['c2...
def _compute_mean(self, C, g, mag, hypo_depth, dists, imt)
Compute mean according to the equation in Table 2, page 2275.
3.378587
3.270968
1.032901
with tempfile.TemporaryFile(mode='w+') as stream: ps = pstats.Stats(pstatfile, stream=stream) ps.sort_stats('cumtime') ps.print_stats(n) stream.seek(0) lines = list(stream) for i, line in enumerate(lines): if line.startswith(' ncalls'): break ...
def get_pstats(pstatfile, n)
Return profiling information as an RST table. :param pstatfile: path to a .pstat file :param n: the maximum number of stats to retrieve
4.277205
4.296143
0.995592
hcalc = base.calculators(readinput.get_oqparam(job_haz), calc_id) hcalc.run(concurrent_tasks=concurrent_tasks, pdb=pdb, exports=exports, **params) hc_id = hcalc.datastore.calc_id rcalc_id = logs.init(level=getattr(logging, loglevel.upper())) oq = readinput.get_oqparam(job_risk, hc...
def run2(job_haz, job_risk, calc_id, concurrent_tasks, pdb, loglevel, exports, params)
Run both hazard and risk, one after the other
4.420427
4.192934
1.054256
dbserver.ensure_on() if param: params = oqvalidation.OqParam.check( dict(p.split('=', 1) for p in param.split(','))) else: params = {} if slowest: prof = cProfile.Profile() stmt = ('_run(job_ini, concurrent_tasks, pdb, loglevel, hc, ' 'exp...
def run(job_ini, slowest=False, hc=None, param='', concurrent_tasks=None, exports='', loglevel='info', pdb=None)
Run a calculation bypassing the database layer
4.468775
4.507727
0.991359
''' Return an ordered dictionary with the available classes in the scalerel submodule with classes that derives from `base_class`, keyed by class name. ''' gsims = {} for fname in os.listdir(os.path.dirname(__file__)): if fname.endswith('.py'): modname, _ext = os.path.spl...
def _get_available_class(base_class)
Return an ordered dictionary with the available classes in the scalerel submodule that derive from `base_class`, keyed by class name.
3.17078
1.977244
1.603636
if host is None: host_cores = self.host_cores else: host_cores = [hc for hc in self.host_cores if hc[0] == host] lst = [] for host, _ in host_cores: ready = general.socket_ready((host, self.ctrl_port)) lst.append((host, 'running' i...
def status(self, host=None)
:returns: a list of pairs (hostname, 'running'|'not-running')
4.331424
3.486223
1.24244
if streamer and not general.socket_ready(self.task_in_url): # started self.streamer = multiprocessing.Process( target=_streamer, args=(self.master_host, self.task_in_port, self.task_out_port)) self.streamer.start() starting = [] f...
def start(self, streamer=False)
Start multiple workerpools, possibly on remote servers via ssh, and possibly a streamer, depending on the `streamercls`. :param streamer: if True, starts a streamer with multiprocessing.Process
4.401027
4.478606
0.982678
stopped = [] for host, _ in self.host_cores: if self.status(host)[0][1] == 'not-running': print('%s not running' % host) continue ctrl_url = 'tcp://%s:%s' % (host, self.ctrl_port) with z.Socket(ctrl_url, z.zmq.REQ, 'connect') a...
def stop(self)
Send a "stop" command to all worker pools
5.24889
4.934224
1.063772
setproctitle('oq-zworker') with sock: for cmd, args, mon in sock: parallel.safely_call(cmd, args, mon)
def worker(self, sock)
:param sock: a zeromq.Socket of kind PULL receiving (cmd, args)
22.181215
17.620947
1.258798
setproctitle('oq-zworkerpool %s' % self.ctrl_url[6:]) # strip tcp:// # start workers self.workers = [] for _ in range(self.num_workers): sock = z.Socket(self.task_out_port, z.zmq.PULL, 'connect') proc = multiprocessing.Process(target=self.worker, args=(s...
def start(self)
Start worker processes and a control loop
4.592849
4.239015
1.083471
for sock in self.workers: os.kill(sock.pid, signal.SIGTERM) return 'WorkerPool %s stopped' % self.ctrl_url
def stop(self)
Send a SIGTERM to all worker processes
9.125393
7.830828
1.165316
for sock in self.workers: os.kill(sock.pid, signal.SIGKILL) return 'WorkerPool %s killed' % self.ctrl_url
def kill(self)
Send a SIGKILL to all worker processes
8.73343
7.444025
1.173213
address = address or (config.dbserver.host, DBSERVER_PORT) return 'running' if socket_ready(address) else 'not-running'
def get_status(address=None)
Check if the DbServer is up. :param address: pair (hostname, port) :returns: 'running' or 'not-running'
10.392028
7.95271
1.306728
if not config.dbserver.multi_user: remote_server_path = logs.dbcmd('get_path') if different_paths(server_path, remote_server_path): return('You are trying to contact a DbServer from another' ' instance (got %s, expected %s)\n' 'Check the configu...
def check_foreign()
Check that the DbServer we are talking to is the right one
10.174126
8.537409
1.191711
if get_status() == 'not-running': if config.dbserver.multi_user: sys.exit('Please start the DbServer: ' 'see the documentation for details') # otherwise start the DbServer automatically; NB: I tried to use # multiprocessing.Process(target=run_server).sta...
def ensure_on()
Start the DbServer if it is off
7.379638
6.568725
1.123451
if dbhostport: # assume a string of the form "dbhost:port" dbhost, port = dbhostport.split(':') addr = (dbhost, int(port)) else: addr = (config.dbserver.listen, DBSERVER_PORT) # create the db directory if needed dirname = os.path.dirname(dbpath) if not os.path.exists(d...
def run_server(dbpath=os.path.expanduser(config.dbserver.file), dbhostport=None, loglevel='WARN')
Run the DbServer on the given database file and port. If not given, use the settings in openquake.cfg.
5.676618
5.706903
0.994693
# give a nice name to the process w.setproctitle('oq-dbserver') dworkers = [] for _ in range(self.num_workers): sock = z.Socket(self.backend, z.zmq.REP, 'connect') threading.Thread(target=self.dworker, args=(sock,)).start() dworkers.append(so...
def start(self)
Start database worker threads
5.58385
5.494311
1.016297
if ZMQ: logging.warning(self.master.stop()) z.context.term() self.db.close()
def stop(self)
Stop the DbServer and the zworkers if any
19.93317
14.152339
1.408472
mean = (C['c1'] + self._compute_magnitude_term(C, rup) + self._compute_distance_term(C, rup, rjb)) return mean
def _compute_mean(self, C, rup, rjb)
Compute mean value according to equation 30, page 1021.
3.822392
3.587911
1.065353
return C['c2'] * (rup.mag - 8.0) + C['c3'] * (rup.mag - 8.0) ** 2
def _compute_magnitude_term(self, C, rup)
This computes the term f1 in equation 8 of Drouet & Cotton (2015)
3.634901
3.206273
1.133684
return (C['c4'] + C['c5'] * rup.mag) * np.log( np.sqrt(rjb ** 2. + C['c6'] ** 2.)) + C['c7'] * rjb
def _compute_distance_term(self, C, rup, rjb)
This computes the term f2 in equation 8 of Drouet & Cotton (2015)
3.206475
3.131918
1.023805
cutoff = 6.056877878 rhypo = dists.rhypo.copy() rhypo[rhypo <= cutoff] = cutoff return C['c3'] * np.log(rhypo) + C['c4'] * rhypo
def _compute_term_3_4(self, dists, C)
Compute term 3 and 4 in equation 1 page 1.
6.203712
5.828004
1.064466
S = self._get_site_type_dummy_variables(sites) return (C['c5'] * S)
def _get_site_amplification(self, sites, imt, C)
Compute the fifth term of the equation (1), p. 1: ``c5 * S``
18.062935
9.11095
1.982552
S = np.zeros_like(sites.vs30) # S=0 for rock sites, S=1 otherwise pag 1. idxS = (sites.vs30 < 760.0) S[idxS] = 1 return S
def _get_site_type_dummy_variables(self, sites)
Get site type dummy variables, ``S`` (for rock and soil sites)
8.147133
6.186584
1.316903
mean = (self._compute_term_1_2(rup, C) + self._compute_term_3_4(dists, C) + self._get_site_amplification(sites, imt, C)) # convert from m/s**2 to g for PGA and from m/s to g for PSV # and divided this value for the ratio(SA_larger/SA_geo_mean) i...
def _compute_mean(self, C, rup, dists, sites, imt)
Compute mean value for PGA and pseudo-velocity response spectrum, as given in equation 1. Also converts pseudo-velocity response spectrum values to SA, using: SA = (PSV * W) / ratio(SA_larger/SA_geo_mean), where W = 2 * pi / T and T = period (sec)
6.297293
4.357153
1.445277
return C["c"] * np.log10(np.sqrt((rhypo ** 2.) + (C["h"] ** 2.))) +\ (C["d"] * rhypo)
def _compute_distance_scaling(self, C, rhypo)
Returns the distance scaling term accounting for geometric and anelastic attenuation
5.363792
5.256295
1.020451
site_term = np.zeros(len(vs30), dtype=float) # For soil sites add on the site coefficient site_term[vs30 < 760.0] = C["e"] return site_term
def _compute_site_scaling(self, C, vs30)
Returns the site scaling term as a simple coefficient
6.409783
5.925246
1.081775
if param == 'rrup': dist = rupture.surface.get_min_distance(mesh) elif param == 'rx': dist = rupture.surface.get_rx_distance(mesh) elif param == 'ry0': dist = rupture.surface.get_ry0_distance(mesh) elif param == 'rjb': dist = rupture.surface.get_joyner_boore_distance...
def get_distances(rupture, mesh, param)
:param rupture: a rupture :param mesh: a mesh of points or a site collection :param param: the kind of distance to compute (default rjb) :returns: an array of distances from the given mesh
2.634681
2.628232
1.002454