Columns: code (string, 51 to 2.34k characters) · docstring (string, 11 to 171 characters)
def split(self, amt):
    ratio = abs(amt / self.qty)
    t1 = Trade(self.tid, self.ts, amt, self.px,
               fees=ratio * self.fees, **self.kwargs)
    t2 = Trade(self.tid, self.ts, self.qty - amt, self.px,
               fees=(1. - ratio) * self.fees, **self.kwargs)
    return [t1, t2]
Return two trades: one with the specified amt and the other with self.qty - amt
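A minimal usage sketch, assuming a Trade can be constructed as Trade(tid, ts, qty, px, fees=...) with the attributes the method reads (tid, ts, qty, px, fees, kwargs); the constructor itself is not shown above, so the values are illustrative only.

# Hypothetical example: split a 100-lot trade into a 30-lot and a 70-lot piece,
# apportioning fees pro rata (30% / 70%).
trade = Trade('T1', '2021-01-04T10:00:00', 100, 12.5, fees=2.0)
small, rest = trade.split(30)
# small.qty == 30, small.fees == 0.6
# rest.qty  == 70, rest.fees  == 1.4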
def firethread(f):
    @functools.wraps(f)
    def callmeth(*args, **kwargs):
        thr = worker(f, *args, **kwargs)
        return thr
    return callmeth
A decorator for making a function fire a thread.
def httprettified(test):
    "A decorator for tests that use HTTPretty."
    def decorate_class(klass):
        for attr in dir(klass):
            if not attr.startswith('test_'):
                continue
            attr_value = getattr(klass, attr)
            if not hasattr(attr_value, "__call__"):
                continue
            setattr(klass, attr, decorate_callable(attr_value))
        return klass

    def decorate_callable(test):
        @functools.wraps(test)
        def wrapper(*args, **kw):
            httpretty.reset()
            httpretty.enable()
            try:
                return test(*args, **kw)
            finally:
                httpretty.disable()
        return wrapper

    if isinstance(test, ClassTypes):
        return decorate_class(test)
    return decorate_callable(test)
A decorator for tests that use HTTPretty
def Xor(bytestr, key):
    from builtins import bytes
    precondition.AssertType(bytestr, bytes)
    bytestr = bytes(bytestr)
    return bytes([byte ^ key for byte in bytestr])
Returns a `bytes` object where each byte has been xored with key.
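A brief self-contained sketch of the same single-byte XOR idea, without the precondition/builtins dependencies shown above, to illustrate the expected round-trip behaviour:

# Standalone illustration: XORing with the same one-byte key twice
# returns the original bytes, since (b ^ k) ^ k == b.
def xor_bytes(data: bytes, key: int) -> bytes:
    return bytes(b ^ key for b in data)

masked = xor_bytes(b"secret", 0x2a)
assert xor_bytes(masked, 0x2a) == b"secret"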
def table_to_root(table, filename, **kwargs): import root_numpy root_numpy.array2root(table.as_array(), filename, **kwargs)
Write a Table to a ROOT file
def validate_image_col_row(image, col, row):
    SPLIT_LIMIT = 99
    try:
        col = int(col)
        row = int(row)
    except (TypeError, ValueError):
        raise ValueError('columns and rows values could not be cast to integer.')
    if col < 2:
        raise ValueError('Number of columns must be between 2 and {} '
                         '(you asked for {}).'.format(SPLIT_LIMIT, col))
    if row < 2:
        raise ValueError('Number of rows must be between 2 and {} '
                         '(you asked for {}).'.format(SPLIT_LIMIT, row))
Basic checks for the columns and rows values
def bind(cls, app, *paths, methods=None, name=None, view=None): if view is None: app.ps.admin.register(cls) if not paths: paths = ('%s/%s' % (app.ps.admin.cfg.prefix, name or cls.name),) cls.url = paths[0] return super(AdminHandler, cls).bind(app, *paths, methods=methods, name=name, view=view)
Connect to admin interface and application.
def _get_storage_key(self, identified_with, identifier): return ':'.join(( self.key_prefix, identified_with.name, self.hash_identifier(identified_with, identifier), ))
Get key string for the given user identifier in a consistent manner.
def _decode_sense_packet(self, version, packet): data = self._sense_packet_to_data(packet) offset = 4 i = 0 datalen = len(data) - offset - 6 temp_count = int(datalen / 2) temp = [] for i in range(temp_count): temp_index = i * 2 + offset temp.append(self._decode_temp(data[temp_index], data[temp_index + 1])) self._debug(PROP_LOGLEVEL_DEBUG, "T: " + str(temp)) for sensor in self._sense_sensor: if (sensor.sensor_type == PROP_SENSOR_TEMPERATURE): sensor.value = temp[sensor.index] elif (sensor.sensor_type == PROP_SENSOR_RAW): sensor.value = packet self._debug(PROP_LOGLEVEL_DEBUG, str(self))
Decode a sense packet into the list of sensors.
def run(self, name, *args): assert isinstance(name, string_types) name = self._aliases.get(name, name) action = self._actions_dict.get(name, None) if not action: raise ValueError("Action `{}` doesn't exist.".format(name)) if not name.startswith('_'): logger.debug("Execute action `%s`.", name) return action.callback(*args)
Run an action as specified by its name.
def make_url_absolute(self, url, resolve_base=False): if self.config['url']: if resolve_base: ubody = self.doc.unicode_body() base_url = find_base_url(ubody) if base_url: return urljoin(base_url, url) return urljoin(self.config['url'], url) else: return url
Make url absolute using previous request url as base url.
def prt_hier(self, prt=sys.stdout): objwr = WrHierGO(self.gosubdag, **self.kws) assert self.goids, "NO VALID GO IDs WERE PROVIDED" if 'up' not in objwr.usrset: for goid in self.goids: objwr.prt_hier_down(goid, prt) else: objwr.prt_hier_up(self.goids, prt)
Write hierarchy below specified GO IDs.
def split_spec(spec, sep):
    parts = spec.rsplit(sep, 1)
    spec_start = parts[0].strip()
    spec_end = ''
    if len(parts) == 2:
        spec_end = parts[-1].strip()
    return spec_start, spec_end
Split a spec by separator and return stripped start and end parts.
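A quick usage sketch based solely on the function above; the input strings are made up for illustration:

# Splitting on the right-most separator and stripping whitespace:
split_spec('requests >= 2.0', '>=')   # -> ('requests', '2.0')
split_spec('requests', '>=')          # -> ('requests', '')  (no separator present)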
def load_unicode(self, resource_path): resource_content = pkg_resources.resource_string(self.module_name, resource_path) return resource_content.decode('utf-8')
Gets the content of a resource
def _create_function(name, doc=""): def _(col): spark_ctx = SparkContext._active_spark_context java_ctx = (getattr(spark_ctx._jvm.com.sparklingpandas.functions, name) (col._java_ctx if isinstance(col, Column) else col)) return Column(java_ctx) _.__name__ = name _.__doc__ = doc return _
Create an aggregator function by name
def restore_type(self, dtype, sample=None): if pdc.is_bool_dtype(dtype): return 'boolean' elif pdc.is_datetime64_any_dtype(dtype): return 'datetime' elif pdc.is_integer_dtype(dtype): return 'integer' elif pdc.is_numeric_dtype(dtype): return 'number' if sample is not None: if isinstance(sample, (list, tuple)): return 'array' elif isinstance(sample, datetime.date): return 'date' elif isinstance(sample, isodate.Duration): return 'duration' elif isinstance(sample, dict): return 'object' elif isinstance(sample, six.string_types): return 'string' elif isinstance(sample, datetime.time): return 'time' return 'string'
Restore type from Pandas
def add_exit(self):
    self.completable.append("quit")
    self.completable.append("exit")
    self.descrip["quit"] = "Exits the program"
    self.descrip["exit"] = "Exits the program"
    self.command_tree.add_child(CommandBranch("quit"))
    self.command_tree.add_child(CommandBranch("exit"))
    self.command_param["quit"] = ""
    self.command_param["exit"] = ""
adds the quit/exit commands to the application
def _ar_data(self, ar): if not ar: return {} if ar.portal_type == "AnalysisRequest": return {'obj': ar, 'id': ar.getId(), 'date_received': self.ulocalized_time( ar.getDateReceived(), long_format=0), 'date_sampled': self.ulocalized_time( ar.getDateSampled(), long_format=True), 'url': ar.absolute_url(), } elif ar.portal_type == "ReferenceSample": return {'obj': ar, 'id': ar.id, 'date_received': self.ulocalized_time( ar.getDateReceived(), long_format=0), 'date_sampled': self.ulocalized_time( ar.getDateSampled(), long_format=True), 'url': ar.absolute_url(), } else: return {'obj': ar, 'id': ar.id, 'date_received': "", 'date_sampled': "", 'url': ar.absolute_url(), }
Returns a dict that represents the analysis request
def load(self, f, skip):
    array = self.get()
    counter = 0
    counter_limit = array.size
    convert = array.dtype.type
    while counter < counter_limit:
        line = f.readline()
        words = line.split()
        for word in words:
            if counter >= counter_limit:
                raise FileFormatError("Wrong array data: too many values.")
            if not skip:
                array.flat[counter] = convert(word)
            counter += 1
Load the array data from a file-like object
def token_network_leave( self, registry_address: PaymentNetworkID, token_address: TokenAddress, ) -> List[NettingChannelState]: if not is_binary_address(registry_address): raise InvalidAddress('registry_address must be a valid address in binary') if not is_binary_address(token_address): raise InvalidAddress('token_address must be a valid address in binary') if token_address not in self.get_tokens_list(registry_address): raise UnknownTokenAddress('token_address unknown') token_network_identifier = views.get_token_network_identifier_by_token_address( chain_state=views.state_from_raiden(self.raiden), payment_network_id=registry_address, token_address=token_address, ) connection_manager = self.raiden.connection_manager_for_token_network( token_network_identifier, ) return connection_manager.leave(registry_address)
Close all channels and wait for settlement.
def _execute_config_show(self, show_command, delay_factor=.1): rpc_command = '<CLI><Configuration>{show_command}</Configuration></CLI>'.format( show_command=escape_xml(show_command) ) response = self._execute_rpc(rpc_command, delay_factor=delay_factor) raw_response = response.xpath('.//CLI/Configuration')[0].text return raw_response.strip() if raw_response else ''
Executes a configuration show-type command.
def decode_payload_as(self, cls):
    s = bytes(self.payload)
    self.payload = cls(s, _internal=1, _underlayer=self)
    pp = self
    while pp.underlayer is not None:
        pp = pp.underlayer
    self.payload.dissection_done(pp)
Reassembles the payload and decodes it using another packet class
def append(self, value): if not self.need_free: raise ValueError("Stack is read-only") if not isinstance(value, X509): raise TypeError('StackOfX509 can contain only X509 objects') sk_push(self.ptr, libcrypto.X509_dup(value.cert))
Adds certificate to stack
def section_bif_lengths(neurites, neurite_type=NeuriteType.all): return map_sections(_section_length, neurites, neurite_type=neurite_type, iterator_type=Tree.ibifurcation_point)
Bifurcation section lengths in a collection of neurites
def commit(self): self.git.add('-A', '.') try: self.git.commit('-m', self.commit_msg) return True except sh.ErrorReturnCode_1: return False
git commit and return whether there were changes
def create_feature_template(self):
    fields = self.fields
    feat_schema = {}
    att = {}
    for fld in fields:
        self._globalIdField
        if not fld['name'] == self._objectIdField and not fld['name'] == self._globalIdField:
            att[fld['name']] = ''
    feat_schema['attributes'] = att
    feat_schema['geometry'] = ''
    return Feature(feat_schema)
creates a feature template
def _read_http_none(self, size, kind, flag): if any((int(bit, base=2) for bit in flag)): raise ProtocolError(f'HTTP/2: [Type {kind}] invalid format', quiet=True) data = dict( flags=None, payload=self._read_fileng(size - 9) or None, ) return data
Read HTTP packet with unsigned type.
def check_version(url=VERSION_URL):
    for line in get(url):
        if 'release:' in line:
            return line.split(':')[-1].strip(' \'"\r\n')
Returns the version string for the latest SDK.
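For illustration, a hedged sketch of the line format this parser handles; the real contents served at VERSION_URL are not shown above, so the example line is assumed:

# Hypothetical input line, e.g. from an SDK VERSION file:
line = "release: \"1.9.40\""
assert line.split(':')[-1].strip(' \'"\r\n') == "1.9.40"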
def run_latex_report(base, report_dir, section_info): out_name = "%s_recal_plots.tex" % base out = os.path.join(report_dir, out_name) with open(out, "w") as out_handle: out_tmpl = Template(out_template) out_handle.write(out_tmpl.render(sections=section_info)) start_dir = os.getcwd() try: os.chdir(report_dir) cl = ["pdflatex", out_name] child = subprocess.Popen(cl) child.wait() finally: os.chdir(start_dir)
Generate a PDF report with plots using LaTeX.
def remove_namespace(self, ns_uri): if not self.contains_namespace(ns_uri): return ni = self.__ns_uri_map.pop(ns_uri) for prefix in ni.prefixes: del self.__prefix_map[prefix]
Removes the indicated namespace from this set.
def _roads_extract(resp):
    try:
        j = resp.json()
    except:
        if resp.status_code != 200:
            raise googlemaps.exceptions.HTTPError(resp.status_code)
        raise googlemaps.exceptions.ApiError("UNKNOWN_ERROR",
                                             "Received a malformed response.")
    if "error" in j:
        error = j["error"]
        status = error["status"]
        if status == "RESOURCE_EXHAUSTED":
            raise googlemaps.exceptions._OverQueryLimit(status, error.get("message"))
        raise googlemaps.exceptions.ApiError(status, error.get("message"))
    if resp.status_code != 200:
        raise googlemaps.exceptions.HTTPError(resp.status_code)
    return j
Extracts a result from a Roads API HTTP response.
def _render_attributes(self, resource): attributes = {} attrs_to_ignore = set() for key, relationship in resource.__mapper__.relationships.items(): attrs_to_ignore.update(set( [column.name for column in relationship.local_columns]).union( {key})) if self.dasherize: mapped_fields = {x: dasherize(underscore(x)) for x in self.fields} else: mapped_fields = {x: x for x in self.fields} for attribute in self.fields: if attribute == self.primary_key: continue if attribute in attrs_to_ignore: raise AttributeError try: value = getattr(resource, attribute) if isinstance(value, datetime.datetime): attributes[mapped_fields[attribute]] = value.isoformat() else: attributes[mapped_fields[attribute]] = value except AttributeError: raise return attributes
Render the resources's attributes.
def _identifiers(self): for handle in self._storage_broker.iter_item_handles(): yield dtoolcore.utils.generate_identifier(handle)
Return iterable of dataset item identifiers.
def extend(self, workflow: 'SoS_Workflow') -> None:
    if not workflow.sections:
        return
    if not self.sections:
        self.sections = workflow.sections
        return
    section = workflow.sections[0]
    depends_idx = [
        idx for idx, stmt in enumerate(section.statements)
        if stmt[0] == ':' and stmt[1] == 'depends'
    ]
    if not depends_idx:
        section.statements.insert(0, [
            ':', 'depends', f"sos_step('{self.sections[-1].step_name()}')"
        ])
    else:
        section.statements[depends_idx[0]][2] = (
            section.statements[depends_idx[0]][2].strip() +
            (", " if section.statements[depends_idx[0]][2].strip() else "") +
            f"sos_step('{self.sections[-1].step_name()}')\n")
    self.sections.extend(workflow.sections)
Append another workflow to the existing one to create a combined workflow
def keys(): for admin in current_app.config['ADMIN_USERS']: try: db.get_db() keys = ApiKey.find_by_user(admin) except Exception as e: click.echo('ERROR: {}'.format(e)) else: for key in keys: click.echo('{:40} {}'.format(key.key, key.user))
List admin API keys.
def fire(self, sender=None, **params): keys = (_make_id(None), _make_id(sender)) results = [] for (_, key), callback in self.callbacks: if key in keys: results.append(callback(self, sender, **params)) return results
Fire callbacks from a ``sender``.
def disapprove(self, request, *args, **kwargs): self.object = self.get_object() success_url = self.get_success_url() self.object.delete() messages.success(self.request, self.success_message) return HttpResponseRedirect(success_url)
Disapproves the considered post and redirects the user to the success URL.
def RenderJson(self, pretty=False): steps = self._steps topdict = {} topdict['tropo'] = steps if pretty: try: json = jsonlib.dumps(topdict, indent=4, sort_keys=False) except TypeError: json = jsonlib.dumps(topdict) else: json = jsonlib.dumps(topdict) return json
Render a Tropo object into a JSON string.
def link(self): if self.linked: return self self.linked = True included_modules = [] for include in self.includes.values(): included_modules.append(include.link().surface) self.scope.add_surface('__includes__', tuple(included_modules)) self.scope.add_surface('__thrift_source__', self.thrift_source) for linker in LINKERS: linker(self.scope).link() self.scope.add_surface('loads', Deserializer(self.protocol)) self.scope.add_surface('dumps', Serializer(self.protocol)) return self
Link all the types in this module and all included modules.
def focus_next_sibling(self): mid = self.get_selected_mid() newpos = self._tree.next_sibling_position(mid) if newpos is not None: newpos = self._sanitize_position((newpos,)) self.body.set_focus(newpos)
focus the next sibling of the currently focused message in the thread tree
def _compute_sigma_0(self, C, mag, vs30measured): s1 = np.zeros_like(vs30measured, dtype=float) s2 = np.zeros_like(vs30measured, dtype=float) idx = vs30measured == 1 s1[idx] = C['s1mea'] s2[idx] = C['s2mea'] idx = vs30measured == 0 s1[idx] = C['s1est'] s2[idx] = C['s2est'] return self._compute_std_0(s1, s2, mag)
Equation 27, page 82.
def hash(self): hph = self.hparent.hash hpfilt = hashobj(self.hparent._filter) dhash = hashobj(hph + hpfilt) return dhash
The hash of a hierarchy child changes if the parent changes
def store_dcnm_subnet_dict(self, subnet_dict, direc): if direc == 'in': self.in_dcnm_subnet_dict = subnet_dict self.in_subnet_dict = self._parse_subnet(subnet_dict) else: self.out_dcnm_subnet_dict = subnet_dict self.out_subnet_dict = self._parse_subnet(subnet_dict)
Store the subnet attributes and dict.
def _update_params_on_kvstore(param_arrays, grad_arrays, kvstore, param_names): for index, pair in enumerate(zip(param_arrays, grad_arrays)): arg_list, grad_list = pair if grad_list[0] is None: continue name = param_names[index] kvstore.push(name, grad_list, priority=-index) kvstore.pull(name, arg_list, priority=-index)
Perform update of param_arrays from grad_arrays on kvstore.
def refund_payment(self): Money = MoneyMaker(self.currency) filter_kwargs = { 'transaction_id__startswith': 'ch_', 'payment_method': StripePayment.namespace, } for payment in self.orderpayment_set.filter(**filter_kwargs): refund = stripe.Refund.create(charge=payment.transaction_id) if refund['status'] == 'succeeded': amount = Money(refund['amount']) / Money.subunits OrderPayment.objects.create(order=self, amount=-amount, transaction_id=refund['id'], payment_method=StripePayment.namespace) del self.amount_paid if self.amount_paid: super(OrderWorkflowMixin, self).refund_payment()
Refund the payment using Stripe's refunding API.
async def handle_disconnect_callback(self): self.is_connected = False if self.disconnect_callback: self.disconnect_callback() if self.reconnect: self.logger.debug("Protocol disconnected...reconnecting") await self.setup() self.protocol.reset_cmd_timeout() if self.in_transaction: self.protocol.transport.write(self.active_packet) else: packet = self.protocol.format_packet(b"\x1e") self.protocol.transport.write(packet)
Reconnect automatically unless stopping.
def nodes_callback(self, data): for node_id, tags, coords in data: self.nodes[node_id] = tags
Callback for nodes with tags
def savePattern(self):
    if self.dev is None:
        return ''
    buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
    return self.write(buf)
Save internal RAM pattern to flash
def string(self, units: typing.Optional[str] = None) -> str: if not units: _units: str = self._units else: if not units.upper() in CustomPressure.legal_units: raise UnitsError("unrecognized pressure unit: '" + units + "'") _units = units.upper() val = self.value(units) if _units == "MB": return "%.0f mb" % val if _units == "HPA": return "%.0f hPa" % val if _units == "IN": return "%.2f inches" % val if _units == "MM": return "%.0f mmHg" % val raise ValueError(_units)
Return a string representation of the pressure, using the given units.
def class_error(self, input_data, targets, average=True, cache=None, prediction=False): if cache is not None: activations = cache else: activations = \ self.feed_forward(input_data, prediction=prediction) targets = targets.get().argmax(1) class_error = np.sum(activations.get().argmax(1) != targets) if average: class_error = float(class_error) / targets.shape[0] return class_error
Return the classification error rate
async def _on_heartbeat(self, update): name = update['service'] if name not in self.services: return with self._state_lock: self.services[name].heartbeat()
Receive a new heartbeat for a service.
def create_from_remote_file(self, group, snapshot=True, **args): import requests url = "http://snapshot.geneontology.org/annotations/{}.gaf.gz".format(group) r = requests.get(url, stream=True, headers={'User-Agent': get_user_agent(modules=[requests], caller_name=__name__)}) p = GafParser() results = p.skim(r.raw) return self.create_from_tuples(results, **args)
Creates from a remote GAF file for the given group
def find_by_reference_ids(reference_ids, connection=None, page_size=100, page_number=0, sort_by=DEFAULT_SORT_BY, sort_order=DEFAULT_SORT_ORDER): reference_ids = ','.join([str(i) for i in reference_ids]) return pybrightcove.connection.ItemResultSet( "find_playlists_by_reference_ids", Playlist, connection, page_size, page_number, sort_by, sort_order, reference_ids=reference_ids)
List playlists by specific reference_ids.
def origin_req_host(self): if self.history: return self.history[0].request.origin_req_host else: return scheme_host_port(self.url)[1]
Required by cookie handlers
def from_genesis(cls, base_db: BaseAtomicDB, genesis_params: Dict[str, HeaderParams], genesis_state: AccountState=None) -> 'BaseChain': genesis_vm_class = cls.get_vm_class_for_block_number(BlockNumber(0)) pre_genesis_header = BlockHeader(difficulty=0, block_number=-1, gas_limit=0) state = genesis_vm_class.build_state(base_db, pre_genesis_header) if genesis_state is None: genesis_state = {} apply_state_dict(state, genesis_state) state.persist() if 'state_root' not in genesis_params: genesis_params = assoc(genesis_params, 'state_root', state.state_root) elif genesis_params['state_root'] != state.state_root: raise ValidationError( "The provided genesis state root does not match the computed " "genesis state root. Got {0}. Expected {1}".format( state.state_root, genesis_params['state_root'], ) ) genesis_header = BlockHeader(**genesis_params) return cls.from_genesis_header(base_db, genesis_header)
Initializes the Chain from a genesis state.
def ndim(self) -> int: try: assert self._ndim is not None except (AssertionError, AttributeError): if len(self.variables) == 0: self._ndim = 0 else: self._ndim = self.variables[0].ndim finally: return self._ndim
Get number of dimensions.
def async_client(self): if not self._async_client: self._async_client = AlfAsyncClient( token_endpoint=self.config.get('OAUTH_TOKEN_ENDPOINT'), client_id=self.config.get('OAUTH_CLIENT_ID'), client_secret=self.config.get('OAUTH_CLIENT_SECRET') ) return self._async_client
Asynchronous OAuth 2.0 Bearer client
def flatten(self): return PhaseGroup( setup=flatten_phases_and_groups(self.setup), main=flatten_phases_and_groups(self.main), teardown=flatten_phases_and_groups(self.teardown), name=self.name)
Internally flatten out nested iterables.
def _spark_job_metrics(self, instance, running_apps, addl_tags, requests_config): for app_id, (app_name, tracking_url) in iteritems(running_apps): base_url = self._get_request_url(instance, tracking_url) response = self._rest_request_to_json( base_url, SPARK_APPS_PATH, SPARK_SERVICE_CHECK, requests_config, addl_tags, app_id, 'jobs' ) for job in response: status = job.get('status') tags = ['app_name:%s' % str(app_name)] tags.extend(addl_tags) tags.append('status:%s' % str(status).lower()) self._set_metrics_from_json(tags, job, SPARK_JOB_METRICS) self._set_metric('spark.job.count', COUNT, 1, tags)
Get metrics for each Spark job.
def _is_common_text(self, inpath):
    one_suffix = inpath[-2:]
    two_suffix = inpath[-3:]
    three_suffix = inpath[-4:]
    four_suffix = inpath[-5:]
    if one_suffix in self.common_text:
        return True
    elif two_suffix in self.common_text:
        return True
    elif three_suffix in self.common_text:
        return True
    elif four_suffix in self.common_text:
        return True
    else:
        return False
private method to compare the file path suffix against common text file types
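A hedged illustration of how the suffix slices line up with typical extensions; the actual contents of self.common_text are not shown above, so the extension set here is assumed:

# With a hypothetical common_text set such as {'.c', '.py', '.txt', '.json'}:
# inpath = 'notes.txt' -> inpath[-4:] == '.txt'  (matched by the 4-character slice)
# inpath = 'main.py'   -> inpath[-3:] == '.py'   (matched by the 3-character slice)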
def create_client(self, name): client = {'client':{ 'name': name, }} response = self.post_request('clients/', client, follow = True) if response: return Client(self, response['client'])
Creates a Client with the given information.
def check_request(request, login_unsuccessful, get_username=get_username_from_request, username=None): ip_address = get_ip(request) username = username or get_username(request) if not login_unsuccessful: reset_failed_attempts(ip_address=ip_address, username=username) return True else: return record_failed_attempt(ip_address, username)
check the request and process the results
def _get_menu_width(self, max_width, complete_state): return min(max_width, max(self.MIN_WIDTH, max(get_cwidth(c.display) for c in complete_state.current_completions) + 2))
Return the width of the main column.
def summarize(self): doc = counts_nz = np.count_nonzero(self.countsmat_) cnz = self.countsmat_[np.nonzero(self.countsmat_)] return doc.format( lag_time=self.lag_time, reversible_type=self.reversible_type, ergodic_cutoff=self.ergodic_cutoff, prior_counts=self.prior_counts, n_states=self.n_states_, counts_nz=counts_nz, percent_counts_nz=(100 * counts_nz / self.countsmat_.size), cnz_min=np.min(cnz), cnz_1st=np.percentile(cnz, 25), cnz_med=np.percentile(cnz, 50), cnz_mean=np.mean(cnz), cnz_3rd=np.percentile(cnz, 75), cnz_max=np.max(cnz), cnz_sum=np.sum(cnz), cnz_sum_per_lag=np.sum(cnz)/self.lag_time, ts=', '.join(['{:.2f}'.format(t) for t in self.timescales_]), )
Return some diagnostic summary statistics about this Markov model
def Compare(fromMo, toMo, diff): from UcsBase import UcsUtils if (fromMo.classId != toMo.classId): return CompareStatus.TypesDifferent for prop in UcsUtils.GetUcsPropertyMetaAttributeList(str(fromMo.classId)): propMeta = UcsUtils.IsPropertyInMetaIgnoreCase(fromMo.classId, prop) if propMeta != None: if ((propMeta.access == UcsPropertyMeta.Internal) or (propMeta.access == UcsPropertyMeta.ReadOnly) or ( prop in toMo._excludePropList)): continue if ((toMo.__dict__.has_key(prop)) and (fromMo.getattr(prop) != toMo.getattr(prop))): diff.append(prop) if (len(diff) > 0): return CompareStatus.PropsDifferent return CompareStatus.Equal
Internal method to support CompareManagedObject functionality.
def doc(): kwdb = current_app.kwdb libraries = get_collections(kwdb, libtype="library") resource_files = get_collections(kwdb, libtype="resource") hierarchy = get_navpanel_data(kwdb) return flask.render_template("home.html", data={"libraries": libraries, "version": __version__, "libdoc": None, "hierarchy": hierarchy, "resource_files": resource_files })
Show a list of libraries, along with the nav panel on the left
def send_data(self, message): data = json.dumps(message).encode('utf-8') + b'\n' self.transport.write(data)
Given an object, encode as JSON and transmit to the server.
def shutdown(self): self._done.set() self.executor.shutdown(wait=False)
Stop the publishing loop.
def parse_yaml_node(self, y): if 'participant' not in y: raise InvalidParticipantNodeError self.target_component = TargetComponent().parse_yaml_node(y['participant']) return self
Parse a YAML specification of a participant into this object.
def available_streams(): sds = kp.db.StreamDS() print("Available streams: ") print(', '.join(sorted(sds.streams)))
Show a short list of available streams.
def url(self): url = self.xml.find('coredata/link[@rel="scopus-affiliation"]') if url is not None: url = url.get('href') return url
URL to the affiliation's profile page.
def __dump_validators(self): if hasattr(self, '_validators'): validators_json = [] for validator in self._validators: if isinstance(validator, PropertyValidator): validators_json.append(validator.as_json()) else: raise APIError("validator is not a PropertyValidator: '{}'".format(validator)) if self._options.get('validators', list()) == validators_json: pass else: new_options = self._options.copy() new_options.update({'validators': validators_json}) validate(new_options, options_json_schema) self._options = new_options
Dump the validators as json inside the _options dictionary with the key `validators`.
def setItemData(self, treeItem, column, value, role=Qt.EditRole): if role == Qt.CheckStateRole: if column != self.COL_VALUE: return False else: logger.debug("Setting check state (col={}): {!r}".format(column, value)) treeItem.checkState = value return True elif role == Qt.EditRole: if column != self.COL_VALUE: return False else: logger.debug("Set Edit value (col={}): {!r}".format(column, value)) treeItem.data = value return True else: raise ValueError("Unexpected edit role: {}".format(role))
Sets the role data for the item at index to value.
def listener(messages): for m in messages: if m.content_type == 'text': print(str(m.chat.first_name) + " [" + str(m.chat.id) + "]: " + m.text)
When new messages arrive TeleBot will call this function.
def initialize_weights(self): n = self._outputSize m = self._inputSize self._Q = self._random.sample((n,m)) for i in range(n): self._Q[i] /= np.sqrt( np.dot(self._Q[i], self._Q[i]) )
Randomly initializes the visible-to-hidden connections.
def make_image_cache(img_cache): log.info('Initiating the image cache at {0}'.format(img_cache)) if not os.path.isdir(img_cache): utils.mkdir_p(img_cache) utils.mkdir_p(os.path.join(img_cache, '10.1371')) utils.mkdir_p(os.path.join(img_cache, '10.3389'))
Initiates the image cache if it does not exist
def guess_cls(self): try: ret = fcntl.ioctl(self.ins, BIOCGDLT, struct.pack('I', 0)) ret = struct.unpack('I', ret)[0] except IOError: cls = conf.default_l2 warning("BIOCGDLT failed: unable to guess type. Using %s !", cls.name) return cls try: return conf.l2types[ret] except KeyError: cls = conf.default_l2 warning("Unable to guess type (type %i). Using %s", ret, cls.name)
Guess the packet class that must be used on the interface
def all(self): return self.pages(self.url.page, self.url.max_page)
Yield torrents in range from current page to last page
def run(self): with self._scp_bensh_runner(): self._execute_bensh_runner() path = self._retrieve_tarball() try: self._aggregate_tarball(path) finally: os.remove(path)
Execute benchmark on the specified node
def printDiagnosticsAfterTraining(exp, verbosity=0):
    print "Number of connected synapses per cell"
    l2 = exp.getAlgorithmInstance("L2")
    numConnectedCells = 0
    connectedSynapses = 0
    for c in range(4096):
        cp = l2.numberOfConnectedProximalSynapses([c])
        if cp > 0:
            numConnectedCells += 1
            connectedSynapses += cp
    print "Num L2 cells with connected synapses:", numConnectedCells
    if numConnectedCells > 0:
        print "Avg connected synapses per connected cell:", float(connectedSynapses) / numConnectedCells
    print
Useful diagnostics on a trained system for debugging.
def _get_ids_from_name_private(self, name): results = self.list_private_images(name=name) return [result['id'] for result in results]
Get private images which match the given name.
def _build_date(date, kwargs):
    if date is None:
        if not kwargs:
            raise ValueError('Must pass a date or kwargs')
        else:
            return datetime.date(**kwargs)
    elif kwargs:
        raise ValueError('Cannot pass kwargs and a date')
    else:
        return date
Builds the date argument for event rules.
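A short usage sketch derived from the branches above; the keyword names follow datetime.date:

import datetime

_build_date(None, {'year': 2021, 'month': 3, 'day': 14})   # -> datetime.date(2021, 3, 14)
_build_date(datetime.date(2021, 3, 14), {})                # -> the date passed in, unchanged
# _build_date(None, {}) raises ValueError('Must pass a date or kwargs')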
def from_file(self, file_name=None): file_name = self._check_file_name(file_name) with open(file_name, 'r') as infile: top_level_dict = json.load(infile) pages_dict = top_level_dict['info_df'] pages = pd.DataFrame(pages_dict) self.pages = pages self.file_name = file_name self._prm_packer(top_level_dict['metadata']) self.generate_folder_names() self.paginate()
Loads a DataFrame with all the needed info about the experiment
def filename(self): if self.value and 'value' in self._json_data and self._json_data['value']: return self._json_data['value'].split('/')[-1] return None
Filename of the attachment, without the full 'attachment' path.
def _copy_if_necessary(self, local_path, overwrite): local_path = abspath(local_path) if not exists(local_path): raise MissingLocalFile(local_path) elif not self.copy_local_files_to_cache: return local_path else: cached_path = self.cached_path(local_path) if exists(cached_path) and not overwrite: return cached_path copy2(local_path, cached_path) return cached_path
Return cached path to local file, copying it to the cache if necessary.
def _generate_type(self, data_type, indent_spaces, extra_args): if is_alias(data_type): self._generate_alias_type(data_type) elif is_struct_type(data_type): self._generate_struct_type(data_type, indent_spaces, extra_args) elif is_union_type(data_type): self._generate_union_type(data_type, indent_spaces)
Generates a TypeScript type for the given type.
def verbatim(parser, token): text = [] while 1: token = parser.tokens.pop(0) if token.contents == 'endverbatim': break if token.token_type == TOKEN_VAR: text.append('{{ ') elif token.token_type == TOKEN_BLOCK: text.append('{%') text.append(token.contents) if token.token_type == TOKEN_VAR: text.append(' }}') elif token.token_type == TOKEN_BLOCK: if not text[-1].startswith('='): text[-1:-1] = [' '] text.append(' %}') return VerbatimNode(''.join(text))
Tag to render x-tmpl templates with Django template code.
def type_to_string(t): if t == MemoryElement.TYPE_I2C: return 'I2C' if t == MemoryElement.TYPE_1W: return '1-wire' if t == MemoryElement.TYPE_DRIVER_LED: return 'LED driver' if t == MemoryElement.TYPE_LOCO: return 'Loco Positioning' if t == MemoryElement.TYPE_TRAJ: return 'Trajectory' if t == MemoryElement.TYPE_LOCO2: return 'Loco Positioning 2' return 'Unknown'
Get string representation of memory type
def save_yaml(self, outFile):
    with open(outFile, 'w') as myfile:
        print(yaml.dump(self.params), file=myfile)
saves the config parameters to a YAML file
def _delegate_required(self, path): fs = self._delegate(path) if fs is None: raise errors.ResourceNotFound(path) return fs
Check that there is a filesystem with the given ``path``.
def send_packet(self): waiter, packet = self.client.waiters.popleft() self.logger.debug('sending packet: %s', binascii.hexlify(packet)) self.client.active_transaction = waiter self.client.in_transaction = True self.client.active_packet = packet self.reset_cmd_timeout() self.transport.write(packet)
Write next packet in send queue.
def list_lbaas_loadbalancers(self, retrieve_all=True, **_params): return self.list('loadbalancers', self.lbaas_loadbalancers_path, retrieve_all, **_params)
Fetches a list of all lbaas_loadbalancers for a project.
def add_mpl_colorscale(fig, heatmap_gs, ax_map, params, title=None): cbticks = [params.vmin + e * params.vdiff for e in (0, 0.25, 0.5, 0.75, 1)] if params.vmax > 10: exponent = int(floor(log10(params.vmax))) - 1 cbticks = [int(round(e, -exponent)) for e in cbticks] scale_subplot = gridspec.GridSpecFromSubplotSpec( 1, 3, subplot_spec=heatmap_gs[0, 0], wspace=0.0, hspace=0.0 ) scale_ax = fig.add_subplot(scale_subplot[0, 1]) cbar = fig.colorbar(ax_map, scale_ax, ticks=cbticks) if title: cbar.set_label(title, fontsize=6) cbar.ax.yaxis.set_ticks_position("left") cbar.ax.yaxis.set_label_position("left") cbar.ax.tick_params(labelsize=6) cbar.outline.set_linewidth(0) return cbar
Add colour scale to heatmap.
def save_state(self): try: self.db.savestate(self.get_state()) except: print_('Warning, unable to save state.') print_('Error message:') traceback.print_exc()
Tell the database to save the current state of the sampler.
def _onPaint(self, evt): DEBUG_MSG("_onPaint()", 1, self) drawDC = wx.PaintDC(self) if not self._isDrawn: self.draw(drawDC=drawDC) else: self.gui_repaint(drawDC=drawDC) evt.Skip()
Called when wxPaintEvt is generated
def to_date_or_datetime(value, ctx): if isinstance(value, str): temporal = ctx.get_date_parser().auto(value) if temporal is not None: return temporal elif type(value) == datetime.date: return value elif isinstance(value, datetime.datetime): return value.astimezone(ctx.timezone) raise EvaluationError("Can't convert '%s' to a date or datetime" % str(value))
Tries conversion of any value to a date or datetime
def Initialize(self): super(AFF4ImageBase, self).Initialize() self.offset = 0 self.chunk_cache = ChunkCache(self._WriteChunk, 100) if "r" in self.mode: self.size = int(self.Get(self.Schema.SIZE)) self.chunksize = int(self.Get(self.Schema._CHUNKSIZE)) self.content_last = self.Get(self.Schema.CONTENT_LAST) else: self.size = 0 self.content_last = None
Build a cache for our chunks.
def delete_user_permissions(self, user, perm, obj, check_groups=False): user_perms = self.user_permissions(user, perm, obj, check_groups=False) if not user_perms.filter(object_id=obj.id): return perms = self.user_permissions(user, perm, obj).filter(object_id=obj.id) perms.delete()
Remove granular permission perm from user on an object instance
def download(client, target_dir): print('') print("download inappproducts") print('---------------------') products = client.list_inappproducts() for product in products: path = os.path.join(target_dir, 'products') del product['packageName'] mkdir_p(path) with open(os.path.join(path, product['sku'] + '.json'), 'w') as outfile: print("save product for {0}".format(product['sku'])) json.dump( product, outfile, sort_keys=True, indent=4, separators=(',', ': '))
Download inappproducts from play store.
def change_count(self): status = self.git.status(porcelain=True, untracked_files='no').strip() if not status: return 0 else: return len(status.split('\n'))
The number of changes in the working directory.