code: string (51 – 2.34k chars) · docstring: string (11 – 171 chars)
def _write_cache(self, lines, append=False):
    mode = 'at' if append else 'wt'
    with open(self.filepath, mode, encoding='utf8') as fh:
        fh.writelines(line + '\n' for line in lines)
Write virtualenv metadata to cache.
def python_type(textx_type_name):
    return {
        'ID': text,
        'BOOL': bool,
        'INT': int,
        'FLOAT': float,
        'STRICTFLOAT': float,
        'STRING': text,
        'NUMBER': float,
        'BASETYPE': text,
    }.get(textx_type_name, textx_type_name)
Return the Python type for the name of a base textx type.
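A minimal usage sketch (assumes the surrounding module defines `text` as its str alias, as textx does):

assert python_type('INT') is int
assert python_type('BOOL') is bool
assert python_type('MyRule') == 'MyRule'  # non-base names pass through unchanged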
def process(self, metric):
    if not boto:
        return

    collector = str(metric.getCollectorPath())
    metricname = str(metric.getMetricPath())

    for rule in self.rules:
        self.log.debug(
            "Comparing Collector: [%s] with (%s) "
            "and Metric: [%s] with (%s)",
            str(rule['collector']), collector,
            str(rule['metric']), metricname
        )
        if (str(rule['collector']) == collector
                and str(rule['metric']) == metricname):
            if rule['collect_by_instance'] and self.instance_id:
                self.send_metrics_to_cloudwatch(
                    rule, metric, {'InstanceId': self.instance_id})
            if rule['collect_without_dimension']:
                self.send_metrics_to_cloudwatch(rule, metric, {})
Process a metric and send it to CloudWatch
def generate_docs(app):
    config = app.config
    config_dir = app.env.srcdir
    source_root = os.path.join(config_dir, config.apidoc_source_root)
    output_root = os.path.join(config_dir, config.apidoc_output_root)
    execution_dir = os.path.join(config_dir, '..')
    cleanup(output_root)
    command = ['sphinx-apidoc', '-f', '-o', output_root, source_root]
    for exclude in config.apidoc_exclude:
        command.append(os.path.join(source_root, exclude))
    process = Popen(command, cwd=execution_dir)
    process.wait()
Run sphinx-apidoc to generate Python API documentation for the project.
def _load_outcome_models(self):
    if not self.state_copy_initialized:
        return
    self.outcomes = []
    for outcome_m in self.state_copy.outcomes:
        new_oc_m = deepcopy(outcome_m)
        new_oc_m.parent = self
        new_oc_m.outcome = outcome_m.outcome
        self.outcomes.append(new_oc_m)
Reloads the outcome models directly from the state
def move_committees(src, dest):
    comm, sub_comm = import_committees(src)
    save_committees(comm, dest)
    save_subcommittees(comm, dest)
Import stupid yaml files, convert to something useful.
def update_looking_for(profile_tree, looking_for):
    div = profile_tree.xpath("//div[@id = 'what_i_want']")[0]
    looking_for['gentation'] = div.xpath(".//li[@id = 'ajax_gentation']/text()")[0].strip()
    looking_for['ages'] = replace_chars(div.xpath(".//li[@id = 'ajax_ages']/text()")[0].strip())
    looking_for['near'] = div.xpath(".//li[@id = 'ajax_near']/text()")[0].strip()
    looking_for['single'] = div.xpath(".//li[@id = 'ajax_single']/text()")[0].strip()
    try:
        looking_for['seeking'] = div.xpath(".//li[@id = 'ajax_lookingfor']/text()")[0].strip()
    except IndexError:  # the element is optional; narrowed from a bare except
        pass
Update looking_for attribute of a Profile.
def connection(self):
    self._condition.acquire()
    try:
        if (self._maxconnections
                and self._connections >= self._maxconnections):
            raise TooManyConnections(
                "%d connections are already equal to the max: %d"
                % (self._connections, self._maxconnections))
        try:
            con = self._idle_cache.pop(0)
        except IndexError:
            con = self.new_connection()
        self._connections += 1
    finally:
        self._condition.release()
    return con
get a cached connection from the pool
def params(self, dict):
    self._configuration.update(dict)
    self._measurements.update()
Set configuration variables for an OnShape part.
def _fit_radec(self):
    self.orbfit.fitradec.restype = ctypes.c_int
    self.orbfit.fitradec.argtypes = [
        ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p
    ]
    mpc_file = tempfile.NamedTemporaryFile(suffix='.mpc')
    for observation in self.observations:
        mpc_file.write("{}\n".format(str(observation)))
    mpc_file.seek(0)
    abg_file = tempfile.NamedTemporaryFile()
    res_file = tempfile.NamedTemporaryFile()
    self.orbfit.fitradec(ctypes.c_char_p(mpc_file.name),
                         ctypes.c_char_p(abg_file.name),
                         ctypes.c_char_p(res_file.name))
    self.abg = abg_file
    self.abg.seek(0)
    self.residuals = res_file
    self.residuals.seek(0)
call fit_radec of BK passing in the observations.
def warning_free_pause():
    import matplotlib.pyplot as plt
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore",
                                message="Using default event loop until "
                                        "function specific to this GUI is "
                                        "implemented")
        plt.pause(0.0001)
Issue a matplotlib pause without the warning.
def gevent_stop(self):
    import gevent
    gevent.kill(self._poller_greenlet)
    self.remove()
    self._select = select.select
Helper method to stop the node for gevent-based applications.
def _morph(self):
    self.scanner_paths = {}
    if not hasattr(self, '_local'):
        self._local = 0
    if not hasattr(self, 'released_target_info'):
        self.released_target_info = False
    self.store_info = 1
    self._func_exists = 4
    self._func_get_contents = 3
    self.changed_since_last_build = 4
    if self.has_builder():
        self.changed_since_last_build = 5
Turn a file system node into a File object.
def _find_common_roots(paths):
    paths = [x.split(os.path.sep) for x in paths]
    root = {}
    for chunks in sorted(paths, key=len, reverse=True):
        node = root
        for chunk in chunks:
            node = node.setdefault(chunk, {})
        node.clear()
    rv = set()

    def _walk(node, path):
        for prefix, child in iteritems(node):
            _walk(child, path + (prefix,))
        if not node:
            rv.add("/".join(path))

    _walk(root, ())
    return rv
From a set of paths, finds the common roots that need monitoring.
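A minimal usage sketch (this is Python 2 era code; `iteritems` must be in scope, and the helper joins with "/" rather than os.path.sep):

_find_common_roots(['/srv/app', '/srv/app/templates', '/srv/static'])
# -> {'/srv/app', '/srv/static'}; '/srv/app/templates' collapses into its parent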
def dt64_to_dt(dt64):
    ts = (dt64 - np.datetime64('1970-01-01T00:00:00')) / np.timedelta64(1, 's')
    return dt.datetime.utcfromtimestamp(ts)
Safely converts NumPy datetime64 to a datetime object.
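A quick sketch of a round trip (using the np/dt aliases the snippet assumes):

import numpy as np
import datetime as dt

dt64_to_dt(np.datetime64('2020-06-01T12:30:00'))
# -> datetime.datetime(2020, 6, 1, 12, 30)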
def make_hash(s, n_bytes):
    raw_h = int(md5(s.encode('utf-8')).hexdigest()[:n_bytes], 16)
    return 16**n_bytes//2 - raw_h
Make the hash from a matches key.
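A minimal sketch of the value range (note that, despite its name, n_bytes counts hex digits of the md5 digest):

from hashlib import md5

h = make_hash('some key', 8)
assert -16**8 // 2 < h <= 16**8 // 2  # signed range centred on zero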
def search(self, spec, operator):
    return self.xmlrpc.search(spec, operator.lower())
Query PyPI via the XMLRPC interface using a search spec
def validate_answer(self, value):
    try:
        serialized = json.dumps(value)
    except (ValueError, TypeError):
        raise serializers.ValidationError("Answer value must be JSON-serializable")
    if len(serialized) > Submission.MAXSIZE:
        raise serializers.ValidationError("Maximum answer size exceeded.")
    return value
Check that the answer is JSON-serializable and not too long.
def create(cls, name, email, cb):
    it = cls(name, create_structure=True)
    it.value['email'] = email
    cb.upsert_multi(ItemSequence([it]))
    return it
Create the basic structure of a player
def from_similars(cls, learn, layer_ls:list=[0, 7, 2], **kwargs):
    "Gets the indices for the most similar images."
    train_ds, train_idxs = cls.get_similars_idxs(learn, layer_ls, **kwargs)
    return train_ds, train_idxs
Gets the indices for the most similar images.
def leaf_asts(self):
    seen = set()
    ast_queue = deque([self])
    while ast_queue:
        ast = ast_queue.pop()
        if isinstance(ast, Base) and id(ast.cache_key) not in seen:
            seen.add(id(ast.cache_key))
            if ast.depth == 1:
                yield ast
                continue
            ast_queue.extend(ast.args)
            continue
Return an iterator over the leaf ASTs.
def _initialize_recursion_depth(self):
    from furious.context import get_current_async

    recursion_options = self._options.get('_recursion', {})
    current_depth = recursion_options.get('current', 0)
    max_depth = recursion_options.get('max', MAX_DEPTH)

    try:
        executing_async = get_current_async()
        current_depth = executing_async.recursion_depth
        if max_depth == MAX_DEPTH:
            executing_options = executing_async.get_options().get(
                '_recursion', {})
            max_depth = executing_options.get('max', max_depth)
    except errors.NotInContextError:
        pass

    self.update_options(
        _recursion={'current': current_depth, 'max': max_depth})
Ensure recursion info is initialized; if not, initialize it.
def make_field_objects(field_data, names):
    field_objects = []
    field_names = []
    for field in field_data:
        if hasattr(field, 'get') and callable(field.get):
            atype = cast(Text, field.get('type'))
            name = cast(Text, field.get('name'))

            has_default = False
            default = None
            if 'default' in field:
                has_default = True
                default = field.get('default')

            order = field.get('order')
            doc = field.get('doc')
            other_props = get_other_props(field, FIELD_RESERVED_PROPS)
            new_field = Field(atype, name, has_default, default, order,
                              names, doc, other_props)
            if new_field.name in field_names:
                fail_msg = 'Field name %s already in use.' % new_field.name
                raise SchemaParseException(fail_msg)
            field_names.append(new_field.name)
        else:
            raise SchemaParseException('Not a valid field: %s' % field)
        field_objects.append(new_field)
    return field_objects
We're going to need to make message parameters too.
def _remove_deactivated(contexts):
    stack_contexts = tuple([h for h in contexts[0] if h.active])

    head = contexts[1]
    while head is not None and not head.active:
        head = head.old_contexts[1]

    ctx = head
    while ctx is not None:
        parent = ctx.old_contexts[1]
        while parent is not None:
            if parent.active:
                break
            ctx.old_contexts = parent.old_contexts
            parent = parent.old_contexts[1]
        ctx = parent

    return (stack_contexts, head)
Remove deactivated handlers from the chain
async def cursor(self) -> Cursor:
    return Cursor(self, await self._execute(self._conn.cursor))
Create an aiosqlite cursor wrapping a sqlite3 cursor object.
def _update_alpha(self, event=None):
    a = self.alpha.get()
    hexa = self.hexa.get()
    hexa = hexa[:7] + ("%2.2x" % a).upper()
    self.hexa.delete(0, 'end')
    self.hexa.insert(0, hexa)
    self.alphabar.set(a)
    self._update_preview()
Update display after a change in the alpha spinbox.
def setLog(self, fileName, writeName=False):
    self.log = 1
    self.logFile = fileName
    self._logPtr = open(fileName, "w")
    if writeName:
        self._namePtr = open(fileName + ".name", "w")
Opens a log file with name fileName.
def jsonp_wrap(callback_key='callback'):
    def decorator_fn(f):
        @wraps(f)
        def jsonp_output_decorator(*args, **kwargs):
            task_data = _get_data_from_args(args)
            data = task_data.get_data()
            if callback_key not in data:
                raise KeyError(
                    'Missing required parameter "{0}" for task.'.format(
                        callback_key))
            callback = data[callback_key]
            jsonp = f(*args, **kwargs)
            if isinstance(JobContext.get_current_context(), WebJobContext):
                JobContext.get_current_context().add_responder(
                    MimeSetterWebTaskResponder('application/javascript'))
            jsonp = "{callback}({data})".format(callback=callback, data=jsonp)
            return jsonp
        return jsonp_output_decorator
    return decorator_fn
Format the response as JSONP: wrap the JSON data in the callback named by the request.
def peak_interval(self, name, alpha=_alpha, npoints=_npoints, **kwargs):
    data = self.get(name, **kwargs)
    return peak_interval(data, alpha, npoints)
Calculate peak interval for parameter.
def make_complete(self):
    self.array['sids'] = numpy.arange(len(self), dtype=numpy.uint32)
    self.complete = self
Turns the site collection into a complete one, if needed
def _wrap_unary_errors(callable_):
    _patch_callable_name(callable_)

    @six.wraps(callable_)
    def error_remapped_callable(*args, **kwargs):
        try:
            return callable_(*args, **kwargs)
        except grpc.RpcError as exc:
            six.raise_from(exceptions.from_grpc_error(exc), exc)

    return error_remapped_callable
Map errors for Unary-Unary and Stream-Unary gRPC callables.
def wordlist2cognates(wordlist, source, expert='expert', ref='cogid'):
    for k in wordlist:
        yield dict(
            Form_ID=wordlist[k, 'lid'],
            ID=k,
            Form=wordlist[k, 'ipa'],
            Cognateset_ID='{0}-{1}'.format(
                slug(wordlist[k, 'concept']), wordlist[k, ref]),
            Cognate_Detection_Method=expert,
            Source=source)
Turn a wordlist into a cognate set list, using the cldf parameters.
def read_bytes(self, path):
    file = staticfiles_storage.open(path)
    content = file.read()
    file.close()
    return content
Read file content in binary mode
def task(obj=None, deps=None):
    if callable(obj):
        __task(obj.__name__, obj)
        return obj

    def __decorated(func):
        # Fall back to func.__name__ here; the original used obj.__name__,
        # which raises AttributeError whenever obj is None.
        __task(obj if obj else func.__name__, deps, func)
        return func

    return __decorated
Decorator for creating a task.
def update_footer(self):
    field_item = self.field_list.currentItem()
    if not field_item:
        self.footer_label.setText('')
        return
    field_name = field_item.data(Qt.UserRole)
    field = self.layer.fields().field(field_name)
    index = self.layer.fields().lookupField(field_name)
    unique_values = list(self.layer.uniqueValues(index))
    pretty_unique_values = ', '.join([str(v) for v in unique_values[:10]])
    footer_text = tr('Field type: {0}\n').format(field.typeName())
    footer_text += tr('Unique values: {0}').format(pretty_unique_values)
    self.footer_label.setText(footer_text)
Update footer when the field list changes.
def _global_index():
    var_and_type = var + Optional(type_)
    global_dec = Suppress(upkey("global")) + index
    range_key_etc = Suppress(",") + Group(throughput) | Optional(
        Group(Suppress(",") + var_and_type).setResultsName("range_key")
    ) + Optional(Suppress(",") + include_vars) + Optional(
        Group(Suppress(",") + throughput)
    )
    global_spec = (
        Suppress("(") + primitive + Suppress(",")
        + Group(var_and_type).setResultsName("hash_key")
        + range_key_etc + Suppress(")")
    )
    return Group(global_dec + global_spec).setName("global index")
Create grammar for a global index declaration
def humanize_date(p_datetime):
    now = arrow.now()
    _date = now.replace(day=p_datetime.day, month=p_datetime.month,
                        year=p_datetime.year)
    return _date.humanize(now).replace('just now', 'today')
Returns a relative date string from a datetime object.
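A minimal usage sketch (assumes the arrow package is installed and imported, as the snippet requires):

from datetime import datetime, timedelta

humanize_date(datetime.now())                      # -> 'today'
humanize_date(datetime.now() - timedelta(days=1))  # -> 'a day ago'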
def flow(self)->FlowField:
    "Access the flow-field grid after applying queued affine transforms."
    if self._flow is None:
        self._flow = _affine_grid(self.shape)
    if self._affine_mat is not None:
        self._flow = _affine_mult(self._flow, self._affine_mat)
        self._affine_mat = None
    return self._flow
Access the flow-field grid after applying queued affine transforms.
def dump_dict(self):
    dump_dict = dict()

    dump_dict['Structure'] = self.name

    for keys in self.__keys__:
        for key in keys:
            val = getattr(self, key)
            if isinstance(val, (int, long)):
                if key == 'TimeDateStamp' or key == 'dwTimeStamp':
                    try:
                        val = '0x%-8X [%s UTC]' % (
                            val, time.asctime(time.gmtime(val)))
                    except ValueError as e:
                        val = '0x%-8X [INVALID TIME]' % val
            else:
                val = ''.join(
                    chr(d) if chr(d) in string.printable else "\\x%02x" % d
                    for d in [ord(c) if not isinstance(c, int) else c
                              for c in val])
            dump_dict[key] = {
                'FileOffset': self.__field_offsets__[key] + self.__file_offset__,
                'Offset': self.__field_offsets__[key],
                'Value': val}

    return dump_dict
Returns a dictionary representation of the structure.
def _check_file_corruption(self, header):
    if header.msg_type == 0 or header.msg_size == 0 or header.msg_size > 10000:
        if not self._file_corrupt and self._debug:
            print('File corruption detected')
        self._file_corrupt = True
    return self._file_corrupt
check for file corruption based on an unknown message type in the header
def extract_number(text):
    result = list()
    chunk = list()
    valid_char = set(".1234567890")
    for char in text:
        if char in valid_char:
            chunk.append(char)
        else:
            result.append("".join(chunk))
            chunk = list()
    result.append("".join(chunk))
    result_new = list()
    for number in result:
        if "." in number:
            try:
                result_new.append(float(number))
            except ValueError:  # narrowed from a bare except
                pass
        else:
            try:
                result_new.append(int(number))
            except ValueError:  # skips the empty chunks between digit runs
                pass
    return result_new
Extract numbers (ints and floats) from text.
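A quick usage sketch:

extract_number("12 apples cost $3.50 in aisle 7")
# -> [12, 3.5, 7]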
def _get_yum_config_value(name):
    conf = _get_yum_config()
    if name in conf.keys():
        return conf.get(name)
    return None
Look for a specific config variable and return its value
def index(request, page):
    days = []
    days_found = set()
    query = Entry.query.order_by(Entry.pub_date.desc())
    pagination = Pagination(query, PER_PAGE, page, "index")
    for entry in pagination.entries:
        day = date(*entry.pub_date.timetuple()[:3])
        if day not in days_found:
            days_found.add(day)
            days.append({"date": day, "entries": []})
        days[-1]["entries"].append(entry)
    return render_template("index.html", days=days, pagination=pagination)
Show the index page or an offset of it.
def _full_to_yearly_ts(self, arr, dt):
    time_defined = self.def_time and not ('av' in self.dtype_in_time)
    if time_defined:
        arr = utils.times.yearly_average(arr, dt)
    return arr
Average the full timeseries within each year.
async def expand_all_quays(self) -> None:
    if not self.stops:
        return

    headers = {'ET-Client-Name': self._client_name}
    request = {
        'query': GRAPHQL_STOP_TO_QUAY_TEMPLATE,
        'variables': {
            'stops': self.stops,
            'omitNonBoarding': self.omit_non_boarding
        }
    }

    with async_timeout.timeout(10):
        resp = await self.web_session.post(RESOURCE,
                                           json=request,
                                           headers=headers)

    if resp.status != 200:
        _LOGGER.error(
            "Error connecting to Entur, response http status code: %s",
            resp.status)
        return None

    result = await resp.json()
    if 'errors' in result:
        return

    for stop_place in result['data']['stopPlaces']:
        if len(stop_place['quays']) > 1:
            for quay in stop_place['quays']:
                if quay['estimatedCalls']:
                    self.quays.append(quay['id'])
Find all quays from stop places.
def _var_bounds(self):
    x0 = array([])
    xmin = array([])
    xmax = array([])
    for var in self.om.vars:
        x0 = r_[x0, var.v0]
        xmin = r_[xmin, var.vl]
        xmax = r_[xmax, var.vu]
    return x0, xmin, xmax
Returns bounds on the optimisation variables.
def connect(db, host=HACR_DATABASE_SERVER, user=HACR_DATABASE_USER,
            passwd=HACR_DATABASE_PASSWD):
    try:
        import pymysql
    except ImportError as e:
        e.args = ('pymysql is required to fetch HACR triggers',)
        raise
    return pymysql.connect(host=host, user=user, passwd=passwd, db=db)
Connect to the given SQL database
def setup_logging():
    config = json.load(open(os.path.join(config_path, 'logging.json')))
    if dbconfig.get('enable_syslog_forwarding', NS_LOG, False):
        try:
            config['formatters']['syslog'] = {
                'format': _get_syslog_format('cloud-inquisitor-logs')
            }
            config['handlers']['syslog'] = {
                'class': 'cloud_inquisitor.log.SyslogPipelineHandler',
                'formatter': 'syslog',
                'filters': ['standard']
            }
            config['loggers']['cloud_inquisitor']['handlers'].append('syslog')

            audit_handler = SyslogPipelineHandler()
            audit_handler.setFormatter(
                logging.Formatter(_get_syslog_format('cloud-inquisitor-audit')))
            audit_handler.setLevel(logging.DEBUG)
            _AUDIT_LOGGER.addHandler(audit_handler)
            _AUDIT_LOGGER.propagate = False
        except Exception as ex:
            print('An error occurred while configuring the syslogger: {}'.format(ex))

    logging.config.dictConfig(config)
Utility function to setup the logging systems based on the `logging.json` configuration file
def getPythonVarName(name):
    return SUB_REGEX.sub(
        '',
        name.replace('+', '_')
            .replace('-', '_')
            .replace('.', '_')
            .replace(' ', '')
            .replace('/', '_')
    ).upper()
Get the python variable name
def _conv_adr(adr, entry):
    if adr.value.street:
        entry['address'] = adr.value.street
    if adr.value.extended:
        entry['address2'] = adr.value.extended
    if adr.value.city:
        entry['city'] = adr.value.city
    if adr.value.region:
        entry['state'] = adr.value.region
    if adr.value.code and adr.value.code != '0':
        entry['zip'] = adr.value.code
    if adr.value.country:
        entry['country'] = adr.value.country
Converts to Abook address format
def update_model_cache(table_name):
    model_cache_info = ModelCacheInfo(table_name, uuid.uuid4().hex)
    model_cache_backend.share_model_cache_info(model_cache_info)
Updates model cache by generating a new key for the model
def sourcehook(self, newfile, encoding='utf-8'):
    "Hook called on a filename to be sourced."
    from codecs import open
    if newfile[0] == '"':
        newfile = newfile[1:-1]
    if isinstance(self.infile, basestring) and not os.path.isabs(newfile):
        newfile = os.path.join(os.path.dirname(self.infile), newfile)
    return (newfile, open(newfile, "r", encoding))
Hook called on a filename to be sourced.
def _set_repo_option(repo, option):
    if not option:
        return
    opt = option.split('=')
    if len(opt) != 2:
        return
    if opt[0] == 'trusted':
        repo['trusted'] = opt[1] == 'yes'
    else:
        repo[opt[0]] = opt[1]
Set the given option on the repo
def brent_optimise(node1, node2, min_brlen=0.001, max_brlen=10, verbose=False):
    from scipy.optimize import minimize_scalar
    wrapper = BranchLengthOptimiser(node1, node2, (min_brlen + max_brlen) / 2.)
    n = minimize_scalar(lambda x: -wrapper(x)[0],
                        method='brent', bracket=(min_brlen, max_brlen))['x']
    if verbose:
        logger.info(wrapper)
    if n < min_brlen:
        n = min_brlen
        wrapper(n)
    return n, -1 / wrapper.get_d2lnl(n)
Optimise ML distance between two partials. min and max set brackets
def before_func_accept_retry_state(fn):
    if not six.callable(fn):
        return fn

    if func_takes_retry_state(fn):
        return fn

    @_utils.wraps(fn)
    def wrapped_before_func(retry_state):
        warn_about_non_retry_state_deprecation('before', fn, stacklevel=4)
        return fn(
            retry_state.fn,
            retry_state.attempt_number,
        )
    return wrapped_before_func
Wrap "before" function to accept "retry_state".
def GetVmodlType(name):
    if isinstance(name, type):
        return name

    typ = vmodlTypes.get(name)
    if typ:
        return typ

    isArray = name.endswith("[]")
    if isArray:
        name = name[:-2]
    ns, wsdlName = _GetWsdlInfo(name)
    try:
        typ = GetWsdlType(ns, wsdlName)
    except KeyError:
        raise KeyError(name)
    if typ:
        return isArray and typ.Array or typ
    else:
        raise KeyError(name)
Get type from vmodl name
def openTrades(self) -> List[Trade]:
    return [v for v in self.wrapper.trades.values()
            if v.orderStatus.status not in OrderStatus.DoneStates]
List of all open order trades.
def _use_absolute_file_name(self, img):
    img['file_name'] = os.path.join(self._imgdir, img['file_name'])
    assert os.path.isfile(img['file_name']), img['file_name']
Change a relative filename to an absolute file name.
def _handle_cancel_notification(self, msg_id):
    request_future = self._client_request_futures.pop(msg_id, None)
    if not request_future:
        log.warn("Received cancel notification for unknown message id %s",
                 msg_id)
        return
    if request_future.cancel():
        log.debug("Cancelled request with id %s", msg_id)
Handle a cancel notification from the client.
def _iter_lexerclasses(plugins=True):
    for key in sorted(LEXERS):
        module_name, name = LEXERS[key][:2]
        if name not in _lexer_cache:
            _load_lexers(module_name)
        yield _lexer_cache[name]
    if plugins:
        for lexer in find_plugin_lexers():
            yield lexer
Return an iterator over all lexer classes.
def setFormatMetadata(self, format):
    assert (self.needMetadataUpdate(CoverImageMetadata.FORMAT)
            or (self.format is format))
    self.format = format
    self.check_metadata &= ~CoverImageMetadata.FORMAT
Set format image metadata to what has been reliably identified.
def query(self, sql: str, args: tuple = None):
    with self._cursor() as cursor:
        log.debug('Running SQL: ' + str((sql, args)))
        cursor.execute(sql, args)
        return cursor.fetchall()
Execute a SQL query with a return value.
def publishToMyself(self, roomId, name, data):
    self.publishToRoom(roomId, name, data, [self])
Publish to only myself
def run(self):
    self.timer = t.Thread(target=self.report_spans)
    self.timer.daemon = True
    self.timer.name = "Instana Span Reporting"
    self.timer.start()
Spawn a background thread to periodically report queued spans
def delaunay3D(dataset, alpha=0, tol=None, boundary=True):
    deln = vtk.vtkDelaunay3D()
    deln.SetInputData(dataset)
    deln.SetAlpha(alpha)
    if tol:
        deln.SetTolerance(tol)
    deln.SetBoundingTriangulation(boundary)
    deln.Update()
    return deln.GetOutput()
Create 3D Delaunay triangulation of input points.
def execute_command(self, *args, **kwargs):
    try:
        return self.get_connection().execute_command(*args, **kwargs)
    except ConnectionError as e:
        logger.warn('trying to reconnect')
        self.connect()
        logger.warn('connected')
        raise
Execute a command on the connected server.
def isDocumentCollection(cls, name):
    try:
        col = cls.getCollectionClass(name)
        return issubclass(col, Collection)
    except KeyError:
        return False
Return True or False depending on whether 'name' is the name of a document collection.
def read_sis_ini(fh, byteorder, dtype, count, offsetsize):
    inistr = fh.read(count)
    inistr = bytes2str(stripnull(inistr))
    try:
        return olympusini_metadata(inistr)
    except Exception as exc:
        log.warning('olympusini_metadata: %s: %s',
                    exc.__class__.__name__, exc)
        return {}
Read OlympusSIS INI string and return as dict.
def sortframe(frame):
    d = frame['data']
    sortedargs = np.lexsort([d['xi'], d['yi'], d['zi']])
    d = d[sortedargs]
    frame['data'] = d
    return frame
sorts particles for a frame
def center(self, X):
    X = X.copy()
    inan = numpy.isnan(X)
    if self.mu is None:
        X_ = numpy.ma.masked_array(X, inan)
        self.mu = X_.mean(0).base
        self.sigma = X_.std(0).base
    # fill NaN positions with the per-column mean
    reduce(lambda y, x: setitem(x[0], x[1], x[2]),
           zip(X.T, inan.T, self.mu), None)
    X = X - self.mu
    X = X / numpy.where(self.sigma == 0, 1e-30, self.sigma)
    return X
Center `X` in PCA space.
def __store_deactivated_components(self):
    deactivated_components = []
    for node in foundations.walkers.nodes_walker(self.__model.root_node):
        if node.family == "Component":
            node.component.interface.activated or \
                deactivated_components.append(node.component.name)

    LOGGER.debug("> Storing '{0}' deactivated Components.".format(
        ", ".join(deactivated_components)))
    self.__settings.set_key("Settings",
                            "deactivated_components",
                            ",".join(deactivated_components))
Stores deactivated Components in settings file.
def show(self, baseAppInstance):
    self.from_dict_to_fields(self.configDict)
    super(ProjectConfigurationDialog, self).show(baseAppInstance)
Allows showing the widget as a root window
def _one_iteration(self, F, Ybus, V, Vm, Va, pv, pq, pvpq):
    J = self._build_jacobian(Ybus, V, pv, pq, pvpq)

    dx = -1 * spsolve(J, F)

    npv = len(pv)
    npq = len(pq)
    if npv > 0:
        Va[pv] = Va[pv] + dx[range(npv)]
    if npq > 0:
        Va[pq] = Va[pq] + dx[range(npv, npv + npq)]
        Vm[pq] = Vm[pq] + dx[range(npv + npq, npv + npq + npq)]

    V = Vm * exp(1j * Va)
    Vm = abs(V)
    Va = angle(V)

    return V, Vm, Va
Performs one Newton iteration.
def as_dict(self):
    d = {}
    for key, value in self:
        d.setdefault(key, []).append(value)
    return d
Return a copy of the comment data in a real dict.
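A self-contained sketch of the same pattern (the real object iterates as key/value pairs, so duplicate keys collect into lists):

pairs = [('ARTIST', 'Alice'), ('ARTIST', 'Bob'), ('TITLE', 'Song')]
d = {}
for key, value in pairs:
    d.setdefault(key, []).append(value)
# d == {'ARTIST': ['Alice', 'Bob'], 'TITLE': ['Song']}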
def clear_cache(m, files_processed):
    for what, reason, url, path in files_processed:
        cp = m.doc.downloader.cache_path(url)
        if m.cache.exists(cp):
            m.cache.remove(cp)
Remove any files we may have uploaded from the cache.
def table_formatter(self, dataframe, inc_header=1, inc_index=1):
    return TableFormatter(dataframe, inc_header=inc_header,
                          inc_index=inc_index)
Return a table formatter for the dataframe. Saves the user the need to import this class
def pxe_netboot(self, filename):
    new_port = {
        'extra_dhcp_opts': [
            {'opt_name': 'bootfile-name',
             'opt_value': 'http://192.0.2.240:8088/' + filename,
             'ip_version': 4},
            {'opt_name': 'tftp-server',
             'opt_value': '192.0.2.240',
             'ip_version': '4'},
            {'opt_name': 'server-ip-address',
             'opt_value': '192.0.2.240',
             'ip_version': '4'}
        ]
    }
    self.neutron.update_port(self._provision_port_id, {'port': new_port})
Specify which file ipxe should load during the netboot.
def create_archive(archive, filenames, verbosity=0, program=None,
                   interactive=True):
    util.check_new_filename(archive)
    util.check_archive_filelist(filenames)
    if verbosity >= 0:
        util.log_info("Creating %s ..." % archive)
    res = _create_archive(archive, filenames, verbosity=verbosity,
                          interactive=interactive, program=program)
    if verbosity >= 0:
        util.log_info("... %s created." % archive)
    return res
Create given archive with given files.
def padded_variance_explained(predictions,
                              labels,
                              weights_fn=common_layers.weights_all):
    predictions, labels = common_layers.pad_with_zeros(predictions, labels)
    targets = labels
    weights = weights_fn(targets)

    y_bar = tf.reduce_mean(weights * targets)
    tot_ss = tf.reduce_sum(weights * tf.pow(targets - y_bar, 2))
    res_ss = tf.reduce_sum(weights * tf.pow(targets - predictions, 2))
    r2 = 1. - res_ss / tot_ss
    return r2, tf.reduce_sum(weights)
Explained variance, also known as R^2.
def create_index(modules):
    for key in modules.keys():
        file_path = join(HERE, '%s_modules/_list_of_modules.rst' % key)
        list_file = open(file_path, 'w')
        list_file.write('%s\n' % AUTOGEN)
        list_file.write('%s\n' % key.title())
        list_file.write('=' * len(key))
        list_file.write('\n\n')
        list_file.write('.. toctree::\n')
        list_file.write('    :maxdepth: 2\n\n')
        for module in modules[key]:
            list_file.write('    %s\n' % module)
This takes a dict of modules and creates the RST index file.
def _expand_one_key_dictionary(_dict):
    key = next(six.iterkeys(_dict))
    value = _dict[key]
    return key, value
Returns the single key and its value from a one-key dictionary.
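A quick sketch (assumes six is available, as the helper requires):

_expand_one_key_dictionary({'pkg': ['a', 'b']})
# -> ('pkg', ['a', 'b'])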
def _copy_context_into_mutable(context):
    def make_mutable(val):
        if isinstance(val, Mapping):
            return dict(val)
        else:
            return val

    if not isinstance(context, (str, Mapping)):
        try:
            return [make_mutable(val) for val in context]
        except TypeError:
            pass
    return make_mutable(context)
Copy a properly formatted context into a mutable data structure.
def login(self,
          command='su -',
          user=None,
          password=None,
          prompt_prefix=None,
          expect=None,
          timeout=shutit_global.shutit_global_object.default_timeout,
          escape=False,
          echo=None,
          note=None,
          go_home=True,
          fail_on_fail=True,
          is_ssh=True,
          check_sudo=True,
          loglevel=logging.DEBUG):
    shutit_global.shutit_global_object.yield_to_draw()
    shutit_pexpect_session = self.get_current_shutit_pexpect_session()
    return shutit_pexpect_session.login(
        ShutItSendSpec(shutit_pexpect_session,
                       user=user,
                       send=command,
                       password=password,
                       prompt_prefix=prompt_prefix,
                       expect=expect,
                       timeout=timeout,
                       escape=escape,
                       echo=echo,
                       note=note,
                       go_home=go_home,
                       fail_on_fail=fail_on_fail,
                       is_ssh=is_ssh,
                       check_sudo=check_sudo,
                       loglevel=loglevel))
Logs user in on default child.
def check_theme(theme):
    terminal_colors = curses.COLORS if curses.has_colors() else 0

    if theme.required_colors > terminal_colors:
        return False
    elif theme.required_color_pairs > curses.COLOR_PAIRS:
        return False
    else:
        return True
Check if the given theme is compatible with the terminal
def reindex(report):
    index = list(report.index)
    i = index.index('TOTAL')
    return report.reindex(index[:i] + index[i+1:] + ['TOTAL'])
Reindex report so that 'TOTAL' is the last row
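A quick pandas sketch:

import pandas as pd

report = pd.DataFrame({'n': [3, 1, 2]}, index=['a', 'TOTAL', 'b'])
list(reindex(report).index)
# -> ['a', 'b', 'TOTAL']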
def parse_line(string):
    match = line_regexp().match(string)
    if match:
        matches = match.groupdict()
        line_number = matches['line_number']
        path_to_python = matches['path_to_python']
        spaceless_path_to_python = matches['spaceless_path_to_python']
        if path_to_python:
            return path_to_python, line_number
        elif spaceless_path_to_python:
            return spaceless_path_to_python, line_number
Parse a single string as a traceback line
def create_notification_rule(self, data, **kwargs):
    data = {'notification_rule': data}
    endpoint = '{0}/{1}/notification_rules'.format(
        self.endpoint,
        self['id'],
    )
    result = self.request('POST',
                          endpoint=endpoint,
                          data=data,
                          query_params=kwargs)
    self._data['notification_rules'].append(result['notification_rule'])
    return result
Create a notification rule for this user.
def requestOpenOrders(self, all_clients=False):
    if all_clients:
        self.ibConn.reqAllOpenOrders()
    self.ibConn.reqOpenOrders()
Request open orders - loads up orders that weren't created using this session
def _process_generic_param(pval, def_unit, equivalencies=[]):
    if isinstance(pval, u.Quantity):
        outval = pval.to(def_unit, equivalencies).value
    else:
        outval = pval
    return outval
Process generic model parameter.
def mapper(mapping, _nt_name='NT'):
    if isinstance(mapping, Mapping) and not isinstance(mapping, AsDict):
        for key, value in list(mapping.items()):
            mapping[key] = mapper(value)
        return namedtuple_wrapper(_nt_name, **mapping)
    elif isinstance(mapping, list):
        return [mapper(item) for item in mapping]
    return mapping
Convert mappings to namedtuples recursively.
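A usage sketch, assuming the module's namedtuple_wrapper builds a namedtuple from keyword arguments:

cfg = mapper({'db': {'host': 'localhost', 'port': 5432}})
cfg.db.host  # -> 'localhost'
cfg.db.port  # -> 5432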
def __getRefererUrl(self, url=None):
    if url is None:
        url = "http://www.arcgis.com/sharing/rest/portals/self"
    params = {
        "f": "json",
        "token": self.token
    }
    val = self._get(url=url,
                    param_dict=params,
                    proxy_url=self._proxy_url,
                    proxy_port=self._proxy_port)
    self._referer_url = "arcgis.com"
    self._token = None
    return self._referer_url
gets the referer url for the token handler
def palettebar(height, length, colormap):
    cbar = np.tile(np.arange(length) * 1.0 / (length - 1), (height, 1))
    cbar = (cbar * (colormap.values.max() + 1 - colormap.values.min())
            + colormap.values.min())
    return colormap.palettize(cbar)
Return the channels of a palettebar.
def after_unassign(reference_analysis):
    analysis_events.after_unassign(reference_analysis)
    ref_sample = reference_analysis.aq_parent
    ref_sample.manage_delObjects([reference_analysis.getId()])
Removes the reference analysis from the system
def preloop(self):
    if not self.parser:
        self.stdout.write("Welcome to imagemounter {version}"
                          .format(version=__version__))
        self.stdout.write("\n")
        self.parser = ImageParser()
        for p in self.args.paths:
            self.onecmd('disk "{}"'.format(p))
If the parser is not already set, loads the parser.
def board_fen(self, *, promoted: Optional[bool] = False) -> str:
    builder = []
    empty = 0

    for square in SQUARES_180:
        piece = self.piece_at(square)

        if not piece:
            empty += 1
        else:
            if empty:
                builder.append(str(empty))
                empty = 0
            builder.append(piece.symbol())
            if promoted and BB_SQUARES[square] & self.promoted:
                builder.append("~")

        if BB_SQUARES[square] & BB_FILE_H:
            if empty:
                builder.append(str(empty))
                empty = 0
            if square != H1:
                builder.append("/")

    return "".join(builder)
Gets the board FEN.
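This snippet matches python-chess, so a quick usage sketch:

import chess

chess.Board().board_fen()
# -> 'rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR'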
def headerData(self, section, orientation, role):
    if role == Qt.TextAlignmentRole:
        if orientation == Qt.Horizontal:
            return Qt.AlignCenter | Qt.AlignBottom
        else:
            return Qt.AlignRight | Qt.AlignVCenter
    if role != Qt.DisplayRole and role != Qt.ToolTipRole:
        return None
    if self.axis == 1 and self._shape[1] <= 1:
        return None
    orient_axis = 0 if orientation == Qt.Horizontal else 1
    if self.model.header_shape[orient_axis] > 1:
        header = section
    else:
        header = self.model.header(self.axis, section)
        if not is_type_text_string(header):
            header = to_text_string(header)
    return header
Get the information to put in the header.
def _file_lines(self, filename):
    try:
        return self._file_lines_cache[filename]
    except KeyError:
        if os.path.isfile(filename):
            with open(filename) as python_file:
                self._file_lines_cache[filename] = python_file.readlines()
        else:
            self._file_lines_cache[filename] = ""
        return self._file_lines_cache[filename]
Get lines for filename, caching opened files.
def expand_source_paths(paths):
    for src_path in paths:
        if src_path.endswith(('.pyc', '.pyo')):
            py_path = get_py_path(src_path)
            if os.path.exists(py_path):
                src_path = py_path
        yield src_path
Convert pyc files into their source equivalents.
def objectprep(self):
    if self.bcltofastq:
        if self.customsamplesheet:
            assert os.path.isfile(self.customsamplesheet), \
                'Cannot find custom sample sheet as specified {}' \
                .format(self.customsamplesheet)
        self.samples = fastqCreator.CreateFastq(self)
        samples_dict = vars(self.samples)
        self.index = samples_dict['index']
        self.index_length = samples_dict['indexlength']
        self.forward = samples_dict['forwardlength']
        self.reverse = samples_dict['reverselength']
        self.forwardlength = samples_dict['forward']
        self.reverselength = samples_dict['reverse']
        self.header = samples_dict['header']
    else:
        self.samples = createObject.ObjectCreation(self)
Creates fastq files from an in-progress Illumina MiSeq run, or creates an object, and moves files appropriately
def addCmdClass(self, ctor, **opts):
    item = ctor(self, **opts)
    name = item.getCmdName()
    self.cmds[name] = item
Add a Cmd subclass to this cli.