Columns: code (string, lengths 51–2.34k), docstring (string, lengths 11–171)
def primary_from_id(self, tax_id):
    s = select([self.names.c.tax_name],
               and_(self.names.c.tax_id == tax_id, self.names.c.is_primary))
    res = s.execute()
    output = res.fetchone()
    if not output:
        msg = 'value "{}" not found in names.tax_id'.format(tax_id)
        raise ValueError(msg)
    else:
        return output[0]
Returns primary taxonomic name associated with tax_id
def makedev(self, tarinfo, targetpath):
    if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
        raise ExtractError("special devices not supported by system")
    mode = tarinfo.mode
    if tarinfo.isblk():
        mode |= stat.S_IFBLK
    else:
        mode |= stat.S_IFCHR
    os.mknod(targetpath, mode, os.makedev(tarinfo.devmajor, tarinfo.devminor))
Make a character or block device called targetpath.
def save_updates(self):
    if not self._track_changes:
        return True
    url = self.build_url(
        self._endpoints.get('update_list_item').format(item_id=self.object_id))
    update = {field: value for field, value in self.fields.items()
              if self._cc(field) in self._track_changes}
    response = self.con.patch(url, update)
    if not response:
        return False
    self._clear_tracker()
    return True
Save the updated fields to the cloud
def sync(self, force=None):
    try:
        if force:
            sd = force
        else:
            sd = self.sync_dir()
        if sd == self.SYNC_DIR.FILE_TO_RECORD:
            if force and not self.exists():
                return None
            self.fs_to_record()
        elif sd == self.SYNC_DIR.RECORD_TO_FILE:
            self.record_to_fs()
        else:
            return None
        self._dataset.config.sync[self.file_const][sd] = time.time()
        return sd
    except Exception as e:
        self._bundle.rollback()
        self._bundle.error("Failed to sync '{}': {}".format(self.file_const, e))
        raise
Synchronize between the file in the file system and the field record
def readTableFromCSV(f, dialect="excel"):
    rowNames = []
    columnNames = []
    matrix = []
    first = True
    for row in csv.reader(f, dialect):
        if first:
            columnNames = row[1:]
            first = False
        else:
            rowNames.append(row[0])
            matrix.append([float(c) for c in row[1:]])
    return Table(rowNames, columnNames, matrix)
Reads a table object from given CSV file.
def _parse_blkio_metrics(self, stats):
    metrics = {
        'io_read': 0,
        'io_write': 0,
    }
    for line in stats:
        if 'Read' in line:
            metrics['io_read'] += int(line.split()[2])
        if 'Write' in line:
            metrics['io_write'] += int(line.split()[2])
    return metrics
Parse the blkio metrics.
def preview_stream(stream):
    _cv2.startWindowThread()
    for frame in stream.frame_generator():
        if frame is not None:
            _cv2.imshow('Video', frame)
            _cv2.moveWindow('Video', 5, 5)
        else:
            break
        key = _cv2.waitKey(1) & 0xFF
        if key == ord("q"):
            break
    _cv2.waitKey(1)
    _cv2.destroyAllWindows()
    _cv2.waitKey(1)
Display stream in an OpenCV window until "q" key is pressed
def _close(self, name, suppress_logging):
    try:
        pool_names = list(self.pools)
        if name in pool_names:
            self.pools[name].close()
            del self.pools[name]
    except Exception as e:
        self.logger.error('Exception on closing Flopsy Pool for {0}: {1}'.format(name, e),
                          exc_info=not suppress_logging)
closes one particular pool and all its AMQP connections
def stripped_name(self):
    name = self.name
    while True:
        name, n = self._parenthesis_re.subn('', name)
        if not n:
            break
    name = self._const_re.sub('', name)
    while True:
        name, n = self._angles_re.subn('', name)
        if not n:
            break
    return name
Remove extraneous information from C++ demangled function names.
def add_to_parent(self):
    parent = self.parent
    if parent is not None:
        try:
            children = parent.children
        except AttributeError:
            pass
        else:
            include(children, self)
Adds this node to the parent's ``children`` collection if it exists.
def build_attrs(self, *args, **kwargs):
    "Helper function for building an attribute dictionary."
    self.attrs = self.widget.build_attrs(*args, **kwargs)
    return self.attrs
Helper function for building an attribute dictionary.
def ConsultarRemito(self, cod_remito=None, id_req=None, tipo_comprobante=None,
                    punto_emision=None, nro_comprobante=None):
    "Obtener los datos de un remito generado"
    print(self.client.help("consultarRemito"))
    response = self.client.consultarRemito(
        authRequest={'token': self.Token, 'sign': self.Sign,
                     'cuitRepresentada': self.Cuit},
        codRemito=cod_remito, idReq=id_req,
        tipoComprobante=tipo_comprobante,
        puntoEmision=punto_emision, nroComprobante=nro_comprobante)
    ret = response.get("consultarRemitoReturn", {})
    id_req = ret.get("idReq", 0)
    self.remito = rec = ret.get("remito", {})
    self.__analizar_errores(ret)
    self.__analizar_observaciones(ret)
    self.__analizar_evento(ret)
    self.AnalizarRemito(rec)
    return id_req
Retrieve the data of a generated remito (delivery note)
def run(self):
    while not self._abort:
        hashes = self._GetHashes(self._hash_queue, self.hashes_per_batch)
        if hashes:
            time_before_analysis = time.time()
            hash_analyses = self.Analyze(hashes)
            current_time = time.time()
            self.seconds_spent_analyzing += current_time - time_before_analysis
            self.analyses_performed += 1
            for hash_analysis in hash_analyses:
                self._hash_analysis_queue.put(hash_analysis)
                self._hash_queue.task_done()
            time.sleep(self.wait_after_analysis)
        else:
            time.sleep(self.EMPTY_QUEUE_WAIT_TIME)
The method called by the threading library to start the thread.
def remove_external_references(self):
    for ex_ref_node in self.node.findall('externalReferences'):
        self.node.remove(ex_ref_node)
Removes any external reference from the role
def label(self) -> str:
    label = self.expression.replace("_", "\\;")
    if self.units_kind:
        symbol = wt_units.get_symbol(self.units)
        for v in self.variables:
            vl = "%s_{%s}" % (symbol, v.label)
            vl = vl.replace("_{}", "")
            label = label.replace(v.natural_name, vl)
    val = (
        round(self.value, self.round_spec)
        if self.round_spec is not None
        else self.value
    )
    label += r"\,=\,{}".format(format(val, self.format_spec))
    if self.units_kind:
        units_dictionary = getattr(wt_units, self.units_kind)
        label += r"\,"
        label += units_dictionary[self.units][2]
    label = r"$\mathsf{%s}$" % label
    return label
A LaTeX-formatted label representing the constant expression and united value.
def _unpack_episode(element: ET.Element):
    return Episode(
        epno=element.find('epno').text,
        type=int(element.find('epno').get('type')),
        length=int(element.find('length').text),
        titles=tuple(_unpack_episode_title(title)
                     for title in element.iterfind('title')),
    )
Unpack Episode from episode XML element.
def declared_symbols(self):
    return self.local_declared_symbols | (
        self.parent.declared_symbols if self.parent else set())
Return all symbols declared locally, plus those declared by the parents
def load_private_key(pem_path, passphrase_bytes=None):
    with open(pem_path, "rb") as f:
        return cryptography.hazmat.primitives.serialization.load_pem_private_key(
            data=f.read(),
            password=passphrase_bytes,
            backend=cryptography.hazmat.backends.default_backend(),
        )
Load private key from PEM encoded file
def rapl_read():
    basenames = glob.glob('/sys/class/powercap/intel-rapl:*/')
    basenames = sorted(set({x for x in basenames}))
    pjoin = os.path.join
    ret = list()
    for path in basenames:
        name = None
        try:
            name = cat(pjoin(path, 'name'), fallback=None, binary=False)
        except (IOError, OSError, ValueError) as err:
            logging.warning("ignoring %r for file %r", (err, path), RuntimeWarning)
            continue
        if name:
            try:
                current = cat(pjoin(path, 'energy_uj'))
                max_reading = 0.0
                ret.append(RaplStats(name, float(current), max_reading))
            except (IOError, OSError, ValueError) as err:
                logging.warning("ignoring %r for file %r", (err, path), RuntimeWarning)
    return ret
Read power stats and return dictionary
def post_save_stop(sender, instance, **kwargs):
    from multigtfs.models.trip import Trip
    trip_ids = instance.stoptime_set.filter(
        trip__shape=None).values_list('trip_id', flat=True).distinct()
    for trip in Trip.objects.filter(id__in=trip_ids):
        trip.update_geometry()
Update related objects when the Stop is updated
def _next_offset(self):
    if self._filestream:
        offset = self._filestream.tell()
        if offset:
            offset -= 1
    else:
        offset = self._initial_offset
    return offset
Return the offset of the next line to read.
def initializable(self):
    return bool(lib.EnvSlotInitableP(self._env, self._cls, self._name))
True if the Slot is initializable.
def second_textx_model(self, model_parser):
    if self.grammar_parser.debug:
        self.grammar_parser.dprint("RESOLVING MODEL PARSER: second_pass")
    self._resolve_rule_refs(self.grammar_parser, model_parser)
    self._determine_rule_types(model_parser.metamodel)
    self._resolve_cls_refs(self.grammar_parser, model_parser)
    return model_parser
Cross reference resolving for parser model.
def iterativeFetch(query, batchSize=default_batch_size):
    while True:
        rows = query.fetchmany(batchSize)
        if not rows:
            break
        rowDicts = sqliteRowsToDicts(rows)
        for rowDict in rowDicts:
            yield rowDict
Returns rows of an SQL fetch query on demand
def embeddedFileUpd(self, id, buffer=None, filename=None, ufilename=None, desc=None):
    return _fitz.Document_embeddedFileUpd(self, id, buffer, filename, ufilename, desc)
Change an embedded file given its entry number or name.
def gen_key(path):
    cmd = 'ssh-keygen -P "" -f {0} -t rsa -q'.format(path)
    if not os.path.isdir(os.path.dirname(path)):
        os.makedirs(os.path.dirname(path))
    subprocess.call(cmd, shell=True)
Generate a key for use with salt-ssh
def _zoom_labels(self, zoom):
    labelfont = self.grid.GetLabelFont()
    default_fontsize = get_default_font().GetPointSize()
    labelfont.SetPointSize(max(1, int(round(default_fontsize * zoom))))
    self.grid.SetLabelFont(labelfont)
Adjust grid label font to zoom factor
def setup(sphinx):
    create_auto_documentation()
    from sphinx.highlighting import lexers
    lexers['py3status'] = Py3statusLexer()
    sphinx.add_directive('screenshot', ScreenshotDirective)
This will be called by sphinx.
def _shorten_file_path(line):
    start = line.lower().find('file')
    if start < 0:
        return line
    first_quote = line.find('"', start)
    if first_quote < 0:
        return line
    second_quote = line.find('"', first_quote + 1)
    if second_quote < 0:
        return line
    path = line[first_quote + 1:second_quote]
    new_path = '/'.join(path.split('/')[-3:])
    return line[:first_quote] + '[...]/' + new_path + line[second_quote + 1:]
Shorten file path in error lines for more readable tracebacks.
def update(self, stats):
    if self.generate_every != 0 and self._timer.finished():
        self.args.generate_graph = True
        self._timer.reset()
    if not self.args.generate_graph:
        return
    plugins = stats.getPluginsList()
    for plugin_name in plugins:
        plugin = stats._plugins[plugin_name]
        if plugin_name in self.plugins_to_export():
            self.export(plugin_name, plugin.get_export_history())
    logger.info("Graphs created in the folder {}".format(self.path))
    self.args.generate_graph = False
Generate Graph file in the output folder.
def find(self, node, path):
    return node.find(path, namespaces=self.namespaces)
Wrapper for lxml's find.
def _get_belief_package(stmt):
    belief_packages = []
    for st in stmt.supports:
        parent_packages = _get_belief_package(st)
        package_stmt_keys = [pkg.statement_key for pkg in belief_packages]
        for package in parent_packages:
            if package.statement_key not in package_stmt_keys:
                belief_packages.append(package)
    belief_package = BeliefPackage(stmt.matches_key(), stmt.evidence)
    belief_packages.append(belief_package)
    return belief_packages
Return the belief packages of a given statement recursively.
def should_filter(items):
    return (vcfutils.get_paired(items) is not None
            and any("damage_filter" in dd.get_tools_on(d) for d in items))
Check if we should do damage filtering on somatic calling with low frequency events.
def configure_nodes(self):
    required_nodes = self._get_required_nodes()
    log.debug(
        "Matching existing lb nodes to required %s (port %s)" % (
            ", ".join(required_nodes), self.backend_port)
    )
    self.consul.match_lb_nodes(
        self.lb_attrs[A.loadbalancer.ID],
        self.lb_attrs[A.loadbalancer.NODES_KEY],
        required_nodes,
        self.backend_port)
    self.lb_attrs = self.consul.lb_details(
        self.lb_attrs[A.loadbalancer.ID]
    )
Ensure that the LB's nodes match the stack
def deconstruct(name):
    name = coerce_unicode(name, _c.FSQ_CHARSET)
    new_arg = sep = u''
    args = []
    if 1 > len(name):
        raise FSQMalformedEntryError(errno.EINVAL, u'cannot derive delimiter'\
                                     u'from: {0}'.format(name))
    delimiter, encodeseq = delimiter_encodeseq(name[0], _c.FSQ_ENCODE,
                                               _c.FSQ_CHARSET)
    if 1 == len(name):
        return delimiter, args
    encoding_trg = sep
    for c in name[1:]:
        if 3 == len(encoding_trg):
            encoding_trg = sep
        if c == encodeseq or len(encoding_trg):
            encoding_trg = sep.join([encoding_trg, c])
        elif c == delimiter:
            args.append(decode(new_arg, delimiter=delimiter,
                               encodeseq=encodeseq))
            new_arg = sep
            continue
        new_arg = sep.join([new_arg, c])
    args.append(decode(new_arg, delimiter=delimiter, encodeseq=encodeseq))
    return delimiter, args
Deconstruct a queue-name to a set of arguments
def object_ref(self):
    return ImmutableDict(type=self.type,
                         category_id=self.category_id,
                         event_id=self.event_id,
                         session_id=self.session_id,
                         contrib_id=self.contrib_id,
                         subcontrib_id=self.subcontrib_id)
Return the reference of the changed object.
def run_setup(setup_script, args):
    setup_dir = os.path.abspath(os.path.dirname(setup_script))
    with setup_context(setup_dir):
        try:
            sys.argv[:] = [setup_script] + list(args)
            sys.path.insert(0, setup_dir)
            working_set.__init__()
            working_set.callbacks.append(lambda dist: dist.activate())
            dunder_file = (
                setup_script
                if isinstance(setup_script, str) else
                setup_script.encode(sys.getfilesystemencoding())
            )
            with DirectorySandbox(setup_dir):
                ns = dict(__file__=dunder_file, __name__='__main__')
                _execfile(setup_script, ns)
        except SystemExit as v:
            if v.args and v.args[0]:
                raise
Run a distutils setup script, sandboxed in its directory
def interactive(outdir):
    print("Building your Blended files into a website!")
    global outdir_type
    outdir_type = outdir
    reload(sys)
    sys.setdefaultencoding('utf8')
    build_files(outdir)
    print("Watching the content and templates directories for changes, "
          "press CTRL+C to stop...\n")
    w = Watcher()
    w.run()
Blends the generated files and outputs an HTML website on file change
def validate_meta(meta):
    if not isinstance(meta, (dict,)):
        raise TypeError('Model Meta "linguist" must be a dict')
    required_keys = ("identifier", "fields")
    for key in required_keys:
        if key not in meta:
            raise KeyError('Model Meta "linguist" dict requires %s to be defined', key)
    if not isinstance(meta["fields"], (list, tuple)):
        raise ImproperlyConfigured(
            "Linguist Meta's fields attribute must be a list or tuple"
        )
Validates Linguist Meta attribute.
def delete_virtualip(self, loadbalancer, vip):
    lb = vip.parent
    if not lb:
        raise exc.UnattachedVirtualIP("No parent Load Balancer for this "
                                      "VirtualIP could be determined.")
    resp, body = self.api.method_delete("/loadbalancers/%s/virtualips/%s" %
                                        (lb.id, vip.id))
    return resp, body
Deletes the VirtualIP from its load balancer.
def setEngineRootOverride(self, rootDir):
    ConfigurationManager.setConfigKey('rootDirOverride', os.path.abspath(rootDir))
    try:
        self.getEngineVersion()
    except:
        print('Warning: the specified directory does not appear to contain a '
              'valid version of the Unreal Engine.')
Sets a user-specified directory as the root engine directory, overriding any auto-detection
def _extract_auth_config(self):
    service = self._service
    if not service.authentication:
        return {}
    auth_infos = {}
    for auth_rule in service.authentication.rules:
        selector = auth_rule.selector
        provider_ids_to_audiences = {}
        for requirement in auth_rule.requirements:
            provider_id = requirement.providerId
            if provider_id and requirement.audiences:
                audiences = requirement.audiences.split(u",")
                provider_ids_to_audiences[provider_id] = audiences
        auth_infos[selector] = AuthInfo(provider_ids_to_audiences)
    return auth_infos
Obtains the authentication configurations.
def serialize(self, value):
    if six.PY2:
        output = io.BytesIO()
        csv.writer(output).writerow([unicode(x).encode('utf-8') for x in value])
        serialized_value = output.getvalue().decode('utf-8').strip()
    else:
        output = io.StringIO()
        csv.writer(output).writerow([str(x) for x in value])
        serialized_value = output.getvalue().strip()
    return _helpers.StrOrUnicode(serialized_value)
Serialize a list as a string, if possible, or as a unicode string.
def echo_verbose_results(data, no_color):
    click.echo()
    click.echo(
        '\n'.join(
            '{}: {}'.format(key, val) for key, val in data['info'].items()
        )
    )
    click.echo()
    for test in data['tests']:
        if test['outcome'] == 'passed':
            fg = 'green'
        elif test['outcome'] == 'skipped':
            fg = 'yellow'
        else:
            fg = 'red'
        name = parse_test_name(test['name'])
        echo_style(
            '{} {}'.format(name, test['outcome'].upper()),
            no_color,
            fg=fg
        )
Print list of tests and result of each test.
def find_and_filter_sgf_files(base_dir, min_year=None, komi=None):
    sgf_files = []
    for dirpath, dirnames, filenames in os.walk(base_dir):
        for filename in filenames:
            if filename.endswith('.sgf'):
                path = os.path.join(dirpath, filename)
                sgf_files.append(path)
    if min_year == komi == None:
        print("Found {} sgf_files".format(len(sgf_files)))
        return sgf_files
    f = filter_year_komi(min_year, komi)
    filtered_sgf_files = [sgf for sgf in tqdm(sgf_files) if f(sgf)]
    print("{} of {} .sgf files matched (min_year >= {}, komi = {})".format(
        len(filtered_sgf_files), len(sgf_files), min_year, komi))
    return filtered_sgf_files
Finds all sgf files in base_dir with year >= min_year and matching komi
def overlays_at(self, key):
    if isinstance(key, slice):
        s, e, _ = key.indices(len(self.text))
    else:
        s = e = key
    return [o for o in self.overlays if o.start in Rng(s, e)]
Key may be a slice or a point.
def AddATR(self, readernode, atr):
    capchild = self.AppendItem(readernode, atr)
    self.SetPyData(capchild, None)
    self.SetItemImage(
        capchild, self.cardimageindex, wx.TreeItemIcon_Normal)
    self.SetItemImage(
        capchild, self.cardimageindex, wx.TreeItemIcon_Expanded)
    self.Expand(capchild)
    return capchild
Add an ATR to a reader node.
def _reorder_shape(input_shape, output=None):
    if output is None:
        return input_shape
    return base.nested_map(output, lambda i: input_shape[i])
Helper to determine the shape of reorder output.
def isportopen(host, port):
    if not 1 <= int(port) <= 65535:
        return False
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    out = sock.connect_ex((sanitize_host(host), int(port)))
    return out
Return status of a port
def flake(self, message):
    self.stdout.write(str(message))
    self.stdout.write('\n')
Print an error message to stdout.
def lset(self, name, index, value):
    "Set ``position`` of list ``name`` to ``value``"
    return self.execute_command('LSET', name, index, value)
Set ``position`` of list ``name`` to ``value``
def fisher_vector_product(self, vector, kl_divergence_gradient, model):
    assert not vector.requires_grad, "Vector must not propagate gradient"
    dot_product = vector @ kl_divergence_gradient
    double_gradient = torch.autograd.grad(dot_product, model.policy_parameters(),
                                          retain_graph=True)
    fvp = p2v(x.contiguous() for x in double_gradient)
    return fvp + vector * self.cg_damping
Calculate product Hessian @ vector
def lookup_comment_by_wordpress_id(self, comment_id, comments):
    for comment in comments:
        if comment.wordpress_id == comment_id:
            return comment
Returns Django comment object with this wordpress id
def connect(self):
    self.serial = serial.Serial(port=self.port, baudrate=self.baudrate,
                                timeout=self.timeout)
    self.alive = True
    self.rxThread = threading.Thread(target=self._readLoop)
    self.rxThread.daemon = True
    self.rxThread.start()
Connects to the device and starts the read thread
def to_index(self, index_type, index_name, includes=None):
    return IndexField(self.name, self.data_type, index_type, index_name,
                      includes)
Create an index field from this field
def _(pymux, variables):
    " Go to previous active window. "
    w = pymux.arrangement.get_previous_active_window()
    if w:
        pymux.arrangement.set_active_window(w)
Go to previous active window.
def ExistingFileOrNone(fname):
    if os.path.isfile(fname):
        return fname
    elif fname.lower() == 'none':
        return None
    else:
        raise ValueError("%s must specify a valid file name or 'None'" % fname)
Like `Existingfile`, but if `fname` is string "None" then return `None`.
def load(config, opt):
    ctx = Context(opt)
    seed_map = py_resources()
    seed_keys = sorted(set([m[0] for m in seed_map]), key=resource_sort)
    for config_key in seed_keys:
        if config_key not in config:
            continue
        for resource_config in config[config_key]:
            mod = find_model(config_key, resource_config, seed_map)
            if not mod:
                LOG.warning("unable to find mod for %s", resource_config)
                continue
            ctx.add(mod(resource_config, opt))
    for config_key in config.keys():
        if config_key != 'pgp_keys' and \
           config_key not in seed_keys:
            LOG.warning("missing model for %s", config_key)
    return filtered_context(ctx)
Loads and returns a full context object based on the Secretfile
def _assert_expandable(repl, use_format=False):
    if isinstance(repl, ReplaceTemplate):
        if repl.use_format != use_format:
            if use_format:
                raise ValueError("Replace not compiled as a format replace")
            else:
                raise ValueError("Replace should not be compiled as a format replace!")
    elif not isinstance(repl, (str, bytes)):
        raise TypeError("Expected string, buffer, or compiled replace!")
Check if replace template is expandable.
def transform(self, m):
    if len(m) != 6:
        raise ValueError("bad sequ. length")
    self.ul *= m
    self.ur *= m
    self.ll *= m
    self.lr *= m
    return self
Replace quad by its transformation with matrix m.
def resample_single_nifti(input_nifti):
    input_image = nibabel.load(input_nifti)
    output_image = resample_nifti_images([input_image])
    output_image.to_filename(input_nifti)
Resample a gantry tilted image in place
def pex_hash(cls, d):
    names = sorted(f for f in cls._iter_files(d)
                   if not (f.endswith('.pyc') or f.startswith('.')))

    def stream_factory(name):
        return open(os.path.join(d, name), 'rb')

    return cls._compute_hash(names, stream_factory)
Return a reproducible hash of the contents of a directory.
def _call(self, coeffs):
    if self.impl == 'pywt':
        coeffs = pywt.unravel_coeffs(coeffs,
                                     coeff_slices=self._coeff_slices,
                                     coeff_shapes=self._coeff_shapes,
                                     output_format='wavedecn')
        recon = pywt.waverecn(
            coeffs, wavelet=self.pywt_wavelet, mode=self.pywt_pad_mode,
            axes=self.axes)
        recon_shape = self.range.shape
        if recon.shape != recon_shape:
            recon_slc = []
            for i, (n_recon, n_intended) in enumerate(zip(recon.shape,
                                                          recon_shape)):
                if n_recon == n_intended + 1:
                    recon_slc.append(slice(-1))
                elif n_recon == n_intended:
                    recon_slc.append(slice(None))
                else:
                    raise ValueError(
                        'in axis {}: expected size {} or {} in '
                        '`recon_shape`, got {}'
                        ''.format(i, n_recon - 1, n_recon, n_intended))
            recon = recon[tuple(recon_slc)]
        return recon
    else:
        raise RuntimeError("bad `impl` '{}'".format(self.impl))
Return the inverse wavelet transform of ``coeffs``.
def __get_yubico_users(username):
    user = {}
    try:
        if __opts__['yubico_users'].get(username, None):
            (user['id'], user['key']) = list(__opts__['yubico_users'][username].values())
        else:
            return None
    except KeyError:
        return None
    return user
Grab the YubiKey Client ID & Secret Key
def in_lamp_reach(p):
    v1 = XYPoint(Lime.x - Red.x, Lime.y - Red.y)
    v2 = XYPoint(Blue.x - Red.x, Blue.y - Red.y)
    q = XYPoint(p.x - Red.x, p.y - Red.y)
    s = cross_product(q, v2) / cross_product(v1, v2)
    t = cross_product(v1, q) / cross_product(v1, v2)
    return (s >= 0.0) and (t >= 0.0) and (s + t <= 1.0)
Check if the provided XYPoint can be recreated by a Hue lamp.
def chunks(l, n):
    return [l[x:x + n] for x in range(0, len(l), n)]
Chunk l into n-sized bits
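A minimal usage sketch of the helper above, with a hypothetical input list:
>>> chunks([1, 2, 3, 4, 5], 2)
[[1, 2], [3, 4], [5]]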
def infer_attribute(self, context=None):
    for owner in self.expr.infer(context):
        if owner is util.Uninferable:
            yield owner
            continue
        if context and context.boundnode:
            if isinstance(owner, bases.Instance) and isinstance(
                context.boundnode, bases.Instance
            ):
                try:
                    if helpers.is_subtype(
                        helpers.object_type(context.boundnode),
                        helpers.object_type(owner),
                    ):
                        owner = context.boundnode
                except exceptions._NonDeducibleTypeHierarchy:
                    pass
        try:
            context.boundnode = owner
            yield from owner.igetattr(self.attrname, context)
            context.boundnode = None
        except (exceptions.AttributeInferenceError, exceptions.InferenceError):
            context.boundnode = None
        except AttributeError:
            context.boundnode = None
    return dict(node=self, context=context)
infer an Attribute node by using getattr on the associated object
def _draw_footer(self):
    n_rows, n_cols = self.term.stdscr.getmaxyx()
    window = self.term.stdscr.derwin(1, n_cols, self._row, 0)
    window.erase()
    window.bkgd(str(' '), self.term.attr('HelpBar'))
    text = self.FOOTER.strip()
    self.term.add_line(window, text, 0, 0)
    self._row += 1
Draw the key binds help bar at the bottom of the screen
def _idx_to_bits(self, i):
    bits = bin(i)[2:].zfill(self.nb_hyperplanes)
    return [-1.0 if b == "0" else 1.0 for b in bits]
Convert a group index to its bit representation.
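A worked example of the conversion above, assuming a hypothetical nb_hyperplanes of 4:
>>> bits = bin(5)[2:].zfill(4)  # '0101'
>>> [-1.0 if b == "0" else 1.0 for b in bits]
[-1.0, 1.0, -1.0, 1.0]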
def se(self):
    global _complained_se
    if not _complained_se:
        _complained_se = True
        l.critical("The name state.se is deprecated; please use state.solver.")
    return self.get_plugin('solver')
Deprecated alias for `solver`
def U_ij(q_vars: List[fl.Var], mass: np.ndarray, i: int, j: int):
    assert len(q_vars) == 3 * len(mass)
    mi = mass[i]
    mj = mass[j]
    U = -(G * mi * mj) / flux_r(q_vars, i, j)
    return U
Make Fluxion with the gravitational potential energy between body i and j
def load(self):
    if not op.exists(self.path):
        logger.debug("The GUI state file `%s` doesn't exist.", self.path)
        return
    assert op.exists(self.path)
    logger.debug("Load the GUI state from `%s`.", self.path)
    self.update(_bunchify(_load_json(self.path)))
Load the state from the JSON file in the config dir.
def cache_makedirs(self, subdir=None):
    if subdir is not None:
        dirname = self.cache_path
        if subdir:
            dirname = os.path.join(dirname, subdir)
    else:
        dirname = os.path.dirname(self.cache_path)
    os.makedirs(dirname, exist_ok=True)
Make necessary directories to hold cache value
def check_is_a_mapping(var, allow_none=False):
    if not is_a_mapping(var, allow_none=allow_none):
        raise TypeError("var must be a dict, however type(var) is {}"
                        .format(type(var)))
Calls is_a_mapping and raises a type error if the check fails.
def echo(
    message=None, file=None, nl=True, err=False, color=None, carriage_return=False
):
    message = message or ""
    if carriage_return and nl:
        click_echo(message + "\r\n", file, False, err, color)
    elif carriage_return and not nl:
        click_echo(message + "\r", file, False, err, color)
    else:
        click_echo(message, file, nl, err, color)
Patched click echo function.
def add_thread(self, checker, end_callback, source_code, parent):
    parent_id = id(parent)
    thread = AnalysisThread(self, checker, source_code)
    self.end_callbacks[id(thread)] = end_callback
    self.pending_threads.append((thread, parent_id))
    logger.debug("Added thread %r to queue" % thread)
    QTimer.singleShot(50, self.update_queue)
Add thread to queue
def numberarray(x, shape):
    try:
        iter(x)
    except TypeError:
        return numpy.ones(shape) * x
    else:
        return x
Return x if it is an array or create an array and fill it with x.
def _configure_cdn(self):
    ident = self.identity
    cdn_svc = ident.services.get("object_cdn")
    if cdn_svc:
        ep = cdn_svc.endpoints.get(self.region_name)
        if ep:
            self.cdn_management_url = ep.public_url
Initialize CDN-related endpoints, if available.
def parse_action(action, parsed):
    if action == "list":
        list_env()
    elif action == "new":
        new_env(parsed.environment)
    elif action == "remove":
        remove_env(parsed.environment)
    elif action == "show":
        show_env(parsed.environment)
    elif action == "start":
        start_env(parsed.environment, parsed.path)
Parse the action to execute.
async def sentinel_monitor(self, name, ip, port, quorum):
    "Add a new master to Sentinel to be monitored"
    return await self.execute_command('SENTINEL MONITOR', name, ip, port, quorum)
Add a new master to Sentinel to be monitored
def _setup_bar(self):
    bar = u""
    items_cnt = len(PROGRESS_BAR_ITEMS)
    bar_val = float(self._time_left) / self._section_time * self.num_progress_bars
    while bar_val > 0:
        selector = int(bar_val * items_cnt)
        selector = min(selector, items_cnt - 1)
        bar += PROGRESS_BAR_ITEMS[selector]
        bar_val -= 1
    bar = bar.ljust(self.num_progress_bars)
    return bar
Set up the progress bar.
def chain(*args):
    has_iter = partial(hasattr, name='__iter__')
    if len(args) == 1 and hasattr(args[0], '__iter__'):
        args = args[0]
    for arg in args:
        if hasattr(arg, '__iter__'):
            for i in arg:
                yield i
        else:
            yield arg
itertools.chain, just better
def convert_to_nested_dict(dotted_dict):
    nested_dict = {}
    for k, v in iterate_flattened(dotted_dict):
        set_by_dotted_path(nested_dict, k, v)
    return nested_dict
Convert a dict with dotted path keys to corresponding nested dict.
def add_raw_code(self, string_or_list):
    if _is_string(string_or_list):
        self._GMSH_CODE.append(string_or_list)
    else:
        assert isinstance(string_or_list, list)
        for string in string_or_list:
            self._GMSH_CODE.append(string)
    return
Add raw Gmsh code.
def by_value(self, value, default=None):
    try:
        return [k for k, v in self.items() if v == value][0]
    except IndexError:
        if default is not None:
            return default
        raise ValueError('%s' % value)
Returns the key for the given value
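A minimal usage sketch, assuming by_value is available as a plain function and attached to a hypothetical dict subclass:
>>> class ReverseLookupDict(dict):
...     by_value = by_value  # reuse the function defined above
...
>>> ReverseLookupDict({'a': 1, 'b': 2}).by_value(2)
'b'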
def parse_ini_file(self, path):
    cfgobj = ConfigObj(path, list_values=False)

    def extract_section(namespace, d):
        cfg = {}
        for key, val in d.items():
            if isinstance(d[key], dict):
                cfg.update(extract_section(namespace + [key], d[key]))
            else:
                cfg['_'.join(namespace + [key]).upper()] = val
        return cfg

    return extract_section([], cfgobj.dict())
Parse ini file at ``path`` and return dict.
def live_weather(self, live_weather):
    summary = live_weather['currently']['summary']
    self.summary(summary)
    click.echo()
Prints the live weather in a pretty format
def normalize(self, address, **kwargs):
    addresses = super(AddressType, self).normalize(address, **kwargs)
    return addresses
Make the address more compareable.
def _parse_references(xml):
    references = []
    ref_finder = HTMLReferenceFinder(xml)
    for elm, uri_attr in ref_finder:
        type_ = _discover_uri_type(elm.get(uri_attr))
        references.append(Reference(elm, type_, uri_attr))
    return references
Parse the references to ``Reference`` instances.
def update_flavor(self, flavor, body):
    return self.put(self.flavor_path % (flavor), body=body)
Update a Neutron service flavor.
def teardown_socket(s):
    try:
        s.shutdown(socket.SHUT_WR)
    except socket.error:
        pass
    finally:
        s.close()
Shuts down and closes a socket.
def run_etime(self):
    if self.start_datetime is None or self.end_datetime is None:
        return None
    return self.end_datetime - self.start_datetime
Wall-time of the run as `timedelta` object.
def get(self, timeout=None):
    self.wait(timeout)
    if isinstance(self._result, Exception):
        raise self._result
    return self._result
Return the result or raise the error the function has produced
def add_object(self, obj):
    state = self.state
    if not obj.layer in state.layers:
        state.layers[obj.layer] = {}
    state.layers[obj.layer][obj.key] = obj
    state.need_redraw = True
    if (not self.legend_checkbox_menuitem_added and
            isinstance(obj, SlipFlightModeLegend)):
        self.add_legend_checkbox_menuitem()
        self.legend_checkbox_menuitem_added = True
        self.SetMenuBar(self.menu.wx_menu())
add an object to a layer
def page_sequence(n_sheets: int, one_based: bool = True) -> List[int]:
    n_pages = calc_n_virtual_pages(n_sheets)
    assert n_pages % 4 == 0
    half_n_pages = n_pages // 2
    firsthalf = list(range(half_n_pages))
    secondhalf = list(reversed(range(half_n_pages, n_pages)))
    sequence = []
    top = True
    for left, right in zip(secondhalf, firsthalf):
        if not top:
            left, right = right, left
        sequence += [left, right]
        top = not top
    if one_based:
        sequence = [x + 1 for x in sequence]
    log.debug("{} sheets => page sequence {!r}", n_sheets, sequence)
    return sequence
Generates the final page sequence from the starting number of sheets.
def close(self):
    print("PGPooledTransaction - shutting down connection pool")
    for name, conn in self.pool.iteritems():
        conn.close()
        print("PGPooledTransaction - connection %s closed" % name)
close all pooled connections
def build_graph(self):
    for child, parents in self.dependencies.items():
        if child not in self.nodes:
            raise NodeNotFoundError(
                "App %s SQL item dependencies reference nonexistent child node %r" % (
                    child[0], child),
                child
            )
        for parent in parents:
            if parent not in self.nodes:
                raise NodeNotFoundError(
                    "App %s SQL item dependencies reference nonexistent parent node %r" % (
                        child[0], parent),
                    parent
                )
            self.node_map[child].add_parent(self.node_map[parent])
            self.node_map[parent].add_child(self.node_map[child])
    for node in self.nodes:
        self.ensure_not_cyclic(
            node,
            lambda x: (parent.key for parent in self.node_map[x].parents))
Read lazy dependency list and build graph.
def recompile_all(path):
    import os
    if os.path.isdir(path):
        for root, dirs, files in os.walk(path):
            for name in files:
                if name.endswith('.py'):
                    filename = os.path.abspath(os.path.join(root, name))
                    print >> sys.stderr, filename
                    recompile(filename)
    else:
        filename = os.path.abspath(path)
        recompile(filename)
recursively recompile all .py files in the directory
def summary(app):
    r = requests.get('https://{}.herokuapp.com/summary'.format(app))
    summary = r.json()['summary']
    click.echo("\nstatus \t| count")
    click.echo("----------------")
    for s in summary:
        click.echo("{}\t| {}".format(s[0], s[1]))
    num_101s = sum([s[1] for s in summary if s[0] == 101])
    num_10xs = sum([s[1] for s in summary if s[0] >= 100])
    if num_10xs > 0:
        click.echo("\nYield: {:.2%}".format(1.0 * num_101s / num_10xs))
Print a summary of a deployed app's status.
def command_line_runner():
    parser = get_parser()
    args = vars(parser.parse_args())
    if args['version']:
        print(__version__)
        return
    if args['clear_cache']:
        utils.clear_cache()
        print('Cleared {0}.'.format(utils.CACHE_DIR))
        return
    if not args['query']:
        parser.print_help()
        return
    if not os.getenv('SCRAPE_DISABLE_CACHE'):
        utils.enable_cache()
    if os.getenv('SCRAPE_DISABLE_IMGS'):
        args['no_images'] = True
    prompt_filetype(args)
    prompt_save_images(args)
    scrape(args)
Handle command-line interaction.