code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def log_if(level, msg, condition, *args):
    """Logs 'msg % args' at level 'level' only if condition is fulfilled."""
    if not condition:
        return
    log(level, msg, *args)
Logs 'msg % args' at level 'level' only if condition is fulfilled.
def fixtags(self, text): text = _guillemetLeftPat.sub(ur'\1 \2', text) text = _guillemetRightPat.sub(ur'\1 ', text) return text
Clean up special characters, only run once, next-to-last before doBlockLevels
def render_thread(self): obs = True while obs: obs = self._obs_queue.get() if obs: for alert in obs.observation.alerts: self._alerts[sc_pb.Alert.Name(alert)] = time.time() for err in obs.action_errors: if err.result != sc_err.Success: self._alerts[sc_err.ActionResult.Name(err.result)] = time.time() self.prepare_actions(obs) if self._obs_queue.empty(): self.render_obs(obs) if self._video_writer: self._video_writer.add(np.transpose( pygame.surfarray.pixels3d(self._window), axes=(1, 0, 2))) self._obs_queue.task_done()
A render loop that pulls observations off the queue to render.
def inh(table):
    """Apply the inverse hyperbolic sine transformation to each row of `table`.

    Returns a list of plain Python lists, one per input row.
    """
    return [np.arcsinh(row).tolist() for row in table]
inverse hyperbolic sine transformation
def parse_datetime(s: str) -> datetime:
    """Try to parse a datetime from any of the supported formats.

    Tries CREATION_DATE_FMT, PUBLISHED_DATE_FMT and PUBLISHED_DATE_FMT_2 in
    turn and returns the first successful parse.

    NOTE(review): the `datetime.strptime(...)` call only works when `datetime`
    is the class (`from datetime import datetime`); the old `-> datetime.date`
    annotation was therefore wrong — the function returns a full `datetime`,
    never a `date`. Annotation corrected to match the runtime behavior.

    Raises:
        ValueError: if `s` matches none of the known formats.
    """
    for fmt in (CREATION_DATE_FMT, PUBLISHED_DATE_FMT, PUBLISHED_DATE_FMT_2):
        try:
            return datetime.strptime(s, fmt)
        except ValueError:
            continue
    raise ValueError('Incorrect datetime format for {}'.format(s))
Try to parse a datetime object from a standard datetime format or date format.
def _hash(number, alphabet): hashed = '' len_alphabet = len(alphabet) while True: hashed = alphabet[number % len_alphabet] + hashed number //= len_alphabet if not number: return hashed
Hashes `number` using the given `alphabet` sequence.
def kube_cronjob_next_schedule_time(self, metric, scraper_config): check_basename = scraper_config['namespace'] + '.cronjob.on_schedule_check' curr_time = int(time.time()) for sample in metric.samples: on_schedule = int(sample[self.SAMPLE_VALUE]) - curr_time tags = [ self._format_tag(label_name, label_value, scraper_config) for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]) ] tags += scraper_config['custom_tags'] if on_schedule < 0: message = "The service check scheduled at {} is {} seconds late".format( time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(int(sample[self.SAMPLE_VALUE]))), on_schedule ) self.service_check(check_basename, self.CRITICAL, tags=tags, message=message) else: self.service_check(check_basename, self.OK, tags=tags)
Time until the next schedule
def create_postgresql_pypostgresql(username, password, host, port, database, **kwargs): return create_engine( _create_postgresql_pypostgresql( username, password, host, port, database), **kwargs )
create an engine connected to a postgresql database using pypostgresql.
def _load_manifest_from_url(manifest, url, verify_certificate=True, username=None, password=None): try: if username and password: manifest_file_handler = StringIO(lib.authenticated_get(username, password, url, verify=verify_certificate).decode("utf-8")) else: manifest_file_handler = StringIO(lib.cleaned_request( 'get', url, verify=verify_certificate ).text) manifest.readfp(manifest_file_handler) except requests.exceptions.RequestException: logger.debug("", exc_info=True) error_message = sys.exc_info()[1] raise ManifestException("There was an error retrieving {0}!\n {1}".format(url, str(error_message)))
load a url body into a manifest
def object_ns(self): return Namespace( subject=self.object_, object_=None, prefix=self.prefix, qualifier=self.qualifier, version=self.version, )
Create a new namespace for the current namespace's object value.
def use_comparative_asseessment_part_item_view(self): self._object_views['asseessment_part_item'] = COMPARATIVE for session in self._get_provider_sessions(): try: session.use_comparative_asseessment_part_item_view() except AttributeError: pass
Pass through to provider AssessmentPartItemSession.use_comparative_asseessment_part_item_view
def _all_dims(x, default_dims=None): if x.get_shape().ndims is not None: return list(xrange(x.get_shape().ndims)) else: return default_dims
Returns a list of dims in x or default_dims if the rank is unknown.
def slicesum(self, start, stop=None, axis=0): return self.slice(start, stop, axis).sum(axis)
Slices the histogram along axis, then sums over that slice, returning a d-1 dimensional histogram
def entropy(self, base = 2): entropy = 0 if not base and self.base: base = self.base for type in self._dist: if not base: entropy += self._dist[type] * -math.log(self._dist[type]) else: entropy += self._dist[type] * -math.log(self._dist[type], base) return entropy
Compute the entropy of the distribution
def _class_tags(cls): class_tags = set() for base in cls.mro()[1:]: class_tags.update(getattr(base, '_class_tags', set())) return class_tags
Collect the tags from all base classes.
def _calc_mask(self): mask = [] for row in self._constraints: mask.append(tuple(x is None for x in row)) return tuple(mask)
Computes a boolean mask from the user defined constraints.
def _selectTree( self ): self.uiGanttTREE.blockSignals(True) self.uiGanttTREE.clearSelection() for item in self.uiGanttVIEW.scene().selectedItems(): item.treeItem().setSelected(True) self.uiGanttTREE.blockSignals(False)
Matches the tree selection to the views selection.
def open_json(file_name):
    """Load the JSON file at `file_name` and return the parsed contents."""
    with open(file_name, "r") as fp:
        return json.load(fp)
Return the parsed JSON contents of the file (e.g. a dict or list), not a string.
def abbreviate(labels, rfill=' '): max_len = max(len(l) for l in labels) for i in range(1, max_len): abbrev = [l[:i].ljust(i, rfill) for l in labels] if len(abbrev) == len(set(abbrev)): break return abbrev
Abbreviate labels without introducing ambiguities.
def find_source_lines(self):
    """Mark all executable source lines in fn as executed 0 times.

    Populates ``self.sourcelines`` with a zero count for every executable
    line of ``self.fn`` and records the smallest such line number in
    ``self.firstcodelineno`` (falling back to ``self.firstlineno`` when no
    executable lines are found).
    """
    strs = trace.find_strings(self.filename)
    lines = trace.find_lines_from_code(self.fn.__code__, strs)
    # sys.maxint was removed in Python 3; sys.maxsize exists on 2.6+ as well,
    # so this sentinel works on both major versions.
    self.firstcodelineno = sys.maxsize
    for lineno in lines:
        self.firstcodelineno = min(self.firstcodelineno, lineno)
        self.sourcelines.setdefault(lineno, 0)
    if self.firstcodelineno == sys.maxsize:
        self.firstcodelineno = self.firstlineno
Mark all executable source lines in fn as executed 0 times.
def dataset(self): data = tablib.Dataset() if len(list(self)) == 0: return data first = self[0] data.headers = first.keys() for row in self.all(): row = _reduce_datetimes(row.values()) data.append(row) return data
A Tablib Dataset representation of the RecordCollection.
def twirl_url(self): return construct_api_url(self.input, 'twirl', self.resolvers, False, self.get3d, False, **self.kwargs)
Url of a TwirlyMol 3D viewer.
def prettylist(list_):
    """Filter out duplicate values while keeping order.

    Returns '' for empty input, the single value itself when only one unique
    entry remains, otherwise a '[a; b; ...]'-style joined string.
    """
    if not list_:
        return ''
    deduped = list(dict.fromkeys(list_))
    if len(deduped) == 1:
        return deduped[0]
    return '[' + '; '.join(deduped) + ']'
Filter out duplicate values while keeping order.
def omit(self): self._omit = self.lib.iperf_get_test_omit(self._test) return self._omit
The test startup duration to omit in seconds.
def stringify(data):
    """Turns all dictionary values into strings.

    Recurses through nested dicts and lists; dicts are mutated in place and
    returned, lists are rebuilt, leaf values go through smart_text.
    """
    if isinstance(data, list):
        return [stringify(item) for item in data]
    if isinstance(data, dict):
        for key in data:
            data[key] = stringify(data[key])
        return data
    return smart_text(data)
Turns all dictionary values into strings
def inject_into_urllib3(): util.ssl_.SSLContext = SecureTransportContext util.HAS_SNI = HAS_SNI util.ssl_.HAS_SNI = HAS_SNI util.IS_SECURETRANSPORT = True util.ssl_.IS_SECURETRANSPORT = True
Monkey-patch urllib3 with SecureTransport-backed SSL-support.
def run(addr, *commands, **kwargs): results = [] handler = VarnishHandler(addr, **kwargs) for cmd in commands: if isinstance(cmd, tuple) and len(cmd)>1: results.extend([getattr(handler, c[0].replace('.','_'))(*c[1:]) for c in cmd]) else: results.append(getattr(handler, cmd.replace('.','_'))(*commands[1:])) break handler.close() return results
Non-threaded batch command runner returning output results
def _makepass(password, hasher='sha256'): if hasher == 'sha256': h = hashlib.sha256(password) elif hasher == 'md5': h = hashlib.md5(password) else: return NotImplemented c = "abcdefghijklmnopqrstuvwxyz" \ "ABCDEFGHIJKLMNOPQRSTUVWXYZ" \ "0123456789!?.,:;/*-+_()" r = { 'Method': h.name, 'Salt': ''.join(random.SystemRandom().choice(c) for x in range(20)), } h.update(r['Salt']) r['Hash'] = h.hexdigest() return r
Create a znc compatible hashed password
def apply_trans_rot(ampal, translation, angle, axis, point, radians=False): if not numpy.isclose(angle, 0.0): ampal.rotate(angle=angle, axis=axis, point=point, radians=radians) ampal.translate(vector=translation) return
Applies a translation and rotation to an AMPAL object.
def add_resources_to_registry(): from deform.widget import default_resource_registry default_resource_registry.set_js_resources("jqueryui", None, None) default_resource_registry.set_js_resources("datetimepicker", None, None) default_resource_registry.set_js_resources("custom_dates", None, None) default_resource_registry.set_js_resources( "radio_choice_toggle", None, None ) default_resource_registry.set_js_resources("checkbox_toggle", None, None) from js.deform import resource_mapping from js.select2 import select2 resource_mapping['select2'] = select2 from js.jquery_timepicker_addon import timepicker resource_mapping['datetimepicker'] = timepicker resource_mapping['custom_dates'] = custom_dates resource_mapping['radio_choice_toggle'] = radio_choice_toggle resource_mapping['checkbox_toggle'] = checkbox_toggle
Add resources to the deform registry
def register(name=''): "For backwards compatibility, we support @register(name) syntax." def reg(widget): w = widget.class_traits() Widget.widget_types.register(w['_model_module'].default_value, w['_model_module_version'].default_value, w['_model_name'].default_value, w['_view_module'].default_value, w['_view_module_version'].default_value, w['_view_name'].default_value, widget) return widget if isinstance(name, string_types): import warnings warnings.warn("Widget registration using a string name has been deprecated. Widget registration now uses a plain `@register` decorator.", DeprecationWarning) return reg else: return reg(name)
For backwards compatibility, we support @register(name) syntax.
def log_vacation_days(): days_off = get_days_off(rc.read()) pretty_days = map(lambda day: day.strftime('%a %b %d %Y'), days_off) for day in pretty_days: print(day)
Report taken days off, printing one formatted date per line.
def global_custom_theme(request): today = datetime.datetime.now().date() theme = {} if today.month == 3 and (14 <= today.day <= 16): theme = {"css": "themes/piday/piday.css"} return {"theme": theme}
Add custom theme css.
def update_visited(self): assert isinstance(self.player.cshape.center, eu.Vector2) pos = self.player.cshape.center def set_visited(layer, cell): if cell and not cell.properties.get('visited') and cell.tile and cell.tile.id > 0: cell.properties['visited'] = True self.reward_explore() key = layer.get_key_at_pixel(cell.x, cell.y) layer.set_cell_opacity(key[0], key[1], 255*0.8) current = self.visit_layer.get_at_pixel(pos.x, pos.y) if current: if current == self.visit_layer.get_at_pixel(self.spawn.x, self.spawn.y): self.reward_goal() set_visited(self.visit_layer, current) neighbours = self.visit_layer.get_neighbors(current) for cell in neighbours: neighbour = neighbours[cell] set_visited(self.visit_layer, neighbour)
Updates exploration map visited status
def _get_atomsection(mol2_lst): started = False for idx, s in enumerate(mol2_lst): if s.startswith('@<TRIPOS>ATOM'): first_idx = idx + 1 started = True elif started and s.startswith('@<TRIPOS>'): last_idx_plus1 = idx break return mol2_lst[first_idx:last_idx_plus1]
Returns atom section from mol2 provided as list of strings
def _raiseValidationException(standardExcMsg, customExcMsg=None):
    """Raise ValidationException with standardExcMsg, unless customExcMsg is specified."""
    message = standardExcMsg if customExcMsg is None else customExcMsg
    raise ValidationException(str(message))
Raise ValidationException with standardExcMsg, unless customExcMsg is specified.
def undo_action_name(self): if self._open: return self._open[-1].name elif self._undo: return self._undo[-1].name return ""
The name of the top group on the undo stack, or an empty string.
def find_by_name(self, item_name, items_list, name_list=None):
    """Return item from items_list with name item_name.

    If `name_list` is given, it is used as a parallel list of names indexing
    directly into `items_list`.  Otherwise items are matched by their own
    `.name` attribute (falsy entries are skipped).

    Returns:
        The matching item, or False when no match is found.
    """
    if name_list:
        if item_name in name_list:
            return items_list[name_list.index(item_name)]
        return False
    # Match each item by its own name. The original built a *filtered* name
    # list and indexed the *unfiltered* items_list with it, returning the
    # wrong item whenever items_list contained falsy entries.
    for item in items_list:
        if item and item.name == item_name:
            return item
    return False
Return item from items_list with name item_name.
def PrintStorageInformation(self): storage_reader = storage_factory.StorageFactory.CreateStorageReaderForFile( self._storage_file_path) if not storage_reader: logger.error( 'Format of storage file: {0:s} not supported'.format( self._storage_file_path)) return try: if self._output_format == 'json': self._PrintStorageInformationAsJSON(storage_reader) elif self._output_format == 'text': self._PrintStorageInformationAsText(storage_reader) finally: storage_reader.Close()
Prints the storage information.
def _project_on_ellipsoid(c, r, locations): p0 = locations - c l2 = 1 / np.sum(p0**2 / r**2, axis=1, keepdims=True) p = p0 * np.sqrt(l2) fun = lambda x: np.sum((x.reshape(p0.shape) - p0)**2) con = lambda x: np.sum(x.reshape(p0.shape)**2 / r**2, axis=1) - 1 res = sp.optimize.minimize(fun, p, constraints={'type': 'eq', 'fun': con}, method='SLSQP') return res['x'].reshape(p0.shape) + c
displace locations to the nearest point on ellipsoid surface
def drawHUD(self): self.win.move(self.height - 2, self.x_pad) self.win.clrtoeol() self.win.box() self.addstr(2, self.x_pad + 1, "Population: %i" % len(self.grid)) self.addstr(3, self.x_pad + 1, "Generation: %s" % self.current_gen) self.addstr(3, self.x_grid - 21, "s: start p: pause") self.addstr(2, self.x_grid - 21, "r: restart q: quit")
Draw information on population size and current generation
def enumerate(context, data): items = ensure_list(context.params.get('items')) for item in items: data['item'] = item context.emit(data=data)
Iterate through a set of items and emit each one of them.
def make_header(self, locale, catalog): return { "po-revision-date": self.get_catalogue_header_value(catalog, 'PO-Revision-Date'), "mime-version": self.get_catalogue_header_value(catalog, 'MIME-Version'), "last-translator": 'Automatic <hi@thorgate.eu>', "x-generator": "Python", "language": self.get_catalogue_header_value(catalog, 'Language') or locale, "lang": locale, "content-transfer-encoding": self.get_catalogue_header_value(catalog, 'Content-Transfer-Encoding'), "project-id-version": self.get_catalogue_header_value(catalog, 'Project-Id-Version'), "pot-creation-date": self.get_catalogue_header_value(catalog, 'POT-Creation-Date'), "domain": self.domain, "report-msgid-bugs-to": self.get_catalogue_header_value(catalog, 'Report-Msgid-Bugs-To'), "content-type": self.get_catalogue_header_value(catalog, 'Content-Type'), "plural-forms": self.get_plural(catalog), "language-team": self.get_catalogue_header_value(catalog, 'Language-Team') }
Populate header with correct data from top-most locale file.
def register_hid_device(screen_width, screen_height, absolute=False, integrated_display=False): message = create(protobuf.REGISTER_HID_DEVICE_MESSAGE) descriptor = message.inner().deviceDescriptor descriptor.absolute = 1 if absolute else 0 descriptor.integratedDisplay = 1 if integrated_display else 0 descriptor.screenSizeWidth = screen_width descriptor.screenSizeHeight = screen_height return message
Create a new REGISTER_HID_DEVICE_MESSAGE.
def Docker(): docker_info = {'server': {}, 'env': '', 'type': '', 'os': ''} try: d_client = docker.from_env() docker_info['server'] = d_client.version() except Exception as e: logger.error("Can't get docker info " + str(e)) system = System() docker_info['os'] = system if 'DOCKER_MACHINE_NAME' in environ: docker_info['env'] = environ['DOCKER_MACHINE_NAME'] docker_info['type'] = 'docker-machine' elif 'DOCKER_HOST' in environ: docker_info['env'] = environ['DOCKER_HOST'] docker_info['type'] = 'remote' else: docker_info['type'] = 'native' return docker_info
Get Docker setup information
def serialized(self, prepend_date=True): name = self.serialized_name() datetime = self.serialized_time(prepend_date) return "%s %s" % (datetime, name)
Return a string fully representing the fact.
def time(ctx, hours, minutes, seconds): return _time(conversions.to_integer(hours, ctx), conversions.to_integer(minutes, ctx), conversions.to_integer(seconds, ctx))
Defines a time value
def add_host(self, name: str, **kwargs) -> None: host = { name: deserializer.inventory.InventoryElement.deserialize_host( name=name, defaults=self.defaults, **kwargs ) } self.hosts.update(host)
Add a host to the inventory after initialization
def _raise_error_from_response(data): meta = data.get('meta') if meta: if meta.get('code') in (200, 409): return data exc = error_types.get(meta.get('errorType')) if exc: raise exc(meta.get('errorDetail')) else: _log_and_raise_exception('Unknown error. meta', meta) else: _log_and_raise_exception('Response format invalid, missing meta property. data', data)
Processes the response data
def AddAccuracy(model, softmax, label): accuracy = brew.accuracy(model, [softmax, label], "accuracy") return accuracy
Adds an accuracy op to the model
def _update_visible_blocks(self, *args): self._visible_blocks[:] = [] block = self.firstVisibleBlock() block_nbr = block.blockNumber() top = int(self.blockBoundingGeometry(block).translated( self.contentOffset()).top()) bottom = top + int(self.blockBoundingRect(block).height()) ebottom_top = 0 ebottom_bottom = self.height() while block.isValid(): visible = (top >= ebottom_top and bottom <= ebottom_bottom) if not visible: break if block.isVisible(): self._visible_blocks.append((top, block_nbr, block)) block = block.next() top = bottom bottom = top + int(self.blockBoundingRect(block).height()) block_nbr = block.blockNumber()
Updates the list of visible blocks
def launch_ipython(argv=None): from .linux import launch_ipython as _launch_ipython_linux os.environ = {str(k): str(v) for k,v in os.environ.items()} try: from qtconsole.qtconsoleapp import JupyterQtConsoleApp except ImportError: sys.exit("ERROR: IPython QtConsole not installed in this environment. " "Try with `conda install jupyter ipython qtconsole`") else: _launch_ipython_linux(ipython_app=JupyterQtConsoleApp)
Force usage of QtConsole under Windows
def at_match(self, match, predicate=None, index=None): return self.at_span(match.span, predicate, index)
Retrieves a list of matches from given match.
def best_parent( self, node, tree_type=None ): parents = self.parents(node) selected_parent = None if node['type'] == 'type': module = ".".join( node['name'].split( '.' )[:-1] ) if module: for mod in parents: if mod['type'] == 'module' and mod['name'] == module: selected_parent = mod if parents and selected_parent is None: parents.sort( key = lambda x: self.value(node, x) ) return parents[-1] return selected_parent
Choose the best parent for a given node
def grid_2d_8graph(self, m, n): me = nx.Graph() node = me.node add_node = me.add_node add_edge = me.add_edge for i in range(m): for j in range(n): add_node((i, j)) if i > 0: add_edge((i, j), (i-1, j)) if j > 0: add_edge((i, j), (i-1, j-1)) if j > 0: add_edge((i, j), (i, j-1)) if (i - 1, j + 1) in node: add_edge((i, j), (i-1, j+1)) return self.copy_from(me)
Make a 2d graph that's connected 8 ways, enabling diagonal movement
def cancelThread(*threads, exception=EscapeException): 'Raise exception on another thread.' for t in threads: ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(t.ident), ctypes.py_object(exception))
Raise exception on another thread.
def result(self, res): return self >> Parser(lambda _, index: Value.success(index, res))
Return a value according to the parameter `res` when parse successfully.
def capture_dash_in_url_name(self, node): for keyword in node.keywords: if keyword.arg == 'name' and '-' in keyword.value.s: return DJ04( lineno=node.lineno, col=node.col_offset, )
Capture dash in URL name
def _run_collect_allelic_counts(pos_file, pos_name, work_dir, data): out_dir = utils.safe_makedir(os.path.join(dd.get_work_dir(data), "structural", "counts")) out_file = os.path.join(out_dir, "%s-%s-counts.tsv" % (dd.get_sample_name(data), pos_name)) if not utils.file_exists(out_file): with file_transaction(data, out_file) as tx_out_file: params = ["-T", "CollectAllelicCounts", "-L", pos_file, "-I", dd.get_align_bam(data), "-R", dd.get_ref_file(data), "-O", tx_out_file] _run_with_memory_scaling(params, tx_out_file, data) return out_file
Counts by alleles for a specific sample and set of positions.
def unique(series: pd.Series) -> pd.Series: return ~series.duplicated(keep=False)
Test that the data items do not repeat.
def unicode_name(self, name, in_group=False): value = ord(_unicodedata.lookup(name)) if (self.is_bytes and value > 0xFF): value = "" if not in_group and value == "": return '[^%s]' % ('\x00-\xff' if self.is_bytes else _uniprops.UNICODE_RANGE) elif value == "": return value else: return ['\\%03o' % value if value <= 0xFF else chr(value)]
Insert Unicode value by its name.
def begin_commit(): session_token = request.headers['session_token'] repository = request.headers['repository'] current_user = have_authenticated_user(request.environ['REMOTE_ADDR'], repository, session_token) if current_user is False: return fail(user_auth_fail_msg) repository_path = config['repositories'][repository]['path'] def with_exclusive_lock(): if not can_aquire_user_lock(repository_path, session_token): return fail(lock_fail_msg) data_store = versioned_storage(repository_path) if data_store.get_head() != request.headers["previous_revision"]: return fail(need_to_update_msg) if data_store.have_active_commit(): data_store.rollback() data_store.begin() update_user_lock(repository_path, session_token) return success() return lock_access(repository_path, with_exclusive_lock)
Allow a client to begin a commit and acquire the write lock
def _find_controller(self, *args): for name in args: obj = self._lookup_child(name) if obj and iscontroller(obj): return obj return None
Returns the appropriate controller for routing a custom action.
def read(fname): path = os.path.join(SCRIPTDIR, fname) if PY3: f = open(path, 'r', encoding='utf8') else: f = open(path, 'r') content = f.read() f.close() return content
Return content of specified file
def addPrefs(self, prefs=[]): if len(prefs) == len(self.preferences) == 0: logger.debug("no preferences") return None self.preferences.extend(prefs) self.css1(path['search-btn']).click() count = 0 for pref in self.preferences: self.css1(path['search-pref']).fill(pref) self.css1(path['pref-icon']).click() btn = self.css1('div.add-to-watchlist-popup-item .icon-wrapper') if not self.css1('svg', btn)['class'] is None: btn.click() count += 1 self.css1(path['pref-icon']).click() self.css1(path['back-btn']).click() self.css1(path['back-btn']).click() logger.debug("updated %d preferences" % count) return self.preferences
add preference in self.preferences
def move_position(self, dx, dy, speed=None): if speed: self._intf.write('MoveChuckPosition %1.1f %1.1f R Y %d' % (dx, dy, speed)) else: self._intf.write('MoveChuckPosition %1.1f %1.1f R Y' % (dx, dy))
Move chuck relative to actual position in um
def reset_password(app, appbuilder, username, password): _appbuilder = import_application(app, appbuilder) user = _appbuilder.sm.find_user(username=username) if not user: click.echo("User {0} not found.".format(username)) else: _appbuilder.sm.reset_password(user.id, password) click.echo(click.style("User {0} reseted.".format(username), fg="green"))
Resets a user's password
def default_hass_config_dir():
    """Put together the default configuration directory based on the OS.

    Windows uses %APPDATA%; every other OS uses the user's home directory.
    """
    if os.name == "nt":
        base_dir = os.getenv("APPDATA")
    else:
        base_dir = os.path.expanduser("~")
    return os.path.join(base_dir, ".homeassistant")
Put together the default configuration directory based on the OS.
def add_fortran_to_env(env): try: FortranSuffixes = env['FORTRANFILESUFFIXES'] except KeyError: FortranSuffixes = ['.f', '.for', '.ftn'] try: FortranPPSuffixes = env['FORTRANPPFILESUFFIXES'] except KeyError: FortranPPSuffixes = ['.fpp', '.FPP'] DialectAddToEnv(env, "FORTRAN", FortranSuffixes, FortranPPSuffixes, support_module = 1) env['FORTRANMODPREFIX'] = '' env['FORTRANMODSUFFIX'] = '.mod' env['FORTRANMODDIR'] = '' env['FORTRANMODDIRPREFIX'] = '' env['FORTRANMODDIRSUFFIX'] = '' env['_FORTRANMODFLAG'] = '$( ${_concat(FORTRANMODDIRPREFIX, FORTRANMODDIR, FORTRANMODDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)'
Add Builders and construction variables for Fortran to an Environment.
def signature(self): iexec, execmod = self.context.parser.tree_find(self.context.el_name, self.context.module, "executables") if iexec is None: iexec, execmod = self.context.parser.tree_find(self.context.el_name, self.context.module, "interfaces") if iexec is None: return [] return self._signature_index(iexec)
Gets completion or call signature information for the current cursor.
def spew_length(self, length): pos = self.pos if not pos or length > pos: return None row = self.row for char in reversed(self.string[pos - length:pos]): pos -= 1 if char == '\n': row -= 1 self.pos = pos self.col = self.eol_distance_last() self.row = row if self.has_space(): self.eos = 0
Move current position backwards by length.
def add_atmost(self, lits, k, no_return=True): if self.minicard: res = pysolvers.minicard_add_am(self.minicard, lits, k) if res == False: self.status = False if not no_return: return res
Add a new atmost constraint to solver's internal formula.
def git_lines(*args, git=maybeloggit, **kwargs): 'Generator of stdout lines from given git command' err = io.StringIO() try: for line in git('--no-pager', _err=err, *args, _decode_errors='replace', _iter=True, _bg_exc=False, **kwargs): yield line[:-1] except sh.ErrorReturnCode as e: status('exit_code=%s' % e.exit_code) errlines = err.getvalue().splitlines() if len(errlines) < 3: for line in errlines: status(line) else: vd().push(TextSheet('git ' + ' '.join(args), errlines))
Generator of stdout lines from given git command
def make_automaton(self): queue = deque() for i in range(256): c = chr(i) if c in self.root.children: node = self.root.children[c] node.fail = self.root queue.append(node) else: self.root.children[c] = self.root while queue: r = queue.popleft() for node in r.children.values(): queue.append(node) state = r.fail while node.char not in state.children: state = state.fail node.fail = state.children.get(node.char, self.root)
Converts trie to Aho-Corasick automaton.
def error_string(mqtt_errno): if mqtt_errno == MQTT_ERR_SUCCESS: return "No error." elif mqtt_errno == MQTT_ERR_NOMEM: return "Out of memory." elif mqtt_errno == MQTT_ERR_PROTOCOL: return "A network protocol error occurred when communicating with the broker." elif mqtt_errno == MQTT_ERR_INVAL: return "Invalid function arguments provided." elif mqtt_errno == MQTT_ERR_NO_CONN: return "The client is not currently connected." elif mqtt_errno == MQTT_ERR_CONN_REFUSED: return "The connection was refused." elif mqtt_errno == MQTT_ERR_NOT_FOUND: return "Message not found (internal error)." elif mqtt_errno == MQTT_ERR_CONN_LOST: return "The connection was lost." elif mqtt_errno == MQTT_ERR_TLS: return "A TLS error occurred." elif mqtt_errno == MQTT_ERR_PAYLOAD_SIZE: return "Payload too large." elif mqtt_errno == MQTT_ERR_NOT_SUPPORTED: return "This feature is not supported." elif mqtt_errno == MQTT_ERR_AUTH: return "Authorisation failed." elif mqtt_errno == MQTT_ERR_ACL_DENIED: return "Access denied by ACL." elif mqtt_errno == MQTT_ERR_UNKNOWN: return "Unknown error." elif mqtt_errno == MQTT_ERR_ERRNO: return "Error defined by errno." else: return "Unknown error."
Return the error string associated with an mqtt error number.
def _determine_current_dimension_size(self, dim_name, max_size): if self.dimensions[dim_name] is not None: return max_size def _find_dim(h5group, dim): if dim not in h5group: return _find_dim(h5group.parent, dim) return h5group[dim] dim_variable = _find_dim(self._h5group, dim_name) if "REFERENCE_LIST" not in dim_variable.attrs: return max_size root = self._h5group["/"] for ref, _ in dim_variable.attrs["REFERENCE_LIST"]: var = root[ref] for i, var_d in enumerate(var.dims): name = _name_from_dimension(var_d) if name == dim_name: max_size = max(var.shape[i], max_size) return max_size
Helper method to determine the current size of a dimension.
def update_pointed(self): if not self.pointed_at_expired: try: conf_string, stat2 = self.zoo_client.get(self.point_path, watch=self.watch_pointed) except ZookeeperError: self.old_data = '' self.set_valid(False) self.pointed_at_expired = True self.call_error(self.INVALID_PATH) return if self.compare_data(conf_string): self.call_config(conf_string) self.set_valid(True)
Grabs the latest file contents based on the pointer uri
def create_connection_model(service): services = service._services bases = (BaseModel,) attributes = {model_service_name(service): fields.CharField() for service in services} return type(BaseModel)(connection_service_name(service), bases, attributes)
Create an SQL Alchemy table that connects the provides services
def _await_socket(self, timeout): with safe_open(self._ng_stdout, 'r') as ng_stdout: start_time = time.time() accumulated_stdout = '' while 1: remaining_time = time.time() - (start_time + timeout) if remaining_time > 0: stderr = read_file(self._ng_stderr, binary_mode=True) raise self.InitialNailgunConnectTimedOut( timeout=timeout, stdout=accumulated_stdout, stderr=stderr, ) readable, _, _ = select.select([ng_stdout], [], [], (-1 * remaining_time)) if readable: line = ng_stdout.readline() try: return self._NG_PORT_REGEX.match(line).group(1) except AttributeError: pass accumulated_stdout += line
Blocks for the nailgun subprocess to bind and emit a listening port in the nailgun stdout.
def refresh_toc(self, refresh_done_callback, toc_cache): self._useV2 = self.cf.platform.get_protocol_version() >= 4 self._toc_cache = toc_cache self._refresh_callback = refresh_done_callback self.toc = None pk = CRTPPacket() pk.set_header(CRTPPort.LOGGING, CHAN_SETTINGS) pk.data = (CMD_RESET_LOGGING,) self.cf.send_packet(pk, expected_reply=(CMD_RESET_LOGGING,))
Start refreshing the table of loggale variables
def update(): repo_directory = get_config()['repo_directory'] os.chdir(repo_directory) click.echo("Check for updates...") local = subprocess.check_output('git rev-parse master'.split()).strip() remote = subprocess.check_output( 'git ls-remote https://github.com/tldr-pages/tldr/ HEAD'.split() ).split()[0] if local != remote: click.echo("Updating...") subprocess.check_call('git checkout master'.split()) subprocess.check_call('git pull --rebase'.split()) build_index() click.echo("Update to the latest and rebuild the index.") else: click.echo("No need for updates.")
Update to the latest pages.
def apply(self, coordinates): transform = self.get_transformation(coordinates) result = MolecularDistortion(self.affected_atoms, transform) result.apply(coordinates) return result
Generate, apply and return a random manipulation
def copy(self): ms = MouseState() ms.left_pressed = self.left_pressed ms.middle_pressed = self.middle_pressed ms.right_pressed = self.right_pressed ms.mouse_pos = self.mouse_pos return ms
Create a copy of this MouseState and return it.
def ics2task(): from argparse import ArgumentParser, FileType from sys import stdin parser = ArgumentParser(description='Converter from iCalendar to Taskwarrior syntax.') parser.add_argument('infile', nargs='?', type=FileType('r'), default=stdin, help='Input iCalendar file (default: stdin)') parser.add_argument('outdir', nargs='?', help='Output Taskwarrior directory (default to ~/.task)', default=expanduser('~/.task')) args = parser.parse_args() vobject = readOne(args.infile.read()) task = IcsTask(args.outdir) for todo in vobject.vtodo_list: task.to_task(todo)
Command line tool to convert from iCalendar to Taskwarrior
def _notify_remove(self, slice_): change = RemoveChange(self, slice_) self.notify_observers(change)
Notify about a RemoveChange.
def to_fp32(learn:Learner):
    "Put `learn` back to FP32 precision mode."
    learn.data.remove_tfm(batch_to_half)
    # Iterate over a snapshot: removing entries from `learn.callbacks`
    # while iterating it directly can skip elements (e.g. when two
    # MixedPrecision callbacks sit next to each other).
    for cb in list(learn.callbacks):
        if isinstance(cb, MixedPrecision): learn.callbacks.remove(cb)
    learn.model = learn.model.float()
    return learn
Put `learn` back to FP32 precision mode.
def init(config_file):
    """Initialize a confirm schema from an existing configuration file.

    Reads ``config_file``, generates a schema from its contents and
    writes the result to stdout.
    """
    # Use a context manager so the file handle is closed deterministically
    # (the original open(...).read() leaked the handle).
    with open(config_file, 'r') as config:
        schema = generate_schema_file(config.read())
    sys.stdout.write(schema)
Initialize a confirm schema from an existing configuration file.
def partial_row_coordinates(self, X):
    """Returns the row coordinates for each group.

    Projects ``X`` onto the fitted factors once per variable group and
    returns a DataFrame whose columns are a (group name, component index)
    MultiIndex-style pairing.
    """
    # Require a fitted estimator (singular values must exist).
    utils.validation.check_is_fitted(self, 's_')
    if self.check_input:
        utils.check_array(X, dtype=[str, np.number])
    X = self._prepare_input(X)
    # Projection matrix derived from the global SVD; scaled by sqrt(n).
    P = len(X) ** 0.5 * self.U_ / self.s_
    coords = {}
    for name, cols in sorted(self.groups.items()):
        X_partial = X.loc[:, cols]
        # Categorical groups are one-hot encoded with the fitted encoder.
        if not self.all_nums_[name]:
            X_partial = self.cat_one_hots_[name].transform(X_partial)
        # Normalise by the group's first partial singular value.
        Z_partial = X_partial / self.partial_factor_analysis_[name].s_[0]
        coords[name] = len(self.groups) * (Z_partial @ Z_partial.T) @ P
    # Flatten the per-group coordinate blocks into one wide DataFrame
    # keyed by (group name, component index).
    coords = pd.DataFrame({
        (name, i): group_coords.loc[:, i]
        for name, group_coords in coords.items()
        for i in range(group_coords.shape[1])
    })
    return coords
Returns the row coordinates for each group.
def can_delete_post(self, post, user):
    """Given a forum post, checks whether the user can delete it.

    Deletion is allowed for superusers, for the post's author holding
    the 'can_delete_own_posts' permission, or for anyone holding the
    'can_delete_posts' permission on the post's forum.
    """
    checker = self._get_checker(user)
    is_author = self._is_post_author(post, user)
    forum = post.topic.forum
    if user.is_superuser:
        return True
    if is_author and checker.has_perm('can_delete_own_posts', forum):
        return True
    return checker.has_perm('can_delete_posts', forum)
Given a forum post, checks whether the user can delete it.
def link_label(link):
    """Return a link label as a string.

    Prefers an explicit ``label`` attribute; otherwise falls back to
    the 1-based link number.
    """
    return link.label if hasattr(link, 'label') else str(link.linknum + 1)
return a link label as a string
def level(self, lvl=None):
    """Get or set the logging level.

    With no argument, returns the current level. Otherwise parses
    ``lvl`` and applies it to both the stream handler and the root
    logger.
    """
    # Compare against None explicitly: the previous truthiness check
    # made falsy-but-valid levels such as logging.NOTSET (0) act as a
    # getter instead of setting the level.
    if lvl is None:
        return self._lvl
    self._lvl = self._parse_level(lvl)
    self.stream.setLevel(self._lvl)
    logging.root.setLevel(self._lvl)
Get or set the logging level.
def copy_list(src_list, dst_list, lbl='Copying', ioerr_ok=False,
              sherro_ok=False, oserror_ok=False):
    """Copies all data and stat info.

    Copies each source path to the corresponding destination with
    ``shutil.copy2``, reporting progress under label ``lbl``.

    Args:
        src_list: source paths.
        dst_list: destination paths, aligned with ``src_list``.
        lbl: progress-bar label.
        ioerr_ok: suppress IOError, recording False instead.
        sherro_ok: suppress shutil.Error, recording False instead.
        oserror_ok: suppress OSError, recording False instead.

    Returns:
        list of bool: True for each copy that succeeded.
    """
    def docopy(src, dst):
        # Returns True on success; False when a suppressed error occurred.
        try:
            shutil.copy2(src, dst)
        except shutil.Error:
            if sherro_ok:
                return False
            raise
        except IOError:
            if ioerr_ok:
                return False
            raise
        except OSError:
            # Fixed: this branch previously consulted ioerr_ok, leaving
            # the oserror_ok parameter entirely unused.
            if oserror_ok:
                return False
            raise
        return True

    task_iter = zip(src_list, dst_list)
    progiter = util_progress.ProgIter(task_iter, adjust=True, lbl=lbl)
    return [docopy(src, dst) for (src, dst) in progiter]
Copies all data and stat info
def find_function(self, name):
    """Find the Function by its name.

    Raises:
        LookupError: if no deffunction with that name exists.
    """
    handle = lib.EnvFindDeffunction(self._env, name.encode())
    if handle == ffi.NULL:
        raise LookupError("Function '%s' not found" % name)
    return Function(self._env, handle)
Find the Function by its name.
def coerce(value):
    """Turns a value into a list.

    ListCell instances pass through unchanged; lists are wrapped as-is;
    anything else becomes a single-element ListCell.
    """
    if isinstance(value, ListCell):
        return value
    if isinstance(value, list):
        return ListCell(value)
    return ListCell([value])
Turns a value into a list
def sysidpath(ignore_options=False):
    """Get a unique identifier for the machine running this function.

    Returns the path of a readable, non-empty machine-id file. When no
    system file qualifies (or ``ignore_options`` is set), writes a fresh
    UUID to a fallback file under /tmp and returns that path.
    """
    fallback = Path('/tmp/machine-id')
    if not ignore_options:
        for candidate in (Path('/etc/machine-id'), fallback):
            readable = candidate.exists() and os.access(candidate, os.R_OK)
            if readable and candidate.stat().st_size > 0:
                return candidate
    with open(fallback, 'wt') as handle:
        handle.write(uuid4().hex)
    return fallback
get a unique identifier for the machine running this function
def load(self,dset):
    """Load a dataset from given filename into the object.

    Stores the filename, the nibabel image object, its data array and
    its header on this instance.
    """
    self.dset_filename = dset
    self.dset = nib.load(dset)
    # NOTE(review): get_data()/get_header() are deprecated in newer
    # nibabel releases (get_fdata()/.header are the replacements);
    # left untouched here to preserve the returned dtype/behaviour.
    self.data = self.dset.get_data()
    self.header = self.dset.get_header()
load a dataset from given filename into the object
def _check_emotion_set_is_supported(self):
    """Validates set of user-supplied target emotions.

    Raises:
        ValueError: if ``self.target_emotions`` is not one of the
            supported emotion subsets.
    """
    supported_emotion_subsets = [
        {'anger', 'fear', 'surprise', 'calm'},
        {'happiness', 'disgust', 'surprise'},
        {'anger', 'fear', 'surprise'},
        {'anger', 'fear', 'calm'},
        {'anger', 'happiness', 'calm'},
        {'anger', 'fear', 'disgust'},
        {'calm', 'disgust', 'surprise'},
        {'sadness', 'disgust', 'surprise'},
        {'anger', 'happiness'},
    ]
    if set(self.target_emotions) not in supported_emotion_subsets:
        # List every accepted subset in the error message, one per line.
        subset_lines = ''.join(
            ', '.join(subset) + '\n' for subset in supported_emotion_subsets)
        raise ValueError(
            'Target emotions must be a supported subset. '
            'Choose from one of the following emotion subset: \n'
            + subset_lines)
Validates set of user-supplied target emotions.
def show_order(self, order_id):
    """Shows an existing order transaction."""
    endpoint = 'transactions/orders/' + str(order_id)
    response = self._get(endpoint)
    return self.responder(response)
Shows an existing order transaction.
def accuracy_thresh(y_pred:Tensor, y_true:Tensor, thresh:float=0.5, sigmoid:bool=True)->Rank0Tensor:
    "Compute accuracy when `y_pred` and `y_true` are the same size."
    # Optionally squash raw scores into probabilities, then count how
    # often thresholded predictions agree with the targets.
    probs = y_pred.sigmoid() if sigmoid else y_pred
    hits = (probs > thresh) == y_true.byte()
    return hits.float().mean()
Compute accuracy when `y_pred` and `y_true` are the same size.
def _decodeTimestamp(byteIter):
    """Decodes a 7-octet timestamp into a timezone-aware datetime."""
    raw = decodeSemiOctets(byteIter, 7)
    # The final two semi-octet characters carry the timezone offset.
    tzInfo = SmsPduTzInfo(raw[-2:])
    return datetime.strptime(raw[:-2], '%y%m%d%H%M%S').replace(tzinfo=tzInfo)
Decodes a 7-octet timestamp