Columns: code (string, lengths 51 to 2.34k), docstring (string, lengths 11 to 171)
def change_username_view(self):
    form = self.ChangeUsernameFormClass(request.form)
    if request.method == 'POST' and form.validate():
        new_username = form.new_username.data
        current_user.username = new_username
        self.db_manager.save_object(current_user)
        self.db_manager.commit()
        self.email_manager.send_username_changed_email(current_user)
        signals.user_changed_username.send(current_app._get_current_object(), user=current_user)
        flash(_("Your username has been changed to '%(username)s'.", username=new_username), 'success')
        safe_next_url = self._get_safe_next_url('next', self.USER_AFTER_CHANGE_USERNAME_ENDPOINT)
        return redirect(safe_next_url)
    self.prepare_domain_translations()
    return render_template(self.USER_CHANGE_USERNAME_TEMPLATE, form=form)
Prompt for new username and old password and change the user's username.
def transformer_wikitext103_l4k_memory_v0():
    hparams = transformer_wikitext103_l4k_v0()
    hparams.split_targets_chunk_length = 64
    hparams.split_targets_max_chunks = 64
    hparams.split_targets_strided_training = True
    hparams.add_hparam("memory_type", "transformer_xl")
    target_tokens_per_batch = 4096
    hparams.batch_size = int(target_tokens_per_batch * (
        hparams.max_length / hparams.split_targets_chunk_length))
    hparams.pos = None
    hparams.self_attention_type = "dot_product_relative"
    hparams.max_relative_position = 2 * hparams.split_targets_chunk_length
    hparams.add_hparam("unconditional", True)
    hparams.add_hparam("recurrent_memory_batch_size", 0)
    hparams.add_hparam("num_memory_items", hparams.split_targets_chunk_length)
    return hparams
HParams for training languagemodel_wikitext103_l4k with memory.
def show(title, lst, full=-1):
    txt = title + ' (' + str(len(lst)) + ') items :\n '
    num = 0
    for i in lst:
        if full == -1 or num < full:
            if type(i) is str:
                txt = txt + i + ',\n '
            else:
                txt = txt + i + ', ['
                for j in i:
                    txt = txt + j + ', '
                txt = txt + ']\n'
        num = num + 1
    try:
        print(txt)
    except Exception as ex:
        print('index.show() - cant print line, error ' + str(ex))
For testing: prints the details of a list.
def _login(self, user, provider=None, remember=False, force=False, **attrs):
    user.last_login_at = datetime.datetime.now()
    user.last_login_provider = provider or self.options["default_auth_provider_name"]
    user.last_login_from = request.remote_addr
    populate_obj(user, attrs)
    save_model(user)
    flask_login.login_user(user, remember=remember, force=force)
Updates user attributes and logs the user in via flask-login.
def getLibraryFiles(self, engineRoot, delimiter=' '):
    return delimiter.join(self.resolveRoot(self.libs, engineRoot))
Returns the list of library files for this library, joined using the specified delimiter
def fib(n):
    v = n.value
    return v if v < 2 else fib2(PythonInt(v - 1)) + fib(PythonInt(v - 2))
Terrible Fibonacci number generator.
def identifier_md5(self):
    as_int = (self.identifier * 1e4).astype(np.int64)
    hashed = util.md5_object(as_int.tostring(order='C'))
    return hashed
Return an MD5 of the identifier
def load_yaml(filepath):
    with open(filepath) as f:
        txt = f.read()
    return yaml.load(txt)
Convenience function for loading yaml-encoded data from disk.
def value(self):
    return self._ads.read(self._pin_setting, is_differential=self.is_differential)
Returns the value of an ADC pin as an integer.
def onselection(self, widget):
    self._selected_key = None
    for k in self.children:
        if self.children[k] == widget:
            self._selected_key = k
            if (self._selected_item is not None) and self._selectable:
                self._selected_item.attributes['selected'] = False
            self._selected_item = self.children[self._selected_key]
            if self._selectable:
                self._selected_item.attributes['selected'] = True
            break
    return (self._selected_key,)
Called when a new item gets selected in the list.
def filter_pythons(path):
    if not isinstance(path, vistir.compat.Path):
        path = vistir.compat.Path(str(path))
    if not path.is_dir():
        return path if path_is_python(path) else None
    return filter(path_is_python, path.iterdir())
Return all valid pythons in a given path
def _validate_no_rels(param, rels):
    if param.field in rels:
        raise InvalidQueryParams(**{
            'detail': 'The sort query param value of "%s" is not '
                      'supported. Sorting on relationships is not '
                      'currently supported' % param.raw_field,
            'links': LINK,
            'parameter': PARAM,
        })
Ensure the sortable field is not on a relationship
def addPlayer(settings):
    _validate(settings)
    player = PlayerRecord(settings)
    player.save()
    getKnownPlayers()[player.name] = player
    return player
Define a new PlayerRecord from settings and save it to disk.
def write_to_datastore(self):
    client = self._datastore_client
    with client.no_transact_batch() as client_batch:
        for batch_id, batch_data in iteritems(self._data):
            batch_key = client.key(self._entity_kind_batches, batch_id)
            batch_entity = client.entity(batch_key)
            for k, v in iteritems(batch_data):
                if k != 'images':
                    batch_entity[k] = v
            client_batch.put(batch_entity)
            self._write_single_batch_images_internal(batch_id, client_batch)
Writes all image batches to the datastore.
def project_point(cb, msg, attributes=('x', 'y')):
    if skip(cb, msg, attributes):
        return msg
    plot = get_cb_plot(cb)
    x, y = msg.get('x', 0), msg.get('y', 0)
    crs = plot.current_frame.crs
    coordinates = crs.transform_points(plot.projection, np.array([x]), np.array([y]))
    msg['x'], msg['y'] = coordinates[0, :2]
    return {k: v for k, v in msg.items() if k in attributes}
Projects a single point supplied by a callback
def flatten(self):
    size = prod(self.shape[:-1])
    return self.reshape(size, self.shape[-1])
Reshape all dimensions but the last into a single dimension
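The method above collapses every axis except the last one. A minimal numpy sketch of the same reshape, assuming an ndarray-like object (the class the method belongs to is not shown here):

import numpy as np

a = np.arange(24).reshape(2, 3, 4)
flat = a.reshape(-1, a.shape[-1])  # collapse all leading dimensions; shape becomes (6, 4)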
def _gen_tasks(self):
    for filename in self.args:
        path = os.path.abspath(filename)
        yield {
            'name': path,
            'actions': [(self.fun, (filename,))],
        }
generate doit tasks for each file
def handle_valid(self, form=None, *args, **kwargs):
    if hasattr(form, 'save'):
        form.save()
    if hasattr(form, 'handle_valid'):
        form.handle_valid(*args, **kwargs)
Called after the form has validated.
async def bluetooth(dev: Device, target, value):
    if target and value:
        await dev.set_bluetooth_settings(target, value)
    print_settings(await dev.get_bluetooth_settings())
Get or set bluetooth settings.
def rules(self):
    rule = lib.EnvGetNextDefrule(self._env, ffi.NULL)
    while rule != ffi.NULL:
        yield Rule(self._env, rule)
        rule = lib.EnvGetNextDefrule(self._env, rule)
Iterate over the defined Rules.
def find_commands(cls):
    cmds = []
    for subclass in cls.__subclasses__():
        cmds.append(subclass)
        cmds.extend(find_commands(subclass))
    return cmds
Finds commands by collecting the subclasses of Command.
def _call(self, x, out=None):
    if out is None:
        out = self.range.element()
    out.lincomb(self.a, x[0], self.b, x[1])
    return out
Linearly combine ``x`` and write to ``out`` if given.
def on_transparency_value_changed(self, hscale):
    value = hscale.get_value()
    self.prefDlg.set_colors_from_settings()
    self.settings.styleBackground.set_int('transparency', MAX_TRANSPARENCY - int(value))
Changes the value of background_transparency in dconf
def add(self, command):
    self.add_command(command.config)
    command.set_application(self)
    return self
Adds a command object.
def hex(self):
    props = self._message_properties()
    msg = bytearray([MESSAGE_START_CODE_0X02, self._code])
    for prop in props:
        for key, val in prop.items():
            if val is None:
                pass
            elif isinstance(val, int):
                msg.append(val)
            elif isinstance(val, Address):
                if val.addr is None:
                    pass
                else:
                    msg.extend(val.bytes)
            elif isinstance(val, MessageFlags):
                msg.extend(val.bytes)
            elif isinstance(val, bytearray):
                msg.extend(val)
            elif isinstance(val, bytes):
                msg.extend(val)
            elif isinstance(val, Userdata):
                msg.extend(val.bytes)
    return binascii.hexlify(msg).decode()
Hexadecimal representation of the message in bytes.
def sum_over_energy(self):
    return Map(np.sum(self.counts, axis=0), self.wcs.dropaxis(2))
Reduce a 3D counts cube to a 2D counts map
def focusOutEvent(self, event):
    if not self.is_valid():
        lineedit = self.lineEdit()
        QTimer.singleShot(50, lambda: lineedit.setText(self.selected_text))
    hide_status = getattr(self.lineEdit(), 'hide_status_icon', None)
    if hide_status:
        hide_status()
    QComboBox.focusOutEvent(self, event)
Handle focus out event restoring the last valid selected path.
def to_list(stringlist, unquote=True):
    stringlist = stringlist[1:-1]
    return [
        string.strip('"') if unquote else string
        for string in stringlist.split(",")
    ]
Convert a string representing a list to a real list.
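A usage sketch, assuming to_list above is in scope; note that surrounding whitespace is not stripped, so a compact input is used:

assert to_list('["a","b","c"]') == ['a', 'b', 'c']
assert to_list('[1,2,3]', unquote=False) == ['1', '2', '3']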
def sanitize(value):
    value = unicodedata.normalize('NFKD', value)
    value = value.strip()
    value = re.sub('[^./\w\s-]', '', value)
    value = re.sub('[-\s]+', '-', value)
    return value
Strips all undesirable characters out of potential file paths.
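A usage sketch, assuming sanitize above is in scope (it relies on the unicodedata and re modules being imported):

# disallowed characters are dropped; runs of spaces and hyphens collapse to a single '-'
assert sanitize("My Dir/Some File (v2).txt") == "My-Dir/Some-File-v2.txt"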
def make_param_dict_from_file(self, path_to_params):
    param_list = list(csv.reader(open(path_to_params, "rb")))
    param_file = [x for x in param_list if x != []]
    name_list = []
    param_list = []
    param_colnames = param_file[0][1:]
    for i in np.arange(1, len(param_file)):
        name_list.append(param_file[i][0])
        param_list.append(param_file[i][1:])
    param_list = [[x.strip() for x in y] for y in param_list]
    param_dict = {}
    for i in np.arange(0, len(param_colnames)):
        param_dict[param_colnames[i]] = []
        for j in np.arange(0, len(name_list)):
            param_dict[param_colnames[i]].append(param_list[j][i])
    self._param_dict = param_dict
    self._row_names = name_list
Build the parameter dict from a CSV file on disk.
def find_top_pyfile():
    import os
    from inspect import currentframe, getframeinfo
    frame = currentframe()
    while True:
        if frame.f_back is None:
            finfo = getframeinfo(frame)
            return os.path.abspath(finfo.filename)
        frame = frame.f_back
This function inspects the CPython call stack to find the path of the top-level script.
def plot_energy(time, H, T, U):
    T0 = T[0]
    H = H / T0
    T = T / T0
    U = U / T0
    fig, ax = plt.subplots(figsize=[16, 8])
    ax.set_title('System Energy vs. Time')
    ax.set_xlabel('Time in Days')
    ax.set_ylabel('Energy (Ratio Initial KE)')
    ax.plot(time, T, label='T', color='r')
    ax.plot(time, U, label='U', color='b')
    ax.plot(time, H, label='H', color='k')
    ax.legend()
    ax.grid()
    plt.show()
Plot kinetic and potential energy of system over time
def delete_bandwidth_limit_rule(self, rule, policy):
    return self.delete(self.qos_bandwidth_limit_rule_path % (policy, rule))
Deletes a bandwidth limit rule.
def _load_preset(self, path):
    try:
        with open(path, 'r') as f:
            presetBody = json.load(f)
    except IOError as e:
        raise PresetException("IOError: " + e.strerror)
    except ValueError as e:
        raise PresetException("JSON decoding error: " + str(e))
    except Exception as e:
        raise PresetException(str(e))
    try:
        preset = Preset(presetBody)
    except PresetException as e:
        e.message = "Bad format: " + e.message
        raise
    if preset.id in self.presets:
        raise PresetException("Duplicate preset id: " + preset.id)
    else:
        self.presets[preset.id] = preset
load, validate and store a single preset file
def stop(self):
    self.working = False
    for w in self.workers:
        w.join()
    self.workers = []
Stops the worker threads and waits for them to finish
def subscribe(self, observer):
    sid = self._sn
    self.observers[sid] = observer
    self._sn += 1
    return SubscribeID(self, sid)
Subscribe an observer to this subject and return a subscription id
def unique(seq):
    seen = {}
    result = []
    for item in seq:
        marker = item
        if marker in seen:
            continue
        seen[marker] = 1
        result.append(item)
    return result
Helper function to include only unique monomials in a basis.
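A usage sketch, assuming unique above is in scope; order of first appearance is preserved and items must be hashable (they are used as dict keys):

assert unique([2, 1, 2, 3, 1]) == [2, 1, 3]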
def read(self, size=-1):
    if size < 0 and self._offset:
        size = self._size
    return self._fh.read(size)
Read 'size' bytes from file, or until EOF is reached.
def imagetransformer_b12l_4h_b128_uncond_dr03_tpu():
    hparams = imagetransformer_bas8l_8h_big_uncond_dr03_imgnet()
    update_hparams_for_tpu(hparams)
    hparams.batch_size = 2
    hparams.num_heads = 4
    hparams.num_decoder_layers = 12
    hparams.block_length = 128
    hparams.hidden_size = 256
    hparams.filter_size = 2048
    hparams.layer_preprocess_sequence = "none"
    hparams.layer_postprocess_sequence = "dan"
    hparams.layer_prepostprocess_dropout = 0.1
    hparams.optimizer = "Adafactor"
    hparams.learning_rate_schedule = "rsqrt_decay"
    hparams.learning_rate_warmup_steps = 10000
    return hparams
TPU config for CIFAR-10.
def unflag_field(self, move_x, move_y):
    field_status = self.info_map[move_y, move_x]
    if field_status == 9 or field_status == 10:
        self.info_map[move_y, move_x] = 11
Unflag or unquestion a grid cell at the given position.
def galprop_rings_yaml(self, **kwargs):
    kwargs_copy = self.base_dict.copy()
    kwargs_copy.update(**kwargs)
    self._replace_none(kwargs_copy)
    localpath = NameFactory.galprop_rings_yaml_format.format(**kwargs_copy)
    if kwargs.get('fullpath', False):
        return self.fullpath(localpath=localpath)
    return localpath
return the name of a galprop rings merging yaml file
def handle_end_signal(self):
    try:
        signal.signal(signal.SIGTERM, self.catch_end_signal)
        signal.signal(signal.SIGINT, self.catch_end_signal)
    except ValueError:
        self.log('Signals cannot be caught in a Thread', level='warning')
Catch some system signals to handle them internally.
def make_catalog_comp_dict(**kwargs):
    library_yamlfile = kwargs.pop('library', 'models/library.yaml')
    csm = kwargs.pop('CatalogSourceManager', CatalogSourceManager(**kwargs))
    if library_yamlfile is None or library_yamlfile == 'None':
        yamldict = {}
    else:
        yamldict = yaml.safe_load(open(library_yamlfile))
    catalog_info_dict, comp_info_dict = csm.make_catalog_comp_info_dict(yamldict)
    return dict(catalog_info_dict=catalog_info_dict,
                comp_info_dict=comp_info_dict,
                CatalogSourceManager=csm)
Build and return the information about the catalog components
def generate(basename, xml_list):
    for xml in xml_list:
        generate_one(basename, xml)
        generate_enums(basename, xml)
    generate_MAVLinkMessage(basename, xml_list)
    copy_fixed_headers(basename, xml_list[0])
generate complete MAVLink Java implementation
def buildNavigation(self):
    if self.buildSpec['navigation'] == constants.TABBED:
        navigation = Tabbar(self, self.buildSpec, self.configs)
    else:
        navigation = Sidebar(self, self.buildSpec, self.configs)
        if self.buildSpec['navigation'] == constants.HIDDEN:
            navigation.Hide()
    return navigation
Chooses the appropriate layout navigation component based on user prefs
def rgb_to_hsv(r, g, b):
    h, s, v = colorsys.rgb_to_hsv(r / 255., g / 255., b / 255.)
    return round2(h * 360), round2(s * 100), round2(v * 100)
Convert RGB color to HSV.
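A usage sketch; rgb_to_hsv and its round2 helper (not defined in this snippet) are assumed to be importable from the surrounding module:

print(rgb_to_hsv(255, 0, 0))   # pure red  -> roughly (0, 100, 100)
print(rgb_to_hsv(0, 0, 255))   # pure blue -> roughly (240, 100, 100)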
def skypipe_input_stream(endpoint, name=None):
    name = name or ''

    class context_manager(object):
        def __enter__(self):
            self.socket = ctx.socket(zmq.DEALER)
            self.socket.connect(endpoint)
            return self

        def send(self, data):
            data_msg = sp_msg(SP_CMD_DATA, name, data)
            self.socket.send_multipart(data_msg)

        def __exit__(self, *args, **kwargs):
            eof_msg = sp_msg(SP_CMD_DATA, name, SP_DATA_EOF)
            self.socket.send_multipart(eof_msg)
            self.socket.close()

    return context_manager()
Returns a context manager for streaming data into skypipe
def pretty_print(n):
    if type(n) != int:
        return n
    ret = []
    n = str(n)
    for i in range(len(n) - 1, -1, -1):
        ret.append(n[i])
        if (len(n) - i) % 3 == 0:
            ret.append(',')
    ret.reverse()
    return ''.join(ret[1:]) if ret[0] == ',' else ''.join(ret)
Pretty print function for very big integers
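A usage sketch, assuming pretty_print above is in scope:

assert pretty_print(1234567) == '1,234,567'
assert pretty_print(100) == '100'
assert pretty_print('n/a') == 'n/a'   # non-int input is returned unchanged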
def run_mp(songs):
    stats = Stats()
    if CONFIG['debug']:
        good = open('found', 'w')
        bad = open('notfound', 'w')
    logger.debug('Launching a pool of %d processes\n', CONFIG['jobcount'])
    chunksize = math.ceil(len(songs) / os.cpu_count())
    try:
        with Pool(CONFIG['jobcount']) as pool:
            for result in pool.imap_unordered(get_lyrics, songs, chunksize):
                if result is None:
                    continue
                for source, runtime in result.runtimes.items():
                    stats.add_result(source, result.source == source, runtime)
                found = process_result(result)
                if CONFIG['debug']:
                    if found:
                        good.write(f'{id_source(source)}: {result.song}\n')
                        good.flush()
                    else:
                        bad.write(str(result.song) + '\n')
                        bad.flush()
    finally:
        if CONFIG['debug']:
            good.close()
            bad.close()
    return stats
Concurrently calls get_lyrics to fetch the lyrics of a large list of songs.
def _set_properties(self):
    self.codetext_ctrl.SetToolTipString(_("Enter python code here."))
    self.apply_button.SetToolTipString(_("Apply changes to current macro"))
    self.splitter.SetBackgroundStyle(wx.BG_STYLE_COLOUR)
    self.result_ctrl.SetMinSize((10, 10))
Set up title, size and tooltips.
def measure_float_put(self, measure, value):
    if value < 0:
        logger.warning("Cannot record negative values")
    self._measurement_map[measure] = value
Associates a measure of type float with the given value.
def operate(config):
    "Interface to do simple operations on the database."
    app = make_app(config=config)
    print "Operate Mode"
    with app.app_context():
        operate_menu()
Interface to do simple operations on the database.
def delete_node(self, node: str):
    if self.has_node(node):
        self.remove_node(node)
Removes a node if it is in the graph.
def getQueryParams(url):
    query = urlsplit(url)[3]
    out.debug(u'Extracting query parameters from %r (%r)...' % (url, query))
    return cgi.parse_qs(query)
Get URL query parameters.
def reopen(self):
    if self._con:
        self._con.reopen()
    else:
        self._con = self._pool.connection()
Reopen the pooled connection.
def _all_spec(self):
    base = self._mod_spec
    for spec in self.basic_spec:
        base[spec] = self.basic_spec[spec]
    return base
All specifiers and their lengths.
def load_p2th_privkey_into_local_node(provider: RpcNode, prod: bool=True) -> None:
    assert isinstance(provider, RpcNode), {"error": "Import only works with local node."}
    error = {"error": "Loading P2TH privkey failed."}
    pa_params = param_query(provider.network)
    if prod:
        provider.importprivkey(pa_params.P2TH_wif, "PAPROD")
        if not provider.validateaddress(pa_params.P2TH_addr)['ismine']:
            raise P2THImportFailed(error)
    else:
        provider.importprivkey(pa_params.test_P2TH_wif, "PATEST")
        if not provider.validateaddress(pa_params.test_P2TH_addr)['ismine']:
            raise P2THImportFailed(error)
Load PeerAssets P2TH privkey into the local node.
def refresh(self):
    self._update_id_list()
    for _id in self.history[:]:
        if _id not in self.id_list:
            self.history.remove(_id)
Remove editors that are no longer open.
def cat(self, numlines=None):
    if len(self.titles) == 1:
        lines = self.lines()
        if numlines is not None:
            lines = lines[len(lines) - numlines:]
        log("\n".join(lines))
    else:
        lines = [self._printtuple(line[0], line[1]) for line in self.lines()]
        if numlines is not None:
            lines = lines[len(lines) - numlines:]
        log("".join(lines))
Log the lines output by this service, optionally limited to the last numlines lines.
def loads(self, data):
    st = BytesIO(data)
    try:
        return self.load(st)
    finally:
        st.close()
Deserializes the given byte array into an object and returns it.
def lesson_nums(self):
    lesson_nums = {}
    for brain_name, curriculum in self.brains_to_curriculums.items():
        lesson_nums[brain_name] = curriculum.lesson_num
    return lesson_nums
A dict from brain name to the brain's curriculum's lesson number.
def relocate(self):
    name = self.SearchVar.get()
    if kbos.has_key(name):
        import orbfit, ephem, math
        jdate = ephem.julian_date(w.date.get())
        try:
            (ra, dec, a, b, ang) = orbfit.predict(kbos[name], jdate, 568)
        except:
            return
        ra = math.radians(ra)
        dec = math.radians(dec)
    elif mpc_objs.has_key(name):
        ra = mpc_objs[name].ra
        dec = mpc_objs[name].dec
    self.recenter(ra, dec)
    self.create_point(ra, dec, color='blue', size=4)
Move to the position of self.SearchVar
def mount2name(self, mount):
    if not self.is_mount(mount):
        raise ValueError('%r is not a supported mount name' % (mount,))
    return mount.replace(self.mount_prefix, self.module_prefix)
Convert a mount name to a module name
def validate_price(price):
    if isinstance(price, str):
        try:
            price = int(price)
        except ValueError:
            price = float(price)
    if not isinstance(price, (int, float)):
        raise TypeError('Price should be a number: ' + repr(price))
    return price
Validation checks for the price argument.
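A usage sketch, assuming validate_price above is in scope:

assert validate_price(5) == 5
assert validate_price("19.99") == 19.99   # numeric strings are coerced
try:
    validate_price(None)
except TypeError as err:
    print(err)                            # "Price should be a number: None"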
def _get_binding_keys(self, port, host):
    binding_keys = list()
    switch_binding = port[portbindings.PROFILE].get('local_link_information', None)
    if switch_binding:
        for binding in switch_binding:
            switch_id = binding.get('switch_id')
            port_id = binding.get('port_id')
            binding_keys.append((port['id'], (switch_id, port_id)))
    else:
        binding_keys.append((port['id'], host))
    return binding_keys
Get binding keys from the port binding
def properties_operator(cls, name):
    def wrapper(self, *args, **kwargs):
        output = getattr(super(cls, self), name)(*args, **kwargs)
        return cls(output)
    wrapped = getattr(cls, name)
    wrapper.__name__ = wrapped.__name__
    wrapper.__doc__ = wrapped.__doc__
    return wrapper
Wraps a container operator to ensure container class is maintained
def create_where():
    conjunction = Forward().setResultsName("conjunction")
    nested = Group(Suppress("(") + conjunction + Suppress(")")).setResultsName(
        "conjunction"
    )
    maybe_nested = nested | constraint
    inverted = Group(not_ + maybe_nested).setResultsName("not")
    full_constraint = maybe_nested | inverted
    conjunction <<= full_constraint + OneOrMore(and_or + full_constraint)
    return upkey("where") + Group(conjunction | full_constraint).setResultsName("where")
Create a grammar for the 'where' clause used by 'select'
def to_api_data(self):
    data = {self._cc('flagStatus'): self._cc(self.__status.value)}
    if self.__status is Flag.Flagged:
        data[self._cc('startDateTime')] = self._build_date_time_time_zone(self.__start)
        data[self._cc('dueDateTime')] = self._build_date_time_time_zone(self.__due_date)
    if self.__status is Flag.Complete:
        data[self._cc('completedDateTime')] = self._build_date_time_time_zone(self.__completed)
    return data
Returns this data as a dict to be sent to the server
def message(*tokens: Token, end: str = "\n", sep: str = " ",
            fileobj: FileObj = sys.stdout, update_title: bool = False) -> None:
    if using_colorama():
        global _INITIALIZED
        if not _INITIALIZED:
            colorama.init()
            _INITIALIZED = True
    with_color, without_color = process_tokens(tokens, end=end, sep=sep)
    if CONFIG["record"]:
        _MESSAGES.append(without_color)
    if update_title and with_color:
        write_title_string(without_color, fileobj)
    to_write = with_color if config_color(fileobj) else without_color
    write_and_flush(fileobj, to_write)
Helper method for error, warning, info, debug
def _get_stack_events(h_client, stack_id, event_args):
    event_args['stack_id'] = stack_id
    event_args['resource_name'] = None
    try:
        events = h_client.events.list(**event_args)
    except heatclient.exc.HTTPNotFound as exc:
        raise heatclient.exc.CommandError(six.text_type(exc))
    else:
        for event in events:
            event.stack_name = stack_id.split('/')[0]
        return events
Get events for a stack.
def cancelEdit(self):
    if self._partsWidget.isVisible():
        return False
    self._completerTree.hide()
    self.completer().popup().hide()
    self.setText(self._originalText)
    return True
Rejects the current edit and shows the parts widget.
def _change_mode(self, mode, major, minor):
    if self._mode:
        if self._mode != mode:
            raise RuntimeError('Can\'t change mode (from %s to %s)' % (self._mode, mode))
    self._require_version(major=major, minor=minor)
    self._mode = mode
    self.ticket_flags = YubiKeyConfigBits(0x0)
    self.config_flags = YubiKeyConfigBits(0x0)
    self.extended_flags = YubiKeyConfigBits(0x0)
    if mode != 'YUBIKEY_OTP':
        self.ticket_flag(mode, True)
Change mode of operation, with some sanity checks.
def own_time(self):
    sub_time = sum(stats.deep_time for stats in self)
    return max(0., self.deep_time - sub_time)
The exclusive execution time.
def account_groups_and_extra_data(account, resource, refresh_timedelta=None):
    updated = datetime.utcnow()
    modified_since = updated
    if refresh_timedelta is not None:
        modified_since += refresh_timedelta
    modified_since = modified_since.isoformat()
    last_update = account.extra_data.get('updated', modified_since)
    if last_update > modified_since:
        return account.extra_data.get('groups', [])
    groups = fetch_groups(resource['Group'])
    extra_data = current_app.config.get(
        'OAUTHCLIENT_CERN_EXTRA_DATA_SERIALIZER', fetch_extra_data
    )(resource)
    account.extra_data.update(
        groups=groups,
        updated=updated.isoformat(),
        **extra_data
    )
    return groups
Fetch account groups and extra data from resource if necessary.
def object(self):
    if self.type == EntryType.category:
        return self.category
    elif self.type == EntryType.event:
        return self.event
    elif self.type == EntryType.session:
        return self.session
    elif self.type == EntryType.contribution:
        return self.contribution
    elif self.type == EntryType.subcontribution:
        return self.subcontribution
Return the changed object.
def _preprocess(self, filehandle, metadata):
    "Runs all attached preprocessors on the provided filehandle."
    for process in self._preprocessors:
        filehandle = process(filehandle, metadata)
    return filehandle
Runs all attached preprocessors on the provided filehandle.
def keyring_refresh(**kwargs):
    ctx = Context(**kwargs)
    ctx.execute_action('keyring:refresh', **{
        'tvm': ctx.repo.create_secure_service('tvm'),
    })
Refresh the keyring in the cocaine-runtime.
def list_syslogs(self, service_id, version_number):
    content = self._fetch("/service/%s/version/%d/syslog" % (service_id, version_number))
    return map(lambda x: FastlySyslog(self, x), content)
List all of the Syslogs for a particular service and version.
def interface_by_ipaddr(self, ipaddr):
    ipaddr = IPAddr(ipaddr)
    for devname, iface in self._devinfo.items():
        if iface.ipaddr == ipaddr:
            return iface
    raise KeyError("No device has IP address {}".format(ipaddr))
Given an IP address, return the interface that 'owns' this address
def read(self, *args, **kwargs):
    with self.open('r') as f:
        return f.read(*args, **kwargs)
Reads the node as a file
def match_host(host, domainlist):
    if not host:
        return False
    for domain in domainlist:
        if domain.startswith('.'):
            if host.endswith(domain):
                return True
        elif host == domain:
            return True
    return False
Return True if host matches an entry in given domain list.
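A usage sketch, assuming match_host above is in scope; a leading dot matches subdomains by suffix, otherwise the host must match exactly:

domains = ['.example.com', 'other.org']
assert match_host('sub.example.com', domains) is True
assert match_host('other.org', domains) is True
assert match_host('example.com', domains) is False   # no exact entry; '.example.com' only matches subdomains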
def add(self, priority, observer, callble):
    ins = 0
    for pr, _, _ in self:
        if priority > pr:
            break
        ins += 1
    self._poc.insert(ins, (priority, weakref.ref(observer), callble))
Add an observer with a priority and a callable.
def run_cufflinks(data):
    if "cufflinks" in dd.get_tools_off(data):
        return [[data]]
    work_bam = dd.get_work_bam(data)
    ref_file = dd.get_sam_ref(data)
    out_dir, fpkm_file, fpkm_isoform_file = cufflinks.run(work_bam, ref_file, data)
    data = dd.set_cufflinks_dir(data, out_dir)
    data = dd.set_fpkm(data, fpkm_file)
    data = dd.set_fpkm_isoform(data, fpkm_isoform_file)
    return [[data]]
Quantitate transcript expression with Cufflinks
def as_requirement(self):
    if isinstance(self.parsed_version, packaging.version.Version):
        spec = "%s==%s" % (self.project_name, self.parsed_version)
    else:
        spec = "%s===%s" % (self.project_name, self.parsed_version)
    return Requirement.parse(spec)
Return a ``Requirement`` that matches this distribution exactly
def Channels(module):
    nums = {2: 1, 3: 5, 4: 9, 6: 13, 7: 17, 8: 21, 9: 25, 10: 29,
            11: 33, 12: 37, 13: 41, 14: 45, 15: 49, 16: 53, 17: 57,
            18: 61, 19: 65, 20: 69, 22: 73, 23: 77, 24: 81}
    if module in nums:
        return [nums[module], nums[module] + 1, nums[module] + 2, nums[module] + 3]
    else:
        return None
Returns the channels contained in the given K2 module.
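A usage sketch, assuming Channels above is in scope:

assert Channels(2) == [1, 2, 3, 4]
assert Channels(24) == [81, 82, 83, 84]
assert Channels(5) is None   # module 5 has no entry in the lookup table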
def add_text_to_image(fname, txt, opFilename):
    ft = ImageFont.load("T://user//dev//src//python//_AS_LIB//timR24.pil")
    print("Adding text ", txt, " to ", fname, " pixels wide to file ", opFilename)
    im = Image.open(fname)
    draw = ImageDraw.Draw(im)
    draw.text((0, 0), txt, fill=(0, 0, 0), font=ft)
    del draw
    im.save(opFilename)
Add text to an image and save the result to a new file.
def apply_fixes(args, tmpdir):
    invocation = [args.clang_apply_replacements_binary]
    if args.format:
        invocation.append('-format')
    if args.style:
        invocation.append('-style=' + args.style)
    invocation.append(tmpdir)
    subprocess.call(invocation)
Calls clang-apply-fixes on a given directory.
def do_buggers(self, args):
    args = args.split()
    if _debug:
        ConsoleCmd._debug("do_buggers %r", args)
    if not self.handlers:
        self.stdout.write("no handlers\n")
    else:
        self.stdout.write("handlers: ")
        self.stdout.write(', '.join(loggerName or '__root__' for loggerName in self.handlers))
        self.stdout.write("\n")
    loggers = logging.Logger.manager.loggerDict.keys()
    for loggerName in sorted(loggers):
        if args and (not args[0] in loggerName):
            continue
        if loggerName in self.handlers:
            self.stdout.write("* %s\n" % loggerName)
        else:
            self.stdout.write(" %s\n" % loggerName)
    self.stdout.write("\n")
buggers - list the console logging handlers
def join_path(base, *parts: str):
    _parts = "/".join((_part.strip("/") for _part in parts))
    if base.endswith("/"):
        url = base + _parts
    else:
        url = base + "/" + _parts
    return url
Creates urls from base path and additional parts.
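A usage sketch, assuming join_path above is in scope; extra slashes in the parts are normalized:

assert join_path("https://api.example.com/", "v1", "/users/") == "https://api.example.com/v1/users"
assert join_path("https://api.example.com", "v1") == "https://api.example.com/v1"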
def _plugins_inventory(self, entities):
    import os
    from ansible.plugins.loader import vars_loader
    from ansible.utils.vars import combine_vars

    data = {}
    for inventory_dir in self.variable_manager._inventory._sources:
        if ',' in inventory_dir:
            continue
        elif not os.path.isdir(inventory_dir):
            inventory_dir = os.path.dirname(inventory_dir)
        for plugin in vars_loader.all():
            data = combine_vars(data, self._get_plugin_vars(plugin, inventory_dir, entities))
    return data
merges all entities by inventory source
def cp(source, bucket, checksum, key_prefix):
    from .models import Bucket
    from .helpers import populate_from_path
    for object_version in populate_from_path(
            Bucket.get(bucket), source, checksum=checksum, key_prefix=key_prefix):
        click.secho(str(object_version))
    db.session.commit()
Create new bucket from all files in directory.
def pWMWrite(fileHandle, pWM, alphabetSize=4):
    for i in xrange(0, alphabetSize):
        fileHandle.write("%s\n" % ' '.join([str(pWM[j][i]) for j in xrange(0, len(pWM))]))
Writes a file in standard PWM format; the reverse of pWMParser.
def remove_accounts_from_institute(accounts_query, institute):
    query = accounts_query.filter(date_deleted__isnull=True)
    for account in query:
        remove_account_from_institute(account, institute)
Remove accounts from institute.
def _log_future_exception(future, logger):
    if not future.done():
        return
    try:
        future.result()
    except:
        logger.warning("Exception in ignored future: %s", future, exc_info=True)
Log any exception raised by future.
def _from_dict(cls, _dict):
    args = {}
    if 'words' in _dict:
        args['words'] = [Word._from_dict(x) for x in (_dict.get('words'))]
    else:
        raise ValueError(
            'Required property \'words\' not present in Words JSON')
    return cls(**args)
Initialize a Words object from a json dictionary.
def make_map(declarations):
    mapper = routes.Mapper()
    for route, methods in ROUTE_LIST:
        allowed_methods = []
        for method, func in methods.items():
            mapper.connect(route, action=func, conditions=dict(method=[method]))
            allowed_methods.append(method)
        allowed_methods = ', '.join(allowed_methods)
        mapper.connect(route, action=handle_not_allowed, _methods=allowed_methods)
    return mapper
Process route declarations to create a Route Mapper.
def invoke(self, function_name, raw_python=False, command=None, no_color=False):
    key = command if command is not None else 'command'
    if raw_python:
        command = {'raw_command': function_name}
    else:
        command = {key: function_name}

    import json as json
    response = self.zappa.invoke_lambda_function(
        self.lambda_name,
        json.dumps(command),
        invocation_type='RequestResponse',
    )
    if 'LogResult' in response:
        if no_color:
            print(base64.b64decode(response['LogResult']))
        else:
            decoded = base64.b64decode(response['LogResult']).decode()
            formatted = self.format_invoke_command(decoded)
            colorized = self.colorize_invoke_command(formatted)
            print(colorized)
    else:
        print(response)
    if 'FunctionError' in response:
        raise ClickException(
            "{} error occurred while invoking command.".format(response['FunctionError'])
        )
Invoke a remote function.
def render(self, request, context, status=codes.ok, content_type=None, args=None, kwargs=None):
    "Expects the method handler to return the `context` for the template."
    if isinstance(self.template_name, (list, tuple)):
        template = loader.select_template(self.template_name)
    elif self.template_name:
        template = loader.get_template(self.template_name)
    else:
        template = loader.Template(self.template_string)
    context = RequestContext(request, context)
    content = template.render(context)
    return HttpResponse(content, status=status, content_type=content_type)
Expects the method handler to return the `context` for the template.
def restart_listener(self, topics):
    if self.listener is not None:
        if self.listener.running:
            self.stop()
    self.__init__(topics=topics)
Restart listener after configuration update.
def _validate_allowed_settings(self, application_id, application_config, allowed_settings):
    for setting_key in application_config.keys():
        if setting_key not in allowed_settings:
            raise ImproperlyConfigured(
                "Platform {}, app {} does not support the setting: {}.".format(
                    application_config["PLATFORM"], application_id, setting_key
                )
            )
Confirm only allowed settings are present.