def load_config(configfile):
    try:
        with open(configfile, 'r') as ymlfile:
            try:
                config = yaml.load(ymlfile)
                return config
            except yaml.parser.ParserError:
                raise PyYAMLConfigError(
                    'Could not parse config file: {}'.format(configfile),
                )
    except IOError:
        raise PyYAMLConfigError(
            'Could not open config file: {}'.format(configfile),
        )
Return a dict with configuration from the supplied yaml file
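A minimal usage sketch; the file name and the keys read from the result are hypothetical, since they depend entirely on the YAML file's contents:

    >>> config = load_config('settings.yml')    # hypothetical path
    >>> config['database']['host']              # keys depend on the file
    'localhost'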
def http_call(self, url=None, **kwargs):
    if not url:
        url = self.search_url
    http_func, arg_name = self.get_http_method_arg_name()
    _kwargs = {
        arg_name: kwargs,
    }
    response = http_func(
        url=url.format(**kwargs),
        headers=self.get_http_headers(),
        **_kwargs
    )
    if response.status_code != 200:
        logger.warning('Invalid Request for `%s`', response.url)
        response.raise_for_status()
    return response.json()
Call the target URL via HTTP and return the JSON result
def metadataContributer(self):
    if self._metaFL is None:
        fl = FeatureService(url=self._metadataURL,
                            proxy_url=self._proxy_url,
                            proxy_port=self._proxy_port)
        self._metaFS = fl
    return self._metaFS
gets the metadata featurelayer object
def shared_atts(self):
    atts = {}
    first = self.chunks[0]
    for att in sorted(first.atts):
        if all(fs.atts.get(att, '???') == first.atts[att]
               for fs in self.chunks if len(fs) > 0):
            atts[att] = first.atts[att]
    return atts
Gets atts shared among all nonzero-length component Chunks
def copy(self, *args, **kwargs):
    for slot in self.__slots__:
        attr = getattr(self, slot)
        if slot[0] == '_':
            slot = slot[1:]
        if slot not in kwargs:
            kwargs[slot] = attr
    result = type(self)(*args, **kwargs)
    return result
Copy this model element and contained elements if they exist.
def _build_codes() -> Dict[str, Dict[str, str]]:
    built = {
        'fore': {},
        'back': {},
        'style': {},
    }
    for name, number in _namemap:
        built['fore'][name] = codeformat(30 + number)
        built['back'][name] = codeformat(40 + number)
        litename = 'light{}'.format(name)
        built['fore'][litename] = codeformat(90 + number)
        built['back'][litename] = codeformat(100 + number)
    built['fore']['reset'] = codeformat(39)
    built['back']['reset'] = codeformat(49)
    for code, names in _stylemap:
        for alias in names:
            built['style'][alias] = codeformat(code)
    for i in range(256):
        built['fore'][str(i)] = extforeformat(i)
        built['back'][str(i)] = extbackformat(i)
    return built
Build code map, encapsulated to reduce module-level globals.
def _vcf_info(start, end, mate_id, info=None):
    out = "SVTYPE=BND;MATEID={mate};IMPRECISE;CIPOS=0,{size}".format(
        mate=mate_id, size=end - start)
    if info is not None:
        extra_info = ";".join("{0}={1}".format(k, v)
                              for k, v in info.iteritems())
        out = "{0};{1}".format(out, extra_info)
    return out
Return breakend information line with mate and imprecise location.
def _write_bed_header(self):
    final_byte = 1 if self._bed_format == "SNP-major" else 0
    self._bed.write(bytearray((108, 27, final_byte)))
Writes the first 3 bytes of the BED file.
def vsh(cmd, *args, **kw):
    args = '" "'.join(i.replace('"', r'\"') for i in args)
    easy.sh('"%s" "%s"' % (venv_bin(cmd), args))
Execute a command installed into the active virtualenv.
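To see what the quoting buys, here is the command string the body builds for an argument that itself contains double quotes; the '/venv/bin/pip' path is an assumption standing in for whatever venv_bin('pip') resolves to:

    >>> args = '" "'.join(i.replace('"', r'\"') for i in ('show', 'a "b"'))
    >>> '"%s" "%s"' % ('/venv/bin/pip', args)
    '"/venv/bin/pip" "show" "a \\"b\\""'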
def map_package(shutit_pexpect_session, package, install_type):
    if package in PACKAGE_MAP.keys():
        for itype in PACKAGE_MAP[package].keys():
            if itype == install_type:
                ret = PACKAGE_MAP[package][install_type]
                if isinstance(ret, str):
                    return ret
                if callable(ret):
                    ret(shutit_pexpect_session)
                    return ''
    return package
If package mapping exists, then return it, else return package.
def git_ls_tree(repo_dir, treeish='HEAD'):
    command = ['git', 'ls-tree', '-r', '--full-tree', treeish]
    raw = execute_git_command(command, repo_dir=repo_dir).splitlines()
    output = [l.strip() for l in raw if l.strip()]
    breakout = [k.split(None, 3) for k in output]
    headers = ['mode', 'type', 'object', 'file']
    return [dict(zip(headers, vals)) for vals in breakout]
Run git ls-tree.
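Each raw line of `git ls-tree -r --full-tree` has the shape `<mode> <type> <object>\t<file>`, so `split(None, 3)` yields exactly the four fields zipped with the headers. A sketch of one parsed entry (object hash shortened for readability):

    {'mode': '100644', 'type': 'blob', 'object': 'a3f9c1...', 'file': 'README.md'}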
def do_EOF(self, args):
    if _debug:
        ConsoleCmd._debug("do_EOF %r", args)
    return self.do_exit(args)
Exit on system end of file character
def unzip_unicode(output, version):
    unzipper = zipfile.ZipFile(
        os.path.join(output, 'unicodedata', '%s.zip' % version))
    target = os.path.join(output, 'unicodedata', version)
    print('Unzipping %s.zip...' % version)
    os.makedirs(target)
    for f in unzipper.namelist():
        unzipper.extract(f, target)
Unzip the Unicode files.
def add_arguments(cls, parser, sys_arg_list=None):
    parser.add_argument('--tcp_check_interval',
                        dest='tcp_check_interval',
                        required=False, default=2, type=float,
                        help="TCP health-test interval in seconds, "
                             "default 2 "
                             "(only for 'tcp' health monitor plugin)")
    parser.add_argument('--tcp_check_port',
                        dest='tcp_check_port',
                        required=False, default=22, type=int,
                        help="Port for TCP health-test, default 22 "
                             "(only for 'tcp' health monitor plugin)")
    return ["tcp_check_interval", "tcp_check_port"]
Arguments for the TCP health monitor plugin.
def _triplify_object(self, data, parent):
    subject = self.get_subject(data)
    if self.path:
        yield (subject, TYPE_SCHEMA, self.path, TYPE_SCHEMA)
    if parent is not None:
        yield (parent, self.predicate, subject, TYPE_LINK)
    if self.reverse is not None:
        yield (subject, self.reverse, parent, TYPE_LINK)
    for prop in self.properties:
        for res in prop.triplify(data.get(prop.name), subject):
            yield res
Create bi-directional statements for object relationships.
def response_json(self, status, response, content_type='application/json',
                  encoding='utf-8', headers=None, jsonp=None):
    encoder = JSONEncoder(
        check_circular=self.app.validate_output,
        allow_nan=False,
        sort_keys=True,
        indent=2 if self.app.pretty_output else None,
        separators=(',', ': ') if self.app.pretty_output else (',', ':')
    )
    content = encoder.encode(response)
    if jsonp:
        content_list = [jsonp.encode(encoding), b'(',
                        content.encode(encoding), b');']
    else:
        content_list = [content.encode(encoding)]
    return self.response(status, content_type, content_list, headers=headers)
Send a JSON response
def render_formset_errors(formset, **kwargs):
    renderer_cls = get_formset_renderer(**kwargs)
    return renderer_cls(formset, **kwargs).render_errors()
Render formset errors to a Bootstrap layout
def find_resources(client):
    wildcard = Keys.DISPENSER.format('*')
    pattern = re.compile(Keys.DISPENSER.format('(.*)'))
    return [pattern.match(d).group(1) for d in client.scan_iter(wildcard)]
Detect dispensers and return corresponding resources.
def find_build_dir(path, build="_build"):
    path = os.path.abspath(os.path.expanduser(path))
    contents = os.listdir(path)
    filtered_contents = [directory for directory in contents
                         if os.path.isdir(os.path.join(path, directory))]
    if build in filtered_contents:
        return os.path.join(path, build)
    else:
        if path == os.path.realpath("/"):
            return None
        else:
            return find_build_dir("{0}/..".format(path), build)
try to guess the build folder's location
def create(self, weeks):
    user_pageviews = self.create_profiles('Pageviews', weeks)
    user_downloads = self.create_profiles('Downloads', weeks)
    self._export_profiles('Profiles', user_pageviews, user_downloads)
    user_pageviews = self.create_profiles('Pageviews_IP', weeks, True)
    user_downloads = self.create_profiles('Downloads_IP', weeks, True)
    self._export_profiles('Profiles_IP', user_pageviews, user_downloads,
                          ip_user=True)
Create the user and ip profiles for the given weeks.
def pairwise_mean(values):
    "Averages between a value and the next value in a sequence"
    return numpy.array([numpy.mean(pair) for pair in pairwise(values)])
Averages between a value and the next value in a sequence
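Assuming `pairwise` is the usual itertools recipe yielding consecutive overlapping pairs, a quick check:

    >>> pairwise_mean([1, 2, 4])
    array([1.5, 3. ])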
def current_settings(self):
    settings = {}
    if not self.status_data:
        return settings
    for (key, val) in self.status_data.get('curvals', {}).items():
        try:
            val = float(val)
        except ValueError:
            val = val
        if val in ('on', 'off'):
            val = (val == 'on')
        settings[key] = val
    return settings
Return a dict with all current configuration values.
def scale_to(x, ratio, targ):
    return max(math.floor(x * ratio), targ)
Calculate dimension of an image during scaling with aspect ratio
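The max() clamp means the scaled dimension never drops below the target. Two worked cases:

    >>> scale_to(640, 0.5, 128)   # floor(320) > 128, so the scaled size wins
    320
    >>> scale_to(200, 0.5, 128)   # floor(100) < 128, so the target wins
    128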
def verify_chunks(self, chunks):
    err = []
    for chunk in chunks:
        err.extend(self.verify_data(chunk))
    return err
Verify the chunks in a list of low data structures
def add_unique_template_variables(self, options):
    options.update(dict(
        geojson_data=json.dumps(self.data, ensure_ascii=False),
        colorProperty=self.color_property,
        colorType=self.color_function_type,
        colorStops=self.color_stops,
        strokeWidth=self.stroke_width,
        strokeColor=self.stroke_color,
        radius=self.radius,
        defaultColor=self.color_default,
        highlightColor=self.highlight_color
    ))
    if self.vector_source:
        options.update(vectorColorStops=self.generate_vector_color_map())
Update map template variables specific to circle visual
def _adb_screencap(self, scale=1.0):
    remote_file = tempfile.mktemp(dir='/data/local/tmp/',
                                  prefix='screencap-', suffix='.png')
    local_file = tempfile.mktemp(prefix='atx-screencap-', suffix='.png')
    self.shell('screencap', '-p', remote_file)
    try:
        self.pull(remote_file, local_file)
        image = imutils.open_as_pillow(local_file)
        if scale is not None and scale != 1.0:
            image = image.resize([int(scale * s) for s in image.size],
                                 Image.BICUBIC)
        rotation = self.rotation()
        if rotation:
            method = getattr(Image, 'ROTATE_{}'.format(rotation * 90))
            image = image.transpose(method)
        return image
    finally:
        self.remove(remote_file)
        os.unlink(local_file)
capture screen with adb shell screencap
def reset_kernel(self):
    client = self.get_current_client()
    if client is not None:
        self.switch_to_plugin()
        client.reset_namespace()
Reset kernel of current client.
def summary(self):
    print("\nStatus summary")
    print("=" * 79)
    print("{0}found {1} dependencies in {2} packages.{3}\n".format(
        self.grey, self.count_dep, self.count_pkg, self.endc))
Summary by packages and dependencies
def link_docstring(modules, docstring:str, overwrite:bool=False)->str:
    "Search `docstring` for backticks and attempt to link those functions to respective documentation."
    mods = listify(modules)
    for mod in mods:
        _modvars.update(mod.__dict__)
    return re.sub(BT_REGEX, replace_link, docstring)
Search `docstring` for backticks and attempt to link those functions to respective documentation.
def pretty_print_table_instance(table):
    assert isinstance(table, Table)

    def pretty_print_row(styled, plain):
        click.secho(
            " | ".join(
                v + " " * (table.column_widths[k] - len(plain[k]))
                for k, v in enumerate(styled)
            )
        )

    pretty_print_row(table.headers, table.plain_headers)
    for k, row in enumerate(table.rows):
        pretty_print_row(row, table.plain_rows[k])
Pretty print a table instance.
def delete_set(self, x):
    if x not in self._parents:
        return
    members = list(self.members(x))
    for v in members:
        del self._parents[v]
        del self._weights[v]
        del self._prev_next[v]
        del self._min_values[v]
Removes the equivalence class containing `x`.
def edge_list(self) -> List[Edge]:
    return [edge for edge in sorted(self._edges.values(),
                                    key=attrgetter("key"))]
The ordered list of edges in the container.
def _caching_enabled(self):
    try:
        config = self._runtime.get_configuration()
        parameter_id = Id('parameter:useCachingForQualifierIds@json')
        if config.get_value_by_parameter(parameter_id).get_boolean_value():
            return True
        else:
            return False
    except (AttributeError, KeyError, errors.NotFound):
        return False
Returns True if caching is enabled per configuration, False otherwise.
def addFeaturesSearchOptions(parser):
    addFeatureSetIdArgument(parser)
    addFeaturesReferenceNameArgument(parser)
    addStartArgument(parser)
    addEndArgument(parser)
    addParentFeatureIdArgument(parser)
    addFeatureTypesArgument(parser)
Adds common options to a features search command line parser.
def _relative_frequency(self, word):
    count = self.type_counts.get(word, 0)
    return math.log(count / len(self.type_counts)) if count > 0 else 0
Computes the log relative frequency for a word form
def close(self):
    uwsgi.disconnect()
    if self._req_ctx is None:
        self._select_greenlet.kill()
        self._event.set()
Disconnects uWSGI from the client.
def add(self, items):
    options = self._create_options(items)
    # iterate over a copy, since options is mutated inside the loop
    for k, v in list(options.items()):
        if k in self.labels and v not in self.items:
            options.pop(k)
            count = 0
            while f'{k}_{count}' in self.labels:
                count += 1
            options[f'{k}_{count}'] = v
    self.widget.options.update(options)
    self.widget.param.trigger('options')
    self.widget.value = list(options.values())[:1]
Add items to options
def isotime(at=None, subsecond=False):
    if not at:
        at = utcnow()
    st = at.strftime(_ISO8601_TIME_FORMAT
                     if not subsecond
                     else _ISO8601_TIME_FORMAT_SUBSECOND)
    tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
    st += ('Z' if tz == 'UTC' else tz)
    return st
Stringify time in ISO 8601 format.
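Assuming `_ISO8601_TIME_FORMAT` is the usual '%Y-%m-%dT%H:%M:%S', a naive datetime (no tzinfo) is treated as UTC and gets the 'Z' suffix:

    >>> isotime(datetime.datetime(2019, 5, 1, 12, 30, 0))
    '2019-05-01T12:30:00Z'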
def read(url, **kwargs):
    response = open_url(url, **kwargs)
    try:
        return response.read()
    finally:
        response.close()
Read the contents of a URL into memory and return them.
def euclid(a, b):
    a = abs(a)
    b = abs(b)
    if a < b:
        a, b = b, a
    while b != 0:
        a, b = b, a % b
    return a
returns the Greatest Common Divisor of a and b
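A short trace: euclid(48, 18) walks (48, 18) -> (18, 12) -> (12, 6) -> (6, 0), and the abs() calls make the sign of the inputs irrelevant:

    >>> euclid(48, 18)
    6
    >>> euclid(-48, 18)
    6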
def _get_image_size(self, maxcharno, maxlineno):
    return (self._get_char_x(maxcharno) + self.image_pad,
            self._get_line_y(maxlineno + 0) + self.image_pad)
Get the required image size.
def merged_gasmap(self, **kwargs):
    kwargs_copy = self.base_dict.copy()
    kwargs_copy.update(**kwargs)
    self._replace_none(kwargs_copy)
    localpath = NameFactory.merged_gasmap_format.format(**kwargs_copy)
    if kwargs.get('fullpath', False):
        return self.fullpath(localpath=localpath)
    return localpath
return the file name for Galprop merged gasmaps
def get(self, key: Text, locale: Optional[Text]) -> List[Tuple[Text, ...]]:
    locale = self.choose_locale(locale)
    return self.dict[locale][key]
Get a single set of intents.
def use_settings(**kwargs):
    from omnic import singletons
    singletons.settings.use_settings_dict(kwargs)
    yield
    singletons.settings.use_previous_settings()
Context manager to temporarily override settings
def _role_remove(name, user=None, host=None, port=None, maintenance_db=None,
                 password=None, runas=None):
    if not user_exists(name, user, host, port, maintenance_db,
                       password=password, runas=runas):
        log.info('User \'%s\' does not exist', name)
        return False
    sub_cmd = 'DROP ROLE "{0}"'.format(name)
    _psql_prepare_and_run(
        ['-c', sub_cmd],
        runas=runas, host=host, user=user, port=port,
        maintenance_db=maintenance_db, password=password)
    if not user_exists(name, user, host, port, maintenance_db,
                       password=password, runas=runas):
        return True
    else:
        log.info('Failed to delete user \'%s\'.', name)
        return False
Removes a role from the Postgres Server
def _write(self, session, openFile, replaceParamFile):
    hmetRecords = self.hmetRecords
    for record in hmetRecords:
        openFile.write('%s\t%s\t%s\t%s\t%.3f\t%s\t%s\t%s\t%s\t%.2f\t%.2f\n' % (
            record.hmetDateTime.year,
            record.hmetDateTime.month,
            record.hmetDateTime.day,
            record.hmetDateTime.hour,
            record.barometricPress,
            record.relHumidity,
            record.totalSkyCover,
            record.windSpeed,
            record.dryBulbTemp,
            record.directRad,
            record.globalRad))
Method to write HMET WES data to file.
def EnsureSConsVersion(self, major, minor, revision=0):
    if SCons.__version__ == '__' + 'VERSION__':
        SCons.Warnings.warn(
            SCons.Warnings.DevelopmentVersionWarning,
            "EnsureSConsVersion is ignored for development version")
        return
    scons_ver = self._get_major_minor_revision(SCons.__version__)
    if scons_ver < (major, minor, revision):
        if revision:
            scons_ver_string = '%d.%d.%d' % (major, minor, revision)
        else:
            scons_ver_string = '%d.%d' % (major, minor)
        print("SCons %s or greater required, but you have SCons %s" %
              (scons_ver_string, SCons.__version__))
        sys.exit(2)
Exit abnormally if the SCons version is not late enough.
def s3_etag(url: str) -> Optional[str]:
    s3_resource = boto3.resource("s3")
    bucket_name, s3_path = split_s3_path(url)
    s3_object = s3_resource.Object(bucket_name, s3_path)
    return s3_object.e_tag
Check ETag on S3 object.
def _byteify(data):
    if isinstance(data, six.text_type):
        return data.encode("utf-8")
    if isinstance(data, list):
        return [_byteify(item) for item in data]
    if isinstance(data, dict):
        return {
            _byteify(key): _byteify(value)
            for key, value in data.items()
        }
    return data
Convert unicode to bytes
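A quick check of the recursion over a nested structure; only text is re-encoded, other types pass through unchanged:

    >>> _byteify({u'a': [u'b', 1]})
    {b'a': [b'b', 1]}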
def remove_workspace(self):
    def confirm_clicked():
        if len(self.document_model.workspaces) > 1:
            command = Workspace.RemoveWorkspaceCommand(self)
            command.perform()
            self.document_controller.push_undo_command(command)

    caption = _("Remove workspace named '{0}'?").format(self.__workspace.name)
    self.pose_confirmation_message_box(caption, confirm_clicked,
                                       accepted_text=_("Remove Workspace"),
                                       message_box_id="remove_workspace")
Pose a dialog to confirm removal then remove workspace.
def add_view(self, *args, **kwargs):
    try:
        singleton = self.model.objects.get()
    except (self.model.DoesNotExist, self.model.MultipleObjectsReturned):
        kwargs.setdefault("extra_context", {})
        kwargs["extra_context"]["singleton"] = True
        response = super(SingletonAdmin, self).add_view(*args, **kwargs)
        return self.handle_save(args[0], response)
    return redirect(admin_url(self.model, "change", singleton.id))
Redirect to the change view if the singleton instance exists.
def _filter_by_pattern(self, pattern):
    try:
        _len = len(pattern)
    except TypeError:
        raise TypeError("pattern is not a list of Booleans. Got {}".format(
            type(pattern)))
    _filt_values = [d for i, d in enumerate(self._values)
                    if pattern[i % _len]]
    _filt_datetimes = [d for i, d in enumerate(self.datetimes)
                       if pattern[i % _len]]
    return _filt_values, _filt_datetimes
Filter the Data Collection based on a list of booleans.
def _utc_float(self):
    tai = self.tai
    leap_dates = self.ts.leap_dates
    leap_offsets = self.ts.leap_offsets
    leap_reverse_dates = leap_dates + leap_offsets / DAY_S
    i = searchsorted(leap_reverse_dates, tai, 'right')
    return tai - leap_offsets[i] / DAY_S
Return UTC as a floating point Julian date.
def url(self):
    url = u'{home_url}{permalink}'.format(home_url=settings.HOME_URL,
                                          permalink=self._permalink)
    url = re.sub(r'/{2,}', r'/', url)
    return url
The site-relative URL to the post.
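The re.sub collapses any doubled slashes the concatenation produces. With a hypothetical site-relative settings.HOME_URL of '/blog/' and a _permalink of '/2019/05/hello/':

    >>> re.sub(r'/{2,}', r'/', '/blog//2019/05/hello/')
    '/blog/2019/05/hello/'

Note this assumes HOME_URL is site-relative, as the docstring implies; the '//' in an absolute 'https://' prefix would be collapsed too.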
def _parse_template(self, has_content):
    reset = self._head
    context = contexts.TEMPLATE_NAME
    if has_content:
        context |= contexts.HAS_TEMPLATE
    try:
        template = self._parse(context)
    except BadRoute:
        self._head = reset
        raise
    self._emit_first(tokens.TemplateOpen())
    self._emit_all(template)
    self._emit(tokens.TemplateClose())
Parse a template at the head of the wikicode string.
def remove_api_key(self, api_id, stage_name):
    response = self.apigateway_client.get_api_keys(
        limit=1,
        nameQuery='{}_{}'.format(stage_name, api_id)
    )
    for api_key in response.get('items'):
        self.apigateway_client.delete_api_key(
            apiKey="{}".format(api_key['id'])
        )
Remove a generated API key for api_id and stage_name
def bar(self, progress):
    if not hasattr(self, "_limit") or not self._limit:
        self._limit = self.terminal_size()
    graph_progress = int(progress * self._limit)
    self.stdout.write('\r', ending='')
    progress_format = "[%-{}s] %d%%".format(self._limit)
    self.stdout.write(
        self.style.SUCCESS(progress_format % (
            self.progress_symbol * graph_progress, int(progress * 100))),
        ending=''
    )
    self.stdout.flush()
Shows the progress bar for the given progress on stdout.
def clean_new(self, value):
    value = self.schema_class(value).full_clean()
    return self.object_class(**value)
Return a new object instantiated with cleaned data.
def makeHawkExt(self):
    o = self.options
    c = o.get('credentials', {})
    if c.get('clientId') and c.get('accessToken'):
        ext = {}
        cert = c.get('certificate')
        if cert:
            if six.PY3 and isinstance(cert, six.binary_type):
                cert = cert.decode()
            if isinstance(cert, six.string_types):
                cert = json.loads(cert)
            ext['certificate'] = cert
        if 'authorizedScopes' in o:
            ext['authorizedScopes'] = o['authorizedScopes']
        return utils.makeB64UrlSafe(
            utils.encodeStringForB64Header(utils.dumpJson(ext)).strip())
    else:
        return {}
Make an 'ext' for Hawk authentication
def server_info(self):
    response = self._post(self.apiurl + "/v2/server/info",
                          data={'apikey': self.apikey})
    return self._raise_or_extract(response)
Query information about the server.
def print_subcommands(self):
    lines = ["Call"]
    lines.append('-' * len(lines[-1]))
    lines.append('')
    lines.append("> jhubctl <subcommand> <resource-type> <resource-name>")
    lines.append('')
    lines.append("Subcommands")
    lines.append('-' * len(lines[-1]))
    lines.append('')
    for name, subcommand in self.subcommands.items():
        lines.append(name)
        lines.append(indent(subcommand[1]))
        lines.append('')
    print(os.linesep.join(lines))
Print the subcommand part of the help.
def load_context(context, file_path=None):
    if not file_path:
        file_path = _get_context_filepath()
    if os.path.exists(file_path):
        with io.open(file_path, encoding='utf-8') as f:
            for line in f:
                execute(line, context)
Load a Context object in place from user data directory.
def limit(self, keys):
    if not isinstance(keys, list) and not isinstance(keys, tuple):
        keys = [keys]
    remove_keys = [k for k in self.keys() if k not in keys]
    for k in remove_keys:
        self.pop(k)
Remove all keys other than the keys specified.
def profile_path(profile_id, profile):
    user = os.path.expanduser("~")
    return os.path.join(user, profile_id + profile)
Create the full path to the given profile for the current user.
def passagg(recipient, sender):
    adj = random.choice(pmxbot.phrases.adjs)
    if random.choice([False, True]):
        lead = ""
        trail = recipient if not recipient else ", %s" % recipient
    else:
        lead = recipient if not recipient else "%s, " % recipient
        trail = ""
    body = random.choice(pmxbot.phrases.adj_intros) % adj
    if not lead:
        body = body.capitalize()
    msg = "{lead}{body}{trail}.".format(**locals())
    fw = random.choice(pmxbot.phrases.farewells)
    return "{msg} {fw}, {sender}.".format(**locals())
Generate a passive-aggressive statement to recipient from sender.
def register_bse_task(self, *args, **kwargs):
    kwargs["task_class"] = BseTask
    return self.register_task(*args, **kwargs)
Register a Bethe-Salpeter task.
def add(self, histogram: Histogram1D):
    if self.binning and not self.binning == histogram.binning:
        raise ValueError("Cannot add histogram with different binning.")
    self.histograms.append(histogram)
Add a histogram to the collection.
def show_run(command_history_id):
    from pprint import pprint
    from .config import ConfigStore
    from .database import DataBase
    db = DataBase(ConfigStore().db_path)
    with db.connection():
        for ch_id in command_history_id:
            crec = db.get_full_command_record(ch_id)
            pprint(crec.__dict__)
            print("")
Show detailed command history by its ID.
def _set_complete_option(cls):
    get_config = cls.context.get_config
    complete = get_config('complete', None)
    if complete is None:
        conditions = [
            get_config('transitions', False),
            get_config('named_transitions', False),
        ]
        complete = not any(conditions)
    cls.context.new_meta['complete'] = complete
Check and set complete option.
def detail(callback=None, path=None, method=Method.GET, resource=None,
           tags=None, summary="Get specified resource.", middleware=None):
    def inner(c):
        op = Operation(c, path or PathParam('{key_field}'), method,
                       resource, tags, summary, middleware)
        op.responses.add(Response(HTTPStatus.OK, "Get a {name}"))
        op.responses.add(Response(HTTPStatus.NOT_FOUND, "Not found", Error))
        return op
    return inner(callback) if callback else inner
Decorator to configure an operation that fetches a resource.
def short_form_one_format(jupytext_format):
    if not isinstance(jupytext_format, dict):
        return jupytext_format
    fmt = jupytext_format['extension']
    if 'suffix' in jupytext_format:
        fmt = jupytext_format['suffix'] + fmt
    elif fmt.startswith('.'):
        fmt = fmt[1:]
    if 'prefix' in jupytext_format:
        fmt = jupytext_format['prefix'] + '/' + fmt
    if jupytext_format.get('format_name'):
        if jupytext_format['extension'] not in ['.md', '.Rmd'] \
                or jupytext_format['format_name'] == 'pandoc':
            fmt = fmt + ':' + jupytext_format['format_name']
    return fmt
Represent one jupytext format as a string
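A few worked cases, traced by hand through the branches above (not taken from the jupytext test suite):

    >>> short_form_one_format({'extension': '.ipynb'})
    'ipynb'
    >>> short_form_one_format({'extension': '.py', 'format_name': 'percent'})
    'py:percent'
    >>> short_form_one_format({'prefix': 'notebooks', 'extension': '.Rmd'})
    'notebooks/Rmd'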
def _parse_file(self):
    args = utilities.build_includes(self.arch.includes())
    args.append('-E')
    args.append('-D__attribute__(x)=')
    args.append('-D__extension__=')
    self.ast = parse_file(self.filepath, use_cpp=True,
                          cpp_path='arm-none-eabi-gcc', cpp_args=args)
Preprocess and parse C file into an AST
def _create_threads(self):
    creator = JobCreator(
        self.config,
        self.observers.jobs,
        self.logger
    )
    self.jobs = creator.job_factory()
This method creates job instances.
def setCell(self, x, y, v):
    self.cells[y][x] = v
set the cell value at x,y
def load_boston():
    dataset = datasets.load_boston()
    return Dataset(load_boston.__doc__, dataset.data, dataset.target,
                   r2_score)
Boston House Prices Dataset.
def close(self):
    if self._closing:
        return
    log.info('MWorkerQueue under PID %s is closing', os.getpid())
    self._closing = True
    if getattr(self, '_monitor', None) is not None:
        self._monitor.stop()
        self._monitor = None
    if getattr(self, '_w_monitor', None) is not None:
        self._w_monitor.stop()
        self._w_monitor = None
    if hasattr(self, 'clients') and self.clients.closed is False:
        self.clients.close()
    if hasattr(self, 'workers') and self.workers.closed is False:
        self.workers.close()
    if hasattr(self, 'stream'):
        self.stream.close()
    if hasattr(self, '_socket') and self._socket.closed is False:
        self._socket.close()
    if hasattr(self, 'context') and self.context.closed is False:
        self.context.term()
Cleanly shut down the router socket
def cache_call_signatures(source, user_pos, stmt):
    index = user_pos[0] - 1
    lines = source.splitlines() or ['']
    if source and source[-1] == '\n':
        lines.append('')

    before_cursor = lines[index][:user_pos[1]]
    other_lines = lines[stmt.start_pos[0]:index]
    whole = '\n'.join(other_lines + [before_cursor])
    before_bracket = re.match(r'.*\(', whole, re.DOTALL)

    module_path = stmt.get_parent_until().path
    return None if module_path is None else (module_path, before_bracket,
                                             stmt.start_pos)
This function calculates the cache key.
def run_and_save(data):
    run(None, data)
    stats_file, idxstats_file = _get_stats_files(data)
    data = tz.update_in(data, ["depth", "samtools", "stats"],
                        lambda x: stats_file)
    data = tz.update_in(data, ["depth", "samtools", "idxstats"],
                        lambda x: idxstats_file)
    return data
Run QC, saving file outputs in data dictionary.
def close_stream(self):
    if not self.is_connected:
        return
    self.stream.close()
    self.state = DISCONNECTED
    self.on_close.send(self)
Close the underlying socket.
def symbol(self):
    if self._symbol is None:
        self._symbol = self._symbol_extract(cache.RE_CURSOR)
    return self._symbol
Gets the symbol under the current cursor.
def _hue(color, **kwargs):
    h = colorsys.rgb_to_hls(*[x / 255.0 for x in color.value[:3]])[0]
    return NumberValue(h * 360.0)
Get hue value of HSL color.
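The hue comes straight from colorsys, rescaled from the [0, 1) fraction to degrees. For pure green the standard-library call already shows the arithmetic:

    >>> colorsys.rgb_to_hls(0.0, 1.0, 0.0)[0] * 360.0
    120.0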
def to_dict(self):
    return {
        'id': self.set_id,
        'title': self.title,
        'terms': [term.to_dict() for term in self.terms]
    }
Convert WordSet into raw dictionary data.
def _remove_boundaries(self, interval):
    begin = interval.begin
    end = interval.end
    if self.boundary_table[begin] == 1:
        del self.boundary_table[begin]
    else:
        self.boundary_table[begin] -= 1
    if self.boundary_table[end] == 1:
        del self.boundary_table[end]
    else:
        self.boundary_table[end] -= 1
Removes the boundaries of the interval from the boundary table.
def dump_image_data(dataset_dir, data_dir, dataset, color_array_info,
                    root=None, compress=True):
    if root is None:
        root = {}
    root['vtkClass'] = 'vtkImageData'
    container = root
    container['spacing'] = dataset.GetSpacing()
    container['origin'] = dataset.GetOrigin()
    container['extent'] = dataset.GetExtent()
    dump_all_arrays(dataset_dir, data_dir, dataset, container, compress)
    return root
Dump image data object to vtkjs
def run(self):
    self.find_new()
    for n in self.news:
        print("{0}".format(n))
    print("")
    self.msg.template(78)
    print("| Installed {0} new configuration files:".format(len(self.news)))
    self.msg.template(78)
    self.choices()
Print .new configuration files.
def jsontype(self, name, path=Path.rootPath()):
    return self.execute_command('JSON.TYPE', name, str_path(path))
Gets the type of the JSON value under ``path`` from key ``name``
def to_node(value):
    if isinstance(value, Node):
        return value
    elif isinstance(value, str):
        return Node('string', value=value, pseudo_type='String')
    # bool must be checked before int, since bool is a subclass of int
    elif isinstance(value, bool):
        return Node('boolean', value=str(value).lower(),
                    pseudo_type='Boolean')
    elif isinstance(value, int):
        return Node('int', value=value, pseudo_type='Int')
    elif isinstance(value, float):
        return Node('float', value=value, pseudo_type='Float')
    elif value is None:
        return Node('null', pseudo_type='Void')
    else:
        raise TypeError('cannot convert {!r} to a literal node'.format(value))
Expand a basic-type value to a literal node; otherwise just return the node.
def _check_cat_dict_source(self, cat_dict_class, key_in_self, **kwargs):
    source = kwargs.get(cat_dict_class._KEYS.SOURCE, None)
    if source is None:
        raise CatDictError(
            "{}: `source` must be provided!".format(self[self._KEYS.NAME]),
            warn=True)
    for x in source.split(','):
        if not is_integer(x):
            raise CatDictError(
                "{}: `source` is comma-delimited list of "
                " integers!".format(self[self._KEYS.NAME]),
                warn=True)
    if self.is_erroneous(key_in_self, source):
        self._log.info("This source is erroneous, skipping")
        return None
    if (self.catalog.args is not None and not self.catalog.args.private and
            self.is_private(key_in_self, source)):
        self._log.info("This source is private, skipping")
        return None
    return source
Check that a source exists and that a quantity isn't erroneous.
def _update_phi(self):
    etaprod = 1.0
    for w in range(N_NT - 1):
        self.phi[w] = etaprod * (1 - self.eta[w])
        etaprod *= self.eta[w]
    self.phi[N_NT - 1] = etaprod
Update `phi` using current `eta`.
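This is the usual stick-breaking construction: phi[w] = (1 - eta[w]) * prod(eta[v] for v < w) for the first N_NT - 1 entries, with the final entry phi[N_NT - 1] = prod(eta[v] for all v) absorbing the remaining mass; the sum telescopes to 1 whenever each eta lies in [0, 1].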
def runserver(debug, console_log, use_reloader, address, port, timeout,
              workers, socket):
    debug = debug or config.get('DEBUG') or console_log
    if debug:
        print(Fore.BLUE + '-=' * 20)
        print(
            Fore.YELLOW + 'Starting Superset server in ' +
            Fore.RED + 'DEBUG' +
            Fore.YELLOW + ' mode')
        print(Fore.BLUE + '-=' * 20)
        print(Style.RESET_ALL)
        if console_log:
            console_log_run(app, port, use_reloader)
        else:
            debug_run(app, port, use_reloader)
    else:
        logging.info(
            "The Gunicorn 'superset runserver' command is deprecated. Please "
            "use the 'gunicorn' command instead.")
        addr_str = f' unix:{socket} ' if socket else f' {address}:{port} '
        cmd = (
            'gunicorn '
            f'-w {workers} '
            f'--timeout {timeout} '
            f'-b {addr_str} '
            '--limit-request-line 0 '
            '--limit-request-field_size 0 '
            'superset:app'
        )
        print(Fore.GREEN + 'Starting server with command: ')
        print(Fore.YELLOW + cmd)
        print(Style.RESET_ALL)
        Popen(cmd, shell=True).wait()
Starts a Superset web server.
def pyspread(S=None):
    app = MainApplication(S=S, redirect=False)
    app.MainLoop()
Holds application main loop
def graded_submissions(self):
    qs = self._valid_submissions().filter(state__in=[Submission.GRADED])
    return qs
Queryset for the graded submissions, which are worth closing.
def handle_starttag(self, tag, attrs):
    if not tag == 'a':
        return
    for attr in attrs:
        if attr[0] == 'href':
            url = urllib.unquote(attr[1])
            self.active_url = url.rstrip('/').split('/')[-1]
            return
Callback for when a tag gets opened.
def on_resize(self, event):
    self.context.set_viewport(0, 0, event.size[0], event.size[1])
    for visual in self.visuals:
        visual.on_resize(event.size)
    self.update()
Resize the OpenGL context.
async def echo_all(app, message):
    for address in app.kv.get_prefix('address.').values():
        host, port = address.decode().split(':')
        port = int(port)
        await tcp_echo_client(message, loop, host, port)
Send and receive a message from all running echo servers
async def fromURL(cls, url, *, credentials=None, insecure=False):
    try:
        description = await helpers.fetch_api_description(
            url, insecure=insecure)
    except helpers.RemoteError as error:
        raise SessionError(str(error))
    else:
        session = cls(description, credentials)
        session.insecure = insecure
        return session
Return a `SessionAPI` for a given MAAS instance.
def type(self):
    if self.__type is None:
        found_type = find_definition(
            self.__type_name, self.message_definition())
        if not (found_type is not Enum and
                isinstance(found_type, type) and
                issubclass(found_type, Enum)):
            raise FieldDefinitionError(
                'Invalid enum type: %s' % found_type)
        self.__type = found_type
    return self.__type
Enum type used for field.
def load(path):
    with open(path) as rfile:
        steps = MODEL.parse(rfile.read())
    new_steps = []
    for step in steps:
        new_steps += expand_includes(step, path)
    return new_steps
Load |path| and recursively expand any includes.
def _normalize(self, name, columns, points):
    for i, _ in enumerate(points):
        if points[i] is None:
            del(points[i])
            del(columns[i])
            continue
        try:
            points[i] = float(points[i])
        except (TypeError, ValueError):
            pass
        else:
            continue
        try:
            points[i] = str(points[i])
        except (TypeError, ValueError):
            pass
        else:
            continue
    return [{'measurement': name,
             'tags': self.parse_tags(self.tags),
             'fields': dict(zip(columns, points))}]
Normalize data for the InfluxDB's data model.
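A sketch of the resulting InfluxDB point for a simple input; the tags dict depends on self.tags, so it is elided here:

    >>> self._normalize('cpu', ['user', 'nice'], [1.5, '2'])
    [{'measurement': 'cpu', 'tags': {...}, 'fields': {'user': 1.5, 'nice': 2.0}}]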
def _flush(self):
    if self._recording:
        raise Exception("Cannot flush data queue while recording!")
    if self._saving_cache:
        logging.warn("Flush when using cache means unsaved data "
                     "will be lost and not returned!")
        self._cmds_q.put(("reset_data_segment",))
    else:
        data = self._extract_q(0)
        return data
Returns a list of all current data