Dataset columns: code (string, lengths 51 to 2.34k) and docstring (string, lengths 11 to 171).
def mode(self):
    if self._resources is None:
        self.__init()
    if "mode" in self._resources:
        url = self._url + "/mode"
        return _mode.Mode(url=url,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port,
                          initialize=True)
    else:
        return None
returns an object to work with the site mode
def convert_out(self, obj):
    newobj = super(ProcessedImageProduct, self).convert_out(obj)
    if newobj:
        hdulist = newobj.open()
        hdr = hdulist[0].header
        if 'EMIRUUID' not in hdr:
            hdr['EMIRUUID'] = str(uuid.uuid1())
    return newobj
Write EMIRUUID header on reduction
def directories(self):
    directories_description = [
        self.project_name,
        self.project_name + '/conf',
        self.project_name + '/static',
    ]
    return directories_description
Return the names of directories to be created.
def filter_extant_exports(client, bucket, prefix, days, start, end=None):
    end = end or datetime.now()
    try:
        tag_set = client.get_object_tagging(
            Bucket=bucket, Key=prefix).get('TagSet', [])
    except ClientError as e:
        if e.response['Error']['Code'] != 'NoSuchKey':
            raise
        tag_set = []
    tags = {t['Key']: t['Value'] for t in tag_set}
    if 'LastExport' not in tags:
        return sorted(days)
    last_export = parse(tags['LastExport'])
    if last_export.tzinfo is None:
        last_export = last_export.replace(tzinfo=tzutc())
    return [d for d in sorted(days) if d > last_export]
Filter out days for which the bucket already has extant export keys.
def find_netmiko_dir():
    try:
        netmiko_base_dir = os.environ["NETMIKO_DIR"]
    except KeyError:
        netmiko_base_dir = NETMIKO_BASE_DIR
    netmiko_base_dir = os.path.expanduser(netmiko_base_dir)
    if netmiko_base_dir == "/":
        raise ValueError("/ cannot be netmiko_base_dir")
    netmiko_full_dir = "{}/tmp".format(netmiko_base_dir)
    return (netmiko_base_dir, netmiko_full_dir)
Check environment first, then default dir
def parse_trailer(header):
    pos = 0
    names = []
    while pos < len(header):
        name, pos = expect_re(re_token, header, pos)
        if name:
            names.append(name)
        _, pos = accept_ws(header, pos)
        _, pos = expect_lit(',', header, pos)
        _, pos = accept_ws(header, pos)
    return names
Parse the "Trailer" header.
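A rough usage sketch (the input value is made up, and the expect_re/accept_ws/expect_lit helpers are assumed to behave as their names suggest, consuming a token, optional whitespace, and a literal comma respectively):

parse_trailer("Expires, Content-MD5")   # -> ['Expires', 'Content-MD5']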
def on_error(self, status_code):
    logger.error('Twitter returned error code %s', status_code)
    self.error = status_code
    return False
Called when a non-200 status code is returned
def FUNCTIONNOPROTO(self, _cursor_type):
    returns = _cursor_type.get_result()
    returns = self.parse_cursor_type(returns)
    attributes = []
    obj = typedesc.FunctionType(returns, attributes)
    self.set_location(obj, None)
    return obj
Handles function with no prototype.
def draw_segments(image, segments, color=(255, 0, 0), line_width=1):
    for segment in segments:
        x, y, w, h = segment
        cv2.rectangle(image, (x, y), (x + w, y + h), color, line_width)
draws segments on image
def delete(cls, id):
    client = cls._new_api_client()
    return client.make_request(cls, 'delete', url_params={'id': id})
Destroy a Union object
def add(image_path, file_name=None):
    if file_name is not None:
        dst_path = os.path.join(
            IMG_DIR, str(Path(file_name).stem + Path(image_path).suffix))
    else:
        dst_path = IMG_DIR
    if os.path.isfile(image_path):
        shutil.copy2(image_path, dst_path)
Add an image to the GUI img library.
def stage_http_response2(self, payload): if not self._http_response_version and not payload: return if self.enabled and self.http_detail_level is not None and \ self.httplogger.isEnabledFor(logging.DEBUG): if self._http_response_headers: header_str = \ ' '.join('{0}:{1!r}'.format(k, v) for k, v in self._http_response_headers.items()) else: header_str = '' if self.http_detail_level == 'summary': upayload = "" elif self.http_maxlen and (len(payload) > self.http_maxlen): upayload = (_ensure_unicode(payload[:self.http_maxlen]) + '...') else: upayload = _ensure_unicode(payload) self.httplogger.debug('Response:%s %s:%s %s %s\n %s', self._http_response_conn_id, self._http_response_status, self._http_response_reason, self._http_response_version, header_str, upayload)
Log the complete HTTP response, combining the data staged by stage 1 with the payload
def _maximize(self):
    if not self.space.is_observations_valid():
        return None
    y_max = self.space.y.max()
    self.utility_function.gaussian_process.fit(self.space.x, self.space.y)
    return self.utility_function.max_compute(y_max=y_max,
                                             bounds=self.space.bounds,
                                             n_warmup=self.n_warmup,
                                             n_iter=self.n_iter)
Find argmax of the acquisition function.
def _do_help(self, cmd, args): print(self.doc_string()) print() data_unsorted = [] cls = self.__class__ for name in dir(cls): obj = getattr(cls, name) if iscommand(obj): cmds = [] for cmd in getcommands(obj): cmds.append(cmd) cmd_str = ','.join(sorted(cmds)) doc_str = textwrap.dedent(obj.__doc__).strip() if obj.__doc__ else \ '(no doc string available)' data_unsorted.append([cmd_str, doc_str]) data_sorted = sorted(data_unsorted, key = lambda x: x[0]) data = [['COMMANDS', 'DOC STRING']] + data_sorted table_banner = 'List of Available Commands' table = terminaltables.SingleTable(data, table_banner) table.inner_row_border = True table.inner_heading_row_border = True print(table.table)
Display doc strings of the shell and its commands.
def add_subcommands(parser, commands):
    "Add commands to a parser"
    subps = parser.add_subparsers()
    for cmd, cls in commands:
        subp = subps.add_parser(cmd, help=cls.__doc__)
        add_args = getattr(cls, 'add_arguments', None)
        if add_args:
            add_args(subp)
        handler = getattr(cls, 'handle', None)
        if handler:
            subp.set_defaults(handler=handler)
Add commands to a parser
def _replace_token_range(tokens, start, end, replacement):
    tokens = tokens[:start] + replacement + tokens[end:]
    return tokens
For a range indicated from start to end, replace with replacement.
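A minimal illustration of the half-open range semantics (start is included, end is excluded); the values here are made up:

tokens = ['a', 'b', 'c', 'd']
_replace_token_range(tokens, 1, 3, ['X'])   # -> ['a', 'X', 'd']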
def merge_from(self, other):
    if other.national_number_pattern is not None:
        self.national_number_pattern = other.national_number_pattern
    if other.example_number is not None:
        self.example_number = other.example_number
Merge information from another PhoneNumberDesc object into this one.
def _count_pixels_on_line(self, y, p):
    h = line(y, self._effective_thickness(p), 0.0)
    return h.sum()
Count the number of pixels rendered on this line.
def _compare_match(dict1, dict2):
    for karg, warg in six.iteritems(dict1):
        if karg in dict2 and dict2[karg] != warg:
            return False
    return True
Compare two dictionaries and return True if the values of their shared keys match.
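Keys that appear only in dict1 are ignored; only keys present in both dictionaries are compared. A small hand-checked example:

_compare_match({'a': 1, 'b': 2}, {'a': 1, 'c': 3})   # -> True  ('b' is not in dict2)
_compare_match({'a': 1}, {'a': 2})                   # -> False (values differ)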
def map_overview_header_element(feature, parent):
    _ = feature, parent
    header = map_overview_header['string_format']
    return header.capitalize()
Retrieve map overview header string from definitions.
async def observer_evaluate(self, message): observer_id = message['observer'] throttle_rate = get_queryobserver_settings()['throttle_rate'] if throttle_rate <= 0: await self._evaluate(observer_id) return cache_key = throttle_cache_key(observer_id) try: count = cache.incr(cache_key) if count == 2: await self.channel_layer.send( CHANNEL_MAIN, { 'type': TYPE_POLL, 'observer': observer_id, 'interval': throttle_rate, }, ) except ValueError: count = cache.get_or_set(cache_key, default=1, timeout=throttle_rate) if count == 1: await self._evaluate(observer_id)
Execute observer evaluation on the worker or throttle.
def visit_list(self, node, parent): context = self._get_context(node) newnode = nodes.List( ctx=context, lineno=node.lineno, col_offset=node.col_offset, parent=parent ) newnode.postinit([self.visit(child, newnode) for child in node.elts]) return newnode
visit a List node by returning a fresh instance of it
def atom_by_serialnumber(self):
    atm_by_snum = {}
    for atom in self.model.atoms:
        atm_by_snum[atom.serialNumber] = atom
    return atm_by_snum
Provides a dictionary mapping serial numbers to their atom objects.
def server_list(endpoint_id): endpoint, server_list = get_endpoint_w_server_list(endpoint_id) if server_list == "S3": server_list = {"s3_url": endpoint["s3_url"]} fields = [("S3 URL", "s3_url")] text_format = FORMAT_TEXT_RECORD else: fields = ( ("ID", "id"), ("URI", lambda s: (s["uri"] or "none (Globus Connect Personal)")), ) text_format = FORMAT_TEXT_TABLE formatted_print(server_list, text_format=text_format, fields=fields)
Executor for `globus endpoint server list`
def Get(self, attribute, default=None): if attribute is None: return default elif isinstance(attribute, str): attribute = Attribute.GetAttributeByName(attribute) if "r" not in self.mode and (attribute not in self.new_attributes and attribute not in self.synced_attributes): raise IOError("Fetching %s from object not opened for reading." % attribute) for result in self.GetValuesForAttribute(attribute, only_one=True): try: result.attribute_instance = attribute except AttributeError: pass return result return attribute.GetDefault(self, default)
Gets the attribute from this object.
def sample_frame_single_env(self, batch_size, forward_steps=1):
    if self.current_size < self.buffer_capacity:
        return np.random.choice(self.current_size - forward_steps,
                                batch_size, replace=False)
    else:
        candidate = np.random.choice(self.buffer_capacity, batch_size,
                                     replace=False)
        forbidden_ones = (
            np.arange(self.current_idx - forward_steps + 1,
                      self.current_idx + self.frame_history)
            % self.buffer_capacity
        )
        while any(x in candidate for x in forbidden_ones):
            candidate = np.random.choice(self.buffer_capacity, batch_size,
                                         replace=False)
        return candidate
Return indices of a random set of frames from the buffer that have enough history and future
def DosDateTimeToTimeTuple(dosDateTime):
    dos_date = dosDateTime >> 16
    dos_time = dosDateTime & 0xffff
    day = dos_date & 0x1f
    month = (dos_date >> 5) & 0xf
    year = 1980 + (dos_date >> 9)
    second = 2 * (dos_time & 0x1f)
    minute = (dos_time >> 5) & 0x3f
    hour = dos_time >> 11
    return time.localtime(
        time.mktime((year, month, day, hour, minute, second, 0, 1, -1)))
Convert an MS-DOS format date time to a Python time tuple.
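The date lives in the high 16 bits and the time in the low 16 bits; the result goes through time.mktime/time.localtime, so it is interpreted in the local timezone. A hand-packed sketch (values chosen for illustration):

dos_date = (39 << 9) | (7 << 5) | 1      # year 1980+39=2019, month 7, day 1
dos_time = (12 << 11) | (30 << 5) | 0    # 12:30:00 (seconds are stored as sec//2)
tt = DosDateTimeToTimeTuple((dos_date << 16) | dos_time)
# tt[:6] == (2019, 7, 1, 12, 30, 0), assuming no DST ambiguity at that local time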
def batch_contains_deleted(self):
    "Check if current batch contains already deleted images."
    if not self._duplicates:
        return False
    imgs = [self._all_images[:self._batch_size][0][1],
            self._all_images[:self._batch_size][1][1]]
    return any(img in self._deleted_fns for img in imgs)
Check if current batch contains already deleted images.
def _parent_foreign_key_mappings(cls):
    parent_rel = cls.__mapper__.relationships.get(cls.export_parent)
    if parent_rel:
        return {l.name: r.name for (l, r) in parent_rel.local_remote_pairs}
    return {}
Get a mapping between local and remote column names for the parent foreign keys
def _cancel_orphan_orders(self, orderId):
    orders = self.ibConn.orders
    for order in orders:
        order = orders[order]
        if order['parentId'] != orderId:
            self.ibConn.cancelOrder(order['id'])
cancel child orders when parent is gone
def authenticate(self, transport, account_name, password=None):
    Authenticator.authenticate(self, transport, account_name, password)
    if password == None:
        return self.pre_auth(transport, account_name)
    else:
        return self.auth(transport, account_name, password)
Authenticates account using soap method.
def _get_products(self): products_request = self.account_products() if products_request['error']: raise Exception(products_request['error']) product_ids = [] for product in products_request["json"]["entries"]: product_ids.append(product['productId']) self.products = {} for id in product_ids: product_request = self.account_product(id) if product_request['error']: raise Exception(product_request['error']) self.products[id] = product_request['json'] return self.products
a method to retrieve account product details at initialization
def get(key, profile=None):
    if not profile:
        return False
    redis_kwargs = profile.copy()
    redis_kwargs.pop('driver')
    redis_conn = redis.StrictRedis(**redis_kwargs)
    return redis_conn.get(key)
Get a value from the Redis SDB.
def _convert_key_to_str(key): return salt.utils.data.encode(key) \ if six.PY2 and isinstance(key, unicode) \ else key
Stolen completely from boto.providers
def _no_op(name, **kwargs): return dict(name=name, result=True, changes={}, comment='')
No-op state to support state config via the stateconf renderer.
def bootstrap(self, config):
    pg_hba = config.get('pg_hba', [])
    method = config.get('method') or 'initdb'
    self._running_custom_bootstrap = method != 'initdb' and method in config and 'command' in config[method]
    if self._running_custom_bootstrap:
        do_initialize = self._custom_bootstrap
        config = config[method]
    else:
        do_initialize = self._initdb
    return do_initialize(config) and self.append_pg_hba(pg_hba) and self.save_configuration_files() \
        and self._configure_server_parameters() and self.start()
Initialize a new node from scratch and start it.
def _construct_form(self, i, **kwargs): if not settings.HIDE_LANGUAGE: self._construct_available_languages() form = super(TranslationFormSet, self)._construct_form(i, **kwargs) if settings.HIDE_LANGUAGE: form.instance.language_code = settings.DEFAULT_LANGUAGE else: language_code = form.instance.language_code if language_code: logger.debug( u'Removing translation choice %s for instance %s' u' in form %d', language_code, form.instance, i ) self.available_languages.remove(language_code) else: initial_language_code = self._get_default_language() logger.debug( u'Preselecting language code %s for form %d', initial_language_code, i ) form.initial['language_code'] = initial_language_code return form
Construct the form, overriding the initial value for `language_code`.
def __isValidFilename(self, filename):
    if filename and isinstance(filename, string_types):
        if re.match(r'^[\w\d\_\-\.]+$', filename, re.I):
            if self.__isValidTGZ(filename) or self.__isValidZIP(filename):
                return True
    return False
Determine whether filename is valid
def _get_cache(self):
    if not self._cache:
        self._cache = get_cache(self.app)
    return self._cache
Return the cache to use for thundering herd protection, etc.
def remove_non_magic_cols(self):
    for table_name in self.tables:
        table = self.tables[table_name]
        table.remove_non_magic_cols_from_table()
Remove all non-MagIC columns from all tables.
def AddLabels(self, labels_names, owner=None): if owner is None and not self.token: raise ValueError("Can't set label: No owner specified and " "no access token available.") if isinstance(labels_names, string_types): raise ValueError("Label list can't be string.") owner = owner or self.token.username current_labels = self.Get(self.Schema.LABELS, self.Schema.LABELS()) for label_name in labels_names: label = rdf_aff4.AFF4ObjectLabel( name=label_name, owner=owner, timestamp=rdfvalue.RDFDatetime.Now()) current_labels.AddLabel(label) self.Set(current_labels)
Add labels to the AFF4Object.
def _default_capacity(self, value):
    if value is not None:
        return value
    if self.default_return_capacity or self.rate_limiters:
        return INDEXES
    return NONE
Get the value for ReturnConsumedCapacity from provided value
def run_to_states(self):
    self.execution_engine_lock.acquire()
    return_value = self._run_to_states
    self.execution_engine_lock.release()
    return return_value
Property for the _run_to_states field
def main(host='localhost', port=8086): now = datetime.datetime.today() points = [] for angle in range(0, 360): y = 10 + math.sin(math.radians(angle)) * 10 point = { "measurement": 'foobar', "time": int(now.strftime('%s')) + angle, "fields": { "value": y } } points.append(point) client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME) print("Create database: " + DBNAME) client.create_database(DBNAME) client.switch_database(DBNAME) client.write_points(points) time.sleep(3) query = 'SELECT * FROM foobar' print("Querying data: " + query) result = client.query(query, database=DBNAME) print("Result: {0}".format(result)) print("Delete database: " + DBNAME) client.drop_database(DBNAME)
Generate a sine wave, write the points to InfluxDB, query them back, then drop the database.
def list_contributors(self, project_id=None, language_code=None):
    data = self._run(
        url_path="contributors/list",
        id=project_id,
        language=language_code
    )
    return data['result'].get('contributors', [])
Returns the list of contributors
def parse_list(self):
    try:
        return List([self.parse()
                     for _ in self.collect_tokens_until('CLOSE_BRACKET')])
    except IncompatibleItemType as exc:
        raise self.error(f'Item {str(exc.item)!r} is not a '
                         f'{exc.subtype.__name__} tag') from None
Parse a list from the token stream.
def parse_metrics(self, f): headers = None for l in f['f'].splitlines(): s = l.strip().split("\t") if headers is None: headers = s else: s_name = s[ headers.index('Sample') ] data = dict() for idx, h in enumerate(headers): try: data[h] = float(s[idx]) except ValueError: data[h] = s[idx] self.rna_seqc_metrics[s_name] = data
Parse the metrics.tsv file from RNA-SeQC
def do_verify(marfile, keyfiles=None): try: with open(marfile, 'rb') as f: with MarReader(f) as m: errors = m.get_errors() if errors: print("File is not well formed: {}".format(errors)) sys.exit(1) if keyfiles: try: keys = get_keys(keyfiles, m.signature_type) except ValueError as e: print(e) sys.exit(1) if any(m.verify(key) for key in keys): print("Verification OK") return True else: print("Verification failed") sys.exit(1) else: print("Verification OK") return True except Exception as e: print("Error opening or parsing file: {}".format(e)) sys.exit(1)
Verify the MAR file.
def environment_session_entity_type_path(cls, project, environment, user, session, entity_type): return google.api_core.path_template.expand( 'projects/{project}/agent/environments/{environment}/users/{user}/sessions/{session}/entityTypes/{entity_type}', project=project, environment=environment, user=user, session=session, entity_type=entity_type, )
Return a fully-qualified environment_session_entity_type string.
def jflatten(j):
    nobs, nf, nargs = j.shape
    nrows, ncols = nf * nobs, nargs * nobs
    jflat = np.zeros((nrows, ncols))
    for n in xrange(nobs):
        r, c = n * nf, n * nargs
        jflat[r:(r + nf), c:(c + nargs)] = j[n]
    return jflat
Flatten a 3-D Jacobian into 2-D.
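The output is block-diagonal: observation n's (nf x nargs) Jacobian lands in rows n*nf:(n+1)*nf and columns n*nargs:(n+1)*nargs, with zeros elsewhere. A small sketch (shapes made up for illustration):

import numpy as np
j = np.arange(8.0).reshape(2, 2, 2)   # nobs=2, nf=2, nargs=2
jflatten(j).shape                     # -> (4, 4); j[0] fills the top-left 2x2 block, j[1] the bottom-right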
def float16(val):
    frac = val & 0x03ff
    exp = (val >> 10) & 0x1F
    sign = val >> 15
    if exp:
        value = 2 ** (exp - 16) * (1 + float(frac) / 2**10)
    else:
        value = float(frac) / 2**9
    if sign:
        value *= -1
    return value
Convert a 16-bit floating point value to a standard Python float.
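Note that this encoding is not IEEE 754 half precision: the exponent bias here is 16 rather than 15, and the subnormal branch divides by 2**9. Two hand-checked values under this encoding:

float16(0x4000)   # exp=16, frac=0, sign=0 -> 2**0 * 1.0 = 1.0
float16(0xC400)   # exp=17, frac=0, sign=1 -> -(2**1)    = -2.0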
def getTimes(dataTasks):
    global begin_time
    start_time, end_time = float('inf'), 0
    for fichier, vals in dataTasks.items():
        try:
            if hasattr(vals, 'values'):
                tmp_start_time = min([a['start_time'] for a in vals.values()])[0]
                if tmp_start_time < start_time:
                    start_time = tmp_start_time
                tmp_end_time = max([a['end_time'] for a in vals.values()])[0]
                if tmp_end_time > end_time:
                    end_time = tmp_end_time
        except ValueError:
            continue
    begin_time = 1000 * start_time
    return 1000 * start_time, 1000 * end_time
Get the start time and the end time of data in milliseconds
def zDDEClose(self): if _PyZDDE.server and not _PyZDDE.liveCh: _PyZDDE.server.Shutdown(self.conversation) _PyZDDE.server = 0 elif _PyZDDE.server and self.connection and _PyZDDE.liveCh == 1: _PyZDDE.server.Shutdown(self.conversation) self.connection = False self.appName = '' _PyZDDE.liveCh -= 1 _PyZDDE.server = 0 elif self.connection: _PyZDDE.server.Shutdown(self.conversation) self.connection = False self.appName = '' _PyZDDE.liveCh -= 1 return 0
Close the DDE link with Zemax server
def open(safe_file):
    if os.path.isdir(safe_file) or os.path.isfile(safe_file):
        return SentinelDataSet(safe_file)
    else:
        raise IOError("file not found: %s" % safe_file)
Return a SentinelDataSet object.
def getLocalDateAndTime(date, time, *args, **kwargs):
    localDt = getLocalDatetime(date, time, *args, **kwargs)
    if time is not None:
        return (localDt.date(), localDt.timetz())
    else:
        return (localDt.date(), None)
Get the date and time in the local timezone from date and optionally time
def run(self): input = self._consume() put_item = self._que_out.put try: if input is None: res = self._callable(*self._args, **self._kwargs) else: res = self._callable(input, *self._args, **self._kwargs) if res != None: for item in res: put_item(item) except Exception as e: self._que_err.put((self.name, e)) if input is not None: for i in input: pass raise finally: for i in range(self._num_followers): put_item(EXIT) self._que_err.put(EXIT)
Execute the task on all the input and send the needed number of EXIT at the end
def swap(self, c2):
    inv = False
    c1 = self
    if c1.order > c2.order:
        ct = c1
        c1 = c2
        c2 = ct
        inv = True
    return inv, c1, c2
Put the pair of currencies into market-standard order.
def update(self, instance): assert isinstance(instance, UnitOfWork) if instance.db_id: query = {'_id': ObjectId(instance.db_id)} else: query = {unit_of_work.PROCESS_NAME: instance.process_name, unit_of_work.TIMEPERIOD: instance.timeperiod, unit_of_work.START_ID: instance.start_id, unit_of_work.END_ID: instance.end_id} self.ds.update(COLLECTION_UNIT_OF_WORK, query, instance) return instance.db_id
Finds the unit_of_work record and changes its status
def parse_table_name(self, table):
    if "." in table:
        schema, table = table.split(".")
    else:
        schema = None
    return (schema, table)
Parse schema qualified table name
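For example (calling the method on a hypothetical instance obj; the inputs are illustrative):

obj.parse_table_name("public.users")   # -> ('public', 'users')
obj.parse_table_name("users")          # -> (None, 'users')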
def fetch(cls, client, _id, symbol):
    url = "https://api.robinhood.com/options/chains/"
    params = {
        "equity_instrument_ids": _id,
        "state": "active",
        "tradability": "tradable"
    }
    data = client.get(url, params=params)

    def filter_func(x):
        return x["symbol"] == symbol

    results = list(filter(filter_func, data["results"]))
    return results[0]
fetch option chain for instrument
def result_consumed(self, task_id): logger.debug('Sending result consumed message.') data = { 'task_ids': task_id, } return self._perform_post_request(self.results_consumed_endpoint, data, self.token_header)
Report the result as successfully consumed.
def previous_friday(dt):
    if dt.weekday() == 5:
        return dt - timedelta(1)
    elif dt.weekday() == 6:
        return dt - timedelta(2)
    return dt
If holiday falls on Saturday or Sunday, use previous Friday instead.
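Since datetime.weekday() returns 5 for Saturday and 6 for Sunday, the adjustment steps back one or two days. A quick check with dates chosen for illustration:

from datetime import datetime
previous_friday(datetime(2024, 6, 1))   # Saturday -> 2024-05-31 (Friday)
previous_friday(datetime(2024, 6, 2))   # Sunday   -> 2024-05-31 (Friday)
previous_friday(datetime(2024, 6, 3))   # Monday   -> returned unchanged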
def _density_seaborn_(self, label=None, style=None, opts=None): try: fig = sns.kdeplot(self.df[self.x], self.df[self.y]) fig = self._set_with_height(fig, opts) return fig except Exception as e: self.err(e, self.density_, "Can not draw density chart")
Returns a Seaborn density chart
def objc_type_encoding(self): if not hasattr(self, '_objc_type_encoding'): self._objc_type_encoding = \ conf.lib.clang_getDeclObjCTypeEncoding(self) return self._objc_type_encoding
Return the Objective-C type encoding as a str.
def readTableFromDelimited(f, separator="\t"):
    rowNames = []
    columnNames = []
    matrix = []
    first = True
    for line in f.readlines():
        line = line.rstrip()
        if len(line) == 0:
            continue
        row = line.split(separator)
        if first:
            columnNames = row[1:]
            first = False
        else:
            rowNames.append(row[0])
            matrix.append([float(c) for c in row[1:]])
    return Table(rowNames, columnNames, matrix)
Reads a table object from given plain delimited file.
def setup_runner(self):
    runner = ApplicationRunner(
        url=self.config['transport_host'],
        realm=u'realm1',
        extra={
            'config': self.config,
            'handlers': self.handlers,
        }
    )
    return runner
Set up the ApplicationRunner instance.
def send_to_address(recipient_address, amount, private_key, blockchain_client=BlockchainInfoClient(), fee=STANDARD_FEE, change_address=None): signed_tx = make_send_to_address_tx(recipient_address, amount, private_key, blockchain_client, fee=fee, change_address=change_address) response = broadcast_transaction(signed_tx, blockchain_client) return response
Builds, signs, and dispatches a "send to address" transaction.
def event_return(events):
    for event in events:
        ret = event.get('data', False)
        if ret:
            returner(ret)
Return event data via SMTP
def create_client(access_token): url = 'http://keycloak:8080/auth/admin/realms/dci-test/clients' r = requests.post(url, data=json.dumps(client_data), headers=get_auth_headers(access_token)) if r.status_code in (201, 409): print('Keycloak client dci created successfully.') else: raise Exception( 'Error while creating Keycloak client dci:\nstatus code %s\n' 'error: %s' % (r.status_code, r.content) )
Create the dci client in the master realm.
def add_subparser(subparsers): parser = subparsers.add_parser("version", help="Export versions of used software to stdout or a file ") parser.add_argument("--workdir", help="Directory export programs to in workdir/provenance/programs.txt", default=None)
Add command line option for exporting version information.
def save_load(jid, clear_load, minions=None):
    for returner_ in __opts__[CONFIG_KEY]:
        _mminion().returners['{0}.save_load'.format(returner_)](jid, clear_load)
Write load to all returners in multi_returner
def tilequeue_rawr_enqueue(cfg, args): from tilequeue.stats import RawrTileEnqueueStatsHandler from tilequeue.rawr import make_rawr_enqueuer_from_cfg msg_marshall_yaml = cfg.yml.get('message-marshall') assert msg_marshall_yaml, 'Missing message-marshall config' msg_marshaller = make_message_marshaller(msg_marshall_yaml) logger = make_logger(cfg, 'rawr_enqueue') stats = make_statsd_client_from_cfg(cfg) stats_handler = RawrTileEnqueueStatsHandler(stats) rawr_enqueuer = make_rawr_enqueuer_from_cfg( cfg, logger, stats_handler, msg_marshaller) with open(args.expiry_path) as fh: coords = create_coords_generator_from_tiles_file(fh) rawr_enqueuer(coords)
command to take tile expiry path and enqueue for rawr tile generation
def add(self, piece_uid, index): if self.occupancy[index]: raise OccupiedPosition if self.exposed_territory[index]: raise VulnerablePosition klass = PIECE_CLASSES[piece_uid] piece = klass(self, index) territory = piece.territory for i in self.indexes: if self.occupancy[i] and territory[i]: raise AttackablePiece self.pieces.add(piece) self.occupancy[index] = True self.exposed_territory = list( map(or_, self.exposed_territory, territory))
Add a piece to the board at the provided linear position.
def shell(no_ipython): banner = "Interactive Werkzeug Shell" namespace = make_shell() if not no_ipython: try: try: from IPython.frontend.terminal.embed import InteractiveShellEmbed sh = InteractiveShellEmbed.instance(banner1=banner) except ImportError: from IPython.Shell import IPShellEmbed sh = IPShellEmbed(banner=banner) except ImportError: pass else: sh(local_ns=namespace) return from code import interact interact(banner, local=namespace)
Start a new interactive python session.
def artifact_cache_dir(self): return (self.get_options().artifact_cache_dir or os.path.join(self.scratch_dir, 'artifacts'))
Note that this is unrelated to the general pants artifact cache.
def extended_blank_lines(logical_line, blank_lines, blank_before, indent_level, previous_logical): if previous_logical.startswith('def '): if blank_lines and pycodestyle.DOCSTRING_REGEX.match(logical_line): yield (0, 'E303 too many blank lines ({0})'.format(blank_lines)) elif pycodestyle.DOCSTRING_REGEX.match(previous_logical): if ( indent_level and not blank_lines and not blank_before and logical_line.startswith(('def ')) and '(self' in logical_line ): yield (0, 'E301 expected 1 blank line, found 0')
Check for missing blank lines after class declaration.
def load(self, **kwargs): coordsys = kwargs.get('coordsys', 'CEL') extdir = kwargs.get('extdir', self.extdir) srcname = kwargs.get('srcname', None) self.clear() self.load_diffuse_srcs() for c in self.config['catalogs']: if isinstance(c, catalog.Catalog): self.load_existing_catalog(c) continue extname = os.path.splitext(c)[1] if extname != '.xml': self.load_fits_catalog(c, extdir=extdir, coordsys=coordsys, srcname=srcname) elif extname == '.xml': self.load_xml(c, extdir=extdir, coordsys=coordsys) else: raise Exception('Unrecognized catalog file extension: %s' % c) for c in self.config['sources']: if 'name' not in c: raise Exception( 'No name field in source dictionary:\n ' + str(c)) self.create_source(c['name'], c, build_index=False) self._build_src_index()
Load both point source and diffuse components.
def widen(self):
    t, h = self.time, self.half_duration
    h *= self.scaling_coeff_x
    self.set_interval((t - h, t + h))
Increase the interval size.
def start(ctx, **kwargs): update_context(ctx, kwargs) daemon = mk_daemon(ctx) if ctx.debug or kwargs['no_fork']: daemon.run() else: daemon.start()
start a vaping process
def update_params(params, updates):
    params = params.copy() if isinstance(params, dict) else dict()
    params.update(updates)
    return params
Merges updates into params
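The original params mapping is never mutated (a copy is updated), and a non-dict params is treated as empty. For instance:

update_params({'a': 1}, {'b': 2})   # -> {'a': 1, 'b': 2}
update_params(None, {'b': 2})       # -> {'b': 2}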
def to_prj(self, filename):
    with open(filename, "w") as fp:
        fp.write(self.prj)
Saves prj WKT to given file.
def aggregate_key(self, aggregate_key):
    aggregation = self.data_dict[aggregate_key]
    data_dict_keys = {y for x in aggregation for y in x.keys()}
    for key in data_dict_keys:
        stacked = np.stack([d[key] for d in aggregation], axis=0)
        self.data_dict[key] = np.mean(stacked, axis=0)
Aggregate values from key and put them into the top-level dictionary
def create(cls, api, run_id=None, project=None, username=None): run_id = run_id or util.generate_id() project = project or api.settings.get("project") mutation = gql( ) variables = {'entity': username, 'project': project, 'name': run_id} res = api.client.execute(mutation, variable_values=variables) res = res['upsertBucket']['bucket'] return Run(api.client, res["project"]["entity"]["name"], res["project"]["name"], res["name"], { "id": res["id"], "config": "{}", "systemMetrics": "{}", "summaryMetrics": "{}", "tags": [], "description": None, "state": "running" })
Create a run for the given project
def apply(funcs, stack): return reduce(lambda x, y: y(x), funcs, stack)
Apply functions to the stack, passing the resulting stack to next state.
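Because reduce threads the running stack through each function in turn, this is left-to-right composition: apply([f, g], stack) is equivalent to g(f(stack)). A toy example (on Python 3 this assumes reduce was imported from functools):

apply([lambda s: s + [1], lambda s: s + [2]], [])   # -> [1, 2]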
def filter_by_domain(self, domain):
    query = self._copy()
    query.domain = domain
    return query
Apply the given domain to a copy of this query
def flag_values_dict(self): return {name: flag.value for name, flag in six.iteritems(self._flags())}
Returns a dictionary that maps flag names to flag values.
def _execution(self):
    did_start_executing = False
    if self.state == STATE_DEFAULT:
        did_start_executing = True
        self.state = STATE_EXECUTING

    def close():
        if did_start_executing and self.state == STATE_EXECUTING:
            self.state = STATE_DEFAULT

    yield close
    close()
Context manager for executing some JavaScript inside a template.
def _spec(self, name):
    "Return the named spec."
    for s in self._framespec:
        if s.name == name:
            return s
    raise ValueError("Unknown spec: " + name)
Return the named spec.
def create_secret(*args, **kwargs):
    to_sign = '-!'.join(args) + '$$'.join(kwargs.values())
    key = settings.SECRET_FOR_SIGNS
    hashed = hmac.new(key, to_sign, sha1)
    return re.sub(r'[\W_]+', '', binascii.b2a_base64(hashed.digest()))
Return a secure key generated from the user and the object. As we load elements from any class based on user input, this prevents the user from specifying an arbitrary class.
def unfreeze_extensions(self):
    output_path = os.path.join(_registry_folder(), 'frozen_extensions.json')
    if not os.path.isfile(output_path):
        raise ExternalError("There is no frozen extension list")
    os.remove(output_path)
    ComponentRegistry._frozen_extensions = None
Remove a previously frozen list of extensions.
def scale_cb(self, setting, value):
    zoomlevel = self.zoom.calc_level(value)
    self.t_.set(zoomlevel=zoomlevel)
    self.redraw(whence=0)
Handle callback related to image scaling.
def _keyring_equivalent(keyring_one, keyring_two): def keyring_extract_key(file_path): with open(file_path) as f: for line in f: content = line.strip() if len(content) == 0: continue split_line = content.split('=') if split_line[0].strip() == 'key': return "=".join(split_line[1:]).strip() raise RuntimeError("File '%s' is not a keyring" % file_path) key_one = keyring_extract_key(keyring_one) key_two = keyring_extract_key(keyring_two) return key_one == key_two
Check two keyrings are identical
def objectsFromPEM(pemdata): certificates = [] keys = [] blobs = [b""] for line in pemdata.split(b"\n"): if line.startswith(b'-----BEGIN'): if b'CERTIFICATE' in line: blobs = certificates else: blobs = keys blobs.append(b'') blobs[-1] += line blobs[-1] += b'\n' keys = [KeyPair.load(key, FILETYPE_PEM) for key in keys] certificates = [Certificate.loadPEM(certificate) for certificate in certificates] return PEMObjects(keys=keys, certificates=certificates)
Load some objects from a PEM.
def dweet_for(thing_name, payload, key=None, session=None):
    if key is not None:
        params = {'key': key}
    else:
        params = None
    return _send_dweet(payload, '/dweet/for/{0}'.format(thing_name),
                       params=params, session=session)
Send a dweet to dweet.io for a thing with a known name
def partition(molList, options):
    status_field = options.status_field
    active_label = options.active_label
    decoy_label = options.decoy_label
    activeList = []
    decoyList = []
    for mol in molList:
        if mol.GetProp(status_field) == active_label:
            activeList.append(mol)
        elif mol.GetProp(status_field) == decoy_label:
            decoyList.append(mol)
    return activeList, decoyList
Partition molList into activeList and decoyList
def cursor(self):
    if self._cursor < 0:
        self.cursor = 0
    if self._cursor > len(self):
        self.cursor = len(self)
    return self._cursor
The position of the cursor in the text.
def load_json_file_contents(path: str) -> str:
    assert isinstance(path, str)
    content = None
    file_path = os.path.abspath(path)
    content = fileutils.read_text_from_file(file_path)
    json_object = json.loads(content)
    content = json.dumps(json_object, sort_keys=True, indent=4)
    return content
Loads contents from a json file
def _no_primary(max_staleness, selection):
    smax = selection.secondary_with_max_last_write_date()
    if not smax:
        return selection.with_server_descriptions([])

    sds = []
    for s in selection.server_descriptions:
        if s.server_type == SERVER_TYPE.RSSecondary:
            staleness = (smax.last_write_date -
                         s.last_write_date +
                         selection.heartbeat_frequency)
            if staleness <= max_staleness:
                sds.append(s)
        else:
            sds.append(s)
    return selection.with_server_descriptions(sds)
Apply max_staleness, in seconds, to a Selection with no known primary.
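Each secondary's staleness is measured against the most up-to-date secondary, padded by the heartbeat interval. A worked example with made-up numbers:

# SMax last wrote 10s ago, S last wrote 35s ago, heartbeat_frequency = 10s
# staleness(S) = (35 - 10) + 10 = 35s -> S is filtered out when max_staleness is 30s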
def experiment_list(args): experiment_config = Experiments() experiment_dict = experiment_config.get_all_experiments() if not experiment_dict: print('There is no experiment running...') exit(1) update_experiment() experiment_id_list = [] if args.all and args.all == 'all': for key in experiment_dict.keys(): experiment_id_list.append(key) else: for key in experiment_dict.keys(): if experiment_dict[key]['status'] != 'STOPPED': experiment_id_list.append(key) if not experiment_id_list: print_warning('There is no experiment running...\nYou can use \'nnictl experiment list all\' to list all stopped experiments!') experiment_information = "" for key in experiment_id_list: experiment_information += (EXPERIMENT_DETAIL_FORMAT % (key, experiment_dict[key]['status'], experiment_dict[key]['port'],\ experiment_dict[key].get('platform'), experiment_dict[key]['startTime'], experiment_dict[key]['endTime'])) print(EXPERIMENT_INFORMATION_FORMAT % experiment_information)
get the information of all experiments
def _is_last_child(self, tagname, attributes=None):
    children = self.cur_node.getchildren()
    if children:
        result = self._is_node(tagname, attributes, node=children[-1])
        return result
    return False
Check if last child of cur_node is tagname with attributes