Dataset columns: code (string, 51 to 2.34k characters) and docstring (string, 11 to 171 characters).
def htmIndex(ra, dec, htm_level=3):
    import re
    if os.uname()[0] == "Linux":
        javabin = '/opt/java2/bin/java '
    htm_level = htm_level
    verc_htm_cmd = javabin + '-classpath /usr/cadc/misc/htm/htmIndex.jar edu.jhu.htm.app.lookup %s %s %s' % (htm_level, ra, dec)
    for result in os.popen(verc_htm_cmd).readlines():
        result = result[:-1]
        if re.search("ID/Name cc", result):
            (void, coord) = result.split("=")
            (void, junk, htm_index) = coord.split(" ")
            return htm_index
Compute htm index of htm_level at position ra,dec
def all_firmwares(self):
    all = []
    for manifest in self.manifests:
        for firmware in manifest["firmware"]:
            all.append(firmware)
    return all
return firmware entries from all manifests
def load_img(self, img_path):
    with open_file(self.uuid, img_path) as f:
        return mpimg.imread(f)
Return an image object that can be immediately plotted with matplotlib
def untar(fname, verbose=True):
    if fname.lower().endswith(".tar.gz"):
        dirpath = os.path.join(BIGDATA_PATH, os.path.basename(fname)[:-7])
        if os.path.isdir(dirpath):
            return dirpath
        with tarfile.open(fname) as tf:
            members = tf.getmembers()
            for member in tqdm(members, total=len(members)):
                tf.extract(member, path=BIGDATA_PATH)
        dirpath = os.path.join(BIGDATA_PATH, members[0].name)
        if os.path.isdir(dirpath):
            return dirpath
    else:
        logger.warning("Not a tar.gz file: {}".format(fname))
Unzip and untar a tar.gz file into a subdir of the BIGDATA_PATH directory
def _get_esxdatacenter_proxy_details():
    det = __salt__['esxdatacenter.get_details']()
    return det.get('vcenter'), det.get('username'), det.get('password'), \
        det.get('protocol'), det.get('port'), det.get('mechanism'), \
        det.get('principal'), det.get('domain'), det.get('datacenter')
Returns the running esxdatacenter's proxy details
def use_any_status_sequence_rule_view(self):
    self._operable_views['sequence_rule'] = ANY_STATUS
    for session in self._get_provider_sessions():
        try:
            session.use_any_status_sequence_rule_view()
        except AttributeError:
            pass
Pass through to provider SequenceRuleLookupSession.use_any_status_sequence_rule_view
def scale(self, scalar):
    return self.__class__([self.coefficients[i] * scalar for i in _range(len(self))])
Multiply a polynomial with a scalar
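A minimal usage sketch for the method above, assuming a hypothetical Polynomial class whose constructor takes a coefficient list and exposes it as .coefficients (the class name and layout are illustrative, not from the source):

p = Polynomial([1, 2, 3])   # represents 1 + 2*x + 3*x**2
q = p.scale(5)
print(q.coefficients)       # [5, 10, 15]; p itself is left unchanged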
def main():
    all_summary = {}
    for license in RESOURCES:
        req = requests.get(RESOURCES[license])
        if req.status_code == requests.codes.ok:
            summary = get_summary(req.text)
            can, cannot, must = get_rules(license)
            all_summary[license] = {
                "summary": summary,
                "source": RESOURCES[license],
                "can": can,
                "cannot": cannot,
                "must": must
            }
    with open('summary.json', 'w+') as f:
        f.write(json.dumps(all_summary, indent=4))
Gets all the license information and stores it in json format
def form_query(query_type, query):
    fields = [
        field + "^" + str(SEARCH_BOOSTS[field]) if field in SEARCH_BOOSTS else field
        for field in SEARCH_FIELDS
    ]
    return Q("multi_match", fields=fields, query=query, type=query_type)
Returns a multi match query
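To make the boosting convention concrete: Elasticsearch multi_match queries accept per-field boosts written as "field^boost". A rough sketch with invented SEARCH_FIELDS and SEARCH_BOOSTS values (the real constants are defined elsewhere in the project):

SEARCH_FIELDS = ["name", "summary", "description"]
SEARCH_BOOSTS = {"name": 10}
# form_query("best_fields", "flask") would then build roughly:
# Q("multi_match", fields=["name^10", "summary", "description"], query="flask", type="best_fields")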
def add_rows(self, list_of_rows):
    for row in list_of_rows:
        self.row_deque.append(row)
        self.time_deque.append(time.time())
    self.update()
Add a list of rows to the DataFrameCache class
def save(self, *args, **kwargs):
    self.body_formatted = sanetize_text(self.body)
    # forward positional/keyword arguments so Django save options are preserved
    super(Contact, self).save(*args, **kwargs)
Create formatted version of body text.
def km3h5concat(input_files, output_file, n_events=None, **kwargs):
    from km3pipe import Pipeline
    from km3pipe.io import HDF5Pump, HDF5Sink
    pipe = Pipeline()
    pipe.attach(HDF5Pump, filenames=input_files, **kwargs)
    pipe.attach(StatusBar, every=250)
    pipe.attach(HDF5Sink, filename=output_file, **kwargs)
    pipe.drain(n_events)
Concatenate KM3HDF5 files via pipeline.
def _drop_id_prefixes(self, item):
    if isinstance(item, list):
        return [self._drop_id_prefixes(i) for i in item]
    if isinstance(item, dict):
        return {
            'id' if k.endswith('id') else k: self._drop_id_prefixes(v)
            for k, v in item.items()
        }
    return item
Rename keys ending in 'id', to just be 'id' for nested dicts.
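A quick illustration of the renaming performed above, using invented payload keys (the real field names come from the API, not from this example):

data = {"user_id": 7, "items": [{"order_id": 12, "qty": 2}]}
# _drop_id_prefixes(data) -> {"id": 7, "items": [{"id": 12, "qty": 2}]}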
def create(fs, channels, application):
    result_code = ctypes.c_int()
    result = _create(fs, channels, application, ctypes.byref(result_code))
    # compare by value rather than identity; `is not` on integers is unreliable
    if result_code.value != constants.OK:
        raise OpusError(result_code.value)
    return result
Allocates and initializes an encoder state.
def check_indexes(self):
    for collection_name in INDEXES:
        existing_indexes = self.indexes(collection_name)
        indexes = INDEXES[collection_name]
        for index in indexes:
            index_name = index.document.get('name')
            if index_name not in existing_indexes:
                logger.warning("Index {0} missing. Run command `loqusdb index`".format(index_name))
                return
    logger.info("All indexes exists")
Check if the indexes exists
def get(args):
    from . import config
    for key in args.key.split("."):
        config = getattr(config, key)
    print(json.dumps(config))
Get an Aegea configuration parameter by name
def first(self):
    if self._results_cache:
        return self._results_cache[0]
    query = PaginatedResponse(func=self._func, lwrap_type=self._lwrap_type, **self._kwargs)
    try:
        return next(query)
    except StopIteration:
        return None
Returns the first item from the query, or None if there are no results
def poly(self, return_coeffs=False):
    p = self.bpoints()
    coeffs = (p[0] - 2*p[1] + p[2], 2*(p[1] - p[0]), p[0])
    if return_coeffs:
        return coeffs
    else:
        return np.poly1d(coeffs)
returns the quadratic as a Polynomial object.
def violations(self):
    return self._all_violations if self.config.fail_on == FAIL_ON_ANY else self._diff_violations
Returns either the diff violations or all violations depending on configuration.
def showEvent(self, event):
    super(CallTipWidget, self).showEvent(event)
    self._text_edit.cursorPositionChanged.connect(
        self._cursor_position_changed)
    self._text_edit.installEventFilter(self)
Reimplemented to connect signal handlers and event filter.
def _get_mps_od_net(input_image_shape, batch_size, output_size, anchors, config, weights={}):
    network = _MpsGraphAPI(network_id=_MpsGraphNetworkType.kODGraphNet)
    c_in, h_in, w_in = input_image_shape
    c_out = output_size
    h_out = h_in // 32
    w_out = w_in // 32
    c_view = c_in
    h_view = h_in
    w_view = w_in
    network.init(batch_size, c_in, h_in, w_in, c_out, h_out, w_out,
                 weights=weights, config=config)
    return network
Initializes an MpsGraphAPI for object detection.
def sideral(date, longitude=0., model='mean', eop_correction=True, terms=106):
    theta = _sideral(date, longitude, model, eop_correction, terms)
    return rot3(np.deg2rad(-theta))
Sideral time as a rotation matrix
def parse_to_gvid(v):
    from geoid.civick import GVid
    from geoid.acs import AcsGeoid
    m1 = ''
    try:
        return GVid.parse(v)
    except ValueError as e:
        m1 = str(e)
    try:
        return AcsGeoid.parse(v).convert(GVid)
    except ValueError as e:
        raise ValueError("Failed to parse to either ACS or GVid: {}; {}".format(m1, str(e)))
Parse an ACS Geoid or a GVID to a GVID
def underline(self, text, indent=4):
    length = len(text)
    indentation = (' ' * indent)
    return indentation + text + '\n' + indentation + ('-' * length)
Underline a given text
def _config_options(self):
    self._config_sortable(self._sortable)
    self._config_drag_cols(self._drag_cols)
Apply options set in attributes to Treeview
def run_oldstyle(self):
    arg = [self._load_files(), self.opts['dest']]
    local = salt.client.get_local_client(self.opts['conf_file'])
    args = [self.opts['tgt'],
            'cp.recv',
            arg,
            self.opts['timeout'],
            ]
    selected_target_option = self.opts.get('selected_target_option', None)
    if selected_target_option is not None:
        args.append(selected_target_option)
    return local.cmd(*args)
Make the salt client call in old-style all-in-one call method
def load_adjusted_array(self, domain, columns, dates, sids, mask): if len(columns) != 1: raise ValueError( "Can't load multiple columns with DataFrameLoader" ) column = columns[0] self._validate_input_column(column) date_indexer = self.dates.get_indexer(dates) assets_indexer = self.assets.get_indexer(sids) good_dates = (date_indexer != -1) good_assets = (assets_indexer != -1) data = self.baseline[ix_(date_indexer, assets_indexer)] mask = (good_assets & as_column(good_dates)) & mask data[~mask] = column.missing_value return { column: AdjustedArray( data=data, adjustments=self.format_adjustments(dates, sids), missing_value=column.missing_value, ), }
Load data from our stored baseline.
def nextColRegex(sheet, colregex):
    'Go to first visible column after the cursor matching `colregex`.'
    pivot = sheet.cursorVisibleColIndex
    for i in itertools.chain(range(pivot+1, len(sheet.visibleCols)), range(0, pivot+1)):
        c = sheet.visibleCols[i]
        if re.search(colregex, c.name, regex_flags()):
            return i
    fail('no column name matches /%s/' % colregex)
Go to first visible column after the cursor matching `colregex`.
def save(self):
    logger.debug("Save the GUI state to `%s`.", self.path)
    _save_json(self.path, {k: v for k, v in self.items()
                           if k not in ('config_dir', 'name')})
Save the state to the JSON file in the config dir.
def list_files(tag=None, sat_id=None, data_path=None, format_str=None):
    index = pds.date_range(pysat.datetime(2017, 12, 1), pysat.datetime(2018, 12, 1))
    names = [data_path + date.strftime('%Y-%m-%d') + '.nofile' for date in index]
    return pysat.Series(names, index=index)
Produce a fake list of files spanning a year
def create(self, request, *args, **kwargs):
    serializer = self.get_serializer(data=compat_get_request_data(request))
    compat_serializer_check_is_valid(serializer)
    self.perform_create(request, serializer)
    headers = self.get_success_headers(serializer.data)
    return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
We ensure the Thread only involves eligible participants.
def run_multicore(fn, items, config, parallel=None): if len(items) == 0: return [] if parallel is None or "num_jobs" not in parallel: if parallel is None: parallel = {"type": "local", "cores": config["algorithm"].get("num_cores", 1)} sysinfo = system.get_info({}, parallel) parallel = resources.calculate(parallel, items, sysinfo, config, parallel.get("multiplier", 1), max_multicore=int(parallel.get("max_multicore", sysinfo["cores"]))) items = [config_utils.add_cores_to_config(x, parallel["cores_per_job"]) for x in items] if joblib is None: raise ImportError("Need joblib for multiprocessing parallelization") out = [] for data in joblib.Parallel(parallel["num_jobs"], batch_size=1, backend="multiprocessing")(joblib.delayed(fn)(*x) for x in items): if data: out.extend(data) return out
Run the function using multiple cores on the given items to process.
def run(align_bams, items, ref_file, assoc_files, region, out_file):
    if not utils.file_exists(out_file):
        paired = vcfutils.get_paired_bams(align_bams, items)
        vrs = bedutils.population_variant_regions(items)
        target = shared.subset_variant_regions(vrs, region, out_file, items=items, do_merge=True)
        if paired:
            return _run_somatic(paired, ref_file, target, out_file)
        else:
            return _run_germline(align_bams, items, ref_file, target, out_file)
    return out_file
Run octopus variant calling, handling both somatic and germline calling.
def iter_coords(obj):
    if isinstance(obj, (tuple, list)):
        coords = obj
    elif 'features' in obj:
        coords = [geom['geometry']['coordinates'] for geom in obj['features']]
    elif 'geometry' in obj:
        coords = obj['geometry']['coordinates']
    else:
        coords = obj.get('coordinates', obj)
    for coord in coords:
        if isinstance(coord, (float, int)):
            yield tuple(coords)
            break
        else:
            for f in iter_coords(coord):
                yield f
Returns all the coordinate tuples from a geometry or feature.
def reset(self):
    "If your convolutional window is greater than 1 and you save previous xs, you must reset at the beginning of each new sequence."
    for layer in self.layers:
        layer.reset()
    if self.bidirectional:
        for layer in self.layers_bwd:
            layer.reset()
If your convolutional window is greater than 1 and you save previous xs, you must reset at the beginning of each new sequence.
def build_message(self, checker):
    solution = ' (%s)' % checker.solution if self.with_solutions else ''
    return '{} {}{}'.format(checker.code, checker.msg, solution)
Builds the checker's error message to report
def type_id(self):
    try:
        return ContentType.objects.get_for_model(self.model, for_concrete_model=False).id
    except DatabaseError as e:
        raise DatabaseError("Unable to fetch ContentType object, is a plugin being registered before the initial syncdb? (original error: {0})".format(str(e)))
Shortcut to retrieving the ContentType id of the model.
def _serialize_info(self, record):
    result = []
    for key, value in record.INFO.items():
        info = self.header.get_info_field_info(key)
        if info.type == "Flag":
            result.append(key)
        else:
            result.append("{}={}".format(key, format_value(info, value, "INFO")))
    return ";".join(result)
Return serialized version of record.INFO
def _from_docstring_rst(doc): def format_fn(line, status): if re_from_data.match(line): line = re_from_data.sub(r"**\1** ", line) status["add_line"] = True line = re_from_defaults.sub(r"*\1*", line) if status["listing"]: if re_from_param.match(line): m = re_from_param.match(line) line = " - ``{}`` {}".format(m.group(1), m.group(3)) elif re_from_status.match(line): m = re_from_status.match(line) line = " - ``{}`` {}".format(m.group(1), m.group(3)) elif re_from_item.match(line): line = re_from_item.sub(r" -", line) else: line = " " * 4 + line.lstrip() line = re_lone_backtick.sub("``", line) return line return _reformat_docstring(doc, format_fn, code_newline="\n")
format from docstring to ReStructured Text
def util_mic_len(pkt): if (pkt.nwk_seclevel == 0): return 0 elif (pkt.nwk_seclevel == 1): return 4 elif (pkt.nwk_seclevel == 2): return 8 elif (pkt.nwk_seclevel == 3): return 16 elif (pkt.nwk_seclevel == 4): return 0 elif (pkt.nwk_seclevel == 5): return 4 elif (pkt.nwk_seclevel == 6): return 8 elif (pkt.nwk_seclevel == 7): return 16 else: return 0
Calculate the length of the attribute value field
def _passes_cortex_depth(line, min_depth):
    parts = line.split("\t")
    cov_index = parts[8].split(":").index("COV")
    passes_depth = False
    for gt in parts[9:]:
        cur_cov = gt.split(":")[cov_index]
        cur_depth = sum(int(x) for x in cur_cov.split(","))
        if cur_depth >= min_depth:
            passes_depth = True
    return passes_depth
Do any genotypes in the cortex_var VCF line passes the minimum depth requirement?
def sorted_key_list(self):
    if not self.is_baked:
        self.bake()
    key_value_tuple = sorted(self.dct.items(), key=lambda x: x[1]['__abs_time__'])
    skl = [k[0] for k in key_value_tuple]
    return skl
Returns list of keys sorted according to their absolute time.
def _merge_ovls(self, ovls):
    ret = reduce(lambda x, y: x.merge(y), ovls)
    ret.value = self.value(ovls=ovls)
    ret.set_props(self.props)
    return ret
Merge ovls and also setup the value and props.
def required_arguments(func):
    defaults = default_values_of(func)
    args = arguments_of(func)
    if defaults:
        args = args[:-len(defaults)]
    return args
Return all arguments of a function that do not have a default value.
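A small sketch of what the helper above returns, assuming arguments_of and default_values_of wrap standard inspect-style introspection (the example function is made up):

def connect(host, port, timeout=30, retries=3):
    pass

# arguments_of(connect) -> ['host', 'port', 'timeout', 'retries']
# default_values_of(connect) -> (30, 3)
# required_arguments(connect) -> ['host', 'port']   # the trailing len(defaults) names are dropped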
def getScriptLocation():
    location = os.path.abspath("./")
    if __file__.rfind("/") != -1:
        location = __file__[:__file__.rfind("/")]
    return location
Helper function to get the location of a Python file.
def from_name(cls, name):
    disks = cls.list({'name': name})
    if len(disks) == 1:
        return disks[0]['id']
    elif not disks:
        return
    raise DuplicateResults('disk name %s is ambiguous.' % name)
Retrieve a disk id associated to a name.
def checkConsistency(self): for referenceName, (dataUrl, indexFile) in self._chromFileMap.items(): varFile = pysam.VariantFile(dataUrl, index_filename=indexFile) try: for chrom in varFile.index: chrom, _, _ = self.sanitizeVariantFileFetch(chrom) if not isEmptyIter(varFile.fetch(chrom)): self._checkMetadata(varFile) self._checkCallSetIds(varFile) finally: varFile.close()
Perform consistency check on the variant set
def _update_ssl_config(opts): if opts['ssl'] in (None, False): opts['ssl'] = None return if opts['ssl'] is True: opts['ssl'] = {} return import ssl for key, prefix in (('cert_reqs', 'CERT_'), ('ssl_version', 'PROTOCOL_')): val = opts['ssl'].get(key) if val is None: continue if not isinstance(val, six.string_types) or not val.startswith(prefix) or not hasattr(ssl, val): message = 'SSL option \'{0}\' must be set to one of the following values: \'{1}\'.' \ .format(key, '\', \''.join([val for val in dir(ssl) if val.startswith(prefix)])) log.error(message) raise salt.exceptions.SaltConfigurationError(message) opts['ssl'][key] = getattr(ssl, val)
Resolves string names to integer constant in ssl configuration.
def _getFirstPathExpression(name): tokens = grammar.parseString(name) pathExpression = None while pathExpression is None: if tokens.pathExpression: pathExpression = tokens.pathExpression elif tokens.expression: tokens = tokens.expression elif tokens.call: tokens = tokens.call.args[0] else: break return pathExpression
Returns the first metric path in an expression.
def router(self):
    for server in self._routers:
        info = Servers().info(server)
        if info['procInfo'].get('alive', False):
            return {'id': server, 'hostname': Servers().hostname(server)}
return first available router
def _check_type(self):
    check_type = metric_descriptor.MetricDescriptorType.to_type_class(
        self.descriptor.type)
    for ts in self.time_series:
        if not ts.check_points_type(check_type):
            raise ValueError("Invalid point value type")
Check that point value types match the descriptor type.
def _func(self, volume, params):
    e0, b0, b1, v0 = tuple(params)
    eta = (v0 / volume) ** (1. / 3.)
    return (e0 + 9. * b0 * v0 / 16. * (eta ** 2 - 1)**2 *
            (6 + b1 * (eta ** 2 - 1.) - 4. * eta ** 2))
BirchMurnaghan equation from PRB 70, 224107
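Written out, the method above evaluates the Birch-Murnaghan energy equation of state E(V) = E0 + (9*B0*V0/16) * (eta**2 - 1)**2 * (6 + B1*(eta**2 - 1) - 4*eta**2) with eta = (V0/V)**(1/3). A standalone sanity check with arbitrary parameter values (chosen only to show that E reduces to E0 at V = V0):

def birch_murnaghan(volume, e0, b0, b1, v0):
    eta = (v0 / volume) ** (1. / 3.)
    return e0 + 9. * b0 * v0 / 16. * (eta ** 2 - 1) ** 2 * (6 + b1 * (eta ** 2 - 1.) - 4. * eta ** 2)

print(birch_murnaghan(10.0, e0=-5.0, b0=0.5, b1=4.0, v0=10.0))  # -5.0, since eta == 1 when V == V0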
def values(self):
    self.expired()
    values = []
    for key in self._dict.keys():
        try:
            value = self._dict[key].get()
            values.append(value)
        except:
            continue
    return values
Will only return the current values
def mgz_to_nifti(filename, prefix=None, gzip=True):
    setup_freesurfer()
    if prefix is None:
        prefix = nl.prefix(filename) + '.nii'
    if gzip and not prefix.endswith('.gz'):
        prefix += '.gz'
    nl.run([os.path.join(freesurfer_home, 'bin', 'mri_convert'), filename, prefix],
           products=prefix)
Convert ``filename`` to a NIFTI file using ``mri_convert``
def _HandleFlowProcessingRequestLoop(self, handler): while not self.flow_handler_stop: with self.lock: todo = self._GetFlowRequestsReadyForProcessing() for request in todo: self.flow_handler_num_being_processed += 1 del self.flow_processing_requests[(request.client_id, request.flow_id)] for request in todo: handler(request) with self.lock: self.flow_handler_num_being_processed -= 1 time.sleep(0.2)
Handler thread for the FlowProcessingRequest queue.
def _sim_atoi_inner(self, str_addr, region, base=10, read_length=None):
    from .. import SIM_PROCEDURES
    strtol = SIM_PROCEDURES['libc']['strtol']
    return strtol.strtol_inner(str_addr, self.state, region, base, True, read_length=read_length)
Return the result of invoking the atoi simprocedure on `str_addr`.
def get(cls, backend_id):
    for backend_class in cls._get_backends_classes():
        if backend_class.id == backend_id:
            return backend_class.create()
    raise InvalidBackendError(
        cls.backend_type, backend_id, get_installed_pools()[cls.backend_type]
    )
Return an instance of backend type
def numberofsamples(self): idline = 0 linenumber = 0 with open(self.samplesheet, "rb") as ssheet: for linenumber, entry in enumerate(ssheet): if "Sample_ID" in entry: idline = linenumber self.samplecount = linenumber - idline printtime('There are {} samples in this run. ' 'Running off-hours module with the following parameters:\n' 'MiSeqPath: {},\n' 'MiSeqFolder: {},\n' 'SampleSheet: {}'.format(self.samplecount, self.miseqpath, self.miseqfolder, self.samplesheet), self.start) self.fastqlinker()
Count the number of samples in the samplesheet
def lin_sim_calc(goid1, goid2, sim_r, termcnts):
    if sim_r is not None:
        info = get_info_content(goid1, termcnts) + get_info_content(goid2, termcnts)
        if info != 0:
            return (2 * sim_r) / info
Computes Lin's similarity measure using pre-calculated Resnik's similarities.
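The relationship encoded above, spelled out: sim_Lin(t1, t2) = 2 * sim_Resnik(t1, t2) / (IC(t1) + IC(t2)), where IC is the information content of each term. A toy numeric check with made-up values (not real GO data):

sim_resnik = 3.2
ic1, ic2 = 4.0, 6.0
print(2 * sim_resnik / (ic1 + ic2))  # 0.64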
def _get_jmx_data(self, instance, jmx_address, tags):
    response = self._rest_request_to_json(
        instance, jmx_address, self.JMX_PATH,
        {'qry': self.HDFS_DATANODE_BEAN_NAME}, tags=tags
    )
    beans = response.get('beans', [])
    return beans
Get namenode beans data from JMX endpoint
def _update_class(self, oldclass, newclass): olddict = oldclass.__dict__ newdict = newclass.__dict__ oldnames = set(olddict) newnames = set(newdict) for name in newnames - oldnames: setattr(oldclass, name, newdict[name]) notify_info0('Added:', name, 'to', oldclass) self.found_change = True for name in (oldnames & newnames) - set(['__dict__', '__doc__']): self._update(oldclass, name, olddict[name], newdict[name], is_class_namespace=True) old_bases = getattr(oldclass, '__bases__', None) new_bases = getattr(newclass, '__bases__', None) if str(old_bases) != str(new_bases): notify_error('Changing the hierarchy of a class is not supported. %s may be inconsistent.' % (oldclass,)) self._handle_namespace(oldclass, is_class_namespace=True)
Update a class object.
async def get_storage_list(self) -> List[Storage]:
    return [
        Storage.make(**x)
        for x in await self.services["system"]["getStorageList"]({})
    ]
Return information about connected storage devices.
def __read_chunk(self, start, size): for _retries in range(3): command = 1504 command_string = pack('<ii', start, size) if self.tcp: response_size = size + 32 else: response_size = 1024 + 8 cmd_response = self.__send_command(command, command_string, response_size) data = self.__recieve_chunk() if data is not None: return data else: raise ZKErrorResponse("can't read chunk %i:[%i]" % (start, size))
read a chunk from buffer
def c2f(r, i, ctype_name):
    ftype = c2f_dict[ctype_name]
    return np.typeDict[ctype_name](ftype(r) + 1j * ftype(i))
Convert strings to complex number instance with specified numpy type.
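What the conversion above amounts to for one concrete case, assuming c2f_dict maps a complex dtype name to its matching float constructor (e.g. 'complex128' -> np.float64; that mapping is an assumption, not shown here):

import numpy as np

r, i = "1.5", "2.0"
print(np.complex128(np.float64(r) + 1j * np.float64(i)))  # (1.5+2j)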
def write_scatterfunction(self, job, scattername):
    scatter_outputs = self.fetch_scatter_outputs(job)
    fn_section = self.write_scatterfunction_header(scattername)
    fn_section += self.write_scatterfunction_lists(scatter_outputs)
    fn_section += self.write_scatterfunction_loop(job, scatter_outputs)
    fn_section += self.write_scatterfunction_outputreturn(scatter_outputs)
    return fn_section
Writes out a python function for each WDL "scatter" object.
def path(self): if self._root_dir is None: override_buildroot = os.environ.get('PANTS_BUILDROOT_OVERRIDE', None) if override_buildroot: self._root_dir = override_buildroot else: self._root_dir = os.path.realpath(self.find_buildroot()) if PY2: self._root_dir = self._root_dir.decode('utf-8') return self._root_dir
Returns the build root for the current workspace.
def from_files(cls, *glyphdata_files): name_mapping = {} alt_name_mapping = {} production_name_mapping = {} for glyphdata_file in glyphdata_files: glyph_data = xml.etree.ElementTree.parse(glyphdata_file).getroot() for glyph in glyph_data: glyph_name = glyph.attrib["name"] glyph_name_alternatives = glyph.attrib.get("altNames") glyph_name_production = glyph.attrib.get("production") name_mapping[glyph_name] = glyph.attrib if glyph_name_alternatives: alternatives = glyph_name_alternatives.replace(" ", "").split(",") for glyph_name_alternative in alternatives: alt_name_mapping[glyph_name_alternative] = glyph.attrib if glyph_name_production: production_name_mapping[glyph_name_production] = glyph.attrib return cls(name_mapping, alt_name_mapping, production_name_mapping)
Return GlyphData holding data from a list of XML file paths.
def _assert_take_fillable(self, values, indices, allow_fill=True, fill_value=None, na_value=None): if allow_fill and fill_value is not None: if (indices < -1).any(): msg = ('When allow_fill=True and fill_value is not None, ' 'all indices must be >= -1') raise ValueError(msg) taken = [lab.take(indices) for lab in self.codes] mask = indices == -1 if mask.any(): masked = [] for new_label in taken: label_values = new_label.values() label_values[mask] = na_value masked.append(np.asarray(label_values)) taken = masked else: taken = [lab.take(indices) for lab in self.codes] return taken
Internal method to handle NA filling of take
def create(self, request): variant_id = request.data.get("variant_id", None) if variant_id is not None: variant = ProductVariant.objects.get(id=variant_id) quantity = int(request.data.get("quantity", 1)) items, bid = utils.get_basket_items(request) in_basket = False for item in items: if item.variant.id == variant.id: item.increase_quantity(quantity) in_basket = True break if not in_basket: item = BasketItem(variant=variant, quantity=quantity, basket_id=bid) item.save() serializer = BasketItemSerializer(self.get_queryset(request), many=True) response = Response(data=serializer.data, status=status.HTTP_201_CREATED) else: response = Response( {"message": "Missing 'variant_id'"}, status=status.HTTP_400_BAD_REQUEST) return response
Add an item to the basket
def _get_snapshot(self):
    if self._snapshot is None:
        self._snapshot = self._get_session().snapshot(
            read_timestamp=self._read_timestamp,
            exact_staleness=self._exact_staleness,
            multi_use=True,
        )
        self._snapshot.begin()
    return self._snapshot
Create snapshot if needed.
def save(self, reload=False):
    self.wrapper.raw.save()
    if reload:
        self.reload()
Save changes to the file.
def name(cls):
    if cls.__name_length__ is None:
        column_type = UnicodeText()
    else:
        column_type = Unicode(cls.__name_length__)
    if cls.__name_blank_allowed__:
        return Column(column_type, nullable=False, unique=True)
    else:
        return Column(column_type, CheckConstraint("name <> ''"), nullable=False, unique=True)
The URL name of this object, unique across all instances of this model
def _changes(plays): changes = {} for play in plays['plays']: task_changes = {} for task in play['tasks']: host_changes = {} for host, data in six.iteritems(task['hosts']): if data['changed'] is True: host_changes[host] = data.get('diff', data.get('changes', {})) if host_changes: task_changes[task['task']['name']] = host_changes if task_changes: changes[play['play']['name']] = task_changes return changes
Find changes in ansible return data
def reset(self):
    self.stream.write(self.normal_cursor)
    self.stream.write(self.csr(0, self.height))
    self.stream.write(self.move(self.height, 0))
Reset scroll window and cursor to default
def create_tipo_roteiro(self):
    return TipoRoteiro(
        self.networkapi_url, self.user, self.password, self.user_ldap)
Get an instance of tipo_roteiro services facade.
def compute_metavar(kwargs):
    metavar = kwargs.get('metavar')
    if not metavar:
        typ = kwargs.get('type', str)
        if typ == list:
            typ = kwargs.get('member_type', str)
        if typ == dict:
            metavar = '"{\'key1\':val1,\'key2\':val2,...}"'
        else:
            type_name = typ.__name__ if typ != newstr else 'str'
            metavar = '<{}>'.format(type_name)
    return metavar
Compute the metavar to display in help for an option registered with these kwargs.
def _gen_input_mask(mask):
    return input_mask(
        shift=bool(mask & MOD_Shift),
        lock=bool(mask & MOD_Lock),
        control=bool(mask & MOD_Control),
        mod1=bool(mask & MOD_Mod1),
        mod2=bool(mask & MOD_Mod2),
        mod3=bool(mask & MOD_Mod3),
        mod4=bool(mask & MOD_Mod4),
        mod5=bool(mask & MOD_Mod5))
Generate input mask from bytemask
def run_winexe_command(cmd, args, host, username, password, port=445):
    creds = "-U '{0}%{1}' //{2}".format(username, password, host)
    logging_creds = "-U '{0}%XXX-REDACTED-XXX' //{1}".format(username, host)
    # build the logging variant from the original command first, so the real
    # password never leaks into the logged string
    logging_cmd = 'winexe {0} {1} {2}'.format(logging_creds, cmd, args)
    cmd = 'winexe {0} {1} {2}'.format(creds, cmd, args)
    return win_cmd(cmd, logging_command=logging_cmd)
Run a command remotely via the winexe executable
def assert_valid_name(name: str) -> str:
    error = is_valid_name_error(name)
    if error:
        raise error
    return name
Uphold the spec rules about naming.
def send(self, task_path, args, kwargs):
    message = {
        'task_path': task_path,
        'capture_response': self.capture_response,
        'response_id': self.response_id,
        'args': args,
        'kwargs': kwargs
    }
    self._send(message)
    return self
Create the message object and pass it to the actual sender.
def GetMemTargetSizeMB(self):
    counter = c_uint()
    ret = vmGuestLib.VMGuestLib_GetMemTargetSizeMB(self.handle.value, byref(counter))
    if ret != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(ret)
    return counter.value
Retrieves the size of the target memory allocation for this virtual machine.
def find_nonzero_constrained_reactions(model):
    lower_bound, upper_bound = helpers.find_bounds(model)
    return [rxn for rxn in model.reactions
            if 0 > rxn.lower_bound > lower_bound or
            0 < rxn.upper_bound < upper_bound]
Return list of reactions with non-zero, non-maximal bounds.
def makePartitions(self): class NetworkMeasures: pass self.nm=nm=NetworkMeasures() nm.degrees=self.network.degree() nm.nodes_= sorted(self.network.nodes(), key=lambda x : nm.degrees[x]) nm.degrees_=[nm.degrees[i] for i in nm.nodes_] nm.edges= self.network.edges(data=True) nm.E=self.network.number_of_edges() nm.N=self.network.number_of_nodes() self.np=g.NetworkPartitioning(nm,10,metric="g")
Make partitions with gmane help.
def _get_plugin_dirs():
    plugin_dirs = [
        os.path.expanduser(os.path.join(USER_CONFIG_DIR, "plugins")),
        os.path.join("rapport", "plugins")
    ]
    return plugin_dirs
Return a list of directories where plugins may be located.
def resolve_remote(self, uri):
    try:
        return super(LocalRefResolver, self).resolve_remote(uri)
    except ValueError:
        return super(LocalRefResolver, self).resolve_remote(
            'file://' + get_schema_path(uri.rsplit('.json', 1)[0])
        )
Resolve a uri or relative path to a schema.
def _download_py3(link, path, __hdr__):
    try:
        req = urllib.request.Request(link, headers=__hdr__)
        u = urllib.request.urlopen(req)
    except Exception as e:
        raise Exception(' Download failed with the error:\n{}'.format(e))
    with open(path, 'wb') as outf:
        for l in u:
            outf.write(l)
    u.close()
Download a file from a link in Python 3.
def to_internal_value(self, value):
    natural_key = value.split("_")
    content_type = ContentType.objects.get_by_natural_key(*natural_key)
    return content_type.id
Convert to integer id.
def check_cache(self, type, data, obj=None): try: id = data['id'] except: return data try: type = obj._get_type() except: pass try: hit = self.item_cache[type][id] except KeyError: pass else: hit._update_data(data) return hit if not obj: obj = self.item_class.get(type, Redmine_Item) new_item = obj(redmine=self, data=data, type=type) self.item_cache.setdefault(type, {})[id] = new_item return new_item
Returns the updated cached version of the given dict
def _generateForOAuthSecurity(self, client_id, secret_id, token_url=None): grant_type="client_credentials" if token_url is None: token_url = "https://www.arcgis.com/sharing/rest/oauth2/token" params = { "client_id" : client_id, "client_secret" : secret_id, "grant_type":grant_type, "f" : "json" } token = self._post(url=token_url, param_dict=params, securityHandler=None, proxy_port=self._proxy_port, proxy_url=self._proxy_url) if 'access_token' in token: self._token = token['access_token'] self._expires_in = token['expires_in'] self._token_created_on = datetime.datetime.now() self._token_expires_on = self._token_created_on + datetime.timedelta(seconds=int(token['expires_in'])) self._valid = True self._message = "Token Generated" else: self._token = None self._expires_in = None self._token_created_on = None self._token_expires_on = None self._valid = False self._message = token
generates a token based on the OAuth security model
def _get_item(self, package, flavor):
    for item in package['items']:
        if item['keyName'] == flavor:
            return item
    raise SoftLayer.SoftLayerError("Could not find valid item for: '%s'" % flavor)
Returns the item for ordering a dedicated host.
def __global_logging_exception_handler(exc_type, exc_value, exc_traceback): if exc_type.__name__ == "KeyboardInterrupt": if is_mp_logging_listener_configured(): shutdown_multiprocessing_logging_listener() else: logging.getLogger(__name__).error( 'An un-handled exception was caught by salt\'s global exception ' 'handler:\n%s: %s\n%s', exc_type.__name__, exc_value, ''.join(traceback.format_exception( exc_type, exc_value, exc_traceback )).strip() ) sys.__excepthook__(exc_type, exc_value, exc_traceback)
This function will log all un-handled python exceptions.
def rehighlight(self):
    start = time.time()
    QtWidgets.QApplication.setOverrideCursor(
        QtGui.QCursor(QtCore.Qt.WaitCursor))
    try:
        super(SyntaxHighlighter, self).rehighlight()
    except RuntimeError:
        pass
    QtWidgets.QApplication.restoreOverrideCursor()
    end = time.time()
    _logger().debug('rehighlight duration: %fs' % (end - start))
Rehighlight the entire document, may be slow.
def saveBestScore(self):
    if self.score > self.best_score:
        self.best_score = self.score
    try:
        with open(self.scores_file, 'w') as f:
            f.write(str(self.best_score))
    except:
        return False
    return True
save current best score in the default file
def folder_path_for_package(cls, package: ecore.EPackage):
    parent = package.eContainer()
    if parent:
        return os.path.join(cls.folder_path_for_package(parent), package.name)
    return package.name
Returns path to folder holding generated artifact for given element.
def parse_wiki_terms(doc): results = [] last3 = ['', '', ''] header = True for line in doc.split('\n'): last3.pop(0) last3.append(line.strip()) if all(s.startswith('<td>') and not s == '<td></td>' for s in last3): if header: header = False continue last3 = [s.replace('<td>', '').replace('</td>', '').strip() for s in last3] rank, term, count = last3 rank = int(rank.split()[0]) term = term.replace('</a>', '') term = term[term.index('>')+1:].lower() results.append(term) assert len(results) in [1000, 2000, 1284] return results
who needs an html parser. fragile hax, but checks the result at the end
def metop20kmto1km(lons20km, lats20km): cols20km = np.array([0] + list(range(4, 2048, 20)) + [2047]) cols1km = np.arange(2048) lines = lons20km.shape[0] rows20km = np.arange(lines) rows1km = np.arange(lines) along_track_order = 1 cross_track_order = 3 satint = SatelliteInterpolator((lons20km, lats20km), (rows20km, cols20km), (rows1km, cols1km), along_track_order, cross_track_order) return satint.interpolate()
Getting 1km geolocation for metop avhrr from 20km tiepoints.
def safe_request( url, method=None, params=None, data=None, json=None, headers=None, allow_redirects=False, timeout=30, verify_ssl=True, ): session = requests.Session() kwargs = {} if json: kwargs['json'] = json if not headers: headers = {} headers.setdefault('Content-Type', 'application/json') if data: kwargs['data'] = data if params: kwargs['params'] = params if headers: kwargs['headers'] = headers if method is None: method = 'POST' if (data or json) else 'GET' response = session.request( method=method, url=url, allow_redirects=allow_redirects, timeout=timeout, verify=verify_ssl, **kwargs ) return response
A slightly safer version of `request`.
def previous_theme(self):
    theme = self.term.theme_list.previous(self.term.theme)
    while not self.term.check_theme(theme):
        theme = self.term.theme_list.previous(theme)
    self.term.set_theme(theme)
    self.draw()
    message = self.term.theme.display_string
    self.term.show_notification(message, timeout=1)
Cycle to preview the previous theme from the internal list of themes.
def add(**kwargs):
    output, err = cli_syncthing_adapter.add(**kwargs)
    click.echo("%s" % output, err=err)
Make a directory shareable.
def init():
    if "_" not in builtins.__dict__:
        os.environ["LANGUAGE"] = inginious.input.get_lang()
        if inginious.DEBUG:
            gettext.install("messages", get_lang_dir_path())
        else:
            gettext.install("messages", get_lang_dir_path())
Install gettext with the default parameters