content stringlengths 35 762k | sha1 stringlengths 40 40 | id int64 0 3.66M |
|---|---|---|
def svd(A):
"""
Singular Value Decomposition
Parameters
----------
A: af.Array
A 2 dimensional arrayfire array.
Returns
-------
(U,S,Vt): tuple of af.Arrays
- U - A unitary matrix
- S - An array containing the elements of diagonal matrix
- Vt - A... | 7b7d48dc1782d1e02eca01b657895372170caf6c | 3,649,740 |
def parse_content_type_header(value):
""" maintype "/" subtype *( ";" parameter )
The maintype and substype are tokens. Theoretically they could
be checked against the official IANA list + x-token, but we
don't do that.
"""
ctype = ContentType()
recover = False
if not value:
ct... | 24722c1dd5784896fd6aa8b39cd29eb76fec155a | 3,649,741 |
import numpy
def csr_matrix_multiply(S, x): # noqa
"""Multiplies a :class:`scipy.sparse.csr_matrix` S by an object-array vector x.
"""
h, w = S.shape
result = numpy.empty_like(x)
for i in range(h):
result[i] = sum(S.data[idx]*x[S.indices[idx]] # noqa pylint:disable=unsupported-assignme... | 77e1630cbdd59f53b1b2885b731e73a14fb18b35 | 3,649,742 |
def calculate_sem_IoU(pred_np, seg_np, num_classes):
"""Calculate the Intersection Over Union of the predicted classes and the ground truth
Args:
pred_np (array_like): List of predicted class labels
seg_np (array_like): List of ground truth labels
num_classes (int): Number of classes in... | e8e360cb8aad0f2226aa54c88c01485840017f2d | 3,649,743 |
def _invert(M, eps):
"""
Invert matrices, with special fast handling of the 1x1 and 2x2 cases.
Will generate errors if the matrices are singular: user must handle this
through his own regularization schemes.
Parameters
----------
M: np.ndarray [shape=(..., nb_channels, nb_channels)]
... | 119c16ad816dd37b7e5eb23c121ef5affc8851f5 | 3,649,744 |
from pathlib import Path
def read_densecsv_to_anndata(ds_file: Path):
    """Load a dense, comma-separated text file into an AnnData object.

    Thin wrapper: delegates all work to ``read_densemat_to_anndata``,
    fixing the field separator to a comma.
    """
    separator = ","
    return read_densemat_to_anndata(ds_file, sep=separator)
def _singleton(name):
"""Returns a singleton object which represents itself as `name` when printed,
but is only comparable (via identity) to itself."""
return type(name, (), {'__repr__': lambda self: name})() | b07003e1716115864bf1914d4b523b36d0f0471f | 3,649,747 |
def get_zone(*, zone_name: str):
""" Get zone with given zone name.
Args:
zone_name: zone name, e.g. "haomingyin.com"
Returns:
json: zone details
"""
params = dict(name=zone_name)
zones = _get("zones", params=params)
if not zones:
raise CloudflareAPIError(f"Unable to ... | 5e79ec900af7e5cc4d457d04292e55e2e3abc9ec | 3,649,748 |
def execute(connection, cmdline, **kwargs):
"""generic function to execute command for device
| Parameters:
| connection (Adaptor): connection of device
| cmdline (str): command line
| kwargs (dict): additional keyword arguments for command line execution
| Returns:
| str: ou... | 7f3424cb8a747fab87a5a67c880ec755d9c9cb96 | 3,649,750 |
def json_to_numpy_mask(shapes, width, height):
"""Converts JSON labels with pixel classifications into NumPy arrays"""
img = Image.new("L", (width, height), 0)
for shape in shapes:
if shape["label"] == "barrel":
barrel_lst = [tuple(i) for i in shape["points"]]
ImageDraw.Draw(... | 33757246478d854d15f71a0737174ac6952514ef | 3,649,751 |
import types
import typing
def _format_call(value: ast3.Call, context: types.Context) -> typing.Text:
    """Render a call node like ``print(a*b, foo=x)`` as source text.

    Tries the single-line layout first; when the horizontal formatter
    signals it cannot fit, falls back to the multi-line layout.
    """
    try:
        rendered = _format_call_horizontal(value, context)
    except errors.NotPossible:
        rendered = _format_call_vertical(value, context)
    return rendered
def get_tags(repo_dir):
"""
_get_tags_
returns a list of tags for the given repo, ordered as
newest first
"""
repo = git.Repo(repo_dir)
tags_with_date = {
tag.name: tag.commit.committed_date
for tag in repo.tags
}
return sorted(tags_with_date, key=tags_with_date.get... | aa5462ff0b15501cf486a2bf49837f0dd60ecfaf | 3,649,753 |
def readh5(filename, GroupName=None):
"""
Read the HDF5 file 'filename' into a class. Groups within the hdf5 file are
by default loaded as sub classes, unless they include a _read_as attribute
(see sharpy.postproc.savedata). In this case, group can be loaded as classes,
dictionaries, lists or tuples... | 9ab33071916a634da6ddc68df56fe29429ef6313 | 3,649,754 |
def calc_E_E_hs_d_t(W_dash_k_d_t, W_dash_s_d_t, W_dash_w_d_t, W_dash_b1_d_t, W_dash_b2_d_t, W_dash_ba1_d_t,
theta_ex_d_Ave_d,
L_dashdash_ba2_d_t):
"""1時間当たりの給湯機の消費電力量 (kWh/h) (1)
Args:
W_dash_k_d_t(ndarray): 1時間当たりの台所水栓における太陽熱補正給湯負荷 (MJ/h)
W_dash_s_d_t(ndarra... | d76a65d4d30b0c2cf59b837e188473827425d576 | 3,649,755 |
def best_promo(order):
    """Return the largest discount offered by any promo in ``promos``.

    Each entry in the module-level ``promos`` list is a callable taking
    the order and returning a discount amount.
    """
    quotes = (promo(order) for promo in promos)
    return max(quotes)
def random_joint_positions(robot):
"""
Generates random joint positions within joint limits for the given robot.
@type robot: orpy.Robot
@param robot: The OpenRAVE robot
@rtype: np.array
@return:
"""
# Get the limits of the active DOFs
lower, upper = robot.GetActiveDOFLimits()
positions = lower + n... | 49fe770a8cc22945e79c892d54754c50f19974e8 | 3,649,757 |
def test_cancel_examples(example):
"""
We can't specify examples in test_fuzz_cancel (because we use data, see
https://hypothesis.readthedocs.io/en/latest/data.html#interactive-draw),
so we have this here for explicit examples.
"""
stream_req, stream_resp, draws = example
def draw(lst):
... | c3a3a970a77f136c39e86666c0485163d0fbb408 | 3,649,758 |
import pickle
def fetch_pickle(filename):
"""
Fetches any variable saved into a picklefile with the given filename.
Parameters:
filename (str): filename of the pickle file
Returns:
variable (any pickle compatible type): variable that was saved into the picklefile.
"""... | 172c18520619d102b520658949d2464d5ecfb05c | 3,649,759 |
def check_clockwise(poly):
"""Checks if a sequence of (x,y) polygon vertice pairs is ordered clockwise or not.
NOTE: Counter-clockwise (=FALSE) vertice order reserved for inner ring polygons"""
clockwise = False
if (sum(x0*y1 - x1*y0 for ((x0, y0), (x1, y1)) in zip(poly, poly[1:] + [poly[0]]))) < 0:
... | 5e9f8fba6cd11e33dfe60a89e62eeac2ac24c805 | 3,649,760 |
def bookList(request):
"""测试"""
# 查询书籍信息:使用默认的管理器对象 : 在管理器上调用过滤器方法会返回查询集
# book_list = BookInfo.objects.all()
# 查询书籍信息:使用自定义的管理器对象
# book_list = BookInfo.books.all()
# 以下代码演示,自定义管理器的类给模型类新增初始化方法: 类比books.all()
# book1 = BookInfo.books.create_model('zxc')
# book2 = BookInfo.books.creat... | b9b05f259d5cdb9d0570268c0f08eaafc8ba6cc1 | 3,649,761 |
def format_stats(stats):
    """Render a statistics mapping as ``key - value`` lines for a table.

    Lines are joined without a trailing newline; an empty mapping
    yields an empty string.
    """
    return '\n'.join(f'{key} - {value}' for key, value in stats.items())
def imcrop(img, bboxes, scale=1.0, pad_fill=None):
"""Crop image patches.
3 steps: scale the bboxes -> clip bboxes -> crop and pad.
Args:
img (ndarray): Image to be cropped.
bboxes (ndarray): Shape (k, 4) or (4, ), location of cropped bboxes.
scale (float, optional): Scale ratio of... | 244d6c39410c5d51780a8d3a261810986c17d779 | 3,649,763 |
def timestamp2str(ts):
    """Format a timestamp-like object as ``YYYY-MM-DD HH:MM:SS``.

    Works with any object exposing ``.date()`` and ``.time()`` that
    support ``strftime`` (e.g. pandas Timestamp, datetime.datetime).
    """
    day_part = ts.date().strftime("%Y-%m-%d")
    clock_part = ts.time().strftime("%H:%M:%S")
    return f"{day_part} {clock_part}"
from operator import index
def define_class_functions(processes, stages, progress):
"""
Define and return class of unit tests for stand-alone functions
for the given configuration.
"""
class Test_functions(TestCase):
def test_mapreduce(self):
logger = log() if progress else Non... | 0dc8df39e49f1e7591be7a7b8e80dc1266714cc4 | 3,649,765 |
def concept(*reference):
"""Reference to a semantic concept.
Parameters
----------
*reference : :obj:`str`
Keys pointing to the ruleset defining this concept in the rules file of
an ontology.
Returns
-------
:obj:`CubeProxy`
A textual reference to the concept that can be solved by ... | c3e01f48ca962c5312a0cf8d6deb66eecc062078 | 3,649,766 |
import torch
def collate_tensors(batch, stack_tensors=torch.stack):
""" Collate a list of type ``k`` (dict, namedtuple, list, etc.) with tensors.
Inspired by:
https://github.com/pytorch/pytorch/blob/master/torch/utils/data/_utils/collate.py#L31
Args:
batch (list of k): List of rows of type `... | cbd1098188e3d47b705e25edeae636624ebbec47 | 3,649,767 |
def build_boundaries_layers(cyt_coord, nuc_coord, rna_coord):
"""
Parameters
----------
cyt_coord : np.ndarray, np.int64
Array of cytoplasm boundaries coordinates with shape (nb_points, 2).
nuc_coord : np.ndarray, np.int64
Array of nucleus boundaries coordinates with shape (nb_point... | a99efab6ccc3044c04df330ca9c3ce0ebbf0c413 | 3,649,768 |
def predicted_actual_chart(actual, predicted, title="Predicted vs Actual Values"):
"""Predicted vs actual values curve."""
source = pd.DataFrame({"x": actual, "y": predicted})
scatter = scatter_chart(source, "Actual", "Residual", title=title)
vmin = source.min().min()
vmax = source.max().max()
... | 91588a9d79bfa8eaea39067042b7e4b3c6784b7e | 3,649,769 |
from operator import mul
from operator import inv
def interpolate(R1,R2,u):
"""Interpolate linearly between the two rotations R1 and R2. """
R = mul(inv(R1),R2)
m = moment(R)
angle = vectorops.norm(m)
if angle==0: return R1
axis = vectorops.div(m,angle)
return mul(R1,rotation(axis,angle*u)... | d4aaa976e52b6f44f44c4f26eccb59f1b85f9f0b | 3,649,772 |
def plot_spikes(
spikes: dict,
ax: plt.Axes = None,
markersize: int = None,
color: tp.Union[str, tp.Any] = "k",
) -> plt.Axes:
"""Plot Spikes returned by NeuroDriver's OutputRecorder"""
if ax is None:
fig = plt.gcf()
ax = fig.add_subplot()
for n, (name, ss) in enumerate(spik... | d757c9c342e34e45820ee81f45e0bc59b8cbc277 | 3,649,773 |
def boardToString(board):
"""
return a string representation of the current board.
"""
# global board
# b = board
rg = range(board.size())
s = "┌────┬────┬────┬────┐\n|"+"|\n╞════╪════╪════╪════╡\n|".join(
['|'.join([getCellStr(board, x, y) for x in rg]) for y in rg])
s = "\n" + s ... | 2ea53d0ce7448ab0073176195195f1c4fb028a71 | 3,649,774 |
def create_data_ops(batch_size, num_elements_min_max):
"""Returns graphs containg the inputs and targets for classification.
Refer to create_data_dicts_tf and create_linked_list_target for more details.
Args:
batch_size: batch size for the `input_graphs`.
num_elements_min_max: a 2-`tuple` of `int`s whic... | fd38b1a7d0d8e9e4633fa6fcefc5b1c1614c97fc | 3,649,775 |
def location_matches(stmt):
"""Return a matches_key which takes geo-location into account."""
if isinstance(stmt, Event):
context_key = get_location(stmt)
matches_key = str((stmt.concept.matches_key(), context_key))
elif isinstance(stmt, Influence):
subj_context_key = get_location(st... | be261d2dcf7be09330542a4cd2c18b3261ef0eca | 3,649,777 |
def parse_files(files, options):
"""Build datastructures from lines"""
lines = []
for line in finput(files, openhook=compr):
if (type(line) is bytes): line = line.decode('utf-8')
lines.append(line.rstrip().split("|"))
db = {}
db['rp'], db['users'], db['msgprof'], db['logins'] = {}, ... | 926f805d87ead9af1099f39bfb57be0b4b775e0a | 3,649,779 |
def resize_preserving_order(nparray: np.ndarray, length: int) -> np.ndarray:
"""Extends/truncates nparray so that ``len(result) == length``.
The elements of nparray are duplicated to achieve the desired length
(favours earlier elements).
Constructs a zeroes array of length if nparray is emp... | e074b1135d2192a9b0cf2d9b91f6d99f22408220 | 3,649,780 |
def push(service, key, data):
"""Push
Called to push data to the sync cache
Args:
service (str): The name of the service using the sync
key (mixed): The key to push the data onto
data (mixed): The data to be pushed
Returns:
bool|string
"""
# Make sure the service and key are strings
if not isinstance... | 2be85735b1c4965e5a0cdf35b5f62267ce31cc6e | 3,649,781 |
def get_db_filenames(database_name):
""" This is used to populate the dropdown menu, so users can
only access their data if their name is in the user column"""
con = sql.connect(database_name)
c = con.cursor()
names_list = []
for row in c.execute(
"""SELECT Dataset_Name FROM master_t... | 7ffdd7cfb24d135ddc20353799dd0c7d21504232 | 3,649,783 |
import string
def Calculate(values, mode=0, bin_function=None):
"""Return a list of (value, count) pairs, summarizing the input values.
Sorted by increasing value, or if mode=1, by decreasing count.
If bin_function is given, map it over values first.
"""
if bin_function:
values = list(m... | bb3f40eec7733d948e66e00c3bafdd032acb6372 | 3,649,784 |
import time
def getToday(format=3):
"""返回今天的日期字串"""
t = time.time()
date_ary = time.localtime(t)
if format == 1:
x = time.strftime("%Y%m%d", date_ary)
elif format == 2:
x = time.strftime("%H:%M", date_ary)
elif format == 3:
x = time.strftime("%Y/%m/%d", date_ary)
el... | 900c0a0d42dc2220c5e5030eeebd858e3e6a41bf | 3,649,785 |
def _get_referenced(body, start, end, no_header, clean, as_xml, as_list):
"""Retrieve data from body between some start and end."""
if body is None or start is None or end is None:
return None
content_list = body.get_between(
start, end, as_text=False, no_header=no_header, clean=clean
)
... | 2b3e1ce008461711c37e4af6dda7dc7d2e332d9e | 3,649,786 |
import torch
def info(filepath: str) -> AudioMetaData:
"""Get signal information of an audio file.
Args:
filepath (str): Path to audio file
Returns:
AudioMetaData: meta data of the given audio.
"""
sinfo = torch.ops.torchaudio.sox_io_get_info(filepath)
return AudioMetaData(si... | e3ff5929f563977c44f25f8f51f3a7014f43b397 | 3,649,787 |
def _override_regex_to_allow_long_doctest_lines():
"""Allow too-long lines for doctests.
Mostly a copy from `pylint/checkers/format.py`
Parts newly added are marked with comment, "[PYTA added]: ..."
"""
def new_check_lines(self, lines, i):
"""check lines have less than a maximum number of c... | 9b9d1b5eefaa9e61d1e8915aef988fbc25756d1a | 3,649,788 |
import types
def handle(*, artifacts: oa_types.SimplePropertyArtifacts) -> types.TColumn:
    """
    Handle a simple property by building its SQLAlchemy column.

    Args:
        artifacts: The artifacts of the simple property.

    Returns:
        The constructed column.

    """
    column = facades.sqlalchemy.simple.construct(artifacts=artifacts)
    return column
def rollout_representation(representation_model, steps, obs_embed, action, prev_states, done):
"""
Roll out the model with actions and observations from data.
:param steps: number of steps to roll out
:param obs_embed: size(time_steps, batch_size, n_agents, embedding_size)
:param act... | 2736609ab54d477c3fad2ab7a4e3270772517a08 | 3,649,790 |
def generate_random_ast(schema, rng):
"""End-to-end simulator for AST of Core DSL."""
distributions = [schemum[1] for schemum in schema]
partition_alpha = rng.gamma(1,1)
partition = generate_random_partition(partition_alpha, len(distributions), rng)
row_dividers = [generate_random_row_divider(rng) f... | 9547f815ad07af33b182c7edf7ea646ec9fdd49f | 3,649,792 |
def _opcode_to_string(opcode):
    """Look up the printable name of a REIL opcode.

    Args:
        opcode (reil.Opcode): Opcode to render.

    Returns:
        The display string registered for the opcode in the
        module-level ``_opcode_string_map`` table.
    """
    # Direct table lookup; an unregistered opcode raises KeyError.
    return _opcode_string_map[opcode]
def get_shot(shot):
    """Fetch a shot entity from table storage as a dictionary.

    Looks the entity up by the ``PartitionKey``/``RowKey`` carried in
    *shot*; the storage client raises if the entity is not found.
    """
    partition = shot['PartitionKey']
    row = shot['RowKey']
    return __get_conn().get_entity(__table_name(), partition, row)
def report_value_count(data_frame: pd.DataFrame, column: str, digits: int = 2) -> str:
"""
Report the number and percentage of non-empty values in the column.
Parameters
----------
data_frame : pandas.DataFrame
A data frame with one or more columns.
column : str
The name of the ... | d31d9e8bae216f7931f96ec08992d6319d4c3645 | 3,649,795 |
def input_fn(is_training, data_dir, batch_size, num_epochs=1,
num_parallel_calls=1, multi_gpu=False):
"""Input_fn using the tf.data input pipeline for CIFAR-10 dataset.
Args:
is_training: A boolean denoting whether the input is for training.
data_dir: The directory containing the input data.
... | 5d27f5a04b409ad4b04ce9885b592b0454ae0b4b | 3,649,796 |
def getWinners(players, game):
"""
Return a list of winners
:param players:
:param game:
:return:
"""
# get score for each player
for i in range(0, len(game.players)):
game.players[i].credits = scoreFor(i, game)
currentPlayer = whoseTurn(game)
# add 1 to players who ha... | a872d4f9ed596e31ae9a129c9054f9bb95a6e765 | 3,649,797 |
def read_xsf(filepath):
"""
:param filepath filepath of the xtd file
:return cell and atoms need to build the pymatflow.structure.crystal object
"""
a = ase.io.read(filepath, format='xsf')
cell = a.cell.tolist()
atoms = []
for i in range(len(a.arrays['numbers'])):
for item in bas... | 97152eb3d18752e78689598bb0c8603c13051623 | 3,649,798 |
def elina_abstract0_bound_linexpr(man, a, linexpr):
"""
Returns the ElinaInterval taken by an ElinaLinexpr0 over an ElinaAbstract0.
Parameters
----------
man : ElinaManagerPtr
Pointer to the ElinaManager.
a : ElinaAbstract0Ptr
Pointer to the ElinaAbstract0.
linexpr : Eli... | 2764507b79f3326741496a92642be75b5afb8ce4 | 3,649,799 |
import random
import collections
def load_papertext(train_rate=0.8, dev_rate=0.1, test_rate=0.1, max_length=50, download_from_label_studio=True):
"""
Aspect Base sentiment analysis
:param kind: 是加载papertext数据,还是dem8的数据
:return:
:rtype:
"""
export_dir = "/opt/nlp/data/papertext/"
if dow... | b0c4747aaf61dce82612162652218ce001a7f17e | 3,649,800 |
import json
def _load_cmake_spec():
    """Load and return the CMake spec from disk.

    Returns:
        The parsed JSON content of the spec file, or ``None`` when the
        file is missing, unreadable, or contains invalid JSON.
    """
    try:
        with open(CMAKE_SPEC_FILE()) as fp:
            return json.load(fp)
    except (OSError, ValueError):
        # IOError is an alias of OSError in Python 3, so OSError alone
        # covers missing/unreadable files; ValueError covers malformed
        # JSON (json.JSONDecodeError subclasses ValueError).
        return None
def read():
"""
Read temperature
:return: temperature
"""
# global ds18b20
location = '/sys/bus/w1/devices/' + ds18b20 + '/w1_slave'
tfile = open(location)
text = tfile.read()
tfile.close()
secondline = text.split("\n")[1]
temperaturedata = secondline.split(" ")[9]
temper... | 7e4c689d5cce6b28c28314eb7e1773e9af1a5061 | 3,649,804 |
import time
def wine(root):
"""Title of Database: Wine recognition data
Updated Sept 21, 1998 by C.Blake : Added attribute information
These data are the results of a chemical analysis of
wines grown in the same region in Italy but derived from three
different cultivars.
The analysis dete... | f2a9a3b66b276b563dc03919becc326f35d77b3a | 3,649,805 |
def initialize_scenario_data():
    """Will initialize the Scenario Data.

    :return an empty ScenarioData named tuple (empty actor and
        company registries)
    :rtype ScenarioData
    """
    # Fresh dicts per call so scenarios never share state.
    return ScenarioData({}, {})
def dynamicMass(bulk_density, lat, lon, height, jd, velocity, decel, gamma=1.0, shape_factor=1.21):
""" Calculate dynamic mass at the given point on meteor's trajectory.
Either a single set of values can be given (i.e. every argument is a float number), or all arguments
must be numpy arrays.
... | 48920ecaef4c039672a387f4da45297861b6da56 | 3,649,807 |
def input_fn_tfrecords(files_name_pattern, num_epochs, batch_size, mode):
"""
Input functions which parses TFRecords.
:param files_name_pattern: File name to TFRecords.
:param num_epochs: Number of epochs.
:param batch_size: Batch size.
:param mode: Input function mode.
:return: features and... | bd2b5bf41c2ea9fbb28d7e2cdc5c8f22e8bbac93 | 3,649,808 |
def validate(number):
"""Check if the number provided is a valid RUC number. This checks the
length, formatting, check digit and check sum."""
number = compact(number)
if len(number) != 13:
raise InvalidLength()
if not number.isdigit():
raise InvalidFormat()
if number[:2] < '01' ... | c09602c8b3301c6f1d4d467a1b7bfd607656c436 | 3,649,809 |
def parse_raw(data: bytes) -> dict:
"""
Parse the contents of an environment retrieved from flash or memory
and provide an equivalent dictionary.
The provided *data* should being at the start of the variable definitions.
It **must not** contain the ``env_t`` metadata, such as the CRC32 word
and... | c40c08a099d7468a4ec19da90ce9062d8ddd6ed1 | 3,649,810 |
from typing import List
def _list_registered_paths() -> List[str]:
    """List available paths registered to this service.

    Only rules under the ``/api/v1`` prefix are reported.
    """
    return [
        str(rule)
        for rule in application.url_map.iter_rules()
        if str(rule).startswith("/api/v1")
    ]
def input_fn(request_body, request_content_type):
"""An input_fn that loads a pickled numpy array"""
if request_content_type == "application/python-pickle":
array = np.load(BytesIO(request_body), allow_pickle=True)
return array
else:
raise Exception("Please provide 'application/pytho... | 0f6387dffc3ade2097888a92ef1af99f4d367ef8 | 3,649,812 |
def game(x_train, x_test, y_train, y_test, algo='rf', show_train_scores=True):
"""Standard Alogrithms fit and return scores.
* Default Random State is set as 192 when posible.
* Available models - dc, rf, gb, knn, mc_ovo_rf, mc_ova_rf
"""
if algo is 'dc':
clf = clf = DummyClassifier(strateg... | 9a225f04d5d883bc70c4f4f9036ddfee7b206dbc | 3,649,813 |
def get_convolutional_model(vocab_size: int,
input_length: int,
num_classes: int,
embedding_size: int=300,
model_size: str='small'
) -> Model:
"""Create a character convolution... | aafd9fe6141a05c433508ff0a9583d9c42a7de5b | 3,649,814 |
def parse_test_config(doc):
    """Return the root ``configuration`` element of a test-config DOM.

    Raises:
        RuntimeError: If the document root is not ``configuration``.
    """
    root = doc.documentElement
    if root.tagName != 'configuration':
        raise RuntimeError('expected configuration tag at root')
    return root
import textwrap
def _get_control_vars(control_vars):
"""
Create the section of control variables
Parameters
----------
control_vars: str
Functions to define control variables.
Returns
-------
text: str
Control variables section and header of model variables section.
... | 614a6ca5bc8ac7354f63bfceabaff4eb4b93208a | 3,649,816 |
def echo():
    """Echo the raw request body back to the client, with a trailing newline."""
    # NOTE(review): Flask's request.get_data() returns bytes on Python 3,
    # so "+ '\n'" assumes a str body (Python 2 or a text-returning shim) —
    # confirm against the framework actually in use before relying on this.
    return request.get_data() + '\n'
import logging
def _get_signature_def(signature_def_key, export_dir, tags):
"""Construct a `SignatureDef` proto."""
signature_def_key = (
signature_def_key or
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY)
metagraph_def = saved_model_cli.get_meta_graph_def(export_dir, tags)
try:
sign... | d0bedd323fb68ad41553034a08b64dc73f85faf3 | 3,649,819 |
def illuminance_to_exposure_value(E, S, c=250):
"""
Computes the exposure value :math:`EV` from given scene illuminance
:math:`E` in :math:`Lux`, *ISO* arithmetic speed :math:`S` and
*incident light calibration constant* :math:`c`.
Parameters
----------
E : array_like
Scene illumina... | 7c03f816e801f04735687a2a2adb6f4969877bb2 | 3,649,820 |
from typing import Counter
def feedback(code, guess):
"""
Return a namedtuple Feedback(blacks, whites) where
blacks is the number of pegs from the guess that
are correct in both color and position and
whites is the number of pegs of the right color but wrong position.
"""
blacks = sum(g ==... | bab57da2d7c60869988d6c24b69b8eab1c7da173 | 3,649,821 |
from datetime import date
from .models import PlacedDateBet
def find_winning_dates(placed_bets, winning_date):
"""
Finds the placed bets with the dates closest to the winning date
:param placed_bets: iterable of PlacedDateBet
:param winning_date: datetime.date
:return: list of winning PlacedDateBe... | 73315f2bebfcc0290f9372af935ded78011c7d4b | 3,649,823 |
def create_greedy_policy(Q):
"""
Creates a greedy policy based on Q values.
Args:
Q: A dictionary that maps from state -> action values
Returns:
A function that takes an observation as input and returns a vector
of action probabilities.
"""
def policy_fn(observation):
... | 01966964034504454e3be9926236706371c626c8 | 3,649,824 |
def get_tags(rule, method, **options):
"""
gets the valid tags for given rule.
:param pyrin.api.router.handlers.base.RouteBase rule: rule instance to be processed.
:param str method: http method name.
:rtype: list[str]
"""
return get_component(SwaggerPackage.COMPONENT_NAME).get_tags(rule,... | 4671d1d9c66934d6b22bee74801d07b30635b3b6 | 3,649,825 |
def get_paybc_transaction_request():
    """Return a stub payment transaction request.

    Fixed localhost URLs make this suitable only for tests.
    """
    stub_request = {
        'clientSystemUrl': 'http://localhost:8080/abcd',
        'payReturnUrl': 'http://localhost:8081/xyz',
    }
    return stub_request
def pretreatment(filename):
"""pretreatment"""
poems = []
file = open(filename, "r")
for line in file: #every line is a poem
#print(line)
title, poem = line.strip().split(":") #get title and poem
poem = poem.replace(' ','')
if '_' in poem or '《' in poem or '[' in poem o... | 5aa85b3bda72d3efb3067ebcc06d7f4038d9990e | 3,649,828 |
def forward_fdm(order, deriv, adapt=1, **kw_args):
"""Construct a forward finite difference method.
Further takes in keyword arguments of the constructor of :class:`.fdm.FDM`.
Args:
order (int): Order of the method.
deriv (int): Order of the derivative to estimate.
adapt (int, opti... | 7b5c46fcdfc1a186079b2a4f94a129b8f79dbfde | 3,649,829 |
import requests
def get_list_by_ingredient(ingredient):
""" this should return data for filtered recipes by ingredient """
res = requests.get(f'{API_URL}/{API_KEY}/filter.php', params={"i":ingredient})
return res.json() | 5bb34ffe635499a93decc5d4c080c638ee92c1b5 | 3,649,831 |
def chk_sudo():
"""\
Type: decorator.
The command will only be able to be executed by the author if the author is owner or have permissions.
"""
async def predicate(ctx):
if is_sudoers(ctx.author):
return True
await ctx.message.add_reaction("🛑")
raise excepts.Not... | 45ddad31e761c9cf227a19fb78e3b3f52414c966 | 3,649,832 |
def have_same_items(list1, list2):
""" Проверяет состоят ли массивы list1 и list2 из одинакового
числа одних и тех же элементов
Parameters
----------
list1 : list[int]
отсортированный по возрастанию массив уникальных элементов
list2 : list[int]
массив... | 2973a1961e25686fcbd2003dd366429cbd4c67eb | 3,649,833 |
def analyze(geometry_filenames, mode='global', training_info=None, stride=None,
box_size=None, configs=None, descriptor=None, model=None,
format_=None, descriptors=None, save_descriptors=False,
save_path_descriptors=None, nb_jobs=-1, **kwargs):
"""
Apply ARISE to given list ... | eeec9ac33a91b41b8a90f825aef0fc7605bdbf58 | 3,649,834 |
def get_params(name, seed):
"""Some default parameters.
Note that this will initially include training parameters that you won't need for metalearning since we have our own training loop."""
configs = []
overrides = {}
overrides["dataset_reader"] = {"lazy": True}
configs.append(Params(override... | 02d70be07a2d7afe793e657d6fb38fefe99171ce | 3,649,835 |
def rgb2hex(rgb):
    """Convert an RGB 3-tuple into an uppercase hexadecimal color string.

    EXAMPLE
    -------
    >>> rgb2hex((0,0,255))
    '#0000FF'
    """
    r, g, b = rgb
    return f'#{r:02X}{g:02X}{b:02X}'
from typing import Union
import re
from typing import Optional
def path_regex(
path_regex: Union[str, re.Pattern], *, disable_stage_removal: Optional[bool] = False
):
"""Validate the path in the event against the given path pattern.
The following APIErrorResponse subclasses are used:
PathNotFound... | 5c54d71a20fa7795b9e6eefa508de5b8516378a6 | 3,649,837 |
async def root():
    """Health check endpoint: report service liveness."""
    payload = {"status": "OK"}
    return payload
from typing import Dict
from typing import List
def build_foreign_keys(
resources: Dict[str, dict],
prune: bool = True,
) -> Dict[str, List[dict]]:
"""Build foreign keys for each resource.
A resource's `foreign_key_rules` (if present) determines which other resources will
be assigned a foreign ke... | 96cb032a03445400eeee57a23a4024ae06f62573 | 3,649,839 |
import ipaddress
def port_scan(ip):
"""Run a scan to determine what services are responding.
Returns nmap output in JSON format.
"""
# validate input
valid_ip = ipaddress.ip_address(ip)
# nnap requires a `-6` option if the target is IPv6
v6_flag = '-6 ' if valid_ip.version == 6 else ''
... | c33cd56635338d3476e4ce5348376a1f6b2cfd68 | 3,649,840 |
def create_table(p, table_name, schema):
"""Create a new Prism table.
Parameters
----------
p : Prism
Instantiated Prism class from prism.Prism()
table_name : str
The name of the table to obtain details about. If the default value
of None is specified, details regarding fir... | 43c8c789d4e212d2d98d68f4f22e3f0fb0a97552 | 3,649,841 |
def get_args():
    """Parse command-line arguments and return them as a dict.

    The dict form is kept for backwards compatibility with the old
    style of argument parsing; ``process_args`` mutates the parsed
    namespace in place before conversion.
    """
    namespace = make_parser().parse_args()
    process_args(namespace)
    return vars(namespace)
from typing import Iterable
def run_asm_pprinter(ir: gtirb.IR, args: Iterable[str] = ()) -> str:
"""
Runs the pretty-printer to generate an assembly output.
:param ir: The IR object to print.
:param args: Any additional arguments for the pretty printer.
:returns: The assembly string.
"""
a... | 8d71a4b91f90cb449f65d5c95ec740d78836a071 | 3,649,843 |
import re
def fix_ccdsec(hdu):
""" Fix CCDSEC keywords in image extensions """
section_regexp = re.compile(SECTION_STRING)
# In unbinned space
ccdsec = _get_key_value(hdu, 'CCDSEC')
detsec = _get_key_value(hdu, 'DETSEC')
if None in [ccdsec, detsec]:
raise ValueError("CCDSEC {}; detse... | 1ce3e7e519f47f63f8894c3a29e269ca77d7cf5d | 3,649,844 |
def reload(hdf):
    """Re-open an hdf file from its own filename: ``hdf = reload(hdf)``."""
    # Round-trip through load() using the path recorded on the object.
    return load(hdf.filename)
def read_image(im_name, n_channel, data_dir='', batch_size=1, rescale=None):
""" function for create a Dataflow for reading images from a folder
This function returns a Dataflow object for images with file
name containing 'im_name' in directory 'data_dir'.
Args:
im_name (str): ... | 017878c8afce1be73160b338407a920c4f01a286 | 3,649,846 |
def build_optimizer(config, model):
"""
Build optimizer, set weight decay of normalization to 0 by default.
"""
skip = {}
skip_keywords = {}
if hasattr(model, 'no_weight_decay'):
skip = model.no_weight_decay()
if hasattr(model, 'no_weight_decay_keywords'):
skip_keywords = mod... | 83a09ed34c24caff7367ba1e43e051f362dfa85c | 3,649,847 |
def ising2d_worm(T_range, mcsteps, L):
"""T = temperature [K]; L = Length of grid."""
def new_head_position(worm, lattice):
"""
Extract current worm head position indices,
then randomly set new worm head position index.
lattice.occupied points to either lattice.bonds_x or latti... | 6fba36aceb70f19605e20a460db7054b81264224 | 3,649,848 |
def valid_chapter_name(chapter_name):
    """
    Decide whether a chapter title is acceptable.

    Args:
        chapter_name: Candidate chapter title to vet.

    Returns:
        False when the title contains a blacklisted word, True otherwise.
    """
    blacklist = ("目录",)
    return not any(word in chapter_name for word in blacklist)
from typing import Iterable
from typing import Tuple
def lex_min(perms: Iterable[Perm]) -> Tuple[Perm, ...]:
    """Return the lexicographically smallest of all symmetry sets of *perms*."""
    candidates = all_symmetry_sets(perms)
    return min(candidates)
def subfield(string, delim, occurrence):
"""
function to extract specified occurence of subfield from string
using specified field delimiter
eg select subfield('abc/123/xyz','/',0) returns 'abc'
eg select subfield('abc/123/xyz','/',1) returns '123'
eg select subfield('abc/123/xyz','/',2) retu... | ef022d0ca05e969e8ad69e4644cd24d1b7f47cb8 | 3,649,851 |
def in_hull(points, hull):
"""
Test if points in `p` are in `hull`
`p` should be a `NxK` coordinates of `N` points in `K` dimensions
`hull` is either a scipy.spatial.Delaunay object or the `MxK` array of the
coordinates of `M` points in `K`dimensions for which Delaunay triangulation
will be computed
"""
... | ab116c17b42c26648b02930824dd0ae591b32eef | 3,649,852 |
def sample_random(X_all, N):
    """Draw N distinct rows from an array of (x,t) points.

    Sampling is without replacement; the seed is fixed so the same
    subset is drawn for every N_f.
    """
    set_seed(0)  # deterministic subset across runs
    chosen = np.random.choice(X_all.shape[0], N, replace=False)
    return X_all[chosen, :]
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.