_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3
values | text stringlengths 75 19.8k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
q4900 | add_uniform_time_weights | train | def add_uniform_time_weights(ds):
"""Append uniform time weights to a Dataset.
All DataArrays with a time coordinate require a time weights coordinate.
For Datasets read in without a time bounds coordinate or explicit
time weights built in, aospy adds uniform time weights at each point
in the time ... | python | {
"resource": ""
} |
q4901 | _assert_has_data_for_time | train | def _assert_has_data_for_time(da, start_date, end_date):
"""Check to make sure data is in Dataset for the given time range.
Parameters
----------
da : DataArray
DataArray with a time variable
start_date : datetime-like object or str
start date
end_date : datetime-like object o... | python | {
"resource": ""
} |
q4902 | sel_time | train | def sel_time(da, start_date, end_date):
"""Subset a DataArray or Dataset for a given date range.
Ensures that data are present for full extent of requested range.
Appends start and end date of the subset to the DataArray.
Parameters
----------
da : DataArray or Dataset
data to subset
... | python | {
"resource": ""
} |
q4903 | assert_matching_time_coord | train | def assert_matching_time_coord(arr1, arr2):
"""Check to see if two DataArrays have the same time coordinate.
Parameters
----------
arr1 : DataArray or Dataset
First DataArray or Dataset
arr2 : DataArray or Dataset
Second DataArray or Dataset
Raises
------
ValueError
... | python | {
"resource": ""
} |
q4904 | ensure_time_as_index | train | def ensure_time_as_index(ds):
"""Ensures that time is an indexed coordinate on relevant quantities.
Sometimes when the data we load from disk has only one timestep, the
indexing of time-defined quantities in the resulting xarray.Dataset gets
messed up, in that the time bounds array and data variables do... | python | {
"resource": ""
} |
q4905 | infer_year | train | def infer_year(date):
"""Given a datetime-like object or string infer the year.
Parameters
----------
date : datetime-like object or str
Input date
Returns
-------
int
Examples
--------
>>> infer_year('2000')
2000
>>> infer_year('2000-01')
2000
>>> infe... | python | {
"resource": ""
} |
q4906 | maybe_convert_to_index_date_type | train | def maybe_convert_to_index_date_type(index, date):
"""Convert a datetime-like object to the index's date type.
Datetime indexing in xarray can be done using either a pandas
DatetimeIndex or a CFTimeIndex. Both support partial-datetime string
indexing regardless of the calendar type of the underlying d... | python | {
"resource": ""
} |
q4907 | Region._make_mask | train | def _make_mask(self, data, lon_str=LON_STR, lat_str=LAT_STR):
"""Construct the mask that defines a region on a given data's grid."""
mask = False
for west, east, south, north in self.mask_bounds:
if west < east:
mask_lon = (data[lon_str] > west) & (data[lon_str] < eas... | python | {
"resource": ""
} |
q4908 | Region.mask_var | train | def mask_var(self, data, lon_cyclic=True, lon_str=LON_STR,
lat_str=LAT_STR):
"""Mask the given data outside this region.
Parameters
----------
data : xarray.DataArray
The array to be regionally masked.
lon_cyclic : bool, optional (default True)
... | python | {
"resource": ""
} |
q4909 | Region.ts | train | def ts(self, data, lon_cyclic=True, lon_str=LON_STR, lat_str=LAT_STR,
land_mask_str=LAND_MASK_STR, sfc_area_str=SFC_AREA_STR):
"""Create yearly time-series of region-averaged data.
Parameters
----------
data : xarray.DataArray
The array to create the regional time... | python | {
"resource": ""
} |
q4910 | Region.av | train | def av(self, data, lon_str=LON_STR, lat_str=LAT_STR,
land_mask_str=LAND_MASK_STR, sfc_area_str=SFC_AREA_STR):
"""Time-average of region-averaged data.
Parameters
----------
data : xarray.DataArray
The array to compute the regional time-average of
lat_str, ... | python | {
"resource": ""
} |
q4911 | _rename_coords | train | def _rename_coords(ds, attrs):
"""Rename coordinates to aospy's internal names."""
for name_int, names_ext in attrs.items():
# Check if coord is in dataset already.
ds_coord_name = set(names_ext).intersection(set(ds.coords))
if ds_coord_name:
# Rename to the aospy internal na... | python | {
"resource": ""
} |
q4912 | _bounds_from_array | train | def _bounds_from_array(arr, dim_name, bounds_name):
"""Get the bounds of an array given its center values.
E.g. if lat-lon grid center lat/lon values are known, but not the
bounds of each grid box. The algorithm assumes that the bounds
are simply halfway between each pair of center values.
"""
... | python | {
"resource": ""
} |
q4913 | _diff_bounds | train | def _diff_bounds(bounds, coord):
"""Get grid spacing by subtracting upper and lower bounds."""
try:
return bounds[:, 1] - bounds[:, 0]
except IndexError:
diff = np.diff(bounds, axis=0)
return xr.DataArray(diff, dims=coord.dims, coords=coord.coords) | python | {
"resource": ""
} |
q4914 | _grid_sfc_area | train | def _grid_sfc_area(lon, lat, lon_bounds=None, lat_bounds=None):
"""Calculate surface area of each grid cell in a lon-lat grid."""
# Compute the bounds if not given.
if lon_bounds is None:
lon_bounds = _bounds_from_array(
lon, internal_names.LON_STR, internal_names.LON_BOUNDS_STR)
if ... | python | {
"resource": ""
} |
q4915 | Model._get_grid_files | train | def _get_grid_files(self):
"""Get the files holding grid data for an aospy object."""
grid_file_paths = self.grid_file_paths
datasets = []
if isinstance(grid_file_paths, str):
grid_file_paths = [grid_file_paths]
for path in grid_file_paths:
try:
... | python | {
"resource": ""
} |
q4916 | Model._set_mult_grid_attr | train | def _set_mult_grid_attr(self):
"""
Set multiple attrs from grid file given their names in the grid file.
"""
grid_objs = self._get_grid_files()
if self.grid_attrs is None:
self.grid_attrs = {}
# Override GRID_ATTRS with entries in grid_attrs
attrs = ... | python | {
"resource": ""
} |
q4917 | Model.set_grid_data | train | def set_grid_data(self):
"""Populate the attrs that hold grid data."""
if self._grid_data_is_set:
return
self._set_mult_grid_attr()
if not np.any(getattr(self, 'sfc_area', None)):
try:
sfc_area = _grid_sfc_area(self.lon, self.lat, self.lon_bounds,
... | python | {
"resource": ""
} |
q4918 | _other_to_lon | train | def _other_to_lon(func):
"""Wrapper for casting Longitude operator arguments to Longitude"""
def func_other_to_lon(obj, other):
return func(obj, _maybe_cast_to_lon(other))
return func_other_to_lon | python | {
"resource": ""
} |
q4919 | _get_attr_by_tag | train | def _get_attr_by_tag(obj, tag, attr_name):
"""Get attribute from an object via a string tag.
Parameters
----------
obj : object from which to get the attribute
attr_name : str
Unmodified name of the attribute to be found. The actual attribute
that is returned may be modified be 'ta... | python | {
"resource": ""
} |
q4920 | _get_all_objs_of_type | train | def _get_all_objs_of_type(type_, parent):
"""Get all attributes of the given type from the given object.
Parameters
----------
type_ : The desired type
parent : The object from which to get the attributes with type matching
'type_'
Returns
-------
A list (possibly empty) of att... | python | {
"resource": ""
} |
q4921 | _prune_invalid_time_reductions | train | def _prune_invalid_time_reductions(spec):
"""Prune time reductions of spec with no time dimension."""
valid_reductions = []
if not spec['var'].def_time and spec['dtype_out_time'] is not None:
for reduction in spec['dtype_out_time']:
if reduction not in _TIME_DEFINED_REDUCTIONS:
... | python | {
"resource": ""
} |
q4922 | _compute_or_skip_on_error | train | def _compute_or_skip_on_error(calc, compute_kwargs):
"""Execute the Calc, catching and logging exceptions, but don't re-raise.
Prevents one failed calculation from stopping a larger requested set
of calculations.
"""
try:
return calc.compute(**compute_kwargs)
except Exception:
m... | python | {
"resource": ""
} |
q4923 | _submit_calcs_on_client | train | def _submit_calcs_on_client(calcs, client, func):
"""Submit calculations via dask.bag and a distributed client"""
logging.info('Connected to client: {}'.format(client))
if LooseVersion(dask.__version__) < '0.18':
dask_option_setter = dask.set_options
else:
dask_option_setter = dask.confi... | python | {
"resource": ""
} |
q4924 | _exec_calcs | train | def _exec_calcs(calcs, parallelize=False, client=None, **compute_kwargs):
"""Execute the given calculations.
Parameters
----------
calcs : Sequence of ``aospy.Calc`` objects
parallelize : bool, default False
Whether to submit the calculations in parallel or not
client : distributed.Clie... | python | {
"resource": ""
} |
q4925 | submit_mult_calcs | train | def submit_mult_calcs(calc_suite_specs, exec_options=None):
"""Generate and execute all specified computations.
Once the calculations are prepped and submitted for execution, any
calculation that triggers any exception or error is skipped, and the rest
of the calculations proceed unaffected. This prev... | python | {
"resource": ""
} |
q4926 | CalcSuite._get_requested_spec | train | def _get_requested_spec(self, obj, spec_name):
"""Helper to translate user specifications to needed objects."""
requested = self._specs_in[spec_name]
if isinstance(requested, str):
return _get_attr_by_tag(obj, requested, spec_name)
else:
return requested | python | {
"resource": ""
} |
q4927 | CalcSuite._permute_core_specs | train | def _permute_core_specs(self):
"""Generate all requested combinations of the core objects."""
obj_trees = []
projects = self._get_requested_spec(self._obj_lib, _PROJECTS_STR)
for project in projects:
models = self._get_requested_spec(project, _MODELS_STR)
for mode... | python | {
"resource": ""
} |
q4928 | CalcSuite._get_regions | train | def _get_regions(self):
"""Get the requested regions."""
if self._specs_in[_REGIONS_STR] == 'all':
return [_get_all_objs_of_type(
Region, getattr(self._obj_lib, 'regions', self._obj_lib)
)]
else:
return [set(self._specs_in[_REGIONS_STR])] | python | {
"resource": ""
} |
q4929 | CalcSuite._get_variables | train | def _get_variables(self):
"""Get the requested variables."""
if self._specs_in[_VARIABLES_STR] == 'all':
return _get_all_objs_of_type(
Var, getattr(self._obj_lib, 'variables', self._obj_lib)
)
else:
return set(self._specs_in[_VARIABLES_STR]) | python | {
"resource": ""
} |
q4930 | CalcSuite._get_aux_specs | train | def _get_aux_specs(self):
"""Get and pre-process all of the non-core specifications."""
# Drop the "core" specifications, which are handled separately.
specs = self._specs_in.copy()
[specs.pop(core) for core in self._CORE_SPEC_NAMES]
specs[_REGIONS_STR] = self._get_regions()
... | python | {
"resource": ""
} |
q4931 | CalcSuite._permute_aux_specs | train | def _permute_aux_specs(self):
"""Generate all permutations of the non-core specifications."""
# Convert to attr names that Calc is expecting.
calc_aux_mapping = self._NAMES_SUITE_TO_CALC.copy()
# Special case: manually add 'library' to mapping
calc_aux_mapping[_OBJ_LIB_STR] = Non... | python | {
"resource": ""
} |
q4932 | CalcSuite._combine_core_aux_specs | train | def _combine_core_aux_specs(self):
"""Combine permutations over core and auxiliary Calc specs."""
all_specs = []
for core_dict in self._permute_core_specs():
for aux_dict in self._permute_aux_specs():
all_specs.append(_merge_dicts(core_dict, aux_dict))
return... | python | {
"resource": ""
} |
q4933 | CalcSuite.create_calcs | train | def create_calcs(self):
"""Generate a Calc object for each requested parameter combination."""
specs = self._combine_core_aux_specs()
for spec in specs:
spec['dtype_out_time'] = _prune_invalid_time_reductions(spec)
return [Calc(**sp) for sp in specs] | python | {
"resource": ""
} |
q4934 | data_in_label | train | def data_in_label(intvl_in, dtype_in_time, dtype_in_vert=False):
"""Create string label specifying the input data of a calculation."""
intvl_lbl = intvl_in
time_lbl = dtype_in_time
lbl = '_'.join(['from', intvl_lbl, time_lbl]).replace('__', '_')
vert_lbl = dtype_in_vert if dtype_in_vert else False
... | python | {
"resource": ""
} |
q4935 | data_name_gfdl | train | def data_name_gfdl(name, domain, data_type, intvl_type, data_yr,
intvl, data_in_start_yr, data_in_dur):
"""Determine the filename of GFDL model data output."""
# Determine starting year of netCDF file to be accessed.
extra_yrs = (data_yr - data_in_start_yr) % data_in_dur
data_in_yr = ... | python | {
"resource": ""
} |
q4936 | dmget | train | def dmget(files_list):
"""Call GFDL command 'dmget' to access archived files."""
if isinstance(files_list, str):
files_list = [files_list]
archive_files = []
for f in files_list:
if f.startswith('/archive'):
archive_files.append(f)
try:
subprocess.call(['dmget'] ... | python | {
"resource": ""
} |
q4937 | _replace_pressure | train | def _replace_pressure(arguments, dtype_in_vert):
"""Replace p and dp Vars with appropriate Var objects specific to
the dtype_in_vert."""
arguments_out = []
for arg in arguments:
if isinstance(arg, Var):
if arg.name == 'p':
arguments_out.append(_P_VARS[dtype_in_vert])
... | python | {
"resource": ""
} |
q4938 | _add_metadata_as_attrs | train | def _add_metadata_as_attrs(data, units, description, dtype_out_vert):
"""Add metadata attributes to Dataset or DataArray"""
if isinstance(data, xr.DataArray):
return _add_metadata_as_attrs_da(data, units, description,
dtype_out_vert)
else:
for name, a... | python | {
"resource": ""
} |
q4939 | _add_metadata_as_attrs_da | train | def _add_metadata_as_attrs_da(data, units, description, dtype_out_vert):
"""Add metadata attributes to DataArray"""
if dtype_out_vert == 'vert_int':
if units != '':
units = '(vertical integral of {0}): {0} kg m^-2)'.format(units)
else:
units = '(vertical integral of quant... | python | {
"resource": ""
} |
q4940 | Calc._dir_out | train | def _dir_out(self):
"""Create string of the data directory to save individual .nc files."""
return os.path.join(self.proj.direc_out, self.proj.name,
self.model.name, self.run.name, self.name) | python | {
"resource": ""
} |
q4941 | Calc._dir_tar_out | train | def _dir_tar_out(self):
"""Create string of the data directory to store a tar file."""
return os.path.join(self.proj.tar_direc_out, self.proj.name,
self.model.name, self.run.name) | python | {
"resource": ""
} |
q4942 | Calc._file_name | train | def _file_name(self, dtype_out_time, extension='nc'):
"""Create the name of the aospy file."""
if dtype_out_time is None:
dtype_out_time = ''
out_lbl = utils.io.data_out_label(self.intvl_out, dtype_out_time,
dtype_vert=self.dtype_out_vert)
... | python | {
"resource": ""
} |
q4943 | Calc._print_verbose | train | def _print_verbose(*args):
"""Print diagnostic message."""
try:
return '{0} {1} ({2})'.format(args[0], args[1], ctime())
except IndexError:
return '{0} ({1})'.format(args[0], ctime()) | python | {
"resource": ""
} |
q4944 | Calc._to_desired_dates | train | def _to_desired_dates(self, arr):
"""Restrict the xarray DataArray or Dataset to the desired months."""
times = utils.times.extract_months(
arr[internal_names.TIME_STR], self.months
)
return arr.sel(time=times) | python | {
"resource": ""
} |
q4945 | Calc._add_grid_attributes | train | def _add_grid_attributes(self, ds):
"""Add model grid attributes to a dataset"""
for name_int, names_ext in self._grid_attrs.items():
ds_coord_name = set(names_ext).intersection(set(ds.coords) |
set(ds.data_vars))
model_attr... | python | {
"resource": ""
} |
q4946 | Calc._get_input_data | train | def _get_input_data(self, var, start_date, end_date):
"""Get the data for a single variable over the desired date range."""
logging.info(self._print_verbose("Getting input data:", var))
if isinstance(var, (float, int)):
return var
else:
cond_pfull = ((not hasattr... | python | {
"resource": ""
} |
q4947 | Calc._get_all_data | train | def _get_all_data(self, start_date, end_date):
"""Get the needed data from all of the vars in the calculation."""
return [self._get_input_data(var, start_date, end_date)
for var in _replace_pressure(self.variables,
self.dtype_in_vert)] | python | {
"resource": ""
} |
q4948 | Calc._compute | train | def _compute(self, data):
"""Perform the calculation."""
local_ts = self._local_ts(*data)
dt = local_ts[internal_names.TIME_WEIGHTS_STR]
# Convert dt to units of days to prevent overflow
dt = dt / np.timedelta64(1, 'D')
return local_ts, dt | python | {
"resource": ""
} |
q4949 | Calc._compute_full_ts | train | def _compute_full_ts(self, data):
"""Perform calculation and create yearly timeseries at each point."""
# Get results at each desired timestep and spatial point.
full_ts, dt = self._compute(data)
# Vertically integrate.
vert_types = ('vert_int', 'vert_av')
if self.dtype_o... | python | {
"resource": ""
} |
q4950 | Calc._full_to_yearly_ts | train | def _full_to_yearly_ts(self, arr, dt):
"""Average the full timeseries within each year."""
time_defined = self.def_time and not ('av' in self.dtype_in_time)
if time_defined:
arr = utils.times.yearly_average(arr, dt)
return arr | python | {
"resource": ""
} |
q4951 | Calc._time_reduce | train | def _time_reduce(self, arr, reduction):
"""Perform the specified time reduction on a local time-series."""
if self.dtype_in_time == 'av' or not self.def_time:
return arr
reductions = {
'ts': lambda xarr: xarr,
'av': lambda xarr: xarr.mean(internal_names.YEAR_S... | python | {
"resource": ""
} |
q4952 | Calc.region_calcs | train | def region_calcs(self, arr, func):
"""Perform a calculation for all regions."""
# Get pressure values for data output on hybrid vertical coordinates.
bool_pfull = (self.def_vert and self.dtype_in_vert ==
internal_names.ETA_STR and self.dtype_out_vert is False)
if bo... | python | {
"resource": ""
} |
q4953 | Calc._apply_all_time_reductions | train | def _apply_all_time_reductions(self, data):
"""Apply all requested time reductions to the data."""
logging.info(self._print_verbose("Applying desired time-"
"reduction methods."))
reduc_specs = [r.split('.') for r in self.dtype_out_time]
reduced =... | python | {
"resource": ""
} |
q4954 | Calc.compute | train | def compute(self, write_to_tar=True):
"""Perform all desired calculations on the data and save externally."""
data = self._get_all_data(self.start_date, self.end_date)
logging.info('Computing timeseries for {0} -- '
'{1}.'.format(self.start_date, self.end_date))
full... | python | {
"resource": ""
} |
q4955 | Calc._save_files | train | def _save_files(self, data, dtype_out_time):
"""Save the data to netcdf files in direc_out."""
path = self.path_out[dtype_out_time]
if not os.path.isdir(self.dir_out):
os.makedirs(self.dir_out)
if 'reg' in dtype_out_time:
try:
reg_data = xr.open_da... | python | {
"resource": ""
} |
q4956 | Calc._write_to_tar | train | def _write_to_tar(self, dtype_out_time):
"""Add the data to the tar file in tar_out_direc."""
# When submitted in parallel and the directory does not exist yet
# multiple processes may try to create a new directory; this leads
# to an OSError for all processes that tried to make the
... | python | {
"resource": ""
} |
q4957 | Calc._update_data_out | train | def _update_data_out(self, data, dtype):
"""Append the data of the given dtype_out to the data_out attr."""
try:
self.data_out.update({dtype: data})
except AttributeError:
self.data_out = {dtype: data} | python | {
"resource": ""
} |
q4958 | Calc.save | train | def save(self, data, dtype_out_time, dtype_out_vert=False,
save_files=True, write_to_tar=False):
"""Save aospy data to data_out attr and to an external file."""
self._update_data_out(data, dtype_out_time)
if save_files:
self._save_files(data, dtype_out_time)
if w... | python | {
"resource": ""
} |
q4959 | Calc._load_from_disk | train | def _load_from_disk(self, dtype_out_time, dtype_out_vert=False,
region=False):
"""Load aospy data saved as netcdf files on the file system."""
ds = xr.open_dataset(self.path_out[dtype_out_time])
if region:
arr = ds[region.name]
# Use region-specifi... | python | {
"resource": ""
} |
q4960 | Calc._load_from_tar | train | def _load_from_tar(self, dtype_out_time, dtype_out_vert=False):
"""Load data save in tarball form on the file system."""
path = os.path.join(self.dir_tar_out, 'data.tar')
utils.io.dmget([path])
with tarfile.open(path, 'r') as data_tar:
ds = xr.open_dataset(
da... | python | {
"resource": ""
} |
q4961 | Calc.load | train | def load(self, dtype_out_time, dtype_out_vert=False, region=False,
plot_units=False, mask_unphysical=False):
"""Load the data from the object if possible or from disk."""
msg = ("Loading data from disk for object={0}, dtype_out_time={1}, "
"dtype_out_vert={2}, and region="
... | python | {
"resource": ""
} |
q4962 | conv_precip_frac | train | def conv_precip_frac(precip_largescale, precip_convective):
"""Fraction of total precip that is from convection parameterization.
Parameters
----------
precip_largescale, precip_convective : xarray.DataArrays
Precipitation from grid-scale condensation and from convective
parameterizatio... | python | {
"resource": ""
} |
q4963 | dumps | train | def dumps(x, float_bits=DEFAULT_FLOAT_BITS):
"""
Dump data structure to str.
Here float_bits is either 32 or 64.
"""
with lock:
if float_bits == 32:
encode_func[float] = encode_float32
elif float_bits == 64:
encode_func[float] = encode_float64
else:
... | python | {
"resource": ""
} |
q4964 | Brain.stop_batch_learning | train | def stop_batch_learning(self):
"""Finish a series of batch learn operations."""
self._learning = False
self.graph.commit()
self.graph.cursor().execute("PRAGMA journal_mode=truncate")
self.graph.ensure_indexes() | python | {
"resource": ""
} |
q4965 | Brain.learn | train | def learn(self, text):
"""Learn a string of text. If the input is not already
Unicode, it will be decoded as utf-8."""
if type(text) != types.UnicodeType:
# Assume that non-Unicode text is encoded as utf-8, which
# should be somewhat safe in the modern world.
... | python | {
"resource": ""
} |
q4966 | Brain._to_graph | train | def _to_graph(self, contexts):
"""This is an iterator that returns each edge of our graph
with its two nodes"""
prev = None
for context in contexts:
if prev is None:
prev = context
continue
yield prev[0], context[1], context[0]
... | python | {
"resource": ""
} |
q4967 | Brain.reply | train | def reply(self, text, loop_ms=500, max_len=None):
"""Reply to a string of text. If the input is not already
Unicode, it will be decoded as utf-8."""
if type(text) != types.UnicodeType:
# Assume that non-Unicode text is encoded as utf-8, which
# should be somewhat safe in ... | python | {
"resource": ""
} |
q4968 | Brain.init | train | def init(filename, order=3, tokenizer=None):
"""Initialize a brain. This brain's file must not already exist.
Keyword arguments:
order -- Order of the forward/reverse Markov chains (integer)
tokenizer -- One of Cobe, MegaHAL (default Cobe). See documentation
for cobe.tokenizers for details. (strin... | python | {
"resource": ""
} |
q4969 | PulseExtStreamRestoreInfo.struct_from_value | train | def struct_from_value( cls, name, volume,
channel_list=None, mute=False, device=None ):
'Same arguments as with class instance init.'
chan_map = c.PA_CHANNEL_MAP()
if not channel_list: c.pa.channel_map_init_mono(chan_map)
else:
if not is_str(channel_list):
channel_list = b','.join(map(c.force_bytes, c... | python | {
"resource": ""
} |
q4970 | Pulse.connect | train | def connect(self, autospawn=False, wait=False):
'''Connect to pulseaudio server.
"autospawn" option will start new pulse daemon, if necessary.
Specifying "wait" option will make function block until pulseaudio server appears.'''
if self._loop_closed:
raise PulseError('Eventloop object was already'
' de... | python | {
"resource": ""
} |
q4971 | Pulse.stream_restore_delete | train | def stream_restore_delete(obj_name_or_list):
'''Can be passed string name,
PulseExtStreamRestoreInfo object or a list of any of these.'''
if is_str(obj_name_or_list, PulseExtStreamRestoreInfo):
obj_name_or_list = [obj_name_or_list]
name_list = list((obj.name if isinstance( obj,
PulseExtStreamRestoreInfo ... | python | {
"resource": ""
} |
q4972 | Pulse.default_set | train | def default_set(self, obj):
'Set passed sink or source to be used as default one by pulseaudio server.'
assert_pulse_object(obj)
method = {
PulseSinkInfo: self.sink_default_set,
PulseSourceInfo: self.source_default_set }.get(type(obj))
if not method: raise NotImplementedError(type(obj))
method(obj) | python | {
"resource": ""
} |
q4973 | MLink.update | train | def update(uid, post_data):
'''
Update the link.
'''
entry = TabLink.update(
name=post_data['name'],
link=post_data['link'],
order=post_data['order'],
logo=post_data['logo'] if 'logo' in post_data else '',
).where(TabLink.uid == uid)... | python | {
"resource": ""
} |
q4974 | MLink.create_link | train | def create_link(id_link, post_data):
'''
Add record in link.
'''
if MLink.get_by_uid(id_link):
return False
try:
the_order = int(post_data['order'])
except:
the_order = 999
TabLink.create(name=post_data['name'],
... | python | {
"resource": ""
} |
q4975 | WikiHandler.recent | train | def recent(self):
'''
List recent wiki.
'''
kwd = {
'pager': '',
'title': 'Recent Pages',
}
self.render('wiki_page/wiki_list.html',
view=MWiki.query_recent(),
format_date=tools.format_date,
... | python | {
"resource": ""
} |
q4976 | WikiHandler.view_or_add | train | def view_or_add(self, title):
'''
Check whether a post with the given title exists.
If it does, show it; otherwise, add it.
'''
postinfo = MWiki.get_by_wiki(title)
if postinfo:
if postinfo.kind == self.kind:
self.view(postinfo)
else:
retu... | python | {
"resource": ""
} |
q4977 | WikiHandler.update | train | def update(self, uid):
'''
Update the wiki.
'''
postinfo = MWiki.get_by_uid(uid)
if self.check_post_role()['EDIT'] or postinfo.user_name == self.get_current_user():
pass
else:
return False
post_data = self.get_post_data()
post_data[... | python | {
"resource": ""
} |
q4978 | WikiHandler.view | train | def view(self, view):
'''
View the wiki.
'''
kwd = {
'pager': '',
'editable': self.editable(),
}
self.render('wiki_page/wiki_view.html',
postinfo=view,
kwd=kwd,
userinfo=self.userinfo) | python | {
"resource": ""
} |
q4979 | FilterHandler.echo_html | train | def echo_html(self, url_str):
'''
Show the HTML
'''
logger.info('info echo html: {0}'.format(url_str))
condition = self.gen_redis_kw()
url_arr = self.parse_url(url_str)
sig = url_arr[0]
num = (len(url_arr) - 2) // 2
catinfo = MCategory.get_by_... | python | {
"resource": ""
} |
q4980 | gen_array_crud | train | def gen_array_crud():
'''
Return the dictionary of the switcher from the XLSX file.
If the value of the column of the row is `1`, it will be added to the array.
'''
if WORK_BOOK:
pass
else:
return False
papa_id = 0
switch_dics = {}
kind_dics = {}
for work_sheet in WORK... | python | {
"resource": ""
} |
q4981 | __get_switch_arr | train | def __get_switch_arr(work_sheet, row_num):
'''
If the value of the column of the row is `1`, it will be added to the array.
'''
u_dic = []
for col_idx in FILTER_COLUMNS:
cell_val = work_sheet['{0}{1}'.format(col_idx, row_num)].value
if cell_val in [1, '1']:
# Appending the ... | python | {
"resource": ""
} |
q4982 | MUsage.add_or_update | train | def add_or_update(user_id, post_id, kind):
'''
Create the record if new, else update it.
'''
rec = MUsage.query_by_signature(user_id, post_id)
cate_rec = MInfor2Catalog.get_first_category(post_id)
if cate_rec:
cat_id = cate_rec.tag_id
else:
... | python | {
"resource": ""
} |
q4983 | run_send_all | train | def run_send_all(*args):
'''
Send email to all user.
'''
for user_rec in MUser.query_all():
email_add = user_rec.user_email
send_mail([email_add],
"{0}|{1}".format(SMTP_CFG['name'], email_cfg['title']),
email_cfg['content']) | python | {
"resource": ""
} |
q4984 | run_send_nologin | train | def run_send_nologin(*args):
'''
Send email to who not logged in recently.
'''
for user_rec in MUser.query_nologin():
email_add = user_rec.user_email
print(email_add)
send_mail([email_add],
"{0}|{1}".format(SMTP_CFG['name'], email_cfg['title']),
... | python | {
"resource": ""
} |
q4985 | gen_xlsx_category | train | def gen_xlsx_category():
'''
Generating categories from the XLSX file.
'''
if os.path.exists(XLSX_FILE):
pass
else:
return
# 在分类中排序
order_index = 1
all_cate_arr = []
for sheet_ranges in load_workbook(filename=XLSX_FILE):
kind_sig = str(sheet_ranges['A1'].value).st... | python | {
"resource": ""
} |
q4986 | gen_category | train | def gen_category(yaml_file, sig):
'''
Generating categories from the YAML file.
'''
out_dic = yaml.load(open(yaml_file))
for key in out_dic:
if key.endswith('00'):
uid = key[1:]
cur_dic = out_dic[key]
porder = cur_dic['order']
cat_dic = {
... | python | {
"resource": ""
} |
q4987 | gen_yaml_category | train | def gen_yaml_category():
'''
Find YAML metadata files and generate categories from them.
'''
for wroot, _, wfiles in os.walk('./database/meta'):
for wfile in wfiles:
if wfile.endswith('.yaml'):
gen_category(os.path.join(wroot, wfile), wfile[0]) | python | {
"resource": ""
} |
q4988 | get_cfg | train | def get_cfg():
'''
Get the configuration values.
'''
cfg_var = dir(cfg)
if 'DB_CFG' in cfg_var:
db_cfg = cfg.DB_CFG
else:
db_cfg = ConfigDefault.DB_CFG
if 'SMTP_CFG' in cfg_var:
smtp_cfg = cfg.SMTP_CFG
else:
smtp_cfg = ConfigDefault.SMTP_CFG
if 'SITE_CF... | python | {
"resource": ""
} |
q4989 | PageHandler.view_or_add | train | def view_or_add(self, slug):
'''
When accessed with the slug, the page will be added if there is no record in the database.
'''
rec_page = MWiki.get_by_uid(slug)
if rec_page:
if rec_page.kind == self.kind:
self.view(rec_page)
else:
... | python | {
"resource": ""
} |
q4990 | PageHandler.to_add | train | def to_add(self, citiao):
'''
To Add page.
'''
kwd = {
'cats': MCategory.query_all(),
'slug': citiao,
'pager': '',
}
self.render('wiki_page/page_add.html',
kwd=kwd,
userinfo=self.userinfo) | python | {
"resource": ""
} |
q4991 | PageHandler.__could_edit | train | def __could_edit(self, slug):
'''
Test if the user could edit the page.
'''
page_rec = MWiki.get_by_uid(slug)
if not page_rec:
return False
if self.check_post_role()['EDIT']:
return True
elif page_rec.user_name == self.userinfo.user_name:
... | python | {
"resource": ""
} |
q4992 | PageHandler.update | train | def update(self, slug):
'''
Update the page.
'''
post_data = self.get_post_data()
post_data['user_name'] = self.userinfo.user_name
pageinfo = MWiki.get_by_uid(slug)
cnt_old = tornado.escape.xhtml_unescape(pageinfo.cnt_md).strip()
cnt_new = post_data['c... | python | {
"resource": ""
} |
q4993 | PageHandler.to_modify | train | def to_modify(self, uid):
'''
Try to modify the page.
'''
kwd = {
'pager': '',
}
self.render('wiki_page/page_edit.html',
postinfo=MWiki.get_by_uid(uid),
kwd=kwd,
cfg=CMS_CFG,
use... | python | {
"resource": ""
} |
q4994 | PageHandler.view | train | def view(self, rec):
'''
View the page.
'''
kwd = {
'pager': '',
}
self.render('wiki_page/page_view.html',
postinfo=rec,
kwd=kwd,
author=rec.user_name,
format_date=tools.format_da... | python | {
"resource": ""
} |
q4995 | PageHandler.ajax_count_plus | train | def ajax_count_plus(self, slug):
'''
Increase the post view count by one via AJAX.
'''
output = {
'status': 1 if MWiki.view_count_plus(slug) else 0,
}
return json.dump(output, self) | python | {
"resource": ""
} |
q4996 | PageHandler.add_page | train | def add_page(self, slug):
'''
Add new page.
'''
post_data = self.get_post_data()
post_data['user_name'] = self.userinfo.user_name
if MWiki.get_by_uid(slug):
self.set_status(400)
return False
else:
MWiki.create_page(slug, post... | python | {
"resource": ""
} |
q4997 | MPostHist.update_cnt | train | def update_cnt(uid, post_data):
'''
Update the content by ID.
'''
entry = TabPostHist.update(
user_name=post_data['user_name'],
cnt_md=tornado.escape.xhtml_escape(post_data['cnt_md']),
time_update=tools.timestamp(),
).where(TabPostHist.uid == u... | python | {
"resource": ""
} |
q4998 | MPostHist.query_by_postid | train | def query_by_postid(postid, limit=5):
'''
Query the history of certain records.
'''
recs = TabPostHist.select().where(
TabPostHist.post_id == postid
).order_by(
TabPostHist.time_update.desc()
).limit(limit)
return recs | python | {
"resource": ""
} |
q4999 | MPostHist.get_last | train | def get_last(postid, limit=10):
'''
Get the most recent of the records.
'''
recs = TabPostHist.select().where(
TabPostHist.post_id == postid
).order_by(TabPostHist.time_update.desc()).limit(limit)
if recs.count():
return recs.get()
return None | python | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.