desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
'Serializes or deserializes a value by given name. This operator saves or loads a value by given name. If this is a serializer, then the value is simply saved at the key. Note that some type information might be missed depending on the implementation (and the target file format). If this is a deserializer, then the val...
def __call__(self, key, value):
    """Serializes or deserializes a value by the given name.

    Subclasses must override this to save or load ``value`` under ``key``.
    """
    raise NotImplementedError()
def save(self, obj):
    """Saves an object by this serializer.

    Equivalent to ``obj.serialize(self)``.

    Args:
        obj: Target object to be serialized.
    """
    obj.serialize(self)
def load(self, obj):
    """Loads an object from this deserializer.

    Equivalent to ``obj.serialize(self)``.

    Args:
        obj: Target object to be deserialized.
    """
    obj.serialize(self)
'Transition in forword and backword algorithms is represented as matrix. See also https://blog.wtf.sg/2014/10/06/connectionist-temporal-classification-ctc-with-theano/'
def recurrence_relation(self, label, path_length, max_length, dtype, xp):
(batch, lab) = label.shape repeat_mask = xp.ones((batch, ((lab * 2) + 1))) repeat_mask[:, 1::2] = (label != xp.take(label, ((xp.arange((-1), (lab - 1)) % lab) + xp.arange(0, (batch * lab), lab)[:, None]))) repeat_mask[:, 1] = 1 rr = ((xp.eye(max_length, dtype=dtype)[None, :] + xp.eye(max_length, k=1...
def forward_preprocess(self, function, in_data):
    """Callback invoked before forward propagation.

    The default implementation does nothing.

    Args:
        function (~chainer.Function): Function to which this hook is
            registered.
        in_data (tuple of ndarray): Input data of forward propagation.
    """
def forward_postprocess(self, function, in_data):
    """Callback invoked after forward propagation.

    The default implementation does nothing.

    Args:
        function (~chainer.Function): Function to which this hook is
            registered.
        in_data (tuple of ndarray): Input data of forward propagation.
    """
'Callback function invoked before backward propagation. Args: function(~chainer.Function): Function object to which the function hook is registered. in_data(tuple of numpy.ndarray or tuple of cupy.ndarray): Input data of forward propagation. out_grad(tuple of numpy.ndarray or tuple of cupy.ndarray): Gradient data of ba...
def backward_preprocess(self, function, in_data, out_grad):
    """Callback invoked before backward propagation.

    The default implementation does nothing.

    Args:
        function (~chainer.Function): Function to which this hook is
            registered.
        in_data (tuple of ndarray): Input data of forward propagation.
        out_grad (tuple of ndarray): Gradient data of backward propagation.
    """
'Callback function invoked after backward propagation. Args: function(~chainer.Function): Function object to which the function hook is registered. in_data(tuple of numpy.ndarray or tuple of cupy.ndarray): Input of forward propagation. out_grad(tuple of numpy.ndarray or tuple of cupy.ndarray): Gradient data of backward...
def backward_postprocess(self, function, in_data, out_grad):
    """Callback invoked after backward propagation.

    The default implementation does nothing.

    Args:
        function (~chainer.Function): Function to which this hook is
            registered.
        in_data (tuple of ndarray): Input data of forward propagation.
        out_grad (tuple of ndarray): Gradient data of backward propagation.
    """
def show(self, file=sys.stdout):
    """Prints the global config entries.

    Entries are sorted in the lexicographical order of their names.

    Args:
        file: Output file-like object.
    """
    _print_attrs(self, sorted(self.__dict__), file)
'show(file=sys.stdout) Prints the config entries. The entries are sorted in the lexicographical order of the entry names. Args: file: Output file-like object. .. admonition:: Example You can easily print the list of configurations used in the current thread. >>> chainer.config.show() # doctest: +SKIP debug F...
def show(self, file=sys.stdout):
    """Prints the config entries, merging global and thread-local ones.

    Entries are sorted in the lexicographical order of their names.

    Args:
        file: Output file-like object.
    """
    names = set(self._global.__dict__) | set(self._local.__dict__)
    _print_attrs(self, sorted(names), file)
def __enter__(self):
    """Makes this reporter object current."""
    _reporters.append(self)
def __exit__(self, exc_type, exc_value, traceback):
    """Recovers the previous reporter object to the current."""
    _reporters.pop()
'Creates a scope to report observed values to ``observation``. This is a context manager to be passed to ``with`` statements. In this scope, the observation dictionary is changed to the given one. It also makes this reporter object current. Args: observation (dict): Observation dictionary. All observations reported ins...
@contextlib.contextmanager
def scope(self, observation):
    """Creates a scope to report observed values to ``observation``.

    A context manager for ``with`` statements: inside the scope, the
    observation dictionary is swapped for the given one and this reporter
    is made current.

    Args:
        observation (dict): Observation dictionary receiving all values
            reported inside the scope.
    """
    saved = self.observation
    self.observation = observation
    self.__enter__()
    # NOTE(review): no try/finally in the original — state is not restored
    # if the body raises; behavior preserved as-is.
    yield
    self.__exit__(None, None, None)
    self.observation = saved
'Registers an observer of values. Observer defines a scope of names for observed values. Values observed with the observer are registered with names prefixed by the observer name. Args: name (str): Name of the observer. observer: The observer object. Note that the reporter distinguishes the observers by their object id...
def add_observer(self, name, observer):
    """Registers an observer of values.

    Values observed with the observer are registered with names prefixed
    by the observer name.  Observers are distinguished by object identity.

    Args:
        name (str): Name of the observer.
        observer: The observer object.
    """
    self._observer_names[id(observer)] = name
def add_observers(self, prefix, observers):
    """Registers multiple observers at once.

    Args:
        prefix (str): Prefix prepended to each observer name.
        observers: Iterable of ``(name, observer)`` pairs.
    """
    for name, observer in observers:
        self._observer_names[id(observer)] = prefix + name
'Reports observed values. The values are written with the key, prefixed by the name of the observer object if given. .. note:: As of v2.0.0, if a value is of type :class:`~chainer.Variable`, the variable is copied without preserving the computational graph and the new variable object purged from the graph is stored to ...
def report(self, values, observer=None):
if (not configuration.config.keep_graph_on_report): values = {k: _copy_variable(v) for (k, v) in six.iteritems(values)} if (observer is not None): observer_id = id(observer) if (observer_id not in self._observer_names): raise KeyError('Given observer is not regist...
def add(self, value):
    """Accumulates a scalar value.

    Args:
        value: Scalar value to accumulate.  Either a NumPy scalar or a
            zero-dimensional array (on CPU or GPU).
    """
    with _get_device(value):
        self._x += value
        self._x2 += value * value
        self._n += 1
def compute_mean(self):
    """Computes the mean of the accumulated values."""
    total, count = self._x, self._n
    with _get_device(total):
        return total / count
def make_statistics(self):
    """Computes and returns the mean and standard deviation values.

    Returns:
        tuple: Mean and standard deviation values.
    """
    x = self._x
    n = self._n
    xp = cuda.get_array_module(x)
    with _get_device(x):
        mean = x / n
        variance = self._x2 / n - mean * mean
        return mean, xp.sqrt(variance)
def add(self, d):
    """Accumulates a dictionary of scalars.

    Only scalars, zero-dimensional arrays, and variables holding
    zero-dimensional arrays are accumulated; other entries are ignored.

    Args:
        d (dict): Dictionary of scalars to accumulate.
    """
    summaries = self._summaries
    for key, value in six.iteritems(d):
        if isinstance(value, variable.Variable):
            value = value.data
        if numpy.isscalar(value) or getattr(value, 'ndim', -1) == 0:
            summaries[key].add(value)
def compute_mean(self):
    """Creates a dictionary of mean values.

    Returns:
        dict: Mapping from each entry name to its mean value.
    """
    return {name: summary.compute_mean()
            for name, summary in six.iteritems(self._summaries)}
'Creates a dictionary of statistics. It returns a single dictionary that holds mean and standard deviation values for every entry added to the summary. For an entry of name ``\'key\'``, these values are added to the dictionary by names ``\'key\'`` and ``\'key.std\'``, respectively. Returns: dict: Dictionary of statisti...
def make_statistics(self):
    """Creates a dictionary of statistics.

    For an entry named ``'key'``, the mean is stored under ``'key'`` and
    the standard deviation under ``'key.std'``.

    Returns:
        dict: Dictionary of statistics.
    """
    stats = {}
    for name, summary in six.iteritems(self._summaries):
        mean, std = summary.make_statistics()
        stats[name] = mean
        stats[name + '.std'] = std
    return stats
@property
def local_function_hooks(self):
    """Ordered dictionary of function hooks specific to this function.

    Unlike globally registered hooks, hooks in this mapping apply only
    to this function.  The mapping is created lazily on first access.
    """
    if self._local_function_hooks is None:
        self._local_function_hooks = collections.OrderedDict()
    return self._local_function_hooks
@property
def label(self):
    """Short text that represents the function.

    Defaults to the type name; override to give more information.
    """
    return self.__class__.__name__
@property
def output_data(self):
    """A tuple of the retained output arrays.

    Mainly used by :class:`Function`; users should prefer
    :meth:`get_retained_outputs` instead.

    Raises:
        RuntimeError: If the retained output data has been released.
    """
    if self._retained_output_data is None:
        raise RuntimeError('retained output data is gone')
    out_data = [None] * len(self.outputs)
    pairs = six.moves.zip(
        self._output_indexes_to_retain, self._retained_output_data)
    for index, data in pairs:
        out_data[index] = data
    return tuple(out_data)
'Computes output variables and grows the computational graph. Basic behavior is expressed in the documentation of :class:`FunctionNode`. .. note:: If the :data:`~Variable.data` attribute of input variables exist on a GPU device, that device is made current before calling :meth:`forward`, so implementors do not need to ...
def apply(self, inputs):
input_vars = [(x if isinstance(x, variable.Variable) else variable.Variable(x, requires_grad=False)) for x in inputs] in_data = tuple([x.data for x in input_vars]) requires_grad = any([x.requires_grad for x in input_vars]) if chainer.is_debug(): self.stack = traceback.extract_stack() if conf...
'Checks types of input data before forward propagation. This method is called before :meth:`forward` and validates the types of input variables using :ref:`the type checking utilities <type-check-utils>`. Args: in_types (~chainer.utils.type_check.TypeInfoTuple): The type information of input variables for :meth:`forwar...
def check_type_forward(self, in_types):
    """Checks types of input data before forward propagation.

    The default implementation accepts any input; override to validate
    ``in_types`` with the type checking utilities.

    Args:
        in_types (~chainer.utils.type_check.TypeInfoTuple): Type
            information of the input variables.
    """
'Computes the output arrays from the input arrays. It delegates the procedure to :meth:`forward_cpu` or :meth:`forward_gpu` by default. Which of them this method selects is determined by the type of input arrays. Implementations of :class:`FunctionNode` must implement either CPU/GPU methods or this method. Args: inputs...
def forward(self, inputs):
    """Computes the output arrays from the input arrays.

    Dispatches to :meth:`forward_gpu` when the first input is a CuPy
    array, and to :meth:`forward_cpu` otherwise.

    Args:
        inputs: Non-empty tuple of input arrays.

    Returns:
        Tuple of output arrays.
    """
    assert len(inputs) > 0
    if isinstance(inputs[0], cuda.ndarray):
        return self.forward_gpu(inputs)
    return self.forward_cpu(inputs)
'Computes the output arrays from the input NumPy arrays. Args: inputs: Tuple of input :class:`numpy.ndarray` objects. Returns: Tuple of output arrays. Each element can be NumPy or CuPy arrays. .. warning:: Implementation of :class:`FunctionNode` must take care that the return value must be a tuple even if it returns on...
def forward_cpu(self, inputs):
    """Computes the output arrays from the input NumPy arrays.

    Args:
        inputs: Tuple of input :class:`numpy.ndarray` objects.

    Returns:
        Tuple of output arrays.
    """
    raise NotImplementedError()
'Computes the output arrays from the input CuPy arrays. Args: inputs: Tuple of input :class:`cupy.ndarray` objects. Returns: Tuple of output arrays. Each element can be NumPy or CuPy arrays. .. warning:: Implementation of :class:`FunctionNode` must take care that the return value must be a tuple even if it returns only...
def forward_gpu(self, inputs):
    """Computes the output arrays from the input CuPy arrays.

    Args:
        inputs: Tuple of input :class:`cupy.ndarray` objects.

    Returns:
        Tuple of output arrays.
    """
    raise NotImplementedError()
'Lets specified input variable nodes keep data arrays. By calling this method from :meth:`forward`, the function node can specify which inputs are required for backprop. The input variables with retained arrays can be obtained by :meth:`get_retained_inputs` from :meth:`backward`. Unlike :class:`Function`, the function ...
def retain_inputs(self, indexes):
    """Marks the inputs at ``indexes`` to be retained for backprop.

    Call from :meth:`forward`; the retained variables can later be
    obtained with :meth:`get_retained_inputs` from :meth:`backward`.

    Args:
        indexes (iterable of int): Indexes of the inputs to retain.
    """
    self._input_indexes_to_retain = indexes
'Lets specified output variable nodes keep data arrays. By calling this method from :meth:`forward`, the function node can specify which outputs are required for backprop. If this method is not called, any output variables are not marked to keep the data array at the point of returning from :meth:`apply`. The output va...
def retain_outputs(self, indexes):
    """Marks the outputs at ``indexes`` to be retained for backprop.

    Call from :meth:`forward`; if never called, no output arrays are
    kept after :meth:`apply` returns.

    Args:
        indexes (iterable of int): Indexes of the outputs to retain.
    """
    self._output_indexes_to_retain = indexes
'Computes gradients w.r.t. specified inputs given output gradients. This method is used to compute one step of the backpropagation corresponding to the forward computation of this function node. Given the gradients w.r.t. output variables, this method computes the gradients w.r.t. specified input variables. Note that t...
def backward(self, target_input_indexes, grad_outputs):
    """Computes gradients w.r.t. specified inputs given output gradients.

    The default implementation returns ``None`` for every requested
    input, i.e. no gradient is propagated.

    Args:
        target_input_indexes (tuple of int): Indices of the inputs that
            require gradients.
        grad_outputs: Gradients w.r.t. the output variables.

    Returns:
        Tuple with one entry per requested input.
    """
    return (None,) * len(target_input_indexes)
'Computes gradients w.r.t. specified inputs and accumulates them. This method provides a way to fuse the backward computation and the gradient accumulations in the case that the multiple functions are applied to the same variable. Users have to override either of this method or :meth:`backward`. It is often simpler to ...
def backward_accumulate(self, target_input_indexes, grad_outputs, grad_inputs):
gxs = self.backward(target_input_indexes, grad_outputs) len_gxs = len(gxs) if (len_gxs == len(self.inputs)): gxs = tuple([gxs[i] for i in target_input_indexes]) elif (len_gxs != len(target_input_indexes)): raise ValueError(('number of gradients returned by %s (%s) is...
def get_retained_inputs(self):
    """Returns a tuple of retained input variables.

    Retrieves the input variables retained in :meth:`forward`.

    Returns:
        A tuple of retained input variables.
    """
    nodes = self.inputs
    return tuple(nodes[index].get_variable()
                 for index in self._input_indexes_to_retain)
'Returns a tuple of retained output variables. This method is used to retrieve the output variables retained in :meth:`forward`. Returns: A tuple of retained output variables. .. note:: This method does a tricky thing to support the case of an output node garbage-collected before this method is called; in this case, th...
def get_retained_outputs(self):
ret = [] outputs = self.outputs new_outputs = list(outputs) outputs_modified = False for (index, data) in six.moves.zip(self._output_indexes_to_retain, self._retained_output_data): output = outputs[index]() if (output is None): output_var = variable.Variable(data) ...
def unchain(self):
    """Purges in/out nodes and this function node itself from the graph."""
    # self.outputs holds weak references; dereference before unchaining.
    for weak_output in self.outputs:
        output = weak_output()
        if output is not None:
            output.unchain()
    self.inputs = None
'Registers a function hook. Args: hook (~chainer.function.FunctionHook): Function hook to be registered. name (str): Name of the function hook. The name must be unique among function hooks registered to this function. If ``None``, the default name of the function hook is used.'
def add_hook(self, hook, name=None):
if (not isinstance(hook, function_hook.FunctionHook)): raise TypeError('Hook must be of type FunctionHook') if (name is None): name = hook.name hooks = self.local_function_hooks if (name in hooks): raise KeyError(('Hook %s already exists' % name)) hook...
def delete_hook(self, name):
    """Unregisters the function hook of the given name.

    Args:
        name (str): Name of the function hook to be unregistered.

    Raises:
        KeyError: If no hook is registered under ``name``.
    """
    del self.local_function_hooks[name]
'Computes the loss value for given input and ground truth labels. Args: x (~chainer.Variable): Input of the weight matrix multiplication. t (~chainer.Variable): Batch of ground truth labels. reduce (str): Reduction option. Its value must be either ``\'sum\'`` or ``\'no\'``. Otherwise, :class:`ValueError` is raised. Ret...
def __call__(self, x, t, reduce='sum'):
    """Computes the loss value for given input and ground truth labels.

    Args:
        x (~chainer.Variable): Input of the weight matrix multiplication.
        t (~chainer.Variable): Batch of ground truth labels.
        reduce (str): Reduction option, either ``'sum'`` or ``'no'``.

    Returns:
        ~chainer.Variable: Loss value.
    """
    return negative_sampling.negative_sampling(
        x, t, self.W, self.sampler.sample, self.sample_size, reduce=reduce)
'Makes a Huffman tree from a dictionary containing word counts. This method creates a binary Huffman tree, that is required for :class:`BinaryHierarchicalSoftmax`. For example, ``{0: 8, 1: 5, 2: 6, 3: 4}`` is converted to ``((3, 1), (2, 0))``. Args: word_counts (dict of int key and int or float values): Dictionary repr...
@staticmethod def create_huffman_tree(word_counts):
if (len(word_counts) == 0): raise ValueError('Empty vocabulary') q = six.moves.queue.PriorityQueue() for (uid, (w, c)) in enumerate(six.iteritems(word_counts)): q.put((c, uid, w)) while (q.qsize() >= 2): (count1, id1, word1) = q.get() (count2, id2, word2) = q.get() ...
def __call__(self, x, t):
    """Computes the loss value for given input and ground truth labels.

    Args:
        x (~chainer.Variable): Input to the classifier at each node.
        t (~chainer.Variable): Batch of ground truth labels.

    Returns:
        ~chainer.Variable: Loss value.
    """
    # Shallow-copy the function object before applying it — presumably to
    # avoid sharing per-call state across invocations; TODO confirm.
    func = copy.copy(self._func)
    return func(x, t, self.W)
def argmax(self, xs):
    """Computes a state that maximizes a joint probability.

    Args:
        xs (list of Variable): Input vector for each label.

    Returns:
        tuple: A :class:`~chainer.Variable` holding each log-likelihood
        and a list representing the argmax path.
    """
    return crf1d.argmax_crf1d(self.cost, xs)
def __call__(self, x, t):
    """Computes the loss value for given input and ground truth labels.

    Args:
        x (~chainer.Variable): Input of the weight matrix multiplication.
        t (~chainer.Variable): Batch of ground truth labels.

    Returns:
        ~chainer.Variable: Loss value.
    """
    if hasattr(self, 'sample_data'):
        # Pre-set samples (e.g. fixed for testing) take precedence.
        sample_data = self.sample_data
    else:
        sample_data = self.sampler.sample((x.shape[0], self.sample_size))
    samples = variable.Variable(sample_data)
    return black_out.black_out(x, t, self.W, samples)
'__call__(self, inputs, outputs, disable=()) Executes a sub-network of the network. This function acts as an interpreter of the network definition for Caffe. On execution, it interprets each layer one by one, and if the bottom blobs are already computed, then emulates the layer and stores output blobs as :class:`~chain...
def __call__(self, inputs, outputs, disable=(), **kwargs):
argument.check_unexpected_kwargs(kwargs, train='train argument is not supported anymore. Use chainer.using_config') argument.assert_kwargs_empty(kwargs) variables = dict(inputs) for (func_name, bottom, top) in self.layers: if ((func_name in disable) or (func_name not in self...
'Applies the simplified dropconnect layer. Args: x (chainer.Variable or :class:`numpy.ndarray` or cupy.ndarray): Batch of input vectors. Its first dimension ``n`` is assumed to be the *minibatch dimension*. train (bool): If ``True``, executes simplified dropconnect. Otherwise, simplified dropconnect link works as a lin...
def __call__(self, x, train=True, mask=None, use_batchwise_mask=True):
    """Applies the simplified dropconnect layer.

    Args:
        x (chainer.Variable or ndarray): Batch of input vectors; the
            first dimension is the minibatch dimension.
        train (bool): If ``True``, executes simplified dropconnect;
            otherwise the layer acts as a plain linear layer.
        mask: Optional mask array, persisted on first use.
        use_batchwise_mask (bool): Whether to use a mask per batch element.

    Returns:
        ~chainer.Variable: Output of the layer.
    """
    if self.W.data is None:
        # Lazily infer the input size from the first batch.
        self._initialize_params(x.size // len(x.data))
    if mask is not None and 'mask' not in self.__dict__:
        self.add_persistent('mask', mask)
    return simplified_dropconnect.simplified_dropconnect(
        x, self.W, self.b, self.ratio, train, mask, use_batchwise_mask)
def __call__(self, x):
    """Applies the parametric ReLU activation function.

    Args:
        x (~chainer.Variable): Input variable.

    Returns:
        ~chainer.Variable: Output of the parametric ReLU function.
    """
    return prelu.prelu(x, self.W)
def __call__(self, x):
    """Applies the maxout layer.

    Args:
        x (~chainer.Variable): Batch of input vectors.

    Returns:
        ~chainer.Variable: Output of the maxout layer.
    """
    pre_activation = self.linear(x)
    return maxout.maxout(pre_activation, self.pool_size)
'Converts a pre-trained caffemodel to a chainer model. Args: path_caffemodel (str): Path of the pre-trained caffemodel. path_npz (str): Path of the converted chainer model.'
@classmethod def convert_caffemodel_to_npz(cls, path_caffemodel, path_npz, n_layers=50):
from chainer.links.caffe.caffe_function import CaffeFunction caffemodel = CaffeFunction(path_caffemodel) chainermodel = cls(pretrained_model=None, n_layers=n_layers) if (n_layers == 50): _transfer_resnet50(caffemodel, chainermodel) elif (n_layers == 101): _transfer_resnet101(caffemod...
'__call__(self, x, layers=[\'prob\']) Computes all the feature maps specified by ``layers``. .. warning:: ``test`` argument is not supported anymore since v2. Instead, use ``chainer.using_config(\'train\', train)``. See :func:`chainer.using_config`. Args: x (~chainer.Variable): Input variable. layers (list of str): The...
def __call__(self, x, layers=['prob'], **kwargs):
argument.check_unexpected_kwargs(kwargs, test='test argument is not supported anymore. Use chainer.using_config') argument.assert_kwargs_empty(kwargs) h = x activations = {} target_layers = set(layers) for (key, funcs) in self.functions.items(): if (len(target_layers...
'extract(self, images, layers=[\'pool5\'], size=(224, 224)) Extracts all the feature maps of given images. The difference of directly executing ``__call__`` is that it directly accepts images as an input and automatically transforms them to a proper variable. That is, it is also interpreted as a shortcut method that im...
def extract(self, images, layers=['pool5'], size=(224, 224), **kwargs):
argument.check_unexpected_kwargs(kwargs, test='test argument is not supported anymore. Use chainer.using_config', volatile='volatile argument is not supported anymore. Use chainer.using_config') argument.assert_kwargs_empty(kwargs) x = concat_examples([prepare(img, ...
'Computes all the probabilities of given images. Args: images (iterable of PIL.Image or numpy.ndarray): Input images. oversample (bool): If ``True``, it averages results across center, corners, and mirrors. Otherwise, it uses only the center. Returns: ~chainer.Variable: Output that contains the class probabilities of g...
def predict(self, images, oversample=True):
x = concat_examples([prepare(img, size=(256, 256)) for img in images]) if oversample: x = imgproc.oversample(x, crop_dims=(224, 224)) else: x = x[:, :, 16:240, 16:240] with function.no_backprop_mode(): x = Variable(self.xp.asarray(x)) y = self(x, layers=['prob'])['prob'] ...
@classmethod
def convert_caffemodel_to_npz(cls, path_caffemodel, path_npz):
    """Converts a pre-trained caffemodel to a chainer model.

    Args:
        path_caffemodel (str): Path of the pre-trained caffemodel.
        path_npz (str): Path of the converted chainer model.
    """
    # Deferred import: caffe support pulls in optional dependencies.
    from chainer.links.caffe.caffe_function import CaffeFunction
    model = CaffeFunction(path_caffemodel)
    npz.save_npz(path_npz, model, compression=False)
'__call__(self, x, layers=[\'prob\']) Computes all the feature maps specified by ``layers``. .. warning:: ``test`` argument is not supported anymore since v2. Instead, use ``chainer.using_config(\'train\', train)``. See :func:`chainer.using_config`. Args: x (~chainer.Variable): Input variable. layers (list of str): The...
def __call__(self, x, layers=['prob'], **kwargs):
argument.check_unexpected_kwargs(kwargs, test='test argument is not supported anymore. Use chainer.using_config') argument.assert_kwargs_empty(kwargs) h = x activations = {} target_layers = set(layers) for (key, funcs) in self.functions.items(): if (len(target_layers...
'extract(self, images, layers=[\'fc7\'], size=(224, 224)) Extracts all the feature maps of given images. The difference of directly executing ``__call__`` is that it directly accepts images as an input and automatically transforms them to a proper variable. That is, it is also interpreted as a shortcut method that impl...
def extract(self, images, layers=['fc7'], size=(224, 224), **kwargs):
argument.check_unexpected_kwargs(kwargs, test='test argument is not supported anymore. Use chainer.using_config', volatile='volatile argument is not supported anymore. Use chainer.using_config') argument.assert_kwargs_empty(kwargs) x = concat_examples([prepare(img, ...
'Computes all the probabilities of given images. Args: images (iterable of PIL.Image or numpy.ndarray): Input images. oversample (bool): If ``True``, it averages results across center, corners, and mirrors. Otherwise, it uses only the center. Returns: ~chainer.Variable: Output that contains the class probabilities of g...
def predict(self, images, oversample=True):
x = concat_examples([prepare(img, size=(256, 256)) for img in images]) if oversample: x = imgproc.oversample(x, crop_dims=(224, 224)) else: x = x[:, :, 16:240, 16:240] with function.no_backprop_mode(): x = Variable(self.xp.asarray(x)) y = self(x, layers=['prob'])['prob'] ...
@classmethod
def convert_caffemodel_to_npz(cls, path_caffemodel, path_npz):
    """Converts a pre-trained caffemodel to a chainer model.

    Args:
        path_caffemodel (str): Path of the pre-trained caffemodel.
        path_npz (str): Path of the converted chainer model.
    """
    # Deferred import: caffe support pulls in optional dependencies.
    from chainer.links.caffe.caffe_function import CaffeFunction
    caffe_model = CaffeFunction(path_caffemodel)
    chainer_model = cls(pretrained_model=None)
    _transfer_googlenet(caffe_model, chainer_model)
    npz.save_npz(path_npz, chainer_model, compression=False)
'__call__(self, x, layers=[\'prob\']) Computes all the feature maps specified by ``layers``. .. warning:: ``train`` argument is not supported anymore since v2. Instead, use ``chainer.using_config(\'train\', train)``. See :func:`chainer.using_config`. Args: x (~chainer.Variable): Input variable. It should be prepared by...
def __call__(self, x, layers=['prob'], **kwargs):
argument.check_unexpected_kwargs(kwargs, train='train argument is not supported anymore. Use chainer.using_config') argument.assert_kwargs_empty(kwargs) h = x activations = {} inception_4a_cache = None inception_4d_cache = None target_layers = set(layers) for (key, f...
'extract(self, images, layers=[\'pool5\'], size=(224, 224)) Extracts all the feature maps of given images. The difference of directly executing ``__call__`` is that it directly accepts images as an input and automatically transforms them to a proper variable. That is, it is also interpreted as a shortcut method that im...
def extract(self, images, layers=['pool5'], size=(224, 224), **kwargs):
argument.check_unexpected_kwargs(kwargs, train='train argument is not supported anymore. Use chainer.using_config', volatile='volatile argument is not supported anymore. Use chainer.using_config') argument.assert_kwargs_empty(kwargs) x = concat_examples([prepare(img...
'Computes all the probabilities of given images. Args: images (iterable of PIL.Image or numpy.ndarray): Input images. oversample (bool): If ``True``, it averages results across center, corners, and mirrors. Otherwise, it uses only the center. Returns: ~chainer.Variable: Output that contains the class probabilities of g...
def predict(self, images, oversample=True):
x = concat_examples([prepare(img, size=(256, 256)) for img in images]) if oversample: x = imgproc.oversample(x, crop_dims=(224, 224)) else: x = x[:, :, 16:240, 16:240] with function.no_backprop_mode(): x = Variable(self.xp.asarray(x)) y = self(x, layers=['prob'])['prob'] ...
'Computes the loss value for an input and label pair. It also computes accuracy and stores it to the attribute. Args: args (list of ~chainer.Variable): Input minibatch. kwargs (dict of ~chainer.Variable): Input minibatch. When ``label_key`` is ``int``, the correpoding element in ``args`` is treated as ground truth labe...
def __call__(self, *args, **kwargs):
if isinstance(self.label_key, int): if (not ((- len(args)) <= self.label_key < len(args))): msg = ('Label key %d is out of bounds' % self.label_key) raise ValueError(msg) t = args[self.label_key] if (self.label_key == (-1)): args = args[:...
def __call__(self, x):
    """Applies layer normalization to the given input.

    Args:
        x (~chainer.Variable): Batch vectors of shape
            ``(batch_size, unit_size)``, e.g. the output of
            :func:`~chainer.functions.linear`.

    Returns:
        ~chainer.Variable: Output of the layer normalization.
    """
    if self.gamma.data is None:
        # Lazily infer the unit size from the first batch.
        self._initialize_params(x.size // x.shape[0])
    return layer_normalization.layer_normalization(
        x, self.gamma, self.beta, self.eps)
'__call__(self, x, finetune=False) Invokes the forward propagation of BatchNormalization. In training mode, the BatchNormalization computes moving averages of mean and variance for evaluatino during training, and normalizes the input using batch statistics. .. warning:: ``test`` argument is not supported anymore since ...
def __call__(self, x, **kwargs):
argument.check_unexpected_kwargs(kwargs, test='test argument is not supported anymore. Use chainer.using_config') (finetune,) = argument.parse_kwargs(kwargs, ('finetune', False)) if hasattr(self, 'gamma'): gamma = self.gamma else: with cuda.get_device_from_id(self._d...
def start_finetuning(self):
    """Resets the population count for collecting population statistics.

    May be skipped the first time the fine-tuning mode is used; call it
    before starting the fine-tuning mode again otherwise.
    """
    self.N = 0
def __call__(self, volatile='off'):
    """Returns a copy of the parameter variable with given volatility.

    Args:
        volatile (~chainer.Flag): The volatility of the returned variable.

    Returns:
        ~chainer.Variable: A copy of the parameter variable.
    """
    copied = identity.identity(self.W)
    copied.volatile = volatile
    return identity.identity(copied)
'Returns new cell state and updated output of LSTM. Args: c (~chainer.Variable): Cell states of LSTM units. h (~chainer.Variable): Output at the previous time step. x (~chainer.Variable): A new batch from the input sequence. Returns: tuple of ~chainer.Variable: Returns ``(c_new, h_new)``, where ``c_new`` represents new...
def __call__(self, c, h, x):
if (self.upward.W.data is None): in_size = (x.size // x.shape[0]) with cuda.get_device_from_id(self._device_id): self.upward._initialize_params(in_size) self._initialize_params() lstm_in = self.upward(x) if (h is not None): lstm_in += self.lateral(h) if (c...
def set_state(self, c, h):
    """Sets the internal state.

    Assigns the :attr:`c` and :attr:`h` attributes after moving the
    given variables to the device this link resides on.

    Args:
        c (~chainer.Variable): A new cell states of LSTM units.
        h (~chainer.Variable): A new output at the previous time step.
    """
    assert isinstance(c, variable.Variable)
    assert isinstance(h, variable.Variable)
    if self.xp == numpy:
        c.to_cpu()
        h.to_cpu()
    else:
        c.to_gpu(self._device_id)
        h.to_gpu(self._device_id)
    self.c = c
    self.h = h
def reset_state(self):
    """Resets the internal state.

    Sets ``None`` to the :attr:`c` and :attr:`h` attributes.
    """
    self.c = None
    self.h = None
'Updates the internal state and returns the LSTM outputs. Args: x (~chainer.Variable): A new batch from the input sequence. Returns: ~chainer.Variable: Outputs of updated LSTM units.'
def __call__(self, x):
if (self.upward.W.data is None): with cuda.get_device_from_id(self._device_id): in_size = (x.size // x.shape[0]) self.upward._initialize_params(in_size) self._initialize_params() batch = x.shape[0] lstm_in = self.upward(x) h_rest = None if (self.h is not N...
def __call__(self, x):
    """Applies the dilated convolution layer.

    Args:
        x (~chainer.Variable): Input image.

    Returns:
        ~chainer.Variable: Output of the convolution.
    """
    if self.W.data is None:
        # Lazily infer the input channel count from the first batch.
        self._initialize_params(x.shape[1])
    return dilated_convolution_2d.dilated_convolution_2d(
        x, self.W, self.b, self.stride, self.pad, self.dilate)
def __call__(self, x):
    """Applies the linear layer.

    Args:
        x (~chainer.Variable): Batch of input vectors.

    Returns:
        ~chainer.Variable: Output of the linear layer.
    """
    if self.W.data is None:
        # Lazily infer the input size from the first batch.
        self._initialize_params(x.size // x.shape[0])
    return linear.linear(x, self.W, self.b)
'Applies broadcasted elementwise product. Args: xs (list of Variables): Input variables whose length should be one if the link has a learnable weight parameter, otherwise should be two.'
def __call__(self, *xs):
axis = self.axis if hasattr(self, 'W'): if chainer.is_debug(): assert (len(xs) == 1) (x,) = xs W = self.W z = scale.scale(x, W, axis) else: if chainer.is_debug(): assert (len(xs) == 2) (x, y) = xs z = scale.scale(x, y, axis) ...
'__call__(self, x) Does forward propagation.'
def __call__(self, *args):
n_args = len(args) msg = ('Invalid argument. The length of GRU.__call__ must be 1. But %d is given. ' % n_args) if ((n_args == 0) or (n_args >= 3)): raise ValueError(msg) elif (n_args == 2): msg += 'In Chainer v2, chainer.links.GRU is ...
def __call__(self, *xs):
    """Applies broadcasted elementwise summation.

    Args:
        xs (list of Variables): Input variables; length should be one if
            the link has a learnable bias parameter, two otherwise.

    Returns:
        ~chainer.Variable: Output of the bias addition.
    """
    axis = self.axis
    if hasattr(self, 'b'):
        if chainer.is_debug():
            assert len(xs) == 1
        x, = xs
        return bias.bias(x, self.b, axis)
    else:
        if chainer.is_debug():
            assert len(xs) == 2
        x, y = xs
        return bias.bias(x, y, axis)
'__call__(self, hx, cx, xs) Calculate all hidden states and cell states. .. warning:: ``train`` argument is not supported anymore since v2. Instead, use ``chainer.using_config(\'train\', train)``. See :func:`chainer.using_config`. Args: hx (~chainer.Variable or None): Initial hidden states. If ``None`` is specified zer...
def __call__(self, hx, cx, xs, **kwargs):
argument.check_unexpected_kwargs(kwargs, train='train argument is not supported anymore. Use chainer.using_config') argument.assert_kwargs_empty(kwargs) assert isinstance(xs, (list, tuple)) indices = n_step_rnn.argsort_list_descent(xs) xs = n_step_rnn.permutate_list(xs, indices,...
def __call__(self, x):
    """Extracts the word embeddings of the given IDs.

    Args:
        x (~chainer.Variable): Batch vectors of IDs.

    Returns:
        ~chainer.Variable: Batch of corresponding embeddings.
    """
    # Rows of W are looked up by ID; ignore_label entries are skipped.
    embeddings = embed_id.embed_id(
        x, self.W, ignore_label=self.ignore_label)
    return embeddings
'Returns new cell state and output of Child-Sum TreeLSTM. Args: cshsx (list of :class:`~chainer.Variable`): Variable arguments which include all cell vectors and all output vectors of variable children, and an input vector. Returns: tuple of ~chainer.Variable: Returns :math:`(c_{new}, h_{new})`, where :math:`c_{new}` r...
def __call__(self, *cshsx):
cs = cshsx[:(len(cshsx) // 2)] hs = cshsx[(len(cshsx) // 2):(-1)] x = cshsx[(-1)] assert (len(cs) >= 1) assert (len(hs) >= 1) assert (len(cs) == len(hs)) if (x is None): if any(((c is not None) for c in cs)): base = [c for c in cs if (c is not None)][0] elif any((...
'Returns new cell state and output of N-ary TreeLSTM. Args: cshsx (list of :class:`~chainer.Variable`): Arguments which include all cell vectors and all output vectors of fixed-length children, and an input vector. The number of arguments must be same as ``n_ary * 2 + 1``. Returns: tuple of ~chainer.Variable: Returns :...
def __call__(self, *cshsx):
assert (len(cshsx) == ((self.n_ary * 2) + 1)) cs = cshsx[:self.n_ary] hs = cshsx[self.n_ary:(-1)] x = cshsx[(-1)] if (x is None): if any(((c is not None) for c in cs)): base = [c for c in cs if (c is not None)][0] elif any(((h is not None) for h in hs)): base ...
def __call__(self, x):
    """Applies the depthwise convolution layer.

    Args:
        x (chainer.Variable or :class:`numpy.ndarray` or cupy.ndarray):
            Input image.

    Returns:
        ~chainer.Variable: Output of the depthwise convolution.
    """
    # Infer the channel count from the input on the first call.
    if self.W.data is None:
        self._initialize_params(x.shape[1])
    return depthwise_convolution_2d.depthwise_convolution_2d(
        x, self.W, self.b, self.stride, self.pad)
def __call__(self, x):
    """Computes the output of the mlpconv layer.

    Args:
        x (~chainer.Variable): Input image.

    Returns:
        ~chainer.Variable: Output of the mlpconv layer.
    """
    activate = self.activation
    h = x
    # Apply the activation after every layer except the last one.
    for layer in self[:-1]:
        h = activate(layer(h))
    return self[-1](h)
def __call__(self, x):
    """Applies the N-dimensional convolution layer.

    Args:
        x (~chainer.Variable): Input image.

    Returns:
        ~chainer.Variable: Output of convolution.
    """
    return convolution_nd.convolution_nd(
        x, self.W, self.b, self.stride, self.pad,
        cover_all=self.cover_all)
def __call__(self, x):
    """Applies the convolution layer.

    Args:
        x (~chainer.Variable): Input image.

    Returns:
        ~chainer.Variable: Output of the convolution.
    """
    # Lazy weight initialization from the input's channel dimension.
    if self.W.data is None:
        self._initialize_params(x.shape[1])
    return convolution_2d.convolution_2d(
        x, self.W, self.b, self.stride, self.pad)
'__call__(self, hx, xs) Calculate all hidden states and cell states. .. warning:: ``train`` argument is not supported anymore since v2. Instead, use ``chainer.using_config(\'train\', train)``. See :func:`chainer.using_config`. Args: hx (~chainer.Variable or None): Initial hidden states. If ``None`` is specified zero-ve...
def __call__(self, hx, xs, **kwargs):
argument.check_unexpected_kwargs(kwargs, train='train argument is not supported anymore. Use chainer.using_config') argument.assert_kwargs_empty(kwargs) assert isinstance(xs, (list, tuple)) indices = argsort_list_descent(xs) xs = permutate_list(xs, indices, inv=False) if (hx...
'__call__(self, hx, xs) Calculate all hidden states and cell states. .. warning:: ``train`` argument is not supported anymore since v2. Instead, use ``chainer.using_config(\'train\', train)``. See :func:`chainer.using_config`. Args: hx (~chainer.Variable or None): Initial hidden states. If ``None`` is specified zero-ve...
def __call__(self, hx, xs, **kwargs):
argument.check_unexpected_kwargs(kwargs, train='train argument is not supported anymore. Use chainer.using_config') argument.assert_kwargs_empty(kwargs) assert isinstance(xs, (list, tuple)) indices = argsort_list_descent(xs) xs = permutate_list(xs, indices, inv=False) if (hx...
def __call__(self, e1, e2):
    """Applies the bilinear function to inputs and the internal parameters.

    Args:
        e1 (~chainer.Variable): Left input.
        e2 (~chainer.Variable): Right input.

    Returns:
        ~chainer.Variable: Output variable.
    """
    if not self.nobias:
        # Full form: quadratic term plus the two linear terms and bias.
        return bilinear.bilinear(
            e1, e2, self.W, self.V1, self.V2, self.b)
    return bilinear.bilinear(e1, e2, self.W)
def reset_state(self):
    """Resets the internal states.

    It sets ``None`` to the :attr:`c` and :attr:`h` attributes.
    """
    self.c = None
    self.h = None
'Updates the internal state and returns the LSTM outputs. Args: x (~chainer.Variable): A new batch from the input sequence. Returns: ~chainer.Variable: Outputs of updated LSTM units.'
def __call__(self, x):
lstm_in = self.upward(x) if (self.h is not None): lstm_in += self.lateral(self.h) if (self.c is None): xp = self.xp with cuda.get_device_from_id(self._device_id): self.c = variable.Variable(xp.zeros((x.shape[0], self.state_size), dtype=x.dtype)) lstm_in = reshape.resh...
def __call__(self, x):
    """Computes the output of the Highway module.

    Args:
        x (~chainer.Variable): Input variable.

    Returns:
        Variable: Output variable. Its array has the same spatial size
        and the same minibatch size as the input array.
    """
    plain_out = self.activate(self.plain(x))
    gate = sigmoid.sigmoid(self.transform(x))
    # Blend the transformed path with the identity (carry) path.
    return plain_out * gate + x * (1 - gate)
def set_state(self, c, h):
    """Sets the internal state.

    It sets the :attr:`c` and :attr:`h` attributes.

    Args:
        c (~chainer.Variable): A new cell states of LSTM units.
        h (~chainer.Variable): A new output at the previous time step.
    """
    assert isinstance(c, variable.Variable)
    assert isinstance(h, variable.Variable)
    if self.xp is numpy:
        # Move both state variables to the CPU (in place).
        c.to_cpu()
        h.to_cpu()
    else:
        # Move both state variables to this link's GPU device (in place).
        c.to_gpu(self._device_id)
        h.to_gpu(self._device_id)
    self.c = c
    self.h = h
def reset_state(self):
    """Resets the internal state.

    It sets ``None`` to the :attr:`c` and :attr:`h` attributes.
    """
    self.c = None
    self.h = None
'Updates the internal state and returns the LSTM outputs. Args: x (~chainer.Variable): A new batch from the input sequence. Returns: ~chainer.Variable: Outputs of updated LSTM units.'
def __call__(self, x):
lstm_in = self.upward(x) if (self.h is not None): lstm_in += self.lateral(self.h) else: xp = self.xp with cuda.get_device_from_id(self._device_id): self.h = variable.Variable(xp.zeros((len(x.data), self.state_size), dtype=x.data.dtype)) if (self.c is None): xp...
def __call__(self, x):
    """Computes the output of the Inception module.

    Args:
        x (~chainer.Variable): Input variable.

    Returns:
        Variable: Output variable. Its array has the same spatial size
        and the same minibatch size as the input array. The channel
        dimension has size ``out1 + out3 + out5 + proj_pool``.
    """
    # Four parallel branches: 1x1, projected 3x3, projected 5x5, and
    # projected 3x3 max-pooling.
    branches = (
        self.conv1(x),
        self.conv3(relu.relu(self.proj3(x))),
        self.conv5(relu.relu(self.proj5(x))),
        self.projp(max_pooling_2d.max_pooling_2d(x, 3, stride=1, pad=1)),
    )
    # Concatenate along the channel axis and apply the final ReLU.
    return relu.relu(concat.concat(branches, axis=1))
@property
def creator(self):
    """Function object that created this variable node.

    When the function is implemented with the old-style API (i.e., it
    uses :class:`Function`), this property returns that
    :class:`Function` object, extracted from its
    :class:`FunctionAdapter` wrapper. Returns ``None`` when the node
    has no creator.
    """
    creator_node = self._creator_node
    if creator_node is None:
        return None
    if isinstance(creator_node, chainer.function.FunctionAdapter):
        # Unwrap the adapter to expose the old-style Function object.
        return creator_node.function
    return creator_node
@property
def creator_node(self):
    """Function node that has this variable as an output.

    See :class:`FunctionNode` for the definition of a function node.
    """
    node = self._creator_node
    return node
@property
def data(self):
    """Data array of the corresponding variable.

    If the data is not available, it returns ``None``.
    """
    array = self._data
    return array
@property
def grad(self):
    """Gradient array of the corresponding variable.

    If the variable is not available, it returns ``None``.
    """
    var = self.get_variable()
    if var is None:
        return None
    return var.grad
@property
def grad_var(self):
    """Gradient variable of the corresponding variable.

    If the corresponding variable is not available, it returns ``None``.
    """
    var = self.get_variable()
    if var is None:
        return None
    return var._grad_var
@property
def label(self):
    """Short text that represents the variable node."""
    if self.shape == ():
        # Scalar node: the dtype alone identifies it.
        return str(self.dtype)
    dims = ', '.join(map(str, self.shape))
    return '(%s), %s' % (dims, self.dtype)
@property
def requires_grad(self):
    """It indicates that ``grad`` will be set in backward calculation."""
    flag = self._requires_grad
    return flag
def get_variable(self):
    """Returns the corresponding :class:`Variable` object.

    VariableNode holds a weak reference to its variable. If that
    reference is still alive, the referenced variable is returned.
    Otherwise a new :class:`Variable` is built from this node and
    returned.

    Returns:
        Variable: The variable object that refers to this node.
    """
    existing = self._variable()
    if existing is not None:
        return existing
    # The weak reference is dead: rebuild a Variable around this node
    # and link it back.
    new_var = Variable(
        self.data, name=self.name, requires_grad=self._requires_grad)
    new_var._node = self
    return new_var