Trait coaster_nn::Softmax

source ·
/// Provides the functionality for a Backend to support Softmax operations.
pub trait Softmax<F>: NN<F> {
    // Required methods

    /// Computes a [Softmax](https://en.wikipedia.org/wiki/Softmax_function)
    /// over the input tensor `x`.
    ///
    /// Saves the result to `result`.
    fn softmax(
        &self,
        x: &SharedTensor<F>,
        result: &mut SharedTensor<F>
    ) -> Result<(), Error>;

    /// Computes the gradient of a
    /// [Softmax](https://en.wikipedia.org/wiki/Softmax_function)
    /// over the input tensor `x`.
    ///
    /// Saves the result to `result_diff`.
    fn softmax_grad(
        &self,
        x: &SharedTensor<F>,
        x_diff: &SharedTensor<F>,
        result_diff: &mut SharedTensor<F>
    ) -> Result<(), Error>;
}
Expand description

Provides the functionality for a Backend to support Softmax operations.

Required Methods§

source

fn softmax( &self, x: &SharedTensor<F>, result: &mut SharedTensor<F> ) -> Result<(), Error>

Computes a [Softmax](https://en.wikipedia.org/wiki/Softmax_function) over the input Tensor `x`.

Saves the result to result.

source

fn softmax_grad( &self, x: &SharedTensor<F>, x_diff: &SharedTensor<F>, result_diff: &mut SharedTensor<F> ) -> Result<(), Error>

Computes the gradient of a [Softmax](https://en.wikipedia.org/wiki/Softmax_function) over the input Tensor `x`.

Saves the result to result_diff.

Object Safety§

This trait is not object safe.

Implementations on Foreign Types§

source§

impl Softmax<f32> for Backend<Native>

source§

fn softmax( &self, x: &SharedTensor<f32>, result: &mut SharedTensor<f32> ) -> Result<(), Error>

source§

fn softmax_grad( &self, x: &SharedTensor<f32>, x_diff: &SharedTensor<f32>, result_diff: &mut SharedTensor<f32> ) -> Result<(), Error>

source§

impl Softmax<f64> for Backend<Native>

source§

fn softmax( &self, x: &SharedTensor<f64>, result: &mut SharedTensor<f64> ) -> Result<(), Error>

source§

fn softmax_grad( &self, x: &SharedTensor<f64>, x_diff: &SharedTensor<f64>, result_diff: &mut SharedTensor<f64> ) -> Result<(), Error>

source§

impl<T> Softmax<T> for Backend<Cuda>
where T: Float + Default + DataTypeInfo,

source§

fn softmax( &self, x: &SharedTensor<T>, result: &mut SharedTensor<T> ) -> Result<(), Error>

source§

fn softmax_grad( &self, x: &SharedTensor<T>, x_diff: &SharedTensor<T>, result_diff: &mut SharedTensor<T> ) -> Result<(), Error>

Implementors§