pub trait LogSumExpTo: HasErr + HasShape {
    // Required method
    fn try_logsumexp<Dst: Shape, Ax: Axes>(
        self
    ) -> Result<Self::WithShape<Dst>, Self::Err>
       where Self::Shape: ReduceShapeTo<Dst, Ax>;

    // Provided method
    fn logsumexp<Dst: Shape, Ax: Axes>(self) -> Self::WithShape<Dst>
       where Self::Shape: ReduceShapeTo<Dst, Ax> { ... }
}

Reduction along multiple axes using LogSumExp.

Required Methods

fn try_logsumexp<Dst: Shape, Ax: Axes>(
    self,
) -> Result<Self::WithShape<Dst>, Self::Err>
where
    Self::Shape: ReduceShapeTo<Dst, Ax>,

Fallible version of LogSumExpTo::logsumexp
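A minimal sketch of handling the fallible variant, assuming dfdx's Cpu device (the concrete error type comes from the tensor's HasErr impl):

let dev: Cpu = Default::default();
let t: Tensor<Rank3<2, 4, 6>, f32, _> = dev.zeros();
// Same reduction as `logsumexp`, but returns Err instead of panicking
// if the backend fails (e.g. an allocation failure).
match t.try_logsumexp::<Rank2<2, 4>, _>() {
    Ok(r) => println!("reduced shape: {:?}", r.shape()),
    Err(e) => eprintln!("logsumexp failed: {:?}", e),
}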

Provided Methods

fn logsumexp<Dst: Shape, Ax: Axes>(self) -> Self::WithShape<Dst>
where
    Self::Shape: ReduceShapeTo<Dst, Ax>,

LogSumExp reduction.

PyTorch equivalent: t.exp().sum(Axes).log()

Related functions: ln(), exp(), log_softmax(), softmax()
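Note that the naive exp-sum-log formula above overflows when entries are large; the standard stable evaluation subtracts the maximum first. A scalar sketch of that identity, for illustration only (the helper name stable_logsumexp is ours, and this is not necessarily the crate's internal implementation):

fn stable_logsumexp(xs: &[f32]) -> f32 {
    // logsumexp(x) = m + ln(sum(exp(x_i - m))) with m = max(x),
    // which keeps exp() from overflowing for large inputs.
    let m = xs.iter().copied().fold(f32::NEG_INFINITY, f32::max);
    m + xs.iter().map(|&x| (x - m).exp()).sum::<f32>().ln()
}

assert!((stable_logsumexp(&[0.0, 0.0]) - 2f32.ln()).abs() < 1e-6);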

Example:

use dfdx::prelude::*;
let dev: Cpu = Default::default();
let t: Tensor<Rank3<2, 4, 6>, f32, _> = dev.zeros();
let _ = t.logsumexp::<Rank2<2, 4>, _>(); // or `logsumexp::<_, Axis<2>>()`

Multi axis logsumexp:

let _ = t.logsumexp::<Rank1<4>, _>(); // or `logsumexp::<_, Axes2<0, 2>>()`

Implementors

impl<S: Shape, E: Dtype, D: Device<E>, T: Tape<E, D>> LogSumExpTo for Tensor<S, E, D, T>
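Because the blanket impl accepts any Tape, logsumexp can take part in a traced computation. A hedged sketch assuming dfdx's tracing API (leaky_trace, backward, Gradients::get):

use dfdx::prelude::*;

let dev: Cpu = Default::default();
let t: Tensor<Rank2<3, 5>, f32, _> = dev.sample_normal();
// Trace the computation so gradients flow through the reduction.
let loss = t.leaky_trace().logsumexp::<Rank1<3>, _>().sum::<Rank0, _>();
let grads = loss.backward();
// The gradient has t's shape; for logsumexp it is softmax along the reduced axis.
let g = grads.get(&t);
assert_eq!(g.shape(), t.shape());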