pub trait BroadcastTo: HasErr + HasShape {
    // Required method
    fn try_broadcast_like<Dst: HasShape, Ax: Axes>(
        self,
        dst: &Dst
    ) -> Result<Self::WithShape<Dst::Shape>, Self::Err>
       where Self::Shape: BroadcastShapeTo<Dst::Shape, Ax>;

    // Provided methods
    fn broadcast<Dst: ConstShape, Ax: Axes>(self) -> Self::WithShape<Dst>
       where Self::Shape: BroadcastShapeTo<Dst, Ax> { ... }
    fn try_broadcast<Dst: ConstShape, Ax: Axes>(
        self
    ) -> Result<Self::WithShape<Dst>, Self::Err>
       where Self::Shape: BroadcastShapeTo<Dst, Ax> { ... }
    fn broadcast_like<Dst: HasShape, Ax: Axes>(
        self,
        dst: &Dst
    ) -> Self::WithShape<Dst::Shape>
       where Self::Shape: BroadcastShapeTo<Dst::Shape, Ax> { ... }
}

Broadcast self into a new shape.

PyTorch equivalent: torch.broadcast_to.

Use the shape generic or the output type annotation to dictate the target shape:

use dfdx::prelude::*;
let dev: Cpu = Default::default();
let a: Tensor<Rank2<3, 7>, f32, _> = dev.zeros();
// broadcast axis 1
let _: Tensor<Rank3<3, 5, 7>, _, _> = a.clone().broadcast();
// broadcast axes 0 and 2
let _ = a.clone().broadcast::<Rank4<1, 3, 5, 7>, _>();

Use the axes generic to disambiguate:

use dfdx::prelude::*;
let dev: Cpu = Default::default();
let a: Tensor<Rank1<1>, f32, _> = dev.zeros();
// It's ambiguous which axes to broadcast here, so explicitly specify axes 0 and 2
let _: Tensor<Rank3<1, 1, 1>, _, _> = a.clone().broadcast::<_, Axes2<0, 2>>();

Required Methods

fn try_broadcast_like<Dst: HasShape, Ax: Axes>(
    self,
    dst: &Dst
) -> Result<Self::WithShape<Dst::Shape>, Self::Err>
   where Self::Shape: BroadcastShapeTo<Dst::Shape, Ax>,

Fallible version of BroadcastTo::broadcast_like.
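
A minimal sketch of the fallible call, assuming a Cpu device from dfdx's prelude; src and dst are placeholder tensors, and the target shape is read from dst:

use dfdx::prelude::*;
let dev: Cpu = Default::default();
let src: Tensor<Rank2<3, 7>, f32, _> = dev.zeros();
let dst: Tensor<Rank3<3, 5, 7>, f32, _> = dev.zeros();
// Returns Err (e.g. on allocation failure) instead of panicking.
let b: Tensor<Rank3<3, 5, 7>, f32, _> = src.try_broadcast_like(&dst).expect("broadcast failed");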

Provided Methods

fn broadcast<Dst: ConstShape, Ax: Axes>(self) -> Self::WithShape<Dst>
   where Self::Shape: BroadcastShapeTo<Dst, Ax>,

Broadcast into shape Dst along axes Ax.

fn try_broadcast<Dst: ConstShape, Ax: Axes>(
    self
) -> Result<Self::WithShape<Dst>, Self::Err>
   where Self::Shape: BroadcastShapeTo<Dst, Ax>,

Fallible version of BroadcastTo::broadcast.
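
A minimal sketch of handling the Result instead of panicking, again assuming a Cpu device from dfdx's prelude; the Ok branch only pins down the broadcasted type:

use dfdx::prelude::*;
let dev: Cpu = Default::default();
let a: Tensor<Rank2<3, 7>, f32, _> = dev.zeros();
match a.try_broadcast::<Rank3<3, 5, 7>, _>() {
    // On success, the broadcasted tensor is returned.
    Ok(b) => {
        let _: Tensor<Rank3<3, 5, 7>, f32, _> = b;
    }
    // On failure (e.g. the device cannot allocate), the error is returned instead of a panic.
    Err(e) => eprintln!("broadcast failed: {e:?}"),
}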

fn broadcast_like<Dst: HasShape, Ax: Axes>(
    self,
    dst: &Dst
) -> Self::WithShape<Dst::Shape>
   where Self::Shape: BroadcastShapeTo<Dst::Shape, Ax>,

Same as BroadcastTo::broadcast, but the target shape is taken from a reference to a value with that shape (dst).
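
A minimal sketch, assuming a Cpu device and the dfdx prelude; target is a placeholder tensor whose shape the output copies:

use dfdx::prelude::*;
let dev: Cpu = Default::default();
let a: Tensor<Rank1<7>, f32, _> = dev.zeros();
let target: Tensor<Rank2<5, 7>, f32, _> = dev.zeros();
// The output shape comes from `target`, so no shape generic is spelled out at the call site.
let b: Tensor<Rank2<5, 7>, f32, _> = a.broadcast_like(&target);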

Implementors

impl<S: Shape, E, D: Storage<E>, T: Tape<E, D>> BroadcastTo for Tensor<S, E, D, T>