use burn::tensor::{backend::AutodiffBackend, Tensor};
/// Computes a per-sample distance vector from the pairwise squared
/// Euclidean distance matrix of `x`.
///
/// `x` has shape `[n_samples, n_features]`; the result has shape `[n_samples]`.
///
/// NOTE(review): despite the name, no square root is taken, so values are
/// *squared* distances — confirm whether `sqrt()` was intended.
pub fn euclidean<B: AutodiffBackend>(x: Tensor<B, 2>) -> Tensor<B, 1> {
    let n_samples = x.dims()[0];

    // Broadcast to form all pairwise differences:
    // [1, n, f] - [n, 1, f] -> [n, n, f].
    let x_expanded = x.clone().unsqueeze::<3>();
    // Last use of `x`: consume it directly, no clone needed.
    let x_transposed = x.unsqueeze_dim(1);
    let diff = x_expanded - x_transposed;

    // Squared distance per pair: square elementwise, sum over the feature axis.
    let squared_diff = diff.powi_scalar(2);
    let pairwise_squared_distances = squared_diff.sum_dim(2);

    // Zero out everything below the main diagonal.
    let pairwise_distances = pairwise_squared_distances.triu(0);

    // NOTE(review): after triu(0), column 0 is zero in every row except row 0,
    // whose entry is the (zero) self-distance — so this slice is all zeros.
    // Confirm intent; slicing *before* triu would give each sample's distance
    // to the first sample.
    pairwise_distances
        .slice([0..n_samples, 0..1])
        .reshape([n_samples])
}
/// For each sample in `x`, sums its `k` largest pairwise squared Euclidean
/// distances (after zeroing the lower triangle of the distance matrix).
///
/// `x` has shape `[n_samples, n_features]`; the result has shape `[n_samples]`.
///
/// NOTE(review): `topk` selects the *largest* values, i.e. the k farthest
/// neighbors, which is at odds with the "knn" (nearest-neighbor) name — and,
/// as in `euclidean`, no square root is taken. Confirm both against intent.
pub fn euclidean_knn<B: AutodiffBackend>(x: Tensor<B, 2>, k: usize) -> Tensor<B, 1> {
    let n_samples = x.dims()[0];

    // Broadcast to form all pairwise differences:
    // [1, n, f] - [n, 1, f] -> [n, n, f].
    let x_expanded = x.clone().unsqueeze::<3>();
    // Last use of `x`: consume it directly, no clone needed.
    let x_transposed = x.unsqueeze_dim(1);
    let diff = x_expanded - x_transposed;

    // Squared distance per pair: square elementwise, sum over the feature axis.
    let squared_diff = diff.powi_scalar(2);
    let pairwise_squared_distances = squared_diff.sum_dim(2);

    // Zero out everything below the main diagonal.
    let pairwise_distances = pairwise_squared_distances.triu(0);

    // Take the k largest entries per row; indices are not needed.
    let (top_k_distances, _top_k_indices) = pairwise_distances.topk_with_indices(k, 1);

    top_k_distances.sum_dim(1).reshape([n_samples])
}