use criterion::{black_box, criterion_group, criterion_main, BatchSize, Criterion};
/// Benchmarks the synthetic embedding provider at 256 dimensions.
///
/// The tokio runtime is constructed once, outside the measured closure:
/// the original built a fresh `Runtime` on every iteration, which is far
/// more expensive than the `embed` call itself and would dominate (and
/// invalidate) the measurement.
fn embedding_benchmark(c: &mut Criterion) {
    use openmemory::core::types::Sector;
    use openmemory::memory::embed::synthetic::SyntheticProvider;
    use openmemory::memory::embed::EmbeddingProvider;

    let provider = SyntheticProvider::new(256);
    // Hoisted out of `b.iter`: runtime creation must not be timed.
    let rt = tokio::runtime::Runtime::new().unwrap();

    c.bench_function("synthetic_embed_256", |b| {
        b.iter(|| {
            rt.block_on(async {
                provider
                    .embed(
                        black_box("This is a test sentence for embedding performance"),
                        black_box(&Sector::Semantic),
                    )
                    .await
                    .unwrap()
            })
        })
    });
}
/// Measures tokenization and canonical-token extraction over a fixed
/// sample sentence.
fn text_processing_benchmark(c: &mut Criterion) {
    use openmemory::utils::text::{canonical_tokens_from_text, tokenize};

    let sample = "This is a sample text for testing tokenization and text processing performance in OpenMemory.";

    c.bench_function("tokenize", |b| b.iter(|| tokenize(black_box(sample))));

    c.bench_function("canonical_tokens", |b| {
        b.iter(|| canonical_tokens_from_text(black_box(sample)))
    });
}
fn vector_operations_benchmark(c: &mut Criterion) {
use openmemory::utils::{cosine_similarity, l2_normalize};
let v1: Vec<f32> = (0..256).map(|i| (i as f32).sin()).collect();
let v2: Vec<f32> = (0..256).map(|i| (i as f32).cos()).collect();
c.bench_function("cosine_similarity_256", |b| {
b.iter(|| cosine_similarity(black_box(&v1), black_box(&v2)))
});
c.bench_function("l2_normalize_256", |b| {
b.iter(|| {
let mut v = v1.clone();
l2_normalize(black_box(&mut v))
})
});
}
// Register every benchmark function under one group; `criterion_main!`
// generates the harness entry point (`fn main`) for the group.
criterion_group!(
benches,
embedding_benchmark,
text_processing_benchmark,
vector_operations_benchmark
);
criterion_main!(benches);