import numpy as np
import pytest
from astrora._core import numpy_ops
ARRAY_SIZES = {
"tiny": 10,
"small": 100,
"medium": 1_000,
"large": 10_000,
"very_large": 100_000,
"huge": 1_000_000,
}
class TestReadOnlyOperationsOverhead:
    """Benchmark read-only reductions: Rust bindings vs. their NumPy equivalents.

    Each Rust benchmark also sanity-checks the returned value against NumPy;
    the ``*_numpy_baseline`` tests time the pure-NumPy call for comparison.
    """

    @pytest.mark.parametrize("size_name", ["tiny", "small", "medium", "large", "very_large"])
    def test_sum_array_rust_vs_numpy(self, benchmark, size_name):
        data = np.random.standard_normal(ARRAY_SIZES[size_name])
        total = benchmark(numpy_ops.sum_array, data)
        # Verify the Rust sum agrees with NumPy on the same data.
        assert total == pytest.approx(np.sum(data))

    @pytest.mark.parametrize("size_name", ["tiny", "small", "medium", "large", "very_large"])
    def test_sum_array_numpy_baseline(self, benchmark, size_name):
        data = np.random.standard_normal(ARRAY_SIZES[size_name])
        benchmark(np.sum, data)

    @pytest.mark.parametrize("size_name", ["tiny", "small", "medium", "large"])
    def test_dot_product_rust_vs_numpy(self, benchmark, size_name):
        n = ARRAY_SIZES[size_name]
        lhs = np.random.standard_normal(n)
        rhs = np.random.standard_normal(n)
        value = benchmark(numpy_ops.dot_product, lhs, rhs)
        assert value == pytest.approx(np.dot(lhs, rhs))

    @pytest.mark.parametrize("size_name", ["tiny", "small", "medium", "large"])
    def test_dot_product_numpy_baseline(self, benchmark, size_name):
        n = ARRAY_SIZES[size_name]
        lhs = np.random.standard_normal(n)
        rhs = np.random.standard_normal(n)
        benchmark(np.dot, lhs, rhs)

    def test_vector_magnitude_rust(self, benchmark):
        v = np.random.standard_normal(1000)
        mag = benchmark(numpy_ops.vector_magnitude, v)
        assert mag == pytest.approx(np.linalg.norm(v))

    def test_vector_magnitude_numpy_baseline(self, benchmark):
        v = np.random.standard_normal(1000)
        benchmark(np.linalg.norm, v)
class TestArrayReturnOverhead:
    """Benchmark operations that allocate and return a new array.

    These measure the cost of marshalling a freshly built array back across
    the Rust/Python boundary, with pure-NumPy baselines for reference.
    """

    @pytest.mark.parametrize("size_name", ["tiny", "small", "medium", "large", "very_large"])
    def test_multiply_scalar_rust(self, benchmark, size_name):
        data = np.random.standard_normal(ARRAY_SIZES[size_name])
        factor = 2.5
        scaled = benchmark(numpy_ops.multiply_scalar, data, factor)
        # Result must match elementwise NumPy scaling.
        assert np.allclose(scaled, data * factor)

    @pytest.mark.parametrize("size_name", ["tiny", "small", "medium", "large", "very_large"])
    def test_multiply_scalar_numpy_baseline(self, benchmark, size_name):
        data = np.random.standard_normal(ARRAY_SIZES[size_name])
        factor = 2.5

        def scale(a, s):
            return a * s

        benchmark(scale, data, factor)

    @pytest.mark.parametrize("size_name", ["tiny", "small", "medium", "large"])
    def test_normalize_vector_rust(self, benchmark, size_name):
        v = np.random.standard_normal(ARRAY_SIZES[size_name])
        unit = benchmark(numpy_ops.normalize_vector, v)
        # A normalized vector has unit Euclidean length.
        assert np.linalg.norm(unit) == pytest.approx(1.0)

    @pytest.mark.parametrize("size_name", ["tiny", "small", "medium", "large"])
    def test_normalize_vector_numpy_baseline(self, benchmark, size_name):
        v = np.random.standard_normal(ARRAY_SIZES[size_name])

        def unit_of(x):
            return x / np.linalg.norm(x)

        benchmark(unit_of, v)
class TestInPlaceOperationsOverhead:
    """Benchmark in-place mutation through the Rust boundary vs. NumPy.

    Uses ``benchmark.pedantic`` with a setup callable so every round mutates
    a fresh array (in-place ops would otherwise compound across rounds).
    """

    @pytest.mark.parametrize("size_name", ["tiny", "small", "medium", "large", "very_large"])
    def test_multiply_scalar_inplace_rust(self, benchmark, size_name):
        n = ARRAY_SIZES[size_name]
        factor = 2.0

        def fresh_args():
            # New array per round; no kwargs.
            return (np.random.standard_normal(n), factor), {}

        benchmark.pedantic(
            numpy_ops.multiply_scalar_inplace, setup=fresh_args, iterations=1, rounds=100
        )

    @pytest.mark.parametrize("size_name", ["tiny", "small", "medium", "large", "very_large"])
    def test_multiply_scalar_inplace_numpy_baseline(self, benchmark, size_name):
        n = ARRAY_SIZES[size_name]
        factor = 2.0

        def fresh_args():
            return (np.random.standard_normal(n), factor), {}

        def scale_inplace(a, s):
            a *= s

        benchmark.pedantic(scale_inplace, setup=fresh_args, iterations=1, rounds=100)
class TestBatchOperationsScaling:
    """Benchmark batched vector normalization as the batch size grows.

    Compares a single batched Rust call, a vectorized NumPy baseline, and a
    per-vector Python loop over the Rust binding (worst-case boundary cost).
    """

    @pytest.mark.parametrize("batch_size", [10, 100, 1000, 10000])
    def test_batch_normalize_rust(self, benchmark, batch_size):
        batch = np.random.standard_normal((batch_size, 3))
        normalized = benchmark(numpy_ops.batch_normalize_vectors, batch)
        # Every row of the result must be a unit vector.
        assert np.allclose(np.linalg.norm(normalized, axis=1), 1.0)

    @pytest.mark.parametrize("batch_size", [10, 100, 1000, 10000])
    def test_batch_normalize_numpy_baseline(self, benchmark, batch_size):
        batch = np.random.standard_normal((batch_size, 3))

        def unit_rows(vs):
            return vs / np.linalg.norm(vs, axis=1, keepdims=True)

        benchmark(unit_rows, batch)

    @pytest.mark.parametrize("batch_size", [10, 100, 1000])
    def test_sequential_normalize_rust(self, benchmark, batch_size):
        batch = np.random.standard_normal((batch_size, 3))

        def one_at_a_time(vs):
            # One boundary crossing per vector.
            return np.array([numpy_ops.normalize_vector(v) for v in vs])

        benchmark(one_at_a_time, batch)
class TestMatrixOperationsOverhead:
    """Benchmark square matrix-matrix and matrix-vector products.

    Rust bindings are checked against NumPy results; baselines time
    ``np.matmul`` directly on identically shaped inputs.
    """

    @pytest.mark.parametrize("size", [10, 50, 100, 200])
    def test_matrix_multiply_rust(self, benchmark, size):
        lhs = np.random.standard_normal((size, size))
        rhs = np.random.standard_normal((size, size))
        product = benchmark(numpy_ops.matrix_multiply, lhs, rhs)
        assert np.allclose(product, lhs @ rhs)

    @pytest.mark.parametrize("size", [10, 50, 100, 200])
    def test_matrix_multiply_numpy_baseline(self, benchmark, size):
        lhs = np.random.standard_normal((size, size))
        rhs = np.random.standard_normal((size, size))
        benchmark(np.matmul, lhs, rhs)

    @pytest.mark.parametrize("size", [10, 50, 100, 200])
    def test_matrix_vector_multiply_rust(self, benchmark, size):
        m = np.random.standard_normal((size, size))
        v = np.random.standard_normal(size)
        benchmark(numpy_ops.matrix_vector_multiply, m, v)

    @pytest.mark.parametrize("size", [10, 50, 100, 200])
    def test_matrix_vector_multiply_numpy_baseline(self, benchmark, size):
        m = np.random.standard_normal((size, size))
        v = np.random.standard_normal(size)
        benchmark(np.matmul, m, v)
class TestBoundaryCrossingOverhead:
    """Isolate per-call overhead of crossing the Python/Rust boundary.

    Contrasts a single tiny call, many repeated tiny calls, and one call on
    an equivalent amount of data (300 elements ~ 100 x 3-element arrays).
    """

    def test_minimal_rust_call(self, benchmark):
        tiny = np.array([1.0, 2.0, 3.0])
        benchmark(numpy_ops.sum_array, tiny)

    def test_minimal_numpy_call(self, benchmark):
        tiny = np.array([1.0, 2.0, 3.0])
        benchmark(np.sum, tiny)

    def test_repeated_tiny_calls_rust(self, benchmark):
        batch = [np.array([1.0, 2.0, 3.0]) for _ in range(100)]

        def sum_each(items):
            # 100 boundary crossings per benchmark iteration.
            return list(map(numpy_ops.sum_array, items))

        benchmark(sum_each, batch)

    def test_repeated_tiny_calls_numpy(self, benchmark):
        batch = [np.array([1.0, 2.0, 3.0]) for _ in range(100)]

        def sum_each(items):
            return list(map(np.sum, items))

        benchmark(sum_each, batch)

    def test_single_large_call_rust(self, benchmark):
        data = np.random.standard_normal(300)
        benchmark(numpy_ops.sum_array, data)

    def test_single_large_call_numpy(self, benchmark):
        data = np.random.standard_normal(300)
        benchmark(np.sum, data)
class TestCrossProductOperations:
    """Benchmark 3-vector cross products, singly and in batches.

    The batch "simulation" loops the single-pair Rust binding per row,
    while the NumPy baseline computes the whole batch in one vectorized call.
    """

    def test_cross_product_rust(self, benchmark):
        u = np.array([1.0, 2.0, 3.0])
        v = np.array([4.0, 5.0, 6.0])
        benchmark(numpy_ops.cross_product, u, v)

    def test_cross_product_numpy_baseline(self, benchmark):
        u = np.array([1.0, 2.0, 3.0])
        v = np.array([4.0, 5.0, 6.0])
        benchmark(np.cross, u, v)

    @pytest.mark.parametrize("batch_size", [10, 100, 1000])
    def test_batch_cross_product_simulation(self, benchmark, batch_size):
        lhs = np.random.standard_normal((batch_size, 3))
        rhs = np.random.standard_normal((batch_size, 3))

        def cross_rows(ua, ub):
            # One Rust call per row pair.
            return np.array([numpy_ops.cross_product(u, v) for u, v in zip(ua, ub)])

        benchmark(cross_rows, lhs, rhs)

    @pytest.mark.parametrize("batch_size", [10, 100, 1000])
    def test_batch_cross_product_numpy_baseline(self, benchmark, batch_size):
        lhs = np.random.standard_normal((batch_size, 3))
        rhs = np.random.standard_normal((batch_size, 3))
        benchmark(np.cross, lhs, rhs)
# Allow running this benchmark suite directly; restricts pytest to
# benchmark tests only (requires the pytest-benchmark plugin).
if __name__ == "__main__":
    pytest.main([__file__, "--benchmark-only", "-v"])