//! vectus 0.1.38
//!
//! A vector database implemented in Rust for learning purposes.
//! Documentation
#![allow(unused)]

pub mod document;
pub(crate) mod hnsw;
pub(crate) mod model;
pub use hnsw::metric;

use document::{DocBuilder, Document};
use hnsw::{metric::Metric, HNSWInitializer, HNSW};
use model::{Model, ModelType};
use ndarray::{Array1, Array2};
use std::{fmt::format, sync::Arc};
use tokio::sync::Mutex;

/// Storage backend selector for a [`Vectus`] store.
///
/// A plain flag enum, so it derives the usual value-type traits
/// (`Copy` avoids needless moves/borrows at match sites).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StorageType {
    /// Keep embeddings and the HNSW index in process memory.
    InMemory,
    /// On-disk persistence; not implemented yet (panics when used).
    Persistent,
}

/// An in-memory vector store: document embeddings, their source documents,
/// and an HNSW index over the embeddings for approximate nearest-neighbor
/// search. All shared state sits behind `Arc<tokio::sync::Mutex<_>>` so the
/// store can be shared across concurrent async tasks.
pub struct Vectus {
    /// Row-major matrix of stored embeddings; starts out 0x0 and is
    /// populated by `add_documents`.
    pub embeddings: Arc<Mutex<Array2<f32>>>,
    /// Documents corresponding (by row index) to the stored embeddings.
    pub documents: Arc<Mutex<Vec<Document>>>,
    /// Selected backend; only `InMemory` is implemented (see `store_emb_db`).
    storage_type: StorageType,
    /// Approximate nearest-neighbor index over the embeddings.
    hnsw: Arc<Mutex<HNSW>>,
}

impl Vectus {
    /// Creates an empty store with the given storage backend and distance
    /// metric. HNSW hyper-parameters are fixed at construction.
    pub fn new(storage_type: StorageType, metric: Metric) -> Vectus {
        // Index hyper-parameters used for every Vectus instance.
        let initializer = HNSWInitializer {
            max_level: 12,
            ef_construction: 350,
            m: 32,
            m_max: 64,
            norm: 3.0,
            entry: None,
            metric,
        };

        Vectus {
            // (0, 0) marks "nothing stored yet"; the real row width is
            // established by the first `add_documents` call.
            embeddings: Mutex::new(Array2::zeros((0, 0))).into(),
            documents: Mutex::new(Vec::new()).into(),
            storage_type,
            hnsw: Mutex::new(HNSW::new(initializer)).into(),
        }
    }

    /// Embeds `query` and returns up to `k` documents most relevant to it,
    /// ordered as the HNSW search returns them.
    ///
    /// Returns an empty `Vec` for an empty query.
    ///
    /// # Panics
    /// Panics when the embedding request fails (kept because the signature
    /// returns a plain `Vec<Document>`, which callers depend on).
    pub async fn get_k_relevant_documents(
        &self,
        api_key: &str,
        query: &str,
        k: usize,
    ) -> Vec<Document> {
        if query.is_empty() {
            return Vec::new();
        }

        let model = Model::new(ModelType::OpenAI(api_key.to_string()));

        // Owned copy so `&query_text` satisfies `get_embedding` regardless
        // of whether it takes `&str` or `&String`.
        let query_text = query.to_string();
        let query_embedding = match model.get_embedding(&query_text).await {
            Ok(embedding) => embedding,
            Err(e) => panic!("Error getting embedding: {}", e),
        };

        let query_emb = Array1::from_vec(query_embedding);

        // Search with ef = current index size, matching the original
        // behavior; release the index lock before touching the documents.
        let hnsw_guard = self.hnsw.lock().await;
        let result = hnsw_guard.search(query_emb, hnsw_guard.len(), k).await;
        drop(hnsw_guard);

        // Map returned row indices back to documents. `.get` (rather than
        // indexing) skips any index that points past the document list
        // instead of panicking.
        let docs_guard = self.documents.lock().await;
        result
            .iter()
            .take(k)
            .filter_map(|&ix| docs_guard.get(ix).cloned())
            .collect()
    }

    /// Embeds each document (page content plus metadata, one `key value`
    /// pair per line), inserts the embeddings into the HNSW index, and
    /// appends both the documents and the embedding rows to the store.
    ///
    /// # Errors
    /// Returns `Err` when `docs` is empty, when an embedding request fails,
    /// or when the new embeddings do not match the stored embedding width.
    pub async fn add_documents(
        &mut self,
        api_key: &str,
        docs: &[Document],
    ) -> Result<(), String> {
        if docs.is_empty() {
            return Err("No documents to add!".to_string());
        }

        let model = Model::new(ModelType::OpenAI(api_key.to_string()));
        let mut new_rows: Vec<Vec<f32>> = Vec::with_capacity(docs.len());

        for doc in docs {
            // Text sent to the model: page content, then each metadata
            // pair appended on its own line.
            let mut text = doc.page_content.clone();
            for (key, value) in doc.metadata.iter() {
                text.push_str(&format!("\n{} {}", key, value));
            }

            // Propagate failures as Err instead of panicking: the
            // signature already returns Result<(), String>.
            let embedding = model
                .get_embedding(&text)
                .await
                .map_err(|e| format!("Error getting embedding: {}", e))?;

            self.store_emb_db(&Array1::from_vec(embedding.clone())).await;
            new_rows.push(embedding);
        }

        let mut docs_guard = self.documents.lock().await;
        docs_guard.extend(docs.iter().cloned());
        drop(docs_guard);

        // Append the new rows to the stored matrix instead of replacing it,
        // so embeddings from earlier calls are preserved — keeping the
        // matrix consistent with the ever-growing `documents` list and
        // HNSW index. (The previous code overwrote the matrix with only
        // the latest batch.)
        let dim = new_rows[0].len();
        let mut embeddings_guard = self.embeddings.lock().await;
        let old_rows = embeddings_guard.nrows();
        if old_rows > 0 && embeddings_guard.ncols() != dim {
            return Err(format!(
                "Embedding width mismatch: stored {}, new {}",
                embeddings_guard.ncols(),
                dim
            ));
        }
        let mut flat: Vec<f32> = embeddings_guard.iter().copied().collect();
        flat.extend(new_rows.concat());
        *embeddings_guard = Array2::from_shape_vec((old_rows + docs.len(), dim), flat)
            .map_err(|e| format!("Error building embedding matrix: {}", e))?;

        Ok(())
    }

    /// Routes an embedding to the configured storage backend. For the
    /// in-memory backend, the current index size becomes the new element's
    /// id (ids are therefore dense and insertion-ordered).
    ///
    /// # Panics
    /// Panics for `StorageType::Persistent`, which is not implemented.
    async fn store_emb_db(&self, embedding: &Array1<f32>) {
        match self.storage_type {
            StorageType::InMemory => {
                let mut hnsw_guard = self.hnsw.lock().await;
                let next_id = hnsw_guard.len();
                hnsw_guard.insert(embedding, next_id).await;
            }
            StorageType::Persistent => {
                panic!("{:?} Not implemented yet!", self.storage_type);
            }
        }
    }
}