smartcore/algorithm/neighbour/
mod.rs

#![allow(clippy::ptr_arg)]
//! # Nearest Neighbors Search Algorithms and Data Structures
//!
//! Nearest neighbor search is a basic computational tool that is particularly relevant to machine learning,
//! where it is often believed that high-dimensional datasets have low-dimensional intrinsic structure.
//! The basic nearest neighbor problem is formalized as follows: given a set \\( S \\) of \\( n \\) points in some metric space \\( (X, d) \\),
//! the problem is to preprocess \\( S \\) so that given a query point \\( p \in X \\), one can efficiently find a point \\( q \in S \\)
//! which minimizes \\( d(p, q) \\).
//!
//! [The most straightforward nearest neighbor search algorithm](linear_search/index.html) finds the k nearest points using the brute-force approach, where distances between all
//! pairs of points in the dataset are calculated. This approach scales as \\( O(dn^2) \\), where \\( n = \lvert S \rvert \\) is the number of samples and \\( d \\) is the number
//! of dimensions of the metric space. As the number of samples grows, the brute-force approach quickly becomes infeasible.
//!
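//! A minimal usage sketch of the linear search backend (assuming this crate's
//! `Distances::euclidian()` helper from the `metrics::distance` module):
//!
//! ```
//! use smartcore::algorithm::neighbour::linear_search::LinearKNNSearch;
//! use smartcore::metrics::distance::Distances;
//!
//! let data = vec![vec![1.0, 1.0], vec![2.0, 2.0], vec![9.0, 9.0]];
//! let search = LinearKNNSearch::new(data, Distances::euclidian()).unwrap();
//! // Each entry is (index into the dataset, distance to the query, reference to the point).
//! let neighbours = search.find(&vec![1.5, 1.5], 2).unwrap();
//! assert_eq!(neighbours.len(), 2);
//! ```
//!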
//! [Cover Tree](cover_tree/index.html) is a data structure that partitions metric spaces to speed up nearest neighbor search. A cover tree requires \\( O(n) \\) space and
//! has nice theoretical properties (a usage sketch follows the list below):
//!
//! * construction time: \\( O(c^6 n \log n) \\),
//! * insertion time: \\( O(c^6 \log n) \\),
//! * removal time: \\( O(c^6 \log n) \\),
//! * query time: \\( O(c^{12} \log n) \\),
//!
//! where \\( c \\) is the expansion constant of the dataset.
//!
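//! A minimal usage sketch of the cover tree backend (assuming the same
//! `Distances::euclidian()` helper as above):
//!
//! ```
//! use smartcore::algorithm::neighbour::cover_tree::CoverTree;
//! use smartcore::metrics::distance::Distances;
//!
//! let data = vec![vec![1.0, 1.0], vec![2.0, 2.0], vec![9.0, 9.0]];
//! let tree = CoverTree::new(data, Distances::euclidian()).unwrap();
//! // The two points nearest to the query, as (index, distance, point) triples.
//! let neighbours = tree.find(&vec![1.5, 1.5], 2).unwrap();
//! assert_eq!(neighbours.len(), 2);
//! ```
//!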
//! ## References:
//! * ["The Art of Computer Programming" Knuth, D., Vol. 3, 2nd ed., Sorting and Searching, 1998](https://www-cs-faculty.stanford.edu/~knuth/taocp.html)
//! * ["Cover Trees for Nearest Neighbor" Beygelzimer et al., Proceedings of the 23rd International Conference on Machine Learning, ICML'06 (2006)](https://hunch.net/~jl/projects/cover_tree/cover_tree.html)
//! * ["Faster Cover Trees" Izbicki et al., Proceedings of the 32nd International Conference on Machine Learning, ICML'15 (2015)](http://www.cs.ucr.edu/~cshelton/papers/index.cgi%3FIzbShe15)
//! * ["The Elements of Statistical Learning: Data Mining, Inference, and Prediction" Hastie et al., 2nd edition, chapter 13](https://web.stanford.edu/~hastie/ElemStatLearn/)
//!
//! <script src="https://polyfill.io/v3/polyfill.min.js?features=es6"></script>
//! <script id="MathJax-script" async src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"></script>

use crate::algorithm::neighbour::cover_tree::CoverTree;
use crate::algorithm::neighbour::linear_search::LinearKNNSearch;
use crate::error::Failed;
use crate::metrics::distance::Distance;
use crate::numbers::basenum::Number;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};

pub(crate) mod bbd_tree;
/// tree data structure for fast nearest neighbor search
pub mod cover_tree;
/// FastPair algorithm for finding the closest pair of points
pub mod fastpair;
/// very simple algorithm that sequentially checks each element of the list until a match is found or the whole list has been searched.
pub mod linear_search;

/// Both the KNN classifier and regressor benefit from underlying search algorithms that help speed up queries.
/// `KNNAlgorithmName` maintains a list of supported search algorithms, see [KNN algorithms](../algorithm/neighbour/index.html).
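///
/// A minimal selection sketch; `CoverTree` is the default variant:
///
/// ```
/// use smartcore::algorithm::neighbour::KNNAlgorithmName;
///
/// let _explicit = KNNAlgorithmName::LinearSearch;
/// assert!(matches!(KNNAlgorithmName::default(), KNNAlgorithmName::CoverTree));
/// ```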
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Debug, Clone, Default)]
pub enum KNNAlgorithmName {
    /// Linear Search algorithm, see [`LinearSearch`](../algorithm/neighbour/linear_search/index.html)
    LinearSearch,
    /// Cover Tree Search algorithm, see [`CoverTree`](../algorithm/neighbour/cover_tree/index.html)
    #[default]
    CoverTree,
}

/// Concrete search backend constructed from a `KNNAlgorithmName`.
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Debug)]
pub(crate) enum KNNAlgorithm<T: Number, D: Distance<Vec<T>>> {
    LinearSearch(LinearKNNSearch<Vec<T>, D>),
    CoverTree(CoverTree<Vec<T>, D>),
}

impl KNNAlgorithmName {
    /// Constructs the selected search algorithm over `data`, using `distance`
    /// to measure dissimilarity between points.
    pub(crate) fn fit<T: Number, D: Distance<Vec<T>>>(
        &self,
        data: Vec<Vec<T>>,
        distance: D,
    ) -> Result<KNNAlgorithm<T, D>, Failed> {
        match *self {
            KNNAlgorithmName::LinearSearch => {
                LinearKNNSearch::new(data, distance).map(KNNAlgorithm::LinearSearch)
            }
            KNNAlgorithmName::CoverTree => {
                CoverTree::new(data, distance).map(KNNAlgorithm::CoverTree)
            }
        }
    }
}

impl<T: Number, D: Distance<Vec<T>>> KNNAlgorithm<T, D> {
    /// Finds the `k` points nearest to `from`, returned as `(index, distance, point)` triples.
    pub fn find(&self, from: &Vec<T>, k: usize) -> Result<Vec<(usize, f64, &Vec<T>)>, Failed> {
        match *self {
            KNNAlgorithm::LinearSearch(ref linear) => linear.find(from, k),
            KNNAlgorithm::CoverTree(ref cover) => cover.find(from, k),
        }
    }

    /// Finds all points within `radius` of `from`, returned as `(index, distance, point)` triples.
    pub fn find_radius(
        &self,
        from: &Vec<T>,
        radius: f64,
    ) -> Result<Vec<(usize, f64, &Vec<T>)>, Failed> {
        match *self {
            KNNAlgorithm::LinearSearch(ref linear) => linear.find_radius(from, radius),
            KNNAlgorithm::CoverTree(ref cover) => cover.find_radius(from, radius),
        }
    }
}
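
#[cfg(test)]
mod tests {
    use super::*;
    use crate::metrics::distance::Distances;

    // A minimal sketch exercising both backends through the common dispatch
    // (assumes `Distances::euclidian()` from this crate's `metrics::distance` module).
    #[test]
    fn fit_and_find() {
        let data = vec![vec![1.0, 1.0], vec![2.0, 2.0], vec![9.0, 9.0]];
        for name in [KNNAlgorithmName::LinearSearch, KNNAlgorithmName::CoverTree] {
            let algorithm = name.fit(data.clone(), Distances::euclidian()).unwrap();
            // The two nearest neighbours of the query point.
            let neighbours = algorithm.find(&vec![1.5, 1.5], 2).unwrap();
            assert_eq!(neighbours.len(), 2);
            // All points within distance 2.0 of the query.
            let close = algorithm.find_radius(&vec![1.5, 1.5], 2.0).unwrap();
            assert_eq!(close.len(), 2);
        }
    }
}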