// q_recognizer/q_point_cloud_recognizer.rs

1/**
2 * The $P Point-Cloud Recognizer (rust version)
3 * 
4 * Translated to rust from the original authors' C# code with an AI tool.
5 * The translated code has been reviewed by Ferran Pujol Camins.
6 *
7 * Original authors:
8 * 
9 * 	    Radu-Daniel Vatavu, Ph.D.
10 *	    University Stefan cel Mare of Suceava
11 *	    Suceava 720229, Romania
12 *	    vatavu@eed.usv.ro
13 *
14 *	    Lisa Anthony, Ph.D.
15 *      UMBC
16 *      Information Systems Department
17 *      1000 Hilltop Circle
18 *      Baltimore, MD 21250
19 *      lanthony@umbc.edu
20 *
21 *	    Jacob O. Wobbrock, Ph.D.
22 * 	    The Information School
23 *	    University of Washington
24 *	    Seattle, WA 98195-2840
25 *	    wobbrock@uw.edu
26 *
27 * The academic publication for the $P recognizer, and what should be 
28 * used to cite it, is:
29 *
30 *	Vatavu, R.-D., Anthony, L. and Wobbrock, J.O. (2012).  
31 *	  Gestures as point clouds: A $P recognizer for user interface 
32 *	  prototypes. Proceedings of the ACM Int'l Conference on  
33 *	  Multimodal Interfaces (ICMI '12). Santa Monica, California  
34 *	  (October 22-26, 2012). New York: ACM Press, pp. 273-280.
35 *
36 * This software is distributed under the "New BSD License" agreement:
37 *
38 * Copyright (c) 2012, Radu-Daniel Vatavu, Lisa Anthony, and 
39 * Jacob O. Wobbrock. All rights reserved.
40 *
41 * Redistribution and use in source and binary forms, with or without
42 * modification, are permitted provided that the following conditions are met:
43 *    * Redistributions of source code must retain the above copyright
44 *      notice, this list of conditions and the following disclaimer.
45 *    * Redistributions in binary form must reproduce the above copyright
46 *      notice, this list of conditions and the following disclaimer in the
47 *      documentation and/or other materials provided with the distribution.
48 *    * Neither the names of the University Stefan cel Mare of Suceava, 
49 *	    University of Washington, nor UMBC, nor the names of its contributors 
50 *	    may be used to endorse or promote products derived from this software 
51 *	    without specific prior written permission.
52 *
53 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
54 * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
55 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
56 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Radu-Daniel Vatavu OR Lisa Anthony
57 * OR Jacob O. Wobbrock OR Ferran Pujol Camins BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
58 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT 
59 * OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 
60 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, 
61 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
62 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
63 * SUCH DAMAGE.
64**/
65
66use crate::{geometry::sqr_euclidean_distance, gesture::{Gesture, LUT_SCALE_FACTOR}, point::Point};
67
/// Tuning knobs for the $Q recognizer's optimization layers.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct QParameters {
    // $Q's two major optimization layers (Early Abandoning and Lower Bounding)
    // can be activated / deactivated as desired
    pub use_early_abandoning: bool,
    pub use_lower_bounding: bool
}

impl Default for QParameters {
    /// Both optimizations are enabled by default, matching the reference implementation.
    fn default() -> Self {
        QParameters {
            use_early_abandoning: true,
            use_lower_bounding: true,
        }
    }
}
83
/// Result of a [`classify`] call: the name of the best-matching template's
/// class and its (weighted, squared-distance based) cloud distance to the
/// candidate. When the template set is empty, `class` is empty and
/// `distance` is `f32::MAX`.
#[derive(Debug, Clone, PartialEq)]
pub struct ClassifyResult {
    pub class: String,
    pub distance: f32
}
88
89/// Main function of the $Q recognizer.
90/// Classifies a candidate gesture against a set of templates.
91/// Returns the class of the closest neighbor in the template set.
92pub fn classify(candidate: &Gesture, template_set: &[Gesture], params: &QParameters) -> ClassifyResult {
93    let mut best_class = String::new();
94    let mut min_dist = f32::MAX;
95    for template in template_set {
96        let d = greedy_cloud_match(candidate, template, min_dist, params);
97        if d < min_dist {
98            min_dist = d;
99            best_class = template.name.clone();
100        }
101    }
102    ClassifyResult {
103        class: best_class,
104        distance: min_dist
105    }
106}
107
108/// Implements greedy search for a minimum-distance matching between two point clouds.
109/// Implements Early Abandoning and Lower Bounding (LUT) optimizations.
110fn greedy_cloud_match(gesture1: &Gesture, gesture2: &Gesture, mut min_so_far: f32, params: &QParameters) -> f32 {
111    // the two clouds should have the same number of points by now
112    let n = gesture1.points.len();
113    // controls the number of greedy search trials (eps is in [0..1])
114    let eps = 0.5;
115    let step = (n as f32).powf(1.0 - eps).floor() as usize;
116
117        if params.use_lower_bounding {
118            // direction of matching: gesture1 --> gesture2
119            let lb1 = compute_lower_bound(&gesture1.points, &gesture2.points, gesture2.lut.as_ref().unwrap(), step);
120            // direction of matching: gesture2 --> gesture1
121            let lb2 = compute_lower_bound(&gesture2.points, &gesture1.points, gesture1.lut.as_ref().unwrap(), step);
122
123            let mut i = 0;
124            let mut index_lb = 0;
125            while i < n {
126                if lb1[index_lb] < min_so_far {
127                    // direction of matching: gesture1 --> gesture2 starting with index point i
128                    min_so_far = min_so_far.min(cloud_distance(&gesture1.points, &gesture2.points, i, min_so_far, params));
129                }
130                if lb2[index_lb] < min_so_far {
131                    // direction of matching: gesture2 --> gesture1 starting with index point i
132                    min_so_far = min_so_far.min(cloud_distance(&gesture2.points, &gesture1.points, i, min_so_far, params));
133                }
134
135                index_lb += 1;
136                i += step;
137            }
138        } else {
139            for i in (0..n).step_by(step) {
140                // direction of matching: gesture1 --> gesture2 starting with index point i
141                min_so_far = min_so_far.min(cloud_distance(&gesture1.points, &gesture2.points, i, min_so_far, params));
142                // direction of matching: gesture2 --> gesture1 starting with index point i   
143                min_so_far = min_so_far.min(cloud_distance(&gesture2.points, &gesture1.points, i, min_so_far, params));
144            }
145        }
146
147    min_so_far
148}
149
150/// Computes lower bounds for each starting point and the direction of matching from points1 to points2 
151fn compute_lower_bound(
152    points1: &[Point],
153    points2: &[Point],
154    lut: &[Vec<usize>],
155    step: usize
156) -> Vec<f32> {
157    let n = points1.len();
158    let mut lb = vec![0.0; n / step + 1];
159    let mut sat = vec![0.0; n];
160
161    for i in 0..n {
162        let index = lut[points1[i].int_y as usize / LUT_SCALE_FACTOR]
163            [points1[i].int_x as usize / LUT_SCALE_FACTOR];
164        let dist = sqr_euclidean_distance(&points1[i], &points2[index]);
165        sat[i] = if i == 0 { dist } else { sat[i - 1] + dist };
166        lb[0] += (n - i) as f32 * dist;
167    }
168
169    let mut i = step;
170    let mut index_lb = 1;
171    while i < n {
172        lb[index_lb] = lb[0] + (i as f32)*sat[n-1] - (n as f32)*sat[i-1];
173
174        index_lb += 1;
175        i += step;
176    }
177    lb
178}
179
180/// Computes the distance between two point clouds by performing a minimum-distance greedy matching
181/// starting with point startIndex
182fn cloud_distance(points1: &[Point], points2: &[Point], start_index: usize, min_so_far: f32, params: &QParameters) -> f32 {
183    // the two point clouds should have the same number of points by now
184    let n = points1.len();
185    // stores point indexes for points from the 2nd cloud that haven't been matched yet
186    let mut indexes_not_matched: Vec<usize> = (0..n).collect();
187
188    // computes the sum of distances between matched points (i.e., the distance between the two clouds)
189    let mut sum = 0.0;
190    // start matching with point startIndex from the 1st cloud
191    let mut i = start_index;
192    // implements weights, decreasing from n to 1
193    let mut weight = n;
194    // indexes the indexesNotMatched[..] array of points from the 2nd cloud that are not matched yet
195    let mut index_not_matched = 0;
196
197    loop {
198        let mut index = 0;
199        let mut min_distance = f32::MAX;
200        for j in index_not_matched..n {
201            let dist = sqr_euclidean_distance(&points1[i], &points2[indexes_not_matched[j]]);
202            if dist < min_distance {
203                min_distance = dist;
204                index = j;
205            }
206        }
207        // point indexesNotMatched[index] of the 2nd cloud is now matched to point i of the 1st cloud
208        indexes_not_matched[index] = indexes_not_matched[index_not_matched];
209        // weight each distance with a confidence coefficient that decreases from n to 1
210        sum += (weight as f32) * min_distance;
211        weight -= 1;
212
213        
214        if params.use_early_abandoning && sum >= min_so_far {
215            return sum;
216        }
217
218        // advance to the next point in the 1st cloud
219        i = (i + 1) % n;
220        // update the number of points from the 2nd cloud that haven't been matched yet
221        index_not_matched += 1;
222
223        if i == start_index {
224            break;
225        }
226    }
227    sum
228}