kll_core/layout/mod.rs
// Copyright 2021-2023 Jacob Alexander
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.

mod test;

// ----- Crates -----

use super::*;
use core::cmp::Ordering;
use heapless::{FnvIndexMap, Vec};

// ----- Enums -----

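/// Parser states for walking the raw layer lookup byte stream.
///
/// Each record in raw_layer_lookup is laid out as:
/// `[layer (u8), trigger type (u8), index (u16 LE), trigger list size (u8), <size> u16 LE trigger list entries]`
/// `Triggers(n)` counts down the remaining bytes of the current trigger list.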
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
enum LayerProcessMode {
    Layer,
    TriggerType,
    IndexA,
    IndexB,
    TriggerSize,
    Triggers(u8),
}

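/// Per-mapping macro state for an active trigger:result guide pair.
///
/// A mapping progresses TriggerPos -> ResultPos -> Done: trigger combinations are
/// evaluated first, then result combinations are scheduled, and Done entries are
/// reaped at the end of finalize_triggers().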
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
enum StateStatus {
    /// TriggerCondition + u16 offset position
    TriggerPos {
        /// Time instance when the offset is updated (on combo increment).
        time_instance: u32,
        /// Next offset in the TriggerGuide
        /// This is the offset inside the data structure, so it can be any number
        /// even at the start of the TriggerGuide.
        offset: u16,
    },
    /// Capability + u16 offset position + last TriggerEvent
    ResultPos {
        /// Time instance when the offset is updated (on combo increment).
        /// This value is also set for the first combo evaluation.
        time_instance: u32,
        /// TriggerEvent that initiated the Result Capability
        event: TriggerEvent,
        /// Next offset in the ResultGuide
        /// This is the offset inside the data structure, so it can be any number
        /// even at the start of the ResultGuide.
        offset: u16,
    },
    /// Done is set when the Capabilities are finished and the entry should be reaped
    Done,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum ProcessError {
    /// MAX_OFF_STATE_LOOKUP is too small
    FailedOffStatePush,
    /// STATE_SIZE is too small
    FailedLookupStateInsert,
    /// MAX_ACTIVE_TRIGGERS is too small
    FailedTriggerComboEvalStateInsert,
}

// ----- Structs -----

pub struct TriggerEventIterator<const MAX_EVENTS: usize> {
    events: Vec<TriggerEvent, MAX_EVENTS>,
}

impl<const MAX_EVENTS: usize> TriggerEventIterator<MAX_EVENTS> {
    pub fn new(events: Vec<TriggerEvent, MAX_EVENTS>) -> Self {
        Self { events }
    }
}

impl<const MAX_EVENTS: usize> Iterator for TriggerEventIterator<MAX_EVENTS> {
    type Item = TriggerEvent;

    fn next(&mut self) -> Option<Self::Item> {
        self.events.pop()
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
struct Layer {
    state: layer::State,
    /// Last operation that touched this layer state
    last_time_instance: u32,
}

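/// Runtime state for the KLL layer/macro state machine.
///
/// A typical firmware scan loop drives a LayerState as sketched below. This is a
/// minimal, illustrative sequence only: `incoming_events()` and `generate_event`
/// are hypothetical callers, and the const parameters (e.g. MAX_LAYER_LOOKUP_SIZE,
/// MAX_EVENTS, LSIZE) come from the KLL-compiler-generated layout or the caller's
/// buffer sizing.
///
/// ```ignore
/// let mut state = LayerState::new(lookup, 0);
/// loop {
///     // Each processing loop happens in a single time instance
///     state.increment_time();
///
///     // Feed every TriggerEvent gathered during this scan
///     for event in incoming_events() {
///         state.process_trigger::<LSIZE>(event).unwrap();
///     }
///
///     // Resolve any off-state reverse lookups queued by process_trigger()
///     state.process_off_state_lookups::<MAX_LAYER_LOOKUP_SIZE, MAX_EVENTS>(&generate_event);
///
///     // Collect the CapabilityRuns scheduled for this time instance
///     for cap_run in state.finalize_triggers::<LSIZE>() {
///         // ... dispatch cap_run ...
///     }
/// }
/// ```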
pub struct LayerState<
    'a,
    const LAYOUT_SIZE: usize,
    const STATE_SIZE: usize,
    const MAX_LAYERS: usize,
    const MAX_ACTIVE_LAYERS: usize,
    const MAX_ACTIVE_TRIGGERS: usize,
    const MAX_LAYER_STACK_CACHE: usize,
    const MAX_OFF_STATE_LOOKUP: usize,
> {
    /// KLL guide lookup
    layer_lookup: LayerLookup<'a, LAYOUT_SIZE>,
    /// Stores the trigger:result mapping state position, tracks the macro position
    lookup_state: FnvIndexMap<(u16, u16), StateStatus, STATE_SIZE>,
    /// Stores the current state of every possible layer
    layer: Vec<Layer, MAX_LAYERS>,
    /// Current evaluation order of each layer
    /// Each layer is only in the stack once
    /// Whenever layer::State::Off is set the layer is removed from the stack
    /// Changing the state of a layer does not change the priority order of the stack
    layer_stack: Vec<u8, MAX_ACTIVE_LAYERS>,
    /// Whenever there is a layer lookup for an "initial" action, cache the result of the lookup.
    /// The initial action always does a clean lookup.
    /// The reason for this is to handle the situation where a layer is activated, a key is
    /// pressed, then the layer is deactivated and the key is released. You want the action that
    /// was previously activated on the deactivated layer to deactivate, not whatever is on the
    /// effective new layer in the stack.
    /// (ttype, index) -> (layer index, Layer {layer state, time instance})
    layer_stack_cache: FnvIndexMap<(u8, u16), (u8, Layer), MAX_LAYER_STACK_CACHE>,
    /// Maintains the combo state when evaluating a list of TriggerEvents
    /// This hash table is cleared when finalizing a scan loop
    /// Maps (trigger_guide, result_guide) -> (combo evaluations remaining)
    trigger_combo_eval_state: FnvIndexMap<(u16, u16), u8, MAX_ACTIVE_TRIGGERS>,
    /// time_instance is a dumb counter used to keep track of processing instances.
    /// Yes, the counter will roll over, but generally this shouldn't matter.
    /// Used when generating Layer TriggerEvents; generally set once per processing loop.
    time_instance: u32,
    /// Off state lookups
    /// Used to keep track of possible off-states that need a reverse lookup
    /// Cleared each processing loop.
    /// ((trigger_guide, result_guide), ttype, index)
    off_state_lookups: Vec<((u16, u16), u8, u16), MAX_OFF_STATE_LOOKUP>,
}
139
140impl<
141 'a,
142 const LAYOUT_SIZE: usize,
143 const STATE_SIZE: usize,
144 const MAX_LAYERS: usize,
145 const MAX_ACTIVE_LAYERS: usize,
146 const MAX_ACTIVE_TRIGGERS: usize,
147 const MAX_LAYER_STACK_CACHE: usize,
148 const MAX_OFF_STATE_LOOKUP: usize,
149 >
150 LayerState<
151 'a,
152 LAYOUT_SIZE,
153 STATE_SIZE,
154 MAX_LAYERS,
155 MAX_ACTIVE_LAYERS,
156 MAX_ACTIVE_TRIGGERS,
157 MAX_LAYER_STACK_CACHE,
158 MAX_OFF_STATE_LOOKUP,
159 >
160{
    pub fn new(layer_lookup: LayerLookup<'a, LAYOUT_SIZE>, time_instance: u32) -> Self {
        // Allocate trigger:result mapping state hashtable
        // Used to keep track of the guide offset
        // Mapping
        // (trigger guide, result guide) -> Type(offset)
        let lookup_state = FnvIndexMap::<(u16, u16), StateStatus, STATE_SIZE>::new();

        let mut layer = Vec::new();
        layer
            .resize(
                layer_lookup.max_layers() as usize,
                Layer {
                    state: layer::State::Off,
                    last_time_instance: 0u32,
                },
            )
            .unwrap();

        // Layer 0 is always enabled by default
        layer[0].state = layer::State::Shift;

        let layer_stack = Vec::new();
        let layer_stack_cache =
            FnvIndexMap::<(u8, u16), (u8, Layer), MAX_LAYER_STACK_CACHE>::new();
        let trigger_combo_eval_state = FnvIndexMap::<(u16, u16), u8, MAX_ACTIVE_TRIGGERS>::new();
        let off_state_lookups = Vec::new();

        Self {
            layer_lookup,
            lookup_state,
            layer,
            layer_stack,
            layer_stack_cache,
            trigger_combo_eval_state,
            time_instance,
            off_state_lookups,
        }
    }

    /// Determine if layer is in the stack
    fn is_layer_in_stack(&self, layer: u8) -> bool {
        self.layer_stack.contains(&layer)
    }

    /// Used to set the current time instance used for produced Layer TriggerEvents
    pub fn set_time(&mut self, val: u32) {
        self.time_instance = val;
    }

    /// Set layer state
    /// If the layer already has this state set, it is removed; otherwise it is added.
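    ///
    /// A minimal sketch of typical use (illustrative only; `layer_state` is assumed to be an
    /// existing LayerState and LSIZE is a caller-chosen buffer size):
    ///
    /// ```ignore
    /// // Toggle layer 1 shift; the returned TriggerEvent::Layer can be fed back
    /// // into process_trigger() so layer-trigger mappings are evaluated.
    /// let event = layer_state.set_layer(1, layer::State::Shift);
    /// layer_state.process_trigger::<LSIZE>(event).unwrap();
    /// ```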
    pub fn set_layer(&mut self, layer: u8, state: layer::State) -> TriggerEvent {
        // Make sure the layer is valid
        assert!(
            (layer as usize) < self.layer.len(),
            "Invalid layer set: {} {:?}",
            layer,
            state,
        );

        // Cannot set layer 0
        assert!(layer != 0, "Cannot change layer 0 state");

        // Check to see if the layer is already in the stack, add it if not
        let layer_in_stack = self.is_layer_in_stack(layer);
        if !layer_in_stack {
            self.layer_stack.push(layer).unwrap();
        }

        // Store previous state for event generation
        let prev_state = self.layer[layer as usize].state;

        // Add the state if not already set, otherwise remove it
        if !self.layer[layer as usize].state.is_set(state) {
            self.layer[layer as usize].state.add(state);
        } else {
            self.layer[layer as usize].state.remove(state);
        }

        // Current state
        let cur_state = self.layer[layer as usize].state;

        // Determine Aodo state
        let activity_state = trigger::Aodo::from_state(prev_state.active(), cur_state.active());

        // Update the time instance
        self.layer[layer as usize].last_time_instance = self.time_instance;

        // Remove the layer from the stack if state is Off
        if self.layer[layer as usize].state == layer::State::Off {
            let mut offset = 0;
            for (index, val) in self.layer_stack.clone().iter().enumerate() {
                // Search for index of the layer
                if *val == layer {
                    offset = 1;
                } else {
                    // Once the index of the layer has been located, shift all stack elements
                    if offset > 0 {
                        self.layer_stack[index - offset] = *val;
                    }
                }
            }

            // Reduce the length by one
            self.layer_stack.truncate(self.layer_stack.len() - 1);
        }

        // Build layer trigger event
        let state = trigger::LayerState::from_layer(cur_state, activity_state);

        // Send signal for layer state change
        TriggerEvent::Layer {
            state,
            layer,
            last_state: 0u32, // Initial events always start at 0
        }
    }

    /// Attempts to look up a trigger list for a given trigger type and index,
    /// searching layers from the highest index down
    fn layer_lookup_search<const LSIZE: usize>(
        &self,
        ttype: u8,
        index: u16,
    ) -> Option<(u8, heapless::Vec<(u16, u16), LSIZE>)> {
        // Start from the highest layer and work down
        for (layer, state) in self.layer.iter().enumerate().rev() {
            let layer = layer as u8;
            // Check if effective state is valid
            if state.state.effective() {
                let guides = self
                    .layer_lookup
                    .lookup_guides::<LSIZE>((layer, ttype, index));
                // If guides were found, we can stop here
                if !guides.is_empty() {
                    return Some((layer, guides));
                }
            }
        }

        // No matches
        None
    }

    /// Lookup the effective layer for a given trigger
    /// Depending on the incoming state, use either a full lookup or a cached value
    ///
    /// Returns None if no lookup was successful
    /// Otherwise returns a list of trigger:result guide mappings to process
    pub fn lookup<const LSIZE: usize>(
        &mut self,
        event: TriggerEvent,
    ) -> Option<(u8, heapless::Vec<(u16, u16), LSIZE>)> {
        let cache_lookup = (u8::from(event), event.index());
        let cache_hit = self.layer_stack_cache.get(&cache_lookup);
        trace!("Lookup cache hit: {:?}", cache_hit);

        // Convert to CapabilityRun to determine how to evaluate trigger
        let capability: CapabilityRun = event.into();
        let capability_state = capability.state();
        trace!("Converted capability_state: {:?}", capability_state);

        // Do a cached lookup if this is not the initial event for the trigger and it is
        // present in the cache
        let layer_guides = if capability_state != CapabilityEvent::Initial
            && let Some((layer, _layer_state)) = cache_hit
        {
            // Retrieve layer, and build guide lookup
            let guide_lookup = (*layer, cache_lookup.0, cache_lookup.1);

            // We can do a direct lookup as we're hitting a cache
            let guides = self.layer_lookup.lookup_guides::<LSIZE>(guide_lookup);

            Some((*layer, guides))

        // Do a full lookup if this is the initial event for the trigger or it was not in the cache
        } else {
            self.layer_lookup_search::<LSIZE>(cache_lookup.0, cache_lookup.1)
        };
        trace!("layer_guides: {:?}", layer_guides);

        // If this is a final event, remove the trigger from the layer cache
        if capability_state == CapabilityEvent::Last {
            self.layer_stack_cache.remove(&cache_lookup);

        // Otherwise insert the key if we don't have one already
        } else if cache_hit.is_none() && layer_guides.is_some() {
            let layer = layer_guides.as_ref().unwrap().0;
            // Build the cache entry by looking up the identified layer state
            // The layer state is needed so we can remember what to do if the layer is deactivated
            // in the middle of an action
            let cache_key = (
                layer,
                Layer {
                    state: self.layer[layer as usize].state,
                    last_time_instance: self.time_instance,
                },
            );

            self.layer_stack_cache
                .insert(cache_lookup, cache_key)
                .unwrap();
        }

        layer_guides
    }

    /// Increment time instance
    /// Per the design of KLL, each processing loop of events takes place in a single instance.
    /// Before processing any events, make sure to call this function to increment the internal
    /// time state which is needed to properly schedule generated events.
    pub fn increment_time(&mut self) {
        self.time_instance = self.time_instance.wrapping_add(1u32);
    }

    /// Process incoming triggers
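    ///
    /// For each trigger:result guide mapped to this event, the conditions of the current
    /// trigger combination are voted on (Positive/Negative/Insufficient/OffState); OffState
    /// votes queue reverse lookups (see off_state_lookups()). Once every condition in the
    /// combination is satisfied, the state advances to the next combination or, when the
    /// sequence is complete, to the ResultGuide.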
    pub fn process_trigger<const LSIZE: usize>(
        &mut self,
        event: TriggerEvent,
    ) -> Result<(), ProcessError> {
        trace!("Event: {:?}", event);
        // Lookup guide
        if let Some((_layer, guides)) = self.lookup::<LSIZE>(event) {
            trace!("Event guides: {:?}", guides);
            // Process each of the guides
            for guide in guides {
                // Lookup the state of each of the guides
                let state = if let Some(state) = self.lookup_state.get(&guide) {
                    *state
                } else {
                    StateStatus::TriggerPos {
                        time_instance: self.time_instance,
                        offset: 0,
                    }
                };
                trace!("guide state: {:?}", state);

                // Determine if this trigger is valid
                // If we have a new trigger on a state that is processing a result, ignore this
                // event. We don't ignore result events, they are just queued up.
                let pos = match state {
                    StateStatus::TriggerPos { offset, .. } => offset,
                    _ => {
                        continue;
                    }
                };

                // Lookup trigger guide
                if let Some(trigger_guide) = self.layer_lookup.trigger_guide(guide, pos) {
                    // Check for already evaluated trigger state for this processing loop
                    let mut remaining =
                        if let Some(remaining) = self.trigger_combo_eval_state.get(&guide) {
                            *remaining
                        } else {
                            // Lookup size of this trigger list combo
                            trigger_guide.len() as u8
                        };

                    // Verify that we actually match the condition
                    // e.g. Press vs. Release
                    let mut removed_lookup_state = false;
                    for cond in trigger_guide {
                        match cond.evaluate(event, self.layer_lookup.loop_condition_lookup) {
                            Vote::Positive => {
                                trace!("eval({:?}): Positive", cond);
                                remaining -= 1;
                            }
                            Vote::Negative => {
                                trace!("eval({:?}): Negative", cond);
                                // Remove lookup state entry, continue to next guide
                                self.lookup_state.remove(&guide);
                                removed_lookup_state = true;
                                break;
                            }
                            Vote::Insufficient => {
                                trace!("eval({:?}): Insufficient", cond);
                                // Do nothing
                            }
                            Vote::OffState => {
                                trace!("eval({:?}): OffState", cond);
                                // Attempt to push a reverse lookup query
                                // The results of the query will be another set of TriggerEvents
                                if self
                                    .off_state_lookups
                                    .push((guide, u8::from(*cond), cond.index()))
                                    .is_err()
                                {
                                    return Err(ProcessError::FailedOffStatePush);
                                }
                            }
                        }
                    }

                    // Don't insert a new lookup_state entry if we removed it on purpose
                    if removed_lookup_state {
                        continue;
                    }

                    // Check if there are no remaining evaluations
                    if remaining == 0 {
                        // Determine the next offset
                        let next_status = if let Some(next_offset) =
                            self.layer_lookup.next_trigger_combo(guide, pos)
                        {
                            StateStatus::TriggerPos {
                                time_instance: self.time_instance,
                                offset: next_offset,
                            }
                        } else {
                            StateStatus::ResultPos {
                                time_instance: self.time_instance,
                                event,
                                offset: 0,
                            }
                        };

                        // Update lookup state
                        if self.lookup_state.insert(guide, next_status).is_err() {
                            return Err(ProcessError::FailedLookupStateInsert);
                        }
                    } else {
                        // Update trigger_combo_eval_state
                        if self
                            .trigger_combo_eval_state
                            .insert(guide, remaining)
                            .is_err()
                        {
                            return Err(ProcessError::FailedTriggerComboEvalStateInsert);
                        }
                    }
                }
            }
        } else {
            trace!("No event mapping for: {:?}", event);
        }

        Ok(())
    }

    /// Off state lookups
    /// Used to keep track of possible off-states that need a reverse lookup
    /// Cleared each processing loop.
    /// ((trigger_guide, result_guide), ttype, index)
    pub fn off_state_lookups(&self) -> &[((u16, u16), u8, u16)] {
        &self.off_state_lookups
    }

    /// Process off state lookups
    /// A callback is used to generate TriggerEvents so the caller can evaluate the
    /// off-state of its own inputs (e.g. the keyboard scan matrix).
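    ///
    /// A minimal sketch of the callback shape (illustrative only; `events_for_switch` is a
    /// hypothetical helper that builds the appropriate TriggerEvents from the caller's scan
    /// state for the queried switch index):
    ///
    /// ```ignore
    /// layer_state.process_off_state_lookups::<MAX_LAYER_LOOKUP_SIZE, MAX_EVENTS>(
    ///     &|switch_index| TriggerEventIterator::new(events_for_switch(switch_index)),
    /// );
    /// ```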
    pub fn process_off_state_lookups<
        const MAX_LAYER_LOOKUP_SIZE: usize,
        const MAX_EVENTS: usize,
    >(
        &mut self,
        generate_event: &dyn Fn(usize) -> TriggerEventIterator<MAX_EVENTS>,
    ) {
        let mut events: heapless::Vec<TriggerEvent, MAX_LAYER_LOOKUP_SIZE> = heapless::Vec::new();
        for lookup in &self.off_state_lookups {
            // TODO support non-keyboard TriggerConditions
            assert!(
                lookup.1 == 1,
                "Currently only keyboard TriggerConditions are supported"
            );
            for gen_event in generate_event(lookup.2.into()) {
                events.push(gen_event).unwrap();
            }
        }

        for event in events {
            let ret = self.process_trigger::<MAX_LAYER_LOOKUP_SIZE>(event);
            assert!(
                ret.is_ok(),
                "Failed to enqueue offstate: {:?} - {:?}",
                event,
                ret
            );
        }
    }

    /// Finalize incoming triggers, update internal state and generate outgoing results
    pub fn finalize_triggers<const LSIZE: usize>(&mut self) -> heapless::Vec<CapabilityRun, LSIZE> {
        let mut results = heapless::Vec::<_, LSIZE>::new();

        // Iterate over lookup_state, looking for ResultPos entries
        for (guide, status) in self.lookup_state.iter_mut() {
            // Process results
            if let StateStatus::ResultPos {
                time_instance,
                event,
                offset,
            } = status
            {
                // Time offset, used to compare against the timing conditions
                let time_offset = self.time_instance - *time_instance;

                // Lookup ResultGuide
                if let Some(result_guide) = self.layer_lookup.result_guide(*guide, *offset) {
                    // Keeps track of completed conditions inside the combination
                    let mut completed_cond = 0;

                    // For each element in the combo
                    for cap in result_guide {
                        let time_cond = self.layer_lookup.loop_condition_lookup
                            [cap.loop_condition_index() as usize];
                        match time_offset.cmp(&time_cond) {
                            Ordering::Equal => {
                                // Convert the Capability into a CapabilityRun and enqueue it
                                if results
                                    .push(
                                        cap.generate(
                                            *event,
                                            self.layer_lookup.loop_condition_lookup,
                                        ),
                                    )
                                    .is_err()
                                {
                                    panic!("finalize_triggers LSIZE is too small!");
                                }

                                // Increment completion
                                completed_cond += 1;
                            }
                            Ordering::Greater => {
                                // Capability has already been scheduled, mark as completed
                                completed_cond += 1;
                            }
                            _ => {}
                        }
                    }

                    // Update status position
                    // Check to see if the time_instance is 0, so we can set it
                    if *offset == 0 && completed_cond != result_guide.len() {
                        *status = StateStatus::ResultPos {
                            time_instance: self.time_instance,
                            event: *event,
                            offset: *offset,
                        };
                    } else {
                        // Only increment combo if combo has been fully executed/processed
                        if completed_cond == result_guide.len() {
                            if let Some(next_pos) =
                                self.layer_lookup.next_result_combo(*guide, *offset)
                            {
                                *status = StateStatus::ResultPos {
                                    time_instance: 0, // Set to 0, indicates new combo
                                    event: *event,
                                    offset: next_pos,
                                };
                            } else {
                                // No more combos, remove entry
                                *status = StateStatus::Done;
                            }
                        }
                    }
                }
            }
        }

        // Clear out StateStatus::Done entries
        // TODO(HaaTa): Is this optimal?
        for (guide, status) in self.lookup_state.clone().iter() {
            if status == &StateStatus::Done {
                self.lookup_state.remove(guide);
            }
        }

        // Clear the trigger_combo_eval_state for the next scan iteration
        self.trigger_combo_eval_state.clear();

        // Clear the off_state_lookups for the next scan iteration
        self.off_state_lookups.clear();

        results
    }
}

/// The LayerLookup struct is used as a guide for the KLL state machine.
/// It is a (mostly) constant lookup table which can give you all possible
/// TriggerGuides for a specified input.
/// Each TriggerGuide has a connected ResultGuide which is also stored in this data structure.
///
/// In most cases a (layer, ttype, index) tuple is provided and a list of
/// TriggerGuide:ResultGuide mappings is returned. See lookup_guides().
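///
/// A minimal construction sketch is shown below. The byte layout follows the parser in new():
/// each raw_layer_lookup record is
/// `[layer, ttype, index lo, index hi, size, <size> u16 LE trigger list entries]`,
/// and each trigger list entry indexes a (trigger guide offset, result guide offset) pair in
/// trigger_result_mapping. The literals here are illustrative only; real tables (including
/// TRIGGER_GUIDES, RESULT_GUIDES and LOOP_CONDITION_LOOKUP below) are emitted by the KLL
/// compiler.
///
/// ```ignore
/// // layer 0, trigger type 1 (keyboard), index 0x0005, 1 trigger list entry -> mapping index 0
/// let raw_layer_lookup: &[u8] = &[0, 1, 0x05, 0x00, 1, 0x00, 0x00];
/// // Pair 0: trigger guide offset 0, result guide offset 0
/// let trigger_result_mapping: &[u16] = &[0, 0];
///
/// let lookup = LayerLookup::<8>::new(
///     raw_layer_lookup,
///     &TRIGGER_GUIDES,        // raw TriggerCondition bytes
///     &RESULT_GUIDES,         // raw Capability bytes
///     trigger_result_mapping,
///     &LOOP_CONDITION_LOOKUP, // u32 timing conditions
/// );
///
/// // Returns [(0, 0)]: the trigger:result guide pair mapped to (layer 0, keyboard, index 5)
/// let guides = lookup.lookup_guides::<4>((0, 1, 5));
/// ```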
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct LayerLookup<'a, const LAYOUT_SIZE: usize> {
    layer_lookup: FnvIndexMap<(u8, u8, u16), usize, LAYOUT_SIZE>,
    raw_layer_lookup: &'a [u8],
    trigger_guides: &'a [u8],
    result_guides: &'a [u8],
    trigger_result_mapping: &'a [u16],
    loop_condition_lookup: &'a [u32],
    max_layer: u8,
}

impl<'a, const LAYOUT_SIZE: usize> LayerLookup<'a, LAYOUT_SIZE> {
    pub fn new(
        raw_layer_lookup: &'a [u8],
        trigger_guides: &'a [u8],
        result_guides: &'a [u8],
        trigger_result_mapping: &'a [u16],
        loop_condition_lookup: &'a [u32],
    ) -> Self {
        // Build layer lookup from array
        // The purpose of this hash table is to quickly find the trigger list in LAYER_LOOKUP
        // Mapping
        // (<layer>, <ttype>, <index>) -> LAYER_LOOKUP index
        let mut layer_lookup = FnvIndexMap::<(u8, u8, u16), usize, LAYOUT_SIZE>::new();

        let mut max_layer = 0;

        let mut mode = LayerProcessMode::Layer;
        let mut layer = 0;
        let mut ttype = 0;
        let mut index: u16 = 0;
        for (i, val) in raw_layer_lookup.iter().enumerate() {
            match mode {
                LayerProcessMode::Layer => {
                    layer = *val;
                    if layer > max_layer {
                        max_layer = layer;
                    }
                    mode = LayerProcessMode::TriggerType;
                }
                LayerProcessMode::TriggerType => {
                    ttype = *val;
                    mode = LayerProcessMode::IndexA;
                }
                LayerProcessMode::IndexA => {
                    index = *val as u16;
                    mode = LayerProcessMode::IndexB;
                }
                LayerProcessMode::IndexB => {
                    index |= (*val as u16) << 8;
                    mode = LayerProcessMode::TriggerSize;
                }
                LayerProcessMode::TriggerSize => {
                    let size = *val;
                    let lookup = i;
                    // We only add to the hash table if triggers actually exist
                    // The KLL compiler should optimize these out, but it's still valid array syntax
                    mode = if size > 0 {
                        // Attempt to insert the key
                        match layer_lookup.insert((layer, ttype, index), lookup) {
                            // Success, no existing key
                            Ok(None) => {}
                            // Success, replaced an existing key (this is bad, warn)
                            Ok(Some(old_lookup)) => {
                                warn!(
                                    "Duplicate layer lookup key! ({}, {}). {} has been replaced by {}",
                                    layer, index, old_lookup, lookup
                                );
                            }
                            Err(e) => {
                                error!(
                                    "Failed to add lookup key ({}, {}) -> {}: {:?}; Size:{:?} Capacity:{:?}",
                                    layer, index, lookup, e, layer_lookup.len(), LAYOUT_SIZE,
                                );
                            }
                        }
                        // Trigger list entries are u16, so multiply by 2
                        LayerProcessMode::Triggers(size * 2)
                    } else {
                        LayerProcessMode::Layer
                    }
                }
                LayerProcessMode::Triggers(size) => {
                    mode = if size <= 1 {
                        LayerProcessMode::Layer
                    } else {
                        LayerProcessMode::Triggers(size - 1)
                    };
                }
            }
        }
        trace!("trigger_guides: {:?}", trigger_guides);
        trace!("trigger_result_mapping: {:?}", trigger_result_mapping);
        Self {
            layer_lookup,
            raw_layer_lookup,
            trigger_guides,
            result_guides,
            trigger_result_mapping,
            loop_condition_lookup,
            max_layer,
        }
    }

    /// Retrieves a TriggerList
    /// A TriggerList is a list of indices that correspond to a specific TriggerGuide -> ResultGuide
    /// mapping.
    pub fn trigger_list(&self, (layer, ttype, index): (u8, u8, u16)) -> Option<&'a [u8]> {
        #[cfg(not(feature = "defmt"))]
        trace!("layer_lookup: {:?}", self.layer_lookup);
        match self.layer_lookup.get(&(layer, ttype, index)) {
            Some(lookup) => {
                // Determine size of trigger list
                trace!("raw_layer_lookup: {:?}", self.raw_layer_lookup);
                let size: usize = self.raw_layer_lookup[*lookup].into();

                // If the size is 0, just return None
                if size == 0 {
                    return None;
                }

                // Each trigger list id is a u16
                let size = size * 2;

                // Build TriggerList slice
                let initial: usize = lookup + 1;
                Some(&self.raw_layer_lookup[initial..initial + size])
            }
            None => None,
        }
    }

    /// Retrieves a list of TriggerGuide:ResultGuide mappings
    /// Will need to be called for every new TriggerEvent.
    pub fn lookup_guides<const LSIZE: usize>(
        &self,
        (layer, ttype, index): (u8, u8, u16),
    ) -> heapless::Vec<(u16, u16), LSIZE> {
        let mut guides = heapless::Vec::<_, LSIZE>::new();

        // Lookup TriggerList
        match self.trigger_list((layer, ttype, index)) {
            Some(mlookup) => {
                // Iterate over each trigger to locate guides
                // Each value is a u16 (hence chunking by 2)
                trace!("mlookup: {:?}", mlookup);
                for chunk in mlookup.chunks_exact(2) {
                    // Determine guide lookup index
                    let index = u16::from_le_bytes([chunk[0], chunk[1]]) as usize;

                    // Push guide pair
                    assert!(
                        guides
                            .push((
                                self.trigger_result_mapping[index],
                                self.trigger_result_mapping[index + 1]
                            ))
                            .is_ok(),
                        "lookup_guides vector is full, increase LSIZE: {}",
                        LSIZE
                    );
                }
                trace!("guides: {:?}", guides);
                guides
            }
            None => guides,
        }
    }

    /// Retrieves the TriggerGuide for a given TriggerGuide:ResultGuide pair
    ///
    /// offset indicates the number of u8 positions the sequence is currently at.
    /// trigger + offset will always point to the start of a combination
    pub fn trigger_guide(
        &self,
        (trigger, _result): (u16, u16),
        offset: u16,
    ) -> Option<&[TriggerCondition]> {
        // Determine size of offset combo in the sequence
        let count = self.trigger_guides[trigger as usize + offset as usize] as usize;
        if count == 0 {
            return None;
        }

        // Determine starting position of combo
        let start = trigger as usize + offset as usize + 1;

        // Convert u8 combo list to TriggerCondition list
        let ptr: *const u8 =
            self.trigger_guides[start..start + core::mem::size_of::<TriggerCondition>()].as_ptr();
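        // Note: only the first entry's bytes are bounds-checked above; the unsafe slice
        // construction trusts that the KLL-compiler-generated guide data contains `count`
        // contiguous TriggerCondition entries starting at `start`.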
        let cond = unsafe { core::slice::from_raw_parts(ptr as *const TriggerCondition, count) };
        Some(cond)
    }

    /// Retrieves the ResultGuide for a given TriggerGuide:ResultGuide pair
    ///
    /// offset indicates the number of u8 positions the sequence is currently at.
    /// result + offset will always point to the start of a combination
    pub fn result_guide(
        &self,
        (_trigger, result): (u16, u16),
        offset: u16,
    ) -> Option<&[Capability]> {
        // Determine size of offset combo in the sequence
        let count = self.result_guides[result as usize + offset as usize] as usize;
        if count == 0 {
            return None;
        }

        // Determine starting position of combo
        let start = result as usize + offset as usize + 1;

        // Convert u8 combo list to Capability list
        let ptr: *const u8 =
            self.result_guides[start..start + core::mem::size_of::<Capability>()].as_ptr();
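        // Note: as in trigger_guide(), only the first entry's bytes are bounds-checked above;
        // the unsafe slice construction trusts that the generated guide data contains `count`
        // contiguous Capability entries starting at `start`.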
        let cond = unsafe { core::slice::from_raw_parts(ptr as *const Capability, count) };
        Some(cond)
    }

    /// Determines the next trigger guide combo offset
    /// Returns Some if there is a next offset, None if the next combo is 0 length
    /// Will also return None if the combo at the current offset is 0 length (shouldn't be a
    /// common use case)
    pub fn next_trigger_combo(&self, (trigger, _result): (u16, u16), offset: u16) -> Option<u16> {
        // Determine size of offset combo in the sequence
        let count = self.trigger_guides[trigger as usize + offset as usize] as usize;
        if count == 0 {
            return None;
        }

        // New offset position
        // +1 is added as the combo length count uses 1 byte
        let offset = offset as usize + count * core::mem::size_of::<TriggerCondition>() + 1;

        // Determine size of next combo
        let count = self.trigger_guides[trigger as usize + offset] as usize;
        if count == 0 {
            None
        } else {
            Some(offset as u16)
        }
    }

    /// Determines the next result guide combo offset
    /// Returns Some if there is a next offset, None if the next combo is 0 length
    /// Will also return None if the combo at the current offset is 0 length (shouldn't be a
    /// common use case)
    pub fn next_result_combo(&self, (_trigger, result): (u16, u16), offset: u16) -> Option<u16> {
        // Determine size of offset combo in the sequence
        let count = self.result_guides[result as usize + offset as usize] as usize;
        if count == 0 {
            return None;
        }

        // New offset position
        // +1 is added as the combo length count uses 1 byte
        let offset = offset as usize + count * core::mem::size_of::<Capability>() + 1;

        // Determine size of next combo
        let count = self.result_guides[result as usize + offset] as usize;
        if count == 0 {
            None
        } else {
            Some(offset as u16)
        }
    }

    /// Convenience accessor for layer_lookup
    /// Useful when trying to get a list of all possible triggers
    pub fn layer_lookup(&self) -> &FnvIndexMap<(u8, u8, u16), usize, LAYOUT_SIZE> {
        &self.layer_lookup
    }

    /// Determine the max number of layers
    pub fn max_layers(&self) -> u8 {
        self.max_layer + 1
    }
}