// bamboo_memory/memory_store/recall.rs

use std::cmp::Ordering;
use std::collections::HashSet;
use std::io;
use std::sync::Arc;

use futures::StreamExt;
use serde::Deserialize;

use bamboo_agent_core::Message;
use bamboo_domain::ReasoningEffort;
use bamboo_infrastructure::{LLMChunk, LLMProvider, LLMRequestOptions};

use super::{
    extract_keywords, parse_rfc3339, DurableMemoryStatus, LexicalIndexItem, MemoryScope,
    MemoryStore,
};
17
/// A durable-memory entry that matched a recall query, carrying the metadata
/// needed for ranking, display, and rerank prompting.
#[derive(Debug, Clone, PartialEq)]
pub struct MemoryRecallCandidate {
    // Unique memory identifier within the store.
    pub id: String,
    pub title: String,
    // Lexical relevance score; higher is more relevant. Two-decimal precision
    // (see `score_lexical_index_item`).
    pub score: f64,
    pub scope: MemoryScope,
    // Set when the memory is project-scoped.
    pub project_key: Option<String>,
    pub status: DurableMemoryStatus,
    // RFC 3339 timestamp string; parsed for recency tie-breaking.
    pub updated_at: String,
    pub summary: String,
}
29
/// Tuning knobs for relevant-memory recall.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MemoryRecallOptions {
    // Maximum number of candidates returned to the caller (floored at 1).
    pub shortlist_limit: usize,
    // When true, fall back to global-scope memories if the project scope
    // produced no hits.
    pub include_global_fallback: bool,
    // Cap on candidates kept per scope before truncation to the shortlist.
    pub max_candidates_per_scope: usize,
}
36
impl Default for MemoryRecallOptions {
    /// Defaults: a 3-item shortlist, global fallback enabled, and up to 20
    /// candidates considered per scope.
    fn default() -> Self {
        Self {
            shortlist_limit: 3,
            include_global_fallback: true,
            max_candidates_per_scope: 20,
        }
    }
}
46
/// How the final candidate ordering was produced.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum MemoryRecallStrategy {
    /// Pure lexical scoring; no rerank model was consulted.
    Lexical,
    /// The rerank model produced the ordering.
    Reranked,
    /// Rerank was attempted but failed or returned nothing usable; lexical
    /// order was used instead.
    RerankFallback,
}
53
54impl MemoryRecallStrategy {
55    pub fn as_str(self) -> &'static str {
56        match self {
57            Self::Lexical => "lexical",
58            Self::Reranked => "reranked",
59            Self::RerankFallback => "rerank_fallback",
60        }
61    }
62}
63
/// Result of `select_relevant_memories`: the chosen candidates (best first)
/// plus the strategy that produced the ordering.
#[derive(Debug, Clone, PartialEq)]
pub struct MemoryRecallSelection {
    pub candidates: Vec<MemoryRecallCandidate>,
    pub strategy: MemoryRecallStrategy,
}
69
/// Everything needed to run the optional LLM rerank pass.
#[derive(Clone)]
pub struct MemoryRecallRerankContext {
    // Provider used for the rerank chat call.
    pub llm: Arc<dyn LLMProvider>,
    // Model name passed to the provider; must be non-empty after trimming.
    pub model: String,
    // Optional session id forwarded through `LLMRequestOptions`.
    pub session_id: Option<String>,
}
76
/// Expected JSON shape of the rerank model's response: {"ids":[...]}.
/// `ids` defaults to empty so a bare `{}` still deserializes.
#[derive(Debug, Deserialize)]
struct MemoryRecallRerankEnvelope {
    #[serde(default)]
    ids: Vec<String>,
}
82
83pub async fn shortlist_relevant_memories(
84    store: &MemoryStore,
85    project_key: Option<&str>,
86    query: &str,
87    options: &MemoryRecallOptions,
88) -> io::Result<Vec<MemoryRecallCandidate>> {
89    let limit = options.shortlist_limit.max(1);
90    let mut candidates =
91        lexical_shortlist_relevant_memories(store, project_key, query, options).await?;
92    candidates.truncate(limit);
93    Ok(candidates)
94}
95
96pub async fn select_relevant_memories(
97    store: &MemoryStore,
98    project_key: Option<&str>,
99    query: &str,
100    options: &MemoryRecallOptions,
101    rerank_context: Option<&MemoryRecallRerankContext>,
102) -> io::Result<MemoryRecallSelection> {
103    let query = query.trim();
104    if query.is_empty() {
105        return Ok(MemoryRecallSelection {
106            candidates: Vec::new(),
107            strategy: MemoryRecallStrategy::Lexical,
108        });
109    }
110
111    let limit = options.shortlist_limit.max(1);
112    let mut shortlist =
113        lexical_shortlist_relevant_memories(store, project_key, query, options).await?;
114    if shortlist.is_empty() {
115        return Ok(MemoryRecallSelection {
116            candidates: shortlist,
117            strategy: MemoryRecallStrategy::Lexical,
118        });
119    }
120
121    let Some(rerank_context) = rerank_context else {
122        shortlist.truncate(limit);
123        return Ok(MemoryRecallSelection {
124            candidates: shortlist,
125            strategy: MemoryRecallStrategy::Lexical,
126        });
127    };
128
129    if shortlist.len() <= 1 {
130        shortlist.truncate(limit);
131        return Ok(MemoryRecallSelection {
132            candidates: shortlist,
133            strategy: MemoryRecallStrategy::Lexical,
134        });
135    }
136
137    match rerank_candidate_ids(query, &shortlist, limit, rerank_context).await {
138        Ok(ids) => {
139            let reranked = reorder_candidates_by_ids(&shortlist, &ids, limit);
140            if reranked.is_empty() {
141                let mut lexical = shortlist;
142                lexical.truncate(limit);
143                return Ok(MemoryRecallSelection {
144                    candidates: lexical,
145                    strategy: MemoryRecallStrategy::RerankFallback,
146                });
147            }
148            Ok(MemoryRecallSelection {
149                candidates: reranked,
150                strategy: MemoryRecallStrategy::Reranked,
151            })
152        }
153        Err(error) => {
154            tracing::warn!(
155                "Relevant memory rerank failed for model '{}': {}. Falling back to lexical shortlist.",
156                rerank_context.model,
157                error
158            );
159            shortlist.truncate(limit);
160            Ok(MemoryRecallSelection {
161                candidates: shortlist,
162                strategy: MemoryRecallStrategy::RerankFallback,
163            })
164        }
165    }
166}
167
168async fn lexical_shortlist_relevant_memories(
169    store: &MemoryStore,
170    project_key: Option<&str>,
171    query: &str,
172    options: &MemoryRecallOptions,
173) -> io::Result<Vec<MemoryRecallCandidate>> {
174    let query = query.trim();
175    if query.is_empty() {
176        return Ok(Vec::new());
177    }
178
179    let limit = options.shortlist_limit.max(1);
180    let per_scope_limit = options.max_candidates_per_scope.max(limit);
181
182    if let Some(project_key) = project_key.map(str::trim).filter(|value| !value.is_empty()) {
183        let mut project_hits =
184            shortlist_scope(store, MemoryScope::Project, Some(project_key), query).await?;
185        project_hits.truncate(per_scope_limit);
186        if !project_hits.is_empty() {
187            return Ok(project_hits);
188        }
189    }
190
191    if options.include_global_fallback {
192        let mut global_hits = shortlist_scope(store, MemoryScope::Global, None, query).await?;
193        global_hits.truncate(per_scope_limit);
194        return Ok(global_hits);
195    }
196
197    Ok(Vec::new())
198}
199
200async fn shortlist_scope(
201    store: &MemoryStore,
202    scope: MemoryScope,
203    project_key: Option<&str>,
204    query: &str,
205) -> io::Result<Vec<MemoryRecallCandidate>> {
206    let Some(index) = store.read_lexical_index(scope, project_key).await? else {
207        return Ok(Vec::new());
208    };
209
210    let query_tokens = extract_keywords(query, "", &[]);
211    if query_tokens.is_empty() {
212        return Ok(Vec::new());
213    }
214
215    let mut candidates = index
216        .items
217        .iter()
218        .filter_map(|item| score_lexical_index_item(item, &query_tokens).map(|score| (item, score)))
219        .map(|(item, score)| MemoryRecallCandidate {
220            id: item.id.clone(),
221            title: item.title.clone(),
222            score,
223            scope: item.scope,
224            project_key: item.project_key.clone(),
225            status: item.status,
226            updated_at: item.updated_at.clone(),
227            summary: item.summary.clone(),
228        })
229        .collect::<Vec<_>>();
230
231    sort_recall_candidates(&mut candidates);
232    Ok(candidates)
233}
234
235fn score_lexical_index_item(item: &LexicalIndexItem, query_tokens: &[String]) -> Option<f64> {
236    match item.status {
237        DurableMemoryStatus::Superseded
238        | DurableMemoryStatus::Contradicted
239        | DurableMemoryStatus::Archived => return None,
240        DurableMemoryStatus::Active | DurableMemoryStatus::Stale => {}
241    }
242
243    let title = item.title.to_ascii_lowercase();
244    let summary = item.summary.to_ascii_lowercase();
245
246    let mut score = 0.0;
247    let mut matched_any = false;
248
249    for token in query_tokens {
250        let mut token_score = 0.0;
251        if title.contains(token) {
252            token_score += 3.0;
253        }
254        if item
255            .keywords
256            .iter()
257            .any(|value| value.eq_ignore_ascii_case(token))
258        {
259            token_score += 2.5;
260        }
261        if item
262            .tags
263            .iter()
264            .any(|value| value.eq_ignore_ascii_case(token))
265        {
266            token_score += 2.0;
267        }
268        if item
269            .entities
270            .iter()
271            .any(|value| value.eq_ignore_ascii_case(token))
272        {
273            token_score += 1.5;
274        }
275        if summary.contains(token) {
276            token_score += 1.0;
277        }
278        if token_score > 0.0 {
279            matched_any = true;
280            score += token_score;
281        }
282    }
283
284    if !matched_any {
285        return None;
286    }
287
288    score += lexical_status_adjustment(item.status);
289    Some((score / query_tokens.len() as f64 * 100.0).round() / 100.0)
290}
291
292fn lexical_status_adjustment(status: DurableMemoryStatus) -> f64 {
293    match status {
294        DurableMemoryStatus::Active => 0.0,
295        DurableMemoryStatus::Stale => -0.75,
296        DurableMemoryStatus::Superseded
297        | DurableMemoryStatus::Contradicted
298        | DurableMemoryStatus::Archived => -10.0,
299    }
300}
301
302fn sort_recall_candidates(candidates: &mut [MemoryRecallCandidate]) {
303    candidates.sort_by(|left, right| {
304        right
305            .score
306            .partial_cmp(&left.score)
307            .unwrap_or(Ordering::Equal)
308            .then_with(|| {
309                let left_dt = parse_rfc3339(&left.updated_at)
310                    .unwrap_or(chrono::DateTime::<chrono::Utc>::MIN_UTC);
311                let right_dt = parse_rfc3339(&right.updated_at)
312                    .unwrap_or(chrono::DateTime::<chrono::Utc>::MIN_UTC);
313                right_dt.cmp(&left_dt)
314            })
315            .then_with(|| left.title.cmp(&right.title))
316    });
317}
318
319fn build_rerank_prompt(query: &str, candidates: &[MemoryRecallCandidate], limit: usize) -> String {
320    let mut prompt = String::from("# Bamboo Relevant Memory Recall Rerank\n\n");
321    prompt.push_str(
322        "Select the durable memory candidates that are most relevant to the user query.\n",
323    );
324    prompt.push_str("Return JSON only in the form {\"ids\":[\"candidate-id\", ...]}.\n");
325    prompt
326        .push_str("Do not include commentary, markdown fences, explanations, or unknown ids.\n\n");
327    prompt.push_str("## User query\n");
328    prompt.push_str(query.trim());
329    prompt.push_str("\n\n## Candidate memories\n");
330
331    for (index, candidate) in candidates.iter().enumerate() {
332        prompt.push_str(&format!(
333            "{}. id={}\n   title: {}\n   scope: {}\n   status: {}\n   updated_at: {}\n   lexical_score: {:.2}\n   summary: {}\n",
334            index + 1,
335            candidate.id,
336            candidate.title,
337            candidate.scope.as_str(),
338            candidate.status.as_str(),
339            candidate.updated_at,
340            candidate.score,
341            candidate.summary.replace('\n', " "),
342        ));
343    }
344
345    prompt.push_str(&format!(
346        "\n## Selection rules\n- Return at most {limit} ids.\n- Use only ids from the candidate list above.\n- Prefer candidates that best answer the user query or encode active preferences/constraints relevant to it.\n- Prefer active memories over stale ones when relevance is otherwise similar.\n- Keep the ids ordered best-to-worst.\n"
347    ));
348    prompt
349}
350
/// Ask the rerank model to order the shortlist; returns the raw id list.
///
/// Error messages (as `String`) cover: an empty model name, provider call
/// failure, a stream error before any content arrived, a 30s overall
/// timeout, and an unparseable response.
async fn rerank_candidate_ids(
    query: &str,
    candidates: &[MemoryRecallCandidate],
    limit: usize,
    context: &MemoryRecallRerankContext,
) -> Result<Vec<String>, String> {
    let model = context.model.trim();
    if model.is_empty() {
        return Err("rerank model is empty".to_string());
    }

    let messages = vec![
        Message::system(
            "You rerank Bamboo durable-memory recall candidates. Return strict JSON only in the form {\"ids\":[...]} using only candidate ids from the prompt.",
        ),
        Message::user(build_rerank_prompt(query, candidates, limit)),
    ];
    let options = LLMRequestOptions {
        session_id: context.session_id.clone(),
        reasoning_effort: Some(ReasoningEffort::High),
        parallel_tool_calls: None,
        responses: None,
    };

    // No tools are offered; 200 output tokens is ample for a short id list.
    let mut stream = context
        .llm
        .chat_stream_with_options(&messages, &[], Some(200), model, Some(&options))
        .await
        .map_err(|error| format!("rerank provider call failed: {error}"))?;

    // Accumulate tokens with one overall 30-second budget for the stream.
    let content = tokio::time::timeout(std::time::Duration::from_secs(30), async {
        let mut content = String::new();
        while let Some(chunk_result) = stream.next().await {
            match chunk_result {
                Ok(LLMChunk::Token(text)) => content.push_str(&text),
                Ok(LLMChunk::Done) => break,
                Ok(_) => {}
                Err(error) => {
                    // Best-effort: a late stream error after some content was
                    // received still lets us try to parse what we have.
                    if !content.trim().is_empty() {
                        break;
                    }
                    return Err(format!("rerank stream failed: {error}"));
                }
            }
        }
        Ok(content)
    })
    .await
    // The outer Err means the timeout elapsed; fold it into a rerank error.
    .unwrap_or_else(|_| Err("rerank timed out after 30s".to_string()))?;

    parse_reranked_ids(&content, candidates)
        .ok_or_else(|| format!("failed to parse rerank response: {}", content.trim()))
}
404
405fn reorder_candidates_by_ids(
406    lexical_candidates: &[MemoryRecallCandidate],
407    preferred_ids: &[String],
408    limit: usize,
409) -> Vec<MemoryRecallCandidate> {
410    if lexical_candidates.is_empty() || limit == 0 {
411        return Vec::new();
412    }
413
414    let allowed = lexical_candidates
415        .iter()
416        .map(|candidate| candidate.id.as_str())
417        .collect::<HashSet<_>>();
418    let mut seen = HashSet::new();
419    let mut ordered = Vec::new();
420
421    for id in preferred_ids {
422        let trimmed = id.trim();
423        if trimmed.is_empty() || !allowed.contains(trimmed) || !seen.insert(trimmed.to_string()) {
424            continue;
425        }
426        if let Some(candidate) = lexical_candidates
427            .iter()
428            .find(|candidate| candidate.id == trimmed)
429            .cloned()
430        {
431            ordered.push(candidate);
432            if ordered.len() >= limit {
433                return ordered;
434            }
435        }
436    }
437
438    for candidate in lexical_candidates {
439        if seen.insert(candidate.id.clone()) {
440            ordered.push(candidate.clone());
441            if ordered.len() >= limit {
442                break;
443            }
444        }
445    }
446
447    ordered
448}
449
450fn parse_reranked_ids(raw: &str, candidates: &[MemoryRecallCandidate]) -> Option<Vec<String>> {
451    let stripped = strip_markdown_fence(raw);
452    let fragment = extract_json_fragment(&stripped).unwrap_or(stripped.trim());
453    let ids = serde_json::from_str::<MemoryRecallRerankEnvelope>(fragment)
454        .map(|value| value.ids)
455        .or_else(|_| serde_json::from_str::<Vec<String>>(fragment))
456        .ok()?;
457
458    let allowed = candidates
459        .iter()
460        .map(|candidate| candidate.id.as_str())
461        .collect::<HashSet<_>>();
462    let mut seen = HashSet::new();
463    let mut out = Vec::new();
464
465    for id in ids {
466        let trimmed = id.trim();
467        if trimmed.is_empty() || !allowed.contains(trimmed) || !seen.insert(trimmed.to_string()) {
468            continue;
469        }
470        out.push(trimmed.to_string());
471    }
472
473    (!out.is_empty()).then_some(out)
474}
475
/// Remove a surrounding markdown code fence (``` or ````), if present.
///
/// The opening fence line (including any language tag such as ```json) and
/// the final closing fence are dropped; the trimmed body is returned. Input
/// without a complete fence is returned trimmed but otherwise untouched.
fn strip_markdown_fence(raw: &str) -> String {
    let trimmed = raw.trim();
    // Try the longer fence first so ```` is not mistaken for ``` followed by
    // a backtick of content.
    for fence in ["````", "```"] {
        let Some(after_fence) = trimmed.strip_prefix(fence) else {
            continue;
        };
        // The fence line may carry a language tag; the body starts after the
        // first newline. No newline means no body — not a real fence.
        let Some(newline) = after_fence.find('\n') else {
            continue;
        };
        let body = &after_fence[newline + 1..];
        if let Some(close) = body.rfind(fence) {
            return body[..close].trim().to_string();
        }
    }
    trimmed.to_string()
}
491
/// Locate the outermost JSON object or array embedded in free-form text.
///
/// Objects are preferred over arrays. The returned slice spans the first
/// opening delimiter through the last closing delimiter of the same kind;
/// `None` when no complete pair is found.
fn extract_json_fragment(raw: &str) -> Option<&str> {
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        return None;
    }

    // `{...}` takes priority; fall back to `[...]` when no object pair
    // exists (or the braces are inverted).
    for (open, close) in [('{', '}'), ('[', ']')] {
        let (Some(start), Some(end)) = (trimmed.find(open), trimmed.rfind(close)) else {
            continue;
        };
        if start <= end {
            return Some(trimmed[start..=end].trim());
        }
    }

    None
}
512
// Unit and integration tests for recall: scoring weights, status filtering,
// rerank-response parsing/reordering, scope precedence, and the
// rerank-success / rerank-failure paths of `select_relevant_memories`.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::memory_store::DurableMemoryType;
    use async_trait::async_trait;
    use bamboo_infrastructure::{LLMError, LLMStream};
    use futures::stream;
    use std::sync::Mutex;
    use tempfile::tempdir;

    // Helper: a project-scoped lexical-index item with the given metadata;
    // created_at mirrors updated_at for simplicity.
    fn item(
        id: &str,
        title: &str,
        status: DurableMemoryStatus,
        updated_at: &str,
        keywords: &[&str],
        tags: &[&str],
        entities: &[&str],
        summary: &str,
    ) -> LexicalIndexItem {
        LexicalIndexItem {
            id: id.to_string(),
            title: title.to_string(),
            scope: MemoryScope::Project,
            project_key: Some("proj-1".to_string()),
            r#type: DurableMemoryType::Project,
            status,
            tags: tags.iter().map(|v| v.to_string()).collect(),
            keywords: keywords.iter().map(|v| v.to_string()).collect(),
            entities: entities.iter().map(|v| v.to_string()).collect(),
            updated_at: updated_at.to_string(),
            created_at: updated_at.to_string(),
            summary: summary.to_string(),
        }
    }

    // Stub LLM provider that replays a fixed response and records which
    // model names were requested, so tests can assert the rerank model used.
    #[derive(Clone)]
    struct StaticResponseProvider {
        response: String,
        requested_models: Arc<Mutex<Vec<String>>>,
    }

    impl StaticResponseProvider {
        fn new(response: impl Into<String>) -> Self {
            Self {
                response: response.into(),
                requested_models: Arc::new(Mutex::new(Vec::new())),
            }
        }
    }

    #[async_trait]
    impl LLMProvider for StaticResponseProvider {
        async fn chat_stream(
            &self,
            _messages: &[Message],
            _tools: &[bamboo_agent_core::ToolSchema],
            _max_output_tokens: Option<u32>,
            model: &str,
        ) -> Result<LLMStream, LLMError> {
            self.requested_models
                .lock()
                .expect("lock poisoned")
                .push(model.to_string());
            // Emit the canned response as a single token, then Done.
            Ok(Box::pin(stream::iter(vec![
                Ok(LLMChunk::Token(self.response.clone())),
                Ok(LLMChunk::Done),
            ])))
        }
    }

    // Title matches (weight 3.0) should beat keyword-only matches (2.5).
    #[test]
    fn title_matches_outrank_keyword_only_matches() {
        let query_tokens = vec!["release".to_string(), "freeze".to_string()];
        let title_item = item(
            "a",
            "Release freeze decision",
            DurableMemoryStatus::Active,
            "2026-04-09T00:00:00Z",
            &[],
            &[],
            &[],
            "summary",
        );
        let keyword_item = item(
            "b",
            "Deployment decision",
            DurableMemoryStatus::Active,
            "2026-04-09T00:00:00Z",
            &["release", "freeze"],
            &[],
            &[],
            &[],
            "summary",
        );

        let title_score = score_lexical_index_item(&title_item, &query_tokens).unwrap();
        let keyword_score = score_lexical_index_item(&keyword_item, &query_tokens).unwrap();
        assert!(title_score > keyword_score);
    }

    // The stale-status penalty must outweigh an identical lexical match,
    // even when the stale item is newer.
    #[test]
    fn active_items_outrank_stale_items() {
        let query_tokens = vec!["release".to_string()];
        let active = item(
            "a",
            "Release freeze decision",
            DurableMemoryStatus::Active,
            "2026-04-09T00:00:00Z",
            &[],
            &[],
            &[],
            "summary",
        );
        let stale = item(
            "b",
            "Release freeze decision",
            DurableMemoryStatus::Stale,
            "2026-04-10T00:00:00Z",
            &[],
            &[],
            &[],
            "summary",
        );

        let active_score = score_lexical_index_item(&active, &query_tokens).unwrap();
        let stale_score = score_lexical_index_item(&stale, &query_tokens).unwrap();
        assert!(active_score > stale_score);
    }

    // Terminal statuses are excluded from scoring entirely (None).
    #[test]
    fn contradicted_and_archived_items_are_filtered_out() {
        let query_tokens = vec!["release".to_string()];
        let contradicted = item(
            "a",
            "Release freeze decision",
            DurableMemoryStatus::Contradicted,
            "2026-04-09T00:00:00Z",
            &[],
            &[],
            &[],
            "summary",
        );
        let archived = item(
            "b",
            "Release freeze decision",
            DurableMemoryStatus::Archived,
            "2026-04-09T00:00:00Z",
            &[],
            &[],
            &[],
            "summary",
        );

        assert!(score_lexical_index_item(&contradicted, &query_tokens).is_none());
        assert!(score_lexical_index_item(&archived, &query_tokens).is_none());
    }

    // Fenced JSON is unwrapped; unknown and duplicate ids are dropped while
    // preserving the model's ordering.
    #[test]
    fn parse_reranked_ids_accepts_fenced_json_and_filters_unknown_ids() {
        let candidates = vec![
            MemoryRecallCandidate {
                id: "mem-a".to_string(),
                title: "A".to_string(),
                score: 10.0,
                scope: MemoryScope::Project,
                project_key: Some("proj-1".to_string()),
                status: DurableMemoryStatus::Active,
                updated_at: "2026-04-09T00:00:00Z".to_string(),
                summary: "summary a".to_string(),
            },
            MemoryRecallCandidate {
                id: "mem-b".to_string(),
                title: "B".to_string(),
                score: 9.0,
                scope: MemoryScope::Project,
                project_key: Some("proj-1".to_string()),
                status: DurableMemoryStatus::Active,
                updated_at: "2026-04-09T00:00:00Z".to_string(),
                summary: "summary b".to_string(),
            },
        ];

        let parsed = parse_reranked_ids(
            "```json\n{\"ids\":[\"mem-b\",\"unknown\",\"mem-a\",\"mem-b\"]}\n```",
            &candidates,
        )
        .expect("reranked ids should parse");

        assert_eq!(parsed, vec!["mem-b".to_string(), "mem-a".to_string()]);
    }

    // Candidates not named by the model are appended after the preferred
    // ones, in their original lexical order.
    #[test]
    fn reorder_candidates_by_ids_appends_remaining_lexical_candidates() {
        let lexical = vec![
            MemoryRecallCandidate {
                id: "mem-a".to_string(),
                title: "A".to_string(),
                score: 10.0,
                scope: MemoryScope::Project,
                project_key: Some("proj-1".to_string()),
                status: DurableMemoryStatus::Active,
                updated_at: "2026-04-09T00:00:00Z".to_string(),
                summary: "summary a".to_string(),
            },
            MemoryRecallCandidate {
                id: "mem-b".to_string(),
                title: "B".to_string(),
                score: 9.0,
                scope: MemoryScope::Project,
                project_key: Some("proj-1".to_string()),
                status: DurableMemoryStatus::Active,
                updated_at: "2026-04-09T00:00:00Z".to_string(),
                summary: "summary b".to_string(),
            },
            MemoryRecallCandidate {
                id: "mem-c".to_string(),
                title: "C".to_string(),
                score: 8.0,
                scope: MemoryScope::Project,
                project_key: Some("proj-1".to_string()),
                status: DurableMemoryStatus::Active,
                updated_at: "2026-04-09T00:00:00Z".to_string(),
                summary: "summary c".to_string(),
            },
        ];

        let reordered =
            reorder_candidates_by_ids(&lexical, &["mem-c".to_string(), "mem-a".to_string()], 3);

        assert_eq!(reordered[0].id, "mem-c");
        assert_eq!(reordered[1].id, "mem-a");
        assert_eq!(reordered[2].id, "mem-b");
    }

    // When the project scope has hits, the global scope is not consulted.
    #[tokio::test]
    async fn project_scope_shortlist_excludes_global_when_project_hits_exist() {
        let dir = tempdir().unwrap();
        let store = MemoryStore::new(dir.path());

        store
            .write_memory(
                MemoryScope::Project,
                Some("proj-1"),
                DurableMemoryType::Project,
                "Release freeze decision",
                "Project-specific release freeze note.",
                &["release".to_string()],
                Some("session-1"),
                "main-model",
                false,
            )
            .await
            .unwrap();
        store
            .write_memory(
                MemoryScope::Global,
                None,
                DurableMemoryType::Reference,
                "Global release guidance",
                "Global note that should not be used when project hits exist.",
                &["release".to_string()],
                Some("session-1"),
                "main-model",
                false,
            )
            .await
            .unwrap();

        let candidates = shortlist_relevant_memories(
            &store,
            Some("proj-1"),
            "release freeze",
            &MemoryRecallOptions::default(),
        )
        .await
        .unwrap();

        assert!(!candidates.is_empty());
        assert!(candidates
            .iter()
            .all(|candidate| candidate.scope == MemoryScope::Project));
    }

    // An unknown project key yields no project hits, so the global fallback
    // supplies the candidates.
    #[tokio::test]
    async fn global_fallback_triggers_only_when_project_hits_are_absent() {
        let dir = tempdir().unwrap();
        let store = MemoryStore::new(dir.path());

        store
            .write_memory(
                MemoryScope::Global,
                None,
                DurableMemoryType::Reference,
                "Global release guidance",
                "Fallback note for release work.",
                &["release".to_string()],
                Some("session-1"),
                "main-model",
                false,
            )
            .await
            .unwrap();

        let candidates = shortlist_relevant_memories(
            &store,
            Some("proj-missing"),
            "release guidance",
            &MemoryRecallOptions::default(),
        )
        .await
        .unwrap();

        assert!(!candidates.is_empty());
        assert!(candidates
            .iter()
            .all(|candidate| candidate.scope == MemoryScope::Global));
    }

    // A valid rerank response reorders the shortlist, reports the Reranked
    // strategy, and calls the provider with the configured rerank model.
    #[tokio::test]
    async fn model_rerank_reorders_lexical_shortlist_when_enabled() {
        let dir = tempdir().unwrap();
        let store = MemoryStore::new(dir.path());

        let lexical_first = store
            .write_memory(
                MemoryScope::Project,
                Some("proj-1"),
                DurableMemoryType::Project,
                "Release freeze checklist",
                "Generic release freeze checklist for shipping work.",
                &["release".to_string(), "freeze".to_string()],
                Some("session-1"),
                "main-model",
                false,
            )
            .await
            .unwrap();
        let reranked_first = store
            .write_memory(
                MemoryScope::Project,
                Some("proj-1"),
                DurableMemoryType::Project,
                "Mobile launch blocker",
                "This durable note captures the release freeze decision for the mobile app and should be preferred for mobile freeze requests.",
                &["mobile".to_string(), "launch".to_string()],
                Some("session-1"),
                "main-model",
                false,
            )
            .await
            .unwrap();

        let provider = StaticResponseProvider::new(format!(
            "{{\"ids\":[\"{}\",\"{}\"]}}",
            reranked_first.frontmatter.id, lexical_first.frontmatter.id
        ));
        let requested_models = provider.requested_models.clone();
        let selection = select_relevant_memories(
            &store,
            Some("proj-1"),
            "release freeze for mobile",
            &MemoryRecallOptions {
                shortlist_limit: 2,
                include_global_fallback: false,
                max_candidates_per_scope: 12,
            },
            Some(&MemoryRecallRerankContext {
                llm: Arc::new(provider),
                model: "rerank-fast-model".to_string(),
                session_id: Some("session-1".to_string()),
            }),
        )
        .await
        .unwrap();

        assert_eq!(selection.strategy, MemoryRecallStrategy::Reranked);
        assert_eq!(selection.candidates.len(), 2);
        assert_eq!(selection.candidates[0].id, reranked_first.frontmatter.id);
        assert_eq!(selection.candidates[1].id, lexical_first.frontmatter.id);
        assert_eq!(
            requested_models.lock().expect("lock poisoned").as_slice(),
            ["rerank-fast-model"]
        );
    }

    // An unparseable rerank response keeps lexical order and reports the
    // RerankFallback strategy.
    #[tokio::test]
    async fn invalid_model_rerank_response_falls_back_to_lexical_order() {
        let dir = tempdir().unwrap();
        let store = MemoryStore::new(dir.path());

        let lexical_first = store
            .write_memory(
                MemoryScope::Project,
                Some("proj-1"),
                DurableMemoryType::Project,
                "Release freeze checklist",
                "Generic release freeze checklist for shipping work.",
                &["release".to_string(), "freeze".to_string()],
                Some("session-1"),
                "main-model",
                false,
            )
            .await
            .unwrap();
        let lexical_second = store
            .write_memory(
                MemoryScope::Project,
                Some("proj-1"),
                DurableMemoryType::Project,
                "Mobile launch blocker",
                "This durable note captures the release freeze decision for the mobile app.",
                &["mobile".to_string(), "launch".to_string()],
                Some("session-1"),
                "main-model",
                false,
            )
            .await
            .unwrap();

        let selection = select_relevant_memories(
            &store,
            Some("proj-1"),
            "release freeze for mobile",
            &MemoryRecallOptions {
                shortlist_limit: 2,
                include_global_fallback: false,
                max_candidates_per_scope: 12,
            },
            Some(&MemoryRecallRerankContext {
                llm: Arc::new(StaticResponseProvider::new("not valid json")),
                model: "rerank-fast-model".to_string(),
                session_id: Some("session-1".to_string()),
            }),
        )
        .await
        .unwrap();

        assert_eq!(selection.strategy, MemoryRecallStrategy::RerankFallback);
        assert_eq!(selection.candidates.len(), 2);
        assert_eq!(selection.candidates[0].id, lexical_first.frontmatter.id);
        assert_eq!(selection.candidates[1].id, lexical_second.frontmatter.id);
    }
}
955}