1use std::{sync::Arc, time::Instant};
2
/// Maximum number of characters (not bytes) included in a `recall` snippet.
const SNIPPET_MAX_CHARS: usize = 500;
6
7use chrono::Utc;
8use rmcp::{
9 handler::server::{router::tool::ToolRouter, tool::Extension, wrapper::Parameters},
10 model::{ErrorData, ServerCapabilities, ServerInfo},
11 tool, tool_handler, tool_router, ServerHandler,
12};
13use tracing::{info, warn, Instrument};
14
15fn extract_session_id(parts: &http::request::Parts) -> String {
20 let raw = parts
21 .headers
22 .get("mcp-session-id")
23 .and_then(|v: &http::HeaderValue| v.to_str().ok())
24 .unwrap_or("unknown");
25 if raw.len() > 128 {
26 let truncated: String = raw.chars().take(128).collect();
27 format!("{truncated}…")
28 } else {
29 raw.to_owned()
30 }
31}
32
33use crate::{
34 embedding::EmbeddingBackend,
35 error::MemoryError,
36 index::VectorStore,
37 repo::MemoryRepo,
38 types::{
39 parse_qualified_name, parse_scope, parse_scope_filter, validate_name, AppState,
40 ChangedMemories, EditArgs, ForgetArgs, ListArgs, Memory, MemoryMetadata, PullResult,
41 ReadArgs, RecallArgs, ReindexStats, RememberArgs, Scope, ScopeFilter, SyncArgs,
42 },
43};
44
/// MCP server exposing the memory tools. Cheap to clone: all heavy state is
/// behind a shared `Arc`.
#[derive(Clone)]
pub struct MemoryServer {
    // Shared repo, embedding backend, vector index, and sync configuration.
    state: Arc<AppState>,
    // Populated by the #[tool_router] macro; presumably consumed by the
    // #[tool_handler] expansion rather than referenced by name, hence the
    // dead_code allowance — TODO confirm against rmcp macro docs.
    #[allow(dead_code)]
    tool_router: ToolRouter<Self>,
}
57
/// Maximum accepted memory content size in bytes (1 MiB); `remember` and
/// `edit` reject larger payloads before any embedding work happens.
const MAX_CONTENT_SIZE: usize = 1_048_576;
60
/// Apply a diff-derived set of memory changes (`removed` / `upserted`) to the
/// vector index, embedding new or changed content as needed.
///
/// Best-effort: per-entry failures (parse, read, embed, index) are logged,
/// counted in `ReindexStats::errors`, and skipped, so a single bad record
/// cannot abort the whole pass.
async fn incremental_reindex(
    repo: &Arc<MemoryRepo>,
    embedding: &dyn EmbeddingBackend,
    index: &dyn VectorStore,
    changes: &ChangedMemories,
) -> ReindexStats {
    let mut stats = ReindexStats::default();

    // Phase 1: drop vectors for memories the pull deleted.
    for name in &changes.removed {
        match parse_qualified_name(name) {
            Ok((scope, _)) => {
                if let Err(e) = index.remove(&scope, name) {
                    warn!(
                        qualified_name = %name,
                        error = %e,
                        "incremental_reindex: failed to remove vector; skipping"
                    );
                    stats.errors += 1;
                } else {
                    stats.removed += 1;
                }
            }
            Err(e) => {
                warn!(
                    qualified_name = %name,
                    error = %e,
                    "incremental_reindex: cannot parse qualified name for removal; skipping"
                );
            }
        }
    }

    // Phase 2: parse upserted names into (scope, bare name, qualified name).
    let mut pairs: Vec<(Scope, String, String)> = Vec::new();
    for qualified in &changes.upserted {
        match parse_qualified_name(qualified) {
            Ok((scope, name)) => pairs.push((scope, name, qualified.clone())),
            Err(e) => {
                warn!(
                    qualified_name = %qualified,
                    error = %e,
                    "incremental_reindex: cannot parse qualified name; skipping"
                );
                stats.errors += 1;
            }
        }
    }

    // Phase 3: read the current content for each parsed entry from the repo.
    let mut to_embed: Vec<(Scope, String, String)> = Vec::new();
    for (scope, name, qualified) in &pairs {
        let memory = match repo.read_memory(name, scope).await {
            Ok(m) => m,
            Err(e) => {
                warn!(
                    qualified_name = %qualified,
                    error = %e,
                    "incremental_reindex: failed to read memory; skipping"
                );
                stats.errors += 1;
                continue;
            }
        };
        to_embed.push((scope.clone(), qualified.clone(), memory.content));
    }

    if to_embed.is_empty() {
        return stats;
    }

    // Phase 4: embed everything in one batch; if the batch call fails, fall
    // back to per-item embedding so a single poison input does not sink the
    // rest of the changes.
    let contents: Vec<String> = to_embed.iter().map(|(_, _, c)| c.clone()).collect();
    let vectors = match embedding.embed(&contents).await {
        Ok(v) => v,
        Err(batch_err) => {
            warn!(error = %batch_err, "incremental_reindex: batch embed failed; falling back to per-item");
            let mut vecs: Vec<Vec<f32>> = Vec::with_capacity(contents.len());
            let mut failed: Vec<usize> = Vec::new();
            for (i, content) in contents.iter().enumerate() {
                match embedding.embed(std::slice::from_ref(content)).await {
                    Ok(mut v) => vecs.push(v.remove(0)),
                    Err(e) => {
                        warn!(
                            error = %e,
                            qualified_name = %to_embed[i].1,
                            "incremental_reindex: per-item embed failed; skipping"
                        );
                        failed.push(i);
                        stats.errors += 1;
                    }
                }
            }
            // Remove failed entries back-to-front so earlier indices stay
            // valid; this keeps `to_embed` aligned position-for-position with
            // `vecs` for the zip in phase 5.
            for &i in failed.iter().rev() {
                to_embed.remove(i);
            }
            vecs
        }
    };

    // Phase 5: insert vectors, classifying each as an add or an update.
    for ((scope, qualified_name, _), vector) in to_embed.iter().zip(vectors.iter()) {
        // Probe before inserting so the stats report update vs. add correctly.
        let is_update = index.find_by_name(qualified_name).is_some();

        match index.add(scope, vector, qualified_name.clone()) {
            Ok(_) => {}
            Err(e) => {
                warn!(
                    qualified_name = %qualified_name,
                    error = %e,
                    "incremental_reindex: add failed; skipping"
                );
                stats.errors += 1;
                continue;
            }
        }

        if is_update {
            stats.updated += 1;
        } else {
            stats.added += 1;
        }
    }

    stats
}
201
/// Rebuild the vector index from every memory currently in the repository.
///
/// Embeds content in fixed-size batches; when a batch call fails it falls back
/// to per-item embedding, and entries that still cannot be embedded are skipped
/// and counted in `ReindexStats::errors`.
///
/// # Errors
///
/// Returns an error only if the initial repository listing fails; embedding
/// and index-insert failures are handled per entry instead.
pub async fn full_reindex(
    repo: &Arc<MemoryRepo>,
    embedding: &dyn EmbeddingBackend,
    index: &dyn VectorStore,
) -> Result<ReindexStats, MemoryError> {
    let memories = repo.list_memories(None).await?;
    if memories.is_empty() {
        return Ok(ReindexStats::default());
    }

    let mut stats = ReindexStats::default();

    // Flatten to (scope, qualified name, content) so chunks carry everything
    // needed for embedding and insertion.
    let items: Vec<(Scope, String, String)> = memories
        .into_iter()
        .map(|m| {
            let qualified = format!("{}/{}", m.metadata.scope.dir_prefix(), m.name);
            (m.metadata.scope, qualified, m.content)
        })
        .collect();

    // Batch size bounds per-call embedding payloads.
    const REINDEX_BATCH_SIZE: usize = 64;
    for chunk in items.chunks(REINDEX_BATCH_SIZE) {
        let contents: Vec<String> = chunk.iter().map(|(_, _, c)| c.clone()).collect();

        let vectors = match embedding.embed(&contents).await {
            Ok(v) => v,
            Err(batch_err) => {
                warn!(error = %batch_err, "full_reindex: batch embed failed; falling back to per-item");
                let mut vecs = Vec::with_capacity(contents.len());
                for (i, content) in contents.iter().enumerate() {
                    match embedding.embed(std::slice::from_ref(content)).await {
                        Ok(mut v) => vecs.push(v.remove(0)),
                        Err(e) => {
                            warn!(
                                error = %e,
                                qualified_name = %chunk[i].1,
                                "full_reindex: per-item embed failed; skipping"
                            );
                            stats.errors += 1;
                            // Empty placeholder keeps `vecs` aligned with
                            // `chunk`; the insert loop below skips it.
                            vecs.push(Vec::new());
                        }
                    }
                }
                vecs
            }
        };

        debug_assert_eq!(
            vectors.len(),
            chunk.len(),
            "embed() must return exactly one vector per input"
        );

        for ((scope, qualified_name, _), vector) in chunk.iter().zip(vectors.iter()) {
            // Skip per-item embed failures marked by the empty placeholder.
            if vector.is_empty() {
                continue;
            }
            match index.add(scope, vector, qualified_name.clone()) {
                Ok(_) => stats.added += 1,
                Err(e) => {
                    warn!(
                        qualified_name = %qualified_name,
                        error = %e,
                        "full_reindex: index add failed; skipping"
                    );
                    stats.errors += 1;
                }
            }
        }
    }

    Ok(stats)
}
286
#[tool_router]
impl MemoryServer {
    /// Build a server around shared application state, wiring up the
    /// macro-generated tool router.
    pub fn new(state: Arc<AppState>) -> Self {
        Self {
            state,
            tool_router: Self::tool_router(),
        }
    }

    /// `remember` tool: validate, embed, index, then persist a new memory.
    ///
    /// Validation and the size gate run before the tracing span is entered so
    /// rejected requests stay cheap.
    #[tool(
        name = "remember",
        description = "Store a new memory. Saves the content to the git-backed repository and \
            indexes it for semantic search. Use scope 'project:<basename-of-your-cwd>' for \
            project-specific memories or omit for global. Returns the assigned memory ID. \
            IMPORTANT: Never store credentials, API keys, tokens, passwords, or other secrets — \
            memories are plaintext files in a git repo and may be synced to a remote."
    )]
    async fn remember(
        &self,
        Parameters(args): Parameters<RememberArgs>,
        Extension(parts): Extension<http::request::Parts>,
    ) -> Result<String, ErrorData> {
        validate_name(&args.name).map_err(ErrorData::from)?;
        // Reject oversized payloads before doing any embedding work.
        if args.content.len() > MAX_CONTENT_SIZE {
            return Err(ErrorData::from(crate::error::MemoryError::InvalidInput {
                reason: format!(
                    "content size {} exceeds maximum of {} bytes",
                    args.content.len(),
                    MAX_CONTENT_SIZE
                ),
            }));
        }
        let session_id = extract_session_id(&parts);
        let content_size = args.content.len();
        let span = tracing::info_span!(
            "handler.remember",
            session_id = %session_id,
            name = %args.name,
            scope = ?args.scope,
            content_size,
        );
        let state = Arc::clone(&self.state);
        async move {
            let scope = parse_scope(args.scope.as_deref()).map_err(ErrorData::from)?;
            let metadata = MemoryMetadata::new(scope.clone(), args.tags, args.source);
            let memory = Memory::new(args.name, args.content, metadata);

            // Embed first: if embedding fails nothing has been written yet.
            let start = Instant::now();
            let vector = state
                .embedding
                .embed_one(&memory.content)
                .await
                .map_err(ErrorData::from)?;
            info!(embed_ms = start.elapsed().as_millis(), "embedded");

            let qualified_name = format!("{}/{}", memory.metadata.scope.dir_prefix(), memory.name);

            // NOTE(review): the index is updated before the repo save; if the
            // save below fails, a vector for an unsaved memory remains in the
            // index. Recall tolerates this by skipping unreadable entries.
            state
                .index
                .add(&scope, &vector, qualified_name)
                .map_err(ErrorData::from)?;

            let start = Instant::now();
            state
                .repo
                .save_memory(&memory)
                .await
                .map_err(ErrorData::from)?;
            info!(repo_ms = start.elapsed().as_millis(), "saved to repo");

            Ok(serde_json::json!({
                "id": memory.id,
                "name": memory.name,
                "scope": memory.metadata.scope.to_string(),
            })
            .to_string())
        }
        .instrument(span)
        .await
    }

    /// `recall` tool: embed the query, search the vector index, hydrate each
    /// hit from the repo, and return snippets as JSON.
    #[tool(
        name = "recall",
        description = "Search memories by semantic similarity. Embeds the query and returns the top matching memories as a JSON array \
            with name, scope, tags, and a content snippet (max 500 chars).\n\n\
            Each result includes `truncated` (bool) and `content_length` (total character count). \
            When `truncated` is true, the snippet is incomplete — use the `read` tool with the memory's name and scope \
            to retrieve the full content before acting on it.\n\n\
            Scope: pass 'project:<basename-of-your-cwd>' to search your current project + global memories, \
            'global' for global-only, or 'all' to search everything. Omitting scope defaults to global-only."
    )]
    async fn recall(
        &self,
        Parameters(args): Parameters<RecallArgs>,
        Extension(parts): Extension<http::request::Parts>,
    ) -> Result<String, ErrorData> {
        let session_id = extract_session_id(&parts);
        // `count` is recorded later, once the result set is known.
        let span = tracing::info_span!(
            "handler.recall",
            session_id = %session_id,
            scope = ?args.scope,
            limit = ?args.limit,
            count = tracing::field::Empty,
        );
        let state = Arc::clone(&self.state);
        async move {
            let scope_filter =
                parse_scope_filter(args.scope.as_deref()).map_err(ErrorData::from)?;

            // Default 5 results, hard cap 100.
            let limit = args.limit.unwrap_or(5).min(100);

            let start = Instant::now();
            let query_vector = state
                .embedding
                .embed_one(&args.query)
                .await
                .map_err(ErrorData::from)?;
            info!(embed_ms = start.elapsed().as_millis(), "query embedded");

            let start = Instant::now();
            let results = state
                .index
                .search(&scope_filter, &query_vector, limit)
                .map_err(ErrorData::from)?;
            info!(
                search_ms = start.elapsed().as_millis(),
                candidates = results.len(),
                "index searched"
            );

            let pre_filter_count = results.len();
            let mut results_vec = Vec::new();
            let mut skipped_errors: usize = 0;

            for (_key, qualified_name, distance) in results {
                if results_vec.len() >= limit {
                    break;
                }
                let (scope, name) = match parse_qualified_name(&qualified_name) {
                    Ok(pair) => pair,
                    Err(e) => {
                        warn!(
                            qualified_name = %qualified_name,
                            error = %e,
                            "could not parse qualified name from index; skipping"
                        );
                        skipped_errors += 1;
                        continue;
                    }
                };

                // The index can hold stale entries (e.g. after a failed
                // forget); a missing repo read is skipped, not fatal.
                let memory = match state.repo.read_memory(&name, &scope).await {
                    Ok(m) => m,
                    Err(e) => {
                        warn!(
                            name = %name,
                            error = %e,
                            "could not read memory from repo (deleted?); skipping"
                        );
                        skipped_errors += 1;
                        continue;
                    }
                };

                let (snippet, content_length, truncated) = build_snippet(&memory.content);

                results_vec.push(serde_json::json!({
                    "id": memory.id,
                    "name": memory.name,
                    "scope": memory.metadata.scope.to_string(),
                    "tags": memory.metadata.tags,
                    "content": snippet,
                    "content_length": content_length,
                    "truncated": truncated,
                    "distance": distance,
                }));
            }

            let count = results_vec.len();
            tracing::Span::current().record("count", count);
            info!(returned = count, skipped_errors, "recall complete");

            Ok(serde_json::json!({
                "results": results_vec,
                "count": count,
                "limit": limit,
                "pre_filter_count": pre_filter_count,
                "skipped_errors": skipped_errors,
            })
            .to_string())
        }
        .instrument(span)
        .await
    }

    /// `forget` tool: delete a memory from the repo, then best-effort remove
    /// its vector from the index.
    #[tool(
        name = "forget",
        description = "Delete a memory by name. Use scope 'project:<basename-of-your-cwd>' for project-scoped \
            memories or omit for global. Removes the file from git and the vector from the search index. \
            Returns 'ok' on success."
    )]
    async fn forget(
        &self,
        Parameters(args): Parameters<ForgetArgs>,
        Extension(parts): Extension<http::request::Parts>,
    ) -> Result<String, ErrorData> {
        validate_name(&args.name).map_err(ErrorData::from)?;
        let session_id = extract_session_id(&parts);
        let span = tracing::info_span!(
            "handler.forget",
            session_id = %session_id,
            name = %args.name,
            scope = ?args.scope,
        );
        let state = Arc::clone(&self.state);
        async move {
            let scope = parse_scope(args.scope.as_deref()).map_err(ErrorData::from)?;

            let start = Instant::now();

            // Repo delete is authoritative and must succeed.
            state
                .repo
                .delete_memory(&args.name, &scope)
                .await
                .map_err(ErrorData::from)?;

            // Vector removal is best-effort: a stale index entry is tolerated
            // because recall skips entries it cannot read back from the repo.
            let qualified_name = format!("{}/{}", scope.dir_prefix(), args.name);
            if let Err(e) = state.index.remove(&scope, &qualified_name) {
                warn!(name = %args.name, error = %e, "vector removal failed during forget; stale entry will be skipped at recall");
            }

            info!(
                ms = start.elapsed().as_millis(),
                name = %args.name,
                "memory forgotten"
            );

            Ok("ok".to_string())
        }
        .instrument(span)
        .await
    }

    /// `edit` tool: partial update of an existing memory. Only re-embeds and
    /// re-indexes when the content itself changed.
    #[tool(
        name = "edit",
        description = "Edit an existing memory. Supports partial updates — omit content or \
            tags to preserve existing values. Re-embeds and re-indexes the memory. Use scope \
            'project:<basename-of-your-cwd>' for project-scoped memories. Returns the memory ID. \
            IMPORTANT: Never store credentials, API keys, tokens, passwords, or other secrets — \
            memories are plaintext files in a git repo and may be synced to a remote."
    )]
    async fn edit(
        &self,
        Parameters(args): Parameters<EditArgs>,
        Extension(parts): Extension<http::request::Parts>,
    ) -> Result<String, ErrorData> {
        validate_name(&args.name).map_err(ErrorData::from)?;
        // Same size gate as `remember`, applied only when content is supplied.
        if let Some(ref content) = args.content {
            if content.len() > MAX_CONTENT_SIZE {
                return Err(ErrorData::from(crate::error::MemoryError::InvalidInput {
                    reason: format!(
                        "content size {} exceeds maximum of {} bytes",
                        content.len(),
                        MAX_CONTENT_SIZE
                    ),
                }));
            }
        }
        let session_id = extract_session_id(&parts);
        let content_size = args.content.as_ref().map(|c| c.len()).unwrap_or(0);
        let span = tracing::info_span!(
            "handler.edit",
            session_id = %session_id,
            name = %args.name,
            scope = ?args.scope,
            content_size,
        );
        let state = Arc::clone(&self.state);
        async move {
            let scope = parse_scope(args.scope.as_deref()).map_err(ErrorData::from)?;

            let start = Instant::now();

            // Captured before `args.content` is moved out below.
            let content_changed = args.content.is_some();

            let mut memory = state
                .repo
                .read_memory(&args.name, &scope)
                .await
                .map_err(ErrorData::from)?;

            // Apply partial updates; omitted fields keep their current values.
            if let Some(content) = args.content {
                memory.content = content;
            }
            if let Some(tags) = args.tags {
                memory.metadata.tags = tags;
            }
            memory.metadata.updated_at = Utc::now();

            // Tag-only edits skip the (expensive) embed + index step.
            if content_changed {
                let qualified_name =
                    format!("{}/{}", memory.metadata.scope.dir_prefix(), memory.name);

                let vector = state
                    .embedding
                    .embed_one(&memory.content)
                    .await
                    .map_err(ErrorData::from)?;

                state
                    .index
                    .add(&scope, &vector, qualified_name)
                    .map_err(ErrorData::from)?;
            }

            state
                .repo
                .save_memory(&memory)
                .await
                .map_err(ErrorData::from)?;

            info!(
                ms = start.elapsed().as_millis(),
                name = %args.name,
                content_changed,
                "memory edited"
            );

            Ok(serde_json::json!({
                "id": memory.id,
                "name": memory.name,
                "scope": memory.metadata.scope.to_string(),
            })
            .to_string())
        }
        .instrument(span)
        .await
    }

    /// `list` tool: enumerate memory summaries (no content) for a scope filter.
    #[tool(
        name = "list",
        description = "List stored memories. Pass 'project:<basename-of-your-cwd>' for project + global memories, \
            'global' for global-only, or 'all' for everything. Omitting scope defaults to global-only. \
            Returns a JSON array of memory summaries without full content."
    )]
    async fn list(
        &self,
        Parameters(args): Parameters<ListArgs>,
        Extension(parts): Extension<http::request::Parts>,
    ) -> Result<String, ErrorData> {
        let session_id = extract_session_id(&parts);
        let span = tracing::info_span!(
            "handler.list",
            session_id = %session_id,
            scope = ?args.scope,
            count = tracing::field::Empty,
        );
        let state = Arc::clone(&self.state);
        async move {
            let scope_filter =
                parse_scope_filter(args.scope.as_deref()).map_err(ErrorData::from)?;

            let start = Instant::now();
            // Project scope means "project plus global": two listings merged,
            // global first.
            let memories = match &scope_filter {
                ScopeFilter::GlobalOnly => state
                    .repo
                    .list_memories(Some(&Scope::Global))
                    .await
                    .map_err(ErrorData::from)?,
                ScopeFilter::All => state
                    .repo
                    .list_memories(None)
                    .await
                    .map_err(ErrorData::from)?,
                ScopeFilter::ProjectAndGlobal(project_name) => {
                    let project_scope = Scope::Project(project_name.clone());
                    let mut global = state
                        .repo
                        .list_memories(Some(&Scope::Global))
                        .await
                        .map_err(ErrorData::from)?;
                    let project = state
                        .repo
                        .list_memories(Some(&project_scope))
                        .await
                        .map_err(ErrorData::from)?;
                    global.extend(project);
                    global
                }
            };
            let count = memories.len();
            tracing::Span::current().record("count", count);
            info!(ms = start.elapsed().as_millis(), count, "listed memories");

            // Summaries deliberately omit `content` to keep responses small.
            let summaries: Vec<serde_json::Value> = memories
                .into_iter()
                .map(|m| {
                    serde_json::json!({
                        "id": m.id,
                        "name": m.name,
                        "scope": m.metadata.scope.to_string(),
                        "tags": m.metadata.tags,
                        "created_at": m.metadata.created_at,
                        "updated_at": m.metadata.updated_at,
                    })
                })
                .collect();

            Ok(serde_json::json!({
                "memories": summaries,
                "count": count,
            })
            .to_string())
        }
        .instrument(span)
        .await
    }

    /// `read` tool: fetch one memory's full content and metadata by name.
    #[tool(
        name = "read",
        description = "Read a specific memory by name. Use scope 'project:<basename-of-your-cwd>' for \
            project-scoped memories or omit for global. Returns the full markdown content and metadata \
            (id, scope, tags, timestamps) as a JSON object."
    )]
    async fn read(
        &self,
        Parameters(args): Parameters<ReadArgs>,
        Extension(parts): Extension<http::request::Parts>,
    ) -> Result<String, ErrorData> {
        validate_name(&args.name).map_err(ErrorData::from)?;
        let session_id = extract_session_id(&parts);
        let span = tracing::info_span!(
            "handler.read",
            session_id = %session_id,
            name = %args.name,
            scope = ?args.scope,
        );
        let state = Arc::clone(&self.state);
        async move {
            let scope = parse_scope(args.scope.as_deref()).map_err(ErrorData::from)?;

            let start = Instant::now();
            let memory = state
                .repo
                .read_memory(&args.name, &scope)
                .await
                .map_err(ErrorData::from)?;
            info!(
                ms = start.elapsed().as_millis(),
                name = %args.name,
                "read memory"
            );

            Ok(serde_json::json!({
                "id": memory.id,
                "name": memory.name,
                "scope": memory.metadata.scope.to_string(),
                "tags": memory.metadata.tags,
                "content": memory.content,
                "source": memory.metadata.source,
                "created_at": memory.metadata.created_at,
                "updated_at": memory.metadata.updated_at,
            })
            .to_string())
        }
        .instrument(span)
        .await
    }

    /// `sync` tool: pull (optionally), incrementally reindex any pulled
    /// changes, then push. Reindex stats are included in the response when a
    /// pull actually changed memories.
    #[tool(
        name = "sync",
        description = "Sync the memory repo with the git remote (push/pull). Requires \
            MEMORY_MCP_GITHUB_TOKEN or a token file. Returns a status message."
    )]
    async fn sync(
        &self,
        Parameters(args): Parameters<SyncArgs>,
        Extension(parts): Extension<http::request::Parts>,
    ) -> Result<String, ErrorData> {
        let pull_first = args.pull_first.unwrap_or(true);
        let session_id = extract_session_id(&parts);
        let span = tracing::info_span!(
            "handler.sync",
            session_id = %session_id,
            pull_first,
        );
        let state = Arc::clone(&self.state);
        async move {
            let start = Instant::now();
            let branch = &state.branch;

            // Assume a remote exists until the pull proves otherwise; the
            // push at the end is skipped when there is none.
            let mut has_remote = true;

            let mut reindex_stats: Option<ReindexStats> = None;

            let pull_status = if pull_first {
                let result = state
                    .repo
                    .pull(&state.auth, branch)
                    .await
                    .map_err(ErrorData::from)?;

                // (old_head, new_head) is set only when the pull moved HEAD,
                // i.e. there may be memory changes to reindex.
                let mut oid_range: Option<([u8; 20], [u8; 20])> = None;
                let status = match result {
                    PullResult::NoRemote => {
                        has_remote = false;
                        "no-remote".to_string()
                    }
                    PullResult::UpToDate => "up-to-date".to_string(),
                    PullResult::FastForward { old_head, new_head } => {
                        oid_range = Some((old_head, new_head));
                        "fast-forward".to_string()
                    }
                    PullResult::Merged {
                        conflicts_resolved,
                        old_head,
                        new_head,
                    } => {
                        oid_range = Some((old_head, new_head));
                        format!("merged ({} conflicts resolved)", conflicts_resolved)
                    }
                };

                if let Some((old_head, new_head)) = oid_range {
                    // Diffing is blocking git work; run it off the async
                    // runtime.
                    let repo = Arc::clone(&state.repo);
                    let changes = tokio::task::spawn_blocking(move || {
                        repo.diff_changed_memories(old_head, new_head)
                    })
                    .await
                    .map_err(|e| MemoryError::Join(e.to_string()))
                    .map_err(ErrorData::from)?
                    .map_err(ErrorData::from)?;

                    let mut reindex_failed_completely = false;
                    if !changes.is_empty() {
                        let stats = incremental_reindex(
                            &state.repo,
                            state.embedding.as_ref(),
                            state.index.as_ref(),
                            &changes,
                        )
                        .instrument(tracing::info_span!("server.incremental_reindex"))
                        .await;
                        info!(
                            added = stats.added,
                            updated = stats.updated,
                            removed = stats.removed,
                            errors = stats.errors,
                            "incremental reindex complete"
                        );
                        // "Failed completely" = nothing indexed and at least
                        // one error; in that case the index's recorded commit
                        // SHA is left stale so a later pass can retry.
                        reindex_failed_completely =
                            stats.added == 0 && stats.updated == 0 && stats.errors > 0;
                        reindex_stats = Some(stats);
                    }

                    if !reindex_failed_completely {
                        if let Some(sha) = state.repo.head_sha().await {
                            state.index.set_commit_sha(Some(&sha));
                        }
                    }
                }

                status
            } else {
                "skipped".to_string()
            };

            if has_remote {
                state
                    .repo
                    .push(&state.auth, branch)
                    .await
                    .map_err(ErrorData::from)?;
            }

            info!(
                ms = start.elapsed().as_millis(),
                pull_first,
                pull_status = %pull_status,
                "sync complete"
            );

            let mut response = serde_json::json!({
                "status": "sync complete",
                "pull": pull_status,
                "branch": branch,
            });

            if let Some(stats) = reindex_stats {
                response["reindex"] = serde_json::json!({
                    "added": stats.added,
                    "updated": stats.updated,
                    "removed": stats.removed,
                    "errors": stats.errors,
                });
            }

            Ok(response.to_string())
        }
        .instrument(span)
        .await
    }
}
954
#[tool_handler]
impl ServerHandler for MemoryServer {
    /// Advertise tool support and the usage instructions shown to MCP clients.
    fn get_info(&self) -> ServerInfo {
        ServerInfo::new(ServerCapabilities::builder().enable_tools().build()).with_instructions(
            "A semantic memory system for AI coding agents. Memories are stored as markdown files \
            in a git repository and indexed for semantic retrieval. Use `remember` to store, `recall` \
            to search, `read` to fetch a specific memory, `edit` to update, `forget` to delete, \
            `list` to browse, and `sync` to push/pull the remote.\n\n\
            Scope convention: always pass scope='project:<basename-of-your-cwd>' when working within \
            a project. This returns project memories alongside global ones. Omitting scope defaults to \
            global-only for queries (recall, list) and targets a single memory for point operations \
            (remember, edit, read, forget). Use scope='all' to search across all projects.\n\n\
            IMPORTANT: Never store credentials, API keys, tokens, passwords, or other secrets in \
            memory content. Memories are stored as plaintext markdown files committed to a git \
            repository and may be synced to a remote. Treat all memory content as public."
                .to_string(),
        )
    }
}
974
975fn build_snippet(content: &str) -> (String, usize, bool) {
977 let content_length = content.chars().count();
978 let truncated = content_length > SNIPPET_MAX_CHARS;
979 let snippet: String = content.chars().take(SNIPPET_MAX_CHARS).collect();
980 (snippet, content_length, truncated)
981}
982
#[cfg(test)]
mod tests {
    use super::*;

    // `build_snippet` contract: (snippet, total char count, truncated flag).

    #[test]
    fn snippet_short_content_not_truncated() {
        let content = "Hello, world!";
        let (snippet, content_length, truncated) = build_snippet(content);
        assert_eq!(snippet, "Hello, world!");
        assert_eq!(content_length, 13);
        assert!(!truncated);
    }

    // Exactly at the limit: nothing is cut, so `truncated` must be false.
    #[test]
    fn snippet_exact_limit_not_truncated() {
        let content: String = "a".repeat(SNIPPET_MAX_CHARS);
        let (snippet, content_length, truncated) = build_snippet(&content);
        assert_eq!(snippet, content);
        assert_eq!(content_length, SNIPPET_MAX_CHARS);
        assert!(!truncated);
    }

    #[test]
    fn snippet_over_limit_is_truncated() {
        let content: String = "b".repeat(SNIPPET_MAX_CHARS + 100);
        let (snippet, content_length, truncated) = build_snippet(&content);
        assert_eq!(snippet.chars().count(), SNIPPET_MAX_CHARS);
        assert_eq!(content_length, SNIPPET_MAX_CHARS + 100);
        assert!(truncated);
    }

    // Emoji are multi-byte: verifies the limit counts chars, not bytes.
    #[test]
    fn snippet_counts_unicode_chars_not_bytes() {
        let emoji_content: String = "\u{1F600}".repeat(SNIPPET_MAX_CHARS + 1);
        let (snippet, content_length, truncated) = build_snippet(&emoji_content);
        assert_eq!(snippet.chars().count(), SNIPPET_MAX_CHARS);
        assert_eq!(content_length, SNIPPET_MAX_CHARS + 1);
        assert!(truncated);
    }

    #[test]
    fn snippet_empty_content() {
        let (snippet, content_length, truncated) = build_snippet("");
        assert_eq!(snippet, "");
        assert_eq!(content_length, 0);
        assert!(!truncated);
    }
}