1use std::{sync::Arc, time::Instant};
2
/// Maximum number of characters included in a `recall` content snippet;
/// longer content is truncated and flagged via the `truncated` field.
const SNIPPET_MAX_CHARS: usize = 500;
6
7use chrono::Utc;
8use rmcp::{
9 handler::server::{router::tool::ToolRouter, tool::Extension, wrapper::Parameters},
10 model::{ErrorData, ServerCapabilities, ServerInfo},
11 tool, tool_handler, tool_router, ServerHandler,
12};
13use tracing::{info, warn, Instrument};
14
15fn extract_session_id(parts: &http::request::Parts) -> String {
20 let raw = parts
21 .headers
22 .get("mcp-session-id")
23 .and_then(|v: &http::HeaderValue| v.to_str().ok())
24 .unwrap_or("unknown");
25 if raw.len() > 128 {
26 let truncated: String = raw.chars().take(128).collect();
27 format!("{truncated}…")
28 } else {
29 raw.to_owned()
30 }
31}
32
33use crate::{
34 embedding::EmbeddingBackend,
35 error::MemoryError,
36 index::VectorStore,
37 repo::MemoryRepo,
38 types::{
39 parse_qualified_name, parse_scope, parse_scope_filter, validate_name, AppState,
40 ChangedMemories, EditArgs, ForgetArgs, ListArgs, Memory, MemoryMetadata, PullResult,
41 ReadArgs, RecallArgs, ReindexStats, RememberArgs, Scope, ScopeFilter, SyncArgs,
42 },
43};
44
/// MCP server exposing the memory tools (`remember`, `recall`, `forget`,
/// `edit`, `list`, `read`, `sync`) over shared application state.
#[derive(Clone)]
pub struct MemoryServer {
    // Shared repo/index/embedding state; cloning the server is cheap (Arc).
    state: Arc<AppState>,
    // Populated by the #[tool_router] macro and consumed by #[tool_handler];
    // never read directly, hence the allow.
    #[allow(dead_code)]
    tool_router: ToolRouter<Self>,
}
57
/// Maximum accepted memory content size in bytes (1 MiB), enforced by
/// `remember` and `edit` before any embedding or repo work.
const MAX_CONTENT_SIZE: usize = 1_048_576;
60
61async fn incremental_reindex(
70 repo: &Arc<MemoryRepo>,
71 embedding: &dyn EmbeddingBackend,
72 index: &dyn VectorStore,
73 changes: &ChangedMemories,
74) -> ReindexStats {
75 let mut stats = ReindexStats::default();
76
77 for name in &changes.removed {
79 match parse_qualified_name(name) {
80 Ok((scope, _)) => {
81 if let Err(e) = index.remove(&scope, name) {
82 warn!(
83 qualified_name = %name,
84 error = %e,
85 "incremental_reindex: failed to remove vector; skipping"
86 );
87 stats.errors += 1;
88 } else {
89 stats.removed += 1;
90 }
91 }
92 Err(e) => {
93 warn!(
94 qualified_name = %name,
95 error = %e,
96 "incremental_reindex: cannot parse qualified name for removal; skipping"
97 );
98 }
100 }
101 }
103
104 let mut pairs: Vec<(Scope, String, String)> = Vec::new(); for qualified in &changes.upserted {
109 match parse_qualified_name(qualified) {
110 Ok((scope, name)) => pairs.push((scope, name, qualified.clone())),
111 Err(e) => {
112 warn!(
113 qualified_name = %qualified,
114 error = %e,
115 "incremental_reindex: cannot parse qualified name; skipping"
116 );
117 stats.errors += 1;
118 }
119 }
120 }
121
122 let mut to_embed: Vec<(Scope, String, String)> = Vec::new();
125 for (scope, name, qualified) in &pairs {
126 let memory = match repo.read_memory(name, scope).await {
127 Ok(m) => m,
128 Err(e) => {
129 warn!(
130 qualified_name = %qualified,
131 error = %e,
132 "incremental_reindex: failed to read memory; skipping"
133 );
134 stats.errors += 1;
135 continue;
136 }
137 };
138 to_embed.push((scope.clone(), qualified.clone(), memory.content));
139 }
140
141 if to_embed.is_empty() {
142 return stats;
143 }
144
145 let contents: Vec<String> = to_embed.iter().map(|(_, _, c)| c.clone()).collect();
147 let vectors = match embedding.embed(&contents).await {
148 Ok(v) => v,
149 Err(batch_err) => {
150 warn!(error = %batch_err, "incremental_reindex: batch embed failed; falling back to per-item");
151 let mut vecs: Vec<Vec<f32>> = Vec::with_capacity(contents.len());
152 let mut failed: Vec<usize> = Vec::new();
153 for (i, content) in contents.iter().enumerate() {
154 match embedding.embed(std::slice::from_ref(content)).await {
155 Ok(mut v) => vecs.push(v.remove(0)),
156 Err(e) => {
157 warn!(
158 error = %e,
159 qualified_name = %to_embed[i].1,
160 "incremental_reindex: per-item embed failed; skipping"
161 );
162 failed.push(i);
163 stats.errors += 1;
164 }
165 }
166 }
167 for &i in failed.iter().rev() {
169 to_embed.remove(i);
170 }
171 vecs
172 }
173 };
174
175 for ((scope, qualified_name, _), vector) in to_embed.iter().zip(vectors.iter()) {
177 let is_update = index.find_by_name(qualified_name).is_some();
178
179 match index.add(scope, vector, qualified_name.clone()) {
180 Ok(_) => {}
181 Err(e) => {
182 warn!(
183 qualified_name = %qualified_name,
184 error = %e,
185 "incremental_reindex: add failed; skipping"
186 );
187 stats.errors += 1;
188 continue;
189 }
190 }
191
192 if is_update {
193 stats.updated += 1;
194 } else {
195 stats.added += 1;
196 }
197 }
198
199 stats
200}
201
202#[tool_router]
203impl MemoryServer {
204 pub fn new(state: Arc<AppState>) -> Self {
206 Self {
207 state,
208 tool_router: Self::tool_router(),
209 }
210 }
211
    /// Tool: `remember` — validate, embed, index, then persist a new memory.
    /// Returns a JSON object with the assigned id, name, and scope.
    #[tool(
        name = "remember",
        description = "Store a new memory. Saves the content to the git-backed repository and \
        indexes it for semantic search. Use scope 'project:<basename-of-your-cwd>' for \
        project-specific memories or omit for global. Returns the assigned memory ID. \
        IMPORTANT: Never store credentials, API keys, tokens, passwords, or other secrets — \
        memories are plaintext files in a git repo and may be synced to a remote."
    )]
    async fn remember(
        &self,
        Parameters(args): Parameters<RememberArgs>,
        Extension(parts): Extension<http::request::Parts>,
    ) -> Result<String, ErrorData> {
        // Reject bad names and oversized content before doing any work.
        validate_name(&args.name).map_err(ErrorData::from)?;
        if args.content.len() > MAX_CONTENT_SIZE {
            return Err(ErrorData::from(crate::error::MemoryError::InvalidInput {
                reason: format!(
                    "content size {} exceeds maximum of {} bytes",
                    args.content.len(),
                    MAX_CONTENT_SIZE
                ),
            }));
        }
        let session_id = extract_session_id(&parts);
        let content_size = args.content.len();
        let span = tracing::info_span!(
            "handler.remember",
            session_id = %session_id,
            name = %args.name,
            scope = ?args.scope,
            content_size,
        );
        let state = Arc::clone(&self.state);
        async move {
            let scope = parse_scope(args.scope.as_deref()).map_err(ErrorData::from)?;
            let metadata = MemoryMetadata::new(scope.clone(), args.tags, args.source);
            let memory = Memory::new(args.name, args.content, metadata);

            // Embed first: an embedding failure leaves repo and index untouched.
            let start = Instant::now();
            let vector = state
                .embedding
                .embed_one(&memory.content)
                .await
                .map_err(ErrorData::from)?;
            info!(embed_ms = start.elapsed().as_millis(), "embedded");

            let qualified_name = format!("{}/{}", memory.metadata.scope.dir_prefix(), memory.name);

            // NOTE(review): the index is updated before the repo save; if the
            // save below fails, the index briefly holds an entry with no backing
            // file — recall skips entries whose repo read fails, so this is
            // tolerated rather than rolled back.
            state
                .index
                .add(&scope, &vector, qualified_name)
                .map_err(ErrorData::from)?;

            let start = Instant::now();
            state
                .repo
                .save_memory(&memory)
                .await
                .map_err(ErrorData::from)?;
            info!(repo_ms = start.elapsed().as_millis(), "saved to repo");

            Ok(serde_json::json!({
                "id": memory.id,
                "name": memory.name,
                "scope": memory.metadata.scope.to_string(),
            })
            .to_string())
        }
        .instrument(span)
        .await
    }
292
    /// Tool: `recall` — embed the query, search the vector index, hydrate each
    /// hit from the repo, and return snippets as JSON. Stale or unreadable
    /// index entries are skipped and counted in `skipped_errors`.
    #[tool(
        name = "recall",
        description = "Search memories by semantic similarity. Embeds the query and returns the top matching memories as a JSON array \
        with name, scope, tags, and a content snippet (max 500 chars).\n\n\
        Each result includes `truncated` (bool) and `content_length` (total character count). \
        When `truncated` is true, the snippet is incomplete — use the `read` tool with the memory's name and scope \
        to retrieve the full content before acting on it.\n\n\
        Scope: pass 'project:<basename-of-your-cwd>' to search your current project + global memories, \
        'global' for global-only, or 'all' to search everything. Omitting scope defaults to global-only."
    )]
    async fn recall(
        &self,
        Parameters(args): Parameters<RecallArgs>,
        Extension(parts): Extension<http::request::Parts>,
    ) -> Result<String, ErrorData> {
        let session_id = extract_session_id(&parts);
        // `count` is declared Empty and recorded once results are known.
        let span = tracing::info_span!(
            "handler.recall",
            session_id = %session_id,
            scope = ?args.scope,
            limit = ?args.limit,
            count = tracing::field::Empty,
        );
        let state = Arc::clone(&self.state);
        async move {
            let scope_filter =
                parse_scope_filter(args.scope.as_deref()).map_err(ErrorData::from)?;

            // Default to 5 results, hard-capped at 100.
            let limit = args.limit.unwrap_or(5).min(100);

            let start = Instant::now();
            let query_vector = state
                .embedding
                .embed_one(&args.query)
                .await
                .map_err(ErrorData::from)?;
            info!(embed_ms = start.elapsed().as_millis(), "query embedded");

            let start = Instant::now();
            let results = state
                .index
                .search(&scope_filter, &query_vector, limit)
                .map_err(ErrorData::from)?;
            info!(
                search_ms = start.elapsed().as_millis(),
                candidates = results.len(),
                "index searched"
            );

            let pre_filter_count = results.len();
            let mut results_vec = Vec::new();
            let mut skipped_errors: usize = 0;

            // Hydrate each candidate from the repo, stopping once `limit`
            // results have been collected.
            for (_key, qualified_name, distance) in results {
                if results_vec.len() >= limit {
                    break;
                }
                let (scope, name) = match parse_qualified_name(&qualified_name) {
                    Ok(pair) => pair,
                    Err(e) => {
                        warn!(
                            qualified_name = %qualified_name,
                            error = %e,
                            "could not parse qualified name from index; skipping"
                        );
                        skipped_errors += 1;
                        continue;
                    }
                };

                // A read failure usually means the memory was deleted after
                // indexing; skip the stale entry.
                let memory = match state.repo.read_memory(&name, &scope).await {
                    Ok(m) => m,
                    Err(e) => {
                        warn!(
                            name = %name,
                            error = %e,
                            "could not read memory from repo (deleted?); skipping"
                        );
                        skipped_errors += 1;
                        continue;
                    }
                };

                let (snippet, content_length, truncated) = build_snippet(&memory.content);

                results_vec.push(serde_json::json!({
                    "id": memory.id,
                    "name": memory.name,
                    "scope": memory.metadata.scope.to_string(),
                    "tags": memory.metadata.tags,
                    "content": snippet,
                    "content_length": content_length,
                    "truncated": truncated,
                    "distance": distance,
                }));
            }

            let count = results_vec.len();
            tracing::Span::current().record("count", count);
            info!(returned = count, skipped_errors, "recall complete");

            Ok(serde_json::json!({
                "results": results_vec,
                "count": count,
                "limit": limit,
                "pre_filter_count": pre_filter_count,
                "skipped_errors": skipped_errors,
            })
            .to_string())
        }
        .instrument(span)
        .await
    }
417
    /// Tool: `forget` — delete a memory from the repo; index removal is
    /// best-effort (a stale vector is skipped at recall time).
    #[tool(
        name = "forget",
        description = "Delete a memory by name. Use scope 'project:<basename-of-your-cwd>' for project-scoped \
        memories or omit for global. Removes the file from git and the vector from the search index. \
        Returns 'ok' on success."
    )]
    async fn forget(
        &self,
        Parameters(args): Parameters<ForgetArgs>,
        Extension(parts): Extension<http::request::Parts>,
    ) -> Result<String, ErrorData> {
        validate_name(&args.name).map_err(ErrorData::from)?;
        let session_id = extract_session_id(&parts);
        let span = tracing::info_span!(
            "handler.forget",
            session_id = %session_id,
            name = %args.name,
            scope = ?args.scope,
        );
        let state = Arc::clone(&self.state);
        async move {
            let scope = parse_scope(args.scope.as_deref()).map_err(ErrorData::from)?;

            let start = Instant::now();

            // Repo deletion is authoritative and must succeed.
            state
                .repo
                .delete_memory(&args.name, &scope)
                .await
                .map_err(ErrorData::from)?;

            // Index removal failure is logged but not fatal: recall skips
            // entries whose repo read fails.
            let qualified_name = format!("{}/{}", scope.dir_prefix(), args.name);
            if let Err(e) = state.index.remove(&scope, &qualified_name) {
                warn!(name = %args.name, error = %e, "vector removal failed during forget; stale entry will be skipped at recall");
            }

            info!(
                ms = start.elapsed().as_millis(),
                name = %args.name,
                "memory forgotten"
            );

            Ok("ok".to_string())
        }
        .instrument(span)
        .await
    }
473
    /// Tool: `edit` — partial update of an existing memory. Omitted fields
    /// keep their current values; re-embedding happens only when the content
    /// itself changed.
    #[tool(
        name = "edit",
        description = "Edit an existing memory. Supports partial updates — omit content or \
        tags to preserve existing values. Re-embeds and re-indexes the memory. Use scope \
        'project:<basename-of-your-cwd>' for project-scoped memories. Returns the memory ID. \
        IMPORTANT: Never store credentials, API keys, tokens, passwords, or other secrets — \
        memories are plaintext files in a git repo and may be synced to a remote."
    )]
    async fn edit(
        &self,
        Parameters(args): Parameters<EditArgs>,
        Extension(parts): Extension<http::request::Parts>,
    ) -> Result<String, ErrorData> {
        validate_name(&args.name).map_err(ErrorData::from)?;
        // Reject oversized replacement content before touching the repo.
        if let Some(ref content) = args.content {
            if content.len() > MAX_CONTENT_SIZE {
                return Err(ErrorData::from(crate::error::MemoryError::InvalidInput {
                    reason: format!(
                        "content size {} exceeds maximum of {} bytes",
                        content.len(),
                        MAX_CONTENT_SIZE
                    ),
                }));
            }
        }
        let session_id = extract_session_id(&parts);
        let content_size = args.content.as_ref().map(|c| c.len()).unwrap_or(0);
        let span = tracing::info_span!(
            "handler.edit",
            session_id = %session_id,
            name = %args.name,
            scope = ?args.scope,
            content_size,
        );
        let state = Arc::clone(&self.state);
        async move {
            let scope = parse_scope(args.scope.as_deref()).map_err(ErrorData::from)?;

            let start = Instant::now();

            // Captured before `args.content` is moved out below.
            let content_changed = args.content.is_some();

            let mut memory = state
                .repo
                .read_memory(&args.name, &scope)
                .await
                .map_err(ErrorData::from)?;

            // Apply the partial update.
            if let Some(content) = args.content {
                memory.content = content;
            }
            if let Some(tags) = args.tags {
                memory.metadata.tags = tags;
            }
            memory.metadata.updated_at = Utc::now();

            // Tag-only edits keep the existing vector; only content changes
            // trigger a re-embed and index update.
            if content_changed {
                let qualified_name =
                    format!("{}/{}", memory.metadata.scope.dir_prefix(), memory.name);

                let vector = state
                    .embedding
                    .embed_one(&memory.content)
                    .await
                    .map_err(ErrorData::from)?;

                state
                    .index
                    .add(&scope, &vector, qualified_name)
                    .map_err(ErrorData::from)?;
            }

            state
                .repo
                .save_memory(&memory)
                .await
                .map_err(ErrorData::from)?;

            info!(
                ms = start.elapsed().as_millis(),
                name = %args.name,
                content_changed,
                "memory edited"
            );

            Ok(serde_json::json!({
                "id": memory.id,
                "name": memory.name,
                "scope": memory.metadata.scope.to_string(),
            })
            .to_string())
        }
        .instrument(span)
        .await
    }
584
    /// Tool: `list` — return summaries (no content) of stored memories,
    /// filtered by scope. Project scope includes global memories as well.
    #[tool(
        name = "list",
        description = "List stored memories. Pass 'project:<basename-of-your-cwd>' for project + global memories, \
        'global' for global-only, or 'all' for everything. Omitting scope defaults to global-only. \
        Returns a JSON array of memory summaries without full content."
    )]
    async fn list(
        &self,
        Parameters(args): Parameters<ListArgs>,
        Extension(parts): Extension<http::request::Parts>,
    ) -> Result<String, ErrorData> {
        let session_id = extract_session_id(&parts);
        // `count` is declared Empty and recorded after listing.
        let span = tracing::info_span!(
            "handler.list",
            session_id = %session_id,
            scope = ?args.scope,
            count = tracing::field::Empty,
        );
        let state = Arc::clone(&self.state);
        async move {
            let scope_filter =
                parse_scope_filter(args.scope.as_deref()).map_err(ErrorData::from)?;

            let start = Instant::now();
            // ProjectAndGlobal lists both scopes and concatenates: global
            // first, then the project's memories.
            let memories = match &scope_filter {
                ScopeFilter::GlobalOnly => state
                    .repo
                    .list_memories(Some(&Scope::Global))
                    .await
                    .map_err(ErrorData::from)?,
                ScopeFilter::All => state
                    .repo
                    .list_memories(None)
                    .await
                    .map_err(ErrorData::from)?,
                ScopeFilter::ProjectAndGlobal(project_name) => {
                    let project_scope = Scope::Project(project_name.clone());
                    let mut global = state
                        .repo
                        .list_memories(Some(&Scope::Global))
                        .await
                        .map_err(ErrorData::from)?;
                    let project = state
                        .repo
                        .list_memories(Some(&project_scope))
                        .await
                        .map_err(ErrorData::from)?;
                    global.extend(project);
                    global
                }
            };
            let count = memories.len();
            tracing::Span::current().record("count", count);
            info!(ms = start.elapsed().as_millis(), count, "listed memories");

            // Summaries intentionally omit content to keep responses small.
            let summaries: Vec<serde_json::Value> = memories
                .into_iter()
                .map(|m| {
                    serde_json::json!({
                        "id": m.id,
                        "name": m.name,
                        "scope": m.metadata.scope.to_string(),
                        "tags": m.metadata.tags,
                        "created_at": m.metadata.created_at,
                        "updated_at": m.metadata.updated_at,
                    })
                })
                .collect();

            Ok(serde_json::json!({
                "memories": summaries,
                "count": count,
            })
            .to_string())
        }
        .instrument(span)
        .await
    }
667
    /// Tool: `read` — fetch one memory by name and scope, returning full
    /// content plus metadata as JSON.
    #[tool(
        name = "read",
        description = "Read a specific memory by name. Use scope 'project:<basename-of-your-cwd>' for \
        project-scoped memories or omit for global. Returns the full markdown content and metadata \
        (id, scope, tags, timestamps) as a JSON object."
    )]
    async fn read(
        &self,
        Parameters(args): Parameters<ReadArgs>,
        Extension(parts): Extension<http::request::Parts>,
    ) -> Result<String, ErrorData> {
        validate_name(&args.name).map_err(ErrorData::from)?;
        let session_id = extract_session_id(&parts);
        let span = tracing::info_span!(
            "handler.read",
            session_id = %session_id,
            name = %args.name,
            scope = ?args.scope,
        );
        let state = Arc::clone(&self.state);
        async move {
            let scope = parse_scope(args.scope.as_deref()).map_err(ErrorData::from)?;

            let start = Instant::now();
            let memory = state
                .repo
                .read_memory(&args.name, &scope)
                .await
                .map_err(ErrorData::from)?;
            info!(
                ms = start.elapsed().as_millis(),
                name = %args.name,
                "read memory"
            );

            // Unlike recall, this returns the full content, not a snippet.
            Ok(serde_json::json!({
                "id": memory.id,
                "name": memory.name,
                "scope": memory.metadata.scope.to_string(),
                "tags": memory.metadata.tags,
                "content": memory.content,
                "source": memory.metadata.source,
                "created_at": memory.metadata.created_at,
                "updated_at": memory.metadata.updated_at,
            })
            .to_string())
        }
        .instrument(span)
        .await
    }
722
    /// Tool: `sync` — optionally pull from the git remote, incrementally
    /// reindex any memories the pull changed, then push local commits.
    #[tool(
        name = "sync",
        description = "Sync the memory repo with the git remote (push/pull). Requires \
        MEMORY_MCP_GITHUB_TOKEN or a token file. Returns a status message."
    )]
    async fn sync(
        &self,
        Parameters(args): Parameters<SyncArgs>,
        Extension(parts): Extension<http::request::Parts>,
    ) -> Result<String, ErrorData> {
        // Pull-before-push is the default behavior.
        let pull_first = args.pull_first.unwrap_or(true);
        let session_id = extract_session_id(&parts);
        let span = tracing::info_span!(
            "handler.sync",
            session_id = %session_id,
            pull_first,
        );
        let state = Arc::clone(&self.state);
        async move {
            let start = Instant::now();
            let branch = &state.branch;

            // Cleared when the pull reports there is no remote, so the push
            // below is skipped.
            let mut has_remote = true;

            // Populated only when a reindex actually ran.
            let mut reindex_stats: Option<ReindexStats> = None;

            let pull_status = if pull_first {
                let result = state
                    .repo
                    .pull(&state.auth, branch)
                    .await
                    .map_err(ErrorData::from)?;

                // (old_head, new_head) commit ids when the pull moved HEAD;
                // this drives the incremental reindex below.
                let mut oid_range: Option<([u8; 20], [u8; 20])> = None;
                let status = match result {
                    PullResult::NoRemote => {
                        has_remote = false;
                        "no-remote".to_string()
                    }
                    PullResult::UpToDate => "up-to-date".to_string(),
                    PullResult::FastForward { old_head, new_head } => {
                        oid_range = Some((old_head, new_head));
                        "fast-forward".to_string()
                    }
                    PullResult::Merged {
                        conflicts_resolved,
                        old_head,
                        new_head,
                    } => {
                        oid_range = Some((old_head, new_head));
                        format!("merged ({} conflicts resolved)", conflicts_resolved)
                    }
                };

                if let Some((old_head, new_head)) = oid_range {
                    // The git diff is blocking work; run it off the async
                    // executor via spawn_blocking.
                    let repo = Arc::clone(&state.repo);
                    let changes = tokio::task::spawn_blocking(move || {
                        repo.diff_changed_memories(old_head, new_head)
                    })
                    .await
                    .map_err(|e| MemoryError::Join(e.to_string()))
                    .map_err(ErrorData::from)?
                    .map_err(ErrorData::from)?;

                    if !changes.is_empty() {
                        let stats = incremental_reindex(
                            &state.repo,
                            state.embedding.as_ref(),
                            state.index.as_ref(),
                            &changes,
                        )
                        .instrument(tracing::info_span!("server.incremental_reindex"))
                        .await;
                        info!(
                            added = stats.added,
                            updated = stats.updated,
                            removed = stats.removed,
                            errors = stats.errors,
                            "incremental reindex complete"
                        );
                        reindex_stats = Some(stats);
                    }
                }

                status
            } else {
                "skipped".to_string()
            };

            if has_remote {
                state
                    .repo
                    .push(&state.auth, branch)
                    .await
                    .map_err(ErrorData::from)?;
            }

            info!(
                ms = start.elapsed().as_millis(),
                pull_first,
                pull_status = %pull_status,
                "sync complete"
            );

            let mut response = serde_json::json!({
                "status": "sync complete",
                "pull": pull_status,
                "branch": branch,
            });

            // Attach reindex stats only when a reindex actually ran.
            if let Some(stats) = reindex_stats {
                response["reindex"] = serde_json::json!({
                    "added": stats.added,
                    "updated": stats.updated,
                    "removed": stats.removed,
                    "errors": stats.errors,
                });
            }

            Ok(response.to_string())
        }
        .instrument(span)
        .await
    }
856}
857
#[tool_handler]
impl ServerHandler for MemoryServer {
    /// Advertise tool capability plus usage instructions to MCP clients.
    fn get_info(&self) -> ServerInfo {
        ServerInfo::new(ServerCapabilities::builder().enable_tools().build()).with_instructions(
            "A semantic memory system for AI coding agents. Memories are stored as markdown files \
            in a git repository and indexed for semantic retrieval. Use `remember` to store, `recall` \
            to search, `read` to fetch a specific memory, `edit` to update, `forget` to delete, \
            `list` to browse, and `sync` to push/pull the remote.\n\n\
            Scope convention: always pass scope='project:<basename-of-your-cwd>' when working within \
            a project. This returns project memories alongside global ones. Omitting scope defaults to \
            global-only for queries (recall, list) and targets a single memory for point operations \
            (remember, edit, read, forget). Use scope='all' to search across all projects.\n\n\
            IMPORTANT: Never store credentials, API keys, tokens, passwords, or other secrets in \
            memory content. Memories are stored as plaintext markdown files committed to a git \
            repository and may be synced to a remote. Treat all memory content as public."
                .to_string(),
        )
    }
}
877
878fn build_snippet(content: &str) -> (String, usize, bool) {
880 let content_length = content.chars().count();
881 let truncated = content_length > SNIPPET_MAX_CHARS;
882 let snippet: String = content.chars().take(SNIPPET_MAX_CHARS).collect();
883 (snippet, content_length, truncated)
884}
885
#[cfg(test)]
mod tests {
    use super::*;

    // Content under the limit is returned whole with truncated == false.
    #[test]
    fn snippet_short_content_not_truncated() {
        let content = "Hello, world!";
        let (snippet, content_length, truncated) = build_snippet(content);
        assert_eq!(snippet, "Hello, world!");
        assert_eq!(content_length, 13);
        assert!(!truncated);
    }

    // Exactly SNIPPET_MAX_CHARS is a boundary case: kept whole, not flagged.
    #[test]
    fn snippet_exact_limit_not_truncated() {
        let content: String = "a".repeat(SNIPPET_MAX_CHARS);
        let (snippet, content_length, truncated) = build_snippet(&content);
        assert_eq!(snippet, content);
        assert_eq!(content_length, SNIPPET_MAX_CHARS);
        assert!(!truncated);
    }

    // One char over the limit must be cut and flagged.
    #[test]
    fn snippet_over_limit_is_truncated() {
        let content: String = "b".repeat(SNIPPET_MAX_CHARS + 100);
        let (snippet, content_length, truncated) = build_snippet(&content);
        assert_eq!(snippet.chars().count(), SNIPPET_MAX_CHARS);
        assert_eq!(content_length, SNIPPET_MAX_CHARS + 100);
        assert!(truncated);
    }

    // Limits are measured in chars, not bytes: 4-byte emoji must count as 1.
    #[test]
    fn snippet_counts_unicode_chars_not_bytes() {
        let emoji_content: String = "\u{1F600}".repeat(SNIPPET_MAX_CHARS + 1);
        let (snippet, content_length, truncated) = build_snippet(&emoji_content);
        assert_eq!(snippet.chars().count(), SNIPPET_MAX_CHARS);
        assert_eq!(content_length, SNIPPET_MAX_CHARS + 1);
        assert!(truncated);
    }

    // Empty input yields an empty, untruncated snippet.
    #[test]
    fn snippet_empty_content() {
        let (snippet, content_length, truncated) = build_snippet("");
        assert_eq!(snippet, "");
        assert_eq!(content_length, 0);
        assert!(!truncated);
    }
}