1use crate::provider::to_view;
15use crate::types::{PatchChange, Session};
16use serde_json::{Map, Value, json};
17use std::collections::HashMap;
18use toolpath::v1::{
19 ActorDefinition, ArtifactChange, Base, Identity, Path, PathIdentity, PathMeta, Step,
20 StepIdentity, StructuralChange,
21};
22use toolpath_convo::{ConversationView, Role, Turn};
23
/// Options controlling how a toolpath [`Path`] is derived from a session.
#[derive(Debug, Clone, Default)]
pub struct DeriveConfig {
    /// Explicit project root to use as the path's base URI. When `None`,
    /// the session meta's cwd (or the first turn's working dir) is used.
    pub project_path: Option<String>,
}
38
39pub fn derive_path(session: &Session, config: &DeriveConfig) -> Path {
41 let view = to_view(session);
42 derive_path_from_view(session, &view, config)
43}
44
45pub fn derive_project(sessions: &[Session], config: &DeriveConfig) -> Vec<Path> {
47 sessions.iter().map(|s| derive_path(s, config)).collect()
48}
49
50fn derive_path_from_view(
53 session: &Session,
54 view: &ConversationView,
55 config: &DeriveConfig,
56) -> Path {
57 let meta = session.meta();
58 let session_short: String = session.id.chars().take(8).collect();
59 let path_id = format!("path-codex-{}", session_short);
60 let convo_artifact = format!("codex://{}", session.id);
61
62 let mut steps: Vec<Step> = Vec::with_capacity(view.turns.len());
63 let mut actors: HashMap<String, ActorDefinition> = HashMap::new();
64 let mut last_step_id: Option<String> = None;
65
66 for (turn_idx, turn) in view.turns.iter().enumerate() {
67 let Some(step) = build_step(
68 turn_idx,
69 turn,
70 &convo_artifact,
71 last_step_id.as_deref(),
72 &mut actors,
73 ) else {
74 continue;
75 };
76 last_step_id = Some(step.step.id.clone());
77 steps.push(step);
78 }
79
80 let head = last_step_id.unwrap_or_else(|| "empty".to_string());
81
82 let base_uri = config
85 .project_path
86 .clone()
87 .or_else(|| meta.as_ref().map(|m| m.cwd.to_string_lossy().to_string()))
88 .or_else(|| {
89 view.turns
90 .first()
91 .and_then(|t| t.environment.as_ref()?.working_dir.clone())
92 })
93 .map(|p| {
94 if p.starts_with('/') {
95 format!("file://{}", p)
96 } else {
97 p
98 }
99 });
100
101 let base_ref = meta
103 .as_ref()
104 .and_then(|m| m.git.as_ref().and_then(|g| g.commit_hash.clone()));
105 let base_branch = meta
106 .as_ref()
107 .and_then(|m| m.git.as_ref().and_then(|g| g.branch.clone()));
108
109 let base = base_uri.map(|uri| Base {
110 uri,
111 ref_str: base_ref,
112 branch: base_branch,
113 });
114
115 let mut path_extra: HashMap<String, Value> = HashMap::new();
119 let mut codex_meta: Map<String, Value> = Map::new();
120 if let Some(m) = meta.as_ref() {
121 codex_meta.insert("session_id".into(), Value::String(session.id.clone()));
122 codex_meta.insert("originator".into(), Value::String(m.originator.clone()));
123 codex_meta.insert("cli_version".into(), Value::String(m.cli_version.clone()));
124 codex_meta.insert("source".into(), Value::String(m.source.clone()));
125 if let Some(model_provider) = &m.model_provider {
126 codex_meta.insert(
127 "model_provider".into(),
128 Value::String(model_provider.clone()),
129 );
130 }
131 if let Some(forked) = &m.forked_from_id {
132 codex_meta.insert("forked_from_id".into(), Value::String(forked.clone()));
133 }
134 if let Some(git) = &m.git {
135 let mut g: Map<String, Value> = Map::new();
136 if let Some(v) = &git.commit_hash {
137 g.insert("commit_hash".into(), Value::String(v.clone()));
138 }
139 if let Some(v) = &git.branch {
140 g.insert("branch".into(), Value::String(v.clone()));
141 }
142 if let Some(v) = &git.repository_url {
143 g.insert("repository_url".into(), Value::String(v.clone()));
144 }
145 if !g.is_empty() {
146 codex_meta.insert("git".into(), Value::Object(g));
147 }
148 }
149 }
150 if !view.files_changed.is_empty() {
151 codex_meta.insert(
152 "files_changed".into(),
153 Value::Array(
154 view.files_changed
155 .iter()
156 .map(|p| Value::String(p.clone()))
157 .collect(),
158 ),
159 );
160 }
161 if !codex_meta.is_empty() {
162 path_extra.insert("codex".into(), Value::Object(codex_meta));
163 }
164
165 Path {
166 path: PathIdentity {
167 id: path_id,
168 base,
169 head,
170 graph_ref: None,
171 },
172 steps,
173 meta: Some(PathMeta {
174 title: Some(format!("Codex session: {}", session_short)),
175 source: Some("codex".to_string()),
176 actors: if actors.is_empty() {
177 None
178 } else {
179 Some(actors)
180 },
181 extra: path_extra,
182 ..Default::default()
183 }),
184 }
185}
186
187fn build_step(
188 turn_idx: usize,
189 turn: &Turn,
190 convo_artifact: &str,
191 parent_id: Option<&str>,
192 actors: &mut HashMap<String, ActorDefinition>,
193) -> Option<Step> {
194 if turn.text.is_empty()
197 && turn.tool_uses.is_empty()
198 && turn.thinking.is_none()
199 && extract_patch_changes(turn).is_empty()
200 {
201 return None;
202 }
203
204 let (actor, role_str) = resolve_actor(turn, actors);
205
206 let mut convo_extra: HashMap<String, Value> = HashMap::new();
208 convo_extra.insert("role".into(), json!(role_str));
209 if !turn.text.is_empty() {
210 convo_extra.insert("text".into(), json!(turn.text));
211 }
212 if let Some(th) = turn.thinking.as_deref()
215 && !th.is_empty()
216 {
217 convo_extra.insert("thinking".into(), json!(th));
218 }
219 if !turn.tool_uses.is_empty() {
220 let calls: Vec<Value> = turn
221 .tool_uses
222 .iter()
223 .map(|tu| {
224 json!({
225 "name": tu.name,
226 "call_id": tu.id,
227 "category": tu.category,
228 "summary": tool_call_summary(tu),
229 "status": tool_call_status(turn, &tu.id),
230 })
231 })
232 .collect();
233 convo_extra.insert("tool_calls".into(), Value::Array(calls));
234 }
235 if let Some(u) = turn.token_usage.as_ref() {
236 convo_extra.insert("token_usage".into(), json!(u));
237 }
238 if let Some(ph) = turn
239 .extra
240 .get("codex")
241 .and_then(|c| c.get("phase"))
242 .and_then(|v| v.as_str())
243 {
244 convo_extra.insert("phase".into(), json!(ph));
245 }
246
247 let convo_change = ArtifactChange {
248 raw: None,
249 structural: Some(StructuralChange {
250 change_type: "conversation.append".to_string(),
251 extra: convo_extra,
252 }),
253 };
254
255 let mut changes: HashMap<String, ArtifactChange> = HashMap::new();
256 changes.insert(convo_artifact.to_string(), convo_change);
257
258 for (path, patch) in extract_patch_changes(turn) {
260 changes.insert(path, patch);
261 }
262
263 let step_id = format!("step-{:04}", turn_idx + 1);
264 let parents = parent_id.map(|p| vec![p.to_string()]).unwrap_or_default();
265
266 Some(Step {
267 step: StepIdentity {
268 id: step_id,
269 parents,
270 actor,
271 timestamp: turn.timestamp.clone(),
272 },
273 change: changes,
274 meta: None,
275 })
276}
277
278fn resolve_actor(
279 turn: &Turn,
280 actors: &mut HashMap<String, ActorDefinition>,
281) -> (String, &'static str) {
282 match &turn.role {
283 Role::User => {
284 actors
285 .entry("human:user".to_string())
286 .or_insert_with(|| ActorDefinition {
287 name: Some("User".to_string()),
288 ..Default::default()
289 });
290 ("human:user".to_string(), "user")
291 }
292 Role::Assistant => {
293 let (actor_key, model_str) = match &turn.model {
294 Some(m) if !m.is_empty() => (format!("agent:{}", m), m.clone()),
295 _ => ("agent:codex".to_string(), "codex".to_string()),
296 };
297 actors
298 .entry(actor_key.clone())
299 .or_insert_with(|| ActorDefinition {
300 name: Some("Codex CLI".to_string()),
301 provider: Some("openai".to_string()),
302 model: Some(model_str.clone()),
303 identities: vec![Identity {
304 system: "openai".to_string(),
305 id: model_str,
306 }],
307 ..Default::default()
308 });
309 (actor_key, "assistant")
310 }
311 Role::System => {
312 actors
313 .entry("system:codex".to_string())
314 .or_insert_with(|| ActorDefinition {
315 name: Some("Codex CLI system".to_string()),
316 provider: Some("openai".to_string()),
317 ..Default::default()
318 });
319 ("system:codex".to_string(), "developer")
320 }
321 Role::Other(s) => {
322 let key = format!("other:{}", s);
323 actors
324 .entry(key.clone())
325 .or_insert_with(|| ActorDefinition {
326 name: Some(s.clone()),
327 ..Default::default()
328 });
329 (key, "other")
330 }
331 }
332}
333
334fn tool_call_status(turn: &Turn, call_id: &str) -> String {
335 turn.extra
336 .get("codex")
337 .and_then(|c| c.get("tool_extras"))
338 .and_then(|t| t.get(call_id))
339 .and_then(|te| te.get("status").or_else(|| te.get("exit_code")))
340 .and_then(|v| {
341 v.as_str()
342 .map(str::to_string)
343 .or_else(|| v.as_i64().map(|n| n.to_string()))
344 })
345 .unwrap_or_else(|| {
346 turn.tool_uses
347 .iter()
348 .find(|tu| tu.id == call_id)
349 .and_then(|tu| tu.result.as_ref())
350 .map(|r| {
351 if r.is_error {
352 "error".to_string()
353 } else {
354 "success".to_string()
355 }
356 })
357 .unwrap_or_default()
358 })
359}
360
361fn tool_call_summary(tu: &toolpath_convo::ToolInvocation) -> String {
363 let pick = |k: &str| -> Option<String> {
364 tu.input.get(k).and_then(|v| v.as_str()).map(str::to_string)
365 };
366 let summary = match tu.name.as_str() {
367 "exec_command" | "shell" | "unified_exec" => pick("cmd").or_else(|| pick("command")),
368 "write_stdin" => pick("chars").or_else(|| pick("session_id")),
369 "read_file" | "read_many_files" | "list_dir" | "view_image" => pick("path"),
370 "write_file" | "replace" | "edit" => pick("file_path"),
371 "apply_patch" => {
372 tu.input.as_str().and_then(|s| {
374 s.lines()
375 .find(|l| {
376 l.starts_with("*** Add File:")
377 || l.starts_with("*** Update File:")
378 || l.starts_with("*** Delete File:")
379 })
380 .map(str::to_string)
381 })
382 }
383 "glob" | "grep_search" | "search_file_content" => pick("pattern").or_else(|| pick("query")),
384 "web_fetch" => pick("url"),
385 "web_search" | "google_web_search" => pick("query"),
386 "spawn_agent" | "task" | "activate_skill" => pick("prompt").or_else(|| pick("task")),
387 _ => None,
388 };
389 summary.unwrap_or_default()
390}
391
392fn extract_patch_changes(turn: &Turn) -> Vec<(String, ArtifactChange)> {
395 let Some(codex) = turn.extra.get("codex") else {
396 return Vec::new();
397 };
398 let Some(Value::Array(patches)) = codex.get("patch_changes") else {
399 return Vec::new();
400 };
401
402 let mut out: Vec<(String, ArtifactChange)> = Vec::new();
403 for patch in patches {
404 let Some(Value::Object(changes)) = patch.get("changes") else {
405 continue;
406 };
407 for (path, change_val) in changes {
408 let Some(change) = parse_patch_change(change_val) else {
409 continue;
410 };
411 let (raw, structural) = patch_change_to_perspectives(&change, path);
412 out.push((
413 path.clone(),
414 ArtifactChange {
415 raw,
416 structural: Some(structural),
417 },
418 ));
419 }
420 }
421 out
422}
423
424fn parse_patch_change(v: &Value) -> Option<PatchChange> {
425 serde_json::from_value::<PatchChange>(v.clone()).ok()
426}
427
428fn patch_change_to_perspectives(
429 change: &PatchChange,
430 file_path: &str,
431) -> (Option<String>, StructuralChange) {
432 let mut extra: HashMap<String, Value> = HashMap::new();
433 match change {
434 PatchChange::Add { content, .. } => {
435 extra.insert("operation".into(), json!("add"));
436 extra.insert("byte_count".into(), json!(content.len()));
437 extra.insert("line_count".into(), json!(content.lines().count()));
438 let raw = synth_add_diff(file_path, content);
439 (
440 Some(raw),
441 StructuralChange {
442 change_type: "codex.add".into(),
443 extra,
444 },
445 )
446 }
447 PatchChange::Update {
448 unified_diff,
449 move_path,
450 ..
451 } => {
452 extra.insert("operation".into(), json!("update"));
453 if let Some(mp) = move_path {
454 extra.insert("move_path".into(), json!(mp));
455 }
456 (
457 Some(unified_diff.clone()),
458 StructuralChange {
459 change_type: "codex.update".into(),
460 extra,
461 },
462 )
463 }
464 PatchChange::Delete {
465 original_content, ..
466 } => {
467 extra.insert("operation".into(), json!("delete"));
468 let raw = original_content
469 .as_ref()
470 .map(|c| synth_delete_diff(file_path, c));
471 (
472 raw,
473 StructuralChange {
474 change_type: "codex.delete".into(),
475 extra,
476 },
477 )
478 }
479 PatchChange::Unknown => {
480 extra.insert("operation".into(), json!("unknown"));
481 (
482 None,
483 StructuralChange {
484 change_type: "codex.unknown".into(),
485 extra,
486 },
487 )
488 }
489 }
490}
491
/// Synthesize a single-hunk unified diff that adds `content` as a new file.
/// A trailing newline does not count as an extra (empty) added line.
fn synth_add_diff(_path: &str, content: &str) -> String {
    let mut lines: Vec<&str> = content.split('\n').collect();
    if lines.last() == Some(&"") {
        lines.pop();
    }
    let mut diff = format!("@@ -0,0 +1,{} @@\n", lines.len());
    for line in &lines {
        diff.push('+');
        diff.push_str(line);
        diff.push('\n');
    }
    diff
}
509
/// Synthesize a single-hunk unified diff that deletes the whole of
/// `original`. A trailing newline does not count as an extra removed line.
fn synth_delete_diff(_path: &str, original: &str) -> String {
    let mut lines: Vec<&str> = original.split('\n').collect();
    if lines.last() == Some(&"") {
        lines.pop();
    }
    let mut diff = format!("@@ -1,{} +0,0 @@\n", lines.len());
    for line in &lines {
        diff.push('-');
        diff.push_str(line);
        diff.push('\n');
    }
    diff
}
525
#[cfg(test)]
mod tests {
    use super::*;
    use crate::CodexConvo;
    use std::fs;
    use tempfile::TempDir;
    use toolpath::v1::Graph;

    // Write `body` as a rollout JSONL file inside a temp `.codex` layout and
    // return a CodexConvo resolving into it plus the session file stem.
    // The TempDir is returned so the fixture lives as long as the test.
    fn fixture_session(body: &str) -> (TempDir, CodexConvo, String) {
        let temp = TempDir::new().unwrap();
        let codex = temp.path().join(".codex");
        let day = codex.join("sessions/2026/04/20");
        fs::create_dir_all(&day).unwrap();
        let name = "rollout-2026-04-20T10-00-00-019dabc6-8fef-7681-a054-b5bb75fcb97d";
        fs::write(day.join(format!("{}.jsonl", name)), body).unwrap();
        let resolver = crate::PathResolver::new().with_codex_dir(&codex);
        (temp, CodexConvo::with_resolver(resolver), name.into())
    }

    // A small but representative session: meta with git info, a user message,
    // assistant commentary, one exec_command round-trip, one apply_patch file
    // add, and a final assistant message ending the turn.
    fn minimal_body() -> String {
        [
            r#"{"timestamp":"2026-04-20T16:44:37.772Z","type":"session_meta","payload":{"id":"019dabc6-8fef-7681-a054-b5bb75fcb97d","timestamp":"2026-04-20T16:43:30.171Z","cwd":"/tmp/proj","originator":"codex-tui","cli_version":"0.118.0","source":"cli","git":{"commit_hash":"abc","branch":"main","repository_url":"git@example:x/y.git"}}}"#,
            r#"{"timestamp":"2026-04-20T16:44:37.773Z","type":"turn_context","payload":{"turn_id":"t1","cwd":"/tmp/proj","model":"gpt-5.4"}}"#,
            r#"{"timestamp":"2026-04-20T16:44:37.800Z","type":"response_item","payload":{"type":"message","role":"user","content":[{"type":"input_text","text":"build me a thing"}]}}"#,
            r#"{"timestamp":"2026-04-20T16:44:38.100Z","type":"response_item","payload":{"type":"message","role":"assistant","content":[{"type":"output_text","text":"creating"}],"phase":"commentary"}}"#,
            r#"{"timestamp":"2026-04-20T16:44:38.200Z","type":"response_item","payload":{"type":"function_call","name":"exec_command","arguments":"{\"cmd\":\"pwd\"}","call_id":"c1"}}"#,
            r#"{"timestamp":"2026-04-20T16:44:38.300Z","type":"response_item","payload":{"type":"function_call_output","call_id":"c1","output":"/tmp/proj\n"}}"#,
            r#"{"timestamp":"2026-04-20T16:44:38.400Z","type":"event_msg","payload":{"type":"exec_command_end","call_id":"c1","command":["/bin/bash","-lc","pwd"],"stdout":"/tmp/proj\n","exit_code":0,"aggregated_output":"/tmp/proj\n"}}"#,
            r#"{"timestamp":"2026-04-20T16:44:38.500Z","type":"response_item","payload":{"type":"custom_tool_call","call_id":"c2","name":"apply_patch","input":"*** Begin Patch\n*** Add File: /tmp/proj/a.rs\n+fn main() {}\n*** End Patch"}}"#,
            r#"{"timestamp":"2026-04-20T16:44:38.700Z","type":"event_msg","payload":{"type":"patch_apply_end","call_id":"c2","success":true,"changes":{"/tmp/proj/a.rs":{"type":"add","content":"fn main() {}\n"}}}}"#,
            r#"{"timestamp":"2026-04-20T16:44:38.900Z","type":"response_item","payload":{"type":"message","role":"assistant","content":[{"type":"output_text","text":"done"}],"phase":"final","end_turn":true}}"#,
        ]
        .join("\n")
    }

    // Path id prefix, base URI/ref from session meta, and step count.
    #[test]
    fn derive_path_basic() {
        let (_t, mgr, id) = fixture_session(&minimal_body());
        let session = mgr.read_session(&id).unwrap();
        let path = derive_path(&session, &DeriveConfig::default());

        assert!(path.path.id.starts_with("path-codex-"));
        assert_eq!(path.path.base.as_ref().unwrap().uri, "file:///tmp/proj");
        assert_eq!(
            path.path.base.as_ref().unwrap().ref_str.as_deref(),
            Some("abc")
        );
        assert_eq!(path.steps.len(), 3);
    }

    // Both the human actor and the model-keyed assistant actor are recorded.
    #[test]
    fn derive_path_actors_populated() {
        let (_t, mgr, id) = fixture_session(&minimal_body());
        let session = mgr.read_session(&id).unwrap();
        let path = derive_path(&session, &DeriveConfig::default());
        let actors = path.meta.as_ref().unwrap().actors.as_ref().unwrap();
        assert!(actors.contains_key("human:user"));
        assert!(actors.contains_key("agent:gpt-5.4"));
    }

    // Every step must carry the codex:// conversation artifact change.
    #[test]
    fn derive_path_preserves_conversation_artifact() {
        let (_t, mgr, id) = fixture_session(&minimal_body());
        let session = mgr.read_session(&id).unwrap();
        let path = derive_path(&session, &DeriveConfig::default());
        let artifact = format!("codex://{}", session.id);
        for step in &path.steps {
            assert!(
                step.change.contains_key(&artifact),
                "step {} missing convo artifact",
                step.step.id
            );
        }
    }

    // An apply_patch add becomes a per-file artifact with both a raw unified
    // diff and a structural "codex.add" perspective.
    #[test]
    fn derive_path_surfaces_apply_patch_as_file_artifact() {
        let (_t, mgr, id) = fixture_session(&minimal_body());
        let session = mgr.read_session(&id).unwrap();
        let path = derive_path(&session, &DeriveConfig::default());
        let file_step = path
            .steps
            .iter()
            .find(|s| s.change.contains_key("/tmp/proj/a.rs"))
            .expect("no step carries the file artifact");
        let change = &file_step.change["/tmp/proj/a.rs"];
        assert!(change.raw.is_some(), "raw perspective must be populated");
        assert!(
            change.raw.as_ref().unwrap().contains("+fn main() {}"),
            "raw must be a unified diff"
        );
        let structural = change.structural.as_ref().unwrap();
        assert_eq!(structural.change_type, "codex.add");
        assert_eq!(structural.extra["operation"], "add");
    }

    // Update-type patches keep the recorded unified diff verbatim as raw.
    #[test]
    fn derive_path_update_perspectives_preserved() {
        let body = [
            r#"{"timestamp":"t","type":"session_meta","payload":{"id":"s","timestamp":"t","cwd":"/p","originator":"x","cli_version":"1","source":"cli"}}"#,
            r#"{"timestamp":"t","type":"response_item","payload":{"type":"message","role":"assistant","content":[{"type":"output_text","text":"edit"}]}}"#,
            r#"{"timestamp":"t","type":"response_item","payload":{"type":"custom_tool_call","call_id":"c","name":"apply_patch","input":"*** Update File: /p/a.rs\n@@"}}"#,
            r#"{"timestamp":"t","type":"event_msg","payload":{"type":"patch_apply_end","call_id":"c","success":true,"changes":{"/p/a.rs":{"type":"update","unified_diff":"@@ -1 +1 @@\n-old\n+new"}}}}"#,
        ].join("\n");
        let (_t, mgr, id) = fixture_session(&body);
        let session = mgr.read_session(&id).unwrap();
        let path = derive_path(&session, &DeriveConfig::default());
        let file_change = path
            .steps
            .iter()
            .find_map(|s| s.change.get("/p/a.rs"))
            .expect("update should land as file artifact");
        assert_eq!(file_change.raw.as_deref(), Some("@@ -1 +1 @@\n-old\n+new"));
        let structural = file_change.structural.as_ref().unwrap();
        assert_eq!(structural.change_type, "codex.update");
    }

    // Round-trip through Graph JSON and check all steps sit on head's ancestry.
    #[test]
    fn derive_path_validates() {
        let (_t, mgr, id) = fixture_session(&minimal_body());
        let session = mgr.read_session(&id).unwrap();
        let path = derive_path(&session, &DeriveConfig::default());
        let doc = Graph::from_path(path);
        let json = doc.to_json().unwrap();
        let parsed = Graph::from_json(&json).unwrap();
        let p = parsed.single_path().expect("single-path graph");
        assert!(!p.steps.is_empty());
        let ancestors = toolpath::v1::query::ancestors(&p.steps, &p.path.head);
        assert_eq!(ancestors.len(), p.steps.len(), "all steps on head ancestry");
    }

    // exec_command tool calls should be summarized by their "cmd" argument.
    #[test]
    fn derive_path_shell_summary() {
        let (_t, mgr, id) = fixture_session(&minimal_body());
        let session = mgr.read_session(&id).unwrap();
        let path = derive_path(&session, &DeriveConfig::default());
        let convo_artifact = format!("codex://{}", session.id);
        let step = path
            .steps
            .iter()
            .find(|s| {
                s.change
                    .get(&convo_artifact)
                    .and_then(|c| c.structural.as_ref())
                    .and_then(|sc| sc.extra.get("tool_calls"))
                    .and_then(|v| v.as_array())
                    .map(|arr| arr.iter().any(|v| v["name"] == "exec_command"))
                    .unwrap_or(false)
            })
            .expect("no step with exec_command");
        let calls = step.change[&convo_artifact]
            .structural
            .as_ref()
            .unwrap()
            .extra["tool_calls"]
            .as_array()
            .unwrap();
        let exec = &calls[0];
        assert_eq!(exec["summary"], "pwd");
    }

    // session_meta git fields are surfaced under meta.extra["codex"]["git"].
    #[test]
    fn derive_path_meta_carries_git() {
        let (_t, mgr, id) = fixture_session(&minimal_body());
        let session = mgr.read_session(&id).unwrap();
        let path = derive_path(&session, &DeriveConfig::default());
        let codex_meta = &path.meta.as_ref().unwrap().extra["codex"];
        let git = &codex_meta["git"];
        assert_eq!(git["commit_hash"], "abc");
        assert_eq!(git["branch"], "main");
    }

    // Multiple sessions each map to one path; same session ⇒ same path id.
    #[test]
    fn derive_project_multi() {
        let (_t, mgr, id) = fixture_session(&minimal_body());
        let session = mgr.read_session(&id).unwrap();
        let paths = derive_project(&[session.clone(), session], &DeriveConfig::default());
        assert_eq!(paths.len(), 2);
        assert_eq!(paths[0].path.id, paths[1].path.id);
    }

    // Added lines get "+" prefixes and the hunk header counts them.
    #[test]
    fn synth_add_diff_has_plus_lines() {
        let diff = synth_add_diff("a.rs", "hello\nworld\n");
        assert!(diff.contains("+hello"));
        assert!(diff.contains("+world"));
        assert!(diff.starts_with("@@ -0,0 +1,2 @@"));
    }

    // Deleted lines get "-" prefixes and the hunk header counts them.
    #[test]
    fn synth_delete_diff_has_minus_lines() {
        let diff = synth_delete_diff("a.rs", "gone\n");
        assert!(diff.contains("-gone"));
        assert!(diff.starts_with("@@ -1,1 +0,0 @@"));
    }
}