mod cache;
mod entity;
mod nulid_gen;
mod output;
mod parser;
mod registry;
mod relationship;
mod thumbnail;
mod timeline;
mod verifier;
mod writeback;
use clap::{Parser, Subcommand};
use crate::entity::Entity;
use crate::parser::{ParseError, ParsedCase, SectionKind};
use crate::relationship::Rel;
/// Top-level command-line interface for the weave-content tool.
#[derive(Parser)]
#[command(name = "weave-content", version, about)]
struct Cli {
    /// Which subcommand to run (validate / verify / build).
    #[command(subcommand)]
    command: Command,
}
/// Subcommands supported by the CLI.
#[derive(Subcommand)]
enum Command {
    /// Parse case files and report entities/relationships or errors.
    Validate {
        /// Optional case file or directory to validate.
        path: Option<String>,
        /// Content root override (otherwise inferred from `path`).
        #[arg(long)]
        root: Option<String>,
    },
    /// Verify that URLs referenced by case files are reachable.
    Verify {
        /// Optional case file or directory to verify.
        path: Option<String>,
        /// Content root override (otherwise inferred from `path`).
        #[arg(long)]
        root: Option<String>,
        /// Maximum number of concurrent URL checks.
        #[arg(long, default_value_t = 16)]
        concurrency: usize,
        /// Per-request timeout in seconds.
        #[arg(long, default_value_t = 15)]
        timeout: u64,
        /// Optional path to a verification result cache file.
        #[arg(long)]
        cache: Option<String>,
        /// Report URL errors but exit 0 anyway.
        #[arg(long)]
        warn_only: bool,
    },
    /// Build JSON output (and thumbnails) from case files.
    Build {
        /// Optional case file or directory to build.
        path: Option<String>,
        /// Content root override (otherwise inferred from `path`).
        #[arg(long)]
        root: Option<String>,
        /// Directory to write per-case JSON files into (stdout if absent).
        #[arg(short, long)]
        output: Option<String>,
        /// S3-compatible endpoint for thumbnail uploads.
        #[arg(long)]
        s3_endpoint: Option<String>,
        /// S3 bucket for thumbnail uploads.
        #[arg(long)]
        s3_bucket: Option<String>,
        /// S3 region for thumbnail uploads.
        #[arg(long)]
        s3_region: Option<String>,
        /// Public base URL under which uploaded files are served.
        #[arg(long)]
        files_public_url: Option<String>,
    },
}
/// Entry point: parse the command line, dispatch to the matching
/// subcommand handler, and exit with the handler's status code.
fn main() {
    let cli = Cli::parse();
    // The command is owned here, so destructure by value and hand the
    // handlers borrowed views via `as_deref`.
    let exit_code = match cli.command {
        Command::Validate { path, root } => cmd_validate(path.as_deref(), root.as_deref()),
        Command::Verify {
            path,
            root,
            concurrency,
            timeout,
            cache,
            warn_only,
        } => cmd_verify(
            path.as_deref(),
            root.as_deref(),
            concurrency,
            timeout,
            cache.as_deref(),
            warn_only,
        ),
        Command::Build {
            path,
            root,
            output,
            s3_endpoint,
            s3_bucket,
            s3_region,
            files_public_url,
        } => cmd_build(
            path.as_deref(),
            root.as_deref(),
            output.as_deref(),
            s3_endpoint.as_deref(),
            s3_bucket.as_deref(),
            s3_region.as_deref(),
            files_public_url.as_deref(),
        ),
    };
    std::process::exit(exit_code);
}
/// Run the `validate` subcommand: parse every discovered case file and
/// print per-case summaries or parse errors to stderr.
///
/// Returns a process exit code: 0 when everything validates, otherwise
/// the last non-zero per-case code (1 = parse error, 2 = I/O error).
fn cmd_validate(path: Option<&str>, root: Option<&str>) -> i32 {
    let content_root = resolve_content_root(path, root);
    let reg = match load_registry(&content_root) {
        Ok(r) => r,
        Err(code) => return code,
    };
    let case_files = match resolve_case_files(path, &content_root) {
        Ok(f) => f,
        Err(code) => return code,
    };
    if case_files.is_empty() {
        eprintln!("no case files found");
        return 1;
    }
    if !reg.is_empty() {
        eprintln!("registry: {} entities loaded", reg.len());
    }
    // Validate every file rather than stopping at the first failure, so
    // one broken case does not hide problems in later files.
    let mut exit_code = 0;
    for case_path in &case_files {
        let result = validate_single_case(case_path, &reg);
        if result != 0 {
            exit_code = result;
        }
    }
    exit_code
}
fn validate_single_case(path: &str, reg: ®istry::EntityRegistry) -> i32 {
let content = match std::fs::read_to_string(path) {
Ok(c) => c,
Err(e) => {
eprintln!("{path}: error reading file: {e}");
return 2;
}
};
match parse_full(&content, Some(reg)) {
Ok((case, entities, rels)) => {
eprintln!(
"{path}: ok -- {id}: {title} ({ent} entities, {rel} relationships, {src} sources)",
id = case.id,
title = case.title,
ent = entities.len(),
rel = rels.len(),
src = case.sources.len(),
);
if !case.summary.is_empty() {
eprintln!(
" summary: {}...",
&case.summary[..case.summary.len().min(80)]
);
}
for e in &entities {
let id_display = e.id.as_deref().unwrap_or("(no id)");
eprintln!(
" line {}: {id_display} {} ({}, {} fields)",
e.line,
e.name,
e.label,
e.fields.len()
);
}
for r in &rels {
let id_display = r.id.as_deref().unwrap_or("(no id)");
eprintln!(
" line {}: {id_display} {} -> {}: {}",
r.line, r.source_name, r.target_name, r.rel_type,
);
}
0
}
Err(errors) => {
for err in &errors {
eprintln!("{path}:{err}");
}
1
}
}
}
/// Run the `verify` subcommand: check the reachability of every URL
/// referenced by the discovered case files.
///
/// Returns 0 when all cases verify, otherwise the last non-zero
/// per-case code.
#[allow(clippy::too_many_lines)]
fn cmd_verify(
    path: Option<&str>,
    root: Option<&str>,
    concurrency: usize,
    timeout: u64,
    cache_path: Option<&str>,
    warn_only: bool,
) -> i32 {
    let content_root = resolve_content_root(path, root);
    let reg = match load_registry(&content_root) {
        Ok(r) => r,
        Err(code) => return code,
    };
    let case_files = match resolve_case_files(path, &content_root) {
        Ok(f) => f,
        Err(code) => return code,
    };
    if case_files.is_empty() {
        eprintln!("no case files found");
        return 1;
    }
    // Verify every file; keep the last failing code instead of aborting
    // on the first problem.
    let mut exit_code = 0;
    for case_path in &case_files {
        let result =
            verify_single_case(case_path, &reg, concurrency, timeout, cache_path, warn_only);
        if result != 0 {
            exit_code = result;
        }
    }
    exit_code
}
#[allow(clippy::too_many_lines)]
fn verify_single_case(
path: &str,
reg: ®istry::EntityRegistry,
concurrency: usize,
timeout: u64,
cache_path: Option<&str>,
warn_only: bool,
) -> i32 {
let content = match std::fs::read_to_string(path) {
Ok(c) => c,
Err(e) => {
eprintln!("{path}: error reading file: {e}");
return 2;
}
};
let (case, entities, rels) = match parse_full(&content, Some(reg)) {
Ok(result) => result,
Err(errors) => {
for err in &errors {
eprintln!("{path}:{err}");
}
return 1;
}
};
let mut collect_errors = Vec::new();
let urls = verifier::collect_urls(&case.sources, &entities, &rels, &mut collect_errors);
if !collect_errors.is_empty() {
for err in &collect_errors {
eprintln!("{path}:{err}");
}
return 1;
}
if urls.is_empty() {
eprintln!("{path}: no URLs to verify");
return 0;
}
let mut verify_cache = cache_path.map(|p| match cache::VerifyCache::load(p) {
Ok(c) => {
eprintln!("{path}: using cache {p}");
c
}
Err(e) => {
eprintln!("{path}: cache load warning: {e}");
cache::VerifyCache::load("/dev/null").unwrap_or_else(|_| {
cache::VerifyCache::empty()
})
}
});
let (cached_results, urls_to_check) = partition_cached(&urls, verify_cache.as_ref());
let check_count = urls_to_check.len();
let cached_count = cached_results.len();
if cached_count > 0 {
eprintln!(
"{path}: {cached_count} cached, {check_count} to check (concurrency={concurrency}, timeout={timeout}s)"
);
} else {
eprintln!(
"{path}: verifying {check_count} URLs (concurrency={concurrency}, timeout={timeout}s)"
);
}
let fresh_results = if urls_to_check.is_empty() {
Vec::new()
} else {
let rt = match tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
{
Ok(rt) => rt,
Err(e) => {
eprintln!("{path}: failed to create async runtime: {e}");
return 2;
}
};
rt.block_on(verifier::verify_urls(urls_to_check, concurrency, timeout))
};
if let Some(ref mut vc) = verify_cache {
for check in &fresh_results {
vc.put(&check.url, check.status, check.detail.as_deref());
}
}
let mut all_results = cached_results;
all_results.extend(fresh_results);
let mut has_error = false;
for check in &all_results {
let detail = check.detail.as_deref().unwrap_or("");
match check.status {
verifier::CheckStatus::Ok => {
eprintln!(
" ok {}{}",
check.url,
if check.is_thumbnail {
" [thumbnail]"
} else {
""
}
);
}
verifier::CheckStatus::Warn => {
eprintln!(" warn {} -- {detail}", check.url);
}
verifier::CheckStatus::Error => {
has_error = true;
eprintln!(" ERROR {} -- {detail}", check.url);
}
}
}
let ok_count = all_results
.iter()
.filter(|c| c.status == verifier::CheckStatus::Ok)
.count();
let warn_count = all_results
.iter()
.filter(|c| c.status == verifier::CheckStatus::Warn)
.count();
let err_count = all_results
.iter()
.filter(|c| c.status == verifier::CheckStatus::Error)
.count();
eprintln!("{path}: {ok_count} ok, {warn_count} warn, {err_count} error");
if let Some(ref vc) = verify_cache
&& let Err(e) = vc.save()
{
eprintln!("{path}: cache save warning: {e}");
}
i32::from(has_error && !warn_only)
}
/// Split `urls` into results already present in the verification cache
/// and entries that still need a live check.
///
/// Without a cache, every entry is returned as needing a check.
fn partition_cached(
    urls: &[verifier::UrlEntry],
    verify_cache: Option<&cache::VerifyCache>,
) -> (Vec<verifier::UrlCheck>, Vec<verifier::UrlEntry>) {
    let Some(vc) = verify_cache else {
        return (
            Vec::new(),
            urls.iter().map(verifier::UrlEntry::clone_entry).collect(),
        );
    };
    let mut hits = Vec::new();
    let mut misses = Vec::new();
    for entry in urls {
        match vc.get(entry.url()) {
            Some(hit) => {
                // Cached statuses are stored as strings; anything
                // unrecognized is treated as an error.
                let status = match hit.status.as_str() {
                    "ok" => verifier::CheckStatus::Ok,
                    "warn" => verifier::CheckStatus::Warn,
                    _ => verifier::CheckStatus::Error,
                };
                hits.push(verifier::UrlCheck {
                    url: entry.url().to_string(),
                    status,
                    detail: hit.detail.clone(),
                    is_thumbnail: entry.is_thumbnail(),
                });
            }
            None => misses.push(entry.clone_entry()),
        }
    }
    (hits, misses)
}
/// Run the `build` subcommand: produce JSON output (and, when S3 is
/// configured, thumbnails) for every discovered case file.
///
/// Returns 0 when every case builds, otherwise the last non-zero
/// per-case code.
fn cmd_build(
    path: Option<&str>,
    root: Option<&str>,
    output_dir: Option<&str>,
    s3_endpoint: Option<&str>,
    s3_bucket: Option<&str>,
    s3_region: Option<&str>,
    files_public_url: Option<&str>,
) -> i32 {
    let content_root = resolve_content_root(path, root);
    let reg = match load_registry(&content_root) {
        Ok(r) => r,
        Err(code) => return code,
    };
    let case_files = match resolve_case_files(path, &content_root) {
        Ok(f) => f,
        Err(code) => return code,
    };
    if case_files.is_empty() {
        eprintln!("no case files found");
        return 1;
    }
    // Build every file; remember the last failing code so one bad case
    // does not mask errors in later files.
    let mut exit_code = 0;
    for case_path in &case_files {
        let result = build_single_case(
            case_path,
            &reg,
            output_dir,
            s3_endpoint,
            s3_bucket,
            s3_region,
            files_public_url,
        );
        if result != 0 {
            exit_code = result;
        }
    }
    exit_code
}
#[allow(clippy::too_many_arguments)]
fn build_single_case(
path: &str,
reg: ®istry::EntityRegistry,
output_dir: Option<&str>,
s3_endpoint: Option<&str>,
s3_bucket: Option<&str>,
s3_region: Option<&str>,
files_public_url: Option<&str>,
) -> i32 {
let content = match std::fs::read_to_string(path) {
Ok(c) => c,
Err(e) => {
eprintln!("{path}: error reading file: {e}");
return 2;
}
};
let (case, entities, rels) = match parse_full(&content, Some(reg)) {
Ok(result) => result,
Err(errors) => {
for err in &errors {
eprintln!("{path}:{err}");
}
return 1;
}
};
let referenced_entities = collect_referenced_registry_entities(&rels, &entities, reg);
let build_result = match output::build_output(
&case.id,
&case.title,
&case.summary,
&case.sources,
&entities,
&rels,
&referenced_entities,
) {
Ok(out) => out,
Err(errors) => {
for err in &errors {
eprintln!("{path}:{err}");
}
return 1;
}
};
let mut case_output = build_result.output;
if !build_result.case_pending.is_empty() {
let mut pending = build_result.case_pending;
if let Some(modified) = writeback::apply_writebacks(&content, &mut pending) {
if let Err(e) = writeback::write_file(std::path::Path::new(path), &modified) {
eprintln!("{e}");
return 2;
}
let count = pending.len();
eprintln!("{path}: wrote {count} generated ID(s) back to file");
}
}
if let Some(code) = writeback_registry_entities(&build_result.registry_pending, reg) {
return code;
}
if let Some(config) =
thumbnail::S3Config::from_args_or_env(s3_endpoint, s3_bucket, s3_region, files_public_url)
{
let rt = match tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
{
Ok(rt) => rt,
Err(e) => {
eprintln!("{path}: failed to create async runtime: {e}");
return 2;
}
};
thumbnail::process_thumbnails(&mut case_output, &config, &rt);
}
write_case_output(path, &case.id, &case_output, output_dir)
}
fn writeback_registry_entities(
pending: &[(String, writeback::PendingId)],
reg: ®istry::EntityRegistry,
) -> Option<i32> {
for (entity_name, pending_id) in pending {
let Some(entry) = reg.get_by_name(entity_name) else {
continue;
};
let entity_path = &entry.path;
let entity_content = match std::fs::read_to_string(entity_path) {
Ok(c) => c,
Err(e) => {
eprintln!("{}: error reading file: {e}", entity_path.display());
return Some(2);
}
};
let fm_end = writeback::find_front_matter_end(&entity_content);
let mut ids = vec![writeback::PendingId {
line: fm_end.unwrap_or(2),
id: pending_id.id.clone(),
kind: writeback::WriteBackKind::EntityFrontMatter,
}];
if let Some(modified) = writeback::apply_writebacks(&entity_content, &mut ids) {
if let Err(e) = writeback::write_file(entity_path, &modified) {
eprintln!("{e}");
return Some(2);
}
eprintln!("{}: wrote generated ID back to file", entity_path.display());
}
}
None
}
/// Serialize the built case as pretty JSON and write it either to
/// `<output_dir>/<case_id>.json` or, when no directory is given, to
/// stdout.
///
/// Returns 0 on success, 2 on serialization or write errors.
fn write_case_output(
    path: &str,
    case_id: &str,
    case_output: &output::CaseOutput,
    output_dir: Option<&str>,
) -> i32 {
    // Serialize once up front instead of duplicating the serialization
    // (and its error handling) in both output branches.
    let json = match serde_json::to_string_pretty(case_output) {
        Ok(json) => json,
        Err(e) => {
            eprintln!("{path}: JSON serialization error: {e}");
            return 2;
        }
    };
    match output_dir {
        Some(dir) => {
            let out_path = format!("{dir}/{case_id}.json");
            if let Err(e) = std::fs::write(&out_path, json) {
                eprintln!("{out_path}: error writing file: {e}");
                return 2;
            }
            eprintln!("{path} -> {out_path}");
        }
        None => println!("{json}"),
    }
    0
}
/// Determine the content root directory.
///
/// An explicit `root` always wins. Otherwise, when `path` is a file,
/// walk its ancestors looking for a directory that contains a
/// `cases/`, `actors/` or `institutions/` subdirectory; when `path`
/// is a directory, use it directly. Falls back to `"."`.
fn resolve_content_root(path: Option<&str>, root: Option<&str>) -> std::path::PathBuf {
    if let Some(explicit) = root {
        return std::path::PathBuf::from(explicit);
    }
    let Some(candidate) = path else {
        return std::path::PathBuf::from(".");
    };
    let candidate = std::path::Path::new(candidate);
    if candidate.is_dir() {
        return candidate.to_path_buf();
    }
    if candidate.is_file() {
        if let Some(parent) = candidate.parent() {
            // Walk upward until a directory looks like a content root.
            for ancestor in parent.ancestors() {
                if ancestor.join("cases").is_dir()
                    || ancestor.join("actors").is_dir()
                    || ancestor.join("institutions").is_dir()
                {
                    return ancestor.to_path_buf();
                }
            }
            return parent.to_path_buf();
        }
    }
    std::path::PathBuf::from(".")
}
/// Load the entity registry from the content root, printing any load
/// errors to stderr and mapping failure to exit code 1.
fn load_registry(content_root: &std::path::Path) -> Result<registry::EntityRegistry, i32> {
    registry::EntityRegistry::load(content_root).map_err(|errors| {
        for err in &errors {
            eprintln!("registry: {err}");
        }
        1
    })
}
/// Resolve the list of case files to process.
///
/// An explicit file path yields exactly that file; an explicit
/// directory (or no path) falls through to scanning
/// `<content_root>/cases` for Markdown files, sorted by path.
/// A path that is neither file nor directory is exit code 2.
fn resolve_case_files(
    path: Option<&str>,
    content_root: &std::path::Path,
) -> Result<Vec<String>, i32> {
    if let Some(raw) = path {
        let candidate = std::path::Path::new(raw);
        if candidate.is_file() {
            return Ok(vec![raw.to_string()]);
        }
        if !candidate.is_dir() {
            eprintln!("{raw}: not a file or directory");
            return Err(2);
        }
    }
    let cases_dir = content_root.join("cases");
    if !cases_dir.is_dir() {
        // No cases directory at all: empty list, caller reports it.
        return Ok(Vec::new());
    }
    let mut files = Vec::new();
    discover_md_files(&cases_dir, &mut files, 0);
    files.sort();
    Ok(files)
}
/// Recursively collect the paths of `.md` files under `dir`, at most
/// `MAX_DEPTH` directory levels deep, visiting entries in name order.
/// Unreadable directories and non-UTF-8 paths are silently skipped.
fn discover_md_files(dir: &std::path::Path, files: &mut Vec<String>, depth: usize) {
    const MAX_DEPTH: usize = 3;
    if depth > MAX_DEPTH {
        return;
    }
    let Ok(read) = std::fs::read_dir(dir) else {
        return;
    };
    // Sort children by file name so discovery order is deterministic.
    let mut children: Vec<_> = read.filter_map(Result::ok).collect();
    children.sort_by_key(std::fs::DirEntry::file_name);
    for child in children {
        let child_path = child.path();
        if child_path.is_dir() {
            discover_md_files(&child_path, files, depth + 1);
        } else {
            let is_md = child_path.extension().and_then(|e| e.to_str()) == Some("md");
            if is_md {
                if let Some(s) = child_path.to_str() {
                    files.push(s.to_string());
                }
            }
        }
    }
}
fn collect_referenced_registry_entities(
rels: &[Rel],
inline_entities: &[Entity],
reg: ®istry::EntityRegistry,
) -> Vec<Entity> {
let inline_names: Vec<&str> = inline_entities.iter().map(|e| e.name.as_str()).collect();
let mut referenced = Vec::new();
let mut seen_names: Vec<String> = Vec::new();
for rel in rels {
for name in [&rel.source_name, &rel.target_name] {
if !inline_names.contains(&name.as_str()) && !seen_names.contains(name) {
if let Some(entry) = reg.get_by_name(name) {
referenced.push(entry.entity.clone());
seen_names.push(name.clone());
}
}
}
}
referenced
}
fn parse_full(
content: &str,
reg: Option<®istry::EntityRegistry>,
) -> Result<(ParsedCase, Vec<Entity>, Vec<Rel>), Vec<ParseError>> {
let case = parser::parse(content)?;
let mut errors = Vec::new();
let mut all_entities = Vec::new();
for section in &case.sections {
if section.kind == SectionKind::Events {
let entities =
entity::parse_entities(§ion.body, section.kind, section.line, &mut errors);
all_entities.extend(entities);
}
}
let mut entity_names: Vec<&str> = all_entities.iter().map(|e| e.name.as_str()).collect();
if let Some(registry) = reg {
for name in registry.names() {
if !entity_names.contains(&name) {
entity_names.push(name);
}
}
}
let event_names: Vec<&str> = all_entities
.iter()
.filter(|e| e.label == entity::Label::PublicRecord)
.map(|e| e.name.as_str())
.collect();
let mut all_rels = Vec::new();
for section in &case.sections {
if section.kind == SectionKind::Relationships {
let rels = relationship::parse_relationships(
§ion.body,
section.line,
&entity_names,
&case.sources,
&mut errors,
);
all_rels.extend(rels);
}
}
for section in &case.sections {
if section.kind == SectionKind::Timeline {
let rels =
timeline::parse_timeline(§ion.body, section.line, &event_names, &mut errors);
all_rels.extend(rels);
}
}
if errors.is_empty() {
Ok((case, all_entities, all_rels))
} else {
Err(errors)
}
}
#[cfg(test)]
mod tests {
    use super::*;

    /// A complete case exercising front matter, events, a relationships
    /// section with a source override, and a timeline.
    const FULL_CASE: &str = r"---
id: bonnick-v-arsenal
sources:
- https://www.theguardian.com/football/2025/feb/03/bonnick
- https://novaramedia.com/2025/02/04/bonnick
---
# Bonnick v Arsenal FC
Kit manager dismissed over social media posts about Israel-Gaza.
## Events
### Bonnick dismissal
- occurred_at: 2024-12-24
- document_type: termination
- description: Arsenal dismisses Bonnick over social media posts
regarding Israel-Gaza conflict.
### FA investigation finding
- occurred_at: 2024
- document_type: investigation
- description: FA investigates and finds the posts did not breach
FA rules. Matter closed by FA.
### Employment tribunal filing
- occurred_at: 2025-02-03
- document_type: filing
- description: Bonnick files employment tribunal claim against Arsenal.
## Relationships
- Bonnick dismissal -> FA investigation finding: related_to
- FA investigation finding -> Employment tribunal filing: related_to
- Bonnick dismissal -> Employment tribunal filing: related_to
- source: https://novaramedia.com/2025/02/04/bonnick
## Timeline
Bonnick dismissal -> FA investigation finding -> Employment tribunal filing
";

    #[test]
    fn parse_full_case_file() {
        let (case, entities, rels) = parse_full(FULL_CASE, None).unwrap();
        assert_eq!(case.id, "bonnick-v-arsenal");
        assert_eq!(case.title, "Bonnick v Arsenal FC");
        assert!(case.summary.contains("Kit manager dismissed"));
        assert_eq!(case.sources.len(), 2);
        assert_eq!(entities.len(), 3);
        assert!(
            entities
                .iter()
                .all(|e| e.label == entity::Label::PublicRecord)
        );
        let dismissal = entities
            .iter()
            .find(|e| e.name == "Bonnick dismissal")
            .unwrap();
        assert_eq!(dismissal.label, entity::Label::PublicRecord);
        // 3 explicit relationships + 2 "next" links from the timeline.
        assert_eq!(rels.len(), 5);
        let next_rels: Vec<_> = rels.iter().filter(|r| r.rel_type == "next").collect();
        assert_eq!(next_rels.len(), 2);
        assert_eq!(next_rels[0].source_name, "Bonnick dismissal");
        assert_eq!(next_rels[0].target_name, "FA investigation finding");
        assert_eq!(next_rels[1].source_name, "FA investigation finding");
        assert_eq!(next_rels[1].target_name, "Employment tribunal filing");
    }

    #[test]
    fn parse_full_minimal_case() {
        let input = r"---
id: minimal-test
sources:
- https://example.com/source
---
# Minimal Test Case
A simple test.
## Events
### Something happened
- occurred_at: 2025-01-01
- document_type: court_ruling
";
        let (case, entities, rels) = parse_full(input, None).unwrap();
        assert_eq!(case.id, "minimal-test");
        assert_eq!(case.title, "Minimal Test Case");
        assert_eq!(entities.len(), 1);
        assert_eq!(entities[0].name, "Something happened");
        assert!(rels.is_empty());
    }

    #[test]
    fn json_snapshot_full_case() {
        let (case, entities, rels) = parse_full(FULL_CASE, None).unwrap();
        let build_result = output::build_output(
            &case.id,
            &case.title,
            &case.summary,
            &case.sources,
            &entities,
            &rels,
            &[],
        )
        .unwrap();
        let json = serde_json::to_string_pretty(&build_result.output).unwrap();
        assert!(json.contains("\"case_id\": \"bonnick-v-arsenal\""));
        assert!(json.contains("\"title\": \"Bonnick v Arsenal FC\""));
        assert!(json.contains("\"label\": \"public_record\""));
        assert!(json.contains("\"name\": \"Bonnick dismissal\""));
        assert!(json.contains("\"name\": \"FA investigation finding\""));
        assert!(json.contains("\"document_type\": \"termination\""));
        assert!(json.contains("\"document_type\": \"investigation\""));
        assert!(json.contains("\"type\": \"related_to\""));
        assert!(json.contains("\"type\": \"next\""));
        let output: serde_json::Value = serde_json::from_str(&json).unwrap();
        let nodes = output["nodes"].as_array().unwrap();
        let rels_arr = output["relationships"].as_array().unwrap();
        // Every node gets a generated, ULID-sized ID.
        for node in nodes {
            let id = node["id"].as_str().unwrap();
            assert!(!id.is_empty());
            assert!(id.len() >= 20);
        }
        for rel in rels_arr {
            let id = rel["id"].as_str().unwrap();
            assert!(!id.is_empty());
        }
        // Referential integrity: every relationship endpoint is a node.
        let node_ids: Vec<&str> = nodes.iter().map(|n| n["id"].as_str().unwrap()).collect();
        for rel in rels_arr {
            let source_id = rel["source_id"].as_str().unwrap();
            let target_id = rel["target_id"].as_str().unwrap();
            assert!(
                node_ids.contains(&source_id),
                "source_id {source_id} not found in nodes"
            );
            assert!(
                node_ids.contains(&target_id),
                "target_id {target_id} not found in nodes"
            );
        }
    }

    #[test]
    fn json_snapshot_omits_empty_fields() {
        let input = r"---
id: sparse
sources:
- https://example.com/src
---
# Sparse Case
Summary.
## Events
### Something
- occurred_at: 2025-01-01
";
        let (case, entities, rels) = parse_full(input, None).unwrap();
        let build_result = output::build_output(
            &case.id,
            &case.title,
            &case.summary,
            &case.sources,
            &entities,
            &rels,
            &[],
        )
        .unwrap();
        let json = serde_json::to_string_pretty(&build_result.output).unwrap();
        // Optional fields must be omitted entirely, not serialized as null.
        assert!(!json.contains("\"qualifier\""));
        assert!(!json.contains("\"description\""));
        assert!(!json.contains("\"thumbnail\""));
        assert!(!json.contains("\"aliases\""));
        assert!(!json.contains("\"urls\""));
        assert!(json.contains("\"occurred_at\": \"2025-01-01\""));
    }

    #[test]
    fn cross_file_resolution_with_registry() {
        use std::path::PathBuf;
        let entries = vec![registry::RegistryEntry {
            entity: Entity {
                name: "Mark Bonnick".to_string(),
                label: entity::Label::Actor,
                fields: vec![(
                    "nationality".to_string(),
                    entity::FieldValue::Single("British".to_string()),
                )],
                id: Some("01JXYZ123456789ABCDEFGHIJK".to_string()),
                line: 1,
            },
            path: PathBuf::from("actors/mark-bonnick.md"),
        }];
        let reg = registry::EntityRegistry::from_entries(entries).unwrap();
        let input = r"---
id: test-cross-ref
sources:
- https://example.com/src
---
# Cross Reference Test
Summary.
## Events
### Dismissal
- occurred_at: 2024-12-24
- document_type: termination
## Relationships
- Mark Bonnick -> Dismissal: related_to
";
        // Without the registry the endpoint name is unknown...
        let err = parse_full(input, None).unwrap_err();
        assert!(err.iter().any(|e| e.message.contains("Mark Bonnick")));
        // ...with the registry the same input parses cleanly.
        let (case, entities, rels) = parse_full(input, Some(&reg)).unwrap();
        assert_eq!(case.id, "test-cross-ref");
        assert_eq!(entities.len(), 1);
        assert_eq!(rels.len(), 1);
        assert_eq!(rels[0].source_name, "Mark Bonnick");
        assert_eq!(rels[0].target_name, "Dismissal");
    }
}