1use crate::llm::ContentSource;
2use crate::reminders::{append_reminder, builtin};
3use crate::{Environment, PrimitiveToolName, Tool, ToolContext, ToolResult, ToolTier};
4use anyhow::{Context, Result};
5use base64::Engine;
6use serde::Deserialize;
7use serde_json::{Value, json};
8use std::path::Path;
9use std::sync::Arc;
10
11use super::PrimitiveToolContext;
12
/// Soft cap on the estimated token count a single read may return; larger
/// selections get a pagination hint instead of content.
const MAX_TOKENS: usize = 25_000;
/// Rough bytes-per-token heuristic used to estimate token counts from text.
const CHARS_PER_TOKEN: usize = 4;
16
/// Primitive tool that reads files from the agent's environment.
///
/// Text files are returned inline with line numbers (optionally paginated
/// via `offset`/`limit`); supported images and PDFs are attached as base64
/// documents for native model inspection.
pub struct ReadTool<E: Environment> {
    // Environment handle plus the capability set used for permission checks.
    ctx: PrimitiveToolContext<E>,
}
21
22impl<E: Environment> ReadTool<E> {
23 #[must_use]
24 pub const fn new(environment: Arc<E>, capabilities: crate::AgentCapabilities) -> Self {
25 Self {
26 ctx: PrimitiveToolContext::new(environment, capabilities),
27 }
28 }
29}
30
/// Deserialized arguments for the read tool.
#[derive(Debug, Deserialize)]
struct ReadInput {
    /// Path to the file to read. `file_path` is accepted as an alias for
    /// compatibility with clients that use that field name.
    #[serde(alias = "file_path")]
    path: String,
    /// 1-based line number to start from. Only honored for text files.
    #[serde(default)]
    offset: Option<usize>,
    /// Maximum number of lines to return. Only honored for text files.
    #[serde(default)]
    limit: Option<usize>,
}
43
/// Classification of a file's raw bytes, deciding how the read tool
/// presents the content.
enum ReadContent {
    /// Valid UTF-8 text; rendered inline with line numbers.
    Text(String),
    /// A binary format the model can inspect natively (image or PDF);
    /// attached as a base64 document with the given MIME type.
    NativeBinary { mime_type: &'static str },
    /// Binary data in a format the tool does not support.
    UnsupportedBinary,
}
49
50impl<E: Environment + 'static> Tool<()> for ReadTool<E> {
51 type Name = PrimitiveToolName;
52
53 fn name(&self) -> PrimitiveToolName {
54 PrimitiveToolName::Read
55 }
56
57 fn display_name(&self) -> &'static str {
58 "Read File"
59 }
60
61 fn description(&self) -> &'static str {
62 "Read text files directly, and attach supported images/PDFs for native model inspection. Can optionally specify offset and limit for text files."
63 }
64
65 fn tier(&self) -> ToolTier {
66 ToolTier::Observe
67 }
68
69 fn input_schema(&self) -> Value {
70 json!({
71 "type": "object",
72 "properties": {
73 "path": {
74 "type": "string",
75 "description": "Path to the file to read"
76 },
77 "offset": {
78 "type": "integer",
79 "description": "Line number to start from (1-based). Optional. Only applies to text files."
80 },
81 "limit": {
82 "type": "integer",
83 "description": "Number of lines to read. Optional. Only applies to text files."
84 }
85 },
86 "required": ["path"]
87 })
88 }
89
90 async fn execute(&self, _ctx: &ToolContext<()>, input: Value) -> Result<ToolResult> {
91 let input: ReadInput =
92 serde_json::from_value(input).context("Invalid input for read tool")?;
93
94 let path = self.ctx.environment.resolve_path(&input.path);
95
96 if !self.ctx.capabilities.can_read(&path) {
97 return Ok(ToolResult::error(format!(
98 "Permission denied: cannot read '{path}'"
99 )));
100 }
101
102 let exists = self
103 .ctx
104 .environment
105 .exists(&path)
106 .await
107 .context("Failed to check file existence")?;
108
109 if !exists {
110 return Ok(ToolResult::error(format!("File not found: '{path}'")));
111 }
112
113 let is_dir = self
114 .ctx
115 .environment
116 .is_dir(&path)
117 .await
118 .context("Failed to check if path is directory")?;
119
120 if is_dir {
121 return Ok(ToolResult::error(format!(
122 "'{path}' is a directory, not a file"
123 )));
124 }
125
126 let bytes = self
127 .ctx
128 .environment
129 .read_file_bytes(&path)
130 .await
131 .context("Failed to read file")?;
132
133 let mut result = match classify_content(&path, &bytes) {
134 ReadContent::Text(content) => {
135 read_text_content(&path, &content, input.offset, input.limit)
136 }
137 ReadContent::NativeBinary { mime_type } => {
138 if input.offset.is_some() || input.limit.is_some() {
139 ToolResult::error(format!(
140 "offset and limit are only supported for text files. '{path}' is a {mime_type} file."
141 ))
142 } else {
143 ToolResult::success(format!(
144 "Attached '{path}' ({mime_type}, {} bytes) for native model inspection.",
145 bytes.len()
146 ))
147 .with_documents(vec![ContentSource::new(
148 mime_type,
149 base64::engine::general_purpose::STANDARD.encode(&bytes),
150 )])
151 }
152 }
153 ReadContent::UnsupportedBinary => ToolResult::error(format!(
154 "'{path}' is a binary file in an unsupported format. The read tool currently supports text files, images (PNG/JPEG/GIF/WebP), and PDF documents."
155 )),
156 };
157
158 if result.success && result.output == "(empty file)" {
159 append_reminder(&mut result, builtin::READ_EMPTY_FILE_REMINDER);
160 }
161
162 if result.success {
163 append_reminder(&mut result, builtin::READ_SECURITY_REMINDER);
164 }
165
166 Ok(result)
167 }
168}
169
170fn read_text_content(
171 path: &str,
172 content: &str,
173 offset: Option<usize>,
174 limit: Option<usize>,
175) -> ToolResult {
176 let lines: Vec<&str> = content.lines().collect();
177 let total_lines = lines.len();
178 let offset = offset.unwrap_or(1).saturating_sub(1);
179 let selected_lines: Vec<&str> = lines.iter().copied().skip(offset).collect();
180
181 let limit = if let Some(user_limit) = limit {
182 user_limit
183 } else {
184 let selected_content_len: usize = selected_lines.iter().map(|line| line.len() + 1).sum();
185 let estimated_tokens = selected_content_len / CHARS_PER_TOKEN;
186
187 if estimated_tokens > MAX_TOKENS {
188 let suggested_limit = estimate_lines_for_tokens(&selected_lines, MAX_TOKENS);
189 return ToolResult::success(format!(
190 "File too large to read at once (~{estimated_tokens} tokens, max {MAX_TOKENS}).\n\
191 Total lines: {total_lines}\n\n\
192 Use 'offset' and 'limit' parameters to read specific portions.\n\
193 Suggested: Start with offset=1, limit={suggested_limit} to read the first ~{MAX_TOKENS} tokens.\n\n\
194 Example: {{\"path\": \"{path}\", \"offset\": 1, \"limit\": {suggested_limit}}}"
195 ));
196 }
197
198 selected_lines.len()
199 };
200
201 let selected_lines: Vec<String> = lines
202 .into_iter()
203 .skip(offset)
204 .take(limit)
205 .enumerate()
206 .map(|(i, line)| format!("{:>6}\t{}", offset + i + 1, line))
207 .collect();
208
209 let is_empty = selected_lines.is_empty();
210 let output = if is_empty {
211 "(empty file)".to_string()
212 } else {
213 let header = if offset > 0 || limit < total_lines {
214 format!(
215 "Showing lines {}-{} of {} total\n",
216 offset + 1,
217 (offset + selected_lines.len()).min(total_lines),
218 total_lines
219 )
220 } else {
221 String::new()
222 };
223 format!("{header}{}", selected_lines.join("\n"))
224 };
225
226 ToolResult::success(output)
227}
228
229fn classify_content(path: &str, bytes: &[u8]) -> ReadContent {
230 if let Some(mime_type) = detect_native_binary_mime(path, bytes) {
231 return ReadContent::NativeBinary { mime_type };
232 }
233
234 if let Ok(content) = std::str::from_utf8(bytes) {
235 return ReadContent::Text(content.to_string());
236 }
237
238 ReadContent::UnsupportedBinary
239}
240
/// Returns the MIME type for binaries the model can inspect natively
/// (PDF and PNG/JPEG/GIF/WebP images), or `None` otherwise.
///
/// Magic-number sniffing takes precedence; the file extension is only a
/// fallback when no known signature matches.
fn detect_native_binary_mime(path: &str, bytes: &[u8]) -> Option<&'static str> {
    // Simple "starts with" signatures.
    const MAGIC_PREFIXES: &[(&[u8], &str)] = &[
        (b"%PDF-", "application/pdf"),
        (&[0x89, b'P', b'N', b'G', b'\r', b'\n', 0x1a, b'\n'], "image/png"),
        (&[0xff, 0xd8, 0xff], "image/jpeg"),
        (b"GIF87a", "image/gif"),
        (b"GIF89a", "image/gif"),
    ];

    for &(magic, mime) in MAGIC_PREFIXES {
        if bytes.starts_with(magic) {
            return Some(mime);
        }
    }

    // WebP lives inside a RIFF container: "RIFF" at offset 0, "WEBP" at 8.
    if bytes.len() >= 12 && bytes.starts_with(b"RIFF") && &bytes[8..12] == b"WEBP" {
        return Some("image/webp");
    }

    // Fallback: trust a recognized (case-insensitive) file extension.
    Path::new(path)
        .extension()
        .and_then(|ext| ext.to_str())
        .and_then(|ext| match ext.to_ascii_lowercase().as_str() {
            "pdf" => Some("application/pdf"),
            "png" => Some("image/png"),
            "jpg" | "jpeg" => Some("image/jpeg"),
            "gif" => Some("image/gif"),
            "webp" => Some("image/webp"),
            _ => None,
        })
}
276
277fn estimate_lines_for_tokens(lines: &[&str], max_tokens: usize) -> usize {
279 let max_chars = max_tokens * CHARS_PER_TOKEN;
280 let mut total_chars = 0;
281 let mut line_count = 0;
282
283 for line in lines {
284 let line_chars = line.len() + 1;
285 if total_chars + line_chars > max_chars {
286 break;
287 }
288 total_chars += line_chars;
289 line_count += 1;
290 }
291
292 line_count.max(1)
293}
294
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{AgentCapabilities, InMemoryFileSystem};

    // Builds a ReadTool over an in-memory filesystem with the given
    // capability set.
    fn create_test_tool(
        fs: Arc<InMemoryFileSystem>,
        capabilities: AgentCapabilities,
    ) -> ReadTool<InMemoryFileSystem> {
        ReadTool::new(fs, capabilities)
    }

    // Minimal unit tool context for driving `execute`.
    fn tool_ctx() -> ToolContext<()> {
        ToolContext::new(())
    }

    // Reading without offset/limit returns every line inline and attaches
    // no documents.
    #[tokio::test]
    async fn test_read_entire_file() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        fs.write_file("test.txt", "line 1\nline 2\nline 3").await?;

        let tool = create_test_tool(fs, AgentCapabilities::full_access());
        let result = tool
            .execute(&tool_ctx(), json!({"path": "/workspace/test.txt"}))
            .await?;

        assert!(result.success);
        assert!(result.output.contains("line 1"));
        assert!(result.output.contains("line 2"));
        assert!(result.output.contains("line 3"));
        assert!(result.documents.is_empty());
        Ok(())
    }

    // A 1-based offset starts output at that line and prints a range header.
    #[tokio::test]
    async fn test_read_with_offset() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        fs.write_file("test.txt", "line 1\nline 2\nline 3\nline 4\nline 5")
            .await?;

        let tool = create_test_tool(fs, AgentCapabilities::full_access());
        let result = tool
            .execute(
                &tool_ctx(),
                json!({"path": "/workspace/test.txt", "offset": 3}),
            )
            .await?;

        assert!(result.success);
        assert!(result.output.contains("Showing lines 3-5 of 5 total"));
        assert!(result.output.contains("line 3"));
        assert!(result.output.contains("line 4"));
        assert!(result.output.contains("line 5"));
        // "\t" prefix distinguishes rendered content lines from the header.
        assert!(!result.output.contains("\tline 1"));
        assert!(!result.output.contains("\tline 2"));
        Ok(())
    }

    // A limit alone truncates from the top of the file.
    #[tokio::test]
    async fn test_read_with_limit() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        fs.write_file("test.txt", "line 1\nline 2\nline 3\nline 4\nline 5")
            .await?;

        let tool = create_test_tool(fs, AgentCapabilities::full_access());
        let result = tool
            .execute(
                &tool_ctx(),
                json!({"path": "/workspace/test.txt", "limit": 2}),
            )
            .await?;

        assert!(result.success);
        assert!(result.output.contains("Showing lines 1-2 of 5 total"));
        assert!(result.output.contains("line 1"));
        assert!(result.output.contains("line 2"));
        assert!(!result.output.contains("\tline 3"));
        Ok(())
    }

    // Offset and limit combine to select a window in the middle.
    #[tokio::test]
    async fn test_read_with_offset_and_limit() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        fs.write_file("test.txt", "line 1\nline 2\nline 3\nline 4\nline 5")
            .await?;

        let tool = create_test_tool(fs, AgentCapabilities::full_access());
        let result = tool
            .execute(
                &tool_ctx(),
                json!({"path": "/workspace/test.txt", "offset": 2, "limit": 2}),
            )
            .await?;

        assert!(result.success);
        assert!(result.output.contains("Showing lines 2-3 of 5 total"));
        assert!(result.output.contains("line 2"));
        assert!(result.output.contains("line 3"));
        assert!(!result.output.contains("\tline 1"));
        assert!(!result.output.contains("\tline 4"));
        Ok(())
    }

    // A missing file yields an error ToolResult, not an Err.
    #[tokio::test]
    async fn test_read_nonexistent_file() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));

        let tool = create_test_tool(fs, AgentCapabilities::full_access());
        let result = tool
            .execute(&tool_ctx(), json!({"path": "/workspace/nonexistent.txt"}))
            .await?;

        assert!(!result.success);
        assert!(result.output.contains("File not found"));
        Ok(())
    }

    // Directories are rejected with an explanatory error.
    #[tokio::test]
    async fn test_read_directory_returns_error() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        fs.create_dir("/workspace/subdir").await?;

        let tool = create_test_tool(fs, AgentCapabilities::full_access());
        let result = tool
            .execute(&tool_ctx(), json!({"path": "/workspace/subdir"}))
            .await?;

        assert!(!result.success);
        assert!(result.output.contains("is a directory"));
        Ok(())
    }

    // With no capabilities at all, every read is denied.
    #[tokio::test]
    async fn test_read_permission_denied() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        fs.write_file("secret.txt", "secret content").await?;

        let tool = create_test_tool(fs, AgentCapabilities::none());
        let result = tool
            .execute(&tool_ctx(), json!({"path": "/workspace/secret.txt"}))
            .await?;

        assert!(!result.success);
        assert!(result.output.contains("Permission denied"));
        Ok(())
    }

    // Denied-path globs block reads inside matching directories.
    #[tokio::test]
    async fn test_read_denied_path_via_capabilities() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        fs.write_file("secrets/api_key.txt", "API_KEY=secret")
            .await?;

        let caps =
            AgentCapabilities::read_only().with_denied_paths(vec!["/workspace/secrets/**".into()]);

        let tool = create_test_tool(fs, caps);
        let result = tool
            .execute(
                &tool_ctx(),
                json!({"path": "/workspace/secrets/api_key.txt"}),
            )
            .await?;

        assert!(!result.success);
        assert!(result.output.contains("Permission denied"));
        Ok(())
    }

    // An allow-list restricts reads to matching paths only.
    #[tokio::test]
    async fn test_read_allowed_path_restriction() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        fs.write_file("src/main.rs", "fn main() {} ").await?;
        fs.write_file("config/settings.toml", "key = value").await?;

        let caps = AgentCapabilities::read_only()
            .with_denied_paths(vec![])
            .with_allowed_paths(vec!["/workspace/src/**".into()]);

        let tool = create_test_tool(Arc::clone(&fs), caps.clone());

        // Inside the allow-list: succeeds.
        let result = tool
            .execute(&tool_ctx(), json!({"path": "/workspace/src/main.rs"}))
            .await?;
        assert!(result.success);

        // Outside the allow-list: denied.
        let tool = create_test_tool(fs, caps);
        let result = tool
            .execute(
                &tool_ctx(),
                json!({"path": "/workspace/config/settings.toml"}),
            )
            .await?;
        assert!(!result.success);
        assert!(result.output.contains("Permission denied"));
        Ok(())
    }

    // Empty files succeed with the "(empty file)" placeholder.
    #[tokio::test]
    async fn test_read_empty_file() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        fs.write_file("empty.txt", "").await?;

        let tool = create_test_tool(fs, AgentCapabilities::full_access());
        let result = tool
            .execute(&tool_ctx(), json!({"path": "/workspace/empty.txt"}))
            .await?;

        assert!(result.success);
        assert!(result.output.contains("(empty file)"));
        Ok(())
    }

    // Pagination window in the middle of a 100-line file: inclusive start,
    // `limit` lines, exclusive of neighbors.
    #[tokio::test]
    async fn test_read_large_file_with_pagination() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        let content: String = (1..=100)
            .map(|i| format!("line {i}"))
            .collect::<Vec<_>>()
            .join("\n");
        fs.write_file("large.txt", &content).await?;

        let tool = create_test_tool(fs, AgentCapabilities::full_access());
        let result = tool
            .execute(
                &tool_ctx(),
                json!({"path": "/workspace/large.txt", "offset": 50, "limit": 10}),
            )
            .await?;

        assert!(result.success);
        assert!(result.output.contains("Showing lines 50-59 of 100 total"));
        assert!(result.output.contains("line 50"));
        assert!(result.output.contains("line 59"));
        assert!(!result.output.contains("\tline 49"));
        assert!(!result.output.contains("\tline 60"));
        Ok(())
    }

    // An offset past the end selects nothing and reports "(empty file)".
    #[tokio::test]
    async fn test_read_offset_beyond_file_length() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        fs.write_file("short.txt", "line 1\nline 2").await?;

        let tool = create_test_tool(fs, AgentCapabilities::full_access());
        let result = tool
            .execute(
                &tool_ctx(),
                json!({"path": "/workspace/short.txt", "offset": 100}),
            )
            .await?;

        assert!(result.success);
        assert!(result.output.contains("(empty file)"));
        Ok(())
    }

    // Multi-byte UTF-8 content (CJK, accents, emoji) survives round-trip.
    #[tokio::test]
    async fn test_read_file_with_special_characters() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        let content = "特殊字符\néàü\n🎉emoji\ntab\there";
        fs.write_file("special.txt", content).await?;

        let tool = create_test_tool(fs, AgentCapabilities::full_access());
        let result = tool
            .execute(&tool_ctx(), json!({"path": "/workspace/special.txt"}))
            .await?;

        assert!(result.success);
        assert!(result.output.contains("特殊字符"));
        assert!(result.output.contains("éàü"));
        assert!(result.output.contains("🎉emoji"));
        Ok(())
    }

    // PNG magic bytes cause a base64 attachment with the image MIME type.
    #[tokio::test]
    async fn test_read_image_file_attaches_native_content() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        let png = vec![
            0x89, b'P', b'N', b'G', b'\r', b'\n', 0x1a, b'\n', 1, 2, 3, 4,
        ];
        fs.write_file_bytes("image.png", &png).await?;

        let tool = create_test_tool(fs, AgentCapabilities::full_access());
        let result = tool
            .execute(&tool_ctx(), json!({"path": "/workspace/image.png"}))
            .await?;

        assert!(result.success);
        assert!(result.output.contains("Attached '/workspace/image.png'"));
        assert_eq!(result.documents.len(), 1);
        assert_eq!(result.documents[0].media_type, "image/png");
        Ok(())
    }

    // %PDF- magic bytes produce an application/pdf attachment.
    #[tokio::test]
    async fn test_read_pdf_file_attaches_native_content() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        fs.write_file_bytes("doc.pdf", b"%PDF-1.7\nbody").await?;

        let tool = create_test_tool(fs, AgentCapabilities::full_access());
        let result = tool
            .execute(&tool_ctx(), json!({"path": "/workspace/doc.pdf"}))
            .await?;

        assert!(result.success);
        assert_eq!(result.documents.len(), 1);
        assert_eq!(result.documents[0].media_type, "application/pdf");
        Ok(())
    }

    // offset/limit are rejected for binary attachments.
    #[tokio::test]
    async fn test_read_binary_with_offset_returns_error() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        fs.write_file_bytes("doc.pdf", b"%PDF-1.7\nbody").await?;

        let tool = create_test_tool(fs, AgentCapabilities::full_access());
        let result = tool
            .execute(
                &tool_ctx(),
                json!({"path": "/workspace/doc.pdf", "offset": 1}),
            )
            .await?;

        assert!(!result.success);
        assert!(result.output.contains("only supported for text files"));
        Ok(())
    }

    // Non-UTF-8 bytes with no recognized signature/extension are rejected.
    #[tokio::test]
    async fn test_read_unsupported_binary_returns_error() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        fs.write_file_bytes("archive.bin", &[0, 159, 146, 150])
            .await?;

        let tool = create_test_tool(fs, AgentCapabilities::full_access());
        let result = tool
            .execute(&tool_ctx(), json!({"path": "/workspace/archive.bin"}))
            .await?;

        assert!(!result.success);
        assert!(result.output.contains("unsupported format"));
        Ok(())
    }

    // Static metadata: name, tier, description, and schema shape.
    #[tokio::test]
    async fn test_read_tool_metadata() {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        let tool = create_test_tool(fs, AgentCapabilities::full_access());

        assert_eq!(tool.name(), PrimitiveToolName::Read);
        assert_eq!(tool.tier(), ToolTier::Observe);
        assert!(tool.description().contains("Read"));

        let schema = tool.input_schema();
        assert!(schema.get("properties").is_some());
        assert!(schema["properties"].get("path").is_some());
    }

    // Missing required `path` field propagates as Err (invalid input).
    #[tokio::test]
    async fn test_read_invalid_input() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        let tool = create_test_tool(fs, AgentCapabilities::full_access());

        let result = tool.execute(&tool_ctx(), json!({})).await;

        assert!(result.is_err());
        Ok(())
    }

    // Without an explicit limit, an oversized file returns a pagination
    // hint instead of content.
    #[tokio::test]
    async fn test_read_large_file_exceeds_token_limit() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        let line = "x".repeat(100);
        let content: String = (1..=1500)
            .map(|i| format!("{i}: {line}"))
            .collect::<Vec<_>>()
            .join("\n");
        fs.write_file("huge.txt", &content).await?;

        let tool = create_test_tool(fs, AgentCapabilities::full_access());
        let result = tool
            .execute(&tool_ctx(), json!({"path": "/workspace/huge.txt"}))
            .await?;

        assert!(result.success);
        assert!(result.output.contains("File too large to read at once"));
        assert!(result.output.contains("Total lines: 1500"));
        assert!(result.output.contains("offset"));
        assert!(result.output.contains("limit"));
        Ok(())
    }

    // An explicit limit opts out of the token-size guard.
    #[tokio::test]
    async fn test_read_large_file_with_explicit_limit_bypasses_check() -> anyhow::Result<()> {
        let fs = Arc::new(InMemoryFileSystem::new("/workspace"));
        let line = "x".repeat(100);
        let content: String = (1..=1500)
            .map(|i| format!("{i}: {line}"))
            .collect::<Vec<_>>()
            .join("\n");
        fs.write_file("huge.txt", &content).await?;

        let tool = create_test_tool(fs, AgentCapabilities::full_access());
        let result = tool
            .execute(
                &tool_ctx(),
                json!({"path": "/workspace/huge.txt", "offset": 1, "limit": 10}),
            )
            .await?;

        assert!(result.success);
        assert!(result.output.contains("Showing lines 1-10 of 1500 total"));
        assert!(!result.output.contains("File too large"));
        Ok(())
    }

    // Line estimator counts whole lines within the char budget, minimum 1.
    #[test]
    fn test_estimate_lines_for_tokens() {
        let long = "x".repeat(100);
        let lines: Vec<&str> = vec!["short line", "another short line", &long];

        // Budget of 10 tokens (40 chars) fits the two short lines only.
        let count = estimate_lines_for_tokens(&lines, 10);
        assert_eq!(count, 2);

        // Even a tiny budget suggests at least one line.
        let count = estimate_lines_for_tokens(&lines, 1);
        assert_eq!(count, 1);
    }
}