use crate::common::Drip;
use serde_json::{json, Value};
use std::fs;
use std::io::Write;
use std::process::{Command, Stdio};
/// Spawn `drip hook claude`, feed `payload` as JSON on stdin, and return the
/// hook's stdout as a String.
///
/// Panics (failing the test) if the hook exits non-zero; the panic message
/// includes the captured stderr so a CI failure shows *why* the hook died
/// instead of a bare "assertion failed: o.status.success()".
fn run_claude_hook(drip: &Drip, payload: Value) -> String {
    let mut child = Command::new(&drip.bin)
        .args(["hook", "claude"])
        .env("DRIP_DATA_DIR", drip.data_dir.path())
        .env("DRIP_SESSION_ID", &drip.session_id)
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn()
        .unwrap();
    // Take ownership of stdin (rather than borrowing it in place) so the pipe
    // is dropped — and the child sees EOF — as soon as the payload is written.
    let mut stdin = child.stdin.take().expect("stdin was piped");
    stdin.write_all(payload.to_string().as_bytes()).unwrap();
    drop(stdin);
    let o = child.wait_with_output().unwrap();
    assert!(
        o.status.success(),
        "hook exited with {:?}; stderr: {}",
        o.status,
        String::from_utf8_lossy(&o.stderr)
    );
    String::from_utf8_lossy(&o.stdout).into_owned()
}
#[test]
fn read_with_offset_passes_through() {
    // A windowed Read of a file drip has no baseline for must reach the
    // native tool untouched.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let big = scratch.path().join("big.txt");
    let body: String = (0..200).map(|i| format!("l{i}\n")).collect();
    fs::write(&big, body).unwrap();
    let payload = json!({
        "tool_name": "Read",
        "tool_input": {
            "file_path": big.to_string_lossy(),
            "offset": 100,
            "limit": 50
        }
    });
    let out = run_claude_hook(&drip, payload);
    let v: Value = serde_json::from_str(out.trim()).unwrap();
    assert_eq!(
        v["hookSpecificOutput"]["permissionDecision"],
        json!("allow"),
        "partial read on unknown file (no baseline) must pass native: {out}"
    );
}
#[test]
fn read_with_only_limit_also_passes_through() {
    // Same as the offset case, but with `limit` alone: still a passthrough.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let g = scratch.path().join("g.txt");
    fs::write(&g, "x\n".repeat(50)).unwrap();
    let payload = json!({
        "tool_name": "Read",
        "tool_input": { "file_path": g.to_string_lossy(), "limit": 10 }
    });
    let out = run_claude_hook(&drip, payload);
    let v: Value = serde_json::from_str(out.trim()).unwrap();
    assert_eq!(
        v["hookSpecificOutput"]["permissionDecision"],
        json!("allow"),
        "limit-only read on unknown file must pass native: {out}"
    );
}
#[test]
fn full_read_after_partial_read_delivers_full_then_collapses() {
    // Sequence under test: narrow partial (native) -> first full read (native,
    // the agent has not yet seen the whole file) -> second full read
    // (collapses to the unchanged sentinel).
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let seq = scratch.path().join("seq.txt");
    let mut body = String::new();
    for i in 1..=50 {
        body.push_str(&format!("l{i}\n"));
    }
    fs::write(&seq, body).unwrap();
    // One full-file Read payload, reused for both full reads below.
    let full_read = || {
        run_claude_hook(
            &drip,
            json!({
                "tool_name": "Read",
                "tool_input": { "file_path": seq.to_string_lossy() }
            }),
        )
    };
    let p = run_claude_hook(
        &drip,
        json!({
            "tool_name": "Read",
            "tool_input": {
                "file_path": seq.to_string_lossy(),
                "offset": 0, "limit": 1
            }
        }),
    );
    let vp: Value = serde_json::from_str(p.trim()).unwrap();
    assert_eq!(
        vp["hookSpecificOutput"]["permissionDecision"],
        json!("allow"),
        "partial read still passes through: {p}"
    );
    let r1 = full_read();
    let v1: Value = serde_json::from_str(r1.trim()).unwrap();
    assert_eq!(
        v1["hookSpecificOutput"]["permissionDecision"],
        json!("allow"),
        "first full read after narrow partial must pass to native (agent hasn't seen full content yet): {r1}"
    );
    let r2 = full_read();
    let v2: Value = serde_json::from_str(r2.trim()).unwrap();
    assert_eq!(v2["hookSpecificOutput"]["permissionDecision"], json!("deny"));
    let reason2 = v2["hookSpecificOutput"]["permissionDecisionReason"]
        .as_str()
        .unwrap();
    assert!(reason2.contains("[DRIP: unchanged"));
}
#[test]
fn full_read_after_external_change_passes_through_not_delta() {
    // An out-of-band edit invalidates the baseline: the next full read must go
    // native (so the harness's read tracker refreshes), after which the
    // unchanged-sentinel optimisation resumes.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let target = scratch.path().join("ext-change.txt");
    let seed: String = (1..=120).map(|i| format!("line {i}\n")).collect();
    fs::write(&target, seed).unwrap();
    let read_full = || {
        run_claude_hook(
            &drip,
            json!({
                "tool_name": "Read",
                "tool_input": { "file_path": target.to_string_lossy() }
            }),
        )
    };
    let r1 = read_full();
    let v1: Value = serde_json::from_str(r1.trim()).unwrap();
    assert_eq!(v1["hookSpecificOutput"]["permissionDecision"], json!("allow"));
    // Simulate an editor (not the agent) touching the file on disk.
    let touched = std::fs::read_to_string(&target)
        .unwrap()
        .replace("line 60\n", "line 60 — TOUCHED EXTERNALLY\n");
    fs::write(&target, &touched).unwrap();
    let r2 = read_full();
    let v2: Value = serde_json::from_str(r2.trim()).unwrap();
    assert_eq!(
        v2["hookSpecificOutput"]["permissionDecision"],
        json!("allow"),
        "external change must trigger native passthrough so the harness's read-tracker refreshes: {r2}"
    );
    let r3 = read_full();
    let v3: Value = serde_json::from_str(r3.trim()).unwrap();
    assert_eq!(v3["hookSpecificOutput"]["permissionDecision"], json!("deny"));
    let reason3 = v3["hookSpecificOutput"]["permissionDecisionReason"]
        .as_str()
        .unwrap();
    assert!(
        reason3.contains("[DRIP: unchanged"),
        "after the external-change passthrough, optimisation resumes: {reason3}"
    );
}
/// Seed `path` with `n` lines of the form `line <i>\n` (1-based numbering).
fn make_numbered_file(path: &std::path::Path, n: usize) {
    let mut body = String::new();
    for i in 1..=n {
        body.push_str(&format!("line {i}\n"));
    }
    fs::write(path, body).unwrap();
}
#[test]
fn partial_read_on_unchanged_baseline_returns_window_unchanged() {
    // With a full baseline on record and no on-disk change, a windowed
    // re-read is intercepted with a window-scoped unchanged sentinel.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let known = scratch.path().join("known.txt");
    make_numbered_file(&known, 200);
    drip.read_stdout(&known);
    let payload = json!({
        "tool_name": "Read",
        "tool_input": {
            "file_path": known.to_string_lossy(),
            "offset": 50,
            "limit": 20
        }
    });
    let out = run_claude_hook(&drip, payload);
    let v: Value = serde_json::from_str(out.trim()).unwrap();
    assert_eq!(
        v["hookSpecificOutput"]["permissionDecision"],
        json!("deny"),
        "with baseline, partial read should be intercepted: {out}"
    );
    let reason = v["hookSpecificOutput"]["permissionDecisionReason"]
        .as_str()
        .unwrap();
    assert!(
        reason.contains("[DRIP: unchanged (lines 50-69)"),
        "expected window-scoped unchanged header, got: {reason}"
    );
}
#[test]
fn partial_read_after_external_change_passes_through_to_refresh_harness() {
    // The external edit lands inside the requested window; drip must not
    // answer from its stale baseline — pass native so the harness refreshes.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let f = scratch.path().join("inrange.txt");
    make_numbered_file(&f, 200);
    drip.read_stdout(&f);
    let touched = std::fs::read_to_string(&f)
        .unwrap()
        .replace("line 60\n", "line 60 — TOUCHED\n");
    fs::write(&f, &touched).unwrap();
    let out = run_claude_hook(
        &drip,
        json!({
            "tool_name": "Read",
            "tool_input": {
                "file_path": f.to_string_lossy(),
                "offset": 50,
                "limit": 20
            }
        }),
    );
    let v: Value = serde_json::from_str(out.trim()).unwrap();
    assert_eq!(
        v["hookSpecificOutput"]["permissionDecision"],
        json!("allow"),
        "external change must pass through to refresh harness: {out}"
    );
}
#[test]
fn partial_read_delta_bigger_than_window_passes_through_native() {
    // A one-line window can't carry a delta describing a larger change;
    // drip must bail to the native tool rather than truncate the diff.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let f = scratch.path().join("tiny-window.txt");
    make_numbered_file(&f, 5);
    drip.read_stdout(&f);
    let touched = std::fs::read_to_string(&f)
        .unwrap()
        .replace("line 3\n", "line 3 changed\n");
    fs::write(&f, &touched).unwrap();
    let out = run_claude_hook(
        &drip,
        json!({
            "tool_name": "Read",
            "tool_input": {
                "file_path": f.to_string_lossy(),
                "offset": 3,
                "limit": 1
            }
        }),
    );
    let v: Value = serde_json::from_str(out.trim()).unwrap();
    assert_eq!(
        v["hookSpecificOutput"]["permissionDecision"],
        json!("allow"),
        "diff bigger than the requested window should pass native: {out}"
    );
}
#[test]
fn partial_read_after_external_change_outside_window_still_passes_through() {
    // The edit is far outside the requested window (line 5 vs 50-69), but any
    // on-disk change invalidates the whole-file baseline.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let f = scratch.path().join("outrange.txt");
    make_numbered_file(&f, 200);
    drip.read_stdout(&f);
    let touched = std::fs::read_to_string(&f)
        .unwrap()
        .replace("line 5\n", "line 5 — TOUCHED OUTSIDE\n");
    fs::write(&f, &touched).unwrap();
    let out = run_claude_hook(
        &drip,
        json!({
            "tool_name": "Read",
            "tool_input": {
                "file_path": f.to_string_lossy(),
                "offset": 50,
                "limit": 20
            }
        }),
    );
    let v: Value = serde_json::from_str(out.trim()).unwrap();
    assert_eq!(
        v["hookSpecificOutput"]["permissionDecision"],
        json!("allow"),
        "external change anywhere in the file must trigger passthrough: {out}"
    );
}
#[test]
fn partial_read_passthrough_accounts_window_tokens_not_file_size() {
    // A native passthrough is still metered, but at the size of the WINDOW
    // actually delivered — never the whole file — and claims no savings.
    let drip = Drip::new();
    let dir = tempfile::tempdir().unwrap();
    let f = dir.path().join("accounting.txt");
    make_numbered_file(&f, 200);
    run_claude_hook(
        &drip,
        json!({
            "tool_name": "Read",
            "tool_input": {
                "file_path": f.to_string_lossy(),
                "offset": 50,
                "limit": 20
            }
        }),
    );
    // NOTE: renamed from `json` — a local named `json` shadowed the `json!`
    // macro's namesake and read confusingly.
    let meter = drip.cmd().args(["meter", "--json"]).output().unwrap();
    assert!(meter.status.success());
    let v: Value = serde_json::from_slice(&meter.stdout).unwrap();
    let tokens_full = v["tokens_full"].as_i64().unwrap();
    let tokens_sent = v["tokens_sent"].as_i64().unwrap();
    let saved = v["tokens_saved"].as_i64().unwrap();
    // Rough 4-bytes-per-token heuristic over the whole file, as an upper bound
    // the window accounting must stay strictly below.
    let file_tokens = (std::fs::read_to_string(&f).unwrap().len() as i64) / 4;
    assert!(
        tokens_full > 0,
        "passthrough must show in the meter, got 0: {v}"
    );
    assert!(
        tokens_full < file_tokens,
        "passthrough must record WINDOW size, not file size (tokens_full={tokens_full}, file_tokens={file_tokens}): {v}"
    );
    assert_eq!(
        tokens_full, tokens_sent,
        "passthrough claims 0 savings, so tokens_full must equal tokens_sent: {v}"
    );
    assert_eq!(saved, 0, "passthrough must claim 0 savings: {v}");
}
#[test]
fn partial_reads_track_per_window_coverage() {
    // seen_ranges is tracked per window: a window only collapses once it has
    // been delivered natively, and a read straddling an unseen gap must pass
    // through again.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let seeded = scratch.path().join("seeded.txt");
    make_numbered_file(&seeded, 200);
    // All five reads below differ only by (offset, limit).
    let window = |offset: u64, limit: u64| {
        run_claude_hook(
            &drip,
            json!({
                "tool_name": "Read",
                "tool_input": {
                    "file_path": seeded.to_string_lossy(),
                    "offset": offset,
                    "limit": limit
                }
            }),
        )
    };
    let r1 = window(50, 20);
    let v1: Value = serde_json::from_str(r1.trim()).unwrap();
    assert_eq!(
        v1["hookSpecificOutput"]["permissionDecision"],
        json!("allow"),
        "first partial read on unknown file must pass to native: {r1}"
    );
    let r2 = window(50, 20);
    let v2: Value = serde_json::from_str(r2.trim()).unwrap();
    assert_eq!(
        v2["hookSpecificOutput"]["permissionDecision"],
        json!("deny"),
        "second partial on the same window can collapse — agent has seen those lines: {r2}"
    );
    let reason2 = v2["hookSpecificOutput"]["permissionDecisionReason"]
        .as_str()
        .unwrap();
    assert!(
        reason2.contains("[DRIP: unchanged (lines 50-69)"),
        "expected window-scoped unchanged header on 2nd partial of same window: {reason2}"
    );
    let r3 = window(100, 20);
    let v3: Value = serde_json::from_str(r3.trim()).unwrap();
    assert_eq!(
        v3["hookSpecificOutput"]["permissionDecision"],
        json!("allow"),
        "different-window partial must pass through until seen_ranges covers it: {r3}"
    );
    let r4 = window(100, 20);
    let v4: Value = serde_json::from_str(r4.trim()).unwrap();
    assert_eq!(
        v4["hookSpecificOutput"]["permissionDecision"],
        json!("deny"),
        "after window 100-119 was delivered natively, the next partial on it can collapse: {r4}"
    );
    // 75..105 covers seen lines 50-69 / 100-119 plus the unseen 70-99 gap.
    let r5 = window(75, 30);
    let v5: Value = serde_json::from_str(r5.trim()).unwrap();
    assert_eq!(
        v5["hookSpecificOutput"]["permissionDecision"],
        json!("allow"),
        "partial straddling a gap in seen_ranges must pass through: {r5}"
    );
}
#[test]
fn partial_read_without_baseline_passes_through_native() {
    // No drip.read_stdout() seeding here: the file is completely unknown.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let fresh = scratch.path().join("fresh.txt");
    make_numbered_file(&fresh, 100);
    let payload = json!({
        "tool_name": "Read",
        "tool_input": {
            "file_path": fresh.to_string_lossy(),
            "offset": 10,
            "limit": 5
        }
    });
    let out = run_claude_hook(&drip, payload);
    let v: Value = serde_json::from_str(out.trim()).unwrap();
    assert_eq!(
        v["hookSpecificOutput"]["permissionDecision"],
        json!("allow"),
        "partial read on unknown file must pass through native: {out}"
    );
}
#[test]
fn partial_read_savings_appear_in_session_meter() {
    // An intercepted windowed read must bump tokens_saved for this session.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let f = scratch.path().join("session_gain.txt");
    make_numbered_file(&f, 200);
    drip.read_stdout(&f);
    // Query the per-session meter; missing field counts as zero.
    let session_saved = || {
        let meter = drip
            .cmd()
            .args(["meter", "--session", &drip.session_id, "--json"])
            .output()
            .unwrap();
        assert!(meter.status.success());
        let v: Value = serde_json::from_slice(&meter.stdout).unwrap();
        v["tokens_saved"].as_i64().unwrap_or(0)
    };
    let saved_before = session_saved();
    run_claude_hook(
        &drip,
        json!({
            "tool_name": "Read",
            "tool_input": {
                "file_path": f.to_string_lossy(),
                "offset": 50,
                "limit": 20
            }
        }),
    );
    let saved_after = session_saved();
    assert!(
        saved_after > saved_before,
        "session meter should reflect partial-read savings: before={saved_before}, after={saved_after}"
    );
}
#[test]
fn partial_read_on_elided_region_passes_through_native() {
    // Lines hidden behind a DRIP-elided stub were never shown to the agent,
    // so a partial read into that region must reach the native tool.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let f = scratch.path().join("compressed.py");
    // Fixture: a visible preamble, a short function, and one long function
    // whose padded body is the elision target.
    let mut src: String = (0..30)
        .map(|i| format!("VISIBLE_{i} = {i} # import-style preamble\n"))
        .collect();
    src.push('\n');
    src.push_str("def short_fn(x):\n return x + 1\n\n");
    src.push_str("def long_fn(arg):\n");
    for i in 0..40 {
        src.push_str(&format!(
            " step_{i} = arg + {i} # padding to ensure the file exceeds the min compression byte threshold\n"
        ));
    }
    src.push_str(" return arg\n");
    fs::write(&f, &src).unwrap();
    assert!(
        src.len() > 1024,
        "fixture must exceed compression byte threshold"
    );
    let first = drip.read_stdout(&f);
    assert!(
        first.contains("(semantic-compressed)"),
        "fixture must trigger compression: {first}"
    );
    assert!(
        first.contains("DRIP-elided"),
        "long_fn body must be elided: {first}"
    );
    let out = run_claude_hook(
        &drip,
        json!({
            "tool_name": "Read",
            "tool_input": {
                "file_path": f.to_string_lossy(),
                "offset": 45,
                "limit": 5
            }
        }),
    );
    let v: Value = serde_json::from_str(out.trim()).unwrap();
    assert_eq!(
        v["hookSpecificOutput"]["permissionDecision"],
        json!("allow"),
        "partial read on elided region must pass to native (agent never saw these lines): {out}"
    );
}
#[test]
fn partial_read_on_visible_region_after_compression_returns_window_unchanged() {
    // The preamble (lines 1-20) survived compression verbatim, so the agent
    // has already seen it: a windowed re-read there collapses.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let f = scratch.path().join("compressed_visible.py");
    // Same fixture shape as the elided-region test: visible preamble + one
    // long padded function that gets elided.
    let mut src: String = (0..30)
        .map(|i| format!("VISIBLE_{i} = {i} # import-style preamble\n"))
        .collect();
    src.push('\n');
    src.push_str("def short_fn(x):\n return x + 1\n\n");
    src.push_str("def long_fn(arg):\n");
    for i in 0..40 {
        src.push_str(&format!(
            " step_{i} = arg + {i} # padding to ensure the file exceeds the min compression byte threshold\n"
        ));
    }
    src.push_str(" return arg\n");
    fs::write(&f, &src).unwrap();
    assert!(
        src.len() > 1024,
        "fixture must exceed compression byte threshold"
    );
    let first = drip.read_stdout(&f);
    assert!(
        first.contains("(semantic-compressed)"),
        "fixture must compress: {first}"
    );
    let out = run_claude_hook(
        &drip,
        json!({
            "tool_name": "Read",
            "tool_input": {
                "file_path": f.to_string_lossy(),
                "offset": 1,
                "limit": 20
            }
        }),
    );
    let v: Value = serde_json::from_str(out.trim()).unwrap();
    assert_eq!(
        v["hookSpecificOutput"]["permissionDecision"],
        json!("deny"),
        "partial read on visible region should be intercepted: {out}"
    );
    let reason = v["hookSpecificOutput"]["permissionDecisionReason"]
        .as_str()
        .unwrap();
    assert!(
        reason.contains("[DRIP: unchanged (lines 1-20)"),
        "expected window-scoped unchanged for visible-region read: {reason}"
    );
}
#[test]
fn partial_read_does_not_mutate_baseline() {
    // A windowed read must not shrink the stored baseline to the window: a
    // later full re-read should still collapse against the FULL file.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let f = scratch.path().join("invariant.txt");
    make_numbered_file(&f, 100);
    drip.read_stdout(&f);
    // Windowed read; its decision is irrelevant here.
    run_claude_hook(
        &drip,
        json!({
            "tool_name": "Read",
            "tool_input": {
                "file_path": f.to_string_lossy(),
                "offset": 30, "limit": 5
            }
        }),
    );
    let r = run_claude_hook(
        &drip,
        json!({
            "tool_name": "Read",
            "tool_input": { "file_path": f.to_string_lossy() }
        }),
    );
    let v: Value = serde_json::from_str(r.trim()).unwrap();
    assert_eq!(
        v["hookSpecificOutput"]["permissionDecision"],
        json!("deny"),
        "full re-read should still be intercepted: {r}"
    );
    let reason = v["hookSpecificOutput"]["permissionDecisionReason"]
        .as_str()
        .unwrap();
    assert!(
        reason.contains("[DRIP: unchanged since last read"),
        "baseline must point at the FULL file, got: {reason}"
    );
}
/// Like `run_claude_hook`, but with extra environment variables layered on
/// top of the standard DRIP_* ones (e.g. token-budget / no-compress knobs).
///
/// Panics (failing the test) if the hook exits non-zero; the panic message
/// includes the captured stderr so the failure is diagnosable from CI logs.
fn run_hook_with_env(drip: &Drip, payload: Value, extra_env: &[(&str, &str)]) -> String {
    let mut cmd = Command::new(&drip.bin);
    cmd.args(["hook", "claude"])
        .env("DRIP_DATA_DIR", drip.data_dir.path())
        .env("DRIP_SESSION_ID", &drip.session_id)
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped());
    for (k, v) in extra_env {
        cmd.env(k, v);
    }
    let mut child = cmd.spawn().unwrap();
    // Take stdin so the pipe closes (EOF) as soon as the payload is written.
    let mut stdin = child.stdin.take().expect("stdin was piped");
    stdin.write_all(payload.to_string().as_bytes()).unwrap();
    drop(stdin);
    let o = child.wait_with_output().unwrap();
    assert!(
        o.status.success(),
        "hook exited with {:?}; stderr: {}",
        o.status,
        String::from_utf8_lossy(&o.stderr)
    );
    String::from_utf8_lossy(&o.stdout).into_owned()
}
/// Fixture: a Python module with an import preamble and 12 heavily padded
/// functions — big enough to exceed the compression byte threshold.
fn long_python_module() -> String {
    let mut s = "import os\nimport sys\nimport json\nimport pathlib\n\n".to_string();
    for i in 0..12 {
        s.push_str(&format!("def function_{i}(arg_a, arg_b, arg_c):\n"));
        (0..20).for_each(|j| {
            s.push_str(&format!(
                " step_{j} = arg_a + arg_b * {j} - arg_c # padding so the fixture exceeds the compression byte threshold\n"
            ));
        });
        s.push_str(" return step_19\n\n");
    }
    s
}
#[test]
fn first_read_over_claude_limit_substitutes_compressed_view() {
    // With a tiny token budget, even the FIRST read must be substituted with
    // the semantic-compressed view instead of reaching the native tool.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let f = scratch.path().join("over_limit.py");
    fs::write(&f, long_python_module()).unwrap();
    let out = run_hook_with_env(
        &drip,
        json!({
            "tool_name": "Read",
            "tool_input": { "file_path": f.to_string_lossy() }
        }),
        &[("DRIP_CLAUDE_READ_TOKEN_BUDGET", "100")],
    );
    let v: Value = serde_json::from_str(out.trim()).unwrap();
    assert_eq!(
        v["hookSpecificOutput"]["permissionDecision"],
        json!("deny"),
        "over-limit first read must substitute, not pass to native: {out}"
    );
    let reason = v["hookSpecificOutput"]["permissionDecisionReason"]
        .as_str()
        .unwrap();
    assert!(
        reason.contains("(semantic-compressed)"),
        "substitute must carry the compressed view, got: {reason}"
    );
    assert!(
        reason.contains("DRIP-elided"),
        "compressed body must show elided-function stubs, got: {reason}"
    );
}
#[test]
fn first_read_over_limit_falls_back_to_allow_when_compression_unavailable() {
    // If compression is disabled there is nothing to substitute with, so the
    // over-budget first read has to stay on the native path.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let f = scratch.path().join("over_limit_no_compress.py");
    fs::write(&f, long_python_module()).unwrap();
    let env = [
        ("DRIP_CLAUDE_READ_TOKEN_BUDGET", "100"),
        ("DRIP_NO_COMPRESS", "1"),
    ];
    let out = run_hook_with_env(
        &drip,
        json!({
            "tool_name": "Read",
            "tool_input": { "file_path": f.to_string_lossy() }
        }),
        &env,
    );
    let v: Value = serde_json::from_str(out.trim()).unwrap();
    assert_eq!(
        v["hookSpecificOutput"]["permissionDecision"],
        json!("allow"),
        "without a compressed view, FullFirst must keep passing native: {out}"
    );
}
#[test]
fn deleted_file_with_baseline_under_symlinked_parent_returns_deleted() {
    // NOTE(review): the name promises a symlinked parent directory, but no
    // symlink is created here — the fixture only covers plain deletion.
    // Verify against the scenario this was meant to pin down.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let ghost = scratch.path().join("ghost.txt");
    fs::write(&ghost, "line 1\nline 2\nline 3\n").unwrap();
    let read_ghost = || {
        run_claude_hook(
            &drip,
            json!({
                "tool_name": "Read",
                "tool_input": { "file_path": ghost.to_string_lossy() }
            }),
        )
    };
    let r1 = read_ghost();
    let v1: Value = serde_json::from_str(r1.trim()).unwrap();
    assert_eq!(
        v1["hookSpecificOutput"]["permissionDecision"],
        json!("allow"),
        "baseline seed: {r1}"
    );
    std::fs::remove_file(&ghost).unwrap();
    let r2 = read_ghost();
    let v2: Value = serde_json::from_str(r2.trim()).unwrap();
    assert_eq!(
        v2["hookSpecificOutput"]["permissionDecision"],
        json!("deny"),
        "deleted-file read must substitute with the deletion sentinel: {r2}"
    );
    let reason = v2["hookSpecificOutput"]["permissionDecisionReason"]
        .as_str()
        .unwrap();
    assert!(
        reason.contains("[DRIP: file deleted since last read"),
        "expected deletion-sentinel header, got: {reason}"
    );
}
#[test]
fn unchanged_reread_of_oversized_file_collapses_to_sentinel() {
    // Even past the large-file cap, an unchanged re-read must collapse to the
    // sentinel instead of bailing back to native.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let f = scratch.path().join("oversized.py");
    // ~140 KB of padded Python, comfortably past LARGE_FILE_BYTES.
    let mut body = String::with_capacity(140 * 1024);
    for i in 0..70 {
        body += &format!("def function_{i}(arg_a, arg_b, arg_c):\n");
        for j in 0..30 {
            body += &format!(
                " step_{j} = arg_a + arg_b * {j} - arg_c # padding line to push past LARGE_FILE_BYTES\n"
            );
        }
        body += " return step_29\n\n";
    }
    assert!(body.len() > 100 * 1024);
    fs::write(&f, &body).unwrap();
    let budget = [("DRIP_CLAUDE_READ_TOKEN_BUDGET", "100")];
    let read_full = || {
        run_hook_with_env(
            &drip,
            json!({
                "tool_name": "Read",
                "tool_input": { "file_path": f.to_string_lossy() }
            }),
            &budget,
        )
    };
    let r1 = read_full();
    let v1: Value = serde_json::from_str(r1.trim()).unwrap();
    assert_eq!(
        v1["hookSpecificOutput"]["permissionDecision"],
        json!("deny"),
        "first read should substitute compressed view: {r1}"
    );
    let r2 = read_full();
    let v2: Value = serde_json::from_str(r2.trim()).unwrap();
    assert_eq!(
        v2["hookSpecificOutput"]["permissionDecision"],
        json!("deny"),
        "re-read of unchanged oversized file must collapse to sentinel, not bail to native: {r2}"
    );
    let reason = v2["hookSpecificOutput"]["permissionDecisionReason"]
        .as_str()
        .unwrap();
    assert!(
        reason.contains("[DRIP: unchanged"),
        "expected unchanged-sentinel header, got: {reason}"
    );
}
#[test]
fn first_read_past_large_file_cap_still_compresses() {
    // A file past the 100 KB cap that still compresses well must get the
    // compressed substitute, not an allow fallback.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let f = scratch.path().join("over_largefile_cap.py");
    let mut body = String::with_capacity(140 * 1024);
    for i in 0..70 {
        body += &format!("def function_{i}(arg_a, arg_b, arg_c):\n");
        for j in 0..30 {
            body += &format!(
                " step_{j} = arg_a + arg_b * {j} - arg_c # padding line to drive the byte count past the 100 KB cap\n"
            );
        }
        body += " return step_29\n\n";
    }
    assert!(
        body.len() > 100 * 1024,
        "fixture must exceed LARGE_FILE_BYTES; got {}B",
        body.len()
    );
    fs::write(&f, &body).unwrap();
    let out = run_hook_with_env(
        &drip,
        json!({
            "tool_name": "Read",
            "tool_input": { "file_path": f.to_string_lossy() }
        }),
        &[("DRIP_CLAUDE_READ_TOKEN_BUDGET", "100")],
    );
    let v: Value = serde_json::from_str(out.trim()).unwrap();
    assert_eq!(
        v["hookSpecificOutput"]["permissionDecision"],
        json!("deny"),
        "huge-but-compressible files must substitute, not fall back to allow: {out}"
    );
    let reason = v["hookSpecificOutput"]["permissionDecisionReason"]
        .as_str()
        .unwrap();
    assert!(
        reason.contains("(semantic-compressed)"),
        "header must mark the payload as the compressed view: {reason}"
    );
}
#[test]
fn first_read_under_limit_still_passes_native() {
    // Default budget (no env overrides): a first read stays on native.
    let drip = Drip::new();
    let scratch = tempfile::tempdir().unwrap();
    let f = scratch.path().join("under_limit.py");
    fs::write(&f, long_python_module()).unwrap();
    let payload = json!({
        "tool_name": "Read",
        "tool_input": { "file_path": f.to_string_lossy() }
    });
    let out = run_hook_with_env(&drip, payload, &[]);
    let v: Value = serde_json::from_str(out.trim()).unwrap();
    assert_eq!(
        v["hookSpecificOutput"]["permissionDecision"],
        json!("allow"),
        "under-budget first reads must stay on the native path: {out}"
    );
}