sqz-engine 0.5.0

Adaptive multi-pass LLM context compression engine — content-aware pipeline with AST parsing, token counting, session persistence, and budget tracking
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
use crate::types::Provider;

/// A detected prompt cache boundary in a message sequence.
///
/// Produced by `PromptCacheDetector::detect_boundary` and consumed by
/// `PromptCacheDetector::split_at_boundary`.
#[derive(Debug, Clone, PartialEq)]
pub struct CacheBoundary {
    /// Byte offset in the concatenated content string where the boundary falls.
    /// Content before this offset is cached; content at/after is not.
    /// NOTE(review): for the OpenAI heuristic this is a fixed byte position
    /// (4096), which is not guaranteed to lie on a UTF-8 char boundary.
    pub offset: usize,
    /// The provider that owns this cache boundary.
    pub provider: Provider,
    /// The cache discount fraction (0.9 for Anthropic, 0.5 for OpenAI).
    pub discount: f64,
}

/// A single message in a conversation, used for cache boundary detection.
#[derive(Debug, Clone)]
pub struct Message {
    /// Conversation role (e.g. "user", "assistant", "system"). Not inspected
    /// by the detector itself; carried for completeness.
    pub role: String,
    /// The message body. Boundary offsets are byte positions into the
    /// concatenation of all messages' `content` fields, in order.
    pub content: String,
    /// Anthropic-style cache control marker.
    /// When `Some("ephemeral")`, this message has a cache_control marker;
    /// any other value is ignored by the detector.
    pub cache_control: Option<String>,
}

/// Detects prompt cache boundaries for Anthropic and OpenAI API formats.
///
/// - **Anthropic**: uses `cache_control: {"type": "ephemeral"}` markers on
///   individual messages. The boundary is placed after the last marked message.
/// - **OpenAI**: automatically caches the first 1024 tokens of a prompt.
///   Detected heuristically: if total content length > 4096 chars (≈1024 tokens),
///   the boundary is at char position 4096. (The implementation measures
///   `str::len()`, i.e. bytes — equivalent to chars only for ASCII content.)
/// - **Google**: no cache boundary concept; always returns `None`.
///
/// Stateless unit struct; all methods take `&self` only for call-site symmetry.
pub struct PromptCacheDetector;

impl PromptCacheDetector {
    /// Detect a cache boundary in the given message sequence for the specified
    /// provider. Returns `None` if no boundary is detected.
    ///
    /// Dispatches to the provider-specific strategy; Google has no prompt
    /// cache concept and always yields `None`.
    pub fn detect_boundary(
        &self,
        messages: &[Message],
        provider: Provider,
    ) -> Option<CacheBoundary> {
        match provider {
            Provider::Anthropic => self.detect_anthropic(messages),
            Provider::OpenAI => self.detect_openai(messages),
            Provider::Google => None,
        }
    }

    /// Split `content` at the given boundary offset.
    ///
    /// Returns `(before, after)` where `before` is byte-identical to the
    /// original content up to the effective split point, and `after` is the
    /// remainder.
    ///
    /// The split point is `boundary.offset` clamped to `content.len()` and
    /// then snapped *down* to the nearest UTF-8 character boundary. The
    /// OpenAI heuristic places the boundary at a fixed byte position, which
    /// can fall inside a multi-byte character; without the snap, the slice
    /// below would panic on such content. For ASCII content (and all
    /// Anthropic boundaries, which always end on a whole message) the snap
    /// is a no-op, so previous behavior is preserved.
    ///
    /// If `boundary.offset >= content.len()`, `after` will be empty.
    pub fn split_at_boundary(&self, content: &str, boundary: &CacheBoundary) -> (String, String) {
        let mut offset = boundary.offset.min(content.len());
        // Offset 0 is always a char boundary, so this loop terminates.
        while !content.is_char_boundary(offset) {
            offset -= 1;
        }
        let before = content[..offset].to_owned();
        let after = content[offset..].to_owned();
        (before, after)
    }

    // -----------------------------------------------------------------------
    // Private helpers
    // -----------------------------------------------------------------------

    /// Anthropic strategy: the boundary sits immediately after the *last*
    /// message carrying a `cache_control: "ephemeral"` marker. Offsets are
    /// byte positions into the concatenation of all message contents.
    fn detect_anthropic(&self, messages: &[Message]) -> Option<CacheBoundary> {
        let mut offset: usize = 0;
        let mut boundary_offset: Option<usize> = None;

        for msg in messages {
            offset += msg.content.len();
            if msg.cache_control.as_deref() == Some("ephemeral") {
                // Keep overwriting so we end up with the last marked message.
                boundary_offset = Some(offset);
            }
        }

        boundary_offset.map(|off| CacheBoundary {
            offset: off,
            provider: Provider::Anthropic,
            discount: 0.9,
        })
    }

    /// OpenAI strategy: the API automatically caches the first 1024 tokens.
    /// Heuristic: if total content length exceeds 4096 bytes (≈1024 tokens at
    /// ~4 chars/token, exact only for ASCII), place the boundary at byte 4096.
    /// `split_at_boundary` snaps this down to a char boundary if needed.
    fn detect_openai(&self, messages: &[Message]) -> Option<CacheBoundary> {
        const OPENAI_CACHE_CHAR_THRESHOLD: usize = 4096;

        let total_len: usize = messages.iter().map(|m| m.content.len()).sum();
        if total_len > OPENAI_CACHE_CHAR_THRESHOLD {
            Some(CacheBoundary {
                offset: OPENAI_CACHE_CHAR_THRESHOLD,
                provider: Provider::OpenAI,
                discount: 0.5,
            })
        } else {
            None
        }
    }
}

// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------

#[cfg(test)]
mod tests {
    use super::*;

    /// Build a plain message with no cache_control marker.
    fn msg(role: &str, content: &str) -> Message {
        Message {
            role: role.to_owned(),
            content: content.to_owned(),
            cache_control: None,
        }
    }

    /// Build a message carrying an Anthropic "ephemeral" cache marker.
    fn cached_msg(role: &str, content: &str) -> Message {
        Message {
            role: role.to_owned(),
            content: content.to_owned(),
            cache_control: Some("ephemeral".to_owned()),
        }
    }

    // -----------------------------------------------------------------------
    // Anthropic tests
    // -----------------------------------------------------------------------

    #[test]
    fn anthropic_no_cache_control_returns_none() {
        let detector = PromptCacheDetector;
        let messages = vec![msg("user", "hello"), msg("assistant", "world")];
        assert!(detector.detect_boundary(&messages, Provider::Anthropic).is_none());
    }

    #[test]
    fn anthropic_single_cached_message() {
        let detector = PromptCacheDetector;
        let messages = vec![cached_msg("user", "hello world")];
        let boundary = detector
            .detect_boundary(&messages, Provider::Anthropic)
            .unwrap();
        assert_eq!(boundary.offset, "hello world".len());
        assert_eq!(boundary.discount, 0.9);
        assert_eq!(boundary.provider, Provider::Anthropic);
    }

    #[test]
    fn anthropic_boundary_after_last_cached_message() {
        let detector = PromptCacheDetector;
        // Two cached messages; boundary should be after the second one.
        let messages = vec![
            cached_msg("user", "first"),   // offset 5
            msg("assistant", "middle"),    // offset 5+6=11
            cached_msg("user", "second"),  // offset 11+6=17
            msg("assistant", "tail"),      // offset 17+4=21
        ];
        let boundary = detector
            .detect_boundary(&messages, Provider::Anthropic)
            .unwrap();
        // "first" (5) + "middle" (6) + "second" (6) = 17
        assert_eq!(boundary.offset, 17);
    }

    #[test]
    fn anthropic_only_first_message_cached() {
        let detector = PromptCacheDetector;
        let messages = vec![
            cached_msg("system", "sys"),  // offset 3
            msg("user", "query"),         // offset 3+5=8
        ];
        let boundary = detector
            .detect_boundary(&messages, Provider::Anthropic)
            .unwrap();
        assert_eq!(boundary.offset, 3);
    }

    // -----------------------------------------------------------------------
    // OpenAI tests
    // -----------------------------------------------------------------------

    #[test]
    fn openai_short_content_returns_none() {
        let detector = PromptCacheDetector;
        let messages = vec![msg("user", "short")];
        assert!(detector.detect_boundary(&messages, Provider::OpenAI).is_none());
    }

    #[test]
    fn openai_long_content_returns_boundary_at_4096() {
        let detector = PromptCacheDetector;
        let long_content = "x".repeat(5000);
        let messages = vec![msg("user", &long_content)];
        let boundary = detector
            .detect_boundary(&messages, Provider::OpenAI)
            .unwrap();
        assert_eq!(boundary.offset, 4096);
        assert_eq!(boundary.discount, 0.5);
        assert_eq!(boundary.provider, Provider::OpenAI);
    }

    #[test]
    fn openai_exactly_at_threshold_returns_none() {
        let detector = PromptCacheDetector;
        let content = "x".repeat(4096);
        let messages = vec![msg("user", &content)];
        // total_len == 4096, not > 4096, so no boundary
        assert!(detector.detect_boundary(&messages, Provider::OpenAI).is_none());
    }

    // -----------------------------------------------------------------------
    // Google tests
    // -----------------------------------------------------------------------

    #[test]
    fn google_always_returns_none() {
        let detector = PromptCacheDetector;
        // Even with a cache marker and content far above the OpenAI threshold.
        let messages = vec![cached_msg("user", "x".repeat(10000).as_str())];
        assert!(detector.detect_boundary(&messages, Provider::Google).is_none());
    }

    // -----------------------------------------------------------------------
    // split_at_boundary tests
    // -----------------------------------------------------------------------

    #[test]
    fn split_at_boundary_basic() {
        let detector = PromptCacheDetector;
        let content = "hello world";
        let boundary = CacheBoundary {
            offset: 5,
            provider: Provider::Anthropic,
            discount: 0.9,
        };
        let (before, after) = detector.split_at_boundary(content, &boundary);
        assert_eq!(before, "hello");
        assert_eq!(after, " world");
    }

    #[test]
    fn split_at_boundary_offset_zero() {
        let detector = PromptCacheDetector;
        let content = "hello";
        let boundary = CacheBoundary {
            offset: 0,
            provider: Provider::Anthropic,
            discount: 0.9,
        };
        let (before, after) = detector.split_at_boundary(content, &boundary);
        assert_eq!(before, "");
        assert_eq!(after, "hello");
    }

    #[test]
    fn split_at_boundary_offset_beyond_end() {
        let detector = PromptCacheDetector;
        let content = "hello";
        let boundary = CacheBoundary {
            offset: 100,
            provider: Provider::Anthropic,
            discount: 0.9,
        };
        let (before, after) = detector.split_at_boundary(content, &boundary);
        assert_eq!(before, "hello");
        assert_eq!(after, "");
    }

    #[test]
    fn split_before_is_byte_identical() {
        let detector = PromptCacheDetector;
        let content = "abcdefghij";
        let boundary = CacheBoundary {
            offset: 5,
            provider: Provider::OpenAI,
            discount: 0.5,
        };
        let (before, _) = detector.split_at_boundary(content, &boundary);
        // before must be byte-identical to the original prefix
        assert_eq!(before.as_bytes(), &content.as_bytes()[..5]);
    }

    // -----------------------------------------------------------------------
    // Property tests
    // -----------------------------------------------------------------------

    use proptest::prelude::*;

    /// Strategy: generate a list of messages where at least one has a
    /// cache_control marker (Anthropic format).
    fn anthropic_messages_with_boundary() -> impl Strategy<Value = Vec<Message>> {
        // Generate 1-8 messages; at least one will be marked as cached.
        (1usize..=8usize).prop_flat_map(|n| {
            // For each message, generate content and whether it's cached.
            let msg_strategy = (
                prop_oneof![Just("user"), Just("assistant"), Just("system")],
                "[a-z]{1,50}",
                any::<bool>(),
            );
            prop::collection::vec(msg_strategy, n).prop_filter(
                "at least one cached message",
                |msgs| msgs.iter().any(|(_, _, cached)| *cached),
            )
        })
        .prop_map(|msgs| {
            msgs.into_iter()
                .map(|(role, content, cached)| Message {
                    role: role.to_owned(),
                    content,
                    cache_control: if cached {
                        Some("ephemeral".to_owned())
                    } else {
                        None
                    },
                })
                .collect()
        })
    }

    proptest! {
        /// **Validates: Requirements 4.1, 4.2**
        ///
        /// Property 3: Prompt cache boundary preservation.
        ///
        /// For any message sequence containing a prompt cache boundary marker
        /// (Anthropic or OpenAI format), the Prompt_Cache_Detector SHALL
        /// identify the boundary, and content preceding the boundary SHALL be
        /// byte-identical to the original.
        #[test]
        fn prop_prompt_cache_boundary_preservation(
            messages in anthropic_messages_with_boundary(),
        ) {
            let detector = PromptCacheDetector;

            // 1. Boundary must be detected (at least one cached message exists).
            let boundary = detector.detect_boundary(&messages, Provider::Anthropic);
            prop_assert!(
                boundary.is_some(),
                "expected boundary to be detected for messages with cache_control markers"
            );
            let boundary = boundary.unwrap();

            // 2. Discount must be 0.9 for Anthropic.
            prop_assert_eq!(boundary.discount, 0.9);

            // 3. Content before the boundary must be byte-identical to the original.
            //    Concatenate all message contents to form the "full content".
            let full_content: String = messages.iter().map(|m| m.content.as_str()).collect();
            let (before, _after) = detector.split_at_boundary(&full_content, &boundary);

            let expected_prefix = &full_content[..boundary.offset.min(full_content.len())];
            prop_assert_eq!(
                before.as_bytes(),
                expected_prefix.as_bytes(),
                "content before boundary must be byte-identical to original prefix"
            );

            // 4. The boundary offset must be at or after the last cached message's
            //    content end, and must not exceed the total content length.
            prop_assert!(
                boundary.offset <= full_content.len(),
                "boundary offset {} exceeds content length {}",
                boundary.offset,
                full_content.len()
            );

            // 5. Verify the boundary is placed after the last cached message.
            let mut cumulative = 0usize;
            let mut last_cached_end = 0usize;
            for msg in &messages {
                cumulative += msg.content.len();
                if msg.cache_control.as_deref() == Some("ephemeral") {
                    last_cached_end = cumulative;
                }
            }
            prop_assert_eq!(
                boundary.offset,
                last_cached_end,
                "boundary offset should be after the last cached message"
            );
        }
    }

    proptest! {
        /// Property 3 (OpenAI variant): For any message sequence whose total
        /// content length exceeds 4096 chars, the detector SHALL identify a
        /// boundary at offset 4096, and content before that offset SHALL be
        /// byte-identical to the original.
        #[test]
        fn prop_openai_cache_boundary_preservation(
            // ASCII content strictly longer than the 4096-byte threshold.
            content in "[a-z]{4097,5000}",
        ) {
            let detector = PromptCacheDetector;
            let messages = vec![Message {
                role: "user".to_owned(),
                content: content.clone(),
                cache_control: None,
            }];

            let boundary = detector.detect_boundary(&messages, Provider::OpenAI);
            prop_assert!(boundary.is_some(), "expected OpenAI boundary for long content");
            let boundary = boundary.unwrap();

            prop_assert_eq!(boundary.offset, 4096);
            prop_assert_eq!(boundary.discount, 0.5);

            let (before, _) = detector.split_at_boundary(&content, &boundary);
            prop_assert_eq!(
                before.as_bytes(),
                &content.as_bytes()[..4096],
                "content before OpenAI boundary must be byte-identical"
            );
        }
    }
}