wme-stream 0.1.1

Streaming utilities for the Wikimedia Enterprise API
//! Streaming utilities for the Wikimedia Enterprise API.
//!
//! This crate provides utilities for processing NDJSON streams from Wikimedia Enterprise
//! APIs (Snapshot and Realtime). It handles deduplication, checkpoint/resume functionality,
//! and efficient streaming parsing.
//!
//! # Overview
//!
//! Wikimedia Enterprise provides data in several formats:
//! - **Snapshot API** - Complete project dumps as `.tar.gz` with NDJSON files
//! - **Realtime API** - Streaming updates via SSE or NDJSON
//! - **Realtime Batch** - Hourly bundles of updates as `.tar.gz`
//!
//! This crate helps you process these data sources efficiently.
//!
//! # Key Features
//!
//! - **NDJSON Streaming** - Parse newline-delimited JSON from any source
//! - **Deduplication** - Handle duplicate articles automatically (keeping the latest version)
//! - **Checkpoint/Resume** - Save and resume long-running downloads
//! - **Visitor Pattern** - Process articles without full materialization (sketched in an example below)
//! - **Progress Tracking** - Events and statistics for monitoring
//!
//! # Modules
//!
//! - [`ndjson`] - NDJSON parsing from streams and files
//! - [`dedup`] - Duplicate detection and removal
//! - [`checkpoint`] - Resume checkpoint save/load
//! - [`visitor`] - Visitor trait for low-memory processing
//!
//! # Example: Parse NDJSON Stream
//!
//! ```rust,no_run
//! use wme_stream::NdjsonStream;
//! use wme_models::Article;
//! use futures::StreamExt;
//! use std::io::BufReader;
//! use std::fs::File;
//! use std::pin::pin;
//!
//! # async fn example() -> Result<(), Box<dyn std::error::Error>> {
//! // Open NDJSON file
//! let input_file = File::open("articles.ndjson")?;
//! let reader = BufReader::new(input_file);
//!
//! // Create line stream
//! let lines = NdjsonStream::from_reader(reader);
//!
//! // Parse into Articles
//! let articles = NdjsonStream::parse_articles(lines);
//!
//! // Process each article (pin the stream first)
//! let mut pinned = pin!(articles);
//! while let Some(result) = pinned.next().await {
//!     match result {
//!         Ok(article) => println!("{}: {}", article.identifier, article.name),
//!         Err(e) => eprintln!("Error: {}", e),
//!     }
//! }
//! # Ok(())
//! # }
//! ```
//!
//! # Example: Deduplicate Stream
//!
//! ```rust,no_run
//! use wme_stream::dedup_stream;
//! use futures::StreamExt;
//! use std::pin::pin;
//!
//! # async fn example<S>(stream: S)
//! # where
//! #     S: futures::Stream<Item = Result<wme_models::Article, wme_stream::StreamError>>,
//! # {
//! // Wrap stream to deduplicate
//! let deduplicated = dedup_stream(stream);
//!
//! // Pin the stream before iterating
//! let mut pinned = pin!(deduplicated);
//! while let Some(result) = pinned.next().await {
//!     // Only latest version of each article
//!     let _article = result.unwrap();
//! }
//! # }
//! ```
//!
//! # Example: Save/Resume Checkpoint
//!
//! ```rust,no_run
//! use wme_stream::ResumeCheckpoint;
//!
//! # async fn example() -> Result<(), wme_stream::StreamError> {
//! // Save progress after processing 1000 articles
//! let checkpoint = ResumeCheckpoint::new(
//!     "enwiki_namespace_0",
//!     "chunk_0",
//!     5000,  // line offset
//!     1000,  // articles processed
//! );
//! checkpoint.save("/data/checkpoints/").await?;
//!
//! // Later: resume from checkpoint
//! let checkpoint = ResumeCheckpoint::load(
//!     "/data/checkpoints/enwiki_namespace_0.checkpoint.json"
//! ).await?;
//! println!("Resuming from line {}", checkpoint.line_offset);
//! # Ok(())
//! # }
//! ```
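//!
//! # Example: Visitor Pattern
//!
//! A minimal sketch of processing via a visitor instead of collecting articles.
//! The `visit` method name below is an assumption for illustration (see the
//! [`visitor`] module for the actual [`ArticleVisitor`] trait), so the block
//! is marked `ignore`:
//!
//! ```rust,ignore
//! use wme_stream::ArticleVisitor;
//! use wme_models::Article;
//!
//! /// Counts articles without buffering them in memory.
//! struct CountingVisitor {
//!     count: u64,
//! }
//!
//! impl ArticleVisitor for CountingVisitor {
//!     // Hypothetical signature; consult the `visitor` module docs.
//!     fn visit(&mut self, _article: &Article) {
//!         self.count += 1;
//!     }
//! }
//! ```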

#![doc = include_str!("../README.md")]

use std::path::PathBuf;

use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};

pub mod checkpoint;
pub mod dedup;
pub mod ndjson;
pub mod visitor;

pub use checkpoint::ResumeCheckpoint;
pub use dedup::{dedup_collect, dedup_stream};
pub use ndjson::{NdjsonExt, NdjsonStream};
pub use visitor::{ArticleVisitor, NoOpVisitor, StatsVisitor};

/// Progress event for snapshot processing.
///
/// These events allow you to monitor the progress of snapshot downloads
/// and processing. Use them to update progress bars or log activity.
///
/// # Example
///
/// ```rust,no_run
/// use wme_stream::SnapshotEvent;
///
/// fn handle_event(event: SnapshotEvent) {
///     match event {
///         SnapshotEvent::ChunkStarted { chunk_id, size_bytes } => {
///             println!("Starting download of {} ({} bytes)", chunk_id, size_bytes);
///         }
///         SnapshotEvent::ChunkProgress { chunk_id, bytes_downloaded, bytes_total } => {
///             let pct = (bytes_downloaded as f64 / bytes_total as f64) * 100.0;
///             println!("{}: {:.1}%", chunk_id, pct);
///         }
///         SnapshotEvent::ArticleProcessed { article_id, title } => {
///             println!("Processed: {}", title);
///         }
///         _ => {}
///     }
/// }
/// ```
#[derive(Debug, Clone)]
pub enum SnapshotEvent {
    /// Manifest loaded successfully.
    ManifestLoaded {
        /// Snapshot identifier
        snapshot_id: String,
        /// Number of chunks
        chunks: usize,
    },
    /// Chunk download started.
    ChunkStarted {
        /// Chunk identifier
        chunk_id: String,
        /// Size in bytes
        size_bytes: u64,
    },
    /// Chunk download progress.
    ChunkProgress {
        /// Chunk identifier
        chunk_id: String,
        /// Bytes downloaded so far
        bytes_downloaded: u64,
        /// Total bytes
        bytes_total: u64,
    },
    /// Chunk processing completed.
    ChunkCompleted {
        /// Chunk identifier
        chunk_id: String,
        /// Number of articles processed
        articles_count: u64,
        /// Elapsed time
        elapsed: std::time::Duration,
    },
    /// Individual article processed.
    ArticleProcessed {
        /// Article ID
        article_id: u64,
        /// Article title
        title: String,
    },
    /// Checkpoint saved.
    CheckpointSaved {
        /// Path to checkpoint file
        path: PathBuf,
    },
    /// Error occurred.
    Error {
        /// Error details
        error: StreamError,
        /// Whether the error is recoverable
        recoverable: bool,
    },
    /// Processing completed.
    Completed {
        /// Total articles processed
        total_articles: u64,
        /// Total bytes processed
        total_bytes: u64,
    },
}

/// Errors that can occur during streaming.
///
/// These errors cover IO, parsing, decompression, network, and resume failures.
/// All errors implement `std::error::Error` and are cloneable.
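///
/// # Example
///
/// A sketch of matching on error variants, e.g. to decide whether to retry.
/// The retry policy here is illustrative, not prescribed by this crate:
///
/// ```rust
/// use wme_stream::StreamError;
///
/// fn is_retryable(err: &StreamError) -> bool {
///     // Transient failures are worth retrying; data errors are not.
///     matches!(err, StreamError::Network(_) | StreamError::Io(_))
/// }
///
/// assert!(is_retryable(&StreamError::Network("timeout".to_string())));
/// assert!(!is_retryable(&StreamError::JsonParse("bad line".to_string())));
/// ```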
#[derive(thiserror::Error, Debug, Clone)]
pub enum StreamError {
    /// IO error (file read/write).
    #[error("IO error: {0}")]
    Io(String),
    /// JSON parse error.
    #[error("JSON parse error: {0}")]
    JsonParse(String),
    /// Decompression error (tar.gz).
    #[error("Decompression error: {0}")]
    Decompression(String),
    /// Network error.
    #[error("Network error: {0}")]
    Network(String),
    /// Checksum mismatch.
    #[error("Checksum mismatch for {file}")]
    ChecksumMismatch {
        /// File identifier
        file: String,
    },
    /// Resume error (checkpoint load/save).
    #[error("Resume error: {0}")]
    Resume(String),
}

/// Processing statistics.
///
/// Tracks metrics during snapshot/realtime processing including
/// articles processed, bytes transferred, errors, and duplicates.
///
/// # Example
///
/// ```rust
/// use wme_stream::ProcessingStats;
///
/// let mut stats = ProcessingStats::new();
/// stats.articles_processed = 1000;
/// stats.bytes_downloaded = 1024 * 1024 * 100; // 100 MB
///
/// // 0.0 here: less than a full second has elapsed since `new()` set `started_at`.
/// let rate = stats.rate();
/// println!("Processing rate: {} articles/sec", rate);
/// ```
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ProcessingStats {
    /// Total articles processed
    pub articles_processed: u64,
    /// Total bytes downloaded
    pub bytes_downloaded: u64,
    /// Total bytes decompressed
    pub bytes_decompressed: u64,
    /// Errors encountered
    pub errors: u64,
    /// Duplicates removed
    pub duplicates: u64,
    /// Start time
    pub started_at: Option<DateTime<Utc>>,
    /// End time
    pub completed_at: Option<DateTime<Utc>>,
}

impl ProcessingStats {
    /// Create new empty stats with current timestamp.
    ///
    /// Sets `started_at` to the current time.
    pub fn new() -> Self {
        Self {
            started_at: Some(Utc::now()),
            ..Default::default()
        }
    }

    /// Merge another stats into this one.
    ///
    /// Combines counts from another stats instance. Useful when
    /// aggregating stats from multiple chunks or workers.
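    ///
    /// # Example
    ///
    /// Aggregating per-chunk stats into a running total:
    ///
    /// ```rust
    /// use wme_stream::ProcessingStats;
    ///
    /// let mut total = ProcessingStats::new();
    /// let mut chunk = ProcessingStats::new();
    /// chunk.articles_processed = 250;
    /// chunk.duplicates = 3;
    ///
    /// total.merge(&chunk);
    /// assert_eq!(total.articles_processed, 250);
    /// assert_eq!(total.duplicates, 3);
    /// ```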
    pub fn merge(&mut self, other: &ProcessingStats) {
        self.articles_processed += other.articles_processed;
        self.bytes_downloaded += other.bytes_downloaded;
        self.bytes_decompressed += other.bytes_decompressed;
        self.errors += other.errors;
        self.duplicates += other.duplicates;
    }

    /// Calculate processing rate (articles per second).
    ///
    /// Returns 0.0 if `started_at` is not set or if less than one full second
    /// has elapsed (the elapsed time is truncated to whole seconds). Uses
    /// `completed_at` as the end time if set, otherwise the current time.
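    ///
    /// # Example
    ///
    /// Computing a rate over a backdated window:
    ///
    /// ```rust
    /// use wme_stream::ProcessingStats;
    /// use chrono::{Duration, Utc};
    ///
    /// let mut stats = ProcessingStats::new();
    /// // Backdate the start so a measurable interval has elapsed.
    /// stats.started_at = Some(Utc::now() - Duration::seconds(10));
    /// stats.articles_processed = 100;
    ///
    /// assert!((stats.rate() - 10.0).abs() < 1.0);
    /// ```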
    pub fn rate(&self) -> f64 {
        if let Some(started) = self.started_at {
            let duration = self
                .completed_at
                .unwrap_or_else(Utc::now)
                .signed_duration_since(started);
            let seconds = duration.num_seconds() as f64;
            if seconds > 0.0 {
                return self.articles_processed as f64 / seconds;
            }
        }
        0.0
    }

    /// Mark processing as completed.
    ///
    /// Sets `completed_at` to the current time.
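    ///
    /// # Example
    ///
    /// ```rust
    /// use wme_stream::ProcessingStats;
    ///
    /// let mut stats = ProcessingStats::new();
    /// stats.complete();
    /// assert!(stats.completed_at.is_some());
    /// ```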
    pub fn complete(&mut self) {
        self.completed_at = Some(Utc::now());
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use chrono::Duration;

    #[test]
    fn test_processing_stats_new() {
        let stats = ProcessingStats::new();
        assert!(stats.started_at.is_some());
        assert_eq!(stats.articles_processed, 0);
        assert_eq!(stats.rate(), 0.0); // No time elapsed yet
    }

    #[test]
    fn test_processing_stats_merge() {
        let mut stats1 = ProcessingStats::new();
        stats1.articles_processed = 100;
        stats1.bytes_downloaded = 1000;
        stats1.errors = 5;

        let mut stats2 = ProcessingStats::new();
        stats2.articles_processed = 50;
        stats2.bytes_downloaded = 500;
        stats2.duplicates = 10;

        stats1.merge(&stats2);

        assert_eq!(stats1.articles_processed, 150);
        assert_eq!(stats1.bytes_downloaded, 1500);
        assert_eq!(stats1.errors, 5);
        assert_eq!(stats1.duplicates, 10);
    }

    #[test]
    fn test_processing_stats_rate() {
        let mut stats = ProcessingStats::new();
        stats.started_at = Some(Utc::now() - Duration::seconds(10));
        stats.articles_processed = 100;

        let rate = stats.rate();
        assert!(rate > 0.0);
        assert!((rate - 10.0).abs() < 1.0); // Approximately 10 articles/sec
    }

    #[test]
    fn test_processing_stats_complete() {
        let mut stats = ProcessingStats::new();
        assert!(stats.completed_at.is_none());

        stats.complete();
        assert!(stats.completed_at.is_some());
    }

    #[test]
    fn test_stream_error_display() {
        let err = StreamError::Io("file not found".to_string());
        assert!(err.to_string().contains("IO error"));
        assert!(err.to_string().contains("file not found"));

        let err = StreamError::JsonParse("invalid json".to_string());
        assert!(err.to_string().contains("JSON parse error"));
    }

    #[test]
    fn test_snapshot_event_creation() {
        let event = SnapshotEvent::ManifestLoaded {
            snapshot_id: "enwiki_namespace_0".to_string(),
            chunks: 5,
        };

        match event {
            SnapshotEvent::ManifestLoaded {
                snapshot_id,
                chunks,
            } => {
                assert_eq!(snapshot_id, "enwiki_namespace_0");
                assert_eq!(chunks, 5);
            }
            _ => panic!("Wrong event type"),
        }
    }
}