// spider_pipeline/console_writer.rs
//! Item Pipeline for writing scraped items to the console.
//!
//! This module provides the `ConsoleWriterPipeline`, a basic and useful
//! item pipeline for debugging and immediate inspection of scraped data.
//! When integrated into a crawler, this pipeline simply logs the received
//! `ScrapedItem`s to the console (or configured tracing output).
//!
//! It serves as a straightforward way to verify that spiders are extracting
//! data correctly and that items are flowing through the pipeline as expected.
use spider_util::{error::PipelineError, item::ScrapedItem};
use crate::pipeline::Pipeline;
use async_trait::async_trait;
use tracing::info;

/// A pipeline that prints scraped items to the console.
///
/// This is a zero-sized marker type: it carries no configuration or state,
/// existing only to implement the `Pipeline` trait with logging behavior.
/// Deriving `Debug`, `Clone`, and `Copy` is free for a unit struct and lets
/// callers log, duplicate, or pass the pipeline by value.
#[derive(Debug, Clone, Copy)]
pub struct ConsoleWriterPipeline;

18impl ConsoleWriterPipeline {
19    /// Creates a new `ConsoleWriterPipeline`.
20    pub fn new() -> Self {
21        Self
22    }
23}
24
25impl Default for ConsoleWriterPipeline {
26    fn default() -> Self {
27        Self::new()
28    }
29}
30
31#[async_trait]
32impl<I: ScrapedItem> Pipeline<I> for ConsoleWriterPipeline {
33    fn name(&self) -> &str {
34        "ConsoleWriterPipeline"
35    }
36
37    async fn process_item(&self, item: I) -> Result<Option<I>, PipelineError> {
38        info!("Pipeline processing item: {:?}", item);
39        Ok(Some(item))
40    }
41}