// torrust_tracker/servers/http/v1/extractors/scrape_request.rs

//! Axum [`extractor`](axum::extract) for the [`Scrape`]
//! request.
//!
//! It parses the query parameters returning an [`Scrape`]
//! request.
//!
//! Refer to [`Scrape`](crate::servers::http::v1::requests::scrape) for more
//! information about the returned structure.
//!
//! It returns a bencoded [`Error`](crate::servers::http::v1::responses::error)
//! response (`500`) if the query parameters are missing or invalid.
//!
//! **Sample scrape request**
//!
//! <http://0.0.0.0:7070/scrape?info_hash=%81%00%00%00%00%00%00%00%00%00%00%00%00%00%00%00%00%00%00%00>
//!
//! **Sample error response**
//!
//! Missing query params for scrape request: <http://0.0.0.0:7070/scrape>
//!
//! ```text
//! d14:failure reason143:Cannot parse query params for scrape request: missing query params for scrape request in src/servers/http/v1/extractors/scrape_request.rs:52:23e
//! ```
//!
//! Invalid query params for scrape request: <http://0.0.0.0:7070/scrape?info_hash=invalid>
//!
//! ```text
//! d14:failure reason235:Cannot parse query params for scrape request: invalid param value invalid for info_hash in not enough bytes for infohash: got 7 bytes, expected 20 src/shared/bit_torrent/info_hash.rs:240:27, src/servers/http/v1/requests/scrape.rs:66:46e
//! ```
30use std::panic::Location;
31
32use axum::extract::FromRequestParts;
33use axum::http::request::Parts;
34use axum::response::{IntoResponse, Response};
35use futures::future::BoxFuture;
36use futures::FutureExt;
37
38use crate::servers::http::v1::query::Query;
39use crate::servers::http::v1::requests::scrape::{ParseScrapeQueryError, Scrape};
40use crate::servers::http::v1::responses;
41
/// Extractor wrapping the [`Scrape`]
/// request parsed from the URL query string.
pub struct ExtractRequest(pub Scrape);
45
46impl<S> FromRequestParts<S> for ExtractRequest
47where
48    S: Send + Sync,
49{
50    type Rejection = Response;
51
52    #[must_use]
53    fn from_request_parts<'life0, 'life1, 'async_trait>(
54        parts: &'life0 mut Parts,
55        _state: &'life1 S,
56    ) -> BoxFuture<'async_trait, Result<Self, Self::Rejection>>
57    where
58        'life0: 'async_trait,
59        'life1: 'async_trait,
60        Self: 'async_trait,
61    {
62        async {
63            match extract_scrape_from(parts.uri.query()) {
64                Ok(scrape_request) => Ok(ExtractRequest(scrape_request)),
65                Err(error) => Err(error.into_response()),
66            }
67        }
68        .boxed()
69    }
70}
71
72fn extract_scrape_from(maybe_raw_query: Option<&str>) -> Result<Scrape, responses::error::Error> {
73    if maybe_raw_query.is_none() {
74        return Err(responses::error::Error::from(ParseScrapeQueryError::MissingParams {
75            location: Location::caller(),
76        }));
77    }
78
79    let query = maybe_raw_query.unwrap().parse::<Query>();
80
81    if let Err(error) = query {
82        return Err(responses::error::Error::from(error));
83    }
84
85    let scrape_request = Scrape::try_from(query.unwrap());
86
87    if let Err(error) = scrape_request {
88        return Err(responses::error::Error::from(error));
89    }
90
91    Ok(scrape_request.unwrap())
92}
93
#[cfg(test)]
mod tests {
    use std::str::FromStr;

    use torrust_tracker_primitives::info_hash::InfoHash;

    use super::extract_scrape_from;
    use crate::servers::http::v1::requests::scrape::Scrape;
    use crate::servers::http::v1::responses::error::Error;

    /// Pairs the URL-encoded form of an info-hash with its parsed value.
    struct TestInfoHash {
        pub bencoded: String,
        pub value: InfoHash,
    }

    /// Builds the info-hash fixture shared by the tests below.
    fn test_info_hash() -> TestInfoHash {
        TestInfoHash {
            bencoded: "%3B%24U%04%CF%5F%11%BB%DB%E1%20%1C%EAjk%F4Z%EE%1B%C0".to_owned(),
            value: InfoHash::from_str("3b245504cf5f11bbdbe1201cea6a6bf45aee1bc0").unwrap(),
        }
    }

    /// Asserts that the bencoded failure reason mentions `error_message`.
    fn assert_error_response(error: &Error, error_message: &str) {
        let mentions_it = error.failure_reason.contains(error_message);
        assert!(
            mentions_it,
            "Error response does not contain message: '{error_message}'. Error: {error:?}"
        );
    }

    #[test]
    fn it_should_extract_the_scrape_request_from_the_url_query_params() {
        let fixture = test_info_hash();

        let query_string = format!("info_hash={}", fixture.bencoded);

        let request = extract_scrape_from(Some(&query_string)).unwrap();

        let expected = Scrape {
            info_hashes: vec![fixture.value],
        };
        assert_eq!(request, expected);
    }

    #[test]
    fn it_should_extract_the_scrape_request_from_the_url_query_params_with_more_than_one_info_hash() {
        let fixture = test_info_hash();

        let query_string = format!("info_hash={}&info_hash={}", fixture.bencoded, fixture.bencoded);

        let request = extract_scrape_from(Some(&query_string)).unwrap();

        let expected = Scrape {
            info_hashes: vec![fixture.value, fixture.value],
        };
        assert_eq!(request, expected);
    }

    #[test]
    fn it_should_reject_a_request_without_query_params() {
        let error = extract_scrape_from(None).unwrap_err();

        assert_error_response(
            &error,
            "Cannot parse query params for scrape request: missing query params for scrape request",
        );
    }

    #[test]
    fn it_should_reject_a_request_with_a_query_that_cannot_be_parsed() {
        let error = extract_scrape_from(Some("param1=value1=value2")).unwrap_err();

        assert_error_response(&error, "Cannot parse query params");
    }

    #[test]
    fn it_should_reject_a_request_with_a_query_that_cannot_be_parsed_into_a_scrape_request() {
        let error = extract_scrape_from(Some("param1=value1")).unwrap_err();

        assert_error_response(&error, "Cannot parse query params for scrape request");
    }
}