torrust_tracker/console/clients/checker/checks/http.rs

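//! `Announce` and `scrape` health checks for HTTP trackers, used by the
//! `checker` console client.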
use std::str::FromStr as _;
use std::time::Duration;

use serde::Serialize;
use torrust_tracker_primitives::info_hash::InfoHash;
use url::Url;

use crate::console::clients::http::Error;
use crate::shared::bit_torrent::tracker::http::client::responses::announce::Announce;
use crate::shared::bit_torrent::tracker::http::client::responses::scrape;
use crate::shared::bit_torrent::tracker::http::client::{requests, Client};

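/// The results of checking a single HTTP tracker: the tracker URL together
/// with the outcome of every check run against it.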
#[derive(Debug, Clone, Serialize)]
pub struct Checks {
    url: Url,
    results: Vec<(Check, Result<(), Error>)>,
}

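/// The kind of request used to probe the tracker.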
#[derive(Debug, Clone, Serialize)]
pub enum Check {
    Announce,
    Scrape,
}

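/// Runs the `announce` and `scrape` checks against every given HTTP tracker.
///
/// A tracker's [`Checks`] is returned as `Ok` when both checks succeed and as
/// `Err` when at least one of them fails.
///
/// A minimal usage sketch (the tracker URL is an assumption for illustration;
/// the call must be made from an async runtime):
///
/// ```rust,ignore
/// let trackers = vec![Url::parse("http://127.0.0.1:7070").unwrap()];
/// let results = run(trackers, Duration::from_secs(5)).await;
/// ```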
pub async fn run(http_trackers: Vec<Url>, timeout: Duration) -> Vec<Result<Checks, Checks>> {
    let mut results = Vec::default();

    tracing::debug!("HTTP trackers ...");

    for ref url in http_trackers {
        // Requests are sent to the tracker's base URL, so any path in the
        // configured URL is dropped.
        let mut base_url = url.clone();
        base_url.set_path("");

        let mut checks = Checks {
            url: url.clone(),
            results: Vec::default(),
        };

        // Announce
        {
            let check = check_http_announce(&base_url, timeout).await.map(|_| ());

            checks.results.push((Check::Announce, check));
        }

        // Scrape
        {
            let check = check_http_scrape(&base_url, timeout).await.map(|_| ());

            checks.results.push((Check::Scrape, check));
        }

        if checks.results.iter().any(|f| f.1.is_err()) {
            results.push(Err(checks));
        } else {
            results.push(Ok(checks));
        }
    }

    results
}

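/// Sends an `announce` request with a fixed test info-hash and checks that
/// the response body is a valid bencoded [`Announce`] response.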
async fn check_http_announce(url: &Url, timeout: Duration) -> Result<Announce, Error> {
    // A fixed info-hash; the check only cares that the tracker answers.
    let info_hash_str = "9c38422213e30bff212b30c360d26f9a02136422".to_string(); // # DevSkim: ignore DS173237
    let info_hash = InfoHash::from_str(&info_hash_str).expect("a valid info-hash is required");

    let client = Client::new(url.clone(), timeout).map_err(|err| Error::HttpClientError { err })?;

    // Send an announce request built from default values plus the info-hash.
    let response = client
        .announce(
            &requests::announce::QueryBuilder::with_default_values()
                .with_info_hash(&info_hash)
                .query(),
        )
        .await
        .map_err(|err| Error::HttpClientError { err })?;

    // Read the raw body and try to decode it as a bencoded announce response.
    let response = response.bytes().await.map_err(|e| Error::ResponseError { err: e.into() })?;

    let response = serde_bencode::from_bytes::<Announce>(&response).map_err(|e| Error::ParseBencodeError {
        data: response,
        err: e.into(),
    })?;

    Ok(response)
}

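/// Sends a `scrape` request for a fixed test info-hash and checks that the
/// response body parses as a bencoded [`scrape::Response`].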
async fn check_http_scrape(url: &Url, timeout: Duration) -> Result<scrape::Response, Error> {
    let info_hashes: Vec<String> = vec!["9c38422213e30bff212b30c360d26f9a02136422".to_string()]; // # DevSkim: ignore DS173237
    let query = requests::scrape::Query::try_from(info_hashes).expect("a valid array of info-hashes is required");

    let client = Client::new(url.clone(), timeout).map_err(|err| Error::HttpClientError { err })?;

    let response = client.scrape(&query).await.map_err(|err| Error::HttpClientError { err })?;

    // Read the raw body and try to decode it as a bencoded scrape response.
    let response = response.bytes().await.map_err(|e| Error::ResponseError { err: e.into() })?;

    let response = scrape::Response::try_from_bencoded(&response).map_err(|e| Error::BencodeParseError {
        data: response,
        err: e.into(),
    })?;

    Ok(response)
}
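
#[cfg(test)]
mod tests {
    //! A minimal smoke-test sketch (an illustrative addition, not part of the
    //! upstream file). It assumes nothing is listening on the chosen port, so
    //! both checks fail and `run` reports the tracker as `Err(Checks)`.
    use std::time::Duration;

    use url::Url;

    use super::run;

    #[tokio::test]
    async fn an_unreachable_tracker_should_fail_both_checks() {
        // Port 1 on localhost is assumed to have no tracker listening.
        let trackers = vec![Url::parse("http://127.0.0.1:1").expect("a valid URL")];

        let results = run(trackers, Duration::from_millis(100)).await;

        assert_eq!(results.len(), 1);
        assert!(results[0].is_err());
    }
}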