1use crate::errors::ScraperError;
2use chrono::{DateTime, Utc};
3use derive_new::new;
4use log::{error, warn};
5use regex::Regex;
6use reqwest::get;
7use scraper::{Html, Selector};
8use serde_json::Value;
9
/// Book metadata scraped from the `__NEXT_DATA__` JSON embedded in a
/// Goodreads book page.
#[derive(Debug, new, PartialEq)]
pub struct BookMetadata {
    /// Title text before the first ':' of the scraped title.
    pub title: String,
    /// Trimmed text after the first ':' of the scraped title, if any.
    pub subtitle: Option<String>,
    /// Book description; may contain HTML markup (e.g. `<br />`, `<i>`).
    pub description: Option<String>,
    /// Publisher name from the page's "details" section.
    pub publisher: Option<String>,
    /// Publication date, parsed from a millisecond Unix timestamp.
    pub publication_date: Option<DateTime<Utc>>,
    /// ISBN-10, falling back to ISBN-13, then to the Amazon ASIN.
    pub isbn: Option<String>,
    /// Primary and secondary contributors ("Unknown Author" entries removed).
    pub contributors: Vec<BookContributor>,
    /// Genre names attached to the book.
    pub genres: Vec<String>,
    /// Series membership, if the book belongs to one.
    pub series: Option<BookSeries>,
    /// Page count; a scraped value of 0 is treated as unknown (`None`).
    pub page_count: Option<i64>,
    /// Language name, e.g. "English".
    pub language: Option<String>,
    /// Cover image URL.
    pub image_url: Option<String>,
}
38
/// A person credited on a book, together with their role.
#[derive(Debug, new, PartialEq)]
pub struct BookContributor {
    /// Contributor name, e.g. "Rick Riordan".
    pub name: String,
    /// Contribution role as scraped, e.g. "Author".
    pub role: String,
}
47
/// A series the book belongs to and the book's position within it.
#[derive(Debug, new, PartialEq)]
pub struct BookSeries {
    /// Series title.
    pub title: String,
    /// Position in the series; `f32` so fractional positions are representable.
    pub number: f32,
}
56
57pub async fn fetch_metadata(goodreads_id: &str) -> Result<BookMetadata, ScraperError> {
58 let metadata = extract_book_metadata(goodreads_id).await?;
59 let amazon_id = extract_amazon_id(&metadata, goodreads_id)?;
60
61 let (title, subtitle) = extract_title_and_subtitle(&metadata, &amazon_id)?;
62 let description = extract_description(&metadata, &amazon_id);
63 let image_url = extract_image_url(&metadata, &amazon_id);
64 let contributors = extract_contributors(&metadata, &amazon_id);
65 let genres = extract_genres(&metadata, &amazon_id);
66 let publisher = extract_publisher(&metadata, &amazon_id);
67 let publication_date = extract_publication_date(&metadata, &amazon_id);
68 let isbn = extract_isbn(&metadata, &amazon_id);
69 let page_count = extract_page_count(&metadata, &amazon_id);
70 let language = extract_language(&metadata, &amazon_id);
71 let series = extract_series(&metadata, &amazon_id);
72
73 let metadata = BookMetadata::new(
74 title,
75 subtitle,
76 description,
77 publisher,
78 publication_date,
79 isbn,
80 contributors,
81 genres,
82 series,
83 page_count,
84 language,
85 image_url,
86 );
87
88 Ok(metadata)
89}
90
91async fn extract_book_metadata(goodreads_id: &str) -> Result<Value, ScraperError> {
92 let url = format!("https://www.goodreads.com/book/show/{goodreads_id}");
93 let document = Html::parse_document(&get(&url).await?.text().await?);
94 let metadata_selector = Selector::parse(r#"script[id="__NEXT_DATA__"]"#)?;
95 let metadata = &document.select(&metadata_selector).next();
96
97 let metadata = match metadata {
98 None => {
99 error!("Failed to scrape book metadata");
100 return Err(ScraperError::ScrapeError(
101 "Failed to scrape book metadata".to_string(),
102 ));
103 }
104 Some(m) => serde_json::from_str(&m.text().collect::<String>())?,
105 };
106
107 Ok(metadata)
108}
109
110fn extract_amazon_id(metadata: &Value, goodreads_id: &str) -> Result<String, ScraperError> {
111 let amazon_id_key = format!("getBookByLegacyId({{\"legacyId\":\"{goodreads_id}\"}})");
112 let amazon_id =
113 &metadata["props"]["pageProps"]["apolloState"]["ROOT_QUERY"][amazon_id_key]["__ref"];
114 let Some(amazon_id) = to_string(amazon_id) else {
115 error!("Failed to scrape Amazon ID");
116 return Err(ScraperError::ScrapeError(
117 "Failed to scrape Amazon ID".to_string(),
118 ));
119 };
120
121 Ok(amazon_id)
122}
123
124fn extract_title_and_subtitle(
125 metadata: &Value,
126 amazon_id: &str,
127) -> Result<(String, Option<String>), ScraperError> {
128 let title = &metadata["props"]["pageProps"]["apolloState"][amazon_id]["title"];
129 let Some(title) = to_string(title) else {
130 error!("Failed to scrape book title");
131 return Err(ScraperError::ScrapeError(
132 "Failed to scrape book title".to_string(),
133 ));
134 };
135
136 match title.split_once(':') {
137 Some((title, subtitle)) => Ok((title.to_string(), Some(subtitle.trim().to_string()))),
138 None => Ok((title.clone(), None)),
139 }
140}
141
142fn extract_description(metadata: &Value, amazon_id: &str) -> Option<String> {
143 let description = &metadata["props"]["pageProps"]["apolloState"][amazon_id]["description"];
144 to_string(description)
145}
146
147fn extract_image_url(metadata: &Value, amazon_id: &str) -> Option<String> {
148 let url = &metadata["props"]["pageProps"]["apolloState"][amazon_id]["imageUrl"];
149 to_string(url)
150}
151
152fn extract_contributors(metadata: &Value, amazon_id: &str) -> Vec<BookContributor> {
153 let mut contributors = Vec::new();
154
155 let primary =
156 metadata["props"]["pageProps"]["apolloState"][amazon_id]["primaryContributorEdge"]
157 .as_object()
158 .map(|obj| (to_string(&obj["role"]), to_string(&obj["node"]["__ref"])));
159
160 match primary {
161 Some((Some(role), Some(reference))) => {
162 if let Some(contributor) = fetch_contributor(metadata, (role, reference)) {
163 contributors.push(contributor);
164 }
165 }
166 Some(_) => {
167 warn!("Failed to parse contributor");
168 }
169 None => (),
170 }
171
172 let Some(secondary) =
173 metadata["props"]["pageProps"]["apolloState"][amazon_id]["secondaryContributorEdges"]
174 .as_array()
175 else {
176 return contributors
177 .into_iter()
178 .filter(|s| !s.name.to_lowercase().eq("unknown author"))
179 .collect();
180 };
181
182 for contributor in secondary {
183 let role = to_string(&contributor["role"]);
184 let key = to_string(&contributor["node"]["__ref"]);
185 if role.is_none() || key.is_none() {
186 warn!("Failed to parse contributor");
187 continue;
188 }
189
190 if let Some(contributor) = fetch_contributor(metadata, (role.unwrap(), key.unwrap())) {
191 contributors.push(contributor);
192 }
193 }
194
195 contributors
196 .into_iter()
197 .filter(|s| !s.name.to_lowercase().eq("unknown author"))
198 .collect()
199}
200
201fn fetch_contributor(metadata: &Value, (role, key): (String, String)) -> Option<BookContributor> {
202 let contributor = &metadata["props"]["pageProps"]["apolloState"][key]["name"];
203 let name = to_string(contributor);
204 if name.is_none() {
205 warn!("Failed to parse contributor");
206 }
207
208 name.map(|n| BookContributor::new(n, role))
209}
210
211fn extract_genres(metadata: &Value, amazon_id: &str) -> Vec<String> {
212 let genres = metadata["props"]["pageProps"]["apolloState"][amazon_id]["bookGenres"].as_array();
213
214 let Some(genres) = genres else {
215 return vec![];
216 };
217
218 genres
219 .iter()
220 .filter_map(|genre| {
221 to_string(&genre["genre"]["name"]).or_else(|| {
222 warn!("Failed to parse genre name");
223 None
224 })
225 })
226 .collect()
227}
228
229fn extract_publisher(metadata: &Value, amazon_id: &str) -> Option<String> {
230 let publisher =
231 &metadata["props"]["pageProps"]["apolloState"][amazon_id]["details"]["publisher"];
232 to_string(publisher)
233}
234
235fn extract_publication_date(metadata: &Value, amazon_id: &str) -> Option<DateTime<Utc>> {
236 match &metadata["props"]["pageProps"]["apolloState"][amazon_id]["details"]["publicationTime"] {
237 Value::Null => None,
238 Value::Number(number) => {
239 let timestamp = number.as_i64().map(DateTime::from_timestamp_millis);
240
241 if timestamp.is_none() {
242 warn!("Failed to parse publication date");
243 }
244
245 timestamp.flatten()
246 }
247 _ => panic!("Publication date must be a timestamp"),
248 }
249}
250
251fn extract_isbn(metadata: &Value, amazon_id: &str) -> Option<String> {
252 let isbn = &metadata["props"]["pageProps"]["apolloState"][amazon_id]["details"]["isbn"];
253 if let Some(i) = to_string(isbn) {
254 return Some(i);
255 }
256
257 let isbn13 = &metadata["props"]["pageProps"]["apolloState"][amazon_id]["details"]["isbn13"];
258 if let Some(i) = to_string(isbn13) {
259 return Some(i);
260 }
261
262 let asin = &metadata["props"]["pageProps"]["apolloState"][amazon_id]["details"]["asin"];
263 to_string(asin)
264}
265
266fn extract_page_count(metadata: &Value, amazon_id: &str) -> Option<i64> {
267 let count =
268 metadata["props"]["pageProps"]["apolloState"][amazon_id]["details"]["numPages"].as_i64();
269 match count {
270 Some(0) => None,
271 c => c,
272 }
273}
274
275fn extract_language(metadata: &Value, amazon_id: &str) -> Option<String> {
276 let language =
277 &metadata["props"]["pageProps"]["apolloState"][amazon_id]["details"]["language"]["name"];
278 to_string(language)
279}
280
281fn extract_series(metadata: &Value, amazon_id: &str) -> Option<BookSeries> {
282 let series_array =
283 metadata["props"]["pageProps"]["apolloState"][amazon_id]["bookSeries"].as_array()?;
284
285 let series = series_array.first()?;
286
287 let Some(position) = series["userPosition"]
288 .as_str()
289 .map(|s| s.split('-').next().unwrap_or(""))
290 .and_then(|s| s.parse::<f32>().ok())
291 else {
292 warn!("Failed to parse series number");
293 return None;
294 };
295
296 let Some(key) = to_string(&series["series"]["__ref"]) else {
297 warn!("Failed to parse series key");
298 return None;
299 };
300
301 let title = &metadata["props"]["pageProps"]["apolloState"][key]["title"];
302 let Some(title) = to_string(title) else {
303 warn!("Failed to parse series title");
304 return None;
305 };
306
307 Some(BookSeries::new(title, position))
308}
309
310fn to_string(value: &Value) -> Option<String> {
311 let re = Regex::new(r"\s{2,}").expect("Regex must be valid");
312 value
313 .as_str()
314 .map(str::trim)
315 .map(|s| re.replace_all(s, " ").to_string())
316 .filter(|s| !s.is_empty())
317}
318
#[cfg(test)]
mod tests {
    use super::*;

    // NOTE(review): live-network integration test — it fetches a real
    // Goodreads page over HTTP, so it needs internet access and will break
    // whenever Goodreads edits this book's data (genre list, image hash,
    // description markup). Consider gating it behind `#[ignore]` or a
    // recorded fixture.
    #[tokio::test]
    async fn fetch_metadata_test() {
        let expected_series = Some(BookSeries::new(
            "Percy Jackson and the Olympians".to_string(),
            5.0,
        ));
        let expected_contributors = vec![BookContributor::new(
            "Rick Riordan".to_string(),
            "Author".to_string(),
        )];
        let expected_genres = vec![
            "Fantasy".to_string(),
            "Young Adult".to_string(),
            "Mythology".to_string(),
            "Fiction".to_string(),
            "Percy Jackson".to_string(),
            "Middle Grade".to_string(),
            "Adventure".to_string(),
            "Greek Mythology".to_string(),
            "Urban Fantasy".to_string(),
            "Childrens".to_string(),
        ];
        let expected_metadata = BookMetadata::new(
            "The Last Olympian".to_string(),
            None,
            Some("All year the half-bloods have been preparing for battle against the Titans, knowing the odds of victory are grim. \
            Kronos's army is stronger than ever, and with every god and half-blood he recruits, the evil Titan's power only grows.\
            <br /><br />While the Olympians struggle to contain the rampaging monster Typhon, Kronos begins his advance on New York City, \
            where Mount Olympus stands virtually unguarded. Now it's up to Percy Jackson and an army of young demigods to stop the Lord of Time. \
            <br /><br />In this momentous final book in the <i>New York Times</i> best-selling series, the long-awaited prophecy surrounding \
            Percy's sixteenth birthday unfolds. And as the battle for Western civilization rages on the streets of Manhattan, Percy faces a \
            terrifying suspicion that he may be fighting against his own fate.".to_string()),
            Some("Disney-Hyperion Books".to_string()),
            Some(DateTime::parse_from_rfc3339("2009-05-05T07:00:00Z").unwrap().to_utc()),
            Some("1423101472".to_string()),
            expected_contributors,
            expected_genres,
            expected_series,
            Some(381),
            Some("English".to_string()),
            Some("https://m.media-amazon.com/images/S/compressed.photo.goodreads.com/books/1723393514i/4556058.jpg".to_string()),
        );

        let metadata = fetch_metadata("4556058").await.unwrap();
        assert_eq!(metadata, expected_metadata);
    }
}
369}