1#[cfg(feature = "http-client")]
13extern crate reqwest;
14extern crate serde_json;
15extern crate thiserror;
16#[cfg(feature = "http-client")]
17extern crate url;
18
19use std::cmp::PartialEq;
20use std::io;
21use std::result;
22
23pub mod http;
24pub mod iter;
25pub use iter::{Iter, IterElems};
26
// Placeholder in a base-URL template that `set_base_url` replaces with the
// configured language code (see `Wikipedia::base_url`).
const LANGUAGE_URL_MARKER: &str = "{language}";
28
// Extracts `query.<$query_field>[*].title` from a MediaWiki JSON response
// (`$data`), collecting the title strings into the inferred container.
// Entries without a string `title` are skipped; a missing/odd-shaped path
// yields `Error::JSONPathError`. Expands to an expression using `?`, so it
// may only be invoked inside a function returning this crate's `Result`.
macro_rules! results {
    ($data: expr, $query_field: expr) => {
        $data
            .as_object()
            .and_then(|x| x.get("query"))
            .and_then(|x| x.as_object())
            .and_then(|x| x.get($query_field))
            .and_then(|x| x.as_array())
            .ok_or(Error::JSONPathError)?
            .into_iter()
            .filter_map(|i| {
                i.as_object()
                    .and_then(|i| i.get("title"))
                    .and_then(|s| s.as_str().map(|s| s.to_owned()))
            })
            .collect()
    };
}
48
// Issues one continuation-aware `action=query` request for `$this` (a
// `Page`). `$params` are the listing-specific key/value pairs; the page's
// title/pageid selector and format/action are appended automatically.
// `$cont` (`&Option<IterElems>`) either replays a previous response's
// continuation tokens or, when `None`, sends `continue=` to start a fresh
// listing. Expands to `Result<(Vec<serde_json::Value>, Option<IterElems>)>`:
// the values of `query.pages` plus the next continuation, if any.
macro_rules! cont {
    ($this: expr, $cont: expr, $($params: expr),*) => {{
        let qp = $this.identifier.query_param();
        let mut params = vec![
            $($params),*,
            ("format", "json"),
            ("action", "query"),
            (&*qp.0, &*qp.1),
        ];
        match *$cont {
            Some(ref v) => {
                // Echo back every continuation token from the last batch.
                for x in v.iter() { params.push((&*x.0, &*x.1)); }
            },
            None => params.push(("continue", "")),
        }
        let q = $this.wikipedia.query(params.into_iter())?;

        let pages = q
            .as_object()
            .and_then(|x| x.get("query"))
            .and_then(|x| x.as_object())
            .and_then(|x| x.get("pages"))
            .and_then(|x| x.as_object())
            .ok_or(Error::JSONPathError)?;

        Ok((pages.values().cloned().collect(), $this.parse_cont(&q)?))
    }}
}
77
/// Errors returned by this crate's API wrappers.
#[derive(thiserror::Error, Debug)]
pub enum Error {
    /// The request URL could not be built or parsed.
    #[error("URL Error")]
    URLError,
    /// The underlying HTTP client failed; carries the boxed source error.
    #[error("HTTP Error")]
    HTTPError(#[from] Box<dyn std::error::Error>),
    /// I/O failure while performing a request.
    #[error("IO Error: {0}")]
    IOError(#[from] io::Error),
    /// The response body was not valid JSON.
    #[error("JSON Error: {0}")]
    JSONError(#[from] serde_json::error::Error),
    /// The JSON parsed but an expected key or shape was missing.
    #[error("JSON Path Error")]
    JSONPathError,
    /// A caller-supplied argument was out of range; holds the parameter name.
    #[error("Invalid Parameter: {0}")]
    InvalidParameter(String),
}
100
/// Crate-wide result alias using [`Error`].
pub type Result<T> = result::Result<T, Error>;
102
/// Entry point for querying a MediaWiki installation (wikipedia.org by
/// default). The API endpoint is assembled as
/// `pre_language_url + language + post_language_url`.
#[derive(Debug)]
pub struct Wikipedia<A: http::HttpClient> {
    /// HTTP client used for all requests.
    pub client: A,
    /// URL part before the language code (e.g. `"https://"`).
    pub pre_language_url: String,
    /// URL part after the language code (e.g. `".wikipedia.org/w/api.php"`).
    pub post_language_url: String,
    /// Language code spliced into the URL (e.g. `"en"`).
    pub language: String,
    /// Result limit sent as `srlimit`/`gslimit` for text and geo searches.
    pub search_results: u32,
    /// Batch size (`gimlimit`) when listing page images; `"max"` by default.
    pub images_results: String,
    /// Batch size for link-style listings (extlinks/links/langlinks).
    pub links_results: String,
    /// Batch size (`cllimit`) when listing categories.
    pub categories_results: String,
}
122
123impl<A: http::HttpClient + Default> Default for Wikipedia<A> {
124 fn default() -> Self {
125 Wikipedia::new(A::default())
126 }
127}
128
129impl<A: http::HttpClient + Clone> Clone for Wikipedia<A> {
130 fn clone(&self) -> Self {
131 Wikipedia {
132 client: self.client.clone(),
133 pre_language_url: self.pre_language_url.clone(),
134 post_language_url: self.post_language_url.clone(),
135 language: self.language.clone(),
136 search_results: self.search_results,
137 images_results: self.images_results.clone(),
138 links_results: self.links_results.clone(),
139 categories_results: self.categories_results.clone(),
140 }
141 }
142}
143
144impl<A: http::HttpClient> Wikipedia<A> {
145 pub fn new(client: A) -> Self {
147 Wikipedia {
148 client,
149 pre_language_url: "https://".to_owned(),
150 post_language_url: ".wikipedia.org/w/api.php".to_owned(),
151 language: "en".to_owned(),
152 search_results: 10,
153 images_results: "max".to_owned(),
154 links_results: "max".to_owned(),
155 categories_results: "max".to_owned(),
156 }
157 }
158
159 pub fn get_languages(&self) -> Result<IterElems> {
162 let q = self.query(
163 vec![
164 ("meta", "siteinfo"),
165 ("siprop", "languages"),
166 ("format", "json"),
167 ("action", "query"),
168 ]
169 .into_iter(),
170 )?;
171
172 Ok(q.as_object()
173 .and_then(|x| x.get("query"))
174 .and_then(|x| x.as_object())
175 .and_then(|x| x.get("languages"))
176 .and_then(|x| x.as_array())
177 .ok_or(Error::JSONPathError)?
178 .iter()
179 .filter_map(|x| {
180 let o = x.as_object();
181 Some((
182 match o
183 .and_then(|x| x.get("code"))
184 .and_then(|x| x.as_str())
185 .map(|x| x.to_owned())
186 {
187 Some(v) => v,
188 None => return None,
189 },
190 match o
191 .and_then(|x| x.get("*"))
192 .and_then(|x| x.as_str())
193 .map(|x| x.to_owned())
194 {
195 Some(v) => v,
196 None => return None,
197 },
198 ))
199 })
200 .collect())
201 }
202
203 pub fn base_url(&self) -> String {
205 format!(
206 "{}{}{}",
207 self.pre_language_url, self.language, self.post_language_url
208 )
209 }
210
211 pub fn set_base_url(&mut self, base_url: &str) {
214 let index = match base_url.find(LANGUAGE_URL_MARKER) {
215 Some(i) => i,
216 None => {
217 self.pre_language_url = base_url.to_owned();
218 self.language = "".to_owned();
219 self.post_language_url = "".to_owned();
220 return;
221 }
222 };
223 self.pre_language_url = base_url[0..index].to_owned();
224 self.post_language_url = base_url[index + LANGUAGE_URL_MARKER.len()..].to_owned();
225 }
226
227 fn query<'a, I>(&self, args: I) -> Result<serde_json::Value>
228 where
229 I: Iterator<Item = (&'a str, &'a str)>,
230 {
231 let response_str = self.client.get(&self.base_url(), args)?;
232 let json = serde_json::from_str(&response_str).map_err(Error::JSONError)?;
233 Ok(json)
234 }
235
236 pub fn search(&self, query: &str) -> Result<Vec<String>> {
248 let results = &*format!("{}", self.search_results);
249 let data = self.query(
250 vec![
251 ("list", "search"),
252 ("srprop", ""),
253 ("srlimit", results),
254 ("srsearch", query),
255 ("format", "json"),
256 ("action", "query"),
257 ]
258 .into_iter(),
259 )?;
260
261 Ok(results!(data, "search"))
262 }
263
264 pub fn geosearch(&self, latitude: f64, longitude: f64, radius: u16) -> Result<Vec<String>> {
276 #![allow(clippy::manual_range_contains)]
277
278 if latitude < -90.0 || latitude > 90.0 {
279 return Err(Error::InvalidParameter("latitude".to_string()));
280 }
281 if longitude < -180.0 || longitude > 180.0 {
282 return Err(Error::InvalidParameter("longitude".to_string()));
283 }
284 if radius < 10 || radius > 10000 {
285 return Err(Error::InvalidParameter("radius".to_string()));
286 }
287 let results = &*format!("{}", self.search_results);
288 let data = self.query(
289 vec![
290 ("list", "geosearch"),
291 ("gsradius", &*format!("{}", radius)),
292 ("gscoord", &*format!("{}|{}", latitude, longitude)),
293 ("gslimit", results),
294 ("format", "json"),
295 ("action", "query"),
296 ]
297 .into_iter(),
298 )?;
299 Ok(results!(data, "geosearch"))
300 }
301
302 pub fn random_count(&self, count: u8) -> Result<Vec<String>> {
304 let data = self.query(
305 vec![
306 ("list", "random"),
307 ("rnnamespace", "0"),
308 ("rnlimit", &*format!("{}", count)),
309 ("format", "json"),
310 ("action", "query"),
311 ]
312 .into_iter(),
313 )?;
314 let r: Vec<String> = results!(data, "random");
315 Ok(r)
316 }
317
318 pub fn random(&self) -> Result<Option<String>> {
320 Ok(self.random_count(1)?.into_iter().next())
321 }
322
    /// Creates a [`Page`] handle addressed by its title.
    pub fn page_from_title(&self, title: String) -> Page<'_, A> {
        Page::from_title(self, title)
    }
327
    /// Creates a [`Page`] handle addressed by its numeric page id (as a
    /// string, the way the API exchanges it).
    pub fn page_from_pageid(&self, pageid: String) -> Page<'_, A> {
        Page::from_pageid(self, pageid)
    }
332}
333
/// How a page is addressed in API calls: by title or by page id (kept as a
/// string, as the API sends and receives it).
#[derive(Debug)]
enum TitlePageId {
    Title(String),
    PageId(String),
}
339
340impl TitlePageId {
341 fn query_param(&self) -> (String, String) {
342 match *self {
343 TitlePageId::Title(ref s) => ("titles".to_owned(), s.clone()),
344 TitlePageId::PageId(ref s) => ("pageids".to_owned(), s.clone()),
345 }
346 }
347}
348
/// A handle to a single wiki page, borrowing the [`Wikipedia`] instance
/// that created it for all requests.
#[derive(Debug)]
pub struct Page<'a, A: 'a + http::HttpClient> {
    wikipedia: &'a Wikipedia<A>,
    identifier: TitlePageId,
}
354
355impl<'a, A: http::HttpClient> Page<'a, A> {
    /// Creates a page handle addressed by its title.
    pub fn from_title(wikipedia: &'a Wikipedia<A>, title: String) -> Page<'a, A> {
        Page {
            wikipedia,
            identifier: TitlePageId::Title(title),
        }
    }
364
    /// Creates a page handle addressed by its page id (string form).
    pub fn from_pageid(wikipedia: &'a Wikipedia<A>, pageid: String) -> Page<'a, A> {
        Page {
            wikipedia,
            identifier: TitlePageId::PageId(pageid),
        }
    }
372
    /// Returns the page id: directly when the page was addressed by id,
    /// otherwise by querying the API for the title (following one level of
    /// redirect per recursive call) and taking the first key of
    /// `query.pages`.
    pub fn get_pageid(&self) -> Result<String> {
        match self.identifier {
            TitlePageId::PageId(ref s) => Ok(s.clone()),
            TitlePageId::Title(_) => {
                let qp = self.identifier.query_param();
                let q = self.wikipedia.query(
                    vec![
                        ("prop", "info|pageprops"),
                        ("inprop", "url"),
                        ("ppprop", "disambiguation"),
                        ("redirects", ""),
                        ("format", "json"),
                        ("action", "query"),
                        (&*qp.0, &*qp.1),
                    ]
                    .into_iter(),
                )?;

                // If the title redirected, resolve again under the target.
                if let Some(r) = self.redirect(&q) {
                    return Page::from_title(self.wikipedia, r).get_pageid();
                }
                let pages = q
                    .as_object()
                    .and_then(|x| x.get("query"))
                    .and_then(|x| x.as_object())
                    .and_then(|x| x.get("pages"))
                    .and_then(|x| x.as_object())
                    .ok_or(Error::JSONPathError)?;
                // `query.pages` is keyed by page id; the first key is ours.
                pages.keys().next().cloned().ok_or(Error::JSONPathError)
            }
        }
    }
406
407 pub fn get_title(&self) -> Result<String> {
409 match self.identifier {
410 TitlePageId::Title(ref s) => Ok(s.clone()),
411 TitlePageId::PageId(_) => {
412 let qp = self.identifier.query_param();
413 let q = self.wikipedia.query(
414 vec![
415 ("prop", "info|pageprops"),
416 ("inprop", "url"),
417 ("ppprop", "disambiguation"),
418 ("redirects", ""),
419 ("format", "json"),
420 ("action", "query"),
421 (&*qp.0, &*qp.1),
422 ]
423 .into_iter(),
424 )?;
425
426 if let Some(r) = self.redirect(&q) {
427 return Ok(r);
428 }
429 let pages = q
430 .as_object()
431 .and_then(|x| x.get("query"))
432 .and_then(|x| x.as_object())
433 .and_then(|x| x.get("pages"))
434 .and_then(|x| x.as_object())
435 .ok_or(Error::JSONPathError)?;
436 let page = match pages.values().next() {
437 Some(p) => p,
438 None => return Err(Error::JSONPathError),
439 };
440 Ok(page
441 .as_object()
442 .and_then(|x| x.get("title"))
443 .and_then(|x| x.as_str())
444 .ok_or(Error::JSONPathError)?
445 .to_owned())
446 }
447 }
448 }
449
450 fn redirect(&self, q: &serde_json::Value) -> Option<String> {
453 q.as_object()
454 .and_then(|x| x.get("query"))
455 .and_then(|x| x.as_object())
456 .and_then(|x| x.get("redirects"))
457 .and_then(|x| x.as_array())
458 .and_then(|x| x.iter().next())
459 .and_then(|x| x.as_object())
460 .and_then(|x| x.get("to"))
461 .and_then(|x| x.as_str())
462 .map(|x| x.to_owned())
463 }
464
465 fn get_first_page<'parsed>(
467 &self,
468 data: &'parsed serde_json::Value,
469 ) -> Option<&'parsed serde_json::Value> {
470 let pages = data
471 .as_object()
472 .and_then(|x| x.get("query"))
473 .and_then(|x| x.as_object())
474 .and_then(|x| x.get("pages"))
475 .and_then(|x| x.as_object());
476 let pageid = pages?.keys().next()?;
477 pages.unwrap().get(pageid)
478 }
479
480 pub fn get_content(&self) -> Result<String> {
482 let qp = self.identifier.query_param();
483 let q = self.wikipedia.query(
484 vec![
485 ("prop", "extracts|revisions"),
486 ("explaintext", ""),
487 ("rvprop", "ids"),
488 ("redirects", ""),
489 ("format", "json"),
490 ("action", "query"),
491 (&*qp.0, &*qp.1),
492 ]
493 .into_iter(),
494 )?;
495
496 if let Some(r) = self.redirect(&q) {
497 return Page::from_title(self.wikipedia, r).get_content();
498 }
499
500 Ok(self
501 .get_first_page(&q)
502 .and_then(|x| x.as_object())
503 .and_then(|x| x.get("extract"))
504 .and_then(|x| x.as_str())
505 .ok_or(Error::JSONPathError)?
506 .to_owned())
507 }
508
509 pub fn get_html_content(&self) -> Result<String> {
511 let qp = self.identifier.query_param();
512 let q = self.wikipedia.query(
513 vec![
514 ("prop", "revisions"),
515 ("rvprop", "content"),
516 ("rvlimit", "1"),
517 ("rvparse", ""),
518 ("redirects", ""),
519 ("format", "json"),
520 ("action", "query"),
521 (&*qp.0, &*qp.1),
522 ]
523 .into_iter(),
524 )?;
525
526 if let Some(r) = self.redirect(&q) {
527 return Page::from_title(self.wikipedia, r).get_html_content();
528 }
529
530 Ok(self
531 .get_first_page(&q)
532 .and_then(|x| x.as_object())
533 .and_then(|x| x.get("revisions"))
534 .and_then(|x| x.as_array())
535 .and_then(|x| x.iter().next())
536 .and_then(|x| x.as_object())
537 .and_then(|x| x.get("*"))
538 .and_then(|x| x.as_str())
539 .ok_or(Error::JSONPathError)?
540 .to_owned())
541 }
542
543 pub fn get_summary(&self) -> Result<String> {
545 let qp = self.identifier.query_param();
546 let q = self.wikipedia.query(
547 vec![
548 ("prop", "extracts"),
549 ("explaintext", ""),
550 ("exintro", ""),
551 ("redirects", ""),
552 ("format", "json"),
553 ("action", "query"),
554 (&*qp.0, &*qp.1),
555 ]
556 .into_iter(),
557 )?;
558
559 if let Some(r) = self.redirect(&q) {
560 return Page::from_title(self.wikipedia, r).get_summary();
561 }
562
563 Ok(self
564 .get_first_page(&q)
565 .and_then(|x| x.as_object())
566 .and_then(|x| x.get("extract"))
567 .and_then(|x| x.as_str())
568 .ok_or(Error::JSONPathError)?
569 .to_owned())
570 }
571
572 fn parse_cont(&self, q: &serde_json::Value) -> Result<Option<IterElems>> {
575 let cont = match q
576 .as_object()
577 .and_then(|x| x.get("continue"))
578 .and_then(|x| x.as_object())
579 {
580 Some(v) => v,
581 None => return Ok(None),
582 };
583 let mut cont_v = vec![];
584 for (k, v) in cont.into_iter() {
585 let value = match *v {
586 serde_json::Value::Null => "".to_owned(),
587 serde_json::Value::Bool(b) => if b { "1" } else { "0" }.to_owned(),
588 serde_json::Value::Number(ref f) => format!("{}", f),
589 serde_json::Value::String(ref s) => s.clone(),
590 _ => return Err(Error::JSONPathError),
591 };
592 cont_v.push((k.clone(), value));
593 }
594 Ok(Some(cont_v))
595 }
596
    /// Fetches one batch of this page's images (`generator=images` with
    /// `prop=imageinfo&iiprop=url`), resuming from `cont` when given.
    /// Returns the raw page objects plus the next continuation token.
    fn request_images(
        &self,
        cont: &Option<IterElems>,
    ) -> Result<(Vec<serde_json::Value>, Option<IterElems>)> {
        cont!(
            self,
            cont,
            ("generator", "images"),
            ("gimlimit", &*self.wikipedia.images_results),
            ("prop", "imageinfo"),
            ("iiprop", "url")
        )
    }
610
    /// Returns a lazy iterator over the page's images; batches are fetched
    /// on demand (presumably via `request_images` — wiring lives in `iter`).
    pub fn get_images(&self) -> Result<Iter<A, iter::Image>> {
        Iter::new(self)
    }
615
616 fn request_extlinks(
617 &self,
618 cont: &Option<IterElems>,
619 ) -> Result<(Vec<serde_json::Value>, Option<IterElems>)> {
620 let a: Result<(Vec<serde_json::Value>, _)> = cont!(
621 self,
622 cont,
623 ("prop", "extlinks"),
624 ("ellimit", &*self.wikipedia.links_results)
625 );
626 a.map(|(pages, cont)| {
627 let page = match pages.into_iter().next() {
628 Some(p) => p,
629 None => return (Vec::new(), None),
630 };
631 (
632 page.as_object()
633 .and_then(|x| x.get("extlinks"))
634 .and_then(|x| x.as_array())
635 .map(|x| x.to_vec())
636 .unwrap_or(Vec::new()),
637 cont,
638 )
639 })
640 }
641
    /// Returns a lazy iterator over the page's external links; batches are
    /// fetched on demand (presumably via `request_extlinks` — wiring lives
    /// in `iter`).
    pub fn get_references(&self) -> Result<Iter<A, iter::Reference>> {
        Iter::new(self)
    }
646
647 fn request_links(
648 &self,
649 cont: &Option<IterElems>,
650 ) -> Result<(Vec<serde_json::Value>, Option<IterElems>)> {
651 let a: Result<(Vec<serde_json::Value>, _)> = cont!(
652 self,
653 cont,
654 ("prop", "links"),
655 ("plnamespace", "0"),
656 ("ellimit", &*self.wikipedia.links_results)
657 );
658 a.map(|(pages, cont)| {
659 let page = match pages.into_iter().next() {
660 Some(p) => p,
661 None => return (Vec::new(), None),
662 };
663 (
664 page.as_object()
665 .and_then(|x| x.get("links"))
666 .and_then(|x| x.as_array())
667 .map(|x| x.to_vec())
668 .unwrap_or(Vec::new()),
669 cont,
670 )
671 })
672 }
673
    /// Returns a lazy iterator over the page's internal links; batches are
    /// fetched on demand (presumably via `request_links` — wiring lives in
    /// `iter`).
    pub fn get_links(&self) -> Result<Iter<A, iter::Link>> {
        Iter::new(self)
    }
678
679 fn request_categories(
680 &self,
681 cont: &Option<IterElems>,
682 ) -> Result<(Vec<serde_json::Value>, Option<IterElems>)> {
683 let a: Result<(Vec<serde_json::Value>, _)> = cont!(
684 self,
685 cont,
686 ("prop", "categories"),
687 ("cllimit", &*self.wikipedia.categories_results)
688 );
689 a.map(|(pages, cont)| {
690 let page = match pages.into_iter().next() {
691 Some(p) => p,
692 None => return (Vec::new(), None),
693 };
694 (
695 page.as_object()
696 .and_then(|x| x.get("categories"))
697 .and_then(|x| x.as_array())
698 .map(|x| x.to_vec())
699 .unwrap_or(Vec::new()),
700 cont,
701 )
702 })
703 }
704
    /// Returns a lazy iterator over the page's categories; batches are
    /// fetched on demand (presumably via `request_categories` — wiring
    /// lives in `iter`).
    pub fn get_categories(&self) -> Result<Iter<A, iter::Category>> {
        Iter::new(self)
    }
709
710 fn request_langlinks(
711 &self,
712 cont: &Option<IterElems>,
713 ) -> Result<(Vec<serde_json::Value>, Option<IterElems>)> {
714 let a: Result<(Vec<serde_json::Value>, _)> = cont!(
715 self,
716 cont,
717 ("prop", "langlinks"),
718 ("lllimit", &*self.wikipedia.links_results)
719 );
720 a.map(|(pages, cont)| {
721 let page = match pages.into_iter().next() {
722 Some(p) => p,
723 None => return (Vec::new(), None),
724 };
725 (
726 page.as_object()
727 .and_then(|x| x.get("langlinks"))
728 .and_then(|x| x.as_array())
729 .map(|x| x.to_vec())
730 .unwrap_or(Vec::new()),
731 cont,
732 )
733 })
734 }
735
    /// Returns a lazy iterator over the page's inter-language links;
    /// batches are fetched on demand (presumably via `request_langlinks` —
    /// wiring lives in `iter`).
    pub fn get_langlinks(&self) -> Result<Iter<A, iter::LangLink>> {
        Iter::new(self)
    }
741
742 pub fn get_coordinates(&self) -> Result<Option<(f64, f64)>> {
744 let qp = self.identifier.query_param();
745 let params = vec![
746 ("prop", "coordinates"),
747 ("colimit", "max"),
748 ("redirects", ""),
749 ("format", "json"),
750 ("action", "query"),
751 (&*qp.0, &*qp.1),
752 ];
753 let q = self.wikipedia.query(params.into_iter())?;
754
755 if let Some(r) = self.redirect(&q) {
756 return Page::from_title(self.wikipedia, r).get_coordinates();
757 }
758
759 let coord = match self
760 .get_first_page(&q)
761 .and_then(|x| x.as_object())
762 .and_then(|x| x.get("coordinates"))
763 .and_then(|x| x.as_array())
764 .and_then(|x| x.iter().next())
765 .and_then(|x| x.as_object())
766 {
767 Some(c) => c,
768 None => return Ok(None),
769 };
770 Ok(Some((
771 coord
772 .get("lat")
773 .and_then(|x| x.as_f64())
774 .ok_or(Error::JSONPathError)?,
775 coord
776 .get("lon")
777 .and_then(|x| x.as_f64())
778 .ok_or(Error::JSONPathError)?,
779 )))
780 }
781
782 pub fn get_sections(&self) -> Result<Vec<String>> {
784 let pageid = self.get_pageid()?;
785 let params = vec![
786 ("prop", "sections"),
787 ("format", "json"),
788 ("action", "parse"),
789 ("pageid", &*pageid),
790 ];
791 let q = self.wikipedia.query(params.into_iter())?;
792
793 Ok(q.as_object()
794 .and_then(|x| x.get("parse"))
795 .and_then(|x| x.as_object())
796 .and_then(|x| x.get("sections"))
797 .and_then(|x| x.as_array())
798 .ok_or(Error::JSONPathError)?
799 .iter()
800 .filter_map(|x| {
801 x.as_object()
802 .and_then(|x| x.get("line"))
803 .and_then(|x| x.as_str())
804 .map(|x| x.to_owned())
805 })
806 .collect())
807 }
808
809 pub fn get_section_content(&self, title: &str) -> Result<Option<String>> {
811 let headr = format!("== {} ==", title);
812 let content = self.get_content()?;
813 let index = match content.find(&*headr) {
814 Some(i) => headr.len() + i,
815 None => return Ok(None),
816 };
817 let end = match content[index..].find("==") {
818 Some(i) => index + i,
819 None => content.len(),
820 };
821 Ok(Some(content[index..end].to_owned()))
822 }
823}
824
825impl<'a, A: http::HttpClient> PartialEq<Page<'a, A>> for Page<'a, A> {
826 fn eq(&self, other: &Page<A>) -> bool {
827 match self.identifier {
828 TitlePageId::Title(ref t1) => match other.identifier {
829 TitlePageId::Title(ref t2) => t1 == t2,
830 TitlePageId::PageId(_) => false,
831 },
832 TitlePageId::PageId(ref p1) => match other.identifier {
833 TitlePageId::Title(_) => false,
834 TitlePageId::PageId(ref p2) => p1 == p2,
835 },
836 }
837 }
838}
839
840#[cfg(test)]
841mod test {
842 use super::http::HttpClient;
843 use super::iter;
844 use super::Wikipedia;
845 use std::sync::Mutex;
846
847 const DEFAULT_AGENT: &str = "wikipedia (https://github.com/seppo0010/wikipedia-rs)";
848
849 struct MockClient {
850 pub url: Mutex<Vec<String>>,
851 pub user_agent: Option<String>,
852 pub bearer_token: Option<String>,
853 pub arguments: Mutex<Vec<iter::IterElems>>,
854 pub response: Mutex<Vec<String>>,
855 }
856
857 impl Default for MockClient {
858 fn default() -> Self {
859 MockClient {
860 url: Mutex::new(Vec::new()),
861 user_agent: Some(DEFAULT_AGENT.into()),
862 bearer_token: None,
863 arguments: Mutex::new(Vec::new()),
864 response: Mutex::new(Vec::new()),
865 }
866 }
867 }
868
869 impl super::http::HttpClient for MockClient {
870 fn user_agent(&mut self, user_agent: String) {
871 self.user_agent = Some(user_agent)
872 }
873
874 fn bearer_token(&mut self, bearer_token: String) {
875 self.bearer_token = Some(bearer_token)
876 }
877
878 fn get<'a, I>(&self, base_url: &str, args: I) -> Result<String, super::http::Error>
879 where
880 I: Iterator<Item = (&'a str, &'a str)>,
881 {
882 self.url.lock().unwrap().push(base_url.to_owned());
883 self.arguments
884 .lock()
885 .unwrap()
886 .push(args.map(|x| (x.0.to_owned(), x.1.to_owned())).collect());
887 Ok(self.response.lock().unwrap().remove(0))
888 }
889 }
890
891 #[test]
892 fn base_url() {
893 let mut wikipedia = Wikipedia::<MockClient>::default();
894 assert_eq!(wikipedia.base_url(), "https://en.wikipedia.org/w/api.php");
895 wikipedia.language = "es".to_owned();
896 assert_eq!(wikipedia.base_url(), "https://es.wikipedia.org/w/api.php");
897
898 wikipedia.set_base_url("https://hello.{language}.world/");
899 assert_eq!(wikipedia.base_url(), "https://hello.es.world/");
900
901 wikipedia.set_base_url("https://hello.world/");
902 assert_eq!(wikipedia.base_url(), "https://hello.world/");
903 }
904
905 #[test]
906 fn user_agent() {
907 let mut wikipedia = Wikipedia::<MockClient>::default();
908 wikipedia
909 .client
910 .response
911 .lock()
912 .unwrap()
913 .push("{}".to_owned());
914 wikipedia.search("hello world").unwrap_err();
915 assert_eq!(&*wikipedia.client.user_agent.unwrap(), DEFAULT_AGENT);
916
917 let mut client = MockClient::default();
918 client.user_agent("hello world".to_owned());
919 client.response.lock().unwrap().push("{}".to_owned());
920 wikipedia.client = client;
921 wikipedia.search("hello world").unwrap_err();
922 assert_eq!(&*wikipedia.client.user_agent.unwrap(), "hello world");
923 }
924
925 #[test]
926 fn search() {
927 let wikipedia = Wikipedia::<MockClient>::default();
928 wikipedia.client.response.lock().unwrap().push(
929 "{\"query\":{\"search\":[{\"title\":\"hello\"}, {\"title\":\"world\"}]}}".to_owned(),
930 );
931 assert_eq!(
932 wikipedia.search("hello world").unwrap(),
933 vec!["hello".to_owned(), "world".to_owned(),]
934 );
935 assert_eq!(
936 *wikipedia.client.url.lock().unwrap(),
937 vec!["https://en.wikipedia.org/w/api.php".to_owned()]
938 );
939 assert_eq!(
940 *wikipedia.client.arguments.lock().unwrap(),
941 vec![vec![
942 ("list".to_owned(), "search".to_owned()),
943 ("srprop".to_owned(), "".to_owned()),
944 ("srlimit".to_owned(), "10".to_owned()),
945 ("srsearch".to_owned(), "hello world".to_owned()),
946 ("format".to_owned(), "json".to_owned()),
947 ("action".to_owned(), "query".to_owned())
948 ]]
949 );
950 }
951
952 #[test]
953 fn geosearch() {
954 let wikipedia = Wikipedia::<MockClient>::default();
955 wikipedia.client.response.lock().unwrap().push(
956 "{\"query\":{\"geosearch\":[{\"title\":\"hello\"}, {\"title\":\"world\"}]}}".to_owned(),
957 );
958 assert_eq!(
959 wikipedia.geosearch(-34.603333, -58.381667, 10).unwrap(),
960 vec!["hello".to_owned(), "world".to_owned(),]
961 );
962 assert_eq!(
963 *wikipedia.client.url.lock().unwrap(),
964 vec!["https://en.wikipedia.org/w/api.php".to_owned()]
965 );
966 assert_eq!(
967 *wikipedia.client.arguments.lock().unwrap(),
968 vec![vec![
969 ("list".to_owned(), "geosearch".to_owned()),
970 ("gsradius".to_owned(), "10".to_owned()),
971 ("gscoord".to_owned(), "-34.603333|-58.381667".to_owned()),
972 ("gslimit".to_owned(), "10".to_owned()),
973 ("format".to_owned(), "json".to_owned()),
974 ("action".to_owned(), "query".to_owned())
975 ]]
976 );
977 }
978
979 #[test]
980 fn random_count() {
981 let wikipedia = Wikipedia::<MockClient>::default();
982 wikipedia.client.response.lock().unwrap().push(
983 "{\"query\":{\"random\":[{\"title\":\"hello\"}, {\"title\":\"world\"}]}}".to_owned(),
984 );
985 assert_eq!(
986 wikipedia.random_count(10).unwrap(),
987 vec!["hello".to_owned(), "world".to_owned(),]
988 );
989 assert_eq!(
990 *wikipedia.client.url.lock().unwrap(),
991 vec!["https://en.wikipedia.org/w/api.php".to_owned()]
992 );
993 assert_eq!(
994 *wikipedia.client.arguments.lock().unwrap(),
995 vec![vec![
996 ("list".to_owned(), "random".to_owned()),
997 ("rnnamespace".to_owned(), "0".to_owned()),
998 ("rnlimit".to_owned(), "10".to_owned()),
999 ("format".to_owned(), "json".to_owned()),
1000 ("action".to_owned(), "query".to_owned())
1001 ]]
1002 );
1003 }
1004
1005 #[test]
1006 fn random() {
1007 let wikipedia = Wikipedia::<MockClient>::default();
1008 wikipedia.client.response.lock().unwrap().push(
1009 "{\"query\":{\"random\":[{\"title\":\"hello\"}, {\"title\":\"world\"}]}}".to_owned(),
1010 );
1011 assert_eq!(wikipedia.random().unwrap(), Some("hello".to_owned()));
1012 assert_eq!(
1013 *wikipedia.client.url.lock().unwrap(),
1014 vec!["https://en.wikipedia.org/w/api.php".to_owned()]
1015 );
1016 assert_eq!(
1017 *wikipedia.client.arguments.lock().unwrap(),
1018 vec![vec![
1019 ("list".to_owned(), "random".to_owned()),
1020 ("rnnamespace".to_owned(), "0".to_owned()),
1021 ("rnlimit".to_owned(), "1".to_owned()),
1022 ("format".to_owned(), "json".to_owned()),
1023 ("action".to_owned(), "query".to_owned())
1024 ]]
1025 );
1026 }
1027
1028 #[test]
1029 fn page_content() {
1030 let wikipedia = Wikipedia::<MockClient>::default();
1031 wikipedia
1032 .client
1033 .response
1034 .lock()
1035 .unwrap()
1036 .push("{\"query\":{\"pages\":{\"a\":{\"extract\":\"hello\"}}}}".to_owned());
1037 let page = wikipedia.page_from_pageid("4138548".to_owned());
1038 let html = page.get_content().unwrap();
1039 assert_eq!(html, "hello".to_owned());
1040 assert_eq!(
1041 *wikipedia.client.url.lock().unwrap(),
1042 vec!["https://en.wikipedia.org/w/api.php".to_owned()]
1043 );
1044 assert_eq!(
1045 *wikipedia.client.arguments.lock().unwrap(),
1046 vec![vec![
1047 ("prop".to_owned(), "extracts|revisions".to_owned()),
1048 ("explaintext".to_owned(), "".to_owned()),
1049 ("rvprop".to_owned(), "ids".to_owned()),
1050 ("redirects".to_owned(), "".to_owned()),
1051 ("format".to_owned(), "json".to_owned()),
1052 ("action".to_owned(), "query".to_owned()),
1053 ("pageids".to_owned(), "4138548".to_owned()),
1054 ]]
1055 );
1056 }
1057
1058 #[test]
1059 fn page_html_content() {
1060 let wikipedia = Wikipedia::<MockClient>::default();
1061 wikipedia
1062 .client
1063 .response
1064 .lock()
1065 .unwrap()
1066 .push("{\"query\":{\"pages\":{\"a\":{\"revisions\":[{\"*\":\"hello\"}]}}}}".to_owned());
1067 let page = wikipedia.page_from_pageid("4138548".to_owned());
1068 let html = page.get_html_content().unwrap();
1069 assert_eq!(html, "hello".to_owned());
1070 assert_eq!(
1071 *wikipedia.client.url.lock().unwrap(),
1072 vec!["https://en.wikipedia.org/w/api.php".to_owned()]
1073 );
1074 assert_eq!(
1075 *wikipedia.client.arguments.lock().unwrap(),
1076 vec![vec![
1077 ("prop".to_owned(), "revisions".to_owned()),
1078 ("rvprop".to_owned(), "content".to_owned()),
1079 ("rvlimit".to_owned(), "1".to_owned()),
1080 ("rvparse".to_owned(), "".to_owned()),
1081 ("redirects".to_owned(), "".to_owned()),
1082 ("format".to_owned(), "json".to_owned()),
1083 ("action".to_owned(), "query".to_owned()),
1084 ("pageids".to_owned(), "4138548".to_owned()),
1085 ]]
1086 );
1087 }
1088
1089 #[test]
1090 fn page_summary() {
1091 let wikipedia = Wikipedia::<MockClient>::default();
1092 wikipedia
1093 .client
1094 .response
1095 .lock()
1096 .unwrap()
1097 .push("{\"query\":{\"pages\":{\"a\":{\"extract\":\"hello\"}}}}".to_owned());
1098 let page = wikipedia.page_from_title("Parkinson's law of triviality".to_owned());
1099 let summary = page.get_summary().unwrap();
1100 assert_eq!(summary, "hello".to_owned());
1101 assert_eq!(
1102 *wikipedia.client.url.lock().unwrap(),
1103 vec!["https://en.wikipedia.org/w/api.php".to_owned()]
1104 );
1105 assert_eq!(
1106 *wikipedia.client.arguments.lock().unwrap(),
1107 vec![vec![
1108 ("prop".to_owned(), "extracts".to_owned()),
1109 ("explaintext".to_owned(), "".to_owned()),
1110 ("exintro".to_owned(), "".to_owned()),
1111 ("redirects".to_owned(), "".to_owned()),
1112 ("format".to_owned(), "json".to_owned()),
1113 ("action".to_owned(), "query".to_owned()),
1114 (
1115 "titles".to_owned(),
1116 "Parkinson\'s law of triviality".to_owned()
1117 )
1118 ]]
1119 );
1120 }
1121
1122 #[test]
1123 fn page_redirect_summary() {
1124 let wikipedia = Wikipedia::<MockClient>::default();
1125 wikipedia
1126 .client
1127 .response
1128 .lock()
1129 .unwrap()
1130 .push("{\"query\":{\"redirects\":[{\"to\":\"hello world\"}]}}".to_owned());
1131 wikipedia
1132 .client
1133 .response
1134 .lock()
1135 .unwrap()
1136 .push("{\"query\":{\"pages\":{\"a\":{\"extract\":\"hello\"}}}}".to_owned());
1137 let page = wikipedia.page_from_title("Parkinson's law of triviality".to_owned());
1138 let summary = page.get_summary().unwrap();
1139 assert_eq!(summary, "hello".to_owned());
1140 assert_eq!(
1141 *wikipedia.client.url.lock().unwrap(),
1142 vec![
1143 "https://en.wikipedia.org/w/api.php".to_owned(),
1144 "https://en.wikipedia.org/w/api.php".to_owned(),
1145 ]
1146 );
1147 assert_eq!(
1148 *wikipedia.client.arguments.lock().unwrap(),
1149 vec![
1150 vec![
1151 ("prop".to_owned(), "extracts".to_owned()),
1152 ("explaintext".to_owned(), "".to_owned()),
1153 ("exintro".to_owned(), "".to_owned()),
1154 ("redirects".to_owned(), "".to_owned()),
1155 ("format".to_owned(), "json".to_owned()),
1156 ("action".to_owned(), "query".to_owned()),
1157 (
1158 "titles".to_owned(),
1159 "Parkinson\'s law of triviality".to_owned()
1160 )
1161 ],
1162 vec![
1163 ("prop".to_owned(), "extracts".to_owned()),
1164 ("explaintext".to_owned(), "".to_owned()),
1165 ("exintro".to_owned(), "".to_owned()),
1166 ("redirects".to_owned(), "".to_owned()),
1167 ("format".to_owned(), "json".to_owned()),
1168 ("action".to_owned(), "query".to_owned()),
1169 ("titles".to_owned(), "hello world".to_owned())
1170 ]
1171 ]
1172 );
1173 }
1174
1175 #[test]
1176 fn page_images() {
1177 let wikipedia = Wikipedia::<MockClient>::default();
1178 wikipedia.client.response.lock().unwrap().push("{\"continue\": {\"lol\":\"1\"},\"query\":{\"pages\":{\"a\":{\"title\":\"Image 1\", \"imageinfo\":[{\"url\": \"http://example.com/image1.jpg\", \"descriptionurl\": \"http://example.com/image1.jpg.html\"}]}}}}".to_owned());
1179 wikipedia.client.response.lock().unwrap().push("{\"query\":{\"pages\":{\"a\":{\"title\":\"Image 2\", \"imageinfo\":[{\"url\": \"http://example.com/image2.jpg\", \"descriptionurl\": \"http://example.com/image2.jpg.html\"}]}}}}".to_owned());
1180 let page = wikipedia.page_from_title("Parkinson's law of triviality".to_owned());
1181 let images = page.get_images().unwrap().collect::<Vec<_>>();
1182 assert_eq!(
1183 images,
1184 vec![
1185 iter::Image {
1186 url: "http://example.com/image1.jpg".to_owned(),
1187 title: "Image 1".to_owned(),
1188 description_url: "http://example.com/image1.jpg.html".to_owned(),
1189 },
1190 iter::Image {
1191 url: "http://example.com/image2.jpg".to_owned(),
1192 title: "Image 2".to_owned(),
1193 description_url: "http://example.com/image2.jpg.html".to_owned(),
1194 }
1195 ]
1196 );
1197 assert_eq!(
1198 *wikipedia.client.url.lock().unwrap(),
1199 vec![
1200 "https://en.wikipedia.org/w/api.php".to_owned(),
1201 "https://en.wikipedia.org/w/api.php".to_owned(),
1202 ]
1203 );
1204 assert_eq!(
1205 *wikipedia.client.arguments.lock().unwrap(),
1206 vec![
1207 vec![
1208 ("generator".to_owned(), "images".to_owned()),
1209 ("gimlimit".to_owned(), "max".to_owned()),
1210 ("prop".to_owned(), "imageinfo".to_owned()),
1211 ("iiprop".to_owned(), "url".to_owned()),
1212 ("format".to_owned(), "json".to_owned()),
1213 ("action".to_owned(), "query".to_owned()),
1214 (
1215 "titles".to_owned(),
1216 "Parkinson\'s law of triviality".to_owned()
1217 ),
1218 ("continue".to_owned(), "".to_owned())
1219 ],
1220 vec![
1221 ("generator".to_owned(), "images".to_owned()),
1222 ("gimlimit".to_owned(), "max".to_owned()),
1223 ("prop".to_owned(), "imageinfo".to_owned()),
1224 ("iiprop".to_owned(), "url".to_owned()),
1225 ("format".to_owned(), "json".to_owned()),
1226 ("action".to_owned(), "query".to_owned()),
1227 (
1228 "titles".to_owned(),
1229 "Parkinson\'s law of triviality".to_owned()
1230 ),
1231 ("lol".to_owned(), "1".to_owned())
1232 ]
1233 ]
1234 );
1235 }
1236
1237 #[test]
1238 fn page_coordinates() {
1239 let wikipedia = Wikipedia::<MockClient>::default();
1240 wikipedia.client.response.lock().unwrap().push(
1241 "{\"query\":{\"pages\":{\"a\":{\"coordinates\":[{\"lat\": 2.1, \"lon\":-1.3}]}}}}"
1242 .to_owned(),
1243 );
1244 let page = wikipedia.page_from_title("World".to_owned());
1245 let coordinates = page.get_coordinates().unwrap().unwrap();
1246 assert_eq!(coordinates, (2.1, -1.3));
1247 assert_eq!(
1248 *wikipedia.client.url.lock().unwrap(),
1249 vec!["https://en.wikipedia.org/w/api.php".to_owned()]
1250 );
1251 assert_eq!(
1252 *wikipedia.client.arguments.lock().unwrap(),
1253 vec![vec![
1254 ("prop".to_owned(), "coordinates".to_owned()),
1255 ("colimit".to_owned(), "max".to_owned()),
1256 ("redirects".to_owned(), "".to_owned()),
1257 ("format".to_owned(), "json".to_owned()),
1258 ("action".to_owned(), "query".to_owned()),
1259 ("titles".to_owned(), "World".to_owned())
1260 ]]
1261 );
1262 }
1263
1264 #[test]
1265 fn page_no_coordinates() {
1266 let wikipedia = Wikipedia::<MockClient>::default();
1267 wikipedia
1268 .client
1269 .response
1270 .lock()
1271 .unwrap()
1272 .push("{\"query\":{\"pages\":{\"a\":{}}}}".to_owned());
1273 let page = wikipedia.page_from_title("World".to_owned());
1274 assert!(page.get_coordinates().unwrap().is_none());
1275 assert_eq!(
1276 *wikipedia.client.url.lock().unwrap(),
1277 vec!["https://en.wikipedia.org/w/api.php".to_owned()]
1278 );
1279 assert_eq!(
1280 *wikipedia.client.arguments.lock().unwrap(),
1281 vec![vec![
1282 ("prop".to_owned(), "coordinates".to_owned()),
1283 ("colimit".to_owned(), "max".to_owned()),
1284 ("redirects".to_owned(), "".to_owned()),
1285 ("format".to_owned(), "json".to_owned()),
1286 ("action".to_owned(), "query".to_owned()),
1287 ("titles".to_owned(), "World".to_owned())
1288 ]]
1289 );
1290 }
1291
1292 #[test]
1293 fn get_references() {
1294 let wikipedia = Wikipedia::<MockClient>::default();
1295 wikipedia.client.response.lock().unwrap().push("{\"continue\": {\"lol\":\"1\"},\"query\":{\"pages\":{\"a\":{\"extlinks\":[{\"*\": \"//example.com/reference1.html\"}]}}}}".to_owned());
1296 wikipedia.client.response.lock().unwrap().push("{\"query\":{\"pages\":{\"a\":{\"extlinks\":[{\"*\": \"//example.com/reference2.html\"}]}}}}".to_owned());
1297 let page = wikipedia.page_from_title("World".to_owned());
1298 assert_eq!(
1299 page.get_references().unwrap().collect::<Vec<_>>(),
1300 vec![
1301 iter::Reference {
1302 url: "http://example.com/reference1.html".to_owned(),
1303 },
1304 iter::Reference {
1305 url: "http://example.com/reference2.html".to_owned(),
1306 }
1307 ]
1308 );
1309 assert_eq!(
1310 *wikipedia.client.url.lock().unwrap(),
1311 vec![
1312 "https://en.wikipedia.org/w/api.php".to_owned(),
1313 "https://en.wikipedia.org/w/api.php".to_owned(),
1314 ]
1315 );
1316 assert_eq!(
1317 *wikipedia.client.arguments.lock().unwrap(),
1318 vec![
1319 vec![
1320 ("prop".to_owned(), "extlinks".to_owned()),
1321 ("ellimit".to_owned(), "max".to_owned()),
1322 ("format".to_owned(), "json".to_owned()),
1323 ("action".to_owned(), "query".to_owned()),
1324 ("titles".to_owned(), "World".to_owned()),
1325 ("continue".to_owned(), "".to_owned())
1326 ],
1327 vec![
1328 ("prop".to_owned(), "extlinks".to_owned()),
1329 ("ellimit".to_owned(), "max".to_owned()),
1330 ("format".to_owned(), "json".to_owned()),
1331 ("action".to_owned(), "query".to_owned()),
1332 ("titles".to_owned(), "World".to_owned()),
1333 ("lol".to_owned(), "1".to_owned())
1334 ]
1335 ]
1336 );
1337 }
1338
1339 #[test]
1340 fn get_links() {
1341 let wikipedia = Wikipedia::<MockClient>::default();
1342 wikipedia.client.response.lock().unwrap().push("{\"continue\": {\"lol\":\"1\"},\"query\":{\"pages\":{\"a\":{\"links\":[{\"title\": \"Hello\"}]}}}}".to_owned());
1343 wikipedia.client.response.lock().unwrap().push(
1344 "{\"query\":{\"pages\":{\"a\":{\"links\":[{\"title\": \"World\"}]}}}}".to_owned(),
1345 );
1346 let page = wikipedia.page_from_title("World".to_owned());
1347 assert_eq!(
1348 page.get_links().unwrap().collect::<Vec<_>>(),
1349 vec![
1350 iter::Link {
1351 title: "Hello".to_owned(),
1352 },
1353 iter::Link {
1354 title: "World".to_owned(),
1355 }
1356 ]
1357 );
1358 assert_eq!(
1359 *wikipedia.client.url.lock().unwrap(),
1360 vec![
1361 "https://en.wikipedia.org/w/api.php".to_owned(),
1362 "https://en.wikipedia.org/w/api.php".to_owned(),
1363 ]
1364 );
1365 assert_eq!(
1366 *wikipedia.client.arguments.lock().unwrap(),
1367 vec![
1368 vec![
1369 ("prop".to_owned(), "links".to_owned()),
1370 ("plnamespace".to_owned(), "0".to_owned()),
1371 ("ellimit".to_owned(), "max".to_owned()),
1372 ("format".to_owned(), "json".to_owned()),
1373 ("action".to_owned(), "query".to_owned()),
1374 ("titles".to_owned(), "World".to_owned()),
1375 ("continue".to_owned(), "".to_owned()),
1376 ],
1377 vec![
1378 ("prop".to_owned(), "links".to_owned()),
1379 ("plnamespace".to_owned(), "0".to_owned()),
1380 ("ellimit".to_owned(), "max".to_owned()),
1381 ("format".to_owned(), "json".to_owned()),
1382 ("action".to_owned(), "query".to_owned()),
1383 ("titles".to_owned(), "World".to_owned()),
1384 ("lol".to_owned(), "1".to_owned()),
1385 ]
1386 ]
1387 );
1388 }
1389
1390 #[test]
1391 fn get_categories() {
1392 let wikipedia = Wikipedia::<MockClient>::default();
1393 wikipedia.client.response.lock().unwrap().push("{\"continue\": {\"lol\":\"1\"},\"query\":{\"pages\":{\"a\":{\"categories\":[{\"title\": \"Hello\"}]}}}}".to_owned());
1394 wikipedia.client.response.lock().unwrap().push(
1395 "{\"query\":{\"pages\":{\"a\":{\"categories\":[{\"title\": \"Category: World\"}]}}}}"
1396 .to_owned(),
1397 );
1398 let page = wikipedia.page_from_title("World".to_owned());
1399 assert_eq!(
1400 page.get_categories().unwrap().collect::<Vec<_>>(),
1401 vec![
1402 iter::Category {
1403 title: "Hello".to_owned(),
1404 },
1405 iter::Category {
1406 title: "World".to_owned(),
1407 }
1408 ]
1409 );
1410 assert_eq!(
1411 *wikipedia.client.url.lock().unwrap(),
1412 vec![
1413 "https://en.wikipedia.org/w/api.php".to_owned(),
1414 "https://en.wikipedia.org/w/api.php".to_owned(),
1415 ]
1416 );
1417 assert_eq!(
1418 *wikipedia.client.arguments.lock().unwrap(),
1419 vec![
1420 vec![
1421 ("prop".to_owned(), "categories".to_owned()),
1422 ("cllimit".to_owned(), "max".to_owned()),
1423 ("format".to_owned(), "json".to_owned()),
1424 ("action".to_owned(), "query".to_owned()),
1425 ("titles".to_owned(), "World".to_owned()),
1426 ("continue".to_owned(), "".to_owned()),
1427 ],
1428 vec![
1429 ("prop".to_owned(), "categories".to_owned()),
1430 ("cllimit".to_owned(), "max".to_owned()),
1431 ("format".to_owned(), "json".to_owned()),
1432 ("action".to_owned(), "query".to_owned()),
1433 ("titles".to_owned(), "World".to_owned()),
1434 ("lol".to_owned(), "1".to_owned()),
1435 ]
1436 ]
1437 );
1438 }
1439
1440 #[test]
1441 fn sections() {
1442 let wikipedia = Wikipedia::<MockClient>::default();
1443 wikipedia.client.response.lock().unwrap().push(
1444 "{\"parse\":{\"sections\":[{\"line\":\"hello\"}, {\"line\":\"world\"}]}}".to_owned(),
1445 );
1446 let page = wikipedia.page_from_pageid("123".to_owned());
1447 assert_eq!(
1448 page.get_sections().unwrap(),
1449 vec!["hello".to_owned(), "world".to_owned()]
1450 );
1451 assert_eq!(
1452 *wikipedia.client.url.lock().unwrap(),
1453 vec!["https://en.wikipedia.org/w/api.php".to_owned()]
1454 );
1455 assert_eq!(
1456 *wikipedia.client.arguments.lock().unwrap(),
1457 vec![vec![
1458 ("prop".to_owned(), "sections".to_owned()),
1459 ("format".to_owned(), "json".to_owned()),
1460 ("action".to_owned(), "parse".to_owned()),
1461 ("pageid".to_owned(), "123".to_owned())
1462 ]]
1463 );
1464 }
1465
1466 #[test]
1467 fn languages() {
1468 let wikipedia = Wikipedia::<MockClient>::default();
1469 wikipedia.client.response.lock().unwrap().push("{\"query\":{\"languages\":[{\"*\":\"hello\", \"code\":\"world\"}, {\"*\":\"foo\", \"code\":\"bar\"}]}}".to_owned());
1470 assert_eq!(
1471 wikipedia.get_languages().unwrap(),
1472 vec![
1473 ("world".to_owned(), "hello".to_owned()),
1474 ("bar".to_owned(), "foo".to_owned()),
1475 ]
1476 );
1477 assert_eq!(
1478 *wikipedia.client.url.lock().unwrap(),
1479 vec!["https://en.wikipedia.org/w/api.php".to_owned()]
1480 );
1481 assert_eq!(
1482 *wikipedia.client.arguments.lock().unwrap(),
1483 vec![vec![
1484 ("meta".to_owned(), "siteinfo".to_owned()),
1485 ("siprop".to_owned(), "languages".to_owned()),
1486 ("format".to_owned(), "json".to_owned()),
1487 ("action".to_owned(), "query".to_owned())
1488 ]]
1489 );
1490 }
1491}