use crate::media_wiki_error::MediaWikiError;
use crate::title::Title;
use crate::user::User;
use serde_json::Value;
use std::collections::HashMap;
/// Numeric identifier of a wiki namespace, as used to key the site-info
/// `query.namespaces` map. Signed because the MediaWiki API uses negative
/// IDs for some namespaces — TODO confirm which values occur in practice.
pub type NamespaceID = i64;
/// Read-only view of a MediaWiki API client: connection metadata plus
/// convenience helpers built on top of the cached site-info JSON.
pub trait MediaWikiApi {
    /// Base URL of the wiki's API endpoint.
    fn api_url(&self) -> &str;

    /// Cached site-info JSON (the full `query` document is expected under
    /// the top-level `"query"` key).
    fn get_site_info(&self) -> &Value;

    /// The user this client acts as.
    fn user(&self) -> &User;

    /// Caller-supplied user-agent fragment.
    fn user_agent(&self) -> &str;

    /// Optional `maxlag` setting, in seconds.
    fn maxlag(&self) -> &Option<u64>;

    /// Optional delay applied between edits — unit defined by the implementor.
    fn edit_delay(&self) -> &Option<u64>;

    /// Maximum number of retry attempts for a failed request.
    fn max_retry_attempts(&self) -> u64;

    /// Full user-agent string: the caller's fragment followed by this
    /// crate's name and version.
    fn user_agent_full(&self) -> String {
        let crate_name = env!("CARGO_PKG_NAME");
        let crate_version = env!("CARGO_PKG_VERSION");
        format!("{}; {}-rust/{}", self.user_agent(), crate_name, crate_version)
    }

    /// Looks up `query.<k1>.<k2>` in the cached site info. Yields
    /// `Value::Null` (via serde_json's indexing) when the path is absent.
    fn get_site_info_value<'a>(&'a self, k1: &str, k2: &str) -> &'a Value {
        &self.get_site_info()["query"][k1][k2]
    }

    /// Like [`MediaWikiApi::get_site_info_value`], but requires the value
    /// to be a JSON string.
    ///
    /// # Errors
    /// `MediaWikiError::String` when the path is missing or not a string.
    fn get_site_info_string<'a>(&'a self, k1: &str, k2: &str) -> Result<&'a str, MediaWikiError> {
        self.get_site_info_value(k1, k2).as_str().ok_or_else(|| {
            MediaWikiError::String(format!("No 'query.{}.{}' value in site info", k1, k2))
        })
    }

    /// Site-info record for one namespace (`query.namespaces.<id>`).
    fn get_namespace_info(&self, namespace_id: NamespaceID) -> &Value {
        self.get_site_info_value("namespaces", &namespace_id.to_string())
    }

    /// Namespace name from the `"canonical"` field, falling back to the
    /// `"*"` (local) field when no canonical name is present.
    fn get_canonical_namespace_name(&self, namespace_id: NamespaceID) -> Option<&str> {
        let ns = self.get_namespace_info(namespace_id);
        match ns["canonical"].as_str() {
            Some(name) => Some(name),
            None => ns["*"].as_str(),
        }
    }

    /// Namespace name from the `"*"` (local) field, falling back to the
    /// `"canonical"` field when no local name is present.
    fn get_local_namespace_name(&self, namespace_id: NamespaceID) -> Option<&str> {
        let ns = self.get_namespace_info(namespace_id);
        match ns["*"].as_str() {
            Some(name) => Some(name),
            None => ns["canonical"].as_str(),
        }
    }

    /// Copies a slice of borrowed key/value pairs into an owned parameter map.
    fn params_into(&self, params: &[(&str, &str)]) -> HashMap<String, String> {
        let mut map = HashMap::with_capacity(params.len());
        for (k, v) in params {
            map.insert(k.to_string(), v.to_string());
        }
        map
    }

    /// An empty parameter map.
    fn no_params(&self) -> HashMap<String, String> {
        HashMap::default()
    }

    /// Strips the wiki's Wikibase concept-base URI prefix from `uri`,
    /// leaving the bare entity identifier.
    ///
    /// # Errors
    /// Fails when the site info carries no `wikibase-conceptbaseuri`, or
    /// when `uri` does not start with it.
    fn extract_entity_from_uri(&self, uri: &str) -> Result<String, MediaWikiError> {
        let base = self.get_site_info_string("general", "wikibase-conceptbaseuri")?;
        uri.strip_prefix(base)
            .map(|entity| entity.to_string())
            .ok_or_else(|| From::from(format!("{} does not start with {}", uri, base)))
    }

    /// Collects entity IDs from a SPARQL result, one per binding of
    /// `variable_name`. Bindings that are not strings, or whose value does
    /// not start with the concept-base URI, are silently skipped.
    fn entities_from_sparql_result(
        &self,
        sparql_result: &Value,
        variable_name: &str,
    ) -> Vec<String> {
        sparql_result["results"]["bindings"]
            .as_array()
            .map(|bindings| {
                bindings
                    .iter()
                    .filter_map(|binding| binding[variable_name]["value"].as_str())
                    .filter_map(|url| self.extract_entity_from_uri(url).ok())
                    .collect()
            })
            .unwrap_or_default()
    }
}
pub fn json_merge(a: &mut Value, b: Value) {
match (a, b) {
(a @ &mut Value::Object(_), Value::Object(b)) => {
if let Some(a) = a.as_object_mut() {
for (k, v) in b {
json_merge(a.entry(k).or_insert(Value::Null), v);
}
}
}
(a @ &mut Value::Array(_), Value::Array(b)) => {
if let Some(a) = a.as_array_mut() {
for v in b {
a.push(v);
}
}
}
(a, b) => *a = b,
}
}
/// True when the request is a write operation: an HTTP POST whose
/// parameter map carries an edit `token`.
pub fn is_edit_query(params: &HashMap<String, String>, method: &str) -> bool {
    if method != "POST" {
        return false;
    }
    params.contains_key("token")
}
/// Inspects an API response for a maxlag error. On `error.code == "maxlag"`
/// returns the server-reported lag (`error.lag`), falling back to the
/// configured `maxlag_seconds`; otherwise `None`.
pub fn check_maxlag(v: &Value, maxlag_seconds: Option<u64>) -> Option<u64> {
    if v["error"]["code"].as_str() == Some("maxlag") {
        v["error"]["lag"].as_u64().or(maxlag_seconds)
    } else {
        None
    }
}
/// Adds a `maxlag` parameter to an edit query, growing it by the
/// accumulated lag of earlier retries. Non-edit queries (anything that is
/// not a POST carrying a `token`) and a `None` maxlag setting are left
/// untouched.
pub fn set_cumulative_maxlag_params(
    params: &mut HashMap<String, String>,
    method: &str,
    maxlag_seconds: Option<u64>,
    cumulative: u64,
) {
    // Only edit queries (POST with an edit token) get a maxlag parameter.
    let is_edit = method == "POST" && params.contains_key("token");
    if is_edit {
        if let Some(lag) = maxlag_seconds {
            params.insert("maxlag".to_string(), (cumulative + lag).to_string());
        }
    }
}
/// Percent-encodes `s` for safe inclusion in a URL, returning an owned
/// `String` (delegates to the `urlencoding` crate).
pub fn rawurlencode(s: &str) -> String {
    String::from(urlencoding::encode(s))
}
pub fn result_array_to_titles(data: &Value) -> Vec<Title> {
if let Some(obj) = data.as_object() {
obj.iter()
.flat_map(|(_k, v)| result_array_to_titles(v))
.collect()
} else if let Some(arr) = data.as_array() {
arr.iter().map(Title::new_from_api_result).collect()
} else {
vec![]
}
}