pub mod config;
pub mod metadata;
use self::config::AccountConfig;
use self::metadata::MinifluxMetadata;
use crate::feed_api::FeedHeaderMap;
use crate::models::{
self, ArticleID, Category, CategoryID, CategoryMapping, DirectLogin, Enclosure, FatArticle, FavIcon, Feed, FeedID, FeedMapping, FeedUpdateResult,
Headline, LoginData, NEWSFLASH_TOPLEVEL, PasswordLogin, PluginCapabilities, StreamConversionResult, SyncResult, TagID, TokenLogin, Url,
};
use crate::util::favicons::EXPIRES_AFTER_DAYS;
use crate::util::{self, html2text};
use crate::{
feed_api::{FeedApi, FeedApiError, FeedApiResult, Portal},
models::{Marked, Read},
};
use async_trait::async_trait;
use base64::Engine;
use base64::engine::general_purpose::STANDARD as base64_std;
use chrono::{DateTime, Duration, Utc};
use futures::future;
use miniflux_api::models::{Category as MinifluxCategory, Entry as MinifluxArticle, EntryStatus, Feed as MinifluxFeed, OrderBy, OrderDirection};
use miniflux_api::{ApiError as MinifluxApiError, MinifluxApi};
use reqwest::Client;
use reqwest::header::{HeaderMap, HeaderValue};
use std::collections::HashSet;
use std::convert::{From, TryInto};
use std::sync::Arc;
use tokio::sync::RwLock;
// Name of the fallback category created (or reused) when a feed is added
// without an explicit category; Miniflux requires every feed to belong to one.
const DEFAULT_CATEGORY: &str = "New Category";
impl From<MinifluxApiError> for FeedApiError {
    /// Translate errors from the `miniflux_api` crate into the
    /// backend-agnostic `FeedApiError` used throughout the feed API layer.
    fn from(err: MinifluxApiError) -> FeedApiError {
        match err {
            MinifluxApiError::Url(inner) => FeedApiError::Url(inner),
            MinifluxApiError::Json { source, json } => FeedApiError::Json { source, json },
            MinifluxApiError::Http(inner) => FeedApiError::Network(inner),
            MinifluxApiError::Miniflux(inner) => {
                let message = format!("Miniflux Error: {}", inner.error_message);
                FeedApiError::Api { message }
            }
            // Bind the variant instead of constructing a fresh one just to
            // stringify it; the Display output is identical.
            parse @ MinifluxApiError::Parse => FeedApiError::Api {
                message: parse.to_string(),
            },
        }
    }
}
/// Filter parameters forwarded to the Miniflux `get_entries` endpoint.
/// Every field is optional; `None` means "do not filter on this dimension".
pub struct ArticleQuery {
    // Restrict results to entries with this read/unread status.
    pub status: Option<EntryStatus>,
    // Only entries published before/after this point in time.
    // NOTE(review): presumably Unix timestamps, matching the Miniflux
    // `before`/`after` entry filters — confirm against miniflux_api docs.
    pub before: Option<i64>,
    pub after: Option<i64>,
    // Only entries with an id below/above the given entry id.
    pub before_entry_id: Option<i64>,
    pub after_entry_id: Option<i64>,
    // Restrict to starred (bookmarked) entries when `Some(true)`.
    pub starred: Option<bool>,
}
// Query preset: all unread entries, regardless of starred state.
const UNREAD_QUERY: ArticleQuery = ArticleQuery {
    status: Some(EntryStatus::Unread),
    before: None,
    after: None,
    before_entry_id: None,
    after_entry_id: None,
    starred: None,
};
// Query preset: all starred entries, regardless of read state.
const STARRED_QUERY: ArticleQuery = ArticleQuery {
    status: None,
    before: None,
    after: None,
    before_entry_id: None,
    after_entry_id: None,
    starred: Some(true),
};
/// Miniflux backend: implements the NewsFlash `FeedApi` against a
/// self-hosted Miniflux instance.
pub struct Miniflux {
    // API handle; `None` until login succeeds (see `FeedApi::login`).
    api: Option<MinifluxApi>,
    // Access point into local application state (article lookups, config).
    portal: Arc<Box<dyn Portal>>,
    // Cached login state reported by `is_logged_in`.
    logged_in: bool,
    // Persisted account settings (URL, username/password or token).
    config: AccountConfig,
}
impl Miniflux {
    /// Convert a batch of Miniflux categories into NewsFlash categories plus
    /// their mappings; the list position becomes the sort index.
    fn convert_category_vec(mut categories: Vec<MinifluxCategory>) -> (Vec<Category>, Vec<CategoryMapping>) {
        categories
            .drain(..)
            .enumerate()
            .map(|(i, c)| Miniflux::convert_category(c, Some(i as i32)))
            .unzip()
    }

    /// Convert a single Miniflux category. Miniflux categories are flat, so
    /// every mapping is parented directly under the NewsFlash top level.
    fn convert_category(category: MinifluxCategory, sort_index: Option<i32>) -> (Category, CategoryMapping) {
        let MinifluxCategory { id, user_id: _, title } = category;
        // NewsFlash ids are strings; reuse the numeric Miniflux id verbatim.
        let category_id = CategoryID::new(&id.to_string());
        let category = Category {
            category_id: category_id.clone(),
            label: title,
        };
        let category_mapping = CategoryMapping {
            parent_id: NEWSFLASH_TOPLEVEL.clone(),
            category_id,
            sort_index,
        };
        (category, category_mapping)
    }

    /// Convert a Miniflux feed into a NewsFlash `Feed`.
    /// Unparseable site/feed URLs degrade to `None` rather than failing.
    fn convert_feed(feed: MinifluxFeed) -> Feed {
        let MinifluxFeed {
            id,
            user_id: _,
            title,
            site_url,
            feed_url,
            rewrite_rules: _,
            scraper_rules: _,
            crawler: _,
            checked_at: _,
            etag_header: _,
            last_modified_header: _,
            parsing_error_count,
            parsing_error_message,
            category: _,
            icon: _,
        } = feed;
        Feed {
            feed_id: FeedID::new(&id.to_string()),
            label: title,
            website: Url::parse(&site_url).ok(),
            feed_url: Url::parse(&feed_url).ok(),
            icon_url: None,
            error_count: parsing_error_count as i32,
            // Empty message from Miniflux means "no error".
            error_message: if !parsing_error_message.is_empty() {
                Some(parsing_error_message)
            } else {
                None
            },
        }
    }

    /// Convert a batch of Miniflux feeds, producing feed→category mappings as
    /// a side product (list position becomes the sort index).
    fn convert_feed_vec(mut feeds: Vec<MinifluxFeed>) -> (Vec<Feed>, Vec<FeedMapping>) {
        let mut mappings: Vec<FeedMapping> = Vec::new();
        let feeds = feeds
            .drain(..)
            .enumerate()
            .map(|(i, f)| {
                mappings.push(FeedMapping {
                    feed_id: FeedID::new(&f.id.to_string()),
                    category_id: CategoryID::new(&f.category.id.to_string()),
                    sort_index: Some(i as i32),
                });
                Miniflux::convert_feed(f)
            })
            .collect();
        (feeds, mappings)
    }

    /// Convert one Miniflux entry into a `FatArticle` plus its enclosures.
    ///
    /// The `portal` is consulted to skip the (expensive) html→text conversion
    /// for articles that already exist in the local database.
    fn convert_entry(entry: MinifluxArticle, portal: Arc<Box<dyn Portal>>) -> (FatArticle, Vec<Enclosure>) {
        let MinifluxArticle {
            id,
            user_id: _,
            feed_id,
            title,
            url,
            comments_url: _,
            author,
            content,
            hash: _,
            published_at,
            created_at: _,
            changed_at: _,
            status,
            starred,
            feed: _,
            reading_time: _,
            enclosures,
        } = entry;
        let article_id = ArticleID::new(&id.to_string());
        let article_exists_locally = portal.get_article_exists(&article_id).unwrap_or(false);
        // Only generate plain text (and thus summary) for new articles.
        let plain_text = if article_exists_locally {
            None
        } else {
            Some(html2text::html2text(&content))
        };
        let summary = plain_text.as_deref().map(util::html2text::text2summary);
        // Prefer an image enclosure as thumbnail (by mime type, or by file
        // extension as a fallback for missing/odd mime types)…
        let mut thumbnail_url = enclosures.iter().find_map(|e| {
            let is_image_type = e.mime_type.starts_with("image/");
            let is_image_href = e.url.ends_with(".jpeg") || e.url.ends_with(".jpg") || e.url.ends_with(".png");
            if is_image_type || is_image_href { Some(e.url.clone()) } else { None }
        });
        // …otherwise try to extract one from the article HTML.
        if thumbnail_url.is_none() {
            thumbnail_url = crate::util::thumbnail::extract_thumbnail(&content);
        }
        // Convert enclosures, silently dropping those with unparseable URLs.
        let mut enclosures = enclosures
            .into_iter()
            .filter_map(|miniflux_enclosure| {
                Url::parse(&miniflux_enclosure.url).ok().map(|url| Enclosure {
                    article_id: article_id.clone(),
                    url,
                    mime_type: Some(miniflux_enclosure.mime_type),
                    title: None,
                    position: None,
                    summary: None,
                    thumbnail_url: None,
                    // Miniflux reports 0 for unknown sizes.
                    filesize: if miniflux_enclosure.size > 0 {
                        Some(miniflux_enclosure.size as i32)
                    } else {
                        None
                    },
                    width: None,
                    height: None,
                    duration: None,
                    framerate: None,
                    alternative: None,
                    is_default: false,
                })
            })
            .collect::<Vec<_>>();
        // If the entry carries both video and image enclosures, treat the
        // first image as the video's thumbnail and drop standalone images.
        let has_video = enclosures.iter().any(Enclosure::is_video);
        let first_image_url = enclosures
            .iter()
            .find(|enclosure| enclosure.is_image())
            .map(|enclosure| enclosure.url.to_string());
        if let (true, Some(first_image_url)) = (has_video, first_image_url) {
            tracing::debug!(?first_image_url, "has video + first image url");
            enclosures = enclosures
                .into_iter()
                .filter_map(|mut enclosure| {
                    if enclosure.is_video() {
                        enclosure.thumbnail_url = Some(first_image_url.clone());
                        Some(enclosure)
                    } else if enclosure.is_image() {
                        None
                    } else {
                        Some(enclosure)
                    }
                })
                .collect();
        }
        let article = FatArticle {
            article_id,
            title: Some(title),
            author: if author.is_empty() { None } else { Some(author) },
            feed_id: FeedID::new(&feed_id.to_string()),
            url: Url::parse(&url).ok(),
            // Fall back to "now" when the publish date fails to parse.
            date: match DateTime::parse_from_rfc3339(&published_at) {
                Ok(date) => date.with_timezone(&Utc),
                Err(_) => Utc::now(),
            },
            synced: Utc::now(),
            updated: None,
            summary,
            html: Some(content),
            direction: None,
            // Unknown/unparseable status conservatively counts as unread.
            unread: match status.as_str().try_into() {
                Ok(status) => match status {
                    EntryStatus::Read => models::Read::Read,
                    _ => models::Read::Unread,
                },
                Err(_) => models::Read::Unread,
            },
            marked: if starred { models::Marked::Marked } else { models::Marked::Unmarked },
            scraped_content: None,
            plain_text,
            thumbnail_url,
        };
        (article, enclosures)
    }

    /// Convert a batch of entries concurrently (one tokio task per entry;
    /// the html→text step dominates). Enclosures are accumulated in a shared
    /// vector, so their order across articles is non-deterministic.
    async fn convert_entry_vec(entries: Vec<MinifluxArticle>, portal: Arc<Box<dyn Portal>>) -> StreamConversionResult {
        let enclosures: Arc<RwLock<Vec<Enclosure>>> = Arc::new(RwLock::new(Vec::new()));
        let tasks = entries
            .into_iter()
            .map(|e| {
                let portal = portal.clone();
                let enclosures = enclosures.clone();
                tokio::spawn(async move {
                    let (article, mut converted_enclousres) = Self::convert_entry(e, portal);
                    enclosures.write().await.append(&mut converted_enclousres);
                    article
                })
            })
            .collect::<Vec<_>>();
        // Panicked tasks are silently dropped (`res.ok()`).
        let articles = future::join_all(tasks).await.into_iter().filter_map(|res| res.ok()).collect();
        StreamConversionResult {
            articles,
            headlines: Vec::new(),
            taggings: Vec::new(),
            enclosures: Arc::into_inner(enclosures).map(|e| e.into_inner()).unwrap_or_default(),
        }
    }

    /// Fetch and convert all entries matching `query`, paging through the
    /// server in batches of 100 (newest first) until a short page signals the
    /// end of results.
    ///
    /// # Errors
    /// `FeedApiError::Login` when no API session exists; otherwise API errors
    /// are propagated.
    pub async fn get_articles(&self, query: ArticleQuery, client: &Client) -> FeedApiResult<StreamConversionResult> {
        if let Some(api) = &self.api {
            let batch_size: i64 = 100;
            let mut offset: Option<i64> = None;
            let mut articles: Vec<FatArticle> = Vec::new();
            let mut enclosures: Vec<Enclosure> = Vec::new();
            loop {
                let entries = api
                    .get_entries(
                        query.status,
                        offset,
                        Some(batch_size),
                        Some(OrderBy::PublishedAt),
                        Some(OrderDirection::Desc),
                        query.before,
                        query.after,
                        query.before_entry_id,
                        query.after_entry_id,
                        query.starred,
                        client,
                    )
                    .await?;
                let entry_count = entries.len();
                let mut converted = Miniflux::convert_entry_vec(entries, self.portal.clone()).await;
                articles.append(&mut converted.articles);
                enclosures.append(&mut converted.enclosures);
                // A partial page means the server has no further entries.
                if entry_count < batch_size as usize {
                    break;
                }
                offset = match offset {
                    Some(offset) => Some(offset + batch_size),
                    None => Some(batch_size),
                };
            }
            return Ok(StreamConversionResult {
                articles,
                headlines: Vec::new(),
                taggings: Vec::new(),
                enclosures,
            });
        }
        Err(FeedApiError::Login)
    }

    /// Parse many article ids; unparseable ids are logged and skipped.
    fn article_ids_to_i64(ids: &[ArticleID]) -> Vec<i64> {
        ids.iter().filter_map(|id| Self::article_id_to_i64(id).ok()).collect()
    }

    /// Parse a NewsFlash article id back into the numeric Miniflux entry id.
    fn article_id_to_i64(id: &ArticleID) -> Result<i64, FeedApiError> {
        id.as_str().parse::<i64>().map_err(|_| {
            tracing::error!(%id, "Failed to parse ID");
            FeedApiError::Unknown
        })
    }

    /// Parse a NewsFlash feed id back into the numeric Miniflux feed id.
    fn feed_id_to_i64(id: &FeedID) -> Result<i64, FeedApiError> {
        id.as_str().parse::<i64>().map_err(|_| {
            tracing::error!(%id, "Failed to parse ID");
            FeedApiError::Unknown
        })
    }

    /// Parse a NewsFlash category id back into the numeric Miniflux id.
    fn category_id_to_i64(id: &CategoryID) -> Result<i64, FeedApiError> {
        id.as_str().parse::<i64>().map_err(|_| {
            tracing::error!(%id, "Failed to parse ID");
            FeedApiError::Unknown
        })
    }
}
#[async_trait]
impl FeedApi for Miniflux {
fn features(&self) -> FeedApiResult<PluginCapabilities> {
Ok(PluginCapabilities::ADD_REMOVE_FEEDS | PluginCapabilities::SUPPORT_CATEGORIES | PluginCapabilities::MODIFY_CATEGORIES)
}
fn has_user_configured(&self) -> FeedApiResult<bool> {
Ok(self.api.is_some())
}
async fn is_reachable(&self, client: &Client) -> FeedApiResult<bool> {
if let Some(api) = &self.api {
api.healthcheck(client).await?;
Ok(true)
} else {
Err(FeedApiError::Login)
}
}
async fn is_logged_in(&self, _client: &Client) -> FeedApiResult<bool> {
Ok(self.logged_in)
}
async fn user_name(&self) -> Option<String> {
self.config.get_user_name()
}
async fn get_login_data(&self) -> Option<LoginData> {
if let Ok(true) = self.has_user_configured() {
if let (Some(username), Some(password)) = (self.config.get_user_name(), self.config.get_password()) {
return Some(LoginData::Direct(DirectLogin::Password(PasswordLogin {
id: MinifluxMetadata::get_id(),
url: self.config.get_url(),
user: username,
password,
basic_auth: None, })));
} else if let Some(token) = self.config.get_token() {
return Some(LoginData::Direct(DirectLogin::Token(TokenLogin {
id: MinifluxMetadata::get_id(),
url: self.config.get_url(),
token,
basic_auth: None,
})));
}
}
None
}
async fn login(&mut self, data: LoginData, client: &Client) -> FeedApiResult<()> {
if let LoginData::Direct(simple_login_data) = data {
let api = match simple_login_data {
DirectLogin::Password(password_data) => {
if let Some(url_string) = password_data.url.clone() {
self.config.set_url(&url_string);
self.config.set_password(&password_data.password);
self.config.set_user_name(&password_data.user);
self.config.clear_token();
let url = Url::parse(&url_string)?;
MinifluxApi::new(&url, password_data.user.clone(), password_data.password)
} else {
tracing::error!("No URL set");
return Err(FeedApiError::Login);
}
}
DirectLogin::Token(token_data) => {
if let Some(url_string) = token_data.url.clone() {
self.config.set_url(&url_string);
self.config.set_token(&token_data.token);
self.config.clear_user_name();
self.config.clear_password();
let url = Url::parse(&url_string)?;
MinifluxApi::new_from_token(&url, token_data.token)
} else {
tracing::error!("No URL set");
return Err(FeedApiError::Login);
}
}
};
if self.config.get_user_name().is_none() {
let user = api.get_current_user(client).await?;
self.config.set_user_name(&user.username);
}
self.config.write()?;
self.api = Some(api);
self.logged_in = true;
return Ok(());
}
self.logged_in = false;
self.api = None;
Err(FeedApiError::Login)
}
async fn logout(&mut self, _client: &Client) -> FeedApiResult<()> {
self.config.delete()?;
Ok(())
}
async fn initial_sync(&self, client: &Client, _custom_header: FeedHeaderMap) -> FeedApiResult<SyncResult> {
if let Some(api) = &self.api {
let categories = api.get_categories(client);
let feeds = api.get_feeds(client);
let starred = self.get_articles(STARRED_QUERY, client);
let unread = self.get_articles(UNREAD_QUERY, client);
let (categories, feeds, starred, unread) = futures::join!(categories, feeds, starred, unread);
let (categories, category_mappings) = Miniflux::convert_category_vec(categories?);
let (feeds, feed_mappings) = Miniflux::convert_feed_vec(feeds?);
let mut starred = starred?;
let mut unread = unread?;
let mut articles: Vec<FatArticle> = Vec::new();
articles.append(&mut starred.articles);
articles.append(&mut unread.articles);
let mut enclosures: Vec<Enclosure> = Vec::new();
enclosures.append(&mut starred.enclosures);
enclosures.append(&mut unread.enclosures);
let entries = api
.get_entries(
Some(EntryStatus::Read),
None,
Some(100),
Some(OrderBy::PublishedAt),
Some(OrderDirection::Desc),
None,
None,
None,
None,
None,
client,
)
.await?;
let mut read = Miniflux::convert_entry_vec(entries, self.portal.clone()).await;
articles.append(&mut read.articles);
enclosures.append(&mut read.enclosures);
return Ok(SyncResult {
feeds: util::vec_to_option(feeds),
categories: util::vec_to_option(categories),
feed_mappings: util::vec_to_option(feed_mappings),
category_mappings: util::vec_to_option(category_mappings),
tags: None,
taggings: None,
headlines: None,
articles: util::vec_to_option(articles),
enclosures: util::vec_to_option(enclosures),
});
}
Err(FeedApiError::Login)
}
async fn sync(&self, client: &Client, _custom_header: FeedHeaderMap) -> FeedApiResult<SyncResult> {
if let Some(api) = &self.api {
let max_count = self.portal.get_config().read().await.get_sync_amount();
let categories = api.get_categories(client);
let feeds = api.get_feeds(client);
let unread = self.get_articles(UNREAD_QUERY, client);
let starred = self.get_articles(STARRED_QUERY, client);
let recent = api.get_entries(
Some(EntryStatus::Read),
None,
Some(i64::from(max_count)),
Some(OrderBy::PublishedAt),
Some(OrderDirection::Desc),
None,
None,
None,
None,
None,
client,
);
let (categories, feeds, starred, unread, recent) = futures::join!(categories, feeds, starred, unread, recent);
let (categories, category_mappings) = Miniflux::convert_category_vec(categories?);
let (feeds, feed_mappings) = Miniflux::convert_feed_vec(feeds?);
let mut recent = Miniflux::convert_entry_vec(recent?, self.portal.clone()).await;
let mut starred = starred?;
let mut unread = unread?;
let remote_unread_ids: HashSet<ArticleID> = unread.articles.iter().map(|a| &a.article_id).cloned().collect();
let remote_marked_ids: HashSet<ArticleID> = starred.articles.iter().map(|a| &a.article_id).cloned().collect();
let mut articles: Vec<FatArticle> = Vec::new();
articles.append(&mut unread.articles);
articles.append(&mut starred.articles);
articles.append(&mut recent.articles);
let mut enclosures: Vec<Enclosure> = Vec::new();
enclosures.append(&mut unread.enclosures);
enclosures.append(&mut starred.enclosures);
enclosures.append(&mut recent.enclosures);
let mut headlines: Vec<Headline> = Vec::new();
let local_unread_ids = self.portal.get_article_ids_unread_all()?;
let local_marked_ids = self.portal.get_article_ids_marked_all()?;
let local_unread_ids: HashSet<ArticleID> = local_unread_ids.into_iter().collect();
let local_marked_ids: HashSet<ArticleID> = local_marked_ids.into_iter().collect();
let mut should_mark_read_headlines = local_unread_ids
.difference(&remote_unread_ids)
.map(|id| Headline {
article_id: ArticleID::new(&id.to_string()),
unread: Read::Read,
marked: if remote_marked_ids.contains(id) {
Marked::Marked
} else {
Marked::Unmarked
},
})
.collect();
headlines.append(&mut should_mark_read_headlines);
let mut missing_unmarked_headlines = local_marked_ids
.difference(&remote_marked_ids)
.map(|id| Headline {
article_id: ArticleID::new(&id.to_string()),
marked: Marked::Unmarked,
unread: if remote_unread_ids.contains(id) { Read::Unread } else { Read::Read },
})
.collect();
headlines.append(&mut missing_unmarked_headlines);
Ok(SyncResult {
feeds: util::vec_to_option(feeds),
categories: util::vec_to_option(categories),
feed_mappings: util::vec_to_option(feed_mappings),
category_mappings: util::vec_to_option(category_mappings),
tags: None,
taggings: None,
headlines: Some(headlines),
articles: util::vec_to_option(articles),
enclosures: util::vec_to_option(enclosures),
})
} else {
Err(FeedApiError::Login)
}
}
async fn fetch_feed(&self, feed_id: &FeedID, client: &Client, _custom_header: HeaderMap<HeaderValue>) -> FeedApiResult<FeedUpdateResult> {
if let Some(api) = &self.api {
let miniflux_feed_id = Self::feed_id_to_i64(feed_id)?;
let miniflux_feed = api.get_feed(miniflux_feed_id, client).await?;
let feed = Miniflux::convert_feed(miniflux_feed);
let entries = api
.get_feed_entries(miniflux_feed_id, None, None, None, None, None, None, None, None, None, None, client)
.await?;
let result = Miniflux::convert_entry_vec(entries, self.portal.clone()).await;
Ok(FeedUpdateResult {
feed: Some(feed),
taggings: None,
articles: util::vec_to_option(result.articles),
enclosures: util::vec_to_option(result.enclosures),
})
} else {
Err(FeedApiError::Login)
}
}
async fn set_article_read(&self, articles: &[ArticleID], read: models::Read, client: &Client) -> FeedApiResult<()> {
if articles.is_empty() {
Ok(())
} else if let Some(api) = &self.api {
let entries = Miniflux::article_ids_to_i64(articles);
let status = match read {
models::Read::Read => EntryStatus::Read,
models::Read::Unread => EntryStatus::Unread,
};
api.update_entries_status(entries, status, client).await?;
return Ok(());
} else {
Err(FeedApiError::Login)
}
}
async fn set_article_marked(&self, articles: &[ArticleID], _marked: models::Marked, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
for article in articles {
if let Ok(entry_id) = article.as_str().parse::<i64>() {
api.toggle_bookmark(entry_id, client).await?;
}
}
return Ok(());
}
Err(FeedApiError::Login)
}
async fn set_feed_read(&self, _feeds: &[FeedID], articles: &[ArticleID], client: &Client) -> FeedApiResult<()> {
self.set_article_read(articles, Read::Read, client).await
}
async fn set_category_read(&self, _categories: &[CategoryID], articles: &[ArticleID], client: &Client) -> FeedApiResult<()> {
self.set_article_read(articles, Read::Read, client).await
}
async fn set_tag_read(&self, _tags: &[TagID], _articles: &[ArticleID], _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
async fn set_all_read(&self, articles: &[ArticleID], client: &Client) -> FeedApiResult<()> {
self.set_article_read(articles, Read::Read, client).await
}
async fn add_feed(
&self,
url: &Url,
title: Option<String>,
category_id: Option<CategoryID>,
client: &Client,
) -> FeedApiResult<(Feed, Option<Category>)> {
if let Some(api) = &self.api {
let category_id = match category_id {
Some(category_id) => Self::category_id_to_i64(&category_id)?,
None => {
tracing::info!("Creating empty category for feed");
match api.create_category(DEFAULT_CATEGORY, client).await {
Ok(category) => category.id,
Err(_) => {
tracing::warn!("Creating empty category failed");
tracing::info!("Checking if 'New Category' already exists");
let categories = api.get_categories(client).await?;
match categories.iter().find(|c| c.title == DEFAULT_CATEGORY) {
Some(new_category) => new_category.id,
None => match categories.first() {
Some(first_category) => first_category.id,
None => {
let msg = "Was not able to create or find cateogry to add feed into";
tracing::error!("{msg}");
return Err(FeedApiError::Api { message: msg.into() });
}
},
}
}
}
}
};
let feed_id = api.create_feed(url, category_id, client).await?;
if let Some(title) = title {
api.update_feed(feed_id, Some(&title), None, None, None, None, None, None, client).await?;
}
let feed = api.get_feed(feed_id, client).await?;
let category = api
.get_categories(client)
.await?
.iter()
.find(|c| c.id == category_id)
.map(|c| Miniflux::convert_category(c.clone(), None))
.map(|(c, _m)| c);
return Ok((Miniflux::convert_feed(feed), category));
}
Err(FeedApiError::Login)
}
async fn remove_feed(&self, id: &FeedID, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
let feed_id = Self::feed_id_to_i64(id)?;
api.delete_feed(feed_id, client).await?;
return Ok(());
}
Err(FeedApiError::Login)
}
async fn move_feed(&self, feed_id: &FeedID, _from: &CategoryID, to: &CategoryID, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
let category_id = Self::category_id_to_i64(to)?;
let miniflux_feed_id = Self::feed_id_to_i64(feed_id)?;
api.update_feed(miniflux_feed_id, None, Some(category_id), None, None, None, None, None, client)
.await?;
return Ok(());
}
Err(FeedApiError::Login)
}
async fn rename_feed(&self, feed_id: &FeedID, new_title: &str, client: &Client) -> FeedApiResult<FeedID> {
if let Some(api) = &self.api {
let miniflux_feed_id = Self::feed_id_to_i64(feed_id)?;
api.update_feed(miniflux_feed_id, Some(new_title), None, None, None, None, None, None, client)
.await?;
return Ok(feed_id.clone());
}
Err(FeedApiError::Login)
}
async fn edit_feed_url(&self, _feed_id: &FeedID, _new_url: &str, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
async fn add_category<'a>(&self, title: &str, _parent: Option<&'a CategoryID>, client: &Client) -> FeedApiResult<CategoryID> {
if let Some(api) = &self.api {
let category = api.create_category(title, client).await?;
return Ok(CategoryID::new(&category.id.to_string()));
}
Err(FeedApiError::Login)
}
async fn remove_category(&self, id: &CategoryID, _remove_children: bool, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
let miniflux_id = Self::category_id_to_i64(id)?;
api.delete_category(miniflux_id, client).await?;
return Ok(());
}
Err(FeedApiError::Login)
}
async fn rename_category(&self, id: &CategoryID, new_title: &str, client: &Client) -> FeedApiResult<CategoryID> {
if let Some(api) = &self.api {
let miniflux_id = Self::category_id_to_i64(id)?;
api.update_category(miniflux_id, new_title, client).await?;
return Ok(id.clone());
}
Err(FeedApiError::Login)
}
async fn move_category(&self, _id: &CategoryID, _parent: &CategoryID, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
async fn import_opml(&self, opml: &str, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
api.import_opml(opml, client).await?;
}
Err(FeedApiError::Login)
}
async fn add_tag(&self, _title: &str, _client: &Client) -> FeedApiResult<TagID> {
Err(FeedApiError::Unsupported)
}
async fn remove_tag(&self, _id: &TagID, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
async fn rename_tag(&self, _id: &TagID, _new_title: &str, _client: &Client) -> FeedApiResult<TagID> {
Err(FeedApiError::Unsupported)
}
async fn tag_article(&self, _article_id: &ArticleID, _tag_id: &TagID, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
async fn untag_article(&self, _article_id: &ArticleID, _tag_id: &TagID, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
async fn get_favicon(&self, feed_id: &FeedID, client: &Client, _custom_header: HeaderMap<HeaderValue>) -> FeedApiResult<FavIcon> {
if let Some(api) = &self.api {
let miniflux_feed_id = Self::feed_id_to_i64(feed_id)?;
let favicon = api.get_feed_icon(miniflux_feed_id, client).await?;
if let Some(start) = favicon.data.find(',') {
let data = base64_std.decode(&favicon.data[start + 1..]).map_err(|_| FeedApiError::Encryption)?;
let favicon = FavIcon {
feed_id: feed_id.clone(),
expires: Utc::now() + Duration::try_days(EXPIRES_AFTER_DAYS).unwrap(),
format: Some(favicon.mime_type),
etag: None,
source_url: None,
data: Some(data),
};
return Ok(favicon);
}
}
Err(FeedApiError::Login)
}
}