1use anyhow::{Context, Result};
2use etcetera::BaseStrategy;
3use serde::{Deserialize, Serialize};
4use std::fs;
5use std::path::{Path, PathBuf};
6
/// Top-level application configuration, typically persisted as YAML at the
/// path returned by [`Config::default_path`].
///
/// Every field carries `#[serde(default)]`, so a partial or empty config
/// file still deserializes successfully.
#[derive(Debug, Default, Serialize, Deserialize, Clone)]
pub struct Config {
    /// Subscribed feeds; may be empty.
    #[serde(default)]
    pub feeds: Vec<FeedEntry>,
    /// Local cache settings (retention, optional path override).
    #[serde(default)]
    pub cache: CacheConfig,
    /// Article-content extraction settings.
    #[serde(default)]
    pub content: ContentConfig,
    /// Terminal-UI settings.
    #[serde(default)]
    pub tui: TuiConfig,
}
18
/// One subscribed feed as stored in the config file.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct FeedEntry {
    /// Human-readable name; matched case-insensitively by the lookup
    /// helpers on [`Config`].
    pub name: String,
    /// Feed URL; treated as the feed's identity by `Config::add_feed`
    /// (matched exactly, case-sensitive).
    pub url: String,
    /// Free-form tags for grouping/filtering; may be empty.
    #[serde(default)]
    pub tags: Vec<String>,
    /// Per-feed override of the global content extractor; `None` means the
    /// global `ContentConfig::extractor` applies (not enforced here —
    /// presumably resolved by the fetch/render code; confirm in caller).
    /// Omitted from serialized output when `None`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub extractor: Option<ExtractorMethod>,
}
28
/// Settings for the local article cache.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct CacheConfig {
    /// Days to retain cached items; defaults to 90 when absent.
    #[serde(default = "CacheConfig::default_retention_days")]
    pub retention_days: i32,
    /// Optional override for the cache location; `None` means the
    /// application's default location is used (resolved elsewhere — not
    /// visible in this file). Omitted from serialized output when `None`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub path: Option<String>,
}
38
39impl Default for CacheConfig {
40 fn default() -> Self {
41 Self {
42 retention_days: 90,
43 path: None,
44 }
45 }
46}
47
impl CacheConfig {
    /// Serde default for `retention_days` (referenced by name in the
    /// `#[serde(default = "...")]` attribute on the struct).
    fn default_retention_days() -> i32 {
        90
    }
}
53
/// Strategy for obtaining an article's body text.
///
/// Serialized in `snake_case` (`readability` / `rss_content`).
#[derive(Debug, Default, Serialize, Deserialize, Clone, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ExtractorMethod {
    /// Default: extract readable content from the linked page
    /// (presumably a Readability-style algorithm — implemented elsewhere).
    #[default]
    Readability,
    /// Use the content embedded in the RSS/Atom entry itself.
    RssContent,
}
61
/// Global article-content settings (overridable per feed via
/// `FeedEntry::extractor`).
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ContentConfig {
    /// Default extraction strategy; `Readability` when absent.
    #[serde(default)]
    pub extractor: ExtractorMethod,
    /// Whether viewing an article marks it read; defaults to `true`.
    #[serde(default = "ContentConfig::default_auto_mark_read")]
    pub auto_mark_read: bool,
}
69
70impl Default for ContentConfig {
71 fn default() -> Self {
72 Self {
73 extractor: ExtractorMethod::default(),
74 auto_mark_read: true,
75 }
76 }
77}
78
impl ContentConfig {
    /// Serde default for `auto_mark_read` (referenced by name in the
    /// `#[serde(default = "...")]` attribute on the struct).
    fn default_auto_mark_read() -> bool {
        true
    }
}
84
/// Terminal-UI settings.
#[derive(Debug, Serialize, Deserialize, Clone, Default)]
pub struct TuiConfig {
    /// Auto-refresh interval; defaults to 0 via `u64::default()`.
    /// Presumably 0 means auto-refresh is disabled and the unit is
    /// seconds — confirm against the TUI event loop.
    #[serde(default)]
    pub auto_refresh_interval: u64,
}
92
93impl Config {
94 pub fn load(path: &Path) -> Result<Self> {
95 if !path.exists() {
96 return Ok(Config::default());
97 }
98 let content = fs::read_to_string(path)
99 .with_context(|| format!("Failed to read config: {}", path.display()))?;
100 let config: Config = serde_norway::from_str(&content)
101 .with_context(|| format!("Failed to parse config: {}", path.display()))?;
102 Ok(config)
103 }
104
105 pub fn save(&self, path: &Path) -> Result<()> {
106 if let Some(parent) = path.parent() {
107 fs::create_dir_all(parent).with_context(|| {
108 format!("Failed to create config directory: {}", parent.display())
109 })?;
110 }
111 let content = serde_norway::to_string(self).context("Failed to serialize config")?;
112 fs::write(path, content)
113 .with_context(|| format!("Failed to write config: {}", path.display()))?;
114 Ok(())
115 }
116
117 pub fn default_path() -> Result<PathBuf> {
118 let strategy =
119 etcetera::choose_base_strategy().context("Could not determine home directory")?;
120 Ok(strategy.config_dir().join("feed").join("config.yaml"))
121 }
122
123 pub fn resolve_config_path() -> Result<PathBuf> {
124 Self::default_path()
125 }
126
127 pub fn add_feed(&mut self, entry: FeedEntry) {
128 self.feeds.retain(|f| f.url != entry.url);
129 self.feeds.push(entry);
130 }
131
132 pub fn remove_feed(&mut self, target: &str) -> bool {
133 let target_lower = target.to_lowercase();
134 let before = self.feeds.len();
135 self.feeds
136 .retain(|f| f.name.to_lowercase() != target_lower && f.url != target);
137 self.feeds.len() < before
138 }
139
140 pub fn find_feed(&self, target: &str) -> Option<&FeedEntry> {
141 let target_lower = target.to_lowercase();
142 self.feeds
143 .iter()
144 .find(|f| f.name.to_lowercase() == target_lower || f.url == target)
145 }
146
147 pub fn feeds_by_tag(&self, tag: &str) -> Vec<&FeedEntry> {
148 let tag_lower = tag.to_lowercase();
149 self.feeds
150 .iter()
151 .filter(|f| f.tags.iter().any(|t| t.to_lowercase() == tag_lower))
152 .collect()
153 }
154
155 pub fn all_tags(&self) -> Vec<String> {
156 let mut tags: Vec<String> = self.feeds.iter().flat_map(|f| f.tags.clone()).collect();
157 tags.sort();
158 tags.dedup();
159 tags
160 }
161}