1use comments_docsource::CommentDocumentation;
2use options_docsource::{OptionDocumentation, OptionsDatabaseType};
3use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
4use std::path::PathBuf;
5use thiserror::Error;
6use xml_docsource::XmlFuncDocumentation;
7
8pub mod comments_docsource;
9pub mod nixpkgs_tree_docsource;
10pub mod options_docsource;
11pub mod xml_docsource;
12
13pub trait Cache
14where
15 Self: Sized + DocSource + serde::Serialize,
16{
17 fn load<'a>(content: &'a [u8]) -> Result<Self, Errors>
19 where
20 Self: serde::Deserialize<'a>,
21 {
22 Ok(bincode::deserialize(content)?)
23 }
24 fn save(&self, filename: &PathBuf) -> Result<(), Errors> {
26 let x = bincode::serialize(self)?;
27 std::fs::write(filename, x)?;
28 Ok(())
29 }
30}
31
32#[derive(Error, Debug)]
33pub enum Errors {
34 #[error("IO Error for file {}: {}", .filename, .err)]
35 FileIo {
36 filename: String,
37 err: std::io::Error,
38 },
39 #[error("Failed to perform IO on a cache file")]
40 CacheFileIo(#[from] std::io::Error),
41 #[error("Failed to serialize/deserialize cache(bincode)")]
42 Bincode(#[from] bincode::Error),
43 #[error("Failed to serialize/deserialize cache(serde_json)")]
44 SerdeJson(#[from] serde_json::Error),
45 #[error("XML parsing error for file {}: {}", .filename, .err)]
46 XmlParse {
47 filename: String,
48 err: roxmltree::Error,
49 },
50}
51
/// A single search hit, tagged by the kind of documentation source that
/// produced it.
#[derive(Debug, PartialEq, Eq)]
pub enum DocEntry {
    /// An option, together with the database (NixOS or HomeManager) it came from.
    OptionDoc(OptionsDatabaseType, OptionDocumentation),
    /// Documentation extracted from source comments.
    CommentDoc(CommentDocumentation),
    /// A function documented in the XML manual.
    XmlFuncDoc(XmlFuncDocumentation),
    /// A bare attribute path from the Nixpkgs tree; the String is both the
    /// name and the entire rendered output.
    NixpkgsTreeDoc(String),
}
59
60impl DocEntry {
61 pub fn name(&self) -> String {
62 match self {
63 DocEntry::OptionDoc(_, x) => x.name(),
64 DocEntry::CommentDoc(x) => x.name(),
65 DocEntry::XmlFuncDoc(x) => x.name(),
66 DocEntry::NixpkgsTreeDoc(x) => x.clone(),
67 }
68 }
69 pub fn pretty_printed(&self) -> String {
70 match self {
71 DocEntry::OptionDoc(_, x) => x.pretty_printed(),
72 DocEntry::CommentDoc(x) => x.pretty_printed(),
73 DocEntry::XmlFuncDoc(x) => x.pretty_printed(),
74 DocEntry::NixpkgsTreeDoc(x) => x.clone(),
75 }
76 }
77 pub fn source(&self) -> &str {
78 match self {
79 DocEntry::OptionDoc(typ, _) => match typ {
80 OptionsDatabaseType::NixOS => "NixOS Options",
81 OptionsDatabaseType::HomeManager => "HomeManager Options",
82 },
83 DocEntry::CommentDoc(_) => "Nixpkgs Comments",
84 DocEntry::XmlFuncDoc(_) => "Nixpkgs Documentation",
85 DocEntry::NixpkgsTreeDoc(_) => "Nixpkgs Tree",
86 }
87 }
88}
89
/// A queryable provider of documentation entries.
pub trait DocSource {
    /// Every key this source can match a query against.
    fn all_keys(&self) -> Vec<&str>;
    /// Entries matching `query`.
    fn search(&self, query: &Lowercase) -> Vec<DocEntry>;
    /// Entries matching `query` with looser criteria than [`DocSource::search`].
    fn search_liberal(&self, query: &Lowercase) -> Vec<DocEntry>;

    /// Refreshes this source's data.
    // NOTE(review): the bool presumably signals whether anything changed —
    // confirm against the implementors in the submodules.
    fn update(&mut self) -> Result<bool, Errors>;
}
98
/// Combines several [`DocSource`]s and fans queries out to all of them.
#[derive(Default)]
pub struct AggregateDocSource {
    // Boxed so heterogeneous source types can live in one Vec;
    // `Sync` is needed for rayon's parallel iteration over the sources.
    sources: Vec<Box<dyn DocSource + Sync>>,
}
103
impl AggregateDocSource {
    /// Registers another source to be included in subsequent queries.
    pub fn add_source(&mut self, source: Box<dyn DocSource + Sync>) {
        self.sources.push(source)
    }
}
109
110impl DocSource for AggregateDocSource {
111 fn all_keys(&self) -> Vec<&str> {
112 self.sources
113 .par_iter()
114 .flat_map(|source| source.all_keys())
115 .collect()
116 }
117 fn search(&self, query: &Lowercase) -> Vec<DocEntry> {
118 self.sources
119 .par_iter()
120 .flat_map(|source| source.search(query))
121 .collect()
122 }
123 fn search_liberal(&self, query: &Lowercase) -> Vec<DocEntry> {
124 self.sources
125 .par_iter()
126 .flat_map(|source| source.search_liberal(query))
127 .collect()
128 }
129 fn update(&mut self) -> Result<bool, Errors> {
130 unimplemented!();
131 }
132}
133
/// A query needle whose bytes are expected to already be ASCII-lowercase;
/// the match helpers below lowercase only the haystack side.
pub struct Lowercase<'a>(pub &'a [u8]);

/// Returns true when `s` starts with `prefix`, comparing ASCII
/// case-insensitively (each haystack byte is lowercased before comparison).
///
/// An empty prefix matches any haystack.
pub(crate) fn starts_with_insensitive_ascii(s: &[u8], prefix: &Lowercase) -> bool {
    let prefix = prefix.0;

    // `zip` stops at the shorter sequence, so the explicit length check is
    // what guarantees the whole prefix was compared. The iterator form is
    // bounds-check-free, making the previous `get_unchecked` unnecessary.
    s.len() >= prefix.len()
        && s.iter()
            .zip(prefix)
            .all(|(hay, pat)| hay.to_ascii_lowercase() == *pat)
}

/// Returns true when `s` contains `inner` anywhere, comparing ASCII
/// case-insensitively (each haystack byte is lowercased before comparison).
///
/// An empty needle matches any haystack.
pub(crate) fn contains_insensitive_ascii(s: &[u8], inner: &Lowercase) -> bool {
    let inner = inner.0;

    // The empty needle trivially matches; this also keeps `windows` below
    // away from its zero-length panic path.
    if inner.is_empty() {
        return true;
    }
    if s.len() < inner.len() {
        return false;
    }

    s.windows(inner.len()).any(|win| {
        win.iter()
            .zip(inner)
            .all(|(hay, pat)| hay.to_ascii_lowercase() == *pat)
    })
}
172
#[test]
fn test_starts_with_insensitive_ascii() {
    // `assert!` / `assert!(!…)` instead of `assert_eq!(…, true/false)`
    // (clippy::bool_assert_comparison).
    // Prefix matches case-insensitively.
    assert!(starts_with_insensitive_ascii(
        "This is a string".as_bytes(),
        &Lowercase(b"this ")
    ));
    // Prefix equal to the whole haystack.
    assert!(starts_with_insensitive_ascii(
        "abc".as_bytes(),
        &Lowercase(b"abc")
    ));
    // Mismatching first byte.
    assert!(!starts_with_insensitive_ascii(
        "This is a string".as_bytes(),
        &Lowercase(b"x")
    ));
    // Haystack shorter than the prefix can never match.
    assert!(!starts_with_insensitive_ascii(
        "thi".as_bytes(),
        &Lowercase(b"this ")
    ));
}
192
#[test]
fn test_contains_insensitive_ascii() {
    // `assert!` / `assert!(!…)` instead of `assert_eq!(…, true/false)`
    // (clippy::bool_assert_comparison).
    // Needle in the middle.
    assert!(contains_insensitive_ascii(
        "abc".as_bytes(),
        &Lowercase(b"b")
    ));
    // Needle equal to the whole haystack.
    assert!(contains_insensitive_ascii(
        "abc".as_bytes(),
        &Lowercase(b"abc")
    ));
    // Needle surrounded by other bytes.
    assert!(contains_insensitive_ascii(
        "xabcx".as_bytes(),
        &Lowercase(b"abc")
    ));
    // Absent needle.
    assert!(!contains_insensitive_ascii(
        "abc".as_bytes(),
        &Lowercase(b"x")
    ));
    // Needle longer than the haystack can never match.
    assert!(!contains_insensitive_ascii(
        "abc".as_bytes(),
        &Lowercase(b"abcd")
    ));
}