use std::{
    io::{Cursor, ErrorKind},
    sync::LazyLock,
};

use color_eyre::{Report, eyre::Context};
use futures_util::{TryStreamExt, stream};
use itertools::Itertools;
use regex::Regex;
use reqwest::{
    Url,
    header::{self, HeaderName, HeaderValue},
};
use tokio::{
    fs::{self, File},
    io::{AsyncBufReadExt, AsyncRead, BufReader, Lines},
};
use tokio_stream::Stream;
use tokio_util::sync::CancellationToken;
use tracing::instrument;

use super::IntelliShellService;
use crate::{
    cli::{HistorySource, HttpMethod, ImportItemsProcess},
    config::GistConfig,
    errors::{AppError, Result, UserFacingError},
    model::{
        CATEGORY_USER, Command, ImportExportItem, ImportExportStream, ImportStats, SOURCE_IMPORT, VariableCompletion,
    },
    utils::{
        add_tags_to_description, convert_alt_to_regular,
        dto::{GIST_README_FILENAME, GIST_README_FILENAME_UPPER, GistDto, ImportExportItemDto},
        extract_gist_data, github_to_raw, read_history,
    },
};

impl IntelliShellService {
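    /// Imports a stream of items into the storage, optionally overwriting existing entries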
    pub async fn import_items(&self, items: ImportExportStream, overwrite: bool) -> Result<ImportStats> {
        self.storage.import_items(items, overwrite, false).await
    }

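    /// Resolves the import location from the CLI arguments and returns the stream of items to import.
    ///
    /// Explicit flags (history, `--file`, `--http`, `--gist`) take precedence; otherwise the location is
    /// auto-detected from its format (gist reference, http(s) url, `-` for stdin, or a file path).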
    pub async fn get_items_from_location(
        &self,
        args: ImportItemsProcess,
        gist_config: GistConfig,
        cancellation_token: CancellationToken,
    ) -> Result<ImportExportStream> {
        let ImportItemsProcess {
            location,
            file,
            http,
            gist,
            history,
            ai,
            filter,
            dry_run: _,
            tags,
            headers,
            method,
        } = args;

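        // Ensure every tag starts with a '#' prefix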
        let tags = tags
            .into_iter()
            .filter_map(|mut tag| {
                tag.chars().next().map(|first_char| {
                    if first_char == '#' {
                        tag
                    } else {
                        tag.insert(0, '#');
                        tag
                    }
                })
            })
            .collect::<Vec<_>>();

        let commands = if let Some(history) = history {
            self.get_history_items(history, filter, tags, ai, cancellation_token)
                .await?
        } else if file {
            if location == "-" {
                self.get_stdin_items(filter, tags, ai, cancellation_token).await?
            } else {
                self.get_file_items(location, filter, tags, ai, cancellation_token)
                    .await?
            }
        } else if http {
            self.get_http_items(location, headers, method, filter, tags, ai, cancellation_token)
                .await?
        } else if gist {
            self.get_gist_items(location, gist_config, filter, tags, ai, cancellation_token)
                .await?
        } else {
            if location == "gist"
                || location.starts_with("https://gist.github.com")
                || location.starts_with("https://api.github.com/gists")
            {
                self.get_gist_items(location, gist_config, filter, tags, ai, cancellation_token)
                    .await?
            } else if location.starts_with("http://") || location.starts_with("https://") {
                self.get_http_items(location, headers, method, filter, tags, ai, cancellation_token)
                    .await?
            } else if location == "-" {
                self.get_stdin_items(filter, tags, ai, cancellation_token).await?
            } else {
                self.get_file_items(location, filter, tags, ai, cancellation_token)
                    .await?
            }
        };

        Ok(commands)
    }

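    /// Imports commands from a shell history source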
    #[instrument(skip_all)]
    async fn get_history_items(
        &self,
        history: HistorySource,
        filter: Option<Regex>,
        tags: Vec<String>,
        ai: bool,
        cancellation_token: CancellationToken,
    ) -> Result<ImportExportStream> {
        if let Some(ref filter) = filter {
            tracing::info!(ai, "Importing commands matching `{filter}` from {history:?} history");
        } else {
            tracing::info!(ai, "Importing commands from {history:?} history");
        }
        let content = Cursor::new(read_history(history)?);
        self.extract_and_filter_items(content, filter, tags, ai, cancellation_token)
            .await
    }

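    /// Imports commands read from the standard input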
    #[instrument(skip_all)]
    async fn get_stdin_items(
        &self,
        filter: Option<Regex>,
        tags: Vec<String>,
        ai: bool,
        cancellation_token: CancellationToken,
    ) -> Result<ImportExportStream> {
        if let Some(ref filter) = filter {
            tracing::info!(ai, "Importing commands matching `{filter}` from stdin");
        } else {
            tracing::info!(ai, "Importing commands from stdin");
        }
        let content = tokio::io::stdin();
        self.extract_and_filter_items(content, filter, tags, ai, cancellation_token)
            .await
    }

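    /// Imports commands from a local file, checking first that it exists and can be read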
    #[instrument(skip_all)]
    async fn get_file_items(
        &self,
        path: String,
        filter: Option<Regex>,
        tags: Vec<String>,
        ai: bool,
        cancellation_token: CancellationToken,
    ) -> Result<ImportExportStream> {
        match fs::metadata(&path).await {
            Ok(m) if m.is_file() => (),
            Ok(_) => return Err(UserFacingError::ImportLocationNotAFile.into()),
            Err(err) if err.kind() == ErrorKind::NotFound => return Err(UserFacingError::ImportFileNotFound.into()),
            Err(err) if err.kind() == ErrorKind::PermissionDenied => {
                return Err(UserFacingError::FileNotAccessible("read").into());
            }
            Err(err) => return Err(Report::from(err).into()),
        }
        if let Some(ref filter) = filter {
            tracing::info!(ai, "Importing commands matching `{filter}` from file: {path}");
        } else {
            tracing::info!(ai, "Importing commands from file: {path}");
        }
        let content = File::open(path).await.wrap_err("Couldn't open the file")?;
        self.extract_and_filter_items(content, filter, tags, ai, cancellation_token)
            .await
    }

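    /// Imports commands from an http(s) endpoint, handling both json and plain text responses
    /// based on the content-type header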
    #[instrument(skip_all)]
    async fn get_http_items(
        &self,
        mut url: String,
        headers: Vec<(HeaderName, HeaderValue)>,
        method: HttpMethod,
        filter: Option<Regex>,
        tags: Vec<String>,
        ai: bool,
        cancellation_token: CancellationToken,
    ) -> Result<ImportExportStream> {
        if url == "-" {
            let mut buffer = String::new();
            std::io::stdin().read_line(&mut buffer)?;
            url = buffer.trim_end_matches("\n").to_string();
            tracing::debug!("Read url from stdin: {url}");
        }

        let mut url = Url::parse(&url).map_err(|err| {
            tracing::error!("Couldn't parse url: {err}");
            UserFacingError::HttpInvalidUrl
        })?;

        if let Some(raw_url) = github_to_raw(&url) {
            url = raw_url;
        }

        let method = method.into();
        if let Some(ref filter) = filter {
            tracing::info!(ai, "Importing commands matching `{filter}` from http: {method} {url}");
        } else {
            tracing::info!(ai, "Importing commands from http: {method} {url}");
        }

        let client = reqwest::Client::new();
        let mut req = client.request(method, url);

        for (name, value) in headers {
            tracing::debug!("Appending '{name}' header");
            req = req.header(name, value);
        }

        let res = req.send().await.map_err(|err| {
            tracing::error!("{err:?}");
            UserFacingError::HttpRequestFailed(err.to_string())
        })?;

        if !res.status().is_success() {
            let status = res.status();
            let status_str = status.as_str();
            let body = res.text().await.unwrap_or_default();
            if let Some(reason) = status.canonical_reason() {
                tracing::error!("Got response [{status_str}] {reason}:\n{body}");
                return Err(
                    UserFacingError::HttpRequestFailed(format!("received {status_str} {reason} response")).into(),
                );
            } else {
                tracing::error!("Got response [{status_str}]:\n{body}");
                return Err(UserFacingError::HttpRequestFailed(format!("received {status_str} response")).into());
            }
        }

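        // Determine whether the response should be parsed as json or as plain text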
        let mut json = false;
        if let Some(content_type) = res.headers().get(header::CONTENT_TYPE) {
            let Ok(content_type) = content_type.to_str() else {
                return Err(
                    UserFacingError::HttpRequestFailed(String::from("couldn't read content-type header")).into(),
                );
            };
            if content_type.starts_with("application/json") {
                json = true;
            } else if !content_type.starts_with("text") {
                return Err(
                    UserFacingError::HttpRequestFailed(format!("unsupported content-type: {content_type}")).into(),
                );
            }
        }

        if json {
            let items: Vec<ImportExportItemDto> = match res.json().await {
                Ok(b) => b,
                Err(err) if err.is_decode() => {
                    tracing::error!("Couldn't parse api response: {err}");
                    return Err(UserFacingError::HttpRequestFailed(String::from("couldn't parse api response")).into());
                }
                Err(err) => {
                    tracing::error!("{err:?}");
                    return Err(UserFacingError::HttpRequestFailed(err.to_string()).into());
                }
            };

            Ok(Box::pin(stream::iter(
                items.into_iter().map(ImportExportItem::from).map(Ok),
            )))
        } else {
            let content = Cursor::new(res.text().await.map_err(|err| {
                tracing::error!("Couldn't read api response: {err}");
                UserFacingError::HttpRequestFailed(String::from("couldn't read api response"))
            })?);
            self.extract_and_filter_items(content, filter, tags, ai, cancellation_token)
                .await
        }
    }

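    /// Imports commands from a GitHub gist, fetching its content through the GitHub API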
    #[instrument(skip_all)]
    async fn get_gist_items(
        &self,
        mut gist: String,
        gist_config: GistConfig,
        filter: Option<Regex>,
        tags: Vec<String>,
        ai: bool,
        cancellation_token: CancellationToken,
    ) -> Result<ImportExportStream> {
        if gist == "-" {
            let mut buffer = String::new();
            std::io::stdin().read_line(&mut buffer)?;
            gist = buffer.trim_end_matches("\n").to_string();
            tracing::debug!("Read gist from stdin: {gist}");
        }

        if gist.starts_with("https://gist.githubusercontent.com") {
            return self
                .get_http_items(gist, Vec::new(), HttpMethod::GET, filter, tags, ai, cancellation_token)
                .await;
        }

        let (gist_id, gist_sha, gist_file) = extract_gist_data(&gist, &gist_config)?;

        let url = if let Some(sha) = gist_sha {
            format!("https://api.github.com/gists/{gist_id}/{sha}")
        } else {
            format!("https://api.github.com/gists/{gist_id}")
        };

        if let Some(ref filter) = filter {
            tracing::info!(ai, "Importing commands matching `{filter}` from gist: {url}");
        } else {
            tracing::info!(ai, "Importing commands from gist: {url}");
        }

        let client = reqwest::Client::new();
        let res = client
            .get(url)
            .header(header::ACCEPT, "application/vnd.github+json")
            .header(header::USER_AGENT, "intelli-shell")
            .header("X-GitHub-Api-Version", "2022-11-28")
            .send()
            .await
            .map_err(|err| {
                tracing::error!("{err:?}");
                UserFacingError::GistRequestFailed(err.to_string())
            })?;

        if !res.status().is_success() {
            let status = res.status();
            let status_str = status.as_str();
            let body = res.text().await.unwrap_or_default();
            if let Some(reason) = status.canonical_reason() {
                tracing::error!("Got response [{status_str}] {reason}:\n{body}");
                return Err(
                    UserFacingError::GistRequestFailed(format!("received {status_str} {reason} response")).into(),
                );
            } else {
                tracing::error!("Got response [{status_str}]:\n{body}");
                return Err(UserFacingError::GistRequestFailed(format!("received {status_str} response")).into());
            }
        }

        let mut body: GistDto = match res.json().await {
            Ok(b) => b,
            Err(err) if err.is_decode() => {
                tracing::error!("Couldn't parse api response: {err}");
                return Err(UserFacingError::GistRequestFailed(String::from("couldn't parse api response")).into());
            }
            Err(err) => {
                tracing::error!("{err:?}");
                return Err(UserFacingError::GistRequestFailed(err.to_string()).into());
            }
        };

        let full_content = if let Some(ref gist_file) = gist_file {
            body.files
                .remove(gist_file)
                .ok_or(UserFacingError::GistFileNotFound)?
                .content
        } else {
            body.files
                .into_iter()
                .filter(|(k, _)| k != GIST_README_FILENAME && k != GIST_README_FILENAME_UPPER)
                .map(|(_, f)| f.content)
                .join("\n")
        };

        let content = Cursor::new(full_content);
        self.extract_and_filter_items(content, filter, tags, ai, cancellation_token)
            .await
    }

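    /// Parses the raw content into a stream of import items, either via the AI prompt or the
    /// plain text parser, applying the optional regex filter to the parsed commands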
    async fn extract_and_filter_items(
        &self,
        content: impl AsyncRead + Unpin + Send + 'static,
        filter: Option<Regex>,
        tags: Vec<String>,
        ai: bool,
        cancellation_token: CancellationToken,
    ) -> Result<ImportExportStream> {
        let stream: ImportExportStream = if ai {
            let commands = self
                .prompt_commands_import(content, tags, CATEGORY_USER, SOURCE_IMPORT, cancellation_token)
                .await?;
            Box::pin(commands.map_ok(ImportExportItem::Command))
        } else {
            Box::pin(parse_import_items(content, tags, CATEGORY_USER, SOURCE_IMPORT))
        };

        if let Some(filter) = filter {
            Ok(Box::pin(stream.try_filter(move |item| {
                let pass = match item {
                    ImportExportItem::Command(c) => c.matches(&filter),
                    ImportExportItem::Completion(_) => true,
                };
                async move { pass }
            })))
        } else {
            Ok(stream)
        }
    }
}

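/// Parses the given content into a stream of [`ImportExportItem`], recognizing plain command lines,
/// comment lines as descriptions, multi-line commands ending with a backslash and `$`-prefixed
/// variable completion lines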
#[instrument(skip_all)]
pub(super) fn parse_import_items(
    content: impl AsyncRead + Unpin + Send,
    tags: Vec<String>,
    category: impl Into<String>,
    source: impl Into<String>,
) -> impl Stream<Item = Result<ImportExportItem>> + Send {
    struct ParserState<R: AsyncRead> {
        category: String,
        source: String,
        tags: Vec<String>,
        lines: Lines<BufReader<R>>,
        description_buffer: Vec<String>,
        description_paused: bool,
    }

    let initial_state = ParserState {
        category: category.into(),
        source: source.into(),
        tags,
        lines: BufReader::new(content).lines(),
        description_buffer: Vec::new(),
        description_paused: false,
    };

    fn get_comment_content(trimmed_line: &str) -> Option<&str> {
        if let Some(stripped) = trimmed_line.strip_prefix('#') {
            return Some(stripped.trim());
        }
        if let Some(stripped) = trimmed_line.strip_prefix("//") {
            return Some(stripped.trim());
        }
        if let Some(stripped) = trimmed_line.strip_prefix("- ") {
            return Some(stripped.trim());
        }
        if let Some(stripped) = trimmed_line.strip_prefix("::") {
            return Some(stripped.trim());
        }
        None
    }

    stream::unfold(initial_state, move |mut state| async move {
        loop {
            let line: String = match state.lines.next_line().await {
                Ok(Some(line)) => line,
                Ok(None) => return None,
                Err(err) => return Some((Err(AppError::from(err)), state)),
            };
            let trimmed_line = line.trim();

            if trimmed_line == "#!intelli-shell" {
                continue;
            }

            if trimmed_line.starts_with(">")
                || trimmed_line.starts_with("```")
                || trimmed_line.starts_with("%")
                || trimmed_line.starts_with(";")
                || trimmed_line.starts_with("@")
            {
                continue;
            }

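            // Variable completion lines follow the `$ (root_cmd) variable: provider` syntax,
            // where the root command is optional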
            if trimmed_line.starts_with('$') {
                static COMPLETION_RE: LazyLock<Regex> = LazyLock::new(|| {
                    Regex::new(r"^\$\s*(?:\((?P<cmd>[\w-]+)\)\s*)?(?P<var>[^:|{}]+):\s*(?P<provider>.+)$").unwrap()
                });

                let item = if let Some(caps) = COMPLETION_RE.captures(trimmed_line) {
                    let cmd = caps.name("cmd").map_or("", |m| m.as_str()).trim();
                    let var = caps.name("var").map_or("", |m| m.as_str()).trim();
                    let provider = caps.name("provider").map_or("", |m| m.as_str()).trim();

                    if var.is_empty() || provider.is_empty() {
                        Err(UserFacingError::ImportCompletionInvalidFormat(line).into())
                    } else {
                        Ok(ImportExportItem::Completion(VariableCompletion::new(
                            state.source.clone(),
                            cmd,
                            var,
                            provider,
                        )))
                    }
                } else {
                    Err(UserFacingError::ImportCompletionInvalidFormat(line).into())
                };

                state.description_buffer.clear();
                state.description_paused = false;
                return Some((item, state));
            }

            if let Some(comment_content) = get_comment_content(trimmed_line) {
                if state.description_paused {
                    state.description_buffer.clear();
                }
                state.description_buffer.push(comment_content.to_string());
                state.description_paused = false;
                continue;
            }

            if trimmed_line.is_empty() {
                if !state.description_buffer.is_empty() {
                    state.description_paused = true;
                }
                continue;
            }

            let mut current_trimmed_line = trimmed_line.to_string();
            let mut command_parts: Vec<String> = Vec::new();
            let mut inline_description: Option<String> = None;

            // Accumulate the command, which can span multiple lines when ending with a backslash,
            // capturing an optional inline ` ## ` description along the way
            loop {
                if get_comment_content(&current_trimmed_line).is_some() || current_trimmed_line.is_empty() {
                    if let Some(next_line_res) = state.lines.next_line().await.transpose() {
                        current_trimmed_line = match next_line_res {
                            Ok(next_line) => next_line.trim().to_string(),
                            Err(err) => return Some((Err(AppError::from(err)), state)),
                        };
                        continue;
                    } else {
                        break;
                    }
                }

                let (command_segment, desc) = match current_trimmed_line.split_once(" ## ") {
                    Some((cmd, desc)) => (cmd, Some(desc.trim().to_string())),
                    None => (current_trimmed_line.as_str(), None),
                };
                if inline_description.is_none() {
                    inline_description = desc;
                }

                if let Some(stripped) = command_segment.strip_suffix('\\') {
                    command_parts.push(stripped.trim().to_string());
                    if let Some(next_line_res) = state.lines.next_line().await.transpose() {
                        current_trimmed_line = match next_line_res {
                            Ok(next_line) => next_line.trim().to_string(),
                            Err(err) => return Some((Err(AppError::from(err)), state)),
                        };
                    } else {
                        break;
                    }
                } else {
                    command_parts.push(command_segment.to_string());
                    break;
                }
            }

            let mut full_cmd = command_parts.join(" ");
            if full_cmd.starts_with('`') && full_cmd.ends_with('`') {
                full_cmd = full_cmd[1..full_cmd.len() - 1].to_string();
            }
            full_cmd = convert_alt_to_regular(&full_cmd);
            let pre_description = if let Some(inline) = inline_description {
                inline
            } else {
                state.description_buffer.join("\n")
            };
            let (alias, mut full_description) = extract_alias(pre_description);
            if let Some(stripped) = full_description.strip_suffix(':') {
                full_description = stripped.to_owned();
            }
            if !state.tags.is_empty() {
                full_description = add_tags_to_description(&state.tags, full_description);
            }

            let command = Command::new(state.category.clone(), state.source.clone(), full_cmd)
                .with_description(Some(full_description))
                .with_alias(alias);

            state.description_buffer.clear();
            state.description_paused = false;

            return Some((Ok(ImportExportItem::Command(command)), state));
        }
    })
}

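/// Extracts an `[alias:name]` tag from the start or the end of a description, returning the alias
/// (if any) along with the description without it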
fn extract_alias(description: String) -> (Option<String>, String) {
    static ALIAS_RE: LazyLock<Regex> =
        LazyLock::new(|| Regex::new(r"(?s)(?:\A\s*\[alias:([^\]]+)\]\s*)|(?:\s*\[alias:([^\]]+)\]\s*\z)").unwrap());

    let mut alias = None;

    let new_description = ALIAS_RE.replace(&description, |caps: &regex::Captures| {
        alias = caps.get(1).or_else(|| caps.get(2)).map(|m| m.as_str().to_string());
        ""
    });

    (alias, new_description.trim().to_string())
}

#[cfg(test)]
mod tests {
    use futures_util::TryStreamExt;

    use super::*;

    const CMD_1: &str = "cmd number 1";
    const CMD_2: &str = "cmd number 2";
    const CMD_3: &str = "cmd number 3";

    const ALIAS_1: &str = "a1";
    const ALIAS_2: &str = "a2";
    const ALIAS_3: &str = "a3";

    const DESCRIPTION_1: &str = "Line of a description 1";
    const DESCRIPTION_2: &str = "Line of a description 2";
    const DESCRIPTION_3: &str = "Line of a description 3";

    const CMD_MULTI_1: &str = "cmd very long";
    const CMD_MULTI_2: &str = "that is split across";
    const CMD_MULTI_3: &str = "multiple lines for readability";

    #[tokio::test]
    async fn test_parse_import_items_empty_input() {
        let items = parse_import_items("".as_bytes(), Vec::new(), CATEGORY_USER, SOURCE_IMPORT)
            .try_collect::<Vec<_>>()
            .await
            .unwrap();
        assert!(items.is_empty());
    }

    #[tokio::test]
    async fn test_parse_import_items_simple() {
        let input = format!(
            r"{CMD_1}
            {CMD_2}
            {CMD_3}"
        );
        let items = parse_import_items(input.as_bytes(), Vec::new(), CATEGORY_USER, SOURCE_IMPORT)
            .try_collect::<Vec<_>>()
            .await
            .unwrap();

        assert_eq!(items.len(), 3);
        assert_eq!(get_command(&items[0]).cmd, CMD_1);
        assert!(get_command(&items[0]).description.is_none());
        assert_eq!(get_command(&items[1]).cmd, CMD_2);
        assert!(get_command(&items[1]).description.is_none());
        assert_eq!(get_command(&items[2]).cmd, CMD_3);
        assert!(get_command(&items[2]).description.is_none());
    }

    #[tokio::test]
    async fn test_parse_import_items_legacy() {
        let input = format!(
            r"{CMD_1} ## {DESCRIPTION_1}
            {CMD_2} ## {DESCRIPTION_2}
            {CMD_3} ## {DESCRIPTION_3}"
        );
        let items = parse_import_items(input.as_bytes(), Vec::new(), CATEGORY_USER, SOURCE_IMPORT)
            .try_collect::<Vec<_>>()
            .await
            .unwrap();

        assert_eq!(items.len(), 3);
        assert_eq!(get_command(&items[0]).cmd, CMD_1);
        assert_eq!(get_command(&items[0]).description.as_deref(), Some(DESCRIPTION_1));
        assert_eq!(get_command(&items[1]).cmd, CMD_2);
        assert_eq!(get_command(&items[1]).description.as_deref(), Some(DESCRIPTION_2));
        assert_eq!(get_command(&items[2]).cmd, CMD_3);
        assert_eq!(get_command(&items[2]).description.as_deref(), Some(DESCRIPTION_3));
    }

    #[tokio::test]
    async fn test_parse_import_items_sh_style() {
        let input = format!(
            r"# {DESCRIPTION_1}
            {CMD_1}

            # {DESCRIPTION_2}
            {CMD_2}

            # {DESCRIPTION_3}
            {CMD_3}"
        );
        let items = parse_import_items(input.as_bytes(), Vec::new(), CATEGORY_USER, SOURCE_IMPORT)
            .try_collect::<Vec<_>>()
            .await
            .unwrap();

        assert_eq!(items.len(), 3);
        assert_eq!(get_command(&items[0]).cmd, CMD_1);
        assert_eq!(get_command(&items[0]).description.as_deref(), Some(DESCRIPTION_1));
        assert_eq!(get_command(&items[1]).cmd, CMD_2);
        assert_eq!(get_command(&items[1]).description.as_deref(), Some(DESCRIPTION_2));
        assert_eq!(get_command(&items[2]).cmd, CMD_3);
        assert_eq!(get_command(&items[2]).description.as_deref(), Some(DESCRIPTION_3));
    }

    #[tokio::test]
    async fn test_parse_import_items_tldr_style() {
        let input = format!(
            r"# command-name

            > Short, snappy description.
            > Preferably one line; two are acceptable if necessary.
            > More information: <https://url-to-upstream.tld>.

            - {DESCRIPTION_1}:

            `{CMD_1}`

            - {DESCRIPTION_2}:

            `{CMD_2}`

            - {DESCRIPTION_3}:

            `{CMD_3}`"
        );
        let items = parse_import_items(input.as_bytes(), Vec::new(), CATEGORY_USER, SOURCE_IMPORT)
            .try_collect::<Vec<_>>()
            .await
            .unwrap();

        assert_eq!(items.len(), 3);
        assert_eq!(get_command(&items[0]).cmd, CMD_1);
        assert_eq!(get_command(&items[0]).description.as_deref(), Some(DESCRIPTION_1));
        assert_eq!(get_command(&items[1]).cmd, CMD_2);
        assert_eq!(get_command(&items[1]).description.as_deref(), Some(DESCRIPTION_2));
        assert_eq!(get_command(&items[2]).cmd, CMD_3);
        assert_eq!(get_command(&items[2]).description.as_deref(), Some(DESCRIPTION_3));
    }

    #[tokio::test]
    async fn test_parse_import_items_discard_orphan_descriptions() {
        let input = format!(
            r"# This is a comment without a command

            # {DESCRIPTION_1}
            {CMD_1}"
        );
        let items = parse_import_items(input.as_bytes(), Vec::new(), CATEGORY_USER, SOURCE_IMPORT)
            .try_collect::<Vec<_>>()
            .await
            .unwrap();

        assert_eq!(items.len(), 1);
        assert_eq!(get_command(&items[0]).cmd, CMD_1);
        assert_eq!(get_command(&items[0]).description.as_deref(), Some(DESCRIPTION_1));
    }

    #[tokio::test]
    async fn test_parse_import_items_inline_description_takes_precedence() {
        let input = format!(
            r"# {DESCRIPTION_2}
            {CMD_1} ## {DESCRIPTION_1}"
        );
        let items = parse_import_items(input.as_bytes(), Vec::new(), CATEGORY_USER, SOURCE_IMPORT)
            .try_collect::<Vec<_>>()
            .await
            .unwrap();

        assert_eq!(items.len(), 1);
        assert_eq!(get_command(&items[0]).cmd, CMD_1);
        assert_eq!(get_command(&items[0]).description.as_deref(), Some(DESCRIPTION_1));
    }

    #[tokio::test]
    async fn test_parse_import_items_multiline_description() {
        let input = format!(
            r"# {DESCRIPTION_1}
            #
            # {DESCRIPTION_2}
            {CMD_1}"
        );
        let items = parse_import_items(input.as_bytes(), Vec::new(), CATEGORY_USER, SOURCE_IMPORT)
            .try_collect::<Vec<_>>()
            .await
            .unwrap();

        assert_eq!(items.len(), 1);
        let cmd = get_command(&items[0]);
        assert_eq!(cmd.cmd, CMD_1);
        assert_eq!(
            cmd.description.as_ref(),
            Some(&format!("{DESCRIPTION_1}\n\n{DESCRIPTION_2}"))
        );
    }

    #[tokio::test]
    async fn test_parse_import_items_multiline() {
        let input = format!(
            r"# {DESCRIPTION_1}
            {CMD_MULTI_1} \
            # inner comment, not part of the description or command
            {CMD_MULTI_2} \
            {CMD_MULTI_3}"
        );
        let items = parse_import_items(input.as_bytes(), Vec::new(), CATEGORY_USER, SOURCE_IMPORT)
            .try_collect::<Vec<_>>()
            .await
            .unwrap();

        assert_eq!(items.len(), 1);
        let cmd = get_command(&items[0]);
        assert_eq!(cmd.cmd, format!("{CMD_MULTI_1} {CMD_MULTI_2} {CMD_MULTI_3}"));
        assert_eq!(cmd.description.as_deref(), Some(DESCRIPTION_1));
    }

    #[tokio::test]
    async fn test_parse_import_items_with_tags_no_description() {
        let input = CMD_1;
        let tags = vec!["#test".to_string(), "#tag2".to_string()];
        let items = parse_import_items(input.as_bytes(), tags, CATEGORY_USER, SOURCE_IMPORT)
            .try_collect::<Vec<_>>()
            .await
            .unwrap();

        assert_eq!(items.len(), 1);
        let cmd = get_command(&items[0]);
        assert_eq!(cmd.cmd, CMD_1);
        assert_eq!(cmd.description.as_deref(), Some("#test #tag2"));
    }

    #[tokio::test]
    async fn test_parse_import_items_with_tags_simple_description() {
        let input = format!(
            r"# {DESCRIPTION_1}
            {CMD_1}

            {CMD_2} ## {DESCRIPTION_2}"
        );
        let tags = vec!["#test".to_string()];
        let items = parse_import_items(input.as_bytes(), tags, CATEGORY_USER, SOURCE_IMPORT)
            .try_collect::<Vec<_>>()
            .await
            .unwrap();

        assert_eq!(items.len(), 2);
        let cmd0 = get_command(&items[0]);
        assert_eq!(cmd0.cmd, CMD_1);
        assert_eq!(cmd0.description.as_ref(), Some(&format!("{DESCRIPTION_1} #test")));
        let cmd1 = get_command(&items[1]);
        assert_eq!(cmd1.cmd, CMD_2);
        assert_eq!(cmd1.description.as_ref(), Some(&format!("{DESCRIPTION_2} #test")));
    }

    #[tokio::test]
    async fn test_parse_import_items_with_tags_and_multiline_description() {
        let input = format!(
            r"# {DESCRIPTION_1}
            # {DESCRIPTION_2}
            {CMD_1}"
        );
        let tags = vec!["#test".to_string()];
        let items = parse_import_items(input.as_bytes(), tags, CATEGORY_USER, SOURCE_IMPORT)
            .try_collect::<Vec<_>>()
            .await
            .unwrap();

        assert_eq!(items.len(), 1);
        let cmd = get_command(&items[0]);
        assert_eq!(cmd.cmd, CMD_1);
        assert_eq!(
            cmd.description.as_ref(),
            Some(&format!("{DESCRIPTION_1}\n{DESCRIPTION_2}\n#test"))
        );
    }

    #[tokio::test]
    async fn test_parse_import_items_skips_existing_tags() {
        let input = format!(
            r"# {DESCRIPTION_1} #test
            {CMD_1}"
        );
        let tags = vec!["#test".to_string(), "#new".to_string()];
        let items = parse_import_items(input.as_bytes(), tags, CATEGORY_USER, SOURCE_IMPORT)
            .try_collect::<Vec<_>>()
            .await
            .unwrap();

        assert_eq!(items.len(), 1);
        let cmd = get_command(&items[0]);
        assert_eq!(cmd.cmd, CMD_1);
        assert_eq!(cmd.description.as_ref(), Some(&format!("{DESCRIPTION_1} #test #new")));
    }

    #[tokio::test]
    async fn test_parse_import_items_with_aliases() {
        let input = format!(
            r"# [alias:{ALIAS_1}] {DESCRIPTION_1}
            {CMD_1}

            # [alias:{ALIAS_2}]
            # {DESCRIPTION_2}
            # {DESCRIPTION_2}
            {CMD_2}

            # [alias:{ALIAS_3}]
            {CMD_3}"
        );
        let items = parse_import_items(input.as_bytes(), Vec::new(), CATEGORY_USER, SOURCE_IMPORT)
            .try_collect::<Vec<_>>()
            .await
            .unwrap();

        assert_eq!(items.len(), 3);
        let cmd0 = get_command(&items[0]);
        assert_eq!(cmd0.cmd, CMD_1);
        assert_eq!(cmd0.description.as_deref(), Some(DESCRIPTION_1));
        assert_eq!(cmd0.alias.as_deref(), Some(ALIAS_1));

        let cmd1 = get_command(&items[1]);
        assert_eq!(cmd1.cmd, CMD_2);
        assert_eq!(
            cmd1.description.as_ref(),
            Some(&format!("{DESCRIPTION_2}\n{DESCRIPTION_2}"))
        );
        assert_eq!(cmd1.alias.as_deref(), Some(ALIAS_2));

        let cmd2 = get_command(&items[2]);
        assert_eq!(cmd2.cmd, CMD_3);
        assert!(cmd2.description.is_none());
        assert_eq!(cmd2.alias.as_deref(), Some(ALIAS_3));
    }

    #[tokio::test]
    async fn test_parse_import_items_completions() {
        let input = r#"
        # A command to ensure both types are handled
        ls -l ## list files

        # Completions
        $(git) branch: git branch --all
        $ file: ls -F
        $ (az) group: az group list --output tsv
        "#;

        let items = parse_import_items(input.as_bytes(), Vec::new(), CATEGORY_USER, SOURCE_IMPORT)
            .try_collect::<Vec<_>>()
            .await
            .unwrap();

        assert_eq!(items.len(), 4);

        let cmd = get_command(&items[0]);
        assert_eq!(cmd.cmd, "ls -l");
        assert_eq!(cmd.description.as_deref(), Some("list files"));

        if let ImportExportItem::Completion(c) = &items[1] {
            assert_eq!(c.flat_root_cmd, "git");
            assert_eq!(c.flat_variable, "branch");
            assert_eq!(c.suggestions_provider, "git branch --all");
        } else {
            panic!("Expected a Completion at index 1");
        }

        if let ImportExportItem::Completion(c) = &items[2] {
            assert_eq!(c.flat_root_cmd, "");
            assert_eq!(c.flat_variable, "file");
            assert_eq!(c.suggestions_provider, "ls -F");
        } else {
            panic!("Expected a Completion at index 2");
        }

        if let ImportExportItem::Completion(c) = &items[3] {
            assert_eq!(c.flat_root_cmd, "az");
            assert_eq!(c.flat_variable, "group");
            assert_eq!(c.suggestions_provider, "az group list --output tsv");
        } else {
            panic!("Expected a Completion at index 3");
        }
    }

    #[tokio::test]
    async fn test_parse_import_items_invalid_completion_format() {
        let line = "$ invalid completion format";
        let result = parse_import_items(line.as_bytes(), Vec::new(), CATEGORY_USER, SOURCE_IMPORT)
            .try_collect::<Vec<_>>()
            .await;

        assert!(result.is_err());
        if let Err(err) = result {
            assert!(
                matches!(err, AppError::UserFacing(UserFacingError::ImportCompletionInvalidFormat(s)) if s == line)
            );
        }
    }

    fn get_command(item: &ImportExportItem) -> &Command {
        match item {
            ImportExportItem::Command(command) => command,
            ImportExportItem::Completion(_) => panic!("Expected ImportExportItem::Command, found completion"),
        }
    }
}