use std::{error::Error, path::PathBuf, str::FromStr};

use clap::{arg, builder::ArgPredicate, crate_version, Parser, ValueEnum};
use clap_complete::{shells, Generator};
use clap_complete_nushell::Nushell;
use clap_verbosity_flag::{InfoLevel, Verbosity};
use rattler_conda_types::{package::ArchiveType, Platform};
use rattler_package_streaming::write::CompressionLevel;
use serde_json::{json, Value};
use url::Url;

#[cfg(feature = "recipe-generation")]
use crate::recipe_generator::GenerateRecipeOpts;
use crate::{
    console_utils::{Color, LogStyle},
    tool_configuration::SkipExisting,
};

/// The subcommands available in the rattler-build CLI.
#[derive(Parser)]
#[allow(clippy::large_enum_variant)]
pub enum SubCommands {
    /// Build a package from a recipe
    Build(BuildOpts),

    /// Run a test for a single package
    Test(TestOpts),

    /// Rebuild a package from a package file
    Rebuild(RebuildOpts),

    /// Upload a package to a channel
    Upload(UploadOpts),

    /// Generate shell completion script
    Completion(ShellCompletion),

    /// Generate a recipe for a package
    #[cfg(feature = "recipe-generation")]
    GenerateRecipe(GenerateRecipeOpts),

    /// Handle authentication to channels
    Auth(rattler::cli::auth::Args),
}

/// Options for the `completion` subcommand.
#[derive(Parser)]
pub struct ShellCompletion {
    /// The shell to generate a completion script for
    #[arg(short, long)]
    pub shell: Shell,
}

/// The shells for which completion scripts can be generated.
#[allow(clippy::enum_variant_names)]
#[derive(ValueEnum, Clone, Debug, Copy, Eq, Hash, PartialEq)]
pub enum Shell {
    Bash,
    Elvish,
    Fish,
    Nushell,
    Powershell,
    Zsh,
}

impl Generator for Shell {
    fn file_name(&self, name: &str) -> String {
        match self {
            Shell::Bash => shells::Bash.file_name(name),
            Shell::Elvish => shells::Elvish.file_name(name),
            Shell::Fish => shells::Fish.file_name(name),
            Shell::Nushell => Nushell.file_name(name),
            Shell::Powershell => shells::PowerShell.file_name(name),
            Shell::Zsh => shells::Zsh.file_name(name),
        }
    }

    fn generate(&self, cmd: &clap::Command, buf: &mut dyn std::io::Write) {
        match self {
            Shell::Bash => shells::Bash.generate(cmd, buf),
            Shell::Elvish => shells::Elvish.generate(cmd, buf),
            Shell::Fish => shells::Fish.generate(cmd, buf),
            Shell::Nushell => Nushell.generate(cmd, buf),
            Shell::Powershell => shells::PowerShell.generate(cmd, buf),
            Shell::Zsh => shells::Zsh.generate(cmd, buf),
        }
    }
}
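
// A minimal sketch of how this `Generator` implementation is typically driven
// (assuming clap's `CommandFactory` trait and the `App` parser defined below;
// the binary name "rattler-build" is an assumption here):
//
//     use clap::CommandFactory;
//     let mut cmd = App::command();
//     clap_complete::generate(Shell::Bash, &mut cmd, "rattler-build", &mut std::io::stdout());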

#[allow(missing_docs)]
#[derive(Parser)]
#[clap(version = crate_version!())]
pub struct App {
    /// The subcommand to execute
    #[clap(subcommand)]
    pub subcommand: Option<SubCommands>,

    /// Verbosity flags (-v, -vv, -q, ...)
    #[command(flatten)]
    pub verbose: Verbosity<InfoLevel>,

    /// The style of the log output
    #[clap(
        long,
        env = "RATTLER_BUILD_LOG_STYLE",
        default_value = "fancy",
        global = true
    )]
    pub log_style: LogStyle,

    /// Whether to use colored output
    #[clap(
        long,
        env = "RATTLER_BUILD_COLOR",
        default_value = "auto",
        global = true
    )]
    pub color: Color,
}

impl App {
    /// Returns true if the build subcommand should run with the terminal user interface.
    pub fn is_tui(&self) -> bool {
        match &self.subcommand {
            Some(SubCommands::Build(args)) => args.tui,
            _ => false,
        }
    }
}

/// Common options shared by several subcommands.
#[derive(Parser, Clone, Debug)]
pub struct CommonOpts {
    /// Output directory for build artifacts
    #[clap(
        long,
        env = "CONDA_BLD_PATH",
        default_value = "./output",
        verbatim_doc_comment,
        help_heading = "Modifying result"
    )]
    pub output_dir: Option<PathBuf>,

    #[clap(long, env = "RATTLER_ZSTD", default_value = "true", hide = true)]
    pub use_zstd: bool,

    #[clap(long, env = "RATTLER_BZ2", default_value = "true", hide = true)]
    pub use_bz2: bool,

    /// Enable experimental features
    #[arg(long, env = "RATTLER_BUILD_EXPERIMENTAL")]
    pub experimental: bool,

    /// Path to an auth file to read authentication information from
    #[clap(long, env = "RATTLER_AUTH_FILE", hide = true)]
    pub auth_file: Option<PathBuf>,
}

/// A package format (archive type) together with the compression level to use.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct PackageFormatAndCompression {
    /// The archive type that is selected (.tar.bz2 or .conda)
    pub archive_type: ArchiveType,
    /// The compression level that is selected
    pub compression_level: CompressionLevel,
}

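// Accepted syntax for a package format string, as implemented by the `FromStr`
// parser below: an archive type ("tar-bz2"/".tar.bz2" or "conda"/".conda"),
// optionally followed by `:` and a compression level. The level is either a
// named value ("max"/"highest", "default"/"normal", "fast"/"lowest"/"min") or a
// number: 1..=9 for .tar.bz2 (bzip2) and -7..=22 for .conda (zstd).
// Examples: "conda", "tar-bz2:9", ".conda:max", "conda:-5".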
impl FromStr for PackageFormatAndCompression {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut split = s.split(':');
        let package_format = split.next().ok_or("invalid package format")?;

        let compression = split.next().unwrap_or("default");

        let package_format = package_format
            // keep only alphanumeric characters so that "tar-bz2" and ".tar.bz2" both match
            .chars()
            .filter(|c| c.is_alphanumeric())
            .collect::<String>();

        let archive_type = match package_format.to_lowercase().as_str() {
            "tarbz2" => ArchiveType::TarBz2,
            "conda" => ArchiveType::Conda,
            _ => return Err(format!("Unknown package format: {}", package_format)),
        };

        let compression_level = match compression {
            "max" | "highest" => CompressionLevel::Highest,
            "default" | "normal" => CompressionLevel::Default,
            "fast" | "lowest" | "min" => CompressionLevel::Lowest,
            number if number.parse::<i32>().is_ok() => {
                let number = number.parse::<i32>().unwrap_or_default();
                match archive_type {
                    ArchiveType::TarBz2 => {
                        if !(1..=9).contains(&number) {
                            return Err("Compression level for .tar.bz2 must be between 1 and 9"
                                .to_string());
                        }
                    }
                    ArchiveType::Conda => {
                        if !(-7..=22).contains(&number) {
                            return Err(
                                "Compression level for conda packages (zstd) must be between -7 and 22".to_string()
                            );
                        }
                    }
                }
                CompressionLevel::Numeric(number)
            }
            _ => return Err(format!("Unknown compression level: {}", compression)),
        };

        Ok(PackageFormatAndCompression {
            archive_type,
            compression_level,
        })
    }
}

/// Options for the `build` subcommand.
#[derive(Parser, Clone)]
pub struct BuildOpts {
    /// Recipe files or directories containing a recipe to build (defaults to the current directory)
    #[arg(
        short,
        long,
        default_value = ".",
        default_value_if("recipe_dir", ArgPredicate::IsPresent, None)
    )]
    pub recipe: Vec<PathBuf>,

    /// The directory that contains recipes
    #[arg(long, value_parser = is_dir)]
    pub recipe_dir: Option<PathBuf>,

    /// Build recipes up to the specified package
    #[arg(long)]
    pub up_to: Option<String>,

    /// The platform the build runs on (defaults to the current platform)
    #[arg(long, default_value_t = Platform::current())]
    pub build_platform: Platform,

    /// The platform the package is built for
    #[arg(long)]
    pub target_platform: Option<Platform>,

    /// The host platform for the build (defaults to the current platform)
    #[arg(long, default_value_t = Platform::current())]
    pub host_platform: Platform,

    /// Channels to search for dependencies (defaults to `conda-forge`)
    #[arg(short = 'c', long, default_value = "conda-forge")]
    pub channel: Vec<String>,

    /// Variant configuration files to use
    #[arg(short = 'm', long)]
    pub variant_config: Vec<PathBuf>,

    /// Ignore the variant configuration specified in the recipe
    #[arg(long)]
    pub ignore_recipe_variants: bool,

    /// Only render the recipe, do not run the build
    #[arg(long)]
    pub render_only: bool,

    /// Also solve the dependencies when rendering (requires `--render-only`)
    #[arg(long, requires("render_only"))]
    pub with_solve: bool,

    /// Keep the build directory after the build is finished
    #[arg(long)]
    pub keep_build: bool,

    /// Do not use a build id in the build directory name
    #[arg(long)]
    pub no_build_id: bool,

    /// The package format to use: `tar-bz2` or `conda`, optionally followed by a
    /// compression level, e.g. `conda:max` (see `PackageFormatAndCompression`)
    #[arg(
        long,
        default_value = "conda",
        help_heading = "Modifying result",
        verbatim_doc_comment
    )]
    pub package_format: PackageFormatAndCompression,

    /// The number of threads to use for compression
    #[arg(long)]
    pub compression_threads: Option<u32>,

    /// Do not include the recipe in the final package
    #[arg(long, help_heading = "Modifying result")]
    pub no_include_recipe: bool,

    /// Do not run tests after building the package
    #[arg(long, default_value = "false", help_heading = "Modifying result")]
    pub no_test: bool,

    /// Colorize the build log (defaults to `true`)
    #[arg(long, default_value = "true", help_heading = "Modifying result")]
    pub color_build_log: bool,

    #[allow(missing_docs)]
    #[clap(flatten)]
    pub common: CommonOpts,

    /// Launch the build with the terminal user interface (only available with the `tui` feature)
    #[arg(long, default_value = "false", hide = !cfg!(feature = "tui"))]
    pub tui: bool,

    /// Whether to skip packages that already exist (defaults to `none`; passing
    /// `--skip-existing` without a value means `local`)
    #[arg(
        long,
        default_missing_value = "local",
        default_value = "none",
        num_args = 0..=1,
        help_heading = "Modifying result"
    )]
    pub skip_existing: SkipExisting,

    /// Extra metadata to attach to the package, given as `key=value` pairs
    #[arg(long, value_parser = parse_key_val)]
    pub extra_meta: Option<Vec<(String, Value)>>,
}
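
// A sketch of a typical `build` invocation wiring the options above together
// (the recipe path and values are hypothetical; assumes the binary is installed
// as `rattler-build`):
//   rattler-build build --recipe ./my-recipe --channel conda-forge --package-format conda:max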

/// Returns the given path if it exists on disk and is a directory, otherwise an error.
fn is_dir(dir: &str) -> Result<PathBuf, String> {
    let path = PathBuf::from(dir);
    if path.is_dir() {
        Ok(path)
    } else {
        Err(format!(
            "Path '{dir}' needs to exist on disk and be a directory",
        ))
    }
}

/// Parse a single `KEY=VALUE` pair into a key and a JSON value.
fn parse_key_val(s: &str) -> Result<(String, Value), Box<dyn Error + Send + Sync + 'static>> {
    let (key, value) = s
        .split_once('=')
        .ok_or_else(|| format!("invalid KEY=value: no `=` found in `{}`", s))?;
    Ok((key.to_string(), json!(value)))
}
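
// A sketch of what `parse_key_val` produces for `--extra-meta` (hypothetical input):
//   "sha=24ee0"  ->  ("sha".to_string(), json!("24ee0"))
// The value is always stored as a JSON string; no number or boolean coercion is attempted.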

/// Options for the `test` subcommand.
#[derive(Parser)]
pub struct TestOpts {
    /// Channels to use when testing the package
    #[arg(short = 'c', long)]
    pub channel: Option<Vec<String>>,

    /// The package file to test
    #[arg(short, long)]
    pub package_file: PathBuf,

    /// The number of threads to use for compression
    #[clap(long, env = "RATTLER_COMPRESSION_THREADS")]
    pub compression_threads: Option<u32>,

    #[clap(flatten)]
    pub common: CommonOpts,
}

/// Options for the `rebuild` subcommand.
#[derive(Parser)]
pub struct RebuildOpts {
    /// The package file to rebuild
    #[arg(short, long)]
    pub package_file: PathBuf,

    /// Do not run tests after rebuilding the package
    #[arg(long, default_value = "false")]
    pub no_test: bool,

    /// The number of threads to use for compression
    #[clap(long, env = "RATTLER_COMPRESSION_THREADS")]
    pub compression_threads: Option<u32>,

    #[clap(flatten)]
    pub common: CommonOpts,
}

/// Options for the `upload` subcommand.
#[derive(Parser, Debug)]
pub struct UploadOpts {
    /// The package file(s) to upload
    #[arg(global = true, required = false)]
    pub package_files: Vec<PathBuf>,

    /// The server to upload the package(s) to
    #[clap(subcommand)]
    pub server_type: ServerType,

    #[clap(flatten)]
    pub common: CommonOpts,
}

/// The type of server to upload to.
#[derive(Clone, Debug, PartialEq, Parser)]
#[allow(missing_docs)]
pub enum ServerType {
    Quetz(QuetzOpts),
    Artifactory(ArtifactoryOpts),
    Prefix(PrefixOpts),
    Anaconda(AnacondaOpts),
    #[clap(hide = true)]
    CondaForge(CondaForgeOpts),
}
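
// A sketch of a possible `upload` invocation (channel name and package file are
// hypothetical; assumes the binary is installed as `rattler-build`):
//   rattler-build upload prefix --channel my-channel ./output/noarch/mypkg-0.1.0-h123456_0.conda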

/// Options for uploading to a Quetz server.
#[derive(Clone, Debug, PartialEq, Parser)]
pub struct QuetzOpts {
    /// The URL of the Quetz server
    #[arg(short, long, env = "QUETZ_SERVER_URL")]
    pub url: Url,

    /// The channel to upload the package to
    #[arg(short, long, env = "QUETZ_CHANNEL")]
    pub channel: String,

    /// The Quetz API key
    #[arg(short, long, env = "QUETZ_API_KEY")]
    pub api_key: Option<String>,
}

/// Options for uploading to an Artifactory server.
#[derive(Clone, Debug, PartialEq, Parser)]
pub struct ArtifactoryOpts {
    /// The URL of the Artifactory server
    #[arg(short, long, env = "ARTIFACTORY_SERVER_URL")]
    pub url: Url,

    /// The channel to upload the package to
    #[arg(short, long, env = "ARTIFACTORY_CHANNEL")]
    pub channel: String,

    /// The Artifactory username
    #[arg(short = 'r', long, env = "ARTIFACTORY_USERNAME")]
    pub username: Option<String>,

    /// The Artifactory password
    #[arg(short, long, env = "ARTIFACTORY_PASSWORD")]
    pub password: Option<String>,
}

/// Options for uploading to a prefix.dev server.
#[derive(Clone, Debug, PartialEq, Parser)]
pub struct PrefixOpts {
    /// The URL of the prefix.dev server
    #[arg(
        short,
        long,
        env = "PREFIX_SERVER_URL",
        default_value = "https://prefix.dev"
    )]
    pub url: Url,

    /// The channel to upload the package to
    #[arg(short, long, env = "PREFIX_CHANNEL")]
    pub channel: String,

    /// The prefix.dev API key
    #[arg(short, long, env = "PREFIX_API_KEY")]
    pub api_key: Option<String>,
}

/// Options for uploading to Anaconda.org.
#[derive(Clone, Debug, PartialEq, Parser)]
pub struct AnacondaOpts {
    /// The owner (user or organization) of the channel
    #[arg(short, long, env = "ANACONDA_OWNER")]
    pub owner: String,

    /// The channel(s) to upload the package to (defaults to `main`)
    #[arg(short, long, env = "ANACONDA_CHANNEL", default_value = "main")]
    pub channel: Vec<String>,

    /// The Anaconda API key
    #[arg(short, long, env = "ANACONDA_API_KEY")]
    pub api_key: Option<String>,

    /// The URL of the Anaconda server
    #[arg(
        short,
        long,
        env = "ANACONDA_SERVER_URL",
        default_value = "https://api.anaconda.org"
    )]
    pub url: Url,

    /// Replace files that already exist on the server
    #[arg(long, short, env = "ANACONDA_FORCE", default_value = "false")]
    pub force: bool,
}

/// Options for uploading to conda-forge.
#[derive(Clone, Debug, PartialEq, Parser)]
pub struct CondaForgeOpts {
    /// The Anaconda.org (binstar) token for the staging channel
    #[arg(long, env = "STAGING_BINSTAR_TOKEN", required = true)]
    pub staging_token: String,

    /// The name of the feedstock
    #[arg(long, env = "FEEDSTOCK_NAME", required = true)]
    pub feedstock: String,

    /// The feedstock token
    #[arg(long, env = "FEEDSTOCK_TOKEN", required = true)]
    pub feedstock_token: String,

    /// The staging channel to upload to (defaults to `cf-staging`)
    #[arg(long, env = "STAGING_CHANNEL", default_value = "cf-staging")]
    pub staging_channel: String,

    /// The URL of the Anaconda server
    #[arg(
        long,
        env = "ANACONDA_SERVER_URL",
        default_value = "https://api.anaconda.org"
    )]
    pub anaconda_url: Url,

    /// The endpoint used to validate feedstock outputs
    #[arg(
        long,
        env = "VALIDATION_ENDPOINT",
        default_value = "https://conda-forge.herokuapp.com/feedstock-outputs/copy"
    )]
    pub validation_endpoint: Url,

    /// Whether to post a comment when an error occurs
    #[arg(long, env = "POST_COMMENT_ON_ERROR", default_value = "true")]
    pub post_comment_on_error: bool,

    /// The CI provider
    #[arg(long, env = "CI")]
    pub provider: Option<String>,

    /// Only report what would be uploaded without uploading
    #[arg(long, env = "DRY_RUN", default_value = "false")]
    pub dry_run: bool,
}

#[cfg(test)]
mod test {
    use std::str::FromStr;

    use rattler_conda_types::package::ArchiveType;
    use rattler_package_streaming::write::CompressionLevel;

    use super::PackageFormatAndCompression;

    #[test]
    fn test_parse_packaging() {
        let package_format = PackageFormatAndCompression::from_str("tar-bz2").unwrap();
        assert_eq!(
            package_format,
            PackageFormatAndCompression {
                archive_type: ArchiveType::TarBz2,
                compression_level: CompressionLevel::Default
            }
        );

        let package_format = PackageFormatAndCompression::from_str("conda").unwrap();
        assert_eq!(
            package_format,
            PackageFormatAndCompression {
                archive_type: ArchiveType::Conda,
                compression_level: CompressionLevel::Default
            }
        );

        let package_format = PackageFormatAndCompression::from_str("tar-bz2:1").unwrap();
        assert_eq!(
            package_format,
            PackageFormatAndCompression {
                archive_type: ArchiveType::TarBz2,
                compression_level: CompressionLevel::Numeric(1)
            }
        );

        let package_format = PackageFormatAndCompression::from_str(".tar.bz2:max").unwrap();
        assert_eq!(
            package_format,
            PackageFormatAndCompression {
                archive_type: ArchiveType::TarBz2,
                compression_level: CompressionLevel::Highest
            }
        );

        let package_format = PackageFormatAndCompression::from_str("tarbz2:5").unwrap();
        assert_eq!(
            package_format,
            PackageFormatAndCompression {
                archive_type: ArchiveType::TarBz2,
                compression_level: CompressionLevel::Numeric(5)
            }
        );

        let package_format = PackageFormatAndCompression::from_str("conda:1").unwrap();
        assert_eq!(
            package_format,
            PackageFormatAndCompression {
                archive_type: ArchiveType::Conda,
                compression_level: CompressionLevel::Numeric(1)
            }
        );

        let package_format = PackageFormatAndCompression::from_str("conda:max").unwrap();
        assert_eq!(
            package_format,
            PackageFormatAndCompression {
                archive_type: ArchiveType::Conda,
                compression_level: CompressionLevel::Highest
            }
        );

        let package_format = PackageFormatAndCompression::from_str("conda:-5").unwrap();
        assert_eq!(
            package_format,
            PackageFormatAndCompression {
                archive_type: ArchiveType::Conda,
                compression_level: CompressionLevel::Numeric(-5)
            }
        );

        let package_format = PackageFormatAndCompression::from_str("conda:fast").unwrap();
        assert_eq!(
            package_format,
            PackageFormatAndCompression {
                archive_type: ArchiveType::Conda,
                compression_level: CompressionLevel::Lowest
            }
        );
    }
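
    // Additional sketch tests, not part of the original suite: they exercise the
    // error paths of `PackageFormatAndCompression::from_str` and the private
    // `parse_key_val` helper, based on the behaviour implemented above.
    #[test]
    fn test_parse_packaging_errors() {
        // unknown archive type
        assert!(PackageFormatAndCompression::from_str("zip").is_err());
        // .tar.bz2 only accepts bzip2 levels 1..=9
        assert!(PackageFormatAndCompression::from_str("tar-bz2:0").is_err());
        // .conda only accepts zstd levels -7..=22
        assert!(PackageFormatAndCompression::from_str("conda:23").is_err());
        // unknown named compression level
        assert!(PackageFormatAndCompression::from_str("conda:fastest").is_err());
    }

    #[test]
    fn test_parse_key_val() {
        // values are stored as JSON strings, exactly as written after `=`
        let (key, value) = super::parse_key_val("sha=24ee0").unwrap();
        assert_eq!(key, "sha");
        assert_eq!(value, serde_json::json!("24ee0"));

        // a missing `=` is an error
        assert!(super::parse_key_val("no-equals-sign").is_err());
    }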
}