rattler_build/
opt.rs

//! Command-line options.

use std::{error::Error, path::PathBuf, str::FromStr};

use clap::{arg, builder::ArgPredicate, crate_version, Parser, ValueEnum};
use clap_complete::{shells, Generator};
use clap_complete_nushell::Nushell;
use clap_verbosity_flag::{InfoLevel, Verbosity};
use rattler_conda_types::{package::ArchiveType, Platform};
use rattler_package_streaming::write::CompressionLevel;
use serde_json::{json, Value};
use url::Url;

#[cfg(feature = "recipe-generation")]
use crate::recipe_generator::GenerateRecipeOpts;
use crate::{
    console_utils::{Color, LogStyle},
    tool_configuration::SkipExisting,
};

/// Application subcommands.
#[derive(Parser)]
#[allow(clippy::large_enum_variant)]
pub enum SubCommands {
    /// Build a package from a recipe
    Build(BuildOpts),

    /// Run a test for a single package
    ///
    /// This creates a temporary directory, copies the package file into it, and
    /// then runs the indexing. It then creates a test environment that
    /// installs the package and any extra dependencies specified in the
    /// package test dependencies file.
    ///
    /// With the activated test environment, the packaged test files are run:
    ///
    /// * `info/test/run_test.sh` or `info/test/run_test.bat` on Windows
    /// * `info/test/run_test.py`
    ///
    /// These test files are written at "package creation time" and are part of
    /// the package.
    Test(TestOpts),

    /// Rebuild a package from a package file instead of a recipe.
    Rebuild(RebuildOpts),

    /// Upload a package
    Upload(UploadOpts),

    /// Generate shell completion script
    Completion(ShellCompletion),

    #[cfg(feature = "recipe-generation")]
    /// Generate a recipe from PyPI or CRAN
    GenerateRecipe(GenerateRecipeOpts),

    /// Handle authentication to external channels
    Auth(rattler::cli::auth::Args),
}

/// Shell completion options.
#[derive(Parser)]
pub struct ShellCompletion {
    /// Specifies the shell for which the completions should be generated
    #[arg(short, long)]
    pub shell: Shell,
}

/// Defines the shells for which we can provide completions
#[allow(clippy::enum_variant_names)]
#[derive(ValueEnum, Clone, Debug, Copy, Eq, Hash, PartialEq)]
pub enum Shell {
    /// Bourne Again SHell (bash)
    Bash,
    /// Elvish shell
    Elvish,
    /// Friendly Interactive SHell (fish)
    Fish,
    /// Nushell
    Nushell,
    /// PowerShell
    Powershell,
    /// Z SHell (zsh)
    Zsh,
}

impl Generator for Shell {
    fn file_name(&self, name: &str) -> String {
        match self {
            Shell::Bash => shells::Bash.file_name(name),
            Shell::Elvish => shells::Elvish.file_name(name),
            Shell::Fish => shells::Fish.file_name(name),
            Shell::Nushell => Nushell.file_name(name),
            Shell::Powershell => shells::PowerShell.file_name(name),
            Shell::Zsh => shells::Zsh.file_name(name),
        }
    }

    fn generate(&self, cmd: &clap::Command, buf: &mut dyn std::io::Write) {
        match self {
            Shell::Bash => shells::Bash.generate(cmd, buf),
            Shell::Elvish => shells::Elvish.generate(cmd, buf),
            Shell::Fish => shells::Fish.generate(cmd, buf),
            Shell::Nushell => Nushell.generate(cmd, buf),
            Shell::Powershell => shells::PowerShell.generate(cmd, buf),
            Shell::Zsh => shells::Zsh.generate(cmd, buf),
        }
    }
}
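
// Illustrative sketch (not part of the upstream module): because `Shell` implements
// `clap_complete::Generator`, a completion script for the whole CLI can be rendered
// with `clap_complete::generate`. The module name and the "rattler-build" bin name
// below are assumptions made only for this example.
#[cfg(test)]
mod shell_completion_sketch {
    use clap::CommandFactory;

    #[test]
    fn renders_bash_completions() {
        // Build the full `App` command and write Bash completions into a buffer.
        let mut cmd = super::App::command();
        let mut buf = Vec::new();
        clap_complete::generate(super::Shell::Bash, &mut cmd, "rattler-build", &mut buf);
        assert!(!buf.is_empty());
    }
}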

#[allow(missing_docs)]
#[derive(Parser)]
#[clap(version = crate_version!())]
pub struct App {
    /// Subcommand.
    #[clap(subcommand)]
    pub subcommand: Option<SubCommands>,

    /// Enable verbose logging.
    #[command(flatten)]
    pub verbose: Verbosity<InfoLevel>,

    /// Logging style
    #[clap(
        long,
        env = "RATTLER_BUILD_LOG_STYLE",
        default_value = "fancy",
        global = true
    )]
    pub log_style: LogStyle,

    /// Enable or disable colored output from rattler-build.
    /// Also honors the `CLICOLOR` and `CLICOLOR_FORCE` environment variables.
    #[clap(
        long,
        env = "RATTLER_BUILD_COLOR",
        default_value = "auto",
        global = true
    )]
    pub color: Color,
}

impl App {
    /// Returns true if the application will launch a TUI.
    pub fn is_tui(&self) -> bool {
        match &self.subcommand {
            Some(SubCommands::Build(args)) => args.tui,
            _ => false,
        }
    }
}
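
// Illustrative sketch (assumption, not upstream code): the optional subcommand and the
// global flags can be exercised directly with `Parser::try_parse_from`. The
// "rattler-build" binary name is only a placeholder for the example.
#[cfg(test)]
mod app_sketch {
    use clap::Parser;

    #[test]
    fn parses_build_subcommand_with_defaults() {
        let app = super::App::try_parse_from(["rattler-build", "build"]).unwrap();
        // `--tui` defaults to false, so no TUI is launched.
        assert!(!app.is_tui());
        assert!(matches!(app.subcommand, Some(super::SubCommands::Build(_))));
    }
}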

/// Common options that are shared between the [`Rebuild`] and [`Build`] subcommands
#[derive(Parser, Clone, Debug)]
pub struct CommonOpts {
    /// Output directory for build artifacts.
    #[clap(
        long,
        env = "CONDA_BLD_PATH",
        default_value = "./output",
        verbatim_doc_comment,
        help_heading = "Modifying result"
    )]
    pub output_dir: Option<PathBuf>,

    /// Enable support for repodata.json.zst
    #[clap(long, env = "RATTLER_ZSTD", default_value = "true", hide = true)]
    pub use_zstd: bool,

    /// Enable support for repodata.json.bz2
    #[clap(long, env = "RATTLER_BZ2", default_value = "true", hide = true)]
    pub use_bz2: bool,

    /// Enable experimental features
    #[arg(long, env = "RATTLER_BUILD_EXPERIMENTAL")]
    pub experimental: bool,

    /// Path to an auth-file to read authentication information from
    #[clap(long, env = "RATTLER_AUTH_FILE", hide = true)]
    pub auth_file: Option<PathBuf>,
}

/// Container for the CLI package format and compression level
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct PackageFormatAndCompression {
    /// The archive type that is selected
    pub archive_type: ArchiveType,
    /// The compression level that is selected
    pub compression_level: CompressionLevel,
}

// Deserializer for the package format and compression level, e.g. `tar-bz2:9` or `conda:15`
impl FromStr for PackageFormatAndCompression {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut split = s.split(':');
        let package_format = split.next().ok_or("invalid")?;

        let compression = split.next().unwrap_or("default");

        // remove all non-alphanumeric characters
        let package_format = package_format
            .chars()
            .filter(|c| c.is_alphanumeric())
            .collect::<String>();

        let archive_type = match package_format.to_lowercase().as_str() {
            "tarbz2" => ArchiveType::TarBz2,
            "conda" => ArchiveType::Conda,
            _ => return Err(format!("Unknown package format: {}", package_format)),
        };

        let compression_level = match compression {
            "max" | "highest" => CompressionLevel::Highest,
            "default" | "normal" => CompressionLevel::Default,
            "fast" | "lowest" | "min" => CompressionLevel::Lowest,
            number if number.parse::<i32>().is_ok() => {
                let number = number.parse::<i32>().unwrap_or_default();
                match archive_type {
                    ArchiveType::TarBz2 => {
                        if !(1..=9).contains(&number) {
                            return Err("Compression level for .tar.bz2 must be between 1 and 9"
                                .to_string());
                        }
                    }
                    ArchiveType::Conda => {
                        if !(-7..=22).contains(&number) {
                            return Err(
                                "Compression level for conda packages (zstd) must be between -7 and 22".to_string()
                            );
                        }
                    }
                }
                CompressionLevel::Numeric(number)
            }
            _ => return Err(format!("Unknown compression level: {}", compression)),
        };

        Ok(PackageFormatAndCompression {
            archive_type,
            compression_level,
        })
    }
}
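
// Illustrative sketch (assumption): because the struct implements `FromStr` with a
// `Display`able error, clap's derive can parse `--package-format` values such as
// `conda:max` directly into `BuildOpts` (defined further below).
#[cfg(test)]
mod package_format_flag_sketch {
    use clap::Parser;
    use rattler_conda_types::package::ArchiveType;
    use rattler_package_streaming::write::CompressionLevel;

    #[test]
    fn package_format_flag_uses_from_str() {
        let opts =
            super::BuildOpts::try_parse_from(["build", "--package-format", "conda:max"]).unwrap();
        assert_eq!(
            opts.package_format,
            super::PackageFormatAndCompression {
                archive_type: ArchiveType::Conda,
                compression_level: CompressionLevel::Highest,
            }
        );
    }
}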

/// Build options.
#[derive(Parser, Clone)]
pub struct BuildOpts {
    /// The recipe file or directory containing `recipe.yaml`. Defaults to the
    /// current directory.
    #[arg(
        short,
        long,
        default_value = ".",
        default_value_if("recipe_dir", ArgPredicate::IsPresent, None)
    )]
    pub recipe: Vec<PathBuf>,

    /// The directory that contains recipes.
    #[arg(long, value_parser = is_dir)]
    pub recipe_dir: Option<PathBuf>,

    /// Build recipes up to the specified package.
    #[arg(long)]
    pub up_to: Option<String>,

    /// The build platform to use for the build (e.g. for building with
    /// emulation, or rendering).
    #[arg(long, default_value_t = Platform::current())]
    pub build_platform: Platform,

    /// The target platform for the build.
    #[arg(long)]
    pub target_platform: Option<Platform>,

    /// The host platform for the build. If set, it is also used to determine the
    /// `target_platform` (as long as the package is not `noarch`).
    #[arg(long, default_value_t = Platform::current())]
    pub host_platform: Platform,

    /// Add a channel to search for dependencies in.
    #[arg(short = 'c', long, default_value = "conda-forge")]
    pub channel: Vec<String>,

    /// Variant configuration files for the build.
    #[arg(short = 'm', long)]
    pub variant_config: Vec<PathBuf>,

    /// Do not read the `variants.yaml` file next to a recipe.
    #[arg(long)]
    pub ignore_recipe_variants: bool,

    /// Render the recipe files without executing the build.
    #[arg(long)]
    pub render_only: bool,

    /// Also solve the dependencies while rendering (requires `--render-only`).
    #[arg(long, requires("render_only"))]
    pub with_solve: bool,

    /// Keep intermediate build artifacts after the build.
    #[arg(long)]
    pub keep_build: bool,

    /// Don't use the build id (timestamp) when creating the build directory name.
    #[arg(long)]
    pub no_build_id: bool,

    /// The package format to use for the build. Can be one of `tar-bz2` or
    /// `conda`. You can also add a compression level to the package format,
    /// e.g. `tar-bz2:<number>` (from 1 to 9) or `conda:<number>` (from -7 to
    /// 22).
    #[arg(
        long,
        default_value = "conda",
        help_heading = "Modifying result",
        verbatim_doc_comment
    )]
    pub package_format: PackageFormatAndCompression,

    /// The number of threads to use for compression (only relevant when also
    /// using `--package-format conda`)
    #[arg(long)]
    pub compression_threads: Option<u32>,

    /// Don't store the recipe in the final package
    #[arg(long, help_heading = "Modifying result")]
    pub no_include_recipe: bool,

    /// Don't run the tests after building the package
    #[arg(long, default_value = "false", help_heading = "Modifying result")]
    pub no_test: bool,

    /// Don't force colors in the output of the build script
    #[arg(long, default_value = "true", help_heading = "Modifying result")]
    pub color_build_log: bool,

    #[allow(missing_docs)]
    #[clap(flatten)]
    pub common: CommonOpts,

    /// Launch the terminal user interface.
    #[arg(long, default_value = "false", hide = !cfg!(feature = "tui"))]
    pub tui: bool,

    /// Whether to skip packages that already exist in any channel.
    /// If set to `none`, do not skip any packages (the default when not specified).
    /// If set to `local`, only skip packages that already exist locally (the
    /// default when using `--skip-existing`). If set to `all`, skip packages
    /// that already exist in any channel.
    #[arg(
        long,
        default_missing_value = "local",
        default_value = "none",
        num_args = 0..=1,
        help_heading = "Modifying result"
    )]
    pub skip_existing: SkipExisting,

    /// Extra metadata to include in about.json
    #[arg(long, value_parser = parse_key_val)]
    pub extra_meta: Option<Vec<(String, Value)>>,
}
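
// Illustrative sketch (assumption): when `--recipe-dir` is passed, the
// `default_value_if` above removes the `.` default for `--recipe`, so only the
// directory is scanned for recipes. The module name is an example-only choice.
#[cfg(test)]
mod build_opts_sketch {
    use std::path::PathBuf;

    use clap::Parser;

    #[test]
    fn recipe_dir_drops_the_recipe_default() {
        // Without `--recipe-dir`, `--recipe` falls back to the current directory.
        let opts = super::BuildOpts::try_parse_from(["build"]).unwrap();
        assert_eq!(opts.recipe, vec![PathBuf::from(".")]);

        // With `--recipe-dir`, the default is dropped and `recipe` stays empty.
        let opts = super::BuildOpts::try_parse_from(["build", "--recipe-dir", "."]).unwrap();
        assert!(opts.recipe.is_empty());
        assert_eq!(opts.recipe_dir, Some(PathBuf::from(".")));
    }
}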

fn is_dir(dir: &str) -> Result<PathBuf, String> {
    let path = PathBuf::from(dir);
    if path.is_dir() {
        Ok(path)
    } else {
        Err(format!(
            "Path '{dir}' needs to exist on disk and be a directory",
        ))
    }
}

/// Parse a single `KEY=value` pair into a key and a JSON string value
fn parse_key_val(s: &str) -> Result<(String, Value), Box<dyn Error + Send + Sync + 'static>> {
    let (key, value) = s
        .split_once('=')
        .ok_or_else(|| format!("invalid KEY=value: no `=` found in `{}`", s))?;
    Ok((key.to_string(), json!(value)))
}
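
// Illustrative sketch (assumption): `--extra-meta` values are split on the first `=`
// and the right-hand side is stored verbatim as a JSON string; no type coercion
// happens at parse time.
#[cfg(test)]
mod extra_meta_sketch {
    use serde_json::json;

    #[test]
    fn splits_on_the_first_equals_sign() {
        let (key, value) = super::parse_key_val("sha=abc=def").unwrap();
        assert_eq!(key, "sha");
        assert_eq!(value, json!("abc=def"));
        assert!(super::parse_key_val("no-equals-sign").is_err());
    }
}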

/// Test options.
#[derive(Parser)]
pub struct TestOpts {
    /// Channels to use when testing
    #[arg(short = 'c', long)]
    pub channel: Option<Vec<String>>,

    /// The package file to test
    #[arg(short, long)]
    pub package_file: PathBuf,

    /// The number of threads to use for compression.
    #[clap(long, env = "RATTLER_COMPRESSION_THREADS")]
    pub compression_threads: Option<u32>,

    /// Common options.
    #[clap(flatten)]
    pub common: CommonOpts,
}

/// Rebuild options.
#[derive(Parser)]
pub struct RebuildOpts {
    /// The package file to rebuild
    #[arg(short, long)]
    pub package_file: PathBuf,

    /// Do not run tests after building
    #[arg(long, default_value = "false")]
    pub no_test: bool,

    /// The number of threads to use for compression.
    #[clap(long, env = "RATTLER_COMPRESSION_THREADS")]
    pub compression_threads: Option<u32>,

    /// Common options.
    #[clap(flatten)]
    pub common: CommonOpts,
}

/// Upload options.
#[derive(Parser, Debug)]
pub struct UploadOpts {
    /// The package file(s) to upload
    #[arg(global = true, required = false)]
    pub package_files: Vec<PathBuf>,

    /// The server type
    #[clap(subcommand)]
    pub server_type: ServerType,

    /// Common options.
    #[clap(flatten)]
    pub common: CommonOpts,
}

/// Server type.
#[derive(Clone, Debug, PartialEq, Parser)]
#[allow(missing_docs)]
pub enum ServerType {
    Quetz(QuetzOpts),
    Artifactory(ArtifactoryOpts),
    Prefix(PrefixOpts),
    Anaconda(AnacondaOpts),
    #[clap(hide = true)]
    CondaForge(CondaForgeOpts),
}

#[derive(Clone, Debug, PartialEq, Parser)]
/// Upload to a Quetz server.
/// Authentication is read from the keychain / auth-file.
pub struct QuetzOpts {
    /// The URL to your Quetz server
    #[arg(short, long, env = "QUETZ_SERVER_URL")]
    pub url: Url,

    /// The URL to your channel
    #[arg(short, long, env = "QUETZ_CHANNEL")]
    pub channel: String,

    /// The Quetz API key. If none is provided, the token is read from the
    /// keychain / auth-file.
    #[arg(short, long, env = "QUETZ_API_KEY")]
    pub api_key: Option<String>,
}

#[derive(Clone, Debug, PartialEq, Parser)]
/// Options for uploading to an Artifactory channel.
/// Authentication is read from the keychain / auth-file.
pub struct ArtifactoryOpts {
    /// The URL to your Artifactory server
    #[arg(short, long, env = "ARTIFACTORY_SERVER_URL")]
    pub url: Url,

    /// The URL to your channel
    #[arg(short, long, env = "ARTIFACTORY_CHANNEL")]
    pub channel: String,

    /// Your Artifactory username
    #[arg(short = 'r', long, env = "ARTIFACTORY_USERNAME")]
    pub username: Option<String>,

    /// Your Artifactory password
    #[arg(short, long, env = "ARTIFACTORY_PASSWORD")]
    pub password: Option<String>,
}

/// Options for uploading to a prefix.dev server.
/// Authentication is read from the keychain / auth-file.
#[derive(Clone, Debug, PartialEq, Parser)]
pub struct PrefixOpts {
    /// The URL to the prefix.dev server (only necessary for self-hosted
    /// instances)
    #[arg(
        short,
        long,
        env = "PREFIX_SERVER_URL",
        default_value = "https://prefix.dev"
    )]
    pub url: Url,

    /// The channel to upload the package to
    #[arg(short, long, env = "PREFIX_CHANNEL")]
    pub channel: String,

    /// The prefix.dev API key. If none is provided, the token is read from the
    /// keychain / auth-file.
    #[arg(short, long, env = "PREFIX_API_KEY")]
    pub api_key: Option<String>,
}

/// Options for uploading to an Anaconda.org server
#[derive(Clone, Debug, PartialEq, Parser)]
pub struct AnacondaOpts {
    /// The owner of the distribution (e.g. conda-forge or your username)
    #[arg(short, long, env = "ANACONDA_OWNER")]
    pub owner: String,

    /// The channel / label to upload the package to (e.g. main / rc)
    #[arg(short, long, env = "ANACONDA_CHANNEL", default_value = "main")]
    pub channel: Vec<String>,

    /// The Anaconda API key. If none is provided, the token is read from the
    /// keychain / auth-file.
    #[arg(short, long, env = "ANACONDA_API_KEY")]
    pub api_key: Option<String>,

    /// The URL to the Anaconda server
    #[arg(
        short,
        long,
        env = "ANACONDA_SERVER_URL",
        default_value = "https://api.anaconda.org"
    )]
    pub url: Url,

    /// Replace files on conflict
    #[arg(long, short, env = "ANACONDA_FORCE", default_value = "false")]
    pub force: bool,
}

/// Options for uploading to conda-forge
#[derive(Clone, Debug, PartialEq, Parser)]
pub struct CondaForgeOpts {
    /// The Anaconda API key
    #[arg(long, env = "STAGING_BINSTAR_TOKEN", required = true)]
    pub staging_token: String,

    /// The feedstock name
    #[arg(long, env = "FEEDSTOCK_NAME", required = true)]
    pub feedstock: String,

    /// The feedstock token
    #[arg(long, env = "FEEDSTOCK_TOKEN", required = true)]
    pub feedstock_token: String,

    /// The staging channel name
    #[arg(long, env = "STAGING_CHANNEL", default_value = "cf-staging")]
    pub staging_channel: String,

    /// The Anaconda Server URL
    #[arg(
        long,
        env = "ANACONDA_SERVER_URL",
        default_value = "https://api.anaconda.org"
    )]
    pub anaconda_url: Url,

    /// The validation endpoint URL
    #[arg(
        long,
        env = "VALIDATION_ENDPOINT",
        default_value = "https://conda-forge.herokuapp.com/feedstock-outputs/copy"
    )]
    pub validation_endpoint: Url,

    /// Post comment on promotion failure
    #[arg(long, env = "POST_COMMENT_ON_ERROR", default_value = "true")]
    pub post_comment_on_error: bool,

    /// The CI provider
    #[arg(long, env = "CI")]
    pub provider: Option<String>,

    /// Dry run, don't actually upload anything
    #[arg(long, env = "DRY_RUN", default_value = "false")]
    pub dry_run: bool,
}

#[cfg(test)]
mod test {
    use std::str::FromStr;

    use rattler_conda_types::package::ArchiveType;
    use rattler_package_streaming::write::CompressionLevel;

    use super::PackageFormatAndCompression;

    #[test]
    fn test_parse_packaging() {
        let package_format = PackageFormatAndCompression::from_str("tar-bz2").unwrap();
        assert_eq!(
            package_format,
            PackageFormatAndCompression {
                archive_type: ArchiveType::TarBz2,
                compression_level: CompressionLevel::Default
            }
        );

        let package_format = PackageFormatAndCompression::from_str("conda").unwrap();
        assert_eq!(
            package_format,
            PackageFormatAndCompression {
                archive_type: ArchiveType::Conda,
                compression_level: CompressionLevel::Default
            }
        );

        let package_format = PackageFormatAndCompression::from_str("tar-bz2:1").unwrap();
        assert_eq!(
            package_format,
            PackageFormatAndCompression {
                archive_type: ArchiveType::TarBz2,
                compression_level: CompressionLevel::Numeric(1)
            }
        );

        let package_format = PackageFormatAndCompression::from_str(".tar.bz2:max").unwrap();
        assert_eq!(
            package_format,
            PackageFormatAndCompression {
                archive_type: ArchiveType::TarBz2,
                compression_level: CompressionLevel::Highest
            }
        );

        let package_format = PackageFormatAndCompression::from_str("tarbz2:5").unwrap();
        assert_eq!(
            package_format,
            PackageFormatAndCompression {
                archive_type: ArchiveType::TarBz2,
                compression_level: CompressionLevel::Numeric(5)
            }
        );

        let package_format = PackageFormatAndCompression::from_str("conda:1").unwrap();
        assert_eq!(
            package_format,
            PackageFormatAndCompression {
                archive_type: ArchiveType::Conda,
                compression_level: CompressionLevel::Numeric(1)
            }
        );

        let package_format = PackageFormatAndCompression::from_str("conda:max").unwrap();
        assert_eq!(
            package_format,
            PackageFormatAndCompression {
                archive_type: ArchiveType::Conda,
                compression_level: CompressionLevel::Highest
            }
        );

        let package_format = PackageFormatAndCompression::from_str("conda:-5").unwrap();
        assert_eq!(
            package_format,
            PackageFormatAndCompression {
                archive_type: ArchiveType::Conda,
                compression_level: CompressionLevel::Numeric(-5)
            }
        );

        let package_format = PackageFormatAndCompression::from_str("conda:fast").unwrap();
        assert_eq!(
            package_format,
            PackageFormatAndCompression {
                archive_type: ArchiveType::Conda,
                compression_level: CompressionLevel::Lowest
            }
        );
    }
}