use clap::{Parser, Subcommand, ValueEnum};
use std::path::PathBuf;

#[derive(Parser, Debug)]
#[command(name = "mermaid")]
#[command(version = "0.1.0")]
#[command(about = "An open-source, model-agnostic AI pair programmer", long_about = None)]
pub struct Cli {
    /// Model to use
    #[arg(short, long)]
    pub model: Option<String>,

    /// Path to a configuration file
    #[arg(short, long)]
    pub config: Option<PathBuf>,

    /// Number of GPUs to use (the backend's `num_gpu` option)
    #[arg(long)]
    pub num_gpu: Option<i32>,

    /// Number of threads to use (the backend's `num_thread` option)
    #[arg(long)]
    pub num_thread: Option<i32>,

    /// Context window size (the backend's `num_ctx` option)
    #[arg(long)]
    pub num_ctx: Option<i32>,

    /// Enable or disable NUMA support (the backend's `numa` option)
    #[arg(long)]
    pub numa: Option<bool>,

    /// Enable verbose output
    #[arg(short, long)]
    pub verbose: bool,

    /// Path to the project directory to work in
    #[arg(short, long)]
    pub path: Option<PathBuf>,

    /// Disable automatic installation
    #[arg(long)]
    pub no_auto_install: bool,

    /// Disable automatically starting the proxy
    #[arg(long)]
    pub no_auto_proxy: bool,

    /// Stop the proxy when exiting
    #[arg(long)]
    pub stop_proxy_on_exit: bool,

    /// Resume a previous conversation
    #[arg(long, conflicts_with = "continue_conversation")]
    pub resume: bool,

    /// Continue the last conversation
    #[arg(long = "continue", conflicts_with = "resume")]
    pub continue_conversation: bool,

    /// Run a single prompt non-interactively
    #[arg(short, long, conflicts_with_all = ["resume", "continue_conversation"])]
    pub prompt: Option<String>,

    /// Output format for non-interactive mode
    #[arg(long, value_enum, default_value_t = OutputFormat::Text, requires = "prompt")]
    pub output_format: OutputFormat,

    /// Maximum number of tokens to generate
    #[arg(long, requires = "prompt")]
    pub max_tokens: Option<usize>,

    /// Do not execute suggested actions
    #[arg(long, requires = "prompt")]
    pub no_execute: bool,

    /// Backend to use
    #[arg(long)]
    pub backend: Option<String>,

    /// List all available models
    #[arg(long)]
    pub list_all_models: bool,

    /// List available backends
    #[arg(long)]
    pub backends: bool,

    #[command(subcommand)]
    pub command: Option<Commands>,
}

#[derive(Subcommand, Debug)]
pub enum Commands {
    /// Initialize configuration
    Init,
    /// List available models
    List,
    /// Start an interactive chat session
    Chat,
    /// Print version information
    Version,
    /// Show current status
    Status,
}

#[derive(Debug, Clone, ValueEnum)]
pub enum OutputFormat {
    /// Plain text output
    Text,
    /// JSON output
    Json,
    /// Markdown output
    Markdown,
}
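
// A minimal self-check sketch, assuming clap 4.x. `Command::debug_assert`
// runs clap's internal consistency checks (every id referenced by
// `conflicts_with`/`requires` exists, flag names don't collide, etc.) and
// panics with a diagnostic if anything is inconsistent; the clap docs
// recommend keeping such a test next to the derive definitions.
#[cfg(test)]
mod tests {
    use super::Cli;
    use clap::{CommandFactory, Parser};

    #[test]
    fn verify_cli() {
        // Build the full `Command` from the derive definitions and run
        // clap's debug assertions over it.
        Cli::command().debug_assert();
    }

    #[test]
    fn continue_conflicts_with_resume() {
        // `--continue` and `--resume` are declared mutually exclusive,
        // so passing both must be rejected at parse time.
        assert!(Cli::try_parse_from(["mermaid", "--resume", "--continue"]).is_err());
    }
}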