1use crate::{chat, config, database, input::MultiLineInput, readers};
2use anyhow::Result;
3use clap::{Parser, Subcommand};
4use colored::Colorize;
5use rpassword::read_password;
6use std::collections::HashMap;
7use std::io::{self, Write};
8use std::sync::atomic::{AtomicBool, Ordering};
9
10pub static DEBUG_MODE: AtomicBool = AtomicBool::new(false);
12
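/// Crate-wide debug logging macro, gated on [`DEBUG_MODE`].
///
/// Illustrative usage sketch (the crate name `lc` is assumed from the binary
/// name; output goes to stderr with a dimmed `[DEBUG]` prefix):
///
/// ```ignore
/// lc::cli::set_debug_mode(true);
/// lc::debug_log!("fetched {} models from '{}'", 42, "openai"); // provider name is a placeholder
/// ```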
13#[macro_export]
15macro_rules! debug_log {
16 ($($arg:tt)*) => {
17 if $crate::cli::DEBUG_MODE.load(std::sync::atomic::Ordering::Relaxed) {
18 use colored::Colorize;
19 eprintln!("{} {}", "[DEBUG]".dimmed(), format!($($arg)*));
20 }
21 };
22}
23
24pub fn set_debug_mode(enabled: bool) {
26 DEBUG_MODE.store(enabled, Ordering::Relaxed);
27}
28
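// Top-level CLI surface. A few illustrative invocations (provider, model, and
// file names below are placeholders, not shipped defaults):
//
//   lc "explain lifetimes in Rust"                      # direct prompt
//   lc -p openai -m gpt-4o -a report.pdf "summarize"    # overrides plus an attachment
//   lc chat -m gpt-4o --cid my-session                  # interactive chat subcommand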
29#[derive(Parser)]
30#[command(name = "lc")]
31#[command(
32 about = "LLM Client - A fast Rust-based LLM CLI tool with PDF support and RAG capabilities"
33)]
34#[command(version = "0.1.0")]
35pub struct Cli {
36 #[arg(value_name = "PROMPT")]
38 pub prompt: Vec<String>,
39
40 #[arg(short = 'p', long = "provider")]
42 pub provider: Option<String>,
43
44 #[arg(short = 'm', long = "model")]
46 pub model: Option<String>,
47
48 #[arg(short = 's', long = "system")]
50 pub system_prompt: Option<String>,
51
52 #[arg(long = "max-tokens")]
54 pub max_tokens: Option<String>,
55
56 #[arg(long = "temperature")]
58 pub temperature: Option<String>,
59
60 #[arg(short = 'a', long = "attach")]
62 pub attachments: Vec<String>,
63
64 #[arg(short = 'i', long = "image")]
66 pub images: Vec<String>,
67
68 #[arg(short = 't', long = "tools")]
70 pub tools: Option<String>,
71
72 #[arg(short = 'v', long = "vectordb")]
74 pub vectordb: Option<String>,
75
76 #[arg(short = 'd', long = "debug")]
78 pub debug: bool,
79
80 #[arg(short = 'c', long = "continue")]
82 pub continue_session: bool,
83
84 #[arg(long = "cid")]
86 pub chat_id: Option<String>,
87
88 #[arg(long = "use-search")]
90 pub use_search: Option<String>,
91
92 #[arg(long = "stream")]
94 pub stream: bool,
95
96 #[command(subcommand)]
97 pub command: Option<Commands>,
98}
99
100#[derive(clap::ValueEnum, Clone, Debug)]
101pub enum CompletionShell {
102 Bash,
104 Zsh,
106 Fish,
108 PowerShell,
110 Elvish,
112}
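// Illustrative use of the `completions` subcommand defined below (assuming the
// generated script is written to stdout; the install path is only an example):
//   lc completions zsh > ~/.zfunc/_lc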
113
114#[derive(Subcommand)]
115pub enum Commands {
116 #[command(alias = "p")]
118 Providers {
119 #[command(subcommand)]
120 command: ProviderCommands,
121 },
122 #[command(alias = "k")]
124 Keys {
125 #[command(subcommand)]
126 command: KeyCommands,
127 },
128 #[command(alias = "l")]
130 Logs {
131 #[command(subcommand)]
132 command: LogCommands,
133 },
134 #[command(alias = "u")]
136 Usage {
137 #[command(subcommand)]
138 command: Option<UsageCommands>,
139 #[arg(short = 'd', long = "days")]
141 days: Option<u32>,
142 #[arg(short = 't', long = "tokens")]
144 tokens_only: bool,
145 #[arg(short = 'r', long = "requests")]
147 requests_only: bool,
148 #[arg(short = 'n', long = "limit", default_value = "10")]
150 limit: usize,
151 },
152 #[command(alias = "co")]
154 Config {
155 #[command(subcommand)]
156 command: Option<ConfigCommands>,
157 },
158 #[command(alias = "c")]
160 Chat {
161 #[arg(short, long)]
163 model: Option<String>,
164 #[arg(short, long)]
166 provider: Option<String>,
167 #[arg(long)]
169 cid: Option<String>,
170 #[arg(short = 't', long = "tools")]
172 tools: Option<String>,
173 #[arg(short = 'v', long = "vectordb")]
175 database: Option<String>,
176 #[arg(short = 'd', long = "debug")]
178 debug: bool,
179 #[arg(short = 'i', long = "image")]
181 images: Vec<String>,
182 },
183 #[command(alias = "m")]
185 Models {
186 #[command(subcommand)]
187 command: Option<ModelsCommands>,
188 #[arg(short = 'q', long = "query")]
190 query: Option<String>,
191 #[arg(long = "tools")]
193 tools: bool,
194 #[arg(long = "reasoning")]
196 reasoning: bool,
197 #[arg(long = "vision")]
199 vision: bool,
200 #[arg(long = "audio")]
202 audio: bool,
203 #[arg(long = "code")]
205 code: bool,
206 #[arg(long = "ctx")]
208 context_length: Option<String>,
209 #[arg(long = "input")]
211 input_length: Option<String>,
212 #[arg(long = "output")]
214 output_length: Option<String>,
215 #[arg(long = "input-price")]
217 input_price: Option<f64>,
218 #[arg(long = "output-price")]
220 output_price: Option<f64>,
221 },
222 #[command(alias = "a")]
224 Alias {
225 #[command(subcommand)]
226 command: AliasCommands,
227 },
228 #[command(alias = "t")]
230 Templates {
231 #[command(subcommand)]
232 command: TemplateCommands,
233 },
234 #[command(alias = "pr")]
236 Proxy {
237 #[arg(short = 'p', long = "port", default_value = "6789")]
239 port: u16,
240 #[arg(long = "host", default_value = "127.0.0.1")]
242 host: String,
243 #[arg(long = "provider")]
245 provider: Option<String>,
246 #[arg(short = 'm', long = "model")]
248 model: Option<String>,
249 #[arg(short = 'k', long = "key")]
251 api_key: Option<String>,
252 #[arg(short = 'g', long = "generate-key")]
254 generate_key: bool,
255 },
256 Mcp {
258 #[command(subcommand)]
259 command: McpCommands,
260 },
261 #[command(alias = "e")]
263 Embed {
264 #[arg(short, long)]
266 model: String,
267 #[arg(short, long)]
269 provider: Option<String>,
270 #[arg(short = 'v', long = "vectordb")]
272 database: Option<String>,
273 #[arg(short = 'f', long = "files")]
275 files: Vec<String>,
276 text: Option<String>,
278 #[arg(short = 'd', long = "debug")]
280 debug: bool,
281 },
282 #[command(alias = "s")]
284 Similar {
285 #[arg(short, long)]
287 model: Option<String>,
288 #[arg(short, long)]
290 provider: Option<String>,
291 #[arg(short = 'v', long = "vectordb")]
293 database: String,
294 #[arg(short, long, default_value = "5")]
296 limit: usize,
297 query: String,
299 },
300 #[command(alias = "v")]
302 Vectors {
303 #[command(subcommand)]
304 command: VectorCommands,
305 },
306 #[command(alias = "w")]
308 WebChatProxy {
309 #[command(subcommand)]
310 command: WebChatProxyCommands,
311 },
312 #[command(alias = "sy")]
314 Sync {
315 #[command(subcommand)]
316 command: SyncCommands,
317 },
318 #[command(alias = "se")]
320 Search {
321 #[command(subcommand)]
322 command: SearchCommands,
323 },
324 #[command(alias = "img")]
326 Image {
327 prompt: String,
329 #[arg(short, long)]
331 model: Option<String>,
332 #[arg(short, long)]
334 provider: Option<String>,
335 #[arg(short, long, default_value = "1024x1024")]
337 size: String,
338 #[arg(short, long, default_value = "1")]
340 count: u32,
341 #[arg(short, long)]
343 output: Option<String>,
344 #[arg(short = 'd', long = "debug")]
346 debug: bool,
347 },
348 #[command(alias = "dump")]
350 DumpMetadata {
351 provider: Option<String>,
353 #[arg(short, long)]
355 list: bool,
356 },
357 Completions {
359 #[arg(value_enum)]
361 shell: CompletionShell,
362 },
363}
364
365#[derive(Subcommand)]
366pub enum ModelsCommands {
367 #[command(alias = "r")]
369 Refresh,
370 #[command(alias = "i")]
372 Info,
373 #[command(alias = "d")]
375 Dump,
376 #[command(alias = "e")]
378 Embed,
379 #[command(alias = "p")]
381 Path {
382 #[command(subcommand)]
383 command: ModelsPathCommands,
384 },
385 #[command(alias = "t")]
387 Tags {
388 #[command(subcommand)]
389 command: ModelsTagsCommands,
390 },
391 #[command(alias = "f")]
393 Filter {
394 #[arg(short = 't', long = "tag")]
396 tags: String,
397 },
398}
399
400#[derive(Subcommand)]
401pub enum ModelsPathCommands {
402 #[command(alias = "l")]
404 List,
405 #[command(alias = "a")]
407 Add {
408 path: String,
410 },
411 #[command(alias = "d")]
413 Delete {
414 path: String,
416 },
417}
418
419#[derive(Subcommand)]
420pub enum ModelsTagsCommands {
421 #[command(alias = "l")]
423 List,
424 #[command(alias = "a")]
426 Add {
427 tag: String,
429 rule: String,
431 },
432}
433
434#[derive(Subcommand)]
435pub enum AliasCommands {
436 #[command(alias = "a")]
438 Add {
439 name: String,
441 target: String,
443 },
444 #[command(alias = "d")]
446 Delete {
447 name: String,
449 },
450 #[command(alias = "l")]
452 List,
453}
454
455#[derive(Subcommand)]
456pub enum TemplateCommands {
457 #[command(alias = "a")]
459 Add {
460 name: String,
462 prompt: String,
464 },
465 #[command(alias = "d")]
467 Delete {
468 name: String,
470 },
471 #[command(alias = "l")]
473 List,
474}
475
476#[derive(Subcommand)]
477pub enum ProviderCommands {
478 #[command(alias = "a")]
480 Add {
481 name: String,
483 url: String,
485 #[arg(short = 'm', long = "models-path")]
487 models_path: Option<String>,
488 #[arg(short = 'c', long = "chat-path")]
490 chat_path: Option<String>,
491 },
492 #[command(alias = "u")]
494 Update {
495 name: String,
497 url: String,
499 },
500 #[command(alias = "r")]
502 Remove {
503 name: String,
505 },
506 #[command(alias = "l")]
508 List,
509 #[command(alias = "m")]
511 Models {
512 name: String,
514 #[arg(short = 'r', long = "refresh")]
516 refresh: bool,
517 },
518 #[command(alias = "h")]
520 Headers {
521 provider: String,
523 #[command(subcommand)]
524 command: HeaderCommands,
525 },
526 #[command(alias = "v")]
528 Vars {
529 provider: String,
531 #[command(subcommand)]
532 command: ProviderVarsCommands,
533 },
534 #[command(alias = "t")]
536 TokenUrl {
537 provider: String,
539 url: String,
541 },
542 #[command(alias = "path")]
544 Paths {
545 provider: String,
547 #[command(subcommand)]
548 command: ProviderPathCommands,
549 },
550}
551
552#[derive(Subcommand)]
553pub enum ProviderVarsCommands {
554 #[command(alias = "s")]
556 Set {
557 key: String,
559 value: String,
561 },
562 #[command(alias = "g")]
564 Get {
565 key: String,
567 },
568 #[command(alias = "l")]
570 List,
571}
572
573#[derive(Subcommand)]
574pub enum ProviderPathCommands {
575 #[command(alias = "a")]
577 Add {
578 #[arg(short = 'm', long = "models")]
580 models_path: Option<String>,
581 #[arg(short = 'c', long = "chat")]
583 chat_path: Option<String>,
584 #[arg(short = 'i', long = "images")]
586 images_path: Option<String>,
587 #[arg(short = 'e', long = "embeddings")]
589 embeddings_path: Option<String>,
590 },
591 #[command(alias = "d")]
593 Delete {
594 #[arg(short = 'm', long = "models")]
596 models: bool,
597 #[arg(short = 'c', long = "chat")]
599 chat: bool,
600 #[arg(short = 'i', long = "images")]
602 images: bool,
603 #[arg(short = 'e', long = "embeddings")]
605 embeddings: bool,
606 },
607 #[command(alias = "l")]
609 List,
610}
611
612#[derive(Subcommand)]
613pub enum HeaderCommands {
614 #[command(alias = "a")]
616 Add {
617 name: String,
619 value: String,
621 },
622 #[command(alias = "d")]
624 Delete {
625 name: String,
627 },
628 #[command(alias = "l")]
630 List,
631}
632
633#[derive(Subcommand)]
634pub enum KeyCommands {
635 #[command(alias = "a")]
637 Add {
638 name: String,
640 },
641 #[command(alias = "l")]
643 List,
644 #[command(alias = "g")]
646 Get {
647 name: String,
649 },
650 #[command(alias = "r")]
652 Remove {
653 name: String,
655 },
656}
657
658#[derive(Subcommand)]
659pub enum LogCommands {
660 #[command(alias = "sh")]
662 Show {
663 #[arg(long)]
665 minimal: bool,
666 },
667 #[command(alias = "r")]
669 Recent {
670 #[command(subcommand)]
671 command: Option<RecentCommands>,
672 #[arg(short, long, default_value = "10")]
674 count: usize,
675 },
676 #[command(alias = "c")]
678 Current,
679 #[command(alias = "s")]
681 Stats,
682 #[command(alias = "p")]
684 Purge {
685 #[arg(long)]
687 yes: bool,
688 #[arg(long)]
690 older_than_days: Option<u32>,
691 #[arg(long)]
693 keep_recent: Option<usize>,
694 #[arg(long)]
696 max_size_mb: Option<u64>,
697 },
698}
699
700#[derive(Subcommand)]
701pub enum RecentCommands {
702 #[command(alias = "a")]
704 Answer {
705 #[command(subcommand)]
706 command: Option<AnswerCommands>,
707 },
708 #[command(alias = "q")]
710 Question,
711 #[command(alias = "m")]
713 Model,
714 #[command(alias = "s")]
716 Session,
717}
718
719#[derive(Subcommand)]
720pub enum UsageCommands {
721 #[command(alias = "d")]
723 Daily {
724 #[arg(short = 'n', long = "count", default_value = "30")]
726 count: usize,
727 },
728 #[command(alias = "w")]
730 Weekly {
731 #[arg(short = 'n', long = "count", default_value = "12")]
733 count: usize,
734 },
735 #[command(alias = "m")]
737 Monthly {
738 #[arg(short = 'n', long = "count", default_value = "12")]
740 count: usize,
741 },
742 #[command(alias = "y")]
744 Yearly {
745 #[arg(short = 'n', long = "count", default_value = "5")]
747 count: usize,
748 },
749 #[command(alias = "models")]
751 Models {
752 #[arg(short = 'n', long = "count", default_value = "10")]
754 count: usize,
755 },
756}
757
758#[derive(Subcommand)]
759pub enum AnswerCommands {
760 #[command(alias = "c")]
762 Code,
763}
764
765#[derive(Subcommand)]
766pub enum ConfigCommands {
767 #[command(alias = "s")]
769 Set {
770 #[command(subcommand)]
771 command: SetCommands,
772 },
773 #[command(alias = "g")]
775 Get {
776 #[command(subcommand)]
777 command: GetCommands,
778 },
779 #[command(alias = "d")]
781 Delete {
782 #[command(subcommand)]
783 command: DeleteCommands,
784 },
785 #[command(alias = "p")]
787 Path,
788}
789
790#[derive(Subcommand)]
791pub enum SetCommands {
792 #[command(alias = "p")]
794 Provider {
795 name: String,
797 },
798 #[command(alias = "m")]
800 Model {
801 name: String,
803 },
804 #[command(alias = "s")]
806 SystemPrompt {
807 prompt: String,
809 },
810 #[command(alias = "mt")]
812 MaxTokens {
813 value: String,
815 },
816 #[command(alias = "te")]
818 Temperature {
819 value: String,
821 },
822 #[command(alias = "se")]
824 Search {
825 name: String,
827 },
828 #[command(alias = "st")]
830 Stream {
831 value: String,
833 },
834}
835
836#[derive(Subcommand)]
837pub enum GetCommands {
838 #[command(alias = "p")]
840 Provider,
841 #[command(alias = "m")]
843 Model,
844 #[command(alias = "s")]
846 SystemPrompt,
847 #[command(alias = "mt")]
849 MaxTokens,
850 #[command(alias = "te")]
852 Temperature,
853 #[command(alias = "se")]
855 Search,
856 #[command(alias = "st")]
858 Stream,
859}
860
861#[derive(Subcommand)]
862pub enum DeleteCommands {
863 #[command(alias = "p")]
865 Provider,
866 #[command(alias = "m")]
868 Model,
869 #[command(alias = "s")]
871 SystemPrompt,
872 #[command(alias = "mt")]
874 MaxTokens,
875 #[command(alias = "te")]
877 Temperature,
878 #[command(alias = "se")]
880 Search,
881 #[command(alias = "st")]
883 Stream,
884}
885
886#[derive(Subcommand)]
887pub enum McpCommands {
888 #[command(alias = "a")]
890 Add {
891 name: String,
893 command_or_url: String,
895 #[arg(long = "type", value_enum)]
897 server_type: McpServerType,
898 #[arg(short = 'e', long = "env", value_parser = parse_env_var)]
900 env: Vec<(String, String)>,
901 },
902 #[command(alias = "d")]
904 Delete {
905 name: String,
907 },
908 #[command(alias = "l")]
910 List,
911 #[command(alias = "st")]
913 Stop {
914 name: String,
916 },
917 #[command(alias = "f")]
919 Functions {
920 name: String,
922 },
923 #[command(alias = "i")]
925 Invoke {
926 name: String,
928 function: String,
930 args: Vec<String>,
932 },
933}
934
935#[derive(Subcommand)]
936pub enum VectorCommands {
937 #[command(alias = "l")]
939 List,
940 #[command(alias = "d")]
942 Delete {
943 name: String,
945 },
946 #[command(alias = "i")]
948 Info {
949 name: String,
951 },
952}
953
954#[derive(Subcommand)]
955pub enum WebChatProxyCommands {
956 #[command(alias = "p")]
958 Providers {
959 #[command(subcommand)]
960 command: Option<WebChatProxyProviderCommands>,
961 },
962 #[command(alias = "s")]
964 Start {
965 provider: String,
967 #[arg(short = 'p', long = "port", default_value = "8080")]
969 port: u16,
970 #[arg(long = "host", default_value = "127.0.0.1")]
972 host: String,
973 #[arg(short = 'k', long = "key")]
975 key: Option<String>,
976 #[arg(short = 'g', long = "generate-key")]
978 generate_key: bool,
979 #[arg(short = 'd', long = "daemon")]
981 daemon: bool,
982 },
983 Stop {
985 provider: String,
987 },
988 #[command(alias = "ps")]
990 List,
991}
992
993#[derive(Subcommand)]
994pub enum WebChatProxyProviderCommands {
995 #[command(alias = "l")]
997 List,
998 Kagi {
1000 #[command(subcommand)]
1001 command: WebChatProxyKagiCommands,
1002 },
1003}
1004
1005#[derive(Subcommand)]
1006pub enum WebChatProxyKagiCommands {
1007 Auth {
1009 token: Option<String>,
1011 },
1012 #[command(alias = "m")]
1014 Models,
1015}
1016
1017#[derive(Subcommand)]
1018pub enum SyncCommands {
1019 #[command(alias = "p")]
1021 Providers,
1022 #[command(alias = "c")]
1024 Configure {
1025 provider: String,
1027 #[command(subcommand)]
1028 command: Option<ConfigureCommands>,
1029 },
1030 To {
1032 provider: String,
1034 #[arg(short = 'e', long = "encrypted")]
1036 encrypted: bool,
1037 #[arg(short = 'd', long = "debug")]
1039 debug: bool,
1040 #[arg(short = 'y', long = "yes")]
1042 yes: bool,
1043 },
1044 From {
1046 provider: String,
1048 #[arg(short = 'e', long = "encrypted")]
1050 encrypted: bool,
1051 #[arg(short = 'd', long = "debug")]
1053 debug: bool,
1054 #[arg(short = 'y', long = "yes")]
1056 yes: bool,
1057 },
1058}
1059
1060#[derive(Subcommand)]
1061pub enum ConfigureCommands {
1062 #[command(alias = "s")]
1064 Setup,
1065 #[command(alias = "sh")]
1067 Show,
1068 #[command(alias = "r")]
1070 Remove,
1071}
1072
1073#[derive(Subcommand)]
1074pub enum SearchCommands {
1075 #[command(alias = "p")]
1077 Provider {
1078 #[command(subcommand)]
1079 command: SearchProviderCommands,
1080 },
1081 Query {
1083 provider: String,
1085 query: String,
1087 #[arg(short = 'f', long = "format", default_value = "md")]
1089 format: String,
1090 #[arg(short = 'n', long = "count", default_value = "5")]
1092 count: usize,
1093 },
1094}
1095
1096#[derive(Subcommand)]
1097pub enum SearchProviderCommands {
1098 #[command(alias = "a")]
1100 Add {
1101 name: String,
1103 url: String,
1105 },
1106 #[command(alias = "d")]
1108 Delete {
1109 name: String,
1111 },
1112 #[command(alias = "s")]
1114 Set {
1115 provider: String,
1117 header_name: String,
1119 header_value: String,
1121 },
1122 #[command(alias = "l")]
1124 List,
1125}
1126
1127#[derive(clap::ValueEnum, Clone, Debug)]
1128pub enum McpServerType {
1129 Stdio,
1131 Sse,
1133 Streamable,
1135}
1136
1137
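/// Parses a `KEY=VALUE` argument (as passed via `--env`) into a key/value pair.
///
/// Only the first `=` splits the pair, so values may themselves contain `=`.
/// Illustrative sketch:
///
/// ```ignore
/// assert_eq!(
///     parse_env_var("TOKEN=abc==").unwrap(),
///     ("TOKEN".to_string(), "abc==".to_string())
/// );
/// assert!(parse_env_var("MALFORMED").is_err());
/// ```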
1138fn parse_env_var(s: &str) -> Result<(String, String), String> {
1140 let parts: Vec<&str> = s.splitn(2, '=').collect();
1141 if parts.len() != 2 {
1142 return Err(format!(
1143 "Invalid environment variable format: '{}'. Expected 'KEY=VALUE'",
1144 s
1145 ));
1146 }
1147 Ok((parts[0].to_string(), parts[1].to_string()))
1148}
1149
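/// Collects the bodies of triple-backtick fenced code blocks from a response,
/// dropping the fence lines (including any language tag) and surrounding prose.
/// An unterminated trailing fence is still captured.
///
/// Illustrative sketch:
///
/// ```ignore
/// let text = ["intro", "```rust", "fn main() {}", "```", "outro"].join("\n");
/// assert_eq!(extract_code_blocks(&text), vec!["fn main() {}".to_string()]);
/// ```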
1150fn extract_code_blocks(text: &str) -> Vec<String> {
1152 let mut code_blocks = Vec::new();
1153 let mut in_code_block = false;
1154 let mut current_block = String::new();
1155
1156 for line in text.lines() {
1157 if line.starts_with("```") {
1158 if in_code_block {
1159 if !current_block.trim().is_empty() {
1161 code_blocks.push(current_block.trim().to_string());
1162 }
1163 current_block.clear();
1164 in_code_block = false;
1165 } else {
1166 in_code_block = true;
1168 }
1169 } else if in_code_block {
1170 current_block.push_str(line);
1171 current_block.push('\n');
1172 }
1173 }
1174
1175 if in_code_block && !current_block.trim().is_empty() {
1177 code_blocks.push(current_block.trim().to_string());
1178 }
1179
1180 code_blocks
1181}
1182
1183pub async fn handle_provider_command(command: ProviderCommands) -> Result<()> {
1185 match command {
1186 ProviderCommands::Add {
1187 name,
1188 url,
1189 models_path,
1190 chat_path,
1191 } => {
1192 let mut config = config::Config::load()?;
1193 config.add_provider_with_paths(name.clone(), url, models_path, chat_path)?;
1194 config.save()?;
            println!("{} Provider '{}' added successfully", "✓".green(), name);
1196 }
1197 ProviderCommands::Update { name, url } => {
1198 let mut config = config::Config::load()?;
1199 if !config.has_provider(&name) {
1200 anyhow::bail!("Provider '{}' not found", name);
1201 }
            config.add_provider(name.clone(), url)?;
            config.save()?;
            println!("{} Provider '{}' updated successfully", "✓".green(), name);
1205 }
1206 ProviderCommands::Remove { name } => {
1207 let mut config = config::Config::load()?;
1208 if !config.has_provider(&name) {
1209 anyhow::bail!("Provider '{}' not found", name);
1210 }
1211 config.providers.remove(&name);
1212 config.save()?;
            println!("{} Provider '{}' removed successfully", "✓".green(), name);
1214 }
1215 ProviderCommands::List => {
1216 let config = config::Config::load()?;
1217 if config.providers.is_empty() {
1218 println!("No providers configured.");
1219 return Ok(());
1220 }
1221
1222 println!("\n{}", "Configured Providers:".bold().blue());
1223
1224 let mut sorted_providers: Vec<_> = config.providers.iter().collect();
1226 sorted_providers.sort_by(|a, b| a.0.cmp(b.0));
1227
1228 for (name, provider_config) in sorted_providers {
1229 let has_key = provider_config.api_key.is_some();
                let key_status = if has_key { "✓".green() } else { "✗".red() };
1231 println!(
1232 " {} {} - {} (API Key: {})",
                    "•".blue(),
1234 name.bold(),
1235 provider_config.endpoint,
1236 key_status
1237 );
1238 }
1239 }
1240 ProviderCommands::Models { name, refresh } => {
1241 debug_log!(
1242 "Handling provider models command for '{}', refresh: {}",
1243 name,
1244 refresh
1245 );
1246
1247 let config = config::Config::load()?;
1248 let _provider_config = config.get_provider(&name)?;
1249
1250 debug_log!("Provider '{}' found in config", name);
1251
1252 match crate::unified_cache::UnifiedCache::fetch_and_cache_provider_models(
1254 &name, refresh,
1255 )
1256 .await
1257 {
1258 Ok(models) => {
1259 debug_log!(
1260 "Successfully fetched {} models for provider '{}'",
1261 models.len(),
1262 name
1263 );
1264 println!("\n{} Available models:", "Models:".bold());
1265 display_provider_models(&models)?;
1266 }
1267 Err(e) => {
1268 debug_log!("Unified cache failed for provider '{}': {}", name, e);
1269 eprintln!("Error fetching models from provider '{}': {}", name, e);
1270
1271 debug_log!(
1273 "Attempting fallback to basic client listing for provider '{}'",
1274 name
1275 );
1276 let mut config_mut = config.clone();
1277 match chat::create_authenticated_client(&mut config_mut, &name).await {
1278 Ok(client) => {
1279 debug_log!("Created fallback client for provider '{}'", name);
1280 if config_mut.get_cached_token(&name) != config.get_cached_token(&name)
1282 {
1283 debug_log!("Tokens updated for provider '{}', saving config", name);
1284 config_mut.save()?;
1285 }
1286
1287 match client.list_models().await {
1288 Ok(models) => {
1289 debug_log!(
1290 "Fallback client returned {} models for provider '{}'",
1291 models.len(),
1292 name
1293 );
1294 println!(
1295 "\n{} Available models (basic listing):",
1296 "Models:".bold()
1297 );
1298 for model in models {
                                        println!(" • {}", model.id);
1300 }
1301 }
1302 Err(e2) => {
1303 debug_log!(
1304 "Fallback client failed for provider '{}': {}",
1305 name,
1306 e2
1307 );
1308 anyhow::bail!("Failed to fetch models: {}", e2);
1309 }
1310 }
1311 }
1312 Err(e2) => {
1313 debug_log!(
1314 "Failed to create fallback client for provider '{}': {}",
1315 name,
1316 e2
1317 );
1318 anyhow::bail!("Failed to create client: {}", e2);
1319 }
1320 }
1321 }
1322 }
1323 }
1324 ProviderCommands::Headers { provider, command } => {
1325 let mut config = config::Config::load()?;
1326
1327 if !config.has_provider(&provider) {
1328 anyhow::bail!("Provider '{}' not found", provider);
1329 }
1330
1331 match command {
1332 HeaderCommands::Add { name, value } => {
1333 config.add_header(provider.clone(), name.clone(), value.clone())?;
1334 config.save()?;
1335 println!(
1336 "{} Header '{}' added to provider '{}'",
                        "✓".green(),
1338 name,
1339 provider
1340 );
1341 }
1342 HeaderCommands::Delete { name } => {
1343 config.remove_header(provider.clone(), name.clone())?;
1344 config.save()?;
1345 println!(
1346 "{} Header '{}' removed from provider '{}'",
                        "✓".green(),
1348 name,
1349 provider
1350 );
1351 }
1352 HeaderCommands::List => {
1353 let headers = config.list_headers(&provider)?;
1354 if headers.is_empty() {
1355 println!("No custom headers configured for provider '{}'", provider);
1356 } else {
1357 println!(
1358 "\n{} Custom headers for provider '{}':",
1359 "Headers:".bold().blue(),
1360 provider
1361 );
1362 for (name, value) in headers {
                            println!(" {} {}: {}", "•".blue(), name.bold(), value);
1364 }
1365 }
1366 }
1367 }
1368 }
1369 ProviderCommands::TokenUrl { provider, url } => {
1370 let mut config = config::Config::load()?;
1371
1372 if !config.has_provider(&provider) {
1373 anyhow::bail!("Provider '{}' not found", provider);
1374 }
1375
1376 config.set_token_url(provider.clone(), url.clone())?;
1377 config.save()?;
            println!("{} Token URL set for provider '{}'", "✓".green(), provider);
1379 }
1380 ProviderCommands::Vars { provider, command } => {
1381 let mut config = config::Config::load()?;
1382 if !config.has_provider(&provider) {
1383 anyhow::bail!("Provider '{}' not found", provider);
1384 }
1385 match command {
1386 ProviderVarsCommands::Set { key, value } => {
1387 config.set_provider_var(&provider, &key, &value)?;
1388 config.save()?;
1389 println!(
1390 "{} Set var '{}'='{}' for provider '{}'",
                        "✓".green(),
1392 key,
1393 value,
1394 provider
1395 );
1396 }
1397 ProviderVarsCommands::Get { key } => {
1398 match config.get_provider_var(&provider, &key) {
1399 Some(val) => println!("{}", val),
1400 None => anyhow::bail!("Var '{}' not set for provider '{}'", key, provider),
1401 }
1402 }
1403 ProviderVarsCommands::List => {
1404 let vars = config.list_provider_vars(&provider)?;
1405 if vars.is_empty() {
1406 println!("No vars set for provider '{}'", provider);
1407 } else {
1408 println!(
1409 "\n{} Vars for provider '{}':",
1410 "Vars:".bold().blue(),
1411 provider
1412 );
1413 for (k, v) in vars {
                            println!(" {} {} = {}", "•".blue(), k.bold(), v);
1415 }
1416 }
1417 }
1418 }
1419 }
1420 ProviderCommands::Paths { provider, command } => {
1421 let mut config = config::Config::load()?;
1422 if !config.has_provider(&provider) {
1423 anyhow::bail!("Provider '{}' not found", provider);
1424 }
1425 match command {
1426 ProviderPathCommands::Add {
1427 models_path,
1428 chat_path,
1429 images_path,
1430 embeddings_path,
1431 } => {
1432 let mut updated = false;
1433 if let Some(path) = models_path {
1434 config.set_provider_models_path(&provider, &path)?;
1435 println!(
1436 "{} Models path set to '{}' for provider '{}'",
                            "✓".green(),
1438 path,
1439 provider
1440 );
1441 updated = true;
1442 }
1443 if let Some(path) = chat_path {
1444 config.set_provider_chat_path(&provider, &path)?;
1445 println!(
1446 "{} Chat path set to '{}' for provider '{}'",
                            "✓".green(),
1448 path,
1449 provider
1450 );
1451 updated = true;
1452 }
1453 if let Some(path) = images_path {
1454 config.set_provider_images_path(&provider, &path)?;
1455 println!(
1456 "{} Images path set to '{}' for provider '{}'",
                            "✓".green(),
1458 path,
1459 provider
1460 );
1461 updated = true;
1462 }
1463 if let Some(path) = embeddings_path {
1464 config.set_provider_embeddings_path(&provider, &path)?;
1465 println!(
1466 "{} Embeddings path set to '{}' for provider '{}'",
                            "✓".green(),
1468 path,
1469 provider
1470 );
1471 updated = true;
1472 }
1473 if !updated {
1474 anyhow::bail!("No paths specified. Use -m, -c, -i, or -e to set paths.");
1475 }
1476 config.save()?;
1477 }
1478 ProviderPathCommands::Delete {
1479 models,
1480 chat,
1481 images,
1482 embeddings,
1483 } => {
1484 let mut updated = false;
1485 if models {
1486 config.reset_provider_models_path(&provider)?;
1487 println!(
1488 "{} Models path reset to default for provider '{}'",
                            "✓".green(),
1490 provider
1491 );
1492 updated = true;
1493 }
1494 if chat {
1495 config.reset_provider_chat_path(&provider)?;
1496 println!(
1497 "{} Chat path reset to default for provider '{}'",
                            "✓".green(),
1499 provider
1500 );
1501 updated = true;
1502 }
1503 if images {
1504 config.reset_provider_images_path(&provider)?;
1505 println!(
1506 "{} Images path reset to default for provider '{}'",
                            "✓".green(),
1508 provider
1509 );
1510 updated = true;
1511 }
1512 if embeddings {
1513 config.reset_provider_embeddings_path(&provider)?;
1514 println!(
1515 "{} Embeddings path reset to default for provider '{}'",
                            "✓".green(),
1517 provider
1518 );
1519 updated = true;
1520 }
1521 if !updated {
1522 anyhow::bail!("No paths specified for deletion. Use -m, -c, -i, or -e to delete paths.");
1523 }
1524 config.save()?;
1525 }
1526 ProviderPathCommands::List => {
1527 let paths = config.list_provider_paths(&provider)?;
1528 println!(
1529 "\n{} API paths for provider '{}':",
1530 "Paths:".bold().blue(),
1531 provider
1532 );
                    println!(" {} Models: {}", "•".blue(), paths.models_path.bold());
                    println!(" {} Chat: {}", "•".blue(), paths.chat_path.bold());
                    if let Some(ref images_path) = paths.images_path {
                        println!(" {} Images: {}", "•".blue(), images_path.bold());
                    } else {
                        println!(" {} Images: {}", "•".blue(), "not set".dimmed());
                    }
                    if let Some(ref embeddings_path) = paths.embeddings_path {
                        println!(" {} Embeddings: {}", "•".blue(), embeddings_path.bold());
                    } else {
                        println!(" {} Embeddings: {}", "•".blue(), "not set".dimmed());
1544 }
1545 }
1546 }
1547 }
1548 }
1549 Ok(())
1550}
1551
1552pub async fn handle_key_command(command: KeyCommands) -> Result<()> {
1554 match command {
1555 KeyCommands::Add { name } => {
1556 let mut config = config::Config::load()?;
1557
1558 if !config.has_provider(&name) {
1559 anyhow::bail!(
1560 "Provider '{}' not found. Add it first with 'lc providers add'",
1561 name
1562 );
1563 }
1564
1565 let provider_cfg = config.get_provider(&name)?;
1567 let is_google_sa = provider_cfg.auth_type.as_deref() == Some("google_sa_jwt")
1568 || provider_cfg.endpoint.contains("aiplatform.googleapis.com");
1569
1570 if is_google_sa {
1571 println!(
1572 "Detected Google Vertex AI provider. Please provide the Service Account JSON."
1573 );
1574 println!("Options:");
1575 println!(" 1. Paste the base64 version directly (ex: cat sa.json | base64)");
1576 println!(" 2. Provide the path to the JSON file (ex: /path/to/sa.json)");
1577 print!("Base64 Service Account JSON or file path for {}: ", name);
1578 io::stdout().flush()?;
1579
1580 let mut input = String::new();
1582 io::stdin().read_line(&mut input)?;
1583 let input = input.trim();
1584
1585 let sa_json = if input.starts_with('/') || input.ends_with(".json") {
1586 match std::fs::read_to_string(input) {
1588 Ok(file_content) => file_content,
1589 Err(e) => {
1590 anyhow::bail!("Failed to read service account file '{}': {}", input, e)
1591 }
1592 }
1593 } else {
1594 let sa_json_b64 = input
1596 .trim()
1597 .replace("\n", "")
1598 .replace("\r", "")
1599 .replace(" ", "");
1600
1601 use base64::{engine::general_purpose, Engine as _};
1603 match general_purpose::STANDARD.decode(&sa_json_b64) {
1604 Ok(decoded_bytes) => match String::from_utf8(decoded_bytes) {
1605 Ok(json_str) => json_str,
1606 Err(_) => anyhow::bail!("Invalid UTF-8 in decoded base64 data"),
1607 },
1608 Err(_) => anyhow::bail!("Invalid base64 format"),
1609 }
1610 };
1611
1612 let parsed: serde_json::Value = serde_json::from_str(&sa_json)
1614 .map_err(|e| anyhow::anyhow!("Invalid JSON: {}", e))?;
1615 let sa_type = parsed.get("type").and_then(|v| v.as_str()).unwrap_or("");
1616 let client_email = parsed
1617 .get("client_email")
1618 .and_then(|v| v.as_str())
1619 .unwrap_or("");
1620 let private_key = parsed
1621 .get("private_key")
1622 .and_then(|v| v.as_str())
1623 .unwrap_or("");
1624
1625 if sa_type != "service_account" {
1626 anyhow::bail!("Service Account JSON must have \"type\": \"service_account\"");
1627 }
1628 if client_email.is_empty() {
1629 anyhow::bail!("Service Account JSON missing 'client_email'");
1630 }
1631 if private_key.is_empty() {
1632 anyhow::bail!("Service Account JSON missing 'private_key'");
1633 }
1634
1635 config.set_api_key(name.clone(), sa_json)?;
1637 config.save()?;
1638 println!(
1639 "{} Service Account stored for provider '{}'",
                    "✓".green(),
1641 name
1642 );
1643 } else {
1644 print!("Enter API key for {}: ", name);
1645 io::stdout().flush()?;
1646 let key = read_password()?;
1647
1648 config.set_api_key(name.clone(), key)?;
1649 config.save()?;
                println!("{} API key set for provider '{}'", "✓".green(), name);
1651 }
1652 }
1653 KeyCommands::Get { name } => {
1654 let config = config::Config::load()?;
1655
1656 if !config.has_provider(&name) {
1657 anyhow::bail!("Provider '{}' not found", name);
1658 }
1659
1660 let provider_config = config.get_provider(&name)?;
1661 if let Some(api_key) = &provider_config.api_key {
1662 println!("{}", api_key);
1663 } else {
1664 anyhow::bail!("No API key configured for provider '{}'", name);
1665 }
1666 }
1667 KeyCommands::List => {
1668 let config = config::Config::load()?;
1669 if config.providers.is_empty() {
1670 println!("No providers configured.");
1671 return Ok(());
1672 }
1673
1674 println!("\n{}", "API Key Status:".bold().blue());
1675 for (name, provider_config) in &config.providers {
1676 let status = if provider_config.api_key.is_some() {
                    "✓ Configured".green()
                } else {
                    "✗ Missing".red()
                };
                println!(" {} {} - {}", "•".blue(), name.bold(), status);
1682 }
1683 }
1684 KeyCommands::Remove { name } => {
1685 let mut config = config::Config::load()?;
1686
1687 if !config.has_provider(&name) {
1688 anyhow::bail!("Provider '{}' not found", name);
1689 }
1690
1691 if let Some(provider_config) = config.providers.get_mut(&name) {
1692 provider_config.api_key = None;
1693 }
1694 config.save()?;
            println!("{} API key removed for provider '{}'", "✓".green(), name);
1696 }
1697 }
1698 Ok(())
1699}
1700
1701pub async fn handle_log_command(command: LogCommands) -> Result<()> {
1703 let db = database::Database::new()?;
1704
1705 match command {
1706 LogCommands::Show { minimal } => {
1707 let entries = db.get_all_logs()?;
1708
1709 if entries.is_empty() {
1710 println!("No chat logs found.");
1711 return Ok(());
1712 }
1713
1714 if minimal {
1715 use tabled::{Table, Tabled};
1716
1717 #[derive(Tabled)]
1718 struct LogEntry {
1719 #[tabled(rename = "Chat ID")]
1720 chat_id: String,
1721 #[tabled(rename = "Model")]
1722 model: String,
1723 #[tabled(rename = "Question")]
1724 question: String,
1725 #[tabled(rename = "Time")]
1726 time: String,
1727 }
1728
1729 let table_data: Vec<LogEntry> = entries
1730 .into_iter()
1731 .map(|entry| LogEntry {
1732 chat_id: entry.chat_id[..8].to_string(),
1733 model: entry.model,
1734 question: if entry.question.len() > 50 {
1735 format!("{}...", &entry.question[..50])
1736 } else {
1737 entry.question
1738 },
1739 time: entry.timestamp.format("%m-%d %H:%M").to_string(),
1740 })
1741 .collect();
1742
1743 let table = Table::new(table_data);
1744 println!("{}", table);
1745 } else {
1746 println!("\n{}", "Chat Logs:".bold().blue());
1747
1748 for entry in entries {
1749 println!(
1750 "\n{} {} ({})",
1751 "Session:".bold(),
1752 &entry.chat_id[..8],
1753 entry.timestamp.format("%Y-%m-%d %H:%M:%S")
1754 );
1755 println!("{} {}", "Model:".bold(), entry.model);
1756
1757 if let (Some(input_tokens), Some(output_tokens)) =
1759 (entry.input_tokens, entry.output_tokens)
1760 {
1761 println!(
1762 "{} {} input + {} output = {} total tokens",
1763 "Tokens:".bold(),
1764 input_tokens,
1765 output_tokens,
1766 input_tokens + output_tokens
1767 );
1768 }
1769
1770 println!("{} {}", "Q:".yellow(), entry.question);
1771 println!(
1772 "{} {}",
1773 "A:".green(),
1774 if entry.response.len() > 200 {
1775 format!("{}...", &entry.response[..200])
1776 } else {
1777 entry.response
1778 }
1779 );
                    println!("{}", "─".repeat(80).dimmed());
1781 }
1782 }
1783 }
1784 LogCommands::Recent { command, count } => {
1785 match command {
1786 Some(RecentCommands::Answer { command }) => {
1787 let entries = db.get_all_logs()?;
1788 if let Some(entry) = entries.first() {
1789 match command {
1790 Some(AnswerCommands::Code) => {
1791 let code_blocks = extract_code_blocks(&entry.response);
1792 if code_blocks.is_empty() {
1793 anyhow::bail!("No code blocks found in the last answer");
1794 } else {
1795 for block in code_blocks {
1796 println!("{}", block);
1797 }
1798 }
1799 }
1800 None => {
1801 println!("{}", entry.response);
1802 }
1803 }
1804 } else {
1805 anyhow::bail!("No recent logs found");
1806 }
1807 }
1808 Some(RecentCommands::Question) => {
1809 let entries = db.get_all_logs()?;
1810 if let Some(entry) = entries.first() {
1811 println!("{}", entry.question);
1812 } else {
1813 anyhow::bail!("No recent logs found");
1814 }
1815 }
1816 Some(RecentCommands::Model) => {
1817 let entries = db.get_all_logs()?;
1818 if let Some(entry) = entries.first() {
1819 println!("{}", entry.model);
1820 } else {
1821 anyhow::bail!("No recent logs found");
1822 }
1823 }
1824 Some(RecentCommands::Session) => {
1825 let entries = db.get_all_logs()?;
1826 if let Some(entry) = entries.first() {
1827 println!("{}", entry.chat_id);
1828 } else {
1829 anyhow::bail!("No recent logs found");
1830 }
1831 }
1832 None => {
1833 let mut entries = db.get_all_logs()?;
1835 entries.truncate(count);
1836
1837 if entries.is_empty() {
1838 println!("No recent logs found.");
1839 return Ok(());
1840 }
1841
1842 println!(
1843 "\n{} (showing {} entries)",
1844 "Recent Logs:".bold().blue(),
1845 entries.len()
1846 );
1847
1848 for entry in entries {
1849 println!(
1850 "\n{} {} ({})",
1851 "Session:".bold(),
1852 &entry.chat_id[..8],
1853 entry.timestamp.format("%Y-%m-%d %H:%M:%S")
1854 );
1855 println!("{} {}", "Model:".bold(), entry.model);
1856
1857 if let (Some(input_tokens), Some(output_tokens)) =
1859 (entry.input_tokens, entry.output_tokens)
1860 {
1861 println!(
1862 "{} {} input + {} output = {} total tokens",
1863 "Tokens:".bold(),
1864 input_tokens,
1865 output_tokens,
1866 input_tokens + output_tokens
1867 );
1868 }
1869
1870 println!("{} {}", "Q:".yellow(), entry.question);
1871 println!(
1872 "{} {}",
1873 "A:".green(),
1874 if entry.response.len() > 150 {
1875 format!("{}...", &entry.response[..150])
1876 } else {
1877 entry.response
1878 }
1879 );
                        println!("{}", "─".repeat(60).dimmed());
1881 }
1882 }
1883 }
1884 }
1885 LogCommands::Current => {
1886 if let Some(session_id) = db.get_current_session_id()? {
1887 let history = db.get_chat_history(&session_id)?;
1888
1889 println!("\n{} {}", "Current Session:".bold().blue(), session_id);
1890 println!("{} {} messages", "Messages:".bold(), history.len());
1891
1892 for (i, entry) in history.iter().enumerate() {
1893 println!(
1894 "\n{} {} ({})",
1895 format!("Message {}:", i + 1).bold(),
1896 entry.model,
1897 entry.timestamp.format("%H:%M:%S")
1898 );
1899 println!("{} {}", "Q:".yellow(), entry.question);
1900 println!(
1901 "{} {}",
1902 "A:".green(),
1903 if entry.response.len() > 100 {
1904 format!("{}...", &entry.response[..100])
1905 } else {
1906 entry.response.clone()
1907 }
1908 );
1909 }
1910 } else {
1911 println!("No current session found.");
1912 }
1913 }
1914 LogCommands::Stats => {
1915 let stats = db.get_stats()?;
1916
1917 println!("\n{}", "Database Statistics:".bold().blue());
1918 println!();
1919
1920 println!("{} {}", "Total Entries:".bold(), stats.total_entries);
1922 println!("{} {}", "Unique Sessions:".bold(), stats.unique_sessions);
1923
1924 let file_size_str = if stats.file_size_bytes < 1024 {
1926 format!("{} bytes", stats.file_size_bytes)
1927 } else if stats.file_size_bytes < 1024 * 1024 {
1928 format!("{:.1} KB", stats.file_size_bytes as f64 / 1024.0)
1929 } else {
1930 format!("{:.1} MB", stats.file_size_bytes as f64 / (1024.0 * 1024.0))
1931 };
1932 println!("{} {}", "Database Size:".bold(), file_size_str);
1933
1934 if let Some((earliest, latest)) = stats.date_range {
1936 println!(
1937 "{} {} to {}",
1938 "Date Range:".bold(),
1939 earliest.format("%Y-%m-%d %H:%M:%S"),
1940 latest.format("%Y-%m-%d %H:%M:%S")
1941 );
1942 } else {
1943 println!("{} {}", "Date Range:".bold(), "No entries".dimmed());
1944 }
1945
1946 if !stats.model_usage.is_empty() {
1948 println!("\n{}", "Model Usage:".bold().blue());
1949 for (model, count) in stats.model_usage {
1950 let percentage = if stats.total_entries > 0 {
1951 (count as f64 / stats.total_entries as f64) * 100.0
1952 } else {
1953 0.0
1954 };
1955 println!(
1956 " {} {} ({} - {:.1}%)",
                        "•".blue(),
1958 model.bold(),
1959 count,
1960 percentage
1961 );
1962 }
1963 }
1964 }
1965 LogCommands::Purge {
1966 yes,
1967 older_than_days,
1968 keep_recent,
1969 max_size_mb,
1970 } => {
1971 let has_specific_options =
1973 older_than_days.is_some() || keep_recent.is_some() || max_size_mb.is_some();
1974
1975 if has_specific_options {
1976 let deleted_count = db.smart_purge(older_than_days, keep_recent, max_size_mb)?;
1978
1979 if deleted_count > 0 {
                    println!("{} Purged {} log entries", "✓".green(), deleted_count);
1981
1982 if let Some(days) = older_than_days {
1983 println!(" - Removed entries older than {} days", days);
1984 }
1985 if let Some(count) = keep_recent {
1986 println!(" - Kept only the {} most recent entries", count);
1987 }
1988 if let Some(size) = max_size_mb {
1989 println!(" - Enforced maximum database size of {} MB", size);
1990 }
1991 } else {
                    println!("{} No logs needed to be purged", "ℹ️".blue());
1993 }
1994 } else {
1995 if !yes {
1997 print!(
1998 "Are you sure you want to purge all logs? This cannot be undone. (y/N): "
1999 );
2000 io::stdout().flush()?;
2002
2003 let mut input = String::new();
2004 io::stdin().read_line(&mut input)?;
2005
2006 if !input.trim().to_lowercase().starts_with('y') {
2007 println!("Purge cancelled.");
2008 return Ok(());
2009 }
2010 }
2011
2012 db.purge_all_logs()?;
                println!("{} All logs purged successfully", "✓".green());
2014 }
2015 }
2016 }
2017 Ok(())
2018}
2019
2020pub async fn handle_config_command(command: Option<ConfigCommands>) -> Result<()> {
2022 match command {
2023 Some(ConfigCommands::Set { command }) => match command {
2024 SetCommands::Provider { name } => {
2025 let mut config = config::Config::load()?;
2026
2027 if !config.has_provider(&name) {
2028 anyhow::bail!(
2029 "Provider '{}' not found. Add it first with 'lc providers add'",
2030 name
2031 );
2032 }
2033
2034 config.default_provider = Some(name.clone());
2035 config.save()?;
                println!("{} Default provider set to '{}'", "✓".green(), name);
2037 }
2038 SetCommands::Model { name } => {
2039 let mut config = config::Config::load()?;
2040 config.default_model = Some(name.clone());
2041 config.save()?;
                println!("{} Default model set to '{}'", "✓".green(), name);
2043 }
2044 SetCommands::SystemPrompt { prompt } => {
2045 let mut config = config::Config::load()?;
2046 let resolved_prompt = config.resolve_template_or_prompt(&prompt);
2047 config.system_prompt = Some(resolved_prompt);
2048 config.save()?;
                println!("{} System prompt set", "✓".green());
2050 }
2051 SetCommands::MaxTokens { value } => {
2052 let mut config = config::Config::load()?;
2053 let parsed_value = config::Config::parse_max_tokens(&value)?;
2054 config.max_tokens = Some(parsed_value);
2055 config.save()?;
                println!("{} Max tokens set to {}", "✓".green(), parsed_value);
2057 }
2058 SetCommands::Temperature { value } => {
2059 let mut config = config::Config::load()?;
2060 let parsed_value = config::Config::parse_temperature(&value)?;
2061 config.temperature = Some(parsed_value);
2062 config.save()?;
                println!("{} Temperature set to {}", "✓".green(), parsed_value);
2064 }
2065 SetCommands::Search { name } => {
2066 let mut search_config = crate::search::SearchConfig::load()?;
2067
2068 if !search_config.has_provider(&name) {
2069 anyhow::bail!("Search provider '{}' not found. Add it first with 'lc search provider add'", name);
2070 }
2071
2072 search_config.set_default_provider(name.clone())?;
2073 search_config.save()?;
                println!("{} Default search provider set to '{}'", "✓".green(), name);
2075 }
2076 SetCommands::Stream { value } => {
2077 let mut config = config::Config::load()?;
2078 let stream_value = match value.to_lowercase().as_str() {
2079 "true" | "1" | "yes" | "on" => true,
2080 "false" | "0" | "no" | "off" => false,
2081 _ => anyhow::bail!("Invalid stream value '{}'. Use 'true' or 'false'", value),
2082 };
2083 config.stream = Some(stream_value);
2084 config.save()?;
                println!("{} Streaming mode set to {}", "✓".green(), stream_value);
2086 }
2087 },
2088 Some(ConfigCommands::Get { command }) => {
2089 let config = config::Config::load()?;
2090 match command {
2091 GetCommands::Provider => {
2092 if let Some(provider) = &config.default_provider {
2093 println!("{}", provider);
2094 } else {
2095 anyhow::bail!("No default provider configured");
2096 }
2097 }
2098 GetCommands::Model => {
2099 if let Some(model) = &config.default_model {
2100 println!("{}", model);
2101 } else {
2102 anyhow::bail!("No default model configured");
2103 }
2104 }
2105 GetCommands::SystemPrompt => {
2106 if let Some(system_prompt) = &config.system_prompt {
2107 println!("{}", system_prompt);
2108 } else {
2109 anyhow::bail!("No system prompt configured");
2110 }
2111 }
2112 GetCommands::MaxTokens => {
2113 if let Some(max_tokens) = &config.max_tokens {
2114 println!("{}", max_tokens);
2115 } else {
2116 anyhow::bail!("No max tokens configured");
2117 }
2118 }
2119 GetCommands::Temperature => {
2120 if let Some(temperature) = &config.temperature {
2121 println!("{}", temperature);
2122 } else {
2123 anyhow::bail!("No temperature configured");
2124 }
2125 }
2126 GetCommands::Search => {
2127 let search_config = crate::search::SearchConfig::load()?;
2128 if let Some(provider) = search_config.get_default_provider() {
2129 println!("{}", provider);
2130 } else {
2131 anyhow::bail!("No default search provider configured");
2132 }
2133 }
2134 GetCommands::Stream => {
2135 if let Some(stream) = &config.stream {
2136 println!("{}", stream);
2137 } else {
2138 anyhow::bail!("No streaming mode configured");
2139 }
2140 }
2141 }
2142 }
2143 Some(ConfigCommands::Delete { command }) => {
2144 let mut config = config::Config::load()?;
2145 match command {
2146 DeleteCommands::Provider => {
2147 if config.default_provider.is_some() {
2148 config.default_provider = None;
2149 config.save()?;
                        println!("{} Default provider deleted", "✓".green());
2151 } else {
2152 anyhow::bail!("No default provider configured to delete");
2153 }
2154 }
2155 DeleteCommands::Model => {
2156 if config.default_model.is_some() {
2157 config.default_model = None;
2158 config.save()?;
                        println!("{} Default model deleted", "✓".green());
2160 } else {
2161 anyhow::bail!("No default model configured to delete");
2162 }
2163 }
2164 DeleteCommands::SystemPrompt => {
2165 if config.system_prompt.is_some() {
2166 config.system_prompt = None;
2167 config.save()?;
                        println!("{} System prompt deleted", "✓".green());
2169 } else {
2170 anyhow::bail!("No system prompt configured to delete");
2171 }
2172 }
2173 DeleteCommands::MaxTokens => {
2174 if config.max_tokens.is_some() {
2175 config.max_tokens = None;
2176 config.save()?;
                        println!("{} Max tokens deleted", "✓".green());
2178 } else {
2179 anyhow::bail!("No max tokens configured to delete");
2180 }
2181 }
2182 DeleteCommands::Temperature => {
2183 if config.temperature.is_some() {
2184 config.temperature = None;
2185 config.save()?;
                        println!("{} Temperature deleted", "✓".green());
2187 } else {
2188 anyhow::bail!("No temperature configured to delete");
2189 }
2190 }
2191 DeleteCommands::Search => {
2192 let mut search_config = crate::search::SearchConfig::load()?;
2193 if search_config.get_default_provider().is_some() {
2194 search_config.set_default_provider(String::new())?;
2195 search_config.save()?;
                        println!("{} Default search provider deleted", "✓".green());
2197 } else {
2198 anyhow::bail!("No default search provider configured to delete");
2199 }
2200 }
2201 DeleteCommands::Stream => {
2202 let mut config = config::Config::load()?;
2203 if config.stream.is_some() {
2204 config.stream = None;
2205 config.save()?;
                        println!("{} Streaming mode deleted", "✓".green());
2207 } else {
2208 anyhow::bail!("No streaming mode configured to delete");
2209 }
2210 }
2211 }
2212 }
2213 Some(ConfigCommands::Path) => {
2214 let config_dir = config::Config::config_dir()?;
2215 println!("\n{}", "Configuration Directory:".bold().blue());
2216 println!("{}", config_dir.display());
2217 println!("\n{}", "Files:".bold().blue());
            println!(" {} config.toml", "•".blue());
            println!(" {} logs.db (synced to cloud)", "•".blue());
2220 println!("\n{}", "Database Management:".bold().blue());
2221 println!(
2222 " {} Purge old logs: {}",
                "•".blue(),
2224 "lc logs purge --older-than-days 30".dimmed()
2225 );
2226 println!(
2227 " {} Keep recent logs: {}",
                "•".blue(),
2229 "lc logs purge --keep-recent 1000".dimmed()
2230 );
2231 println!(
2232 " {} Size-based purge: {}",
                "•".blue(),
2234 "lc logs purge --max-size-mb 50".dimmed()
2235 );
2236 }
2237 None => {
2238 let config = config::Config::load()?;
2240 println!("\n{}", "Current Configuration:".bold().blue());
2241
2242 if let Some(provider) = &config.default_provider {
2243 println!("provider {}", provider);
2244 } else {
2245 println!("provider {}", "not set".dimmed());
2246 }
2247
2248 if let Some(model) = &config.default_model {
2249 if let Some(provider) = &config.default_provider {
2251 match load_provider_enhanced_models(provider).await {
2252 Ok(models) => {
2253 if let Some(model_metadata) = models.iter().find(|m| m.id == *model) {
2255 let _model_info = vec![model.clone()];
2257
2258 let mut capabilities = Vec::new();
2260 if model_metadata.supports_tools
2261 || model_metadata.supports_function_calling
2262 {
                                    capabilities.push("🔧 tools".blue());
2264 }
2265 if model_metadata.supports_vision {
                                    capabilities.push("👁 vision".magenta());
2267 }
2268 if model_metadata.supports_audio {
                                    capabilities.push("🔊 audio".yellow());
2270 }
2271 if model_metadata.supports_reasoning {
                                    capabilities.push("🧠 reasoning".cyan());
2273 }
2274 if model_metadata.supports_code {
                                    capabilities.push("💻 code".green());
2276 }
2277
2278 let mut info_parts = Vec::new();
2280 if let Some(ctx) = model_metadata.context_length {
2281 if ctx >= 1000000 {
2282 info_parts.push(format!("{}m ctx", ctx / 1000000));
2283 } else if ctx >= 1000 {
2284 info_parts.push(format!("{}k ctx", ctx / 1000));
2285 } else {
2286 info_parts.push(format!("{} ctx", ctx));
2287 }
2288 }
2289 if let Some(input_price) = model_metadata.input_price_per_m {
2290 info_parts.push(format!("${:.2}/M in", input_price));
2291 }
2292 if let Some(output_price) = model_metadata.output_price_per_m {
2293 info_parts.push(format!("${:.2}/M out", output_price));
2294 }
2295
2296 let model_display =
2298 if let Some(ref display_name) = model_metadata.display_name {
2299 if display_name != &model_metadata.id {
2300 format!("{} ({})", model, display_name)
2301 } else {
2302 model.clone()
2303 }
2304 } else {
2305 model.clone()
2306 };
2307
2308 print!("model {}", model_display);
2309
2310 if !capabilities.is_empty() {
2311 let capability_strings: Vec<String> =
2312 capabilities.iter().map(|c| c.to_string()).collect();
2313 print!(" [{}]", capability_strings.join(" "));
2314 }
2315
2316 if !info_parts.is_empty() {
2317 print!(" ({})", info_parts.join(", ").dimmed());
2318 }
2319
2320 println!();
2321 } else {
2322 println!("model {}", model);
2324 }
2325 }
2326 Err(_) => {
2327 println!("model {}", model);
2329 }
2330 }
2331 } else {
2332 println!("model {}", model);
2334 }
2335 } else {
2336 println!("model {}", "not set".dimmed());
2337 }
2338
2339 if let Some(system_prompt) = &config.system_prompt {
2340 println!("system_prompt {}", system_prompt);
2341 } else {
2342 println!("system_prompt {}", "not set".dimmed());
2343 }
2344
2345 if let Some(max_tokens) = &config.max_tokens {
2346 println!("max_tokens {}", max_tokens);
2347 } else {
2348 println!("max_tokens {}", "not set".dimmed());
2349 }
2350
2351 if let Some(temperature) = &config.temperature {
2352 println!("temperature {}", temperature);
2353 } else {
2354 println!("temperature {}", "not set".dimmed());
2355 }
2356
2357 if let Some(stream) = &config.stream {
2358 println!("stream {}", stream);
2359 } else {
2360 println!("stream {}", "not set".dimmed());
2361 }
2362 }
2363 }
2364 Ok(())
2365}
2366
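/// Resolves the effective `(provider, model)` pair from CLI overrides, model
/// aliases, and config defaults, in that order of precedence.
///
/// Illustrative resolutions (provider, model, and alias names are placeholders):
///
/// ```ignore
/// // -m "openai:gpt-4o"                      -> ("openai", "gpt-4o")
/// // -m "fast", alias "fast" = "groq:llama"  -> ("groq", "llama")
/// // no flags                                -> config defaults, or an error if unset
/// ```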
2367pub fn resolve_model_and_provider(
2369 config: &config::Config,
2370 provider_override: Option<String>,
2371 model_override: Option<String>,
2372) -> Result<(String, String)> {
2373 let (final_provider_override, final_model_override) = if let Some(model) = &model_override {
2376 if provider_override.is_some() {
2377 (provider_override, model_override)
2379 } else if model.contains(':') {
2380 let parts: Vec<&str> = model.splitn(2, ':').collect();
2382 if parts.len() == 2 {
2383 (Some(parts[0].to_string()), Some(parts[1].to_string()))
2384 } else {
2385 (provider_override, model_override)
2386 }
2387 } else {
2388 if let Some(alias_target) = config.get_alias(model) {
2390 if alias_target.contains(':') {
2392 let parts: Vec<&str> = alias_target.splitn(2, ':').collect();
2393 if parts.len() == 2 {
2394 (Some(parts[0].to_string()), Some(parts[1].to_string()))
2395 } else {
2396 anyhow::bail!(
2397 "Invalid alias target format: '{}'. Expected 'provider:model'",
2398 alias_target
2399 );
2400 }
2401 } else {
2402 anyhow::bail!(
2403 "Invalid alias target format: '{}'. Expected 'provider:model'",
2404 alias_target
2405 );
2406 }
2407 } else {
2408 (provider_override, model_override)
2410 }
2411 }
2412 } else {
2413 (provider_override, model_override)
2414 };
2415
2416 let provider_name = if let Some(provider) = final_provider_override {
2418 if !config.has_provider(&provider) {
2420 anyhow::bail!(
2421 "Provider '{}' not found. Add it first with 'lc providers add'",
2422 provider
2423 );
2424 }
2425 provider
2426 } else {
2427 config.default_provider.as_ref()
2428 .ok_or_else(|| anyhow::anyhow!("No default provider configured. Set one with 'lc config set provider <name>' or use -p flag"))?
2429 .clone()
2430 };
2431
2432 let model_name = if let Some(model) = final_model_override {
2433 model
2434 } else {
2435 config.default_model.as_ref()
2436 .ok_or_else(|| anyhow::anyhow!("No default model configured. Set one with 'lc config set model <name>' or use -m flag"))?
2437 .clone()
2438 };
2439
2440 Ok((provider_name, model_name))
2441}
2442
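/// Reads every `-a/--attach` file and concatenates them, each prefixed with an
/// `=== File: <path> ===` header. Files with a specialized reader (e.g. PDF)
/// are converted to text first; recognized source files (see [`is_code_file`])
/// are wrapped in a fenced block tagged with their extension; anything else is
/// included verbatim. Any unreadable file aborts with an error.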
2443pub fn read_and_format_attachments(attachments: &[String]) -> Result<String> {
2445 if attachments.is_empty() {
2446 return Ok(String::new());
2447 }
2448
2449 let mut formatted_content = String::new();
2450
2451 for (i, file_path) in attachments.iter().enumerate() {
2452 if i > 0 {
2453 formatted_content.push_str("\n\n");
2454 }
2455
2456 let extension = std::path::Path::new(file_path)
2458 .extension()
2459 .and_then(|ext| ext.to_str())
2460 .unwrap_or("");
2461
2462 formatted_content.push_str(&format!("=== File: {} ===\n", file_path));
2463
2464 if let Some(reader) = readers::get_reader_for_extension(extension) {
2466 match reader.read_as_text(file_path) {
2467 Ok(content) => {
2468 formatted_content.push_str(&content);
2469 }
2470 Err(e) => {
2471 anyhow::bail!(
2472 "Failed to read file '{}' with specialized reader: {}",
2473 file_path,
2474 e
2475 );
2476 }
2477 }
2478 } else {
2479 match std::fs::read_to_string(file_path) {
2481 Ok(content) => {
2482 if !extension.is_empty() && is_code_file(extension) {
2484 formatted_content.push_str(&format!("```{}\n{}\n```", extension, content));
2485 } else {
2486 formatted_content.push_str(&content);
2487 }
2488 }
2489 Err(e) => {
2490 anyhow::bail!("Failed to read file '{}': {}", file_path, e);
2491 }
2492 }
2493 }
2494 }
2495
2496 Ok(formatted_content)
2497}
2498
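/// Returns true for file extensions treated as source code when formatting
/// attachments (such files get wrapped in a language-tagged fence).
///
/// Illustrative sketch:
///
/// ```ignore
/// assert!(is_code_file("rs"));
/// assert!(is_code_file("PY"));   // the check is case-insensitive
/// assert!(!is_code_file("txt"));
/// ```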
2499pub fn is_code_file(extension: &str) -> bool {
2501 matches!(
2502 extension.to_lowercase().as_str(),
2503 "rs" | "py"
2504 | "js"
2505 | "ts"
2506 | "java"
2507 | "cpp"
2508 | "c"
2509 | "h"
2510 | "hpp"
2511 | "go"
2512 | "rb"
2513 | "php"
2514 | "swift"
2515 | "kt"
2516 | "scala"
2517 | "sh"
2518 | "bash"
2519 | "zsh"
2520 | "fish"
2521 | "ps1"
2522 | "bat"
2523 | "cmd"
2524 | "html"
2525 | "css"
2526 | "scss"
2527 | "sass"
2528 | "less"
2529 | "xml"
2530 | "json"
2531 | "yaml"
2532 | "yml"
2533 | "toml"
2534 | "ini"
2535 | "cfg"
2536 | "conf"
2537 | "sql"
2538 | "r"
2539 | "m"
2540 | "mm"
2541 | "pl"
2542 | "pm"
2543 | "lua"
2544 | "vim"
2545 | "dockerfile"
2546 | "makefile"
2547 | "cmake"
2548 | "gradle"
2549 | "maven"
2550 )
2551}
2552
2553pub async fn handle_direct_prompt(
2555 prompt: String,
2556 provider_override: Option<String>,
2557 model_override: Option<String>,
2558 system_prompt_override: Option<String>,
2559 max_tokens_override: Option<String>,
2560 temperature_override: Option<String>,
2561 attachments: Vec<String>,
2562 images: Vec<String>,
2563 tools: Option<String>,
2564 vectordb: Option<String>,
2565 use_search: Option<String>,
2566 stream: bool,
2567) -> Result<()> {
2568 let config = config::Config::load()?;
2569 let db = database::Database::new()?;
2570
2571 let attachment_content = read_and_format_attachments(&attachments)?;
2576
2577 let processed_images = if !images.is_empty() {
2579 crate::image_utils::process_images(&images)?
2580 } else {
2581 Vec::new()
2582 };
2583
2584 let final_prompt = if attachment_content.is_empty() {
2586 prompt.clone()
2587 } else {
2588 format!("{}\n\n{}", prompt, attachment_content)
2589 };
2590
2591 let system_prompt = if let Some(override_prompt) = &system_prompt_override {
2593 Some(config.resolve_template_or_prompt(override_prompt))
2594 } else if let Some(config_prompt) = &config.system_prompt {
2595 Some(config.resolve_template_or_prompt(config_prompt))
2596 } else {
2597 None
2598 };
2599 let system_prompt = system_prompt.as_deref();
2600
2601 let max_tokens = if let Some(override_tokens) = &max_tokens_override {
2603 Some(config::Config::parse_max_tokens(override_tokens)?)
2604 } else {
2605 config.max_tokens
2606 };
2607
2608 let temperature = if let Some(override_temp) = &temperature_override {
2610 Some(config::Config::parse_temperature(override_temp)?)
2611 } else {
2612 config.temperature
2613 };
2614
2615 let (mcp_tools, mcp_server_names) = if let Some(tools_str) = &tools {
2617 fetch_mcp_tools(tools_str).await?
2618 } else {
2619 (None, Vec::new())
2620 };
2621
2622 let (provider_name, model_name) =
2624 resolve_model_and_provider(&config, provider_override, model_override)?;
2625
2626 let provider_config = config.get_provider(&provider_name)?;
2628
2629 if provider_config.api_key.is_none() {
2630 anyhow::bail!(
2631 "No API key configured for provider '{}'. Add one with 'lc keys add {}'",
2632 provider_name,
2633 provider_name
2634 );
2635 }
2636
2637 let mut config_mut = config.clone();
2638 let client = chat::create_authenticated_client(&mut config_mut, &provider_name).await?;
2639
2640 if config_mut.get_cached_token(&provider_name) != config.get_cached_token(&provider_name) {
2642 config_mut.save()?;
2643 }
2644
2645 let session_id = uuid::Uuid::new_v4().to_string();
2647 db.set_current_session_id(&session_id)?;
2648
2649 let mut enhanced_prompt = final_prompt.clone();
2651 if let Some(ref db_name) = vectordb {
2652 match retrieve_rag_context(db_name, &final_prompt, &client, &model_name, &provider_name)
2653 .await
2654 {
2655 Ok(context) => {
2656 if !context.is_empty() {
2657 enhanced_prompt = format!(
2658 "Context from knowledge base:\n{}\n\nUser question: {}",
2659 context, final_prompt
2660 );
2661 println!(
2662 "{} Retrieved {} relevant context entries from '{}'",
2663 "đ§ ".blue(),
2664 context.lines().filter(|l| l.starts_with("- ")).count(),
2665 db_name
2666 );
2667 }
2668 }
2669 Err(e) => {
2670 eprintln!("Warning: Failed to retrieve RAG context: {}", e);
2671 }
2672 }
2673 }
2674
2675 if let Some(search_spec) = use_search {
2677 match integrate_search_context(&search_spec, &prompt, &mut enhanced_prompt).await {
2678 Ok(search_performed) => {
2679 if search_performed {
2680 println!("{} Search results integrated into context", "đ".blue());
2681 }
2682 }
2683 Err(e) => {
2684 eprintln!("Warning: Failed to integrate search results: {}", e);
2685 }
2686 }
2687 }
2688
2689 let use_streaming = stream || config.stream.unwrap_or(false);
2691
2692 let messages = if !processed_images.is_empty() {
2694 let mut content_parts = vec![crate::provider::ContentPart::Text {
2696 text: enhanced_prompt.clone(),
2697 }];
2698
2699 for image_url in processed_images {
2701 content_parts.push(crate::provider::ContentPart::ImageUrl {
2702 image_url: crate::provider::ImageUrl {
2703 url: image_url,
2704 detail: Some("auto".to_string()),
2705 },
2706 });
2707 }
2708
2709 vec![crate::provider::Message {
2710 role: "user".to_string(),
2711 content_type: crate::provider::MessageContent::Multimodal {
2712 content: content_parts,
2713 },
2714 tool_calls: None,
2715 tool_call_id: None,
2716 }]
2717 } else {
2718 vec![]
2720 };
2721
2722 if use_streaming {
2724 if mcp_tools.is_some() && !mcp_server_names.is_empty() {
2726 print!("{}", "Thinking...".dimmed());
2728 io::stdout().flush()?;
2730 let server_refs: Vec<&str> = mcp_server_names.iter().map(|s| s.as_str()).collect();
2731
2732 let result = if !messages.is_empty() {
2734 chat::send_chat_request_with_tool_execution_messages(
2735 &client,
2736 &model_name,
2737 &messages,
2738 system_prompt,
2739 max_tokens,
2740 temperature,
2741 &provider_name,
2742 mcp_tools,
2743 &server_refs,
2744 )
2745 .await
2746 } else {
2747 chat::send_chat_request_with_tool_execution(
2748 &client,
2749 &model_name,
2750 &enhanced_prompt,
2751 &[],
2752 system_prompt,
2753 max_tokens,
2754 temperature,
2755 &provider_name,
2756 mcp_tools,
2757 &server_refs,
2758 )
2759 .await
2760 };
2761
2762 match result {
2763 Ok((response, input_tokens, output_tokens)) => {
2764                    print!("\r{}\r", " ".repeat(12));
2765                    println!("{}", response);
2766
2767 if let Err(e) = db.save_chat_entry_with_tokens(
2769 &session_id,
2770 &model_name,
2771 &prompt,
2772 &response,
2773 input_tokens,
2774 output_tokens,
2775 ) {
2776 eprintln!("Warning: Failed to save chat entry: {}", e);
2777 }
2778 }
2779 Err(e) => {
2780                    print!("\r{}\r", " ".repeat(12));
2781                    anyhow::bail!("Error: {}", e);
2782 }
2783 }
2784 } else {
2785 let result = if !messages.is_empty() {
2787 chat::send_chat_request_with_streaming_messages(
2788 &client,
2789 &model_name,
2790 &messages,
2791 system_prompt,
2792 max_tokens,
2793 temperature,
2794 &provider_name,
2795 None,
2796 )
2797 .await
2798 } else {
2799 chat::send_chat_request_with_streaming(
2800 &client,
2801 &model_name,
2802 &enhanced_prompt,
2803 &[],
2804 system_prompt,
2805 max_tokens,
2806 temperature,
2807 &provider_name,
2808 None,
2809 )
2810 .await
2811 };
2812
2813 match result {
2814 Ok(_) => {
2815 println!();
2817
2818 if let Err(e) = db.save_chat_entry_with_tokens(
2820 &session_id,
2821 &model_name,
2822 &prompt,
2823 "[Streamed Response]",
2824 None,
2825 None,
2826 ) {
2827 eprintln!("Warning: Failed to save chat entry: {}", e);
2828 }
2829 }
2830 Err(e) => {
2831 anyhow::bail!("Error: {}", e);
2832 }
2833 }
2834 }
2835 } else {
2836 print!("{}", "Thinking...".dimmed());
2838 io::stdout().flush()?;
2840
2841 let result = if mcp_tools.is_some() && !mcp_server_names.is_empty() {
2842 let server_refs: Vec<&str> = mcp_server_names.iter().map(|s| s.as_str()).collect();
2844 if !messages.is_empty() {
2845 chat::send_chat_request_with_tool_execution_messages(
2846 &client,
2847 &model_name,
2848 &messages,
2849 system_prompt,
2850 max_tokens,
2851 temperature,
2852 &provider_name,
2853 mcp_tools,
2854 &server_refs,
2855 )
2856 .await
2857 } else {
2858 chat::send_chat_request_with_tool_execution(
2859 &client,
2860 &model_name,
2861 &enhanced_prompt,
2862 &[],
2863 system_prompt,
2864 max_tokens,
2865 temperature,
2866 &provider_name,
2867 mcp_tools,
2868 &server_refs,
2869 )
2870 .await
2871 }
2872 } else {
2873 if !messages.is_empty() {
2875 chat::send_chat_request_with_validation_messages(
2876 &client,
2877 &model_name,
2878 &messages,
2879 system_prompt,
2880 max_tokens,
2881 temperature,
2882 &provider_name,
2883 None,
2884 )
2885 .await
2886 } else {
2887 chat::send_chat_request_with_validation(
2888 &client,
2889 &model_name,
2890 &enhanced_prompt,
2891 &[],
2892 system_prompt,
2893 max_tokens,
2894 temperature,
2895 &provider_name,
2896 None,
2897 )
2898 .await
2899 }
2900 };
2901
2902 match result {
2903 Ok((response, input_tokens, output_tokens)) => {
2904                print!("\r{}\r", " ".repeat(20));
2905                println!("{}", response);
2906
2907 if let Err(e) = db.save_chat_entry_with_tokens(
2909 &session_id,
2910 &model_name,
2911 &prompt,
2912 &response,
2913 input_tokens,
2914 output_tokens,
2915 ) {
2916 eprintln!("Warning: Failed to save chat entry: {}", e);
2917 }
2918 }
2919 Err(e) => {
2920                print!("\r{}\r", " ".repeat(12));
2921                anyhow::bail!("Error: {}", e);
2922 }
2923 }
2924 }
2925
2926 Ok(())
2927}
2928
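/// Variant of the direct prompt path used when stdin is piped: the piped text
/// is wrapped as an `=== Piped Input ===` block and combined with any file
/// attachments before the request is sent. The chat log stores the piped
/// content (truncated to its first 100 bytes when longer) rather than the
/// full combined prompt.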
2929pub async fn handle_direct_prompt_with_piped_input(
2931 piped_content: String,
2932 provider_override: Option<String>,
2933 model_override: Option<String>,
2934 system_prompt_override: Option<String>,
2935 max_tokens_override: Option<String>,
2936 temperature_override: Option<String>,
2937 attachments: Vec<String>,
2938 images: Vec<String>,
2939 tools: Option<String>,
2940 vectordb: Option<String>,
2941 use_search: Option<String>,
2942 stream: bool,
2943) -> Result<()> {
2944 let prompt = "Please analyze the following content:".to_string();
2950
2951 let all_attachments = attachments;
2953
2954 let piped_attachment = format!("=== Piped Input ===\n{}", piped_content);
2956
2957 let config = config::Config::load()?;
2958 let db = database::Database::new()?;
2959
2960 let file_attachment_content = read_and_format_attachments(&all_attachments)?;
2962
2963 let final_prompt = if file_attachment_content.is_empty() {
2965 format!("{}\n\n{}", prompt, piped_attachment)
2966 } else {
2967 format!(
2968 "{}\n\n{}\n\n{}",
2969 prompt, piped_attachment, file_attachment_content
2970 )
2971 };
2972
2973 let system_prompt = if let Some(override_prompt) = &system_prompt_override {
2975 Some(config.resolve_template_or_prompt(override_prompt))
2976 } else if let Some(config_prompt) = &config.system_prompt {
2977 Some(config.resolve_template_or_prompt(config_prompt))
2978 } else {
2979 None
2980 };
2981 let system_prompt = system_prompt.as_deref();
2982
2983 let max_tokens = if let Some(override_tokens) = &max_tokens_override {
2985 Some(config::Config::parse_max_tokens(override_tokens)?)
2986 } else {
2987 config.max_tokens
2988 };
2989
2990 let temperature = if let Some(override_temp) = &temperature_override {
2992 Some(config::Config::parse_temperature(override_temp)?)
2993 } else {
2994 config.temperature
2995 };
2996
2997 let (mcp_tools, mcp_server_names) = if let Some(tools_str) = &tools {
2999 fetch_mcp_tools(tools_str).await?
3000 } else {
3001 (None, Vec::new())
3002 };
3003
3004 let (provider_name, model_name) =
3006 resolve_model_and_provider(&config, provider_override, model_override)?;
3007
3008 let provider_config = config.get_provider(&provider_name)?;
3010
3011 if provider_config.api_key.is_none() {
3012 anyhow::bail!(
3013 "No API key configured for provider '{}'. Add one with 'lc keys add {}'",
3014 provider_name,
3015 provider_name
3016 );
3017 }
3018
3019 let mut config_mut = config.clone();
3020 let client = chat::create_authenticated_client(&mut config_mut, &provider_name).await?;
3021
3022 if config_mut.get_cached_token(&provider_name) != config.get_cached_token(&provider_name) {
3024 config_mut.save()?;
3025 }
3026
3027 let session_id = uuid::Uuid::new_v4().to_string();
3029 db.set_current_session_id(&session_id)?;
3030
3031 let mut enhanced_prompt = final_prompt.clone();
3033 if let Some(ref db_name) = vectordb {
3034 match retrieve_rag_context(db_name, &final_prompt, &client, &model_name, &provider_name)
3035 .await
3036 {
3037 Ok(context) => {
3038 if !context.is_empty() {
3039 enhanced_prompt = format!(
3040 "Context from knowledge base:\n{}\n\nUser question: {}",
3041 context, final_prompt
3042 );
3043 println!(
3044 "{} Retrieved {} relevant context entries from '{}'",
3045 "đ§ ".blue(),
3046 context.lines().filter(|l| l.starts_with("- ")).count(),
3047 db_name
3048 );
3049 }
3050 }
3051 Err(e) => {
3052 eprintln!("Warning: Failed to retrieve RAG context: {}", e);
3053 }
3054 }
3055 }
3056
3057 if let Some(search_spec) = use_search {
3059 match integrate_search_context(&search_spec, &prompt, &mut enhanced_prompt).await {
3060 Ok(search_performed) => {
3061 if search_performed {
3062 println!("{} Search results integrated into context", "đ".blue());
3063 }
3064 }
3065 Err(e) => {
3066 eprintln!("Warning: Failed to integrate search results: {}", e);
3067 }
3068 }
3069 }
3070
3071 let use_streaming = stream || config.stream.unwrap_or(false);
3073
3074 let processed_images = if !images.is_empty() {
3076 crate::image_utils::process_images(&images)?
3077 } else {
3078 Vec::new()
3079 };
3080
3081 let messages = if !processed_images.is_empty() {
3083 let mut content_parts = vec![crate::provider::ContentPart::Text {
3085 text: enhanced_prompt.clone(),
3086 }];
3087
3088 for image_url in processed_images {
3090 content_parts.push(crate::provider::ContentPart::ImageUrl {
3091 image_url: crate::provider::ImageUrl {
3092 url: image_url,
3093 detail: Some("auto".to_string()),
3094 },
3095 });
3096 }
3097
3098 vec![crate::provider::Message {
3099 role: "user".to_string(),
3100 content_type: crate::provider::MessageContent::Multimodal {
3101 content: content_parts,
3102 },
3103 tool_calls: None,
3104 tool_call_id: None,
3105 }]
3106 } else {
3107 vec![]
3109 };
3110
3111 if use_streaming {
3113 if mcp_tools.is_some() && !mcp_server_names.is_empty() {
3115 print!("{}", "Thinking...".dimmed());
3117 io::stdout().flush()?;
3119 let server_refs: Vec<&str> = mcp_server_names.iter().map(|s| s.as_str()).collect();
3120 match chat::send_chat_request_with_tool_execution(
3121 &client,
3122 &model_name,
3123 &enhanced_prompt,
3124 &[],
3125 system_prompt,
3126 max_tokens,
3127 temperature,
3128 &provider_name,
3129 mcp_tools,
3130 &server_refs,
3131 )
3132 .await
3133 {
3134 Ok((response, input_tokens, output_tokens)) => {
3135                    print!("\r{}\r", " ".repeat(12));
3136                    println!("{}", response);
3137
3138 let log_prompt = if piped_content.len() > 100 {
3140 format!("{}... (piped content)", &piped_content[..100])
3141 } else {
3142 format!("{} (piped content)", piped_content)
3143 };
3144
3145 if let Err(e) = db.save_chat_entry_with_tokens(
3146 &session_id,
3147 &model_name,
3148 &log_prompt,
3149 &response,
3150 input_tokens,
3151 output_tokens,
3152 ) {
3153 eprintln!("Warning: Failed to save chat entry: {}", e);
3154 }
3155 }
3156 Err(e) => {
3157                    print!("\r{}\r", " ".repeat(12));
3158                    anyhow::bail!("Error: {}", e);
3159 }
3160 }
3161 } else {
3162 let result = if !messages.is_empty() {
3164 chat::send_chat_request_with_streaming_messages(
3165 &client,
3166 &model_name,
3167 &messages,
3168 system_prompt,
3169 max_tokens,
3170 temperature,
3171 &provider_name,
3172 None,
3173 )
3174 .await
3175 } else {
3176 chat::send_chat_request_with_streaming(
3177 &client,
3178 &model_name,
3179 &enhanced_prompt,
3180 &[],
3181 system_prompt,
3182 max_tokens,
3183 temperature,
3184 &provider_name,
3185 None,
3186 )
3187 .await
3188 };
3189
3190 match result {
3191 Ok(_) => {
3192 println!();
3194
3195 let log_prompt = if piped_content.len() > 100 {
3197 format!("{}... (piped content)", &piped_content[..100])
3198 } else {
3199 format!("{} (piped content)", piped_content)
3200 };
3201
3202 if let Err(e) = db.save_chat_entry_with_tokens(
3203 &session_id,
3204 &model_name,
3205 &log_prompt,
3206 "[Streamed Response]",
3207 None,
3208 None,
3209 ) {
3210 eprintln!("Warning: Failed to save chat entry: {}", e);
3211 }
3212 }
3213 Err(e) => {
3214 anyhow::bail!("Error: {}", e);
3215 }
3216 }
3217 }
3218 } else {
3219 print!("{}", "Thinking...".dimmed());
3221 io::stdout().flush()?;
3223
3224 let result = if mcp_tools.is_some() && !mcp_server_names.is_empty() {
3225 let server_refs: Vec<&str> = mcp_server_names.iter().map(|s| s.as_str()).collect();
3227 chat::send_chat_request_with_tool_execution(
3228 &client,
3229 &model_name,
3230 &enhanced_prompt,
3231 &[],
3232 system_prompt,
3233 max_tokens,
3234 temperature,
3235 &provider_name,
3236 mcp_tools,
3237 &server_refs,
3238 )
3239 .await
3240 } else {
3241 if !messages.is_empty() {
3243 chat::send_chat_request_with_validation_messages(
3244 &client,
3245 &model_name,
3246 &messages,
3247 system_prompt,
3248 max_tokens,
3249 temperature,
3250 &provider_name,
3251 None,
3252 )
3253 .await
3254 } else {
3255 chat::send_chat_request_with_validation(
3256 &client,
3257 &model_name,
3258 &enhanced_prompt,
3259 &[],
3260 system_prompt,
3261 max_tokens,
3262 temperature,
3263 &provider_name,
3264 None,
3265 )
3266 .await
3267 }
3268 };
3269
3270 match result {
3271 Ok((response, input_tokens, output_tokens)) => {
3272                print!("\r{}\r", " ".repeat(20));
3273                println!("{}", response);
3274
3275 let log_prompt = if piped_content.len() > 100 {
3277 format!("{}... (piped content)", &piped_content[..100])
3278 } else {
3279 format!("{} (piped content)", piped_content)
3280 };
3281
3282 if let Err(e) = db.save_chat_entry_with_tokens(
3283 &session_id,
3284 &model_name,
3285 &log_prompt,
3286 &response,
3287 input_tokens,
3288 output_tokens,
3289 ) {
3290 eprintln!("Warning: Failed to save chat entry: {}", e);
3291 }
3292 }
3293 Err(e) => {
3294                print!("\r{}\r", " ".repeat(12));
3295                anyhow::bail!("Error: {}", e);
3296 }
3297 }
3298 }
3299
3300 Ok(())
3301}
3302
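/// Runs the interactive chat REPL: keeps per-session history in the local
/// database, supports the `/exit`, `/clear`, `/model` and `/help` commands,
/// and attaches any initial `-i` images until the first successful response,
/// after which they are dropped.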
3303pub async fn handle_chat_command(
3305 model: Option<String>,
3306 provider: Option<String>,
3307 cid: Option<String>,
3308 tools: Option<String>,
3309 database: Option<String>,
3310 debug: bool,
3311 images: Vec<String>,
3312 stream: bool,
3313) -> Result<()> {
3314 if debug {
3316 set_debug_mode(true);
3317 }
3318 let config = config::Config::load()?;
3319 let db = database::Database::new()?;
3320
3321 let session_id = cid.unwrap_or_else(|| {
3326 let new_id = uuid::Uuid::new_v4().to_string();
3327 db.set_current_session_id(&new_id).unwrap();
3328 new_id
3329 });
3330
3331 let (provider_name, resolved_model) = resolve_model_and_provider(&config, provider, model)?;
3333 let _provider_config = config.get_provider(&provider_name)?;
3334
3335 let mut config_mut = config.clone();
3336 let client = chat::create_authenticated_client(&mut config_mut, &provider_name).await?;
3337
3338 if config_mut.get_cached_token(&provider_name) != config.get_cached_token(&provider_name) {
3340 config_mut.save()?;
3341 }
3342
3343 let (mcp_tools, mcp_server_names) = if let Some(tools_str) = &tools {
3345 fetch_mcp_tools(tools_str).await?
3346 } else {
3347 (None, Vec::new())
3348 };
3349
3350 let mut current_model = resolved_model.clone();
3351
3352 let mut processed_images = if !images.is_empty() {
3354 println!(
3355 "{} Processing {} initial image(s)...",
3356 "đŧī¸".blue(),
3357 images.len()
3358 );
3359 crate::image_utils::process_images(&images)?
3360 } else {
3361 Vec::new()
3362 };
3363
3364 println!("\n{} Interactive Chat Mode", "đ".blue());
3365 println!("{} Session ID: {}", "đ".blue(), session_id);
3366 println!("{} Model: {}", "đ¤".blue(), current_model);
3367 if !processed_images.is_empty() {
3368 println!("{} Initial images: {}", "đŧī¸".blue(), images.len());
3369 }
3370 if mcp_tools.is_some() && !mcp_server_names.is_empty() {
3371 println!(
3372 "{} Tools: {} (from MCP servers: {})",
3373 "đ§".blue(),
3374 mcp_tools.as_ref().unwrap().len(),
3375 mcp_server_names.join(", ")
3376 );
3377 }
3378 println!("{} Type /help for commands, /exit to quit", "đĄ".yellow());
3379 println!("{} Use Shift+Enter or Ctrl+J for multi-line input, Enter to send\n", "đĄ".yellow());
3380
3381 let mut input_handler = MultiLineInput::new();
3383
3384 loop {
3385 let input_string = match input_handler.read_input(&format!("{}", "You:".bold().green())) {
3387 Ok(input_text) => input_text.trim().to_string(),
3388 Err(_) => {
3389 print!("{} ", "You:".bold().green());
3391 io::stdout().flush()?;
3392
3393 let mut fallback_input = String::new();
3394 let bytes_read = io::stdin().read_line(&mut fallback_input)?;
3395
3396 if bytes_read == 0 {
3398 println!("Goodbye! đ");
3399 break;
3400 }
3401
3402 fallback_input.trim().to_string()
3403 }
3404 };
3405
3406 if input_string.is_empty() {
3407 continue;
3408 }
3409
3410 let input = input_string.as_str();
3411
3412 if input.starts_with('/') {
3414 match input {
3415 "/exit" => {
3416 println!("Goodbye! đ");
3417 break;
3418 }
3419 "/clear" => {
3420 db.clear_session(&session_id)?;
3421 println!("{} Session cleared", "â".green());
3422 continue;
3423 }
3424 "/help" => {
3425 println!("\n{}", "Available Commands:".bold().blue());
3426 println!(" /exit - Exit chat session");
3427 println!(" /clear - Clear current session");
3428 println!(" /model <name> - Change model");
3429 println!(" /help - Show this help");
3430 println!("\n{}", "Input Controls:".bold().blue());
3431 println!(" Enter - Send message");
3432 println!(" Shift+Enter - New line (multi-line input)");
3433 println!(" Ctrl+J - New line (alternative)");
3434 println!(" Ctrl+C - Cancel current input\n");
3435 continue;
3436 }
3437 _ if input.starts_with("/model ") => {
3438 let new_model = input.strip_prefix("/model ").unwrap().trim();
3439 if !new_model.is_empty() {
3440 current_model = new_model.to_string();
3441 println!("{} Model changed to: {}", "â".green(), current_model);
3442 } else {
3443 println!("{} Please specify a model name", "â".red());
3444 }
3445 continue;
3446 }
3447 _ => {
3448 println!(
3449 "{} Unknown command. Type /help for available commands",
3450 "â".red()
3451 );
3452 continue;
3453 }
3454 }
3455 }
3456
3457 let history = db.get_chat_history(&session_id)?;
3459
3460 let mut enhanced_input = input.to_string();
3462 if let Some(ref db_name) = database {
3463 match retrieve_rag_context(db_name, &input, &client, ¤t_model, &provider_name)
3464 .await
3465 {
3466 Ok(context) => {
3467 if !context.is_empty() {
3468 enhanced_input = format!(
3469 "Context from knowledge base:\n{}\n\nUser question: {}",
3470 context, input
3471 );
3472 println!(
3473 "{} Retrieved {} relevant context entries from '{}'",
3474 "đ§ ".blue(),
3475 context.lines().filter(|l| l.starts_with("- ")).count(),
3476 db_name
3477 );
3478 }
3479 }
3480 Err(e) => {
3481 eprintln!("Warning: Failed to retrieve RAG context: {}", e);
3482 }
3483 }
3484 }
3485
3486 let messages = if !processed_images.is_empty() {
3488 let mut msgs: Vec<crate::provider::Message> = history
3490 .iter()
3491 .flat_map(|entry| {
3492 vec![
3493 crate::provider::Message::user(entry.question.clone()),
3494 crate::provider::Message::assistant(entry.response.clone()),
3495 ]
3496 })
3497 .collect();
3498
3499 let mut content_parts = vec![crate::provider::ContentPart::Text {
3501 text: enhanced_input.clone(),
3502 }];
3503
3504 for image_url in &processed_images {
3506 content_parts.push(crate::provider::ContentPart::ImageUrl {
3507 image_url: crate::provider::ImageUrl {
3508 url: image_url.clone(),
3509 detail: Some("auto".to_string()),
3510 },
3511 });
3512 }
3513
3514 msgs.push(crate::provider::Message {
3515 role: "user".to_string(),
3516 content_type: crate::provider::MessageContent::Multimodal {
3517 content: content_parts,
3518 },
3519 tool_calls: None,
3520 tool_call_id: None,
3521 });
3522
3523 msgs
3524 } else {
3525 Vec::new()
3526 };
3527
3528 println!();
3530 print!("{}", "Thinking...".dimmed());
3531 io::stdout().flush()?;
3533
3534 let resolved_system_prompt = if let Some(system_prompt) = &config.system_prompt {
3535 Some(config.resolve_template_or_prompt(system_prompt))
3536 } else {
3537 None
3538 };
3539
3540 let use_streaming = stream || config.stream.unwrap_or(true);
3543
3544 if mcp_tools.is_some() && !mcp_server_names.is_empty() {
3545 let server_refs: Vec<&str> = mcp_server_names.iter().map(|s| s.as_str()).collect();
3547 let result = if !messages.is_empty() {
3548 chat::send_chat_request_with_tool_execution_messages(
3549 &client,
3550 ¤t_model,
3551 &messages,
3552 resolved_system_prompt.as_deref(),
3553 config.max_tokens,
3554 config.temperature,
3555 &provider_name,
3556 mcp_tools.clone(),
3557 &server_refs,
3558 )
3559 .await
3560 } else {
3561 chat::send_chat_request_with_tool_execution(
3562 &client,
3563 ¤t_model,
3564 &enhanced_input,
3565 &history,
3566 resolved_system_prompt.as_deref(),
3567 config.max_tokens,
3568 config.temperature,
3569 &provider_name,
3570 mcp_tools.clone(),
3571 &server_refs,
3572 )
3573 .await
3574 };
3575
3576 match result {
3577 Ok((response, input_tokens, output_tokens)) => {
3578                    print!("\r{}\r", " ".repeat(12));
3579                    println!("{} {}", "Assistant:".bold().blue(), response);
3580
3581 if let Err(e) = db.save_chat_entry_with_tokens(
3583 &session_id,
3584 ¤t_model,
3585 &input,
3586 &response,
3587 input_tokens,
3588 output_tokens,
3589 ) {
3590 eprintln!("Warning: Failed to save chat entry: {}", e);
3591 }
3592
3593 if !processed_images.is_empty() {
3595 processed_images.clear();
3596 }
3597 }
3598 Err(e) => {
3599                    print!("\r{}\r", " ".repeat(12));
3600                    println!("{} Error: {}", "â".red(), e);
3601 }
3602 }
3603 } else if use_streaming {
3604            print!("\r{}\r{} ", " ".repeat(12), "Assistant:".bold().blue());
3607            io::stdout().flush()?;
3608 let result = if !messages.is_empty() {
3609 chat::send_chat_request_with_streaming_messages(
3610 &client,
3611 ¤t_model,
3612 &messages,
3613 resolved_system_prompt.as_deref(),
3614 config.max_tokens,
3615 config.temperature,
3616 &provider_name,
3617 None,
3618 )
3619 .await
3620 } else {
3621 chat::send_chat_request_with_streaming(
3622 &client,
3623 ¤t_model,
3624 &enhanced_input,
3625 &history,
3626 resolved_system_prompt.as_deref(),
3627 config.max_tokens,
3628 config.temperature,
3629 &provider_name,
3630 None,
3631 )
3632 .await
3633 };
3634
3635 match result {
3636 Ok(_) => {
3637 println!();
3639
3640 if let Err(e) = db.save_chat_entry_with_tokens(
3642 &session_id,
3643 ¤t_model,
3644 &input,
3645 "[Streamed Response]",
3646 None,
3647 None,
3648 ) {
3649 eprintln!("Warning: Failed to save chat entry: {}", e);
3650 }
3651
3652 if !processed_images.is_empty() {
3654 processed_images.clear();
3655 }
3656 }
3657 Err(e) => {
3658 println!("\n{} Error: {}", "â".red(), e);
3659 }
3660 }
3661 } else {
3662 let result = if !messages.is_empty() {
3664 chat::send_chat_request_with_validation_messages(
3665 &client,
3666 ¤t_model,
3667 &messages,
3668 resolved_system_prompt.as_deref(),
3669 config.max_tokens,
3670 config.temperature,
3671 &provider_name,
3672 None,
3673 )
3674 .await
3675 } else {
3676 chat::send_chat_request_with_validation(
3677 &client,
3678 ¤t_model,
3679 &enhanced_input,
3680 &history,
3681 resolved_system_prompt.as_deref(),
3682 config.max_tokens,
3683 config.temperature,
3684 &provider_name,
3685 None,
3686 )
3687 .await
3688 };
3689
3690 match result {
3691 Ok((response, input_tokens, output_tokens)) => {
3692                    print!("\r{}\r", " ".repeat(12));
3693                    println!("{} {}", "Assistant:".bold().blue(), response);
3694
3695 if let Err(e) = db.save_chat_entry_with_tokens(
3697 &session_id,
3698 ¤t_model,
3699 &input,
3700 &response,
3701 input_tokens,
3702 output_tokens,
3703 ) {
3704 eprintln!("Warning: Failed to save chat entry: {}", e);
3705 }
3706
3707 if !processed_images.is_empty() {
3709 processed_images.clear();
3710 }
3711 }
3712 Err(e) => {
3713                    print!("\r{}\r", " ".repeat(12));
3714                    println!("{} Error: {}", "â".red(), e);
3715 }
3716 }
3717 }
3718
3719        println!();
3720    }
3721
3722 Ok(())
3723}
3724
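/// Entry point for `lc models`: refreshes or inspects the unified model
/// cache, dumps raw provider responses, lists embedding models, manages
/// metadata paths and tags, and filters or searches the cached model list by
/// capability, context length and price.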
3725pub async fn handle_models_command(
3727 command: Option<ModelsCommands>,
3728 query: Option<String>,
3729 tags: Option<String>,
3730 context_length: Option<String>,
3731 input_length: Option<String>,
3732 output_length: Option<String>,
3733 input_price: Option<f64>,
3734 output_price: Option<f64>,
3735) -> Result<()> {
3736 use colored::Colorize;
3737
3738 match command {
3739 Some(ModelsCommands::Refresh) => {
3740 crate::unified_cache::UnifiedCache::refresh_all_providers().await?;
3741 }
3742 Some(ModelsCommands::Info) => {
3743 debug_log!("Handling models info command");
3744
3745 let models_dir = crate::unified_cache::UnifiedCache::models_dir()?;
3746 debug_log!("Models cache directory: {}", models_dir.display());
3747
3748 println!("\n{}", "Models Cache Information:".bold().blue());
3749 println!("Cache Directory: {}", models_dir.display());
3750
3751 if !models_dir.exists() {
3752 debug_log!("Cache directory does not exist");
3753 println!("Status: No cache directory found");
3754 return Ok(());
3755 }
3756
3757 let entries = std::fs::read_dir(&models_dir)?;
3758 let mut provider_count = 0;
3759 let mut total_models = 0;
3760
3761 debug_log!("Reading cache directory entries");
3762
3763 let mut provider_info = Vec::new();
3765 for entry in entries {
3766 let entry = entry?;
3767 let path = entry.path();
3768
3769 if let Some(extension) = path.extension() {
3770 if extension == "json" {
3771 if let Some(provider_name) = path.file_stem().and_then(|s| s.to_str()) {
3772 debug_log!("Processing cache file for provider: {}", provider_name);
3773 provider_count += 1;
3774 match crate::unified_cache::UnifiedCache::load_provider_models(
3775 provider_name,
3776 )
3777 .await
3778 {
3779 Ok(models) => {
3780 let count = models.len();
3781 total_models += count;
3782 debug_log!(
3783 "Provider '{}' has {} cached models",
3784 provider_name,
3785 count
3786 );
3787
3788 let age_display =
3789 crate::unified_cache::UnifiedCache::get_cache_age_display(
3790 provider_name,
3791 )
3792 .await
3793 .unwrap_or_else(|_| "Unknown".to_string());
3794 let is_fresh =
3795 crate::unified_cache::UnifiedCache::is_cache_fresh(
3796 provider_name,
3797 )
3798 .await
3799 .unwrap_or(false);
3800 debug_log!(
3801 "Provider '{}' cache age: {}, fresh: {}",
3802 provider_name,
3803 age_display,
3804 is_fresh
3805 );
3806
3807 let status = if is_fresh {
3808 age_display.green()
3809 } else {
3810 format!("{} (expired)", age_display).red()
3811 };
3812 provider_info.push((provider_name.to_string(), count, status));
3813 }
3814 Err(e) => {
3815 debug_log!(
3816 "Error loading cache for provider '{}': {}",
3817 provider_name,
3818 e
3819 );
3820 provider_info.push((
3821 provider_name.to_string(),
3822 0,
3823 "Error loading cache".red(),
3824 ));
3825 }
3826 }
3827 }
3828 }
3829 }
3830 }
3831
3832 debug_log!("Sorting {} providers alphabetically", provider_info.len());
3833
3834 provider_info.sort_by(|a, b| a.0.cmp(&b.0));
3836
3837 println!("\nCached Providers:");
3838 for (provider_name, count, status) in provider_info {
3839 if count > 0 {
3840 println!(
3841 " {} {} - {} models ({})",
3842 "âĸ".blue(),
3843 provider_name.bold(),
3844 count,
3845 status
3846 );
3847 } else {
3848 println!(" {} {} - {}", "âĸ".blue(), provider_name.bold(), status);
3849 }
3850 }
3851
3852 debug_log!(
3853 "Cache summary: {} providers, {} total models",
3854 provider_count,
3855 total_models
3856 );
3857
3858 println!("\nSummary:");
3859 println!(" Providers: {}", provider_count);
3860 println!(" Total Models: {}", total_models);
3861 }
3862 Some(ModelsCommands::Dump) => {
3863 dump_models_data().await?;
3864 }
3865 Some(ModelsCommands::Embed) => {
3866 debug_log!("Handling embedding models command");
3867
3868 debug_log!("Loading all cached models from unified cache");
3870 let enhanced_models =
3871 crate::unified_cache::UnifiedCache::load_all_cached_models().await?;
3872
3873 debug_log!("Loaded {} models from cache", enhanced_models.len());
3874
3875 if enhanced_models.is_empty() {
3877 debug_log!("No cached models found, refreshing all providers");
3878 println!("No cached models found. Refreshing all providers...");
3879 crate::unified_cache::UnifiedCache::refresh_all_providers().await?;
3880 let enhanced_models =
3881 crate::unified_cache::UnifiedCache::load_all_cached_models().await?;
3882
3883 debug_log!("After refresh, loaded {} models", enhanced_models.len());
3884
3885 if enhanced_models.is_empty() {
3886 debug_log!("Still no models found after refresh");
3887 println!("No models found after refresh.");
3888 return Ok(());
3889 }
3890 }
3891
3892 debug_log!("Filtering for embedding models");
3893
3894 let embedding_models: Vec<_> = enhanced_models
3896 .into_iter()
3897 .filter(|model| {
3898 matches!(
3899 model.model_type,
3900 crate::model_metadata::ModelType::Embedding
3901 )
3902 })
3903 .collect();
3904
3905 debug_log!("Found {} embedding models", embedding_models.len());
3906
3907 if embedding_models.is_empty() {
3908 println!("No embedding models found.");
3909 return Ok(());
3910 }
3911
3912 debug_log!("Displaying {} embedding models", embedding_models.len());
3914 display_embedding_models(&embedding_models)?;
3915 }
3916 Some(ModelsCommands::Path { command }) => match command {
3917 ModelsPathCommands::List => {
3918 crate::model_metadata::list_model_paths()?;
3919 }
3920 ModelsPathCommands::Add { path } => {
3921 crate::model_metadata::add_model_path(path)?;
3922 }
3923 ModelsPathCommands::Delete { path } => {
3924 crate::model_metadata::remove_model_path(path)?;
3925 }
3926 },
3927 Some(ModelsCommands::Tags { command }) => {
3928 match command {
3929 ModelsTagsCommands::List => {
3930 crate::model_metadata::list_tags()?;
3931 }
3932 ModelsTagsCommands::Add { tag, rule } => {
3933 crate::model_metadata::add_tag(tag, vec![rule], "string".to_string(), None)?;
3935 }
3936 }
3937 }
3938 Some(ModelsCommands::Filter { tags: filter_tags }) => {
3939 let models = crate::unified_cache::UnifiedCache::load_all_cached_models().await?;
3941
3942 let required_tags: Vec<&str> = filter_tags.split(',').map(|s| s.trim()).collect();
3944
3945 let filtered: Vec<_> = models
3947 .into_iter()
3948 .filter(|model| {
3949 for tag in &required_tags {
3950 match *tag {
3951 "tools" => {
3952 if !model.supports_tools && !model.supports_function_calling {
3953 return false;
3954 }
3955 }
3956 "vision" => {
3957 if !model.supports_vision {
3958 return false;
3959 }
3960 }
3961 "audio" => {
3962 if !model.supports_audio {
3963 return false;
3964 }
3965 }
3966 "reasoning" => {
3967 if !model.supports_reasoning {
3968 return false;
3969 }
3970 }
3971 "code" => {
3972 if !model.supports_code {
3973 return false;
3974 }
3975 }
3976 _ => {
3977 if tag.starts_with("ctx") {
3979 if let Some(ctx) = model.context_length {
3980 if tag.contains('>') {
3981 if let Some(min_str) = tag.split('>').nth(1) {
3982 if let Ok(min_ctx) = parse_token_count(min_str) {
3983 if ctx < min_ctx {
3984 return false;
3985 }
3986 }
3987 }
3988 }
3989 }
3990 }
3991 }
3992 }
3993 }
3994 true
3995 })
3996 .collect();
3997
3998 if filtered.is_empty() {
3999 println!("No models found with tags: {}", filter_tags);
4000 } else {
4001 println!(
4002 "\n{} Models with tags [{}] ({} found):",
4003 "Filtered Results:".bold().blue(),
4004 filter_tags,
4005 filtered.len()
4006 );
4007
4008 let mut current_provider = String::new();
4009 for model in filtered {
4010 if model.provider != current_provider {
4011 current_provider = model.provider.clone();
4012 println!("\n{}", format!("{}:", current_provider).bold().green());
4013 }
4014
4015 print!(" {} {}", "âĸ".blue(), model.id.bold());
4016
4017 let mut capabilities = Vec::new();
4019 if model.supports_tools || model.supports_function_calling {
4020 capabilities.push("đ§ tools".blue());
4021 }
4022 if model.supports_vision {
4023 capabilities.push("đ vision".magenta());
4024 }
4025 if model.supports_audio {
4026 capabilities.push("đ audio".yellow());
4027 }
4028 if model.supports_reasoning {
4029 capabilities.push("đ§ reasoning".cyan());
4030 }
4031 if model.supports_code {
4032 capabilities.push("đģ code".green());
4033 }
4034
4035 if !capabilities.is_empty() {
4036 let capability_strings: Vec<String> =
4037 capabilities.iter().map(|c| c.to_string()).collect();
4038 print!(" [{}]", capability_strings.join(" "));
4039 }
4040
4041 if let Some(ctx) = model.context_length {
4043 if ctx >= 1000 {
4044 print!(" ({}k ctx)", ctx / 1000);
4045 } else {
4046 print!(" ({} ctx)", ctx);
4047 }
4048 }
4049
4050 println!();
4051 }
4052 }
4053 }
4054 None => {
4055 debug_log!("Handling global models command");
4056
4057 debug_log!("Loading all cached models from unified cache");
4059 let enhanced_models =
4060 crate::unified_cache::UnifiedCache::load_all_cached_models().await?;
4061
4062 debug_log!("Loaded {} models from cache", enhanced_models.len());
4063
4064 if enhanced_models.is_empty() {
4066 debug_log!("No cached models found, refreshing all providers");
4067 println!("No cached models found. Refreshing all providers...");
4068 crate::unified_cache::UnifiedCache::refresh_all_providers().await?;
4069 let enhanced_models =
4070 crate::unified_cache::UnifiedCache::load_all_cached_models().await?;
4071
4072 debug_log!("After refresh, loaded {} models", enhanced_models.len());
4073
4074 if enhanced_models.is_empty() {
4075 debug_log!("Still no models found after refresh");
4076 println!("No models found after refresh.");
4077 return Ok(());
4078 }
4079 }
4080
4081 debug_log!("Applying filters to {} models", enhanced_models.len());
4082
4083 let tag_filters = if let Some(ref tag_str) = tags {
4085 let tags_vec: Vec<String> =
4086 tag_str.split(',').map(|s| s.trim().to_string()).collect();
4087 Some(tags_vec)
4088 } else {
4089 None
4090 };
4091
4092 let filtered_models = apply_model_filters_with_tags(
4094 enhanced_models,
4095 &query,
4096 tag_filters,
4097 &context_length,
4098 &input_length,
4099 &output_length,
4100 input_price,
4101 output_price,
4102 )?;
4103
4104 debug_log!("After filtering, {} models remain", filtered_models.len());
4105
4106 if filtered_models.is_empty() {
4107 debug_log!("No models match the specified criteria");
4108 println!("No models found matching the specified criteria.");
4109 return Ok(());
4110 }
4111
4112 debug_log!("Displaying {} filtered models", filtered_models.len());
4114 display_enhanced_models(&filtered_models, &query)?;
4115 }
4116 }
4117
4118 Ok(())
4119}
4120
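/// Adds, deletes or lists named prompt templates stored in the config file.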
4121pub async fn handle_template_command(command: TemplateCommands) -> Result<()> {
4123 use colored::Colorize;
4124
4125 match command {
4126 TemplateCommands::Add { name, prompt } => {
4127 let mut config = config::Config::load()?;
4128 config.add_template(name.clone(), prompt.clone())?;
4129 config.save()?;
4130 println!("{} Template '{}' added", "â".green(), name);
4131 }
4132 TemplateCommands::Delete { name } => {
4133 let mut config = config::Config::load()?;
4134 config.remove_template(name.clone())?;
4135 config.save()?;
4136 println!("{} Template '{}' removed", "â".green(), name);
4137 }
4138 TemplateCommands::List => {
4139 let config = config::Config::load()?;
4140 let templates = config.list_templates();
4141
4142 if templates.is_empty() {
4143 println!("No templates configured.");
4144 } else {
4145 println!("\n{}", "Templates:".bold().blue());
4146 for (name, prompt) in templates {
4147 let display_prompt = if prompt.len() > 60 {
4148 format!("{}...", &prompt[..60])
4149 } else {
4150 prompt.clone()
4151 };
4152 println!(" {} {} -> {}", "âĸ".blue(), name.bold(), display_prompt);
4153 }
4154 }
4155 }
4156 }
4157
4158 Ok(())
4159}
4160
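/// Starts the local proxy server after validating the optional provider and
/// model filters, optionally generating an API key for authentication, and
/// printing the exposed `/models` and `/chat/completions` style endpoints.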
4161pub async fn handle_proxy_command(
4163 port: u16,
4164 host: String,
4165 provider: Option<String>,
4166 model: Option<String>,
4167 api_key: Option<String>,
4168 generate_key: bool,
4169) -> Result<()> {
4170 use crate::proxy;
4171
4172 let final_api_key = if generate_key {
4174 let generated_key = proxy::generate_api_key();
4175 println!(
4176 "{} Generated API key: {}",
4177 "đ".green(),
4178 generated_key.bold()
4179 );
4180 Some(generated_key)
4181 } else {
4182 api_key
4183 };
4184
4185 if let Some(ref provider_name) = provider {
4187 let config = config::Config::load()?;
4188 if !config.has_provider(provider_name) {
4189 anyhow::bail!(
4190 "Provider '{}' not found. Add it first with 'lc providers add'",
4191 provider_name
4192 );
4193 }
4194 }
4195
4196 if let Some(ref model_name) = model {
4198 let config = config::Config::load()?;
4199
4200 if let Some(_alias_target) = config.get_alias(model_name) {
4202 } else if model_name.contains(':') {
4204 let parts: Vec<&str> = model_name.splitn(2, ':').collect();
4206 if parts.len() == 2 {
4207 let provider_name = parts[0];
4208 if !config.has_provider(provider_name) {
4209 anyhow::bail!(
4210 "Provider '{}' not found in model specification '{}'",
4211 provider_name,
4212 model_name
4213 );
4214 }
4215 }
4216 } else {
4217 }
4220 }
4221
4222 println!("\n{}", "Proxy Server Configuration:".bold().blue());
4224 println!(" {} {}:{}", "Address:".bold(), host, port);
4225
4226 if let Some(ref provider_filter) = provider {
4227 println!(
4228 " {} {}",
4229 "Provider Filter:".bold(),
4230 provider_filter.green()
4231 );
4232 } else {
4233 println!(
4234 " {} {}",
4235 "Provider Filter:".bold(),
4236 "All providers".dimmed()
4237 );
4238 }
4239
4240 if let Some(ref model_filter) = model {
4241 println!(" {} {}", "Model Filter:".bold(), model_filter.green());
4242 } else {
4243 println!(" {} {}", "Model Filter:".bold(), "All models".dimmed());
4244 }
4245
4246 if final_api_key.is_some() {
4247 println!(" {} {}", "Authentication:".bold(), "Enabled".green());
4248 } else {
4249 println!(" {} {}", "Authentication:".bold(), "Disabled".yellow());
4250 }
4251
4252 println!("\n{}", "Available endpoints:".bold().blue());
4253 println!(" {} http://{}:{}/models", "âĸ".blue(), host, port);
4254 println!(" {} http://{}:{}/v1/models", "âĸ".blue(), host, port);
4255 println!(" {} http://{}:{}/chat/completions", "âĸ".blue(), host, port);
4256 println!(
4257 " {} http://{}:{}/v1/chat/completions",
4258 "âĸ".blue(),
4259 host,
4260 port
4261 );
4262
4263 println!("\n{} Press Ctrl+C to stop the server\n", "đĄ".yellow());
4264
4265 proxy::start_proxy_server(host, port, provider, model, final_api_key).await?;
4267
4268 Ok(())
4269}
4270
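/// Fetches the raw models listing from every configured provider that has an
/// API key and writes each response to `models/<provider>.json` for offline
/// analysis.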
4271async fn dump_models_data() -> Result<()> {
4273 use crate::{chat, config::Config};
4274
4275 println!("{} Dumping /models for each provider...", "đ".blue());
4276
4277 let config = Config::load()?;
4279
4280 std::fs::create_dir_all("models")?;
4282
4283 let mut successful_dumps = 0;
4284 let mut total_providers = 0;
4285
4286 for (provider_name, provider_config) in &config.providers {
4287 total_providers += 1;
4288
4289 if provider_config.api_key.is_none() {
4291 println!("{} Skipping {} (no API key)", "â ī¸".yellow(), provider_name);
4292 continue;
4293 }
4294
4295 println!("{} Fetching models from {}...", "đĄ".blue(), provider_name);
4296
4297 let mut config_mut = config.clone();
4299 match chat::create_authenticated_client(&mut config_mut, provider_name).await {
4300 Ok(client) => {
4301 match fetch_raw_models_response(&client, provider_config).await {
4303 Ok(raw_response) => {
4304 let filename = format!("models/{}.json", provider_name);
4306 match std::fs::write(&filename, &raw_response) {
4307 Ok(_) => {
4308 println!(
4309 "{} Saved {} models data to {}",
4310                                    "✅".green(),
4311 provider_name,
4312 filename
4313 );
4314 successful_dumps += 1;
4315 }
4316 Err(e) => {
4317 println!(
4318 "{} Failed to save {} models data: {}",
4319 "â".red(),
4320 provider_name,
4321 e
4322 );
4323 }
4324 }
4325 }
4326 Err(e) => {
4327 println!(
4328 "{} Failed to fetch models from {}: {}",
4329 "â".red(),
4330 provider_name,
4331 e
4332 );
4333 }
4334 }
4335 }
4336 Err(e) => {
4337 println!(
4338 "{} Failed to create client for {}: {}",
4339 "â".red(),
4340 provider_name,
4341 e
4342 );
4343 }
4344 }
4345 }
4346
4347 println!("\n{} Summary:", "đ".blue());
4348 println!(" Total providers: {}", total_providers);
4349 println!(" Successful dumps: {}", successful_dumps);
4350 println!(" Models data saved to: ./models/");
4351
4352 if successful_dumps > 0 {
4353 println!("\n{} Model data collection complete!", "đ".green());
4354 println!(" Next step: Analyze the JSON files to extract metadata patterns");
4355 }
4356
4357 Ok(())
4358}
4359
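/// Applies the CLI filters (text query, capability tags, minimum context,
/// input and output lengths, maximum prices) to the cached model metadata and
/// returns the survivors sorted by provider and then model id.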
4360fn apply_model_filters_with_tags(
4361 models: Vec<crate::model_metadata::ModelMetadata>,
4362 query: &Option<String>,
4363 tag_filters: Option<Vec<String>>,
4364 context_length: &Option<String>,
4365 input_length: &Option<String>,
4366 output_length: &Option<String>,
4367 input_price: Option<f64>,
4368 output_price: Option<f64>,
4369) -> Result<Vec<crate::model_metadata::ModelMetadata>> {
4370 let mut filtered = models;
4371
4372 if let Some(ref search_query) = query {
4374 let query_lower = search_query.to_lowercase();
4375 filtered.retain(|model| {
4376 model.id.to_lowercase().contains(&query_lower)
4377 || model
4378 .display_name
4379 .as_ref()
4380 .map_or(false, |name| name.to_lowercase().contains(&query_lower))
4381 || model
4382 .description
4383 .as_ref()
4384 .map_or(false, |desc| desc.to_lowercase().contains(&query_lower))
4385 });
4386 }
4387
4388 if let Some(tags) = tag_filters {
4390 for tag in tags {
4391 match tag.as_str() {
4392 "tools" => {
4393 filtered
4394 .retain(|model| model.supports_tools || model.supports_function_calling);
4395 }
4396 "reasoning" => {
4397 filtered.retain(|model| model.supports_reasoning);
4398 }
4399 "vision" => {
4400 filtered.retain(|model| model.supports_vision);
4401 }
4402 "audio" => {
4403 filtered.retain(|model| model.supports_audio);
4404 }
4405 "code" => {
4406 filtered.retain(|model| model.supports_code);
4407 }
4408 _ => {
4409 }
4411 }
4412 }
4413 }
4414
4415 if let Some(ref ctx_str) = context_length {
4417 let min_ctx = parse_token_count(ctx_str)?;
4418 filtered.retain(|model| model.context_length.map_or(false, |ctx| ctx >= min_ctx));
4419 }
4420
4421 if let Some(ref input_str) = input_length {
4423 let min_input = parse_token_count(input_str)?;
4424 filtered.retain(|model| {
4425 model
4426 .max_input_tokens
4427 .map_or(false, |input| input >= min_input)
4428 || model.context_length.map_or(false, |ctx| ctx >= min_input)
4429 });
4430 }
4431
4432 if let Some(ref output_str) = output_length {
4434 let min_output = parse_token_count(output_str)?;
4435 filtered.retain(|model| {
4436 model
4437 .max_output_tokens
4438 .map_or(false, |output| output >= min_output)
4439 });
4440 }
4441
4442 if let Some(max_input_price) = input_price {
4444 filtered.retain(|model| {
4445 model
4446 .input_price_per_m
4447 .map_or(true, |price| price <= max_input_price)
4448 });
4449 }
4450
4451 if let Some(max_output_price) = output_price {
4452 filtered.retain(|model| {
4453 model
4454 .output_price_per_m
4455 .map_or(true, |price| price <= max_output_price)
4456 });
4457 }
4458
4459 filtered.sort_by(|a, b| a.provider.cmp(&b.provider).then(a.id.cmp(&b.id)));
4461
4462 Ok(filtered)
4463}
4464
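/// Parses a human-friendly token count such as "32000", "32k" or "1m" into a
/// u32; for example `parse_token_count("128k")` yields `Ok(128_000)`.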
4465fn parse_token_count(input: &str) -> Result<u32> {
4466 let input = input.to_lowercase();
4467 if let Some(num_str) = input.strip_suffix('k') {
4468 let num: f32 = num_str
4469 .parse()
4470 .map_err(|_| anyhow::anyhow!("Invalid token count format: '{}'", input))?;
4471 Ok((num * 1000.0) as u32)
4472 } else if let Some(num_str) = input.strip_suffix('m') {
4473 let num: f32 = num_str
4474 .parse()
4475 .map_err(|_| anyhow::anyhow!("Invalid token count format: '{}'", input))?;
4476 Ok((num * 1000000.0) as u32)
4477 } else {
4478 input
4479 .parse()
4480 .map_err(|_| anyhow::anyhow!("Invalid token count format: '{}'", input))
4481 }
4482}
4483
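/// Pretty-prints the filtered model list grouped by provider, annotating each
/// entry with capability icons and context/output sizes.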
4484fn display_enhanced_models(
4485 models: &[crate::model_metadata::ModelMetadata],
4486 query: &Option<String>,
4487) -> Result<()> {
4488 use colored::Colorize;
4489
4490 if let Some(ref search_query) = query {
4491 println!(
4492 "\n{} Models matching '{}' ({} found):",
4493 "Search Results:".bold().blue(),
4494 search_query,
4495 models.len()
4496 );
4497 } else {
4498 println!(
4499 "\n{} Available models ({} total):",
4500 "Models:".bold().blue(),
4501 models.len()
4502 );
4503 }
4504
4505 let mut current_provider = String::new();
4506 for model in models {
4507 if model.provider != current_provider {
4508 current_provider = model.provider.clone();
4509 println!("\n{}", format!("{}:", current_provider).bold().green());
4510 }
4511
4512 let mut capabilities = Vec::new();
4514 if model.supports_tools || model.supports_function_calling {
4515 capabilities.push("đ§ tools".blue());
4516 }
4517 if model.supports_vision {
4518 capabilities.push("đ vision".magenta());
4519 }
4520 if model.supports_audio {
4521 capabilities.push("đ audio".yellow());
4522 }
4523 if model.supports_reasoning {
4524 capabilities.push("đ§ reasoning".cyan());
4525 }
4526 if model.supports_code {
4527 capabilities.push("đģ code".green());
4528 }
4529
4530 let mut context_info = Vec::new();
4532 if let Some(ctx) = model.context_length {
4533 context_info.push(format!("{}k ctx", ctx / 1000));
4534 }
4535 if let Some(max_out) = model.max_output_tokens {
4536 context_info.push(format!("{}k out", max_out / 1000));
4537 }
4538
4539 let model_display = if let Some(ref display_name) = model.display_name {
4541 format!("{} ({})", model.id, display_name)
4542 } else {
4543 model.id.clone()
4544 };
4545
4546 print!(" {} {}", "âĸ".blue(), model_display.bold());
4547
4548 if !capabilities.is_empty() {
4549 let capability_strings: Vec<String> =
4550 capabilities.iter().map(|c| c.to_string()).collect();
4551 print!(" [{}]", capability_strings.join(" "));
4552 }
4553
4554 if !context_info.is_empty() {
4555 print!(" ({})", context_info.join(", ").dimmed());
4556 }
4557
4558 println!();
4559 }
4560
4561 Ok(())
4562}
4563
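/// Performs a raw GET against the provider's models endpoint using a fresh
/// HTTP client (the passed `LLMClient` is unused) and returns the body,
/// pretty-printed when it parses as JSON. Custom provider headers take the
/// place of the default `Authorization: Bearer <key>` header.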
4564pub async fn fetch_raw_models_response(
4565 _client: &crate::chat::LLMClient,
4566 provider_config: &crate::config::ProviderConfig,
4567) -> Result<String> {
4568 use serde_json::Value;
4569
4570 let http_client = reqwest::Client::builder()
4573 .pool_max_idle_per_host(10)
4574 .pool_idle_timeout(std::time::Duration::from_secs(90))
4575 .tcp_keepalive(std::time::Duration::from_secs(60))
4576 .timeout(std::time::Duration::from_secs(60))
4577 .connect_timeout(std::time::Duration::from_secs(10))
4578 .build()?;
4579
4580 let url = provider_config.get_models_url();
4581
4582 debug_log!("Making API request to: {}", url);
4583 debug_log!("Request timeout: 60 seconds");
4584
4585 let mut req = http_client
4586 .get(&url)
4587 .header("Content-Type", "application/json");
4588
4589 debug_log!("Added Content-Type: application/json header");
4590
4591 let mut has_custom_headers = false;
4593 for (name, value) in &provider_config.headers {
4594 debug_log!("Adding custom header: {}: {}", name, value);
4595 req = req.header(name, value);
4596 has_custom_headers = true;
4597 }
4598
4599 if !has_custom_headers {
4601 req = req.header(
4602 "Authorization",
4603 format!("Bearer {}", provider_config.api_key.as_ref().unwrap()),
4604 );
4605 debug_log!("Added Authorization header with API key");
4606 } else {
4607 debug_log!("Skipping Authorization header due to custom headers present");
4608 }
4609
4610 debug_log!("Sending HTTP GET request...");
4611 let response = req.send().await?;
4612
4613 let status = response.status();
4614 debug_log!("Received response with status: {}", status);
4615
4616 if !status.is_success() {
4617 let text = response.text().await.unwrap_or_default();
4618 debug_log!("API request failed with error response: {}", text);
4619 anyhow::bail!("API request failed with status {}: {}", status, text);
4620 }
4621
4622 let response_text = response.text().await?;
4623 debug_log!("Received response body ({} bytes)", response_text.len());
4624
4625 match serde_json::from_str::<Value>(&response_text) {
4627 Ok(json_value) => {
4628 debug_log!("Response is valid JSON, pretty-printing");
4629 Ok(serde_json::to_string_pretty(&json_value)?)
4630 }
4631 Err(_) => {
4632 debug_log!("Response is not valid JSON, returning as-is");
4633 Ok(response_text)
4635 }
4636 }
4637}
4638
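/// Adds, deletes or lists model aliases (alias -> `provider:model`) stored in
/// the config file.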
4639pub async fn handle_alias_command(command: AliasCommands) -> Result<()> {
4641 use colored::Colorize;
4642
4643 match command {
4644 AliasCommands::Add { name, target } => {
4645 let mut config = config::Config::load()?;
4646 config.add_alias(name.clone(), target.clone())?;
4647 config.save()?;
4648 println!("{} Alias '{}' added for '{}'", "â".green(), name, target);
4649 }
4650 AliasCommands::Delete { name } => {
4651 let mut config = config::Config::load()?;
4652 config.remove_alias(name.clone())?;
4653 config.save()?;
4654 println!("{} Alias '{}' removed", "â".green(), name);
4655 }
4656 AliasCommands::List => {
4657 let config = config::Config::load()?;
4658 let aliases = config.list_aliases();
4659
4660 if aliases.is_empty() {
4661 println!("No aliases configured.");
4662 } else {
4663 println!("\n{}", "Model Aliases:".bold().blue());
4664 for (alias, target) in aliases {
4665 println!(" {} {} -> {}", "âĸ".blue(), alias.bold(), target);
4666 }
4667 }
4668 }
4669 }
4670
4671 Ok(())
4672}
4673
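/// Loads model metadata for a single provider from a previously dumped
/// `models/<provider>.json` file. A missing file yields an empty list, and
/// read or extraction failures are reported as warnings rather than errors.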
4674async fn load_provider_enhanced_models(
4676 provider_name: &str,
4677) -> Result<Vec<crate::model_metadata::ModelMetadata>> {
4678 use crate::model_metadata::MetadataExtractor;
4679 use std::fs;
4680
4681 let filename = format!("models/{}.json", provider_name);
4682
4683 if !std::path::Path::new(&filename).exists() {
4684 return Ok(Vec::new());
4685 }
4686
4687 match fs::read_to_string(&filename) {
4688 Ok(json_content) => {
4689 match MetadataExtractor::extract_from_provider(provider_name, &json_content) {
4690 Ok(models) => Ok(models),
4691 Err(e) => {
4692 eprintln!(
4693 "Warning: Failed to extract metadata from {}: {}",
4694 provider_name, e
4695 );
4696 Ok(Vec::new())
4697 }
4698 }
4699 }
4700 Err(e) => {
4701 eprintln!("Warning: Failed to read {}: {}", filename, e);
4702 Ok(Vec::new())
4703 }
4704 }
4705}
4706
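/// Prints a provider's models with capability icons, context/output sizes and
/// per-million-token pricing, logging a debug note when every capability flag
/// is false since that may indicate a metadata defaulting bug.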
4707fn display_provider_models(models: &[crate::model_metadata::ModelMetadata]) -> Result<()> {
4709 use colored::Colorize;
4710
4711 for model in models {
4712 if !model.supports_tools
4714 && !model.supports_function_calling
4715 && !model.supports_vision
4716 && !model.supports_audio
4717 && !model.supports_reasoning
4718 && !model.supports_code
4719 {
4720 debug_log!("All capability flags are false for model '{}' - this might indicate a defaulting bug", model.id);
4721 }
4722
4723 let mut capabilities = Vec::new();
4725 if model.supports_tools || model.supports_function_calling {
4726 capabilities.push("đ§ tools".blue());
4727 }
4728 if model.supports_vision {
4729 capabilities.push("đ vision".magenta());
4730 }
4731 if model.supports_audio {
4732 capabilities.push("đ audio".yellow());
4733 }
4734 if model.supports_reasoning {
4735 capabilities.push("đ§ reasoning".cyan());
4736 }
4737 if model.supports_code {
4738 capabilities.push("đģ code".green());
4739 }
4740
4741 let mut info_parts = Vec::new();
4743 if let Some(ctx) = model.context_length {
4744 if ctx >= 1000000 {
4745 info_parts.push(format!("{}m ctx", ctx / 1000000));
4746 } else if ctx >= 1000 {
4747 info_parts.push(format!("{}k ctx", ctx / 1000));
4748 } else {
4749 info_parts.push(format!("{} ctx", ctx));
4750 }
4751 }
4752 if let Some(max_out) = model.max_output_tokens {
4753 if max_out >= 1000 {
4754 info_parts.push(format!("{}k out", max_out / 1000));
4755 } else {
4756 info_parts.push(format!("{} out", max_out));
4757 }
4758 }
4759 if let Some(input_price) = model.input_price_per_m {
4760 info_parts.push(format!("${:.2}/M in", input_price));
4761 }
4762 if let Some(output_price) = model.output_price_per_m {
4763 info_parts.push(format!("${:.2}/M out", output_price));
4764 }
4765
4766 let model_display = if let Some(ref display_name) = model.display_name {
4768 if display_name != &model.id {
4769 format!("{} ({})", model.id, display_name)
4770 } else {
4771 model.id.clone()
4772 }
4773 } else {
4774 model.id.clone()
4775 };
4776
4777 print!(" {} {}", "âĸ".blue(), model_display.bold());
4778
4779 if !capabilities.is_empty() {
4780 let capability_strings: Vec<String> =
4781 capabilities.iter().map(|c| c.to_string()).collect();
4782 print!(" [{}]", capability_strings.join(" "));
4783 }
4784
4785 if !info_parts.is_empty() {
4786 print!(" ({})", info_parts.join(", ").dimmed());
4787 }
4788
4789 println!();
4790 }
4791
4792 Ok(())
4793}
4794
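/// Entry point for `lc mcp`: add, delete or list configured MCP servers, stop
/// a running server connection via the daemon, list a server's functions, or
/// invoke a single function with arguments.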
4795pub async fn handle_mcp_command(command: crate::cli::McpCommands) -> Result<()> {
4797 use crate::mcp::{McpConfig, McpServerType as ConfigMcpServerType};
4798 use colored::Colorize;
4799
4800 match command {
4801 crate::cli::McpCommands::Add {
4802 name,
4803 command_or_url,
4804 server_type,
4805 env,
4806 } => {
4807 let mut config = McpConfig::load()?;
4808
4809 let config_server_type = match server_type {
4811 crate::cli::McpServerType::Stdio => ConfigMcpServerType::Stdio,
4812 crate::cli::McpServerType::Sse => ConfigMcpServerType::Sse,
4813 crate::cli::McpServerType::Streamable => ConfigMcpServerType::Streamable,
4814 };
4815
4816 let env_map: HashMap<String, String> = env.into_iter().collect();
4818
4819 let final_command_or_url =
4821 if command_or_url.starts_with("npx ") && !command_or_url.contains(" -y ") {
4822 command_or_url.replacen("npx ", "npx -y ", 1)
4823 } else {
4824 command_or_url.clone()
4825 };
4826
4827 config.add_server_with_env(
4828 name.clone(),
4829 final_command_or_url.clone(),
4830 config_server_type,
4831 env_map.clone(),
4832 )?;
4833 config.save()?;
4834
4835 println!("{} MCP server '{}' added successfully", "â".green(), name);
4836 println!(" Type: {:?}", server_type);
4837 println!(" Command/URL: {}", final_command_or_url);
4838 if !env_map.is_empty() {
4839 println!(" Environment variables:");
4840 for (key, _) in env_map {
4841 println!(" - {}", key);
4842 }
4843 }
4844 }
4845 crate::cli::McpCommands::Delete { name } => {
4846 let mut config = McpConfig::load()?;
4847
4848 if config.get_server(&name).is_none() {
4849 anyhow::bail!("MCP server '{}' not found", name);
4850 }
4851
4852 config.delete_server(&name)?;
4853 config.save()?;
4854
4855 println!("{} MCP server '{}' deleted successfully", "â".green(), name);
4856 }
4857 crate::cli::McpCommands::List => {
4858 let config = McpConfig::load()?;
4859 let servers = config.list_servers();
4860
4861 if servers.is_empty() {
4862 println!("No MCP servers configured.");
4863 } else {
4864 println!("\n{} Configured MCP servers:", "Servers:".bold().blue());
4865 for (name, server_config) in servers {
4866 println!(
4867 " {} {} - {:?} ({})",
4868                        "•".blue(),
4869 name.bold(),
4870 server_config.server_type,
4871 server_config.command_or_url
4872 );
4873 }
4874 }
4875 }
4876 crate::cli::McpCommands::Stop { name } => {
4877            println!("{} Closing MCP server connection '{}'...", "🛑".red(), name);
4878
4879 let daemon_client = crate::mcp_daemon::DaemonClient::new()?;
4880 match daemon_client.close_server(&name).await {
4881 Ok(_) => {
4882 println!(
4883 "{} MCP server '{}' connection closed successfully",
4884                        "✓".green(),
4885 name
4886 );
4887 }
4888 Err(e) => {
4889 println!(
4890 "{} Failed to close MCP server '{}': {}",
4891                        "⚠️".yellow(),
4892 name,
4893 e
4894 );
4895 }
4896 }
4897 }
4898 crate::cli::McpCommands::Functions { name } => {
4899 let config = McpConfig::load()?;
4900
4901 if config.get_server(&name).is_some() {
4902 println!(
4903 "{} Listing functions for MCP server '{}'...",
4904                    "🔍".blue(),
4905 name
4906 );
4907
4908 let daemon_client = crate::mcp_daemon::DaemonClient::new()?;
4910
4911 crate::debug_log!("CLI: Starting MCP functions listing for server '{}'", name);
4912
4913 match daemon_client.ensure_server_connected(&name).await {
4915 Ok(_) => {
4916 crate::debug_log!("CLI: Server '{}' connected successfully", name);
4917 match daemon_client.list_tools(&name).await {
4918 Ok(all_tools) => {
4919 crate::debug_log!(
4920 "CLI: Received tools response with {} servers",
4921 all_tools.len()
4922 );
4923 if let Some(tools) = all_tools.get(&name) {
4924 crate::debug_log!(
4925 "CLI: Server '{}' has {} tools",
4926 name,
4927 tools.len()
4928 );
4929 if tools.is_empty() {
4930 println!("No functions found for server '{}'", name);
4931 } else {
4932 println!(
4933 "\n{} Available functions:",
4934 "Functions:".bold().blue()
4935 );
4936 for tool in tools {
4937 println!(
4938 " {} {} - {}",
4939                                                "•".blue(),
4940 tool.name.bold(),
4941 tool.description
4942 .as_ref()
4943 .map(|s| s.as_ref())
4944 .unwrap_or("No description")
4945 );
4946
4947 if let Some(properties) =
4948 tool.input_schema.get("properties")
4949 {
4950 if let Some(props_obj) = properties.as_object() {
4951 if !props_obj.is_empty() {
4952 println!(
4953 " Parameters: {}",
4954 props_obj
4955 .keys()
4956 .map(|k| k.as_str())
4957 .collect::<Vec<_>>()
4958 .join(", ")
4959 .dimmed()
4960 );
4961 }
4962 }
4963 }
4964 }
4965 }
4966 } else {
4967 crate::debug_log!(
4968 "CLI: No tools found for server '{}' in response",
4969 name
4970 );
4971 println!("No functions found for server '{}'", name);
4972 }
4973 }
4974 Err(e) => {
4975 crate::debug_log!("CLI: Failed to list tools: {}", e);
4976 anyhow::bail!(
4977 "Failed to list functions from MCP server '{}': {}",
4978 name,
4979 e
4980 );
4981 }
4982 }
4983 }
4984 Err(e) => {
4985 crate::debug_log!("CLI: Failed to connect to server '{}': {}", name, e);
4986 anyhow::bail!("Failed to connect to MCP server '{}': {}", name, e);
4987 }
4988 }
4989 } else {
4990 anyhow::bail!("MCP server '{}' not found", name);
4991 }
4992 }
4993 crate::cli::McpCommands::Invoke {
4994 name,
4995 function,
4996 args,
4997 } => {
4998 let config = McpConfig::load()?;
4999
5000 if config.get_server(&name).is_some() {
5001 println!(
5002 "{} Invoking function '{}' on MCP server '{}'...",
5003                    "⚡".yellow(),
5004 function,
5005 name
5006 );
5007 if !args.is_empty() {
5008 println!("Arguments: {}", args.join(", ").dimmed());
5009 }
5010
5011 let daemon_client = crate::mcp_daemon::DaemonClient::new()?;
5013
5014 match daemon_client.ensure_server_connected(&name).await {
5016 Ok(_) => {
5017 let params = if args.is_empty() {
5019 serde_json::json!({})
5020 } else {
5021 let mut params_obj = serde_json::Map::new();
5022 for arg in args {
5023 if let Some((key, value)) = arg.split_once('=') {
5024 params_obj.insert(key.to_string(), serde_json::json!(value));
5025 } else {
5026 anyhow::bail!(
5027 "Invalid argument format: '{}'. Expected 'key=value'",
5028 arg
5029 );
5030 }
5031 }
5032 serde_json::json!(params_obj)
5033 };
5034
5035 match daemon_client.call_tool(&name, &function, params).await {
5036 Ok(result) => {
5037 println!("\n{} Result:", "Response:".bold().green());
5038 println!("{}", serde_json::to_string_pretty(&result)?);
5039 }
5040 Err(e) => {
5041 anyhow::bail!(
5042 "Failed to invoke function '{}' on MCP server '{}': {}",
5043 function,
5044 name,
5045 e
5046 );
5047 }
5048 }
5049
5050                    println!("\n{} Tool invocation completed. Server connection remains active in daemon.", "ℹ️".blue());
5052 println!(
5053 "{} Use 'lc mcp stop {}' if you want to close the server connection.",
5054                        "💡".yellow(),
5055 name
5056 );
5057 }
5058 Err(e) => {
5059 anyhow::bail!("Failed to connect to MCP server '{}': {}", name, e);
5060 }
5061 }
5062 } else {
5063 anyhow::bail!("MCP server '{}' not found", name);
5064 }
5065 }
5066 }
5067
5068 Ok(())
5069}
5070
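/// Resolves a comma-separated list of MCP server names, connects to each via the
/// daemon, and converts their tools into OpenAI-style function tools. Returns the
/// collected tools (None if no tools were found) plus the names of the servers
/// that connected successfully.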
5071pub async fn fetch_mcp_tools(
5073 tools_str: &str,
5074) -> Result<(Option<Vec<crate::provider::Tool>>, Vec<String>)> {
5075 use crate::mcp::McpConfig;
5076
5077 let server_names: Vec<&str> = tools_str.split(',').map(|s| s.trim()).collect();
5079 let mut all_tools = Vec::new();
5080 let mut valid_server_names = Vec::new();
5081
5082 let config = McpConfig::load()?;
5084
5085 let daemon_client = crate::mcp_daemon::DaemonClient::new()?;
5087
5088 for server_name in server_names {
5089 if server_name.is_empty() {
5090 continue;
5091 }
5092
5093 crate::debug_log!("Fetching tools from MCP server '{}'", server_name);
5094
5095 if config.get_server(server_name).is_some() {
5097 match daemon_client.ensure_server_connected(server_name).await {
5099 Ok(_) => {
5100 crate::debug_log!("Successfully connected to MCP server '{}'", server_name);
5101 valid_server_names.push(server_name.to_string());
5102 }
5103 Err(e) => {
5104 eprintln!(
5105 "Warning: Failed to connect to MCP server '{}': {}",
5106 server_name, e
5107 );
5108 continue;
5109 }
5110 }
5111 } else {
5112 eprintln!(
5113 "Warning: MCP server '{}' not found in configuration",
5114 server_name
5115 );
5116 continue;
5117 }
5118 }
5119
5120 for server_name in &valid_server_names {
5122 match daemon_client.list_tools(server_name).await {
5123 Ok(server_tools) => {
5124 if let Some(tools) = server_tools.get(server_name) {
5125 crate::debug_log!(
5126 "Retrieved {} tools from server '{}'",
5127 tools.len(),
5128 server_name
5129 );
5130
5131 for tool in tools {
5132 let openai_tool = crate::provider::Tool {
5134 tool_type: "function".to_string(),
5135 function: crate::provider::Function {
5136 name: tool.name.to_string(),
5137 description: tool
5138 .description
5139 .as_ref()
5140 .map(|s| s.to_string())
5141 .unwrap_or_else(|| "No description".to_string()),
5142 parameters: serde_json::to_value(&*tool.input_schema)
5143 .unwrap_or_else(|_| {
5144 serde_json::json!({
5145 "type": "object",
5146 "properties": {},
5147 "required": []
5148 })
5149 }),
5150 },
5151 };
5152
5153 all_tools.push(openai_tool);
5154 crate::debug_log!(
5155 "Added tool '{}' from server '{}'",
5156 tool.name,
5157 server_name
5158 );
5159 }
5160 }
5161 }
5162 Err(e) => {
5163 eprintln!(
5164 "Warning: Failed to list tools from MCP server '{}': {}",
5165 server_name, e
5166 );
5167 }
5168 }
5169 }
5170
5171 if all_tools.is_empty() {
5174 crate::debug_log!("No tools found from any specified MCP servers");
5175 Ok((None, valid_server_names))
5176 } else {
5177 crate::debug_log!("Total {} tools fetched from MCP servers", all_tools.len());
5178 Ok((Some(all_tools), valid_server_names))
5179 }
5180}
5181
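/// Prints the available embedding models grouped by provider, including context
/// length and input pricing where known.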
5182fn display_embedding_models(models: &[crate::model_metadata::ModelMetadata]) -> Result<()> {
5184 use colored::Colorize;
5185
5186 println!(
5187 "\n{} Available embedding models ({} total):",
5188 "Embedding Models:".bold().blue(),
5189 models.len()
5190 );
5191
5192 let mut current_provider = String::new();
5193 for model in models {
5194 if model.provider != current_provider {
5195 current_provider = model.provider.clone();
5196 println!("\n{}", format!("{}:", current_provider).bold().green());
5197 }
5198
5199 let mut info_parts = Vec::new();
5201 if let Some(ctx) = model.context_length {
5202 if ctx >= 1000000 {
5203 info_parts.push(format!("{}m ctx", ctx / 1000000));
5204 } else if ctx >= 1000 {
5205 info_parts.push(format!("{}k ctx", ctx / 1000));
5206 } else {
5207 info_parts.push(format!("{} ctx", ctx));
5208 }
5209 }
5210 if let Some(input_price) = model.input_price_per_m {
5211 info_parts.push(format!("${:.2}/M", input_price));
5212 }
5213
5214 let model_display = if let Some(ref display_name) = model.display_name {
5216 if display_name != &model.id {
5217 format!("{} ({})", model.id, display_name)
5218 } else {
5219 model.id.clone()
5220 }
5221 } else {
5222 model.id.clone()
5223 };
5224
5225 print!(" {} {}", "âĸ".blue(), model_display.bold());
5226
5227 if !info_parts.is_empty() {
5228 print!(" ({})", info_parts.join(", ").dimmed());
5229 }
5230
5231 println!();
5232 }
5233
5234 Ok(())
5235}
5236
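/// Handles `lc embed`: generates embeddings for the given text and/or files
/// (expanded via glob patterns and split into chunks), optionally storing the
/// resulting vectors in a named vector database, e.g.
/// `lc embed -d <name> -m <model> "your text"`.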
5237pub async fn handle_embed_command(
5239 model: String,
5240 provider: Option<String>,
5241 database: Option<String>,
5242 files: Vec<String>,
5243 text: Option<String>,
5244 debug: bool,
5245) -> Result<()> {
5246 use colored::Colorize;
5247
5248 if debug {
5250 set_debug_mode(true);
5251 }
5252
5253 if text.is_none() && files.is_empty() {
5255 anyhow::bail!("Either text or files must be provided for embedding");
5256 }
5257
5258 let config = config::Config::load()?;
5259
5260 let (provider_name, resolved_model) =
5262 resolve_model_and_provider(&config, provider, Some(model))?;
5263
5264 let provider_config = config.get_provider(&provider_name)?;
5266
5267 if provider_config.api_key.is_none() {
5268 anyhow::bail!(
5269 "No API key configured for provider '{}'. Add one with 'lc keys add {}'",
5270 provider_name,
5271 provider_name
5272 );
5273 }
5274
5275 let mut config_mut = config.clone();
5276 let client = chat::create_authenticated_client(&mut config_mut, &provider_name).await?;
5277
5278 if config_mut.get_cached_token(&provider_name) != config.get_cached_token(&provider_name) {
5280 config_mut.save()?;
5281 }
5282
5283    println!("{} Starting embedding process...", "🚀".blue());
5284    println!("{} Model: {}", "📝".blue(), resolved_model);
5285    println!("{} Provider: {}", "🏢".blue(), provider_name);
5286
5287 let mut total_embeddings = 0;
5288 let mut total_tokens = 0;
5289
5290 if !files.is_empty() {
5292        println!("{} Processing files with glob patterns...", "📁".blue());
5293
5294 let file_paths = crate::vector_db::FileProcessor::expand_file_patterns(&files)?;
5296
5297 if file_paths.is_empty() {
5298 println!(
5299 "{} No text files found matching the patterns",
5300                "⚠️".yellow()
5301 );
5302 } else {
5303 println!(
5304 "{} Found {} text files to process",
5305                "✅".green(),
5306 file_paths.len()
5307 );
5308
5309 for file_path in file_paths {
5310                println!("\n{} Processing file: {}", "📄".blue(), file_path.display());
5311
5312 match crate::vector_db::FileProcessor::process_file(&file_path) {
5314 Ok(chunks) => {
5315                        println!("{} Split into {} chunks", "✂️".blue(), chunks.len());
5316
5317 for (chunk_index, chunk) in chunks.iter().enumerate() {
5319 let embedding_request = crate::provider::EmbeddingRequest {
5320 model: resolved_model.clone(),
5321 input: chunk.clone(),
5322 encoding_format: Some("float".to_string()),
5323 };
5324
5325 match client.embeddings(&embedding_request).await {
5326 Ok(response) => {
5327 if let Some(embedding_data) = response.data.first() {
5328 total_embeddings += 1;
5329 total_tokens += response.usage.total_tokens;
5330
5331 if let Some(db_name) = &database {
5333 match crate::vector_db::VectorDatabase::new(db_name) {
5334 Ok(vector_db) => {
5335 let file_path_str = file_path.to_string_lossy();
5336 match vector_db.add_vector_with_metadata(
5337 chunk,
5338 &embedding_data.embedding,
5339 &resolved_model,
5340 &provider_name,
5341 Some(&file_path_str),
5342 Some(chunk_index as i32),
5343 Some(chunks.len() as i32),
5344 ) {
5345 Ok(id) => {
5346 println!(" {} Chunk {}/{} stored with ID: {}",
5347                                                                "💾".green(), chunk_index + 1, chunks.len(), id);
5348 }
5349 Err(e) => {
5350 eprintln!(" Warning: Failed to store chunk {}: {}", chunk_index + 1, e);
5351 }
5352 }
5353 }
5354 Err(e) => {
5355 eprintln!(" Warning: Failed to create/open vector database '{}': {}", db_name, e);
5356 }
5357 }
5358 } else {
5359 println!(
5361 " {} Chunk {}/{} embedded ({} dimensions)",
5362                                            "✅".green(),
5363 chunk_index + 1,
5364 chunks.len(),
5365 embedding_data.embedding.len()
5366 );
5367 }
5368 }
5369 }
5370 Err(e) => {
5371 eprintln!(
5372 " Warning: Failed to embed chunk {}: {}",
5373 chunk_index + 1,
5374 e
5375 );
5376 }
5377 }
5378 }
5379 }
5380 Err(e) => {
5381 eprintln!(
5382 "Warning: Failed to process file '{}': {}",
5383 file_path.display(),
5384 e
5385 );
5386 }
5387 }
5388 }
5389 }
5390 }
5391
5392 if let Some(text_content) = text {
5394        println!("\n{} Processing text input...", "📝".blue());
5395 println!(
5396 "{} Text: \"{}\"",
5397            "💬".blue(),
5398 if text_content.len() > 50 {
5399 format!("{}...", &text_content[..50])
5400 } else {
5401 text_content.clone()
5402 }
5403 );
5404
5405 let embedding_request = crate::provider::EmbeddingRequest {
5406 model: resolved_model.clone(),
5407 input: text_content.clone(),
5408 encoding_format: Some("float".to_string()),
5409 };
5410
5411 match client.embeddings(&embedding_request).await {
5412 Ok(response) => {
5413 if let Some(embedding_data) = response.data.first() {
5414 total_embeddings += 1;
5415 total_tokens += response.usage.total_tokens;
5416
5417 println!(
5418 "{} Vector dimensions: {}",
5419                    "📊".blue(),
5420 embedding_data.embedding.len()
5421 );
5422
5423 let embedding = &embedding_data.embedding;
5425 if embedding.len() > 10 {
5426                    println!("\n{} Vector preview:", "🔍".blue());
5427 print!(" [");
5428 for (i, val) in embedding.iter().take(5).enumerate() {
5429 if i > 0 {
5430 print!(", ");
5431 }
5432 print!("{:.6}", val);
5433 }
5434 print!(" ... ");
5435 for (i, val) in embedding.iter().skip(embedding.len() - 5).enumerate() {
5436 if i > 0 {
5437 print!(", ");
5438 }
5439 print!("{:.6}", val);
5440 }
5441 println!("]");
5442 }
5443
5444 if let Some(db_name) = &database {
5446 match crate::vector_db::VectorDatabase::new(db_name) {
5447 Ok(vector_db) => {
5448 match vector_db.add_vector(
5449 &text_content,
5450 &embedding,
5451 &resolved_model,
5452 &provider_name,
5453 ) {
5454 Ok(id) => {
5455 println!(
5456 "\n{} Stored in vector database '{}' with ID: {}",
5457                                        "💾".green(),
5458 db_name,
5459 id
5460 );
5461 }
5462 Err(e) => {
5463 eprintln!(
5464 "Warning: Failed to store in vector database: {}",
5465 e
5466 );
5467 }
5468 }
5469 }
5470 Err(e) => {
5471 eprintln!(
5472 "Warning: Failed to create/open vector database '{}': {}",
5473 db_name, e
5474 );
5475 }
5476 }
5477 }
5478
5479 if files.is_empty() {
5481 println!("\n{} Full vector (JSON):", "đ".dimmed());
5483 println!("{}", serde_json::to_string(&embedding)?);
5484 }
5485 }
5486 }
5487 Err(e) => {
5488 anyhow::bail!("Failed to generate embeddings for text: {}", e);
5489 }
5490 }
5491 }
5492
5493    println!("\n{} Embedding process completed!", "🎉".green());
5495    println!(
5496        "{} Total embeddings generated: {}",
5497        "📊".blue(),
5498        total_embeddings
5499    );
5500    println!("{} Total tokens used: {}", "💰".yellow(), total_tokens);
5501
5502 if let Some(db_name) = &database {
5503 println!(
5504 "{} All embeddings stored in database: {}",
5505        "💾".green(),
5506 db_name
5507 );
5508 }
5509
5510 Ok(())
5511}
5512
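/// Handles `lc similar`: embeds the query with the database's stored model (or an
/// explicitly provided one) and prints the closest stored entries with similarity scores.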
5513pub async fn handle_similar_command(
5515 model: Option<String>,
5516 provider: Option<String>,
5517 database: String,
5518 limit: usize,
5519 query: String,
5520) -> Result<()> {
5521 use colored::Colorize;
5522
5523 let vector_db = crate::vector_db::VectorDatabase::new(&database)?;
5525
5526 let count = vector_db.count()?;
5528 if count == 0 {
5529 anyhow::bail!(
5530 "Vector database '{}' is empty. Add some vectors first using 'lc embed -d {}'",
5531 database,
5532 database
5533 );
5534 }
5535
5536 let (resolved_model, resolved_provider) = match (&model, &provider) {
5538 (Some(m), Some(p)) => (m.clone(), p.clone()),
5539 _ => {
5540 if let Some((db_model, db_provider)) = vector_db.get_model_info()? {
5541 if model.is_some() || provider.is_some() {
5542 println!(
5543 "{} Using model from database: {}:{}",
5544                    "ℹ️".blue(),
5545 db_provider,
5546 db_model
5547 );
5548 }
5549 (db_model, db_provider)
5550 } else {
5551 anyhow::bail!(
5552 "No model specified and database '{}' has no stored model info",
5553 database
5554 );
5555 }
5556 }
5557 };
5558
5559 let config = config::Config::load()?;
5560
5561 let (provider_name, model_name) =
5563 resolve_model_and_provider(&config, Some(resolved_provider), Some(resolved_model))?;
5564
5565 let provider_config = config.get_provider(&provider_name)?;
5567
5568 if provider_config.api_key.is_none() {
5569 anyhow::bail!(
5570 "No API key configured for provider '{}'. Add one with 'lc keys add {}'",
5571 provider_name,
5572 provider_name
5573 );
5574 }
5575
5576 let mut config_mut = config.clone();
5577 let client = chat::create_authenticated_client(&mut config_mut, &provider_name).await?;
5578
5579 if config_mut.get_cached_token(&provider_name) != config.get_cached_token(&provider_name) {
5581 config_mut.save()?;
5582 }
5583
5584 let embedding_request = crate::provider::EmbeddingRequest {
5586 model: model_name.clone(),
5587 input: query.clone(),
5588 encoding_format: Some("float".to_string()),
5589 };
5590
5591    println!("{} Searching for similar content...", "🔍".blue());
5592    println!("{} Database: {}", "📊".blue(), database);
5593    println!(
5594        "{} Query: \"{}\"",
5595        "🔍".blue(),
5596 if query.len() > 50 {
5597 format!("{}...", &query[..50])
5598 } else {
5599 query.clone()
5600 }
5601 );
5602
5603 match client.embeddings(&embedding_request).await {
5604 Ok(response) => {
5605 if let Some(embedding_data) = response.data.first() {
5606 let query_vector = &embedding_data.embedding;
5607
5608 let similar_results = vector_db.find_similar(query_vector, limit)?;
5610
5611 if similar_results.is_empty() {
5612 println!(
5613 "\n{} No similar content found in database '{}'",
5614                        "❌".red(),
5615 database
5616 );
5617 } else {
5618 println!(
5619 "\n{} Found {} similar results:",
5620                        "✅".green(),
5621 similar_results.len()
5622 );
5623
5624 for (i, (entry, similarity)) in similar_results.iter().enumerate() {
5625 let similarity_percent = (similarity * 100.0).round() as u32;
5626 let similarity_color = if similarity_percent >= 80 {
5627 format!("{}%", similarity_percent).green()
5628 } else if similarity_percent >= 60 {
5629 format!("{}%", similarity_percent).yellow()
5630 } else {
5631 format!("{}%", similarity_percent).red()
5632 };
5633
5634 println!(
5635 "\n{} {} (Similarity: {})",
5636 format!("{}.", i + 1).bold(),
5637 similarity_color,
5638 format!("ID: {}", entry.id).dimmed()
5639 );
5640 println!(" {}", entry.text);
5641 println!(
5642 " {}",
5643 format!(
5644 "Added: {}",
5645 entry.created_at.format("%Y-%m-%d %H:%M:%S UTC")
5646 )
5647 .dimmed()
5648 );
5649 }
5650 }
5651 } else {
5652 anyhow::bail!("No embedding data in response");
5653 }
5654 }
5655 Err(e) => {
5656 anyhow::bail!("Failed to generate query embedding: {}", e);
5657 }
5658 }
5659
5660 Ok(())
5661}
5662
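/// Handles the `lc vectors` subcommands: list, delete, and inspect local vector databases.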
5663pub async fn handle_vectors_command(command: crate::cli::VectorCommands) -> Result<()> {
5665 use colored::Colorize;
5666
5667 match command {
5668 crate::cli::VectorCommands::List => {
5669 let databases = crate::vector_db::VectorDatabase::list_databases()?;
5670
5671 if databases.is_empty() {
5672 println!("No vector databases found.");
5673 println!(
5674 "Create one by running: {}",
5675 "lc embed -d <name> -m <model> \"your text\"".dimmed()
5676 );
5677 } else {
5678                println!("\n{} Vector databases:", "📊".bold().blue());
5679
5680 for db_name in databases {
5681 match crate::vector_db::VectorDatabase::new(&db_name) {
5682 Ok(db) => {
5683 let count = db.count().unwrap_or(0);
5684 let model_info = db.get_model_info().unwrap_or(None);
5685
5686                            print!("  {} {} ({} vectors)", "•".blue(), db_name.bold(), count);
5687
5688 if let Some((model, provider)) = model_info {
5689 print!(" - {}:{}", provider.dimmed(), model.dimmed());
5690 }
5691
5692 println!();
5693 }
5694 Err(_) => {
5695                            println!("  {} {} (error reading)", "•".red(), db_name.bold());
5696 }
5697 }
5698 }
5699 }
5700 }
5701 crate::cli::VectorCommands::Delete { name } => {
5702 let databases = crate::vector_db::VectorDatabase::list_databases()?;
5704 if !databases.contains(&name) {
5705 anyhow::bail!("Vector database '{}' not found", name);
5706 }
5707
5708 crate::vector_db::VectorDatabase::delete_database(&name)?;
5709 println!(
5710 "{} Vector database '{}' deleted successfully",
5711                "✓".green(),
5712 name
5713 );
5714 }
5715 crate::cli::VectorCommands::Info { name } => {
5716 let databases = crate::vector_db::VectorDatabase::list_databases()?;
5717 if !databases.contains(&name) {
5718 anyhow::bail!("Vector database '{}' not found", name);
5719 }
5720
5721 let db = crate::vector_db::VectorDatabase::new(&name)?;
5722 let count = db.count()?;
5723 let model_info = db.get_model_info()?;
5724
5725            println!("\n{} Vector database: {}", "📊".bold().blue(), name.bold());
5726 println!("Vectors: {}", count);
5727
5728 if let Some((model, provider)) = model_info {
5729 println!("Model: {}:{}", provider, model);
5730 } else {
5731 println!("Model: {}", "Not set".dimmed());
5732 }
5733
5734 if count > 0 {
5735                println!("\n{} Recent entries:", "📝".bold().blue());
5736 let vectors = db.get_all_vectors()?;
5737 for (i, entry) in vectors.iter().take(5).enumerate() {
5738 let preview = if entry.text.len() > 60 {
5739 format!("{}...", &entry.text[..60])
5740 } else {
5741 entry.text.clone()
5742 };
5743
5744 let source_info = if let Some(ref file_path) = entry.file_path {
5745 if let (Some(chunk_idx), Some(total_chunks)) =
5746 (entry.chunk_index, entry.total_chunks)
5747 {
5748 format!(" [{}:{}/{}]", file_path, chunk_idx + 1, total_chunks)
5749 } else {
5750 format!(" [{}]", file_path)
5751 }
5752 } else {
5753 String::new()
5754 };
5755
5756 println!(
5757 " {}. {}{} ({})",
5758 i + 1,
5759 preview,
5760 source_info.dimmed(),
5761 entry
5762 .created_at
5763 .format("%Y-%m-%d %H:%M")
5764 .to_string()
5765 .dimmed()
5766 );
5767 }
5768
5769 if vectors.len() > 5 {
5770 println!(" ... and {} more", vectors.len() - 5);
5771 }
5772 }
5773 }
5774 }
5775
5776 Ok(())
5777}
5778
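/// Retrieves RAG context for a prompt: embeds the query with the model recorded in
/// the vector database and returns the top matches (similarity > 0.3) as a bulleted
/// string, or an empty string when nothing relevant is found.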
5779pub async fn retrieve_rag_context(
5781 db_name: &str,
5782 query: &str,
5783 _client: &crate::chat::LLMClient,
5784 _model: &str,
5785 _provider: &str,
5786) -> Result<String> {
5787 crate::debug_log!(
5788 "RAG: Starting context retrieval for database '{}' with query '{}'",
5789 db_name,
5790 query
5791 );
5792
5793 let vector_db = crate::vector_db::VectorDatabase::new(db_name)?;
5795 crate::debug_log!("RAG: Successfully opened vector database '{}'", db_name);
5796
5797 let count = vector_db.count()?;
5799 crate::debug_log!("RAG: Database '{}' contains {} vectors", db_name, count);
5800 if count == 0 {
5801 crate::debug_log!("RAG: Database is empty, returning empty context");
5802 return Ok(String::new());
5803 }
5804
5805 let (db_model, db_provider) = if let Some((m, p)) = vector_db.get_model_info()? {
5807 crate::debug_log!("RAG: Using database model '{}' from provider '{}'", m, p);
5808 (m, p)
5809 } else {
5810 crate::debug_log!("RAG: No model info in database, returning empty context");
5811 return Ok(String::new());
5812 };
5813
5814 let config = config::Config::load()?;
5816 let mut config_mut = config.clone();
5817 let embedding_client = chat::create_authenticated_client(&mut config_mut, &db_provider).await?;
5818 crate::debug_log!(
5819 "RAG: Created embedding client for provider '{}'",
5820 db_provider
5821 );
5822
5823 let embedding_request = crate::provider::EmbeddingRequest {
5825 model: db_model.clone(),
5826 input: query.to_string(),
5827 encoding_format: Some("float".to_string()),
5828 };
5829
5830 crate::debug_log!(
5831 "RAG: Generating embedding for query using model '{}'",
5832 db_model
5833 );
5834
5835 let response = embedding_client.embeddings(&embedding_request).await?;
5837 crate::debug_log!("RAG: Successfully generated embedding for query");
5838
5839 if let Some(embedding_data) = response.data.first() {
5840 let query_vector = &embedding_data.embedding;
5841 crate::debug_log!("RAG: Query vector has {} dimensions", query_vector.len());
5842
5843 let similar_results = vector_db.find_similar(query_vector, 3)?;
5845 crate::debug_log!("RAG: Found {} similar results", similar_results.len());
5846
5847 if similar_results.is_empty() {
5848 crate::debug_log!("RAG: No similar results found, returning empty context");
5849 return Ok(String::new());
5850 }
5851
5852 let mut context = String::new();
5854 let mut included_count = 0;
5855 for (entry, similarity) in similar_results {
5856 crate::debug_log!(
5857 "RAG: Result similarity: {:.3} for text: '{}'",
5858 similarity,
5859 &entry.text[..50.min(entry.text.len())]
5860 );
5861 if similarity > 0.3 {
5863 context.push_str(&format!("- {}\n", entry.text));
5864 included_count += 1;
5865 }
5866 }
5867
5868 crate::debug_log!(
5869 "RAG: Included {} results in context (similarity > 0.3)",
5870 included_count
5871 );
5872 crate::debug_log!("RAG: Final context length: {} characters", context.len());
5873
5874 Ok(context)
5875 } else {
5876 crate::debug_log!("RAG: No embedding data in response, returning empty context");
5877 Ok(String::new())
5878 }
5879}
5880
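/// Dispatches `lc webchatproxy` (alias `lc w`) subcommands: provider management and
/// starting, stopping, or listing proxy servers.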
5881pub async fn handle_webchatproxy_command(command: WebChatProxyCommands) -> Result<()> {
5883 match command {
5884 WebChatProxyCommands::Providers { command } => match command {
5885 Some(WebChatProxyProviderCommands::List) => {
5886 handle_webchatproxy_providers_list().await?;
5887 }
5888 Some(WebChatProxyProviderCommands::Kagi { command }) => {
5889 handle_webchatproxy_kagi_command(command).await?;
5890 }
5891 None => {
5892 handle_webchatproxy_providers_list().await?;
5893 }
5894 },
5895 WebChatProxyCommands::Start {
5896 provider,
5897 port,
5898 host,
5899 key,
5900 generate_key,
5901 daemon,
5902 } => {
5903 handle_webchatproxy_start(provider, port, host, key, generate_key, daemon).await?;
5904 }
5905 WebChatProxyCommands::Stop { provider } => {
5906 handle_webchatproxy_stop(provider).await?;
5907 }
5908 WebChatProxyCommands::List => {
5909 handle_webchatproxy_list().await?;
5910 }
5911 }
5912 Ok(())
5913}
5914
5915async fn handle_webchatproxy_providers_list() -> Result<()> {
5916 use colored::Colorize;
5917
5918 println!("\n{}", "Supported WebChatProxy Providers:".bold().blue());
5919    println!("  {} {} - Kagi Assistant API", "•".blue(), "kagi".bold());
5920 println!("\n{}", "Usage:".bold().blue());
5921 println!(
5922 " {} Set auth: {}",
5923        "•".blue(),
5924 "lc w providers set kagi auth <token>".dimmed()
5925 );
5926 println!(
5927 " {} Start proxy: {}",
5928        "•".blue(),
5929 "lc w start kagi".dimmed()
5930 );
5931
5932 Ok(())
5933}
5934
5935async fn fetch_kagi_models() -> Result<Vec<crate::webchatproxy::KagiModelProfile>> {
5936 crate::webchatproxy::fetch_kagi_models().await
5937}
5938
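/// Handles Kagi-specific webchatproxy subcommands: storing the authentication token
/// and listing the available Kagi models with their capabilities.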
5939async fn handle_webchatproxy_kagi_command(command: WebChatProxyKagiCommands) -> Result<()> {
5940 use colored::Colorize;
5941 use std::io::{self, Write};
5942
5943 match command {
5944 WebChatProxyKagiCommands::Auth { token } => {
5945 let auth_token = if let Some(token) = token {
5946 token
5947 } else {
5948 print!("Enter authentication token for kagi: ");
5949 io::stdout().flush()?;
5951 rpassword::read_password()?
5952 };
5953
5954 let mut config = crate::webchatproxy::WebChatProxyConfig::load()?;
5956 config.set_provider_auth("kagi", &auth_token)?;
5957 config.save()?;
5958
5959            println!("{} Authentication set for provider 'kagi'", "✓".green());
5960 }
5961 WebChatProxyKagiCommands::Models => {
5962 match fetch_kagi_models().await {
5964 Ok(models) => {
5965 println!("\n{} Available Kagi models:", "Models:".bold().blue());
5966 for model in models {
5967 let mut capabilities = Vec::new();
5968                        if model.internet_access {
5969                            capabilities.push("🌐 web".blue());
5970                        }
5971                        if model.personalizations {
5972                            capabilities.push("👤 personal".magenta());
5973                        }
5974
5975 let mut info_parts = Vec::new();
5976 if let Some(ctx) = model.model_input_limit {
5977 if ctx >= 1000000 {
5978 info_parts.push(format!("{}m ctx", ctx / 1000000));
5979 } else if ctx >= 1000 {
5980 info_parts.push(format!("{}k ctx", ctx / 1000));
5981 } else {
5982 info_parts.push(format!("{} ctx", ctx));
5983 }
5984 }
5985
5986 print!(
5987 " {} {} ({})",
5988                            "•".blue(),
5989 model.model_name.bold(),
5990 model.model
5991 );
5992
5993 if !capabilities.is_empty() {
5994 let capability_strings: Vec<String> =
5995 capabilities.iter().map(|c| c.to_string()).collect();
5996 print!(" [{}]", capability_strings.join(" "));
5997 }
5998
5999 if !info_parts.is_empty() {
6000 print!(" ({})", info_parts.join(", ").dimmed());
6001 }
6002
6003 if let Some(description) = &model.scorecard.description {
6004 print!(" - {}", description.dimmed());
6005 }
6006
6007 if model.scorecard.recommended {
6008                            print!(" {}", "⭐ recommended".yellow());
6009 }
6010
6011 println!();
6012 }
6013 }
6014 Err(e) => {
6015                    eprintln!("{} Failed to fetch Kagi models: {}", "❌".red(), e);
6016 eprintln!("Make sure you have set your Kagi authentication token with:");
6017 eprintln!(" {}", "lc w p kagi auth".dimmed());
6018 }
6019 }
6020 }
6021 }
6022
6023 Ok(())
6024}
6025
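/// Starts the webchatproxy server for a provider (currently only `kagi`), either in
/// the foreground or as a daemon, optionally generating an API key for clients.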
6026async fn handle_webchatproxy_start(
6027 provider: String,
6028 port: u16,
6029 host: String,
6030 key: Option<String>,
6031 generate_key: bool,
6032 daemon: bool,
6033) -> Result<()> {
6034 use colored::Colorize;
6035
6036 if provider != "kagi" {
6037 anyhow::bail!(
6038 "Unsupported provider '{}'. Currently only 'kagi' is supported.",
6039 provider
6040 );
6041 }
6042
6043 let final_key = if generate_key {
6045 let generated_key = crate::proxy::generate_api_key();
6046 println!(
6047 "{} Generated API key: {}",
6048        "🔑".green(),
6049 generated_key.bold()
6050 );
6051 Some(generated_key)
6052 } else {
6053 key
6054 };
6055
6056 println!("\n{}", "WebChatProxy Server Configuration:".bold().blue());
6057 println!(" {} {}:{}", "Address:".bold(), host, port);
6058 println!(" {} {}", "Provider:".bold(), provider.green());
6059
6060 if final_key.is_some() {
6061 println!(" {} {}", "Authentication:".bold(), "Enabled".green());
6062 } else {
6063 println!(" {} {}", "Authentication:".bold(), "Disabled".yellow());
6064 }
6065
6066 println!("\n{}", "Available endpoints:".bold().blue());
6067    println!("  {} http://{}:{}/chat/completions", "•".blue(), host, port);
6068 println!(
6069 " {} http://{}:{}/v1/chat/completions",
6070        "•".blue(),
6071 host,
6072 port
6073 );
6074
6075 if daemon {
6076        println!("\n{} Starting in daemon mode...", "🚀".blue());
6077 println!(
6078 "{} Logs will be written to: ~/Library/Application Support/lc/{}.log",
6079 "đ".blue(),
6080 provider
6081 );
6082
6083 crate::webchatproxy::start_webchatproxy_daemon(host, port, provider.clone(), final_key)
6085 .await?;
6086 } else {
6087        println!("\n{} Press Ctrl+C to stop the server\n", "💡".yellow());
6088
6089 crate::webchatproxy::start_webchatproxy_server(host, port, provider, final_key).await?;
6091 }
6092
6093 Ok(())
6094}
6095
6096async fn handle_webchatproxy_stop(provider: String) -> Result<()> {
6097 use colored::Colorize;
6098
6099 if provider != "kagi" {
6100 anyhow::bail!(
6101 "Unsupported provider '{}'. Currently only 'kagi' is supported.",
6102 provider
6103 );
6104 }
6105
6106 println!(
6107 "{} Stopping webchatproxy server for '{}'...",
6108        "🛑".red(),
6109 provider
6110 );
6111
6112 match crate::webchatproxy::stop_webchatproxy_daemon(&provider).await {
6114 Ok(_) => {
6115 println!(
6116 "{} WebChatProxy server for '{}' stopped successfully",
6117                "✓".green(),
6118 provider
6119 );
6120 }
6121 Err(e) => {
6122 println!(
6123 "{} Failed to stop WebChatProxy server for '{}': {}",
6124                "⚠️".yellow(),
6125 provider,
6126 e
6127 );
6128 }
6129 }
6130
6131 Ok(())
6132}
6133
6134async fn handle_webchatproxy_list() -> Result<()> {
6135 use colored::Colorize;
6136
6137    println!("\n{} Running WebChatProxy servers:", "📋".bold().blue());
6138
6139 match crate::webchatproxy::list_webchatproxy_daemons().await {
6141 Ok(servers) => {
6142 if servers.is_empty() {
6143 println!("No WebChatProxy servers currently running.");
6144 } else {
6145 for (provider, info) in servers {
6146 println!(
6147 " {} {} - {}:{} (PID: {})",
6148                    "•".blue(),
6149 provider.bold(),
6150 info.host,
6151 info.port,
6152 info.pid
6153 );
6154 }
6155 }
6156 }
6157 Err(e) => {
6158 println!(
6159 "{} Failed to list WebChatProxy servers: {}",
6160            "⚠️".yellow(),
6161 e
6162 );
6163 }
6164 }
6165
6166 Ok(())
6167}
6168
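/// Dispatches `lc sync` subcommands (providers, configure, to, from) to the sync module.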
6169pub async fn handle_sync_command(command: SyncCommands) -> Result<()> {
6171 match command {
6172 SyncCommands::Providers => crate::sync::handle_sync_providers().await,
6173 SyncCommands::Configure { provider, command } => {
6174 crate::sync::handle_sync_configure(&provider, command).await
6175 }
6176 SyncCommands::To {
6177 provider,
6178 encrypted,
6179 debug,
6180 yes,
6181 } => {
6182 if debug {
6184 set_debug_mode(true);
6185 }
6186 crate::sync::handle_sync_to(&provider, encrypted, yes).await
6187 }
6188 SyncCommands::From {
6189 provider,
6190 encrypted,
6191 debug,
6192 yes,
6193 } => {
6194 if debug {
6196 set_debug_mode(true);
6197 }
6198 crate::sync::handle_sync_from(&provider, encrypted, yes).await
6199 }
6200 }
6201}
6202
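/// Handles `lc search`: search provider management and direct queries, with results
/// printed as JSON or Markdown.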
6203pub async fn handle_search_command(command: SearchCommands) -> Result<()> {
6205 use colored::Colorize;
6206
6207 match command {
6208 SearchCommands::Provider { command } => handle_search_provider_command(command).await,
6209 SearchCommands::Query {
6210 provider,
6211 query,
6212 format,
6213 count,
6214 } => {
6215 let engine = crate::search::SearchEngine::new()?;
6216
6217 println!(
6218 "{} Searching with {} for: '{}'",
6219                "🔍".blue(),
6220 provider.bold(),
6221 query
6222 );
6223
6224 match engine.search(&provider, &query, Some(count)).await {
6225 Ok(results) => match format.as_str() {
6226 "json" => {
6227 println!("{}", engine.format_results_json(&results)?);
6228 }
6229 "md" | "markdown" => {
6230 println!("{}", engine.format_results_markdown(&results));
6231 }
6232 _ => {
6233 anyhow::bail!("Invalid format '{}'. Use 'json' or 'md'", format);
6234 }
6235 },
6236 Err(e) => {
6237 anyhow::bail!("Search failed: {}", e);
6238 }
6239 }
6240
6241 Ok(())
6242 }
6243 }
6244}
6245
6246async fn handle_search_provider_command(command: SearchProviderCommands) -> Result<()> {
6247 use colored::Colorize;
6248
6249 match command {
6250 SearchProviderCommands::Add { name, url } => {
6251 let mut config = crate::search::SearchConfig::load()?;
6252
6253 match config.add_provider_auto(name.clone(), url.clone()) {
6255 Ok(_) => {
6256 config.save()?;
6257
6258 let provider_config = config.get_provider(&name)?;
6260 let provider_type = &provider_config.provider_type;
6261
6262 println!(
6263 "{} Search provider '{}' added successfully",
6264                    "✓".green(),
6265 name
6266 );
6267 println!(
6268 " Type: {} (auto-detected)",
6269 format!("{:?}", provider_type).to_lowercase()
6270 );
6271 println!(" URL: {}", url);
6272
6273 let api_key_header = provider_type.api_key_header();
6275 if !api_key_header.is_empty() {
6276                        println!("\n{} Don't forget to set the API key:", "💡".yellow());
6277 println!(
6278 " lc search provider set {} {} <your-api-key>",
6279 name, api_key_header
6280 );
6281 } else {
6282                    println!("\n{} No API key required for this provider!", "✅".green());
6283 }
6284 }
6285 Err(e) => {
6286 anyhow::bail!("Failed to add search provider: {}", e);
6287 }
6288 }
6289 }
6290 SearchProviderCommands::Delete { name } => {
6291 let mut config = crate::search::SearchConfig::load()?;
6292 config.delete_provider(&name)?;
6293 config.save()?;
6294
6295 println!(
6296 "{} Search provider '{}' deleted successfully",
6297                "✓".green(),
6298 name
6299 );
6300 }
6301 SearchProviderCommands::Set {
6302 provider,
6303 header_name,
6304 header_value,
6305 } => {
6306 let mut config = crate::search::SearchConfig::load()?;
6307 config.set_header(&provider, header_name.clone(), header_value)?;
6308 config.save()?;
6309
6310 println!(
6311 "{} Header '{}' set for search provider '{}'",
6312                "✓".green(),
6313 header_name,
6314 provider
6315 );
6316 }
6317 SearchProviderCommands::List => {
6318 let config = crate::search::SearchConfig::load()?;
6319 let providers = config.list_providers();
6320
6321 if providers.is_empty() {
6322 println!("No search providers configured.");
6323 println!(
6324 "Add one with: {}",
6325 "lc search provider add <name> <url>".dimmed()
6326 );
6327 } else {
6328 println!("\n{}", "Search Providers:".bold().blue());
6329
6330 for (name, provider_config) in providers {
6331 let has_auth = provider_config.headers.contains_key("X-Subscription-Token")
6332 || provider_config.headers.contains_key("Authorization")
6333 || provider_config.headers.contains_key("x-api-key")
6334 || provider_config.headers.contains_key("X-API-KEY");
6335                    let auth_status = if has_auth { "✓".green() } else { "✗".red() };
6336
6337 println!(
6338 " {} {} - {} (Auth: {})",
6339                        "•".blue(),
6340 name.bold(),
6341 provider_config.url,
6342 auth_status
6343 );
6344
6345 if !provider_config.headers.is_empty() {
6346 println!(" Headers: {}", provider_config.headers.len());
6347 }
6348 }
6349
6350 if let Some(default) = config.get_default_provider() {
6351 println!("\n{} {}", "Default provider:".bold(), default.green());
6352 }
6353 }
6354 }
6355 }
6356
6357 Ok(())
6358}
6359
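/// Parses a `provider:query` search spec (falling back to the default provider and the
/// user's prompt), runs the search, and prepends the results to the prompt. Returns
/// `true` if search context was added.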
6360async fn integrate_search_context(
6362 search_spec: &str,
6363 query: &str,
6364 enhanced_prompt: &mut String,
6365) -> Result<bool> {
6366 use colored::Colorize;
6367
6368 let (provider, search_query) = if search_spec.contains(':') {
6370 let parts: Vec<&str> = search_spec.splitn(2, ':').collect();
6371 (parts[0].to_string(), parts[1].to_string())
6372 } else {
6373 (search_spec.to_string(), query.to_string())
6375 };
6376
6377 let search_config = crate::search::SearchConfig::load()?;
6379 if !search_config.has_provider(&provider) {
6380 if let Some(default_provider) = search_config.get_default_provider() {
6382 if provider == "default" || provider.is_empty() {
6383 println!(
6384 "{} Using default search provider: {}",
6385                    "🔍".blue(),
6386 default_provider
6387 );
6388 return integrate_search_with_provider(
6389 default_provider,
6390 &search_query,
6391 enhanced_prompt,
6392 )
6393 .await;
6394 }
6395 }
6396 anyhow::bail!(
6397 "Search provider '{}' not found. Configure it with 'lc search provider add'",
6398 provider
6399 );
6400 }
6401
6402 integrate_search_with_provider(&provider, &search_query, enhanced_prompt).await
6403}
6404
6405async fn integrate_search_with_provider(
6406 provider: &str,
6407 search_query: &str,
6408 enhanced_prompt: &mut String,
6409) -> Result<bool> {
6410 use colored::Colorize;
6411
6412 let engine = crate::search::SearchEngine::new()?;
6413
6414    println!("{} Searching for: '{}'", "🔍".blue(), search_query);
6415
6416 match engine.search(provider, search_query, Some(5)).await {
6417 Ok(results) => {
6418 if results.results.is_empty() {
6419                println!("{} No search results found", "⚠️".yellow());
6420 return Ok(false);
6421 }
6422
6423 println!(
6424 "{} Found {} search results",
6425                "✅".green(),
6426 results.results.len()
6427 );
6428
6429 let search_context = engine.extract_context_for_llm(&results, 5);
6431
6432 *enhanced_prompt = format!("{}\n\nUser query: {}", search_context, enhanced_prompt);
6434
6435 Ok(true)
6436 }
6437 Err(e) => {
6438 anyhow::bail!("Search failed: {}", e);
6439 }
6440 }
6441}
6442
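/// Handles `lc image`: generates images from a prompt, printing returned URLs and
/// saving URL- or base64-encoded results to the output directory when one is given.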
6443pub async fn handle_image_command(
6445 prompt: String,
6446 model: Option<String>,
6447 provider: Option<String>,
6448 size: String,
6449 count: u32,
6450 output: Option<String>,
6451 debug: bool,
6452) -> Result<()> {
6453 use colored::Colorize;
6454 use std::fs;
6455 use std::io::{self, Write};
6456 use std::path::Path;
6457
6458 if debug {
6460 set_debug_mode(true);
6461 }
6462
6463 let config = config::Config::load()?;
6464
6465 let (provider_name, model_name) = resolve_model_and_provider(&config, provider, model)?;
6467
6468 let provider_config = config.get_provider(&provider_name)?;
6470
6471 if provider_config.api_key.is_none() {
6472 anyhow::bail!(
6473 "No API key configured for provider '{}'. Add one with 'lc keys add {}'",
6474 provider_name,
6475 provider_name
6476 );
6477 }
6478
6479 let mut config_mut = config.clone();
6480 let client = chat::create_authenticated_client(&mut config_mut, &provider_name).await?;
6481
6482 if config_mut.get_cached_token(&provider_name) != config.get_cached_token(&provider_name) {
6484 config_mut.save()?;
6485 }
6486
6487 println!(
6488 "{} Generating {} image(s) with prompt: \"{}\"",
6489        "🎨".blue(),
6490        count,
6491        prompt
6492    );
6493    println!("{} Model: {}", "🤖".blue(), model_name);
6494    println!("{} Provider: {}", "🏢".blue(), provider_name);
6495    println!("{} Size: {}", "📐".blue(), size);
6496
6497 let image_request = crate::provider::ImageGenerationRequest {
6499 prompt: prompt.clone(),
6500 model: Some(model_name.clone()),
6501 n: Some(count),
6502 size: Some(size.clone()),
6503 quality: Some("standard".to_string()),
6504 style: None,
6505 response_format: Some("url".to_string()),
6506 };
6507
6508 print!("{} ", "Generating...".dimmed());
6510 io::stdout().flush()?;
6511
6512 match client.generate_images(&image_request).await {
6513 Ok(response) => {
6514            print!("\r{}\r", " ".repeat(20));
6515            println!(
6516                "{} Successfully generated {} image(s)!",
6517                "✅".green(),
6518 response.data.len()
6519 );
6520
6521 let output_dir = if let Some(dir) = output {
6523 let path = Path::new(&dir);
6524 if !path.exists() {
6525 fs::create_dir_all(path)?;
6526                println!("{} Created output directory: {}", "📁".blue(), dir);
6527 }
6528 Some(dir)
6529 } else {
6530 None
6531 };
6532
6533 for (i, image_data) in response.data.iter().enumerate() {
6535 let image_num = i + 1;
6536
6537 if let Some(url) = &image_data.url {
6538 println!(
6539 "\n{} Image {}/{}",
6540                    "🖼️".blue(),
6541 image_num,
6542 response.data.len()
6543 );
6544 println!(" URL: {}", url);
6545
6546 if let Some(revised_prompt) = &image_data.revised_prompt {
6547 if revised_prompt != &prompt {
6548 println!(" Revised prompt: {}", revised_prompt.dimmed());
6549 }
6550 }
6551
6552 if let Some(ref dir) = output_dir {
6554 let filename = format!(
6555 "image_{}_{}.png",
6556 chrono::Utc::now().format("%Y%m%d_%H%M%S"),
6557 image_num
6558 );
6559 let filepath = Path::new(dir).join(&filename);
6560
6561 match download_image(url, &filepath).await {
6562 Ok(_) => {
6563                            println!("  {} Saved to: {}", "💾".green(), filepath.display());
6564 }
6565 Err(e) => {
6566                            eprintln!("  {} Failed to download image: {}", "❌".red(), e);
6567 }
6568 }
6569 }
6570 } else if let Some(b64_data) = &image_data.b64_json {
6571 println!(
6572 "\n{} Image {}/{} (Base64)",
6573                    "🖼️".blue(),
6574 image_num,
6575 response.data.len()
6576 );
6577
6578 let save_dir = output_dir.as_deref().unwrap_or(".");
6580 let filename = format!(
6581 "image_{}_{}.png",
6582 chrono::Utc::now().format("%Y%m%d_%H%M%S"),
6583 image_num
6584 );
6585 let filepath = Path::new(save_dir).join(&filename);
6586
6587 match save_base64_image(b64_data, &filepath) {
6588 Ok(_) => {
6589                            println!("  {} Saved to: {}", "💾".green(), filepath.display());
6590 }
6591 Err(e) => {
6592                            eprintln!("  {} Failed to save image: {}", "❌".red(), e);
6593 }
6594 }
6595
6596 if let Some(revised_prompt) = &image_data.revised_prompt {
6597 if revised_prompt != &prompt {
6598 println!(" Revised prompt: {}", revised_prompt.dimmed());
6599 }
6600 }
6601 }
6602 }
6603
6604 if output_dir.is_none() {
6605 let has_url_images = response.data.iter().any(|img| img.url.is_some());
6607 if has_url_images {
6608 println!(
6609 "\n{} Use --output <directory> to automatically download URL-based images",
6610                    "💡".yellow()
6611 );
6612 }
6613 }
6614 }
6615 Err(e) => {
6616            print!("\r{}\r", " ".repeat(20));
6617            anyhow::bail!("Failed to generate images: {}", e);
6618 }
6619 }
6620
6621 Ok(())
6622}
6623
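/// Downloads an image from a URL and writes the bytes to the given path.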
6624async fn download_image(url: &str, filepath: &std::path::Path) -> Result<()> {
6626 let response = reqwest::get(url).await?;
6627
6628 if !response.status().is_success() {
6629 anyhow::bail!("Failed to download image: HTTP {}", response.status());
6630 }
6631
6632 let bytes = response.bytes().await?;
6633 std::fs::write(filepath, bytes)?;
6634
6635 Ok(())
6636}
6637
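/// Decodes a base64-encoded image payload and writes it to the given path.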
6638fn save_base64_image(b64_data: &str, filepath: &std::path::Path) -> Result<()> {
6640 use base64::{engine::general_purpose, Engine as _};
6641
6642 let image_bytes = general_purpose::STANDARD.decode(b64_data)?;
6643 std::fs::write(filepath, image_bytes)?;
6644
6645 Ok(())
6646}
6647
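/// Handles the metadata dump command: lists cached metadata, dumps a single
/// provider's metadata by name, or dumps all providers when none is given.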
6648pub async fn handle_dump_metadata_command(provider: Option<String>, list: bool) -> Result<()> {
6650 use crate::dump_metadata::MetadataDumper;
6651
6652 if list {
6653 MetadataDumper::list_cached_metadata().await?;
6655 } else if let Some(provider_name) = provider {
6656 MetadataDumper::dump_provider_by_name(&provider_name).await?;
6658 } else {
6659 MetadataDumper::dump_all_metadata().await?;
6661 }
6662
6663 Ok(())
6664}
6665
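/// Handles `lc usage`: renders bar charts of token/request usage by day, week, month,
/// year, or model, or an overall overview when no subcommand is given.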
6666pub async fn handle_usage_command(
6668 command: Option<UsageCommands>,
6669 days: Option<u32>,
6670 tokens_only: bool,
6671 requests_only: bool,
6672 limit: usize,
6673) -> Result<()> {
6674 use crate::usage_stats::{UsageAnalyzer, BarChart, display_usage_overview};
6675 use colored::Colorize;
6676
6677 let analyzer = UsageAnalyzer::new()?;
6678 let stats = analyzer.get_usage_stats(days)?;
6679
6680 if stats.total_requests == 0 {
6681        println!("{} No usage data found", "ℹ️".blue());
6682 if days.is_some() {
6683 println!("Try expanding the time range or check if you have any logged interactions.");
6684 }
6685 return Ok(());
6686 }
6687
6688 match command {
6689 Some(UsageCommands::Daily { count }) => {
6690 let value_type = if tokens_only {
6691 "tokens"
6692 } else if requests_only {
6693 "requests"
6694 } else {
6695 "tokens"
6696 };
6697
6698 BarChart::render_time_series(
6699                "📅 Daily Usage",
6700 &stats.daily_usage,
6701 value_type,
6702 50,
6703 count.min(limit),
6704 );
6705 }
6706 Some(UsageCommands::Weekly { count }) => {
6707 let value_type = if tokens_only {
6708 "tokens"
6709 } else if requests_only {
6710 "requests"
6711 } else {
6712 "tokens"
6713 };
6714
6715 BarChart::render_time_series(
6716                "📊 Weekly Usage",
6717 &stats.weekly_usage,
6718 value_type,
6719 50,
6720 count.min(limit),
6721 );
6722 }
6723 Some(UsageCommands::Monthly { count }) => {
6724 let value_type = if tokens_only {
6725 "tokens"
6726 } else if requests_only {
6727 "requests"
6728 } else {
6729 "tokens"
6730 };
6731
6732 BarChart::render_time_series(
6733                "📊 Monthly Usage",
6734 &stats.monthly_usage,
6735 value_type,
6736 50,
6737 count.min(limit),
6738 );
6739 }
6740 Some(UsageCommands::Yearly { count }) => {
6741 let value_type = if tokens_only {
6742 "tokens"
6743 } else if requests_only {
6744 "requests"
6745 } else {
6746 "tokens"
6747 };
6748
6749 BarChart::render_time_series(
6750                "📊 Yearly Usage",
6751 &stats.yearly_usage,
6752 value_type,
6753 50,
6754 count.min(limit),
6755 );
6756 }
6757 Some(UsageCommands::Models { count }) => {
6758 let value_type = if tokens_only {
6759 "tokens"
6760 } else if requests_only {
6761 "requests"
6762 } else {
6763 "tokens"
6764 };
6765
6766 BarChart::render_horizontal(
6767                "🤖 Top Models by Usage",
6768 &stats.model_usage,
6769 value_type,
6770 50,
6771 count.min(limit),
6772 );
6773 }
6774 None => {
6775 display_usage_overview(&stats);
6777
6778 if !tokens_only && !requests_only {
6779 BarChart::render_horizontal(
6781                    "🤖 Top Models by Token Usage",
6782 &stats.model_usage,
6783 "tokens",
6784 50,
6785 limit.min(5),
6786 );
6787
6788 BarChart::render_time_series(
6789                    "📅 Recent Daily Usage (Tokens)",
6790 &stats.daily_usage,
6791 "tokens",
6792 50,
6793 limit.min(14),
6794 );
6795 } else if tokens_only {
6796 BarChart::render_horizontal(
6797                    "🤖 Top Models by Token Usage",
6798 &stats.model_usage,
6799 "tokens",
6800 50,
6801 limit.min(10),
6802 );
6803
6804 BarChart::render_time_series(
6805                    "📅 Recent Daily Token Usage",
6806 &stats.daily_usage,
6807 "tokens",
6808 50,
6809 limit.min(14),
6810 );
6811 } else if requests_only {
6812 BarChart::render_horizontal(
6813                    "🤖 Top Models by Request Count",
6814 &stats.model_usage,
6815 "requests",
6816 50,
6817 limit.min(10),
6818 );
6819
6820 BarChart::render_time_series(
6821                    "📅 Recent Daily Request Count",
6822 &stats.daily_usage,
6823 "requests",
6824 50,
6825 limit.min(14),
6826 );
6827 }
6828 }
6829 }
6830
6831 Ok(())
6832}
6833
6834pub async fn handle_completions_command(shell: CompletionShell) -> Result<()> {
6836 crate::completion::generate_completions(shell).await
6837}
6838
6839#[allow(dead_code)]
6841pub fn complete_providers() -> Vec<String> {
6842 crate::completion::get_available_providers()
6843}
6844
6845#[allow(dead_code)]
6846pub fn complete_models() -> Vec<String> {
6847 crate::completion::get_available_models()
6848}
6849
6850#[cfg(test)]
6852mod tests;