1use anyhow::{Context, Result};
6use serde::{Deserialize, Serialize};
7use std::collections::HashMap;
8use std::sync::Arc;
9use tokio::sync::{Mutex, RwLock};
10
11#[derive(Debug, Clone, Serialize, Deserialize)]
13pub enum DisplayTarget {
14 AppleTV {
15 name: String,
16 address: String,
17 },
18 Chromecast {
19 name: String,
20 uuid: String,
21 },
22 Miracast {
23 name: String,
24 address: String,
25 },
26 ESP32Display {
27 name: String,
28 address: String,
29 width: u16,
30 height: u16,
31 },
32 WebDashboard {
33 port: u16,
34 },
35 Terminal, Voice, }
38
/// How the shell currently formats its output: verbosity, target format,
/// visual theme, and the screen it is formatting for.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OutputMode {
    // How much detail to include.
    pub verbosity: VerbosityLevel,
    // Rendering format (text, HTML, voice, ...).
    pub format: OutputFormat,
    // Visual theme name; "default" unless changed.
    pub theme: String,
    // Characteristics of the screen being targeted.
    pub screen_config: ScreenConfig,
}
46
47#[derive(Debug, Clone, Serialize, Deserialize)]
48pub enum VerbosityLevel {
49 Verbose, Normal, Concise, Minimal, Silent, }
55
56#[derive(Debug, Clone, Serialize, Deserialize)]
57pub enum OutputFormat {
58 Text,
59 HTML,
60 Markdown,
61 JSON,
62 Graphics, Voice, }
65
66#[derive(Debug, Clone, Serialize, Deserialize)]
67pub struct ScreenConfig {
68 pub width: u16,
69 pub height: u16,
70 pub color_depth: u8,
71 pub refresh_rate: u8,
72 pub capabilities: Vec<String>, }
74
/// Core shell state: known displays, output configuration, and the
/// subsystems used to cast, speak, and reach the MCP backend.
///
/// Every field is wrapped in `Arc` plus an async lock so the shell can be
/// shared across tasks.
pub struct RustShell {
    // Registry of displays keyed by user-facing name.
    displays: Arc<RwLock<HashMap<String, DisplayTarget>>>,
    // Name of the display currently receiving output, if any.
    active_display: Arc<RwLock<Option<String>>>,
    // Current verbosity/format/theme settings.
    pub output_mode: Arc<RwLock<OutputMode>>,
    // Bridge to the MCP backend for non-builtin commands.
    mcp_interface: Arc<MCPInterface>,
    // Per-protocol casting backends.
    cast_manager: Arc<CastManager>,
    // Detects when the user switches to voice interaction.
    voice_detector: Arc<VoiceDetector>,
    // Past commands. NOTE(review): never written in this file — confirm
    // whether history recording is still planned.
    command_history: Arc<Mutex<Vec<String>>>,
    // Ambient session context (location, project, conversation mode).
    context_state: Arc<RwLock<ContextState>>,
}
86
/// Connection state for the MCP backend.
struct MCPInterface {
    // TCP connection to the backend; `None` until a connection is made.
    socket: Arc<Mutex<Option<tokio::net::TcpStream>>>,
    // Dispatch table for binary endpoints.
    binary_api: BinaryAPI,
}
92
/// Handler for one binary endpoint: raw request bytes in, response bytes out.
type EndpointHandler = Box<dyn Fn(&[u8]) -> Vec<u8> + Send + Sync>;

/// Table mapping endpoint names to their handlers.
struct BinaryAPI {
    endpoints: HashMap<String, EndpointHandler>,
}
98
/// One optional backend per casting protocol; `None` means that protocol
/// is unavailable on this host.
struct CastManager {
    airplay: Option<AirPlayCaster>,
    chromecast: Option<ChromecastCaster>,
    miracast: Option<MiracastCaster>,
    esp32: Option<ESP32Caster>,
    web_server: Option<WebDashboardServer>,
}
107
/// Tracks whether the user is currently interacting by voice.
struct VoiceDetector {
    // True while voice audio is being detected.
    audio_active: Arc<RwLock<bool>>,
    // Instant of the most recent detected voice activity.
    last_voice_time: Arc<RwLock<std::time::Instant>>,
}
113
/// Ambient session context used to tailor output to the user's situation.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct ContextState {
    // Where the user appears to be working from.
    user_location: UserLocation,
    // Project the user is focused on, if known.
    active_project: Option<String>,
    // Current interaction style.
    conversation_mode: ConversationMode,
    // Named displays and their targets, in arrangement order.
    screen_arrangement: Vec<(String, DisplayTarget)>,
}
122
/// Where the user appears to be working from.
#[derive(Debug, Clone, Serialize, Deserialize)]
enum UserLocation {
    AtDesk,
    Mobile,
    Remote,
    Voice,
}
130
131#[derive(Debug, Clone, Serialize, Deserialize)]
132enum ConversationMode {
133 Interactive, Monitoring, Voice, Automated, }
138
139impl RustShell {
140 pub async fn new() -> Result<Self> {
141 Ok(Self {
142 displays: Arc::new(RwLock::new(HashMap::new())),
143 active_display: Arc::new(RwLock::new(None)),
144 output_mode: Arc::new(RwLock::new(OutputMode::default())),
145 mcp_interface: Arc::new(MCPInterface::new().await?),
146 cast_manager: Arc::new(CastManager::new().await?),
147 voice_detector: Arc::new(VoiceDetector::new()),
148 command_history: Arc::new(Mutex::new(Vec::new())),
149 context_state: Arc::new(RwLock::new(ContextState::default())),
150 })
151 }
152
153 pub async fn discover_displays(&self) -> Result<Vec<DisplayTarget>> {
155 let mut discovered = Vec::new();
156
157 if let Some(airplay) = &self.cast_manager.airplay {
159 discovered.extend(airplay.discover().await?);
160 }
161
162 if let Some(chromecast) = &self.cast_manager.chromecast {
164 discovered.extend(chromecast.discover().await?);
165 }
166
167 if let Some(esp32) = &self.cast_manager.esp32 {
169 discovered.extend(esp32.discover().await?);
170 }
171
172 discovered.push(DisplayTarget::Terminal);
174 discovered.push(DisplayTarget::Voice);
175
176 Ok(discovered)
177 }
178
179 pub async fn cast_to(&self, target: &DisplayTarget, content: &str) -> Result<()> {
181 let adapted_content = self.adapt_content_for_display(content, target).await?;
183
184 match target {
185 DisplayTarget::AppleTV { address, .. } => {
186 self.cast_manager
187 .airplay
188 .as_ref()
189 .context("AirPlay not available")?
190 .cast(address, &adapted_content)
191 .await?
192 }
193 DisplayTarget::Chromecast { uuid, .. } => {
194 self.cast_manager
195 .chromecast
196 .as_ref()
197 .context("Chromecast not available")?
198 .cast(uuid, &adapted_content)
199 .await?
200 }
201 DisplayTarget::ESP32Display {
202 address,
203 width,
204 height,
205 ..
206 } => {
207 let formatted = self.format_for_small_display(&adapted_content, *width, *height);
209 self.cast_manager
210 .esp32
211 .as_ref()
212 .context("ESP32 caster not available")?
213 .send(address, &formatted)
214 .await?
215 }
216 DisplayTarget::WebDashboard { port: _ } => {
217 self.cast_manager
219 .web_server
220 .as_ref()
221 .context("Web server not running")?
222 .update_dashboard(&adapted_content)
223 .await?
224 }
225 DisplayTarget::Terminal => {
226 println!("{}", adapted_content);
227 }
228 DisplayTarget::Voice => {
229 let voice_text = self.make_voice_friendly(&adapted_content);
231 self.speak(&voice_text).await?;
232 }
233 _ => {}
234 }
235
236 Ok(())
237 }
238
239 async fn adapt_content_for_display(
241 &self,
242 content: &str,
243 target: &DisplayTarget,
244 ) -> Result<String> {
245 let _mode = self.output_mode.read().await;
246
247 match target {
248 DisplayTarget::Voice => {
249 Ok(self.make_voice_friendly(content))
251 }
252 DisplayTarget::ESP32Display { width, height, .. } => {
253 Ok(self.format_for_small_display(content, *width, *height))
255 }
256 DisplayTarget::AppleTV { .. } | DisplayTarget::Chromecast { .. } => {
257 Ok(self.format_as_rich_html(content))
259 }
260 _ => Ok(content.to_string()),
261 }
262 }
263
264 pub async fn detect_voice_transition(&self) -> Result<()> {
266 let detector = &self.voice_detector;
267
268 let audio_active = detector.is_audio_active().await?;
270
271 if audio_active {
272 let mut mode = self.output_mode.write().await;
274 mode.verbosity = VerbosityLevel::Minimal;
275 mode.format = OutputFormat::Voice;
276
277 let mut active = self.active_display.write().await;
279 *active = Some("voice".to_string());
280
281 println!("🎤 Voice mode activated - switching to concise output");
282 }
283
284 Ok(())
285 }
286
287 pub async fn execute(&self, command: &str) -> Result<String> {
289 let parts: Vec<&str> = command.split_whitespace().collect();
291
292 if parts.is_empty() {
293 return Ok(String::new());
294 }
295
296 match parts[0] {
297 "cast" => {
298 if parts.len() >= 2 {
300 let target_name = parts[1];
301 let content = parts[2..].join(" ");
302 self.cast_by_name(target_name, &content).await
303 } else {
304 Ok("Usage: cast <target> <content>".to_string())
305 }
306 }
307 "discover" => {
308 let displays = self.discover_displays().await?;
310 Ok(format!(
311 "Found {} displays:\n{:#?}",
312 displays.len(),
313 displays
314 ))
315 }
316 "mode" => {
317 if parts.len() >= 2 {
319 self.set_mode(parts[1]).await
320 } else {
321 Ok(format!(
322 "Current mode: {:?}",
323 self.output_mode.read().await.verbosity
324 ))
325 }
326 }
327 "dashboard" => {
328 self.start_dashboard(8888).await?;
330 Ok("Dashboard started on http://localhost:8888".to_string())
331 }
332 _ => {
333 self.mcp_interface.execute(command).await
335 }
336 }
337 }
338
339 async fn cast_by_name(&self, name: &str, content: &str) -> Result<String> {
341 let displays = self.displays.read().await;
342
343 if let Some(target) = displays.get(name) {
344 self.cast_to(target, content).await?;
345 Ok(format!("Cast to {} complete", name))
346 } else {
347 Ok(format!("Display '{}' not found", name))
348 }
349 }
350
351 async fn set_mode(&self, mode_str: &str) -> Result<String> {
353 let mut mode = self.output_mode.write().await;
354
355 match mode_str {
356 "verbose" => mode.verbosity = VerbosityLevel::Verbose,
357 "normal" => mode.verbosity = VerbosityLevel::Normal,
358 "concise" => mode.verbosity = VerbosityLevel::Concise,
359 "minimal" => mode.verbosity = VerbosityLevel::Minimal,
360 "voice" => {
361 mode.verbosity = VerbosityLevel::Minimal;
362 mode.format = OutputFormat::Voice;
363 }
364 _ => return Ok(format!("Unknown mode: {}", mode_str)),
365 }
366
367 Ok(format!("Mode set to: {}", mode_str))
368 }
369
370 async fn start_dashboard(&self, port: u16) -> Result<()> {
372 println!("Starting dashboard on port {}...", port);
374 Ok(())
376 }
377
378 fn format_for_small_display(&self, content: &str, width: u16, height: u16) -> String {
380 let chars_per_line = (width / 8) as usize; let max_lines = (height / 16) as usize; content
385 .lines()
386 .take(max_lines)
387 .map(|line| {
388 if line.len() > chars_per_line {
389 format!("{}...", &line[..chars_per_line.saturating_sub(3)])
390 } else {
391 line.to_string()
392 }
393 })
394 .collect::<Vec<_>>()
395 .join("\n")
396 }
397
398 fn make_voice_friendly(&self, content: &str) -> String {
400 content
402 .replace("```", "code block")
403 .replace("##", "section")
404 .replace("*", "")
405 .replace("_", "")
406 .lines()
407 .filter(|line| !line.trim().is_empty())
408 .take(5) .collect::<Vec<_>>()
410 .join(". ")
411 }
412
413 fn format_as_rich_html(&self, content: &str) -> String {
415 format!(
416 r#"
417<!DOCTYPE html>
418<html>
419<head>
420 <style>
421 body {{
422 font-family: 'SF Pro Display', -apple-system, sans-serif;
423 background: linear-gradient(135deg, #1e3c72, #2a5298);
424 color: white;
425 padding: 50px;
426 font-size: 24px;
427 line-height: 1.6;
428 }}
429 pre {{
430 background: rgba(0,0,0,0.3);
431 padding: 20px;
432 border-radius: 10px;
433 overflow-x: auto;
434 }}
435 h1 {{
436 font-size: 48px;
437 margin-bottom: 30px;
438 }}
439 .terminal {{
440 font-family: 'SF Mono', monospace;
441 background: black;
442 color: #00ff00;
443 padding: 30px;
444 border-radius: 15px;
445 box-shadow: 0 10px 40px rgba(0,0,0,0.5);
446 }}
447 </style>
448</head>
449<body>
450 <div class="terminal">
451 <pre>{}</pre>
452 </div>
453</body>
454</html>
455 "#,
456 html_escape::encode_text(content)
457 )
458 }
459
460 async fn speak(&self, text: &str) -> Result<()> {
462 println!("🔊 Speaking: {}", text);
463 Ok(())
465 }
466}
467
// Casting backends. All are currently stateless stubs; protocol state will
// live in these types once the transports are implemented.
struct AirPlayCaster;
struct ChromecastCaster;
struct MiracastCaster;
struct ESP32Caster;
struct WebDashboardServer;
474
475impl AirPlayCaster {
476 async fn discover(&self) -> Result<Vec<DisplayTarget>> {
477 Ok(vec![])
479 }
480
481 async fn cast(&self, _address: &str, _content: &str) -> Result<()> {
482 Ok(())
484 }
485}
486
487impl ChromecastCaster {
488 async fn discover(&self) -> Result<Vec<DisplayTarget>> {
489 Ok(vec![])
491 }
492
493 async fn cast(&self, _uuid: &str, _content: &str) -> Result<()> {
494 Ok(())
496 }
497}
498
499impl MiracastCaster {
500 async fn discover(&self) -> Result<Vec<DisplayTarget>> {
501 Ok(vec![])
502 }
503}
504
505impl ESP32Caster {
506 async fn discover(&self) -> Result<Vec<DisplayTarget>> {
507 Ok(vec![])
509 }
510
511 async fn send(&self, _address: &str, _content: &str) -> Result<()> {
512 Ok(())
514 }
515}
516
517impl WebDashboardServer {
518 async fn update_dashboard(&self, _content: &str) -> Result<()> {
519 Ok(())
521 }
522}
523
524impl CastManager {
525 async fn new() -> Result<Self> {
526 Ok(Self {
527 airplay: Some(AirPlayCaster),
528 chromecast: Some(ChromecastCaster),
529 miracast: Some(MiracastCaster),
530 esp32: Some(ESP32Caster),
531 web_server: Some(WebDashboardServer),
532 })
533 }
534}
535
536impl MCPInterface {
537 async fn new() -> Result<Self> {
538 Ok(Self {
539 socket: Arc::new(Mutex::new(None)),
540 binary_api: BinaryAPI {
541 endpoints: HashMap::new(),
542 },
543 })
544 }
545
546 async fn execute(&self, command: &str) -> Result<String> {
547 Ok(format!("MCP: {}", command))
549 }
550}
551
552impl VoiceDetector {
553 fn new() -> Self {
554 Self {
555 audio_active: Arc::new(RwLock::new(false)),
556 last_voice_time: Arc::new(RwLock::new(std::time::Instant::now())),
557 }
558 }
559
560 async fn is_audio_active(&self) -> Result<bool> {
561 Ok(*self.audio_active.read().await)
562 }
563}
564
565impl Default for OutputMode {
566 fn default() -> Self {
567 Self {
568 verbosity: VerbosityLevel::Normal,
569 format: OutputFormat::Text,
570 theme: "default".to_string(),
571 screen_config: ScreenConfig {
572 width: 1920,
573 height: 1080,
574 color_depth: 24,
575 refresh_rate: 60,
576 capabilities: vec!["color".to_string()],
577 },
578 }
579 }
580}
581
582impl Default for ContextState {
583 fn default() -> Self {
584 Self {
585 user_location: UserLocation::AtDesk,
586 active_project: None,
587 conversation_mode: ConversationMode::Interactive,
588 screen_arrangement: Vec::new(),
589 }
590 }
591}
592
593pub async fn start_rust_shell() -> Result<()> {
595 println!("🚀 Rust Shell - Ultimate Collaborative Interface\n");
596 println!("Cast to any screen, seamless voice transitions!\n");
597
598 let shell = RustShell::new().await?;
599
600 println!("🔍 Discovering displays...");
602 let displays = shell.discover_displays().await?;
603 println!("Found {} displays", displays.len());
604
605 loop {
607 shell.detect_voice_transition().await?;
609
610 use std::io::{self, Write};
612 print!("rust-shell> ");
613 io::stdout().flush()?;
614
615 let mut input = String::new();
616 io::stdin().read_line(&mut input)?;
617
618 let input = input.trim();
619 if input == "exit" {
620 break;
621 }
622
623 match shell.execute(input).await {
625 Ok(output) => println!("{}", output),
626 Err(e) => eprintln!("Error: {}", e),
627 }
628 }
629
630 Ok(())
631}
632
/// Minimal HTML escaping for embedding arbitrary text in generated pages.
mod html_escape {
    /// Escape the five HTML-significant characters in `text`.
    ///
    /// `&` is replaced first so the entities introduced by the later
    /// replacements are not themselves re-escaped. The previous version
    /// replaced each character with itself (the entity names had been
    /// decoded away), so no escaping happened at all.
    pub fn encode_text(text: &str) -> String {
        text.replace('&', "&amp;")
            .replace('<', "&lt;")
            .replace('>', "&gt;")
            .replace('"', "&quot;")
            .replace('\'', "&#39;")
    }
}