algocline_engine/executor.rs
1//! Lua execution engine.
2//!
3//! Manages the mlua-isle VM and orchestrates StdLib injection
4//! for each session:
5//!
6//! 1. **Layer 0** — [`bridge::register`] injects Rust-backed `alc.*` primitives
7//! 2. **Layer 1** — [`PRELUDE`] adds Lua-based combinators (`alc.map`, etc.)
8//! 3. **Layer 2** — [`mlua_pkg::Registry`] makes `require("explore")` etc.
9//! resolve from `~/.algocline/packages/`
10//!
11//! ## Execution models
12//!
13//! - **`eval_simple`** — sync eval (no LLM bridge). For lightweight ops
14//! like reading package metadata.
15//! - **`start_session`** — coroutine-based execution. `alc.llm()` yields
16//! the coroutine instead of blocking the Lua thread, allowing other
17//! requests to proceed while waiting for LLM responses.
18
19use std::path::PathBuf;
20
21use algocline_core::{ExecutionMetrics, ExecutionSpec};
22use mlua::LuaSerdeExt;
23use mlua_isle::{AsyncIsle, AsyncIsleDriver, IsleError};
24use mlua_pkg::{resolvers::FsResolver, Registry};
25
26use crate::bridge;
27use crate::llm_bridge::LlmRequest;
28use crate::session::Session;
29
/// Layer 1: Prelude combinators (map, reduce, vote, filter).
/// Lua source embedded at compile time from `prelude.lua`, loaded during
/// `start_session` phase-1 setup (note: `eval_simple` does NOT load it).
const PRELUDE: &str = include_str!("prelude.lua");
33
/// Lua execution engine.
///
/// Wraps mlua-isle AsyncIsle (Handle/Driver pattern) to provide
/// non-blocking, cancellable Lua execution with alc StdLib injected.
pub struct Executor {
    // Handle used to submit work to the Lua VM thread
    // (`exec`, `spawn_exec`, `spawn_coroutine_eval`).
    isle: AsyncIsle,
    // Driver half of the Handle/Driver pair. Never read after construction
    // (hence the underscore); presumably held so the driver is not dropped
    // while the handle is in use — TODO confirm against mlua-isle docs.
    _driver: AsyncIsleDriver,
}
42
43impl Executor {
44 pub async fn new(lib_paths: Vec<PathBuf>) -> anyhow::Result<Self> {
45 let (isle, driver) = AsyncIsle::spawn(move |lua| {
46 // Install mlua-pkg Registry once during VM initialization.
47 // This survives across sessions since mlua-isle reuses the VM.
48 let mut reg = Registry::new();
49 for path in &lib_paths {
50 if let Ok(resolver) = FsResolver::new(path) {
51 reg.add(resolver);
52 }
53 }
54 reg.install(lua)?;
55 Ok(())
56 })
57 .await?;
58
59 Ok(Self {
60 isle,
61 _driver: driver,
62 })
63 }
64
65 /// Evaluate Lua code without LLM bridge. For lightweight operations
66 /// like reading package metadata.
67 pub async fn eval_simple(&self, code: String) -> Result<serde_json::Value, String> {
68 let task = self.isle.spawn_exec(move |lua| {
69 let result: mlua::Value = lua
70 .load(&code)
71 .eval()
72 .map_err(|e| IsleError::Lua(e.to_string()))?;
73 let json: serde_json::Value = lua
74 .from_value(result)
75 .map_err(|e| IsleError::Lua(e.to_string()))?;
76 serde_json::to_string(&json).map_err(|e| IsleError::Lua(format!("JSON serialize: {e}")))
77 });
78
79 let json_str = task.await.map_err(|e| e.to_string())?;
80 serde_json::from_str(&json_str).map_err(|e| format!("JSON parse: {e}"))
81 }
82
83 /// Start a new Lua execution session.
84 ///
85 /// Phase 1 (sync exec): registers alc.* StdLib, ctx, prelude.
86 /// Phase 2 (coroutine): executes user Lua code. When `alc.llm()`
87 /// is called, the coroutine yields instead of blocking the Lua
88 /// thread, allowing other requests to proceed.
89 pub async fn start_session(
90 &self,
91 code: String,
92 ctx: serde_json::Value,
93 ) -> Result<Session, String> {
94 let spec = ExecutionSpec::new(code, ctx);
95 let metrics = ExecutionMetrics::new();
96 let custom_handle = metrics.custom_handle();
97
98 let (llm_tx, llm_rx) = tokio::sync::mpsc::channel::<LlmRequest>(16);
99
100 let ns = spec.namespace.clone();
101 let lua_ctx = spec.ctx.clone();
102 let lua_code = spec.code.clone();
103
104 // Phase 1: Setup (sync exec on Lua thread)
105 // Registers alc.* with async LLM bridge, sets ctx, loads prelude,
106 // clears package.loaded cache.
107 self.isle
108 .exec(move |lua| {
109 // 1. Create alc StdLib table with async LLM bridge + state + stats
110 let alc_table = lua.create_table()?;
111 bridge::register(lua, &alc_table, Some(llm_tx), ns, custom_handle)?;
112 lua.globals().set("alc", alc_table)?;
113
114 // 2. Set ctx global
115 let ctx_value = lua.to_value(&lua_ctx)?;
116 lua.globals().set("ctx", ctx_value)?;
117
118 // 3. Load prelude (alc.map, alc.reduce, alc.vote, alc.filter)
119 lua.load(PRELUDE)
120 .exec()
121 .map_err(|e| IsleError::Lua(format!("Prelude load failed: {e}")))?;
122
123 // 4. Clear package.loaded cache so each session gets fresh modules
124 let loaded: mlua::Table =
125 lua.globals().get::<mlua::Table>("package")?.get("loaded")?;
126 let keys: Vec<String> = loaded
127 .pairs::<String, mlua::Value>()
128 .filter_map(|r| r.ok().map(|(k, _)| k))
129 .collect();
130 for key in keys {
131 loaded.set(key, mlua::Value::Nil)?;
132 }
133
134 Ok("ok".to_string())
135 })
136 .await
137 .map_err(|e| format!("Session setup failed: {e}"))?;
138
139 // Phase 2: Execute user code as a coroutine.
140 // alc.llm() is an async function — when called, the coroutine
141 // yields and other coroutines/requests can make progress.
142 //
143 // The user code is wrapped so its return value is JSON-serialized
144 // via alc.json_encode. This matches the old spawn_exec behavior
145 // where the closure did serde_json::to_string. coroutine_eval's
146 // lua_value_to_string only does tostring(), which loses structure
147 // for tables.
148 let wrapped_code = format!("return alc.json_encode((function()\n{lua_code}\nend)())");
149 let exec_task = self.isle.spawn_coroutine_eval(&wrapped_code);
150
151 Ok(Session::new(llm_rx, exec_task, metrics))
152 }
153}