use menta::providers::openai::{OpenAiProvider, register};
use menta::{GenerateTextRequest, generate_text};
#[tokio::main]
async fn main() -> Result<(), menta::Error> {
    // Register an OpenAI-compatible provider pointed at a local LM Studio
    // server; the API key is unused locally but required by the builder.
    let provider = OpenAiProvider::new("lmstudio", "LM Studio")
        .base_url("http://127.0.0.1:1234/v1")
        .api_key("dummy");
    register(provider)?;

    // Build a one-shot text-generation request against the locally
    // served model and await the completion.
    let request = GenerateTextRequest::new()
        .model("lmstudio/qwen3.5-9b")
        .prompt("Write one sentence about Rust.");
    let result = generate_text(request).await?;

    println!("{}", result.text);
    Ok(())
}