pub struct Llm {
pub proc: Option<Arc<DaggerSessionProc>>,
pub selection: Selection,
pub graphql_client: DynGraphQLClient,
}
Fields
proc: Option<Arc<DaggerSessionProc>>
selection: Selection
graphql_client: DynGraphQLClient
Implementations

impl Llm
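Before the individual methods, a minimal end-to-end sketch. It assumes the root client handed to `dagger_sdk::connect` exposes an `llm()` constructor in the same style as the other core object types; everything else relies only on methods documented below.

```rust
use eyre::Result;

#[tokio::main]
async fn main() -> Result<()> {
    dagger_sdk::connect(|client| async move {
        // Assumption: the root client exposes an `llm()` constructor, in the
        // same style as the other core constructors (`container()`, `host()`, ...).
        let llm = client
            .llm()
            .with_prompt("Reply with exactly one word: ping or pong?");

        // `sync` submits the queued prompt and synchronizes LLM state;
        // `last_reply` then reads the model's answer from the history.
        llm.sync().await?;
        println!("reply: {}", llm.last_reply().await?);

        Ok(())
    })
    .await?;

    Ok(())
}
```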
pub fn bind_result(&self, name: impl Into<String>) -> Binding
Returns the type of the current state.
pub async fn has_prompt(&self) -> Result<bool, DaggerError>
Indicates whether there are any queued prompts or tool results to send to the model.
pub async fn history_json(&self) -> Result<Json, DaggerError>
Return the raw LLM message history as JSON.
pub async fn id(&self) -> Result<Llmid, DaggerError>
A unique identifier for this LLM.
pub async fn last_reply(&self) -> Result<String, DaggerError>
Return the last LLM reply from the history.
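A hedged sketch of inspecting an existing conversation with has_prompt, history_json and last_reply. It assumes the `Llm` handle was obtained as in the first example and that the `Json` scalar wrapper derives Debug.

```rust
use dagger_sdk::Llm;
use eyre::Result;

// Sketch: inspect the conversation held by an existing `Llm` handle.
async fn inspect(llm: &Llm) -> Result<()> {
    // Are there queued prompts or tool results still waiting to be sent?
    if llm.has_prompt().await? {
        println!("there is queued input that has not been sent yet");
    }

    // Raw message history as JSON. Assumption: the `Json` scalar wrapper
    // derives Debug, so it can be dumped for inspection.
    let history = llm.history_json().await?;
    println!("history: {history:?}");

    // Most recent model reply recorded in the history.
    println!("last reply: {}", llm.last_reply().await?);

    Ok(())
}
```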
pub fn loop(&self) -> Llm
Submit the queued prompt, evaluate any tool calls, queue their results, and keep going until the model ends its turn.
pub async fn model(&self) -> Result<String, DaggerError>
Return the model used by the LLM.
pub async fn provider(&self) -> Result<String, DaggerError>
Return the provider used by the LLM.
pub async fn step(&self) -> Result<Llmid, DaggerError>
Submit the queued prompt or tool call results, evaluate any tool calls, and queue their results.
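To clarify how step relates to loop, a short sketch that advances a single round by hand; it uses only methods documented on this page.

```rust
use dagger_sdk::Llm;
use eyre::Result;

// Sketch: advance the conversation by one prompt/tool-call round; `loop`
// repeats this same cycle until the model ends its turn.
async fn one_round(llm: &Llm) -> Result<()> {
    if llm.has_prompt().await? {
        // Submit the queued prompt or tool results, evaluate any tool calls,
        // and queue their results. The returned Llmid identifies the advanced
        // state and can be reloaded from the root client to continue from it.
        let _advanced = llm.step().await?;
    }
    Ok(())
}
```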
pub async fn sync(&self) -> Result<Llmid, DaggerError>
Synchronize LLM state.
pub fn token_usage(&self) -> LlmTokenUsage
Returns the token usage of the current state.
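A sketch of reading usage numbers; the input_tokens/output_tokens/total_tokens accessors on LlmTokenUsage are assumptions mirroring the underlying LLMTokenUsage fields, not confirmed by this page.

```rust
use dagger_sdk::Llm;
use eyre::Result;

// Sketch: report token consumption for the current state. Assumption: the
// LlmTokenUsage object exposes input_tokens/output_tokens/total_tokens
// accessors mirroring the underlying LLMTokenUsage fields.
async fn report_usage(llm: &Llm) -> Result<()> {
    let usage = llm.token_usage();
    println!(
        "tokens: {} in / {} out / {} total",
        usage.input_tokens().await?,
        usage.output_tokens().await?,
        usage.total_tokens().await?,
    );
    Ok(())
}
```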
pub async fn tools(&self) -> Result<String, DaggerError>
Print documentation for available tools.
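A small sketch that combines provider, model and tools to describe the current configuration; all three methods are documented above.

```rust
use dagger_sdk::Llm;
use eyre::Result;

// Sketch: describe the current configuration -- which provider and model are
// in use, and the documentation of every tool exposed to the model.
async fn describe(llm: &Llm) -> Result<()> {
    println!("provider: {}", llm.provider().await?);
    println!("model: {}", llm.model().await?);
    println!("{}", llm.tools().await?);
    Ok(())
}
```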
pub fn with_blocked_function(
    &self,
    type_name: impl Into<String>,
    function: impl Into<String>,
) -> Llm
Return a new LLM with the specified function no longer exposed as a tool.
Arguments
type_name - The type name whose function will be blocked
function - The function to block. Will be converted to lowerCamelCase if necessary.
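A sketch of blocking a single tool; the "Container"/"terminal" names are purely illustrative.

```rust
use dagger_sdk::Llm;

// Sketch: hide one function from the model's tool set. The type and function
// names are illustrative; per the docs above, the function name is converted
// to lowerCamelCase if necessary.
fn restrict(llm: &Llm) -> Llm {
    llm.with_blocked_function("Container", "terminal")
}
```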
pub fn with_env(&self, env: impl IntoID<EnvId>) -> Llm
Allow the LLM to interact with an environment via MCP.
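A hedged sketch of building an environment and attaching it. The `client.env()` constructor and the `with_string_input` / `with_container_input` builders are assumptions about the Env API, not confirmed by this page.

```rust
use dagger_sdk::{Llm, Query};

// Sketch: hand the model a typed environment to work against over MCP.
// Assumptions: the root client exposes `env()`, and Env provides
// `with_string_input` / `with_container_input` builders taking
// (name, value, description).
fn with_workspace(client: &Query, llm: &Llm) -> Llm {
    let env = client
        .env()
        .with_string_input("goal", "add unit tests", "what the model should do")
        .with_container_input(
            "workspace",
            client.container().from("alpine:3.20"),
            "a container the model can run commands in",
        );

    llm.with_env(env)
}
```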
pub fn with_mcp_server(
    &self,
    name: impl Into<String>,
    service: impl IntoID<ServiceId>,
) -> Llm
Add an external MCP server to the LLM.
Arguments
name - The name of the MCP server
service - The MCP service to run and communicate with over stdio
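A hedged sketch of attaching an MCP server. The image and command are hypothetical, and the `with_default_args` / `as_service` calls are assumptions about the Container and Service API rather than anything documented on this page.

```rust
use dagger_sdk::{Llm, Query};

// Sketch: wire an external MCP server into the model. The image name and
// command are placeholders for a real MCP server; the service is a container
// whose default command speaks MCP over stdio.
fn with_docs_server(client: &Query, llm: &Llm) -> Llm {
    let server = client
        .container()
        .from("example/docs-mcp-server:latest") // hypothetical image
        .with_default_args(vec!["docs-mcp-server", "--stdio"]) // hypothetical command
        .as_service();

    llm.with_mcp_server("docs", server)
}
```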
pub fn with_model(&self, model: impl Into<String>) -> Llm
pub fn with_prompt(&self, prompt: impl Into<String>) -> Llm
pub fn with_prompt_file(&self, file: impl IntoID<FileId>) -> Llm
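A sketch combining the three builders above. The model name and prompt file path are placeholders, and `host().file(...)` is an assumption about the client API.

```rust
use dagger_sdk::{Llm, Query};

// Sketch: pick a model and queue prompts, inline and from a file. The model
// name and file path are placeholders; `host().file(...)` is assumed to be
// available for reading a local prompt file.
fn prepare(client: &Query, llm: &Llm) -> Llm {
    llm.with_model("gpt-4o") // placeholder model name
        .with_prompt("Summarize the repository layout.")
        .with_prompt_file(client.host().file("PROMPT.md")) // placeholder path
}
```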
pub fn with_static_tools(&self) -> Llm
Use a static set of tools for method calls, e.g. for MCP clients that do not support dynamic tool registration.
pub fn with_system_prompt(&self, prompt: impl Into<String>) -> Llm
pub fn without_default_system_prompt(&self) -> Llm
Disable the default system prompt.
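A sketch of replacing the default system prompt and pinning a static tool set, using only methods documented above; the prompt text is illustrative.

```rust
use dagger_sdk::Llm;

// Sketch: replace the default system prompt with a custom one and pin a static
// tool set, e.g. for MCP clients that cannot handle dynamic tool registration.
fn customize(llm: &Llm) -> Llm {
    llm.without_default_system_prompt()
        .with_system_prompt("You are a careful release engineer. Be terse.")
        .with_static_tools()
}
```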