#[cfg(feature = "async-resolve")]
use skilllite_core::skill::dependency_resolver::{
resolve_packages as core_resolve_packages, LlmProvider,
};
use crate::llm::LlmClient;
use std::path::Path;
#[cfg(feature = "async-resolve")]
mod llm_resolver {
    //! Feature-enabled resolver: wires the local `LlmClient` into the core
    //! dependency-resolution pipeline via the `LlmProvider` trait.
    use super::*;
    use crate::types::ChatMessage;
    use async_trait::async_trait;

    #[async_trait]
    impl LlmProvider for LlmClient {
        /// Sends `prompt` as a single user message at temperature 0.0 and
        /// returns the first choice's message content, or `None` if the
        /// request fails or the response carries no content.
        async fn extract_packages(&self, model: &str, prompt: &str) -> Option<String> {
            let chat = vec![ChatMessage::user(prompt)];
            // Deterministic extraction: temperature pinned to 0.0, no extra options.
            let response = self.chat_completion(model, &chat, None, Some(0.0)).await.ok()?;
            let first_choice = response.choices.first()?;
            first_choice.message.content.clone()
        }
    }

    /// Resolves a skill's package dependencies by delegating to the core
    /// resolver, optionally consulting `llm_client`/`model` for extraction.
    pub async fn resolve_packages(
        skill_dir: &Path,
        compatibility: Option<&str>,
        language: &str,
        llm_client: Option<&LlmClient>,
        model: Option<&str>,
        allow_unknown: bool,
    ) -> anyhow::Result<skilllite_core::skill::dependency_resolver::ResolvedDependencies> {
        core_resolve_packages(skill_dir, compatibility, language, llm_client, model, allow_unknown)
            .await
    }
}
#[cfg(not(feature = "async-resolve"))]
mod llm_resolver {
    //! Fallback when the `async-resolve` feature is disabled: keeps the same
    //! async signature as the enabled variant but always returns an error.
    use super::*;

    /// Stub resolver; mirrors the feature-enabled signature so callers
    /// compile unchanged, but unconditionally fails at runtime.
    pub async fn resolve_packages(
        _skill_dir: &Path,
        _compatibility: Option<&str>,
        _language: &str,
        _llm_client: Option<&LlmClient>,
        _model: Option<&str>,
        _allow_unknown: bool,
    ) -> anyhow::Result<skilllite_core::skill::dependency_resolver::ResolvedDependencies> {
        // Equivalent to `bail!`: construct the error and return it explicitly.
        Err(anyhow::anyhow!("async-resolve feature not enabled"))
    }
}
pub use llm_resolver::resolve_packages;