//! Module declarations and public re-exports for the LLM backend layer.
//!
//! Declarations and re-exports are kept in alphabetical order; items gated on
//! the `llama-cpp` feature sit next to their always-available siblings so the
//! two lists stay parallel.

pub mod batch_processor;
pub mod debug_channel;
pub mod decider;
pub mod invoker;
pub mod json_prompt;
pub mod llama_cpp_server;
#[cfg(feature = "llama-cpp")]
pub mod llama_cpp_standalone;
pub mod ollama;
pub mod prompt_builder;
pub mod registry;
pub mod response_parser;
pub mod strategy_advisor;

// NOTE: `json_prompt` and `response_parser` are exposed as modules only —
// they have no item re-exports below.
pub use batch_processor::{
    BatchProcessError, BatchProcessResult, BatchProcessor, LlmBatchProcessor,
    LlmBatchProcessorConfig,
};
pub use debug_channel::{LlmDebugChannel, LlmDebugEvent, StderrLlmSubscriber};
pub use decider::{
    DecisionResponse, LlmDecider, LlmDeciderConfig, LlmError, LoraConfig, WorkerDecisionRequest,
};
pub use invoker::{create_llm_invoker, LlmBatchInvoker};
pub use llama_cpp_server::{ChatTemplate, LlamaCppServerConfig, LlamaCppServerDecider};
#[cfg(feature = "llama-cpp")]
pub use llama_cpp_standalone::{LlamaCppStandaloneConfig, LlamaCppStandaloneDecider};
pub use ollama::{OllamaConfig, OllamaDecider};
pub use prompt_builder::PromptBuilder;
pub use registry::{ModelInfo, ModelRegistry, RegistryError};
pub use strategy_advisor::{
    parse_selection_kind_fuzzy, LlmStrategyAdvisor, SelectionKind, StrategyAdvice,
    StrategyAdviceError, StrategyAdvisor, StrategyContext, StrategyPromptBuilder,
    StrategyResponseParser,
};