/// Read `key` from the environment and parse it as `usize`.
///
/// Falls back to `default` when the variable is unset, not valid Unicode,
/// or does not parse as an unsigned integer.
fn env_usize(key: &str, default: usize) -> usize {
    match std::env::var(key) {
        Ok(raw) => raw.parse().unwrap_or(default),
        Err(_) => default,
    }
}
/// Chunk size (in characters) used when splitting long text.
/// Overridable via `SKILLLITE_CHUNK_SIZE`.
pub fn get_chunk_size() -> usize {
    const DEFAULT_CHUNK_SIZE: usize = 6000;
    env_usize("SKILLLITE_CHUNK_SIZE", DEFAULT_CHUNK_SIZE)
}
/// Number of leading chunks to keep from long text.
/// Overridable via `SKILLLITE_HEAD_CHUNKS`.
pub fn get_head_chunks() -> usize {
    const DEFAULT_HEAD_CHUNKS: usize = 3;
    env_usize("SKILLLITE_HEAD_CHUNKS", DEFAULT_HEAD_CHUNKS)
}
/// Number of trailing chunks to keep from long text.
/// Overridable via `SKILLLITE_TAIL_CHUNKS`.
pub fn get_tail_chunks() -> usize {
    const DEFAULT_TAIL_CHUNKS: usize = 3;
    env_usize("SKILLLITE_TAIL_CHUNKS", DEFAULT_TAIL_CHUNKS)
}
/// Maximum number of characters allowed in output.
/// Overridable via `SKILLLITE_MAX_OUTPUT_CHARS`.
pub fn get_max_output_chars() -> usize {
    const DEFAULT_MAX_OUTPUT_CHARS: usize = 8000;
    env_usize("SKILLLITE_MAX_OUTPUT_CHARS", DEFAULT_MAX_OUTPUT_CHARS)
}
/// Model name to use for map steps.
///
/// Reads `SKILLLITE_MAP_MODEL`; when it is not set, falls back to
/// `main_model`.
pub fn get_map_model(main_model: &str) -> String {
    match skilllite_core::config::loader::env_optional("SKILLLITE_MAP_MODEL", &[]) {
        Some(map_model) => map_model,
        None => main_model.to_string(),
    }
}
/// Strategy for handling long text, selected by the
/// `SKILLLITE_LONG_TEXT_STRATEGY` environment variable.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LongTextStrategy {
    /// Default strategy (env value `head_tail_only` or any unrecognized value).
    /// Presumably keeps only the head and tail chunks — confirm at use site.
    HeadTailOnly,
    /// Selected by env values `head_tail_extract` / `extract`.
    /// Presumably also extracts relevant middle chunks — confirm at use site.
    HeadTailExtract,
    /// Selected by env values `mapreduce_full` / `mapreduce` / `map_reduce`.
    /// Presumably runs a full map-reduce pass over all chunks — confirm at use site.
    MapReduceFull,
}
/// Read `key` from the environment as a `String`.
///
/// Falls back to `default` when the variable is unset or not valid Unicode.
fn env_str(key: &str, default: &str) -> String {
    match std::env::var(key) {
        Ok(value) => value,
        Err(_) => default.to_string(),
    }
}
/// Parse `SKILLLITE_LONG_TEXT_STRATEGY` into a [`LongTextStrategy`].
///
/// Matching is case-insensitive and ignores surrounding whitespace.
/// Unrecognized (or unset) values fall back to `HeadTailOnly`.
pub fn get_long_text_strategy() -> LongTextStrategy {
    let raw = env_str("SKILLLITE_LONG_TEXT_STRATEGY", "head_tail_only");
    // Trim the borrowed str first, then lowercase: this avoids the extra
    // String round-trip of `.to_lowercase().trim().to_string()`. Lowercasing
    // never introduces whitespace, so the result is identical.
    match raw.trim().to_lowercase().as_str() {
        "head_tail_extract" | "extract" => LongTextStrategy::HeadTailExtract,
        "mapreduce_full" | "mapreduce" | "map_reduce" => LongTextStrategy::MapReduceFull,
        _ => LongTextStrategy::HeadTailOnly,
    }
}
/// Number of chunks to extract for the `HeadTailExtract` strategy.
///
/// Computes `ceil(total_chunks * ratio)` where the ratio comes from
/// `SKILLLITE_EXTRACT_TOP_K_RATIO` (default 0.5), then bounds the result
/// below by `head_chunks + tail_chunks` and above by `total_chunks`.
pub fn get_extract_top_k(total_chunks: usize, head_chunks: usize, tail_chunks: usize) -> usize {
    const DEFAULT_RATIO: f64 = 0.5;
    let ratio = match std::env::var("SKILLLITE_EXTRACT_TOP_K_RATIO") {
        Ok(raw) => raw.parse::<f64>().unwrap_or(DEFAULT_RATIO),
        Err(_) => DEFAULT_RATIO,
    };
    let minimum = head_chunks + tail_chunks;
    let by_ratio = (total_chunks as f64 * ratio).ceil() as usize;
    // max-then-min so `total_chunks` wins when `minimum` exceeds it
    // (this is not `clamp`, which would panic in that case).
    by_ratio.max(minimum).min(total_chunks)
}
/// Character-count threshold above which summarization kicks in.
/// Overridable via `SKILLLITE_SUMMARIZE_THRESHOLD`.
pub fn get_summarize_threshold() -> usize {
    const DEFAULT_SUMMARIZE_THRESHOLD: usize = 30000;
    env_usize("SKILLLITE_SUMMARIZE_THRESHOLD", DEFAULT_SUMMARIZE_THRESHOLD)
}
/// Maximum token budget for model calls.
/// Overridable via `SKILLLITE_MAX_TOKENS`.
pub fn get_max_tokens() -> usize {
    const DEFAULT_MAX_TOKENS: usize = 8192;
    env_usize("SKILLLITE_MAX_TOKENS", DEFAULT_MAX_TOKENS)
}
/// Maximum number of characters accepted from user input.
/// Overridable via `SKILLLITE_USER_INPUT_MAX_CHARS`.
pub fn get_user_input_max_chars() -> usize {
    const DEFAULT_USER_INPUT_MAX_CHARS: usize = 30000;
    env_usize("SKILLLITE_USER_INPUT_MAX_CHARS", DEFAULT_USER_INPUT_MAX_CHARS)
}
/// Maximum number of characters retained from a tool result.
/// Overridable via `SKILLLITE_TOOL_RESULT_MAX_CHARS`.
pub fn get_tool_result_max_chars() -> usize {
    const DEFAULT_TOOL_RESULT_MAX_CHARS: usize = 12000;
    env_usize("SKILLLITE_TOOL_RESULT_MAX_CHARS", DEFAULT_TOOL_RESULT_MAX_CHARS)
}
/// Maximum number of characters retained from a tool result during recovery.
/// Overridable via `SKILLLITE_TOOL_RESULT_RECOVERY_MAX_CHARS`.
pub fn get_tool_result_recovery_max_chars() -> usize {
    const DEFAULT_RECOVERY_MAX_CHARS: usize = 3000;
    env_usize("SKILLLITE_TOOL_RESULT_RECOVERY_MAX_CHARS", DEFAULT_RECOVERY_MAX_CHARS)
}
/// Output directory from the environment-derived paths config, if configured.
pub fn get_output_dir() -> Option<String> {
    let paths = skilllite_core::config::PathsConfig::from_env();
    paths.output_dir
}
/// Threshold (presumably a message/entry count — confirm at use site) that
/// triggers history compaction. Overridable via `SKILLLITE_COMPACTION_THRESHOLD`.
pub fn get_compaction_threshold() -> usize {
    const DEFAULT_COMPACTION_THRESHOLD: usize = 16;
    env_usize("SKILLLITE_COMPACTION_THRESHOLD", DEFAULT_COMPACTION_THRESHOLD)
}
/// Whether memory flushing is enabled.
///
/// Enabled by default; disabled only when `SKILLLITE_MEMORY_FLUSH_ENABLED`
/// is explicitly set to "0", "false", "no", or "off" (case-insensitive).
pub fn get_memory_flush_enabled() -> bool {
    match skilllite_core::config::loader::env_optional("SKILLLITE_MEMORY_FLUSH_ENABLED", &[]) {
        None => true,
        Some(value) => {
            let value = value.to_lowercase();
            !matches!(value.as_str(), "0" | "false" | "no" | "off")
        }
    }
}
/// Threshold (presumably a message/entry count — confirm at use site) that
/// triggers a memory flush. Overridable via `SKILLLITE_MEMORY_FLUSH_THRESHOLD`.
pub fn get_memory_flush_threshold() -> usize {
    const DEFAULT_MEMORY_FLUSH_THRESHOLD: usize = 12;
    env_usize("SKILLLITE_MEMORY_FLUSH_THRESHOLD", DEFAULT_MEMORY_FLUSH_THRESHOLD)
}
/// Number of recent items to keep during compaction.
/// Overridable via `SKILLLITE_COMPACTION_KEEP_RECENT`.
pub fn get_compaction_keep_recent() -> usize {
    const DEFAULT_KEEP_RECENT: usize = 10;
    env_usize("SKILLLITE_COMPACTION_KEEP_RECENT", DEFAULT_KEEP_RECENT)
}
/// Whether compact planning should be used.
///
/// The `SKILLLITE_COMPACT_PLANNING` env var, when set, always wins: any value
/// other than "0"/"false"/"no"/"off" (case-insensitive) enables it. When the
/// env var is unset, the decision is inferred from the model name: enabled
/// when the lowercased name contains one of the known model-family prefixes,
/// `false` when no model is given.
pub fn get_compact_planning(model: Option<&str>) -> bool {
    if let Some(v) = skilllite_core::config::loader::env_optional(
        skilllite_core::config::env_keys::misc::SKILLLITE_COMPACT_PLANNING,
        &[],
    ) {
        return !matches!(v.to_lowercase().as_str(), "0" | "false" | "no" | "off");
    }
    let model = match model {
        Some(m) => m.to_lowercase(),
        None => return false,
    };
    let compact_models = ["claude-4.6", "gpt-4.5", "gpt-5", "gemini-2.5", "gemini-3.0"];
    // `contains` subsumes `starts_with`, so the former
    // `starts_with(p) || contains(p)` check was redundant.
    compact_models.iter().any(|p| model.contains(p))
}