#![ warn( missing_docs ) ]
use std ::
{
env,
path :: { Path, PathBuf },
};
use std ::collections ::HashMap;
#[ cfg( feature = "glob" ) ]
use glob ::glob;
#[ cfg( feature = "secrets" ) ]
use std ::fs;
#[ cfg( feature = "validation" ) ]
use jsonschema ::Validator;
#[ cfg( feature = "validation" ) ]
use schemars ::JsonSchema;
#[ cfg( feature = "secure" ) ]
use secrecy :: { SecretString, ExposeSecret };
/// Errors produced by workspace resolution, path handling, and configuration
/// loading. Marked `#[ non_exhaustive ]` so new variants can be added without
/// a breaking change.
#[ derive( Debug, Clone ) ]
#[ non_exhaustive ]
pub enum WorkspaceError
{
/// Generic configuration problem, described by the message.
ConfigurationError( String ),
/// A required environment variable (e.g. `WORKSPACE_PATH`) was not set.
EnvironmentVariableMissing( String ),
/// Invalid glob pattern or failure while iterating glob matches.
#[ cfg( feature = "glob" ) ]
GlobError( String ),
/// Underlying I/O failure, stringified.
IoError( String ),
/// A path that was expected to exist does not.
PathNotFound( PathBuf ),
/// A path lies outside the workspace root.
PathOutsideWorkspace( PathBuf ),
/// `cargo metadata` invocation or parsing failure.
CargoError( String ),
/// TOML parsing failure.
TomlError( String ),
/// Serialization/deserialization failure.
#[ cfg( feature = "serde" ) ]
SerdeError( String ),
/// JSON-schema validation failure.
#[ cfg( feature = "validation" ) ]
ValidationError( String ),
/// A secret failed strength/format validation.
#[ cfg( feature = "secure" ) ]
SecretValidationError( String ),
/// Secret placeholder substitution into a config failed.
#[ cfg( feature = "secure" ) ]
SecretInjectionError( String ),
}
impl core::fmt::Display for WorkspaceError
{
/// Formats a human-readable description of the error, including remediation
/// hints for the most common misconfigurations.
// The explicit `'a` lifetime (and the clippy allow silencing the lint about
// it) was unnecessary — `'_` lets the compiler elide it.
#[ inline ]
fn fmt( &self, f: &mut core::fmt::Formatter< '_ > ) -> core::fmt::Result
{
match self
{
WorkspaceError::ConfigurationError( msg ) =>
write!( f, "configuration error: {msg}" ),
WorkspaceError::EnvironmentVariableMissing( var ) =>
write!( f, "environment variable '{var}' not found. ensure .cargo/config.toml is properly configured with WORKSPACE_PATH" ),
#[ cfg( feature = "glob" ) ]
WorkspaceError::GlobError( msg ) =>
write!( f, "glob pattern error: {msg}" ),
WorkspaceError::IoError( msg ) =>
write!( f, "io error: {msg}" ),
WorkspaceError::PathNotFound( path ) =>
write!( f, "path not found: {}. ensure the workspace structure is properly initialized", path.display() ),
WorkspaceError::PathOutsideWorkspace( path ) =>
write!( f, "path is outside workspace boundaries: {}", path.display() ),
WorkspaceError::CargoError( msg ) =>
write!( f, "cargo metadata error: {msg}" ),
WorkspaceError::TomlError( msg ) =>
write!( f, "toml parsing error: {msg}" ),
#[ cfg( feature = "serde" ) ]
WorkspaceError::SerdeError( msg ) =>
write!( f, "serde error: {msg}" ),
#[ cfg( feature = "validation" ) ]
WorkspaceError::ValidationError( msg ) =>
write!( f, "config validation error: {msg}" ),
#[ cfg( feature = "secure" ) ]
WorkspaceError::SecretValidationError( msg ) =>
write!( f, "secret validation error: {msg}" ),
#[ cfg( feature = "secure" ) ]
WorkspaceError::SecretInjectionError( msg ) =>
write!( f, "secret injection error: {msg}" ),
}
}
}
/// Allows `WorkspaceError` to participate in `?` conversions and boxed error handling.
impl core ::error ::Error for WorkspaceError {}
/// Crate-wide result alias; every fallible operation fails with [`WorkspaceError`].
pub type Result< T > = core ::result ::Result< T, WorkspaceError >;
/// Implemented by configuration types that can receive secrets loaded by the
/// workspace (see `load_config_with_secrets`, "secure" feature).
#[ cfg( feature = "secure" ) ]
pub trait SecretInjectable
{
/// Injects the secret `value` for `key` into the configuration.
fn inject_secret( &mut self, key: &str, value: String ) -> Result< () >;
/// Validates the injected secrets; semantics are implementation-defined.
fn validate_secrets( &self ) -> Result< () >;
}
/// Handle to a workspace root directory; all other workspace paths are derived
/// from it via the accessor methods.
#[ derive( Debug, Clone ) ]
pub struct Workspace
{
// Workspace root path. Most constructors normalize it with `cleanup_path`;
// `from_current_dir`/`from_git_root` store the raw path.
root: PathBuf,
}
impl Workspace
{
/// Creates a workspace rooted at `root`; the path is lexically normalized
/// but not checked for existence.
#[ must_use ]
#[ inline ]
pub fn new< P: Into< PathBuf > >( root: P ) -> Self
{
let root = root.into();
let root = Self ::cleanup_path( root );
Self { root }
}
/// Resolves the workspace from the `WORKSPACE_PATH` environment variable.
///
/// # Errors
/// Fails if the variable is unset/empty or the resulting path does not exist.
#[ inline ]
pub fn resolve() -> Result< Self >
{
let root = Self ::get_env_path( "WORKSPACE_PATH" )?;
if !root.exists()
{
return Err( WorkspaceError::PathNotFound( root ) );
}
Ok( Self { root } )
}
/// Resolves a workspace with an extended fallback chain:
/// cargo workspace → `WORKSPACE_PATH` → git root → `$PRO` → home dir → cwd.
/// Always succeeds because the final fallback (`from_cwd`) is infallible.
#[ must_use ]
#[ inline ]
pub fn resolve_with_extended_fallbacks() -> Self
{
Self ::from_cargo_workspace()
.or_else( |_| Self ::resolve() )
.or_else( |_| Self ::from_git_root() )
.or_else( |_| Self ::from_pro_env() ) .or_else( |_| Self ::from_home_dir() ) .unwrap_or_else( |_| Self ::from_cwd() )
}
/// Older fallback chain kept for backward compatibility; prefer
/// [`Self::resolve_with_extended_fallbacks`].
#[ deprecated(
since = "0.8.0",
note = "use `resolve_with_extended_fallbacks()` for installed CLI app support"
) ]
#[ must_use ]
#[ inline ]
pub fn resolve_or_fallback() -> Self
{
{
Self ::from_cargo_workspace()
.or_else( |_| Self ::resolve() )
.or_else( |_| Self ::from_current_dir() )
.or_else( |_| Self ::from_git_root() )
.unwrap_or_else( |_| Self ::from_cwd() )
}
}
/// Uses the current working directory as the workspace root (no normalization).
///
/// # Errors
/// Fails if the current directory cannot be determined.
#[ inline ]
pub fn from_current_dir() -> Result< Self >
{
let root = env ::current_dir()
.map_err( | e | WorkspaceError::IoError( e.to_string() ) )?;
Ok( Self { root } )
}
/// Walks up from the current directory until a `.git` entry is found and
/// uses that directory as the root.
///
/// # Errors
/// Fails if no ancestor contains `.git` or the cwd cannot be read.
#[ inline ]
pub fn from_git_root() -> Result< Self >
{
let mut current = env ::current_dir()
.map_err( | e | WorkspaceError::IoError( e.to_string() ) )?;
loop
{
if current.join( ".git" ).exists()
{
return Ok( Self { root: current } );
}
match current.parent()
{
Some( parent ) => current = parent.to_path_buf(),
// Reached the filesystem root without finding `.git`.
None => return Err( WorkspaceError::PathNotFound( current ) ),
}
}
}
/// Infallible last-resort fallback: current directory, or `/` if even that
/// cannot be read.
#[ must_use ]
#[ inline ]
pub fn from_cwd() -> Self
{
let root = env ::current_dir().unwrap_or_else( |_| PathBuf ::from( "/" ) );
Self { root }
}
/// Resolves the workspace from the `PRO` environment variable.
///
/// # Errors
/// Fails if `PRO` is unset or points at a non-existent path.
#[ inline ]
pub fn from_pro_env() -> Result< Self >
{
let pro_path = env ::var( "PRO" )
.map_err( |_| WorkspaceError::EnvironmentVariableMissing( "PRO".to_string() ) )?;
let root = PathBuf ::from( pro_path );
if !root.exists()
{
return Err( WorkspaceError::PathNotFound( root ) );
}
let root = Self ::cleanup_path( root );
Ok( Self { root } )
}
/// Resolves the workspace from `HOME`, falling back to `USERPROFILE`
/// (Windows).
///
/// # Errors
/// Fails if neither variable is set or the path does not exist.
#[ inline ]
pub fn from_home_dir() -> Result< Self >
{
let home_path = env ::var( "HOME" )
.or_else( |_| env ::var( "USERPROFILE" ) ) .map_err( |_| WorkspaceError::EnvironmentVariableMissing(
"HOME or USERPROFILE".to_string()
) )?;
let root = PathBuf ::from( home_path );
if !root.exists()
{
return Err( WorkspaceError::PathNotFound( root ) );
}
let root = Self ::cleanup_path( root );
Ok( Self { root } )
}
/// The workspace root directory.
#[ must_use ]
#[ inline ]
pub fn root( &self ) -> &Path
{
&self.root
}
/// Joins `path` onto the workspace root.
#[ inline ]
pub fn join< P: AsRef< Path > >( &self, path: P ) -> PathBuf
{
self.root.join( path )
}
/// `<root>/config`.
#[ must_use ]
#[ inline ]
pub fn config_dir( &self ) -> PathBuf
{
self.root.join( "config" )
}
/// `<root>/data`.
#[ must_use ]
#[ inline ]
pub fn data_dir( &self ) -> PathBuf
{
self.root.join( "data" )
}
/// `<root>/logs`.
#[ must_use ]
#[ inline ]
pub fn logs_dir( &self ) -> PathBuf
{
self.root.join( "logs" )
}
/// `<root>/docs`.
#[ must_use ]
#[ inline ]
pub fn docs_dir( &self ) -> PathBuf
{
self.root.join( "docs" )
}
/// `<root>/tests`.
#[ must_use ]
#[ inline ]
pub fn tests_dir( &self ) -> PathBuf
{
self.root.join( "tests" )
}
/// `<root>/.workspace`.
#[ must_use ]
#[ inline ]
pub fn workspace_dir( &self ) -> PathBuf
{
self.root.join( ".workspace" )
}
/// `<root>/Cargo.toml`.
#[ must_use ]
#[ inline ]
pub fn cargo_toml( &self ) -> PathBuf
{
self.root.join( "Cargo.toml" )
}
/// `<root>/readme.md`.
#[ must_use ]
#[ inline ]
pub fn readme( &self ) -> PathBuf
{
self.root.join( "readme.md" )
}
/// Checks that the workspace root exists and is a directory.
///
/// # Errors
/// `PathNotFound` if the root is missing, `ConfigurationError` if it is not
/// a directory.
#[ inline ]
pub fn validate( &self ) -> Result< () >
{
if !self.root.exists()
{
return Err( WorkspaceError::PathNotFound( self.root.clone() ) );
}
if !self.root.is_dir()
{
return Err( WorkspaceError::ConfigurationError(
format!( "workspace root is not a directory: {}", self.root.display() )
) );
}
Ok( () )
}
/// Returns `true` if `path` is lexically inside the workspace root.
/// NOTE(review): this is a textual `starts_with` check — symlinks and `..`
/// components are not resolved here; use `normalize_path` first for a
/// strict containment check.
#[ inline ]
pub fn is_workspace_file< P: AsRef< Path > >( &self, path: P ) -> bool
{
path.as_ref().starts_with( &self.root )
}
/// Joins `path` to the root and canonicalizes it (so the path must exist).
///
/// # Errors
/// `IoError` if canonicalization fails (e.g. a non-existent path).
#[ inline ]
pub fn normalize_path< P: AsRef< Path > >( &self, path: P ) -> Result< PathBuf >
{
let path = self.join( path );
path.canonicalize()
.map_err( | e | WorkspaceError::IoError( format!( "failed to normalize path {} : {}", path.display(), e ) ) )
}
// Reads `key` from the environment and returns it as an absolute, lexically
// cleaned path; relative values are resolved against the current directory.
fn get_env_path( key: &str ) -> Result< PathBuf >
{
let value = env ::var( key )
.map_err( |_| WorkspaceError::EnvironmentVariableMissing( key.to_string() ) )?;
if value.is_empty()
{
// An empty value is reported as a not-found empty path rather than a
// missing variable.
return Err( WorkspaceError::PathNotFound( PathBuf ::from( "" ) ) );
}
let path = PathBuf ::from( value );
let absolute = if path.is_relative()
{
env ::current_dir()
.map_err( | e | WorkspaceError::IoError( e.to_string() ) )?
.join( path )
}
else
{
path
};
Ok( Self ::cleanup_path( absolute ) )
}
// Lexically normalizes a path without touching the filesystem: drops `.`
// components (keeping a lone `.` so "." stays "."), and resolves `..`
// against already-collected components where possible; unresolvable leading
// `..` components are kept.
fn cleanup_path< P: AsRef< Path > >( path: P ) -> PathBuf
{
let mut normalized = PathBuf::new();
let mut components = path.as_ref().components().peekable();
while let Some( component ) = components.next()
{
use std ::path ::Component;
match component
{
Component ::CurDir =>
{
// Keep `.` only when it is both the first and the last component,
// i.e. the whole path is just ".".
if normalized.as_os_str().is_empty() && components.peek().is_none()
{
normalized.push( "." );
}
}
Component ::ParentDir =>
{
// Cancel the previous component; if there is nothing to cancel
// (e.g. a leading ".."), keep the ".." as-is.
if !normalized.pop()
{
normalized.push( component );
}
}
_ => normalized.push( component ),
}
}
normalized
}
/// Searches the standard locations for a configuration file named `name`:
/// `config/<name>.{toml,yaml,yml,json}` first, then hidden dotfiles
/// `.<name>.{toml,yaml,yml}` at the root. Returns the first existing
/// candidate. Note: there is no `.json` dotfile candidate.
///
/// # Errors
/// `PathNotFound` (pointing at the default TOML candidate) if none exist.
pub fn find_config( &self, name: &str ) -> Result< PathBuf >
{
let candidates = vec!
[
self.config_dir().join( format!( "{name}.toml" ) ),
self.config_dir().join( format!( "{name}.yaml" ) ),
self.config_dir().join( format!( "{name}.yml" ) ),
self.config_dir().join( format!( "{name}.json" ) ),
self.root.join( format!( ".{name}.toml" ) ),
self.root.join( format!( ".{name}.yaml" ) ),
self.root.join( format!( ".{name}.yml" ) ),
];
for candidate in candidates
{
if candidate.exists()
{
return Ok( candidate );
}
}
Err( WorkspaceError::PathNotFound(
self.config_dir().join( format!( "{name}.toml" ) )
) )
}
}
/// Summary of a cargo workspace extracted via `cargo metadata`.
#[ derive( Debug, Clone ) ]
pub struct CargoMetadata
{
/// Root directory of the cargo workspace.
pub workspace_root: PathBuf,
/// Member packages of the workspace.
pub members: Vec< CargoPackage >,
/// `[workspace.metadata.dependencies]` entries (name → version string).
pub workspace_dependencies: HashMap< String, String >,
}
/// A single cargo package within a workspace.
#[ derive( Debug, Clone ) ]
pub struct CargoPackage
{
/// Package name.
pub name: String,
/// Package version, rendered as a string.
pub version: String,
/// Path to the package's `Cargo.toml`.
pub manifest_path: PathBuf,
/// Directory containing the manifest.
pub package_root: PathBuf,
}
/// Merge strategy for layered configuration loading (`load_config_layered`).
#[ cfg( feature = "serde" ) ]
pub trait ConfigMerge: Sized
{
/// Merges `other` into `self`, returning the combined configuration.
/// Precedence between the two is implementation-defined.
#[ must_use ]
fn merge( self, other: Self ) -> Self;
}
/// Deserializer wrapper carrying a workspace reference.
/// NOTE(review): no methods are visible in this file — presumably used by
/// workspace-aware deserialization elsewhere; verify against callers.
#[ cfg( feature = "serde" ) ]
#[ derive( Debug ) ]
pub struct WorkspaceDeserializer< 'ws >
{
/// The workspace paths are resolved against.
pub workspace: &'ws Workspace,
}
/// Newtype around `PathBuf` that (de)serializes transparently as a path.
#[ cfg( feature = "serde" ) ]
#[ derive( Debug, Clone, PartialEq ) ]
pub struct WorkspacePath( pub PathBuf );
#[ cfg( feature = "glob" ) ]
impl Workspace
{
/// Finds files matching the glob `pattern`, interpreted relative to the
/// workspace root.
///
/// # Errors
/// Returns `GlobError` if the pattern is invalid, or for the first matched
/// entry that cannot be read (iteration stops there, as before).
pub fn find_resources( &self, pattern: &str ) -> Result< Vec< PathBuf > >
{
let full_pattern = self.join( pattern );
let pattern_str = full_pattern.to_string_lossy();
// Collect directly into `Result< Vec< _ > >`; this short-circuits on the
// first entry error, exactly like the previous manual push/return loop.
glob( &pattern_str )
.map_err( | e | WorkspaceError::GlobError( e.to_string() ) )?
.map( | entry | entry.map_err( | e | WorkspaceError::GlobError( e.to_string() ) ) )
.collect()
}
}
#[ cfg( feature = "secrets" ) ]
impl Workspace
{
/// `<root>/secret` — the directory secret files are read from.
#[ must_use ]
pub fn secret_dir( &self ) -> PathBuf
{
self.root.join( "secret" )
}
/// Path of the secret file `name` inside the secret directory.
#[ must_use ]
pub fn secret_file( &self, name: &str ) -> PathBuf
{
self.secret_dir().join( name )
}
/// Loads dotenv-style `KEY=value` secrets from `filename`, trying the local
/// workspace, then `$PRO`, then the home directory (see
/// `try_load_secrets_with_fallback`). Warns if `filename` looks like a path.
///
/// # Errors
/// Fails if the file is found but unreadable/invalid, or not found anywhere.
pub fn load_secrets_from_file( &self, filename: &str ) -> Result< HashMap< String, String > >
{
Self::warn_if_path_like( filename );
self.try_load_secrets_with_fallback( filename )
}
/// Looks up `key_name` in the secrets file `filename`, falling back to the
/// environment variable of the same name.
///
/// # Errors
/// Fails if the key is in neither the file nor the environment.
pub fn load_secret_key( &self, key_name: &str, filename: &str ) -> Result< String >
{
let secret_file_path = self.secret_file( filename );
if let Ok( secrets ) = self.load_secrets_from_file( filename )
{
if let Some( value ) = secrets.get( key_name )
{
return Ok( value.clone() );
}
}
env ::var( key_name )
.map_err( |_| WorkspaceError::ConfigurationError(
format!(
"{} not found in secrets file '{}' (resolved to: {}) or environment variables",
key_name,
filename,
secret_file_path.display()
)
))
}
// Parses dotenv-style content: skips blank lines and `#` comments, strips an
// optional `export ` prefix, splits on the first `=`, and removes one pair of
// matching single or double quotes around the value.
fn parse_key_value_file( content: &str ) -> HashMap< String, String >
{
let mut secrets = HashMap ::new();
for line in content.lines()
{
let line = line.trim();
if line.is_empty() || line.starts_with( '#' )
{
continue;
}
let processed_line = if line.starts_with( "export " )
{
line.strip_prefix( "export " ).unwrap_or( line ).trim()
}
else
{
line
};
if let Some( ( key, value ) ) = processed_line.split_once( '=' )
{
let key = key.trim();
let value = value.trim();
let value = if ( value.starts_with( '"' ) && value.ends_with( '"' ) ) ||
( value.starts_with( '\'' ) && value.ends_with( '\'' ) )
{
&value[ 1..value.len() - 1 ]
}
else
{
value
};
secrets.insert( key.to_string(), value.to_string() );
}
}
secrets
}
// Prints a warning when `filename` contains path separators, since the
// file-name-based API resolves names inside `secret/` only.
// FIX: the message previously printed the literal text '(unknown)' instead
// of the offending filename.
fn warn_if_path_like( filename: &str )
{
if filename.contains( '/' ) || filename.contains( '\\' )
{
eprintln!(
"⚠️ Warning: '{filename}' contains path separators. Use load_secrets_from_path() for paths."
);
}
}
// Tries the local workspace `secret/` dir, then `$PRO`'s, then the home
// directory's, deduplicating by canonical path, and returns the parsed
// secrets from the first existing file. Builds a detailed multi-location
// error message when nothing is found.
fn try_load_secrets_with_fallback( &self, filename: &str ) -> Result< HashMap< String, String > >
{
let mut tried_paths = Vec ::new();
let mut canonical_paths = std ::collections ::HashSet ::new();
let local_path = self.secret_file( filename );
tried_paths.push( format!( " - {} (local workspace)", local_path.display() ) );
if let Some( canonical ) = Self::try_canonicalize( &local_path )
{
canonical_paths.insert( canonical );
if local_path.exists()
{
match Self::read_secret_file_validated( &local_path )
{
Ok( content ) => return Ok( Self::parse_key_value_file( &content ) ),
Err( e ) => return Err( e ),
}
}
}
if let Ok( pro_env ) = env::var( "PRO" )
{
if !pro_env.trim().is_empty()
{
if let Ok( pro_ws ) = Workspace::from_pro_env()
{
let pro_path = pro_ws.secret_file( filename );
if let Some( canonical ) = Self::try_canonicalize( &pro_path )
{
// Skip when this resolves to the same file as one already tried.
if !canonical_paths.contains( &canonical )
{
canonical_paths.insert( canonical );
tried_paths.push( format!( " - {} ($PRO workspace)", pro_path.display() ) );
if pro_path.exists()
{
match Self::read_secret_file_validated( &pro_path )
{
Ok( content ) => return Ok( Self::parse_key_value_file( &content ) ),
Err( e ) => return Err( e ),
}
}
}
}
}
}
}
#[ cfg( not( target_os = "windows" ) ) ]
let home_env_var = "HOME";
#[ cfg( target_os = "windows" ) ]
let home_env_var = "USERPROFILE";
if let Ok( home_env ) = env::var( home_env_var )
{
if !home_env.trim().is_empty()
{
if let Ok( home_ws ) = Workspace::from_home_dir()
{
let home_path = home_ws.secret_file( filename );
if let Some( canonical ) = Self::try_canonicalize( &home_path )
{
if !canonical_paths.contains( &canonical )
{
canonical_paths.insert( canonical );
tried_paths.push( format!( " - {} ($HOME directory)", home_path.display() ) );
if home_path.exists()
{
match Self::read_secret_file_validated( &home_path )
{
Ok( content ) => return Ok( Self::parse_key_value_file( &content ) ),
Err( e ) => return Err( e ),
}
}
}
}
}
}
}
// Nothing found: report every location tried plus the files that do exist.
let mut error_msg = format!(
"Secrets file '{}' not found in any location.\n\nTried:\n{}",
filename,
tried_paths.join( "\n" )
);
if let Ok( available_files ) = self.list_secrets_files()
{
if !available_files.is_empty()
{
error_msg.push_str( "\n\nAvailable files: " );
error_msg.push_str( &available_files.join( ", " ) );
}
}
error_msg.push_str( "\n\nCreate secret file in one of the above locations." );
Err( WorkspaceError::ConfigurationError( error_msg ) )
}
// Canonicalizes `path`, returning `None` if it (or a parent) does not exist.
fn try_canonicalize( path: &Path ) -> Option< PathBuf >
{
path.canonicalize().ok()
}
// Reads a secrets file after checking it is a regular file no larger than
// 10 MiB; errors carry the absolute path for diagnosis.
fn read_secret_file_validated( path: &Path ) -> Result< String >
{
let metadata = fs::metadata( path )
.map_err( | e | WorkspaceError::IoError( format!( "Failed to read secrets file\n Absolute path: {}\n Error: {}", path.display(), e ) ) )?;
if !metadata.is_file()
{
// NOTE(review): `fs::metadata` follows symlinks, so the symlink branch
// below is likely unreachable; `symlink_metadata` would be needed to
// detect links — confirm intent.
let file_type = if metadata.is_dir() { "directory" }
else if metadata.file_type().is_symlink() { "symbolic link" }
else { "special file (device, socket, or pipe)" };
return Err( WorkspaceError::ConfigurationError( format!(
"Secrets file is a {}, not a regular file\n Path: {}",
file_type, path.display()
) ) );
}
// Cap protects against accidentally pointing at a huge file.
const MAX_SIZE: u64 = 10 * 1024 * 1024;
if metadata.len() > MAX_SIZE
{
return Err( WorkspaceError::ConfigurationError( format!(
"Secrets file too large ({} bytes, max {} bytes)\n Path: {}\n Hint: Secret files should be small key-value files",
metadata.len(), MAX_SIZE, path.display()
) ) );
}
fs::read_to_string( path )
.map_err( | e | WorkspaceError::IoError( format!( "Failed to read secrets file\n Absolute path: {}\n Error: {}", path.display(), e ) ) )
}
/// Lists the file names (sorted) inside the local `secret/` directory;
/// returns an empty list if the directory does not exist.
///
/// # Errors
/// Fails if the directory or an entry cannot be read.
pub fn list_secrets_files( &self ) -> Result< Vec< String > >
{
let secret_dir = self.secret_dir();
if !secret_dir.exists()
{
return Ok( Vec ::new() );
}
let entries = fs ::read_dir( &secret_dir )
.map_err( | e | WorkspaceError::IoError( format!( "failed to read secrets directory {} : {}", secret_dir.display(), e ) ) )?;
let mut files = Vec ::new();
for entry in entries
{
let entry = entry
.map_err( | e | WorkspaceError::IoError( format!( "failed to read directory entry: {e}" ) ) )?;
let path = entry.path();
if path.is_file()
{
if let Some( filename ) = path.file_name()
{
if let Some( filename_str ) = filename.to_str()
{
files.push( filename_str.to_string() );
}
}
}
}
files.sort();
Ok( files )
}
/// Whether `secret_file_name` exists in the local `secret/` directory.
#[ must_use ]
pub fn secrets_file_exists( &self, secret_file_name: &str ) -> bool
{
self.secret_file( secret_file_name ).exists()
}
/// Resolves `secret_file_name` to its path inside `secret/` (no existence check).
#[ must_use ]
pub fn resolve_secrets_path( &self, secret_file_name: &str ) -> PathBuf
{
self.secret_file( secret_file_name )
}
/// Loads secrets from a path relative to the workspace root (no fallback chain).
///
/// # Errors
/// Fails if the file is missing, not a regular file, too large, or unreadable.
pub fn load_secrets_from_path( &self, relative_path: &str ) -> Result< HashMap< String, String > >
{
let secret_file = self.join( relative_path );
let content = Self::read_secret_file_validated( &secret_file )?;
Ok( Self::parse_key_value_file( &content ) )
}
/// Loads secrets from an absolute path (no fallback chain).
///
/// # Errors
/// Fails if the file is missing, not a regular file, too large, or unreadable.
pub fn load_secrets_from_absolute_path( &self, absolute_path: &Path ) -> Result< HashMap< String, String > >
{
let content = Self::read_secret_file_validated( absolute_path )?;
Ok( Self::parse_key_value_file( &content ) )
}
/// Same as [`Self::load_secrets_from_file`] but prints step-by-step
/// diagnostics (resolved path, available files, loaded key names) to stdout.
///
/// # Errors
/// Propagates the same errors as `load_secrets_from_file`.
pub fn load_secrets_with_debug( &self, secret_file_name: &str ) -> Result< HashMap< String, String > >
{
println!( "🔍 Debug: Loading secrets with detailed information" );
println!( " Parameter: '{secret_file_name}'" );
if secret_file_name.contains( '/' ) || secret_file_name.contains( '\\' )
{
println!( " ⚠️ Parameter contains path separators - consider using load_secrets_from_path()" );
}
let secret_file = self.secret_file( secret_file_name );
println!( " Resolved path: {}", secret_file.display() );
println!( " File exists: {}", secret_file.exists() );
match self.list_secrets_files()
{
Ok( files ) =>
{
if files.is_empty()
{
println!( " Available files: none (secrets directory: {})", self.secret_dir().display() );
}
else
{
println!( " Available files: {}", files.join( ", " ) );
}
}
Err( e ) => println!( " Could not list available files: {e}" ),
}
match self.load_secrets_from_file( secret_file_name )
{
Ok( secrets ) =>
{
println!( " ✅ Successfully loaded {} secrets", secrets.len() );
// Only key names are printed, never the secret values.
for key in secrets.keys()
{
println!( " - {key}" );
}
Ok( secrets )
}
Err( e ) =>
{
println!( " ❌ Failed to load secrets: {e}" );
Err( e )
}
}
}
}
/// Internal conversion from plain secret values into `secrecy`-wrapped ones.
#[ cfg( feature = "secure" ) ]
trait AsSecure
{
/// The secrecy-wrapped counterpart of the implementing type.
type Secure;
/// Consumes the plain value and wraps it.
fn into_secure( self ) -> Self::Secure;
}
/// `String` → `SecretString`.
#[ cfg( feature = "secure" ) ]
impl AsSecure for String
{
type Secure = SecretString;
fn into_secure( self ) -> Self::Secure
{
SecretString::new( self )
}
}
/// Wraps every value of a secrets map, keeping keys in plain text.
#[ cfg( feature = "secure" ) ]
impl AsSecure for HashMap< String, String >
{
type Secure = HashMap< String, SecretString >;
fn into_secure( self ) -> Self::Secure
{
self.into_iter()
.map( | ( key, value ) | ( key, SecretString::new( value ) ) )
.collect()
}
}
#[ cfg( feature = "secure" ) ]
impl Workspace
{
/// Like `load_secrets_from_file`, but values are wrapped in `SecretString`.
///
/// # Errors
/// Propagates errors from `load_secrets_from_file`.
pub fn load_secrets_secure( &self, filename: &str ) -> Result< HashMap< String, SecretString > >
{
self.load_secrets_from_file( filename ).map( AsSecure::into_secure )
}
/// Like `load_secret_key`, but the value is wrapped in `SecretString`.
///
/// # Errors
/// Propagates errors from `load_secret_key`.
pub fn load_secret_key_secure( &self, key_name: &str, filename: &str ) -> Result< SecretString >
{
self.load_secret_key( key_name, filename ).map( AsSecure::into_secure )
}
/// Reads environment variable `key` as a `SecretString`, if set.
#[ must_use ]
pub fn env_secret( &self, key: &str ) -> Option< SecretString >
{
env ::var( key ).ok().map( SecretString ::new )
}
/// Applies basic strength checks to a plain secret value.
///
/// # Errors
/// `SecretValidationError` if the secret is shorter than 8 characters or
/// matches a known-weak pattern.
pub fn validate_secret( &self, secret: &str ) -> Result< () >
{
if secret.len() < 8
{
return Err( WorkspaceError::SecretValidationError(
"secret must be at least 8 characters long".to_string()
) );
}
// NOTE(review): "123", "secret" and "test" are shorter than 8 characters,
// so those comparisons are unreachable past the length check above; only
// "password" (exactly 8) can trigger here. Confirm whether a substring or
// case-insensitive check was intended.
if secret == "123" || secret == "password" || secret == "secret" || secret.to_lowercase() == "test"
{
return Err( WorkspaceError::SecretValidationError(
"secret is too weak or uses common patterns".to_string()
) );
}
let has_letter = secret.chars().any( char ::is_alphabetic );
let has_number = secret.chars().any( char ::is_numeric );
let has_special = secret.chars().any( | c | !c.is_alphanumeric() );
// NOTE(review): every character is either alphanumeric or not, so for any
// non-empty secret at least one of these flags is true and this branch
// never fires. The error message says "or", but `&&` may have been
// intended to require all three classes — confirm before changing.
if !( has_letter || has_number || has_special )
{
return Err( WorkspaceError::SecretValidationError(
"secret should contain letters, numbers, or special characters".to_string()
) );
}
Ok( () )
}
/// Loads `config_file` (relative to the root) and replaces `${KEY}`
/// placeholders with values from `secret_file`.
///
/// # Errors
/// Fails if either file cannot be loaded, or if any `${...}` placeholder
/// remains unresolved after substitution.
pub fn load_config_with_secret_injection( &self, config_file: &str, secret_file: &str ) -> Result< String >
{
let config_path = self.join( config_file );
let config_content = Self::read_file_to_string( &config_path )?;
let secrets = self.load_secrets_secure( secret_file )?;
let mut result = config_content;
for ( key, secret_value ) in secrets
{
let placeholder = format!( "${{{key}}}" );
let replacement = secret_value.expose_secret();
result = result.replace( &placeholder, replacement );
}
// Any remaining "${" means a placeholder had no matching secret.
if result.contains( "${" )
{
return Err( WorkspaceError::SecretInjectionError(
"configuration contains unresolved placeholders - check secret file completeness".to_string()
) );
}
Ok( result )
}
/// Injects every secret from `secret_file` into `config` via
/// [`SecretInjectable::inject_secret`], then validates the result.
///
/// # Errors
/// Propagates loading, injection, and validation errors.
pub fn load_config_with_secrets< T: SecretInjectable >( &self, mut config: T, secret_file: &str ) -> Result< T >
{
let secrets = self.load_secrets_secure( secret_file )?;
for ( key, secret_value ) in secrets
{
config.inject_secret( &key, secret_value.expose_secret().clone() )?;
}
config.validate_secrets()?;
Ok( config )
}
/// Secure variant of [`Self::load_secrets_from_path`].
///
/// # Errors
/// Propagates errors from `load_secrets_from_path`.
pub fn load_secrets_from_path_secure( &self, relative_path: &str ) -> Result< HashMap< String, SecretString > >
{
self.load_secrets_from_path( relative_path ).map( AsSecure::into_secure )
}
/// Secure variant of [`Self::load_secrets_from_absolute_path`].
///
/// # Errors
/// Propagates errors from `load_secrets_from_absolute_path`.
pub fn load_secrets_from_absolute_path_secure( &self, absolute_path: &Path ) -> Result< HashMap< String, SecretString > >
{
self.load_secrets_from_absolute_path( absolute_path ).map( AsSecure::into_secure )
}
/// Secure variant of [`Self::load_secrets_with_debug`].
///
/// # Errors
/// Propagates errors from `load_secrets_with_debug`.
pub fn load_secrets_with_debug_secure( &self, secret_file_name: &str ) -> Result< HashMap< String, SecretString > >
{
self.load_secrets_with_debug( secret_file_name ).map( AsSecure::into_secure )
}
}
impl Workspace
{
/// Resolves the workspace by walking up from the current directory to the
/// nearest `Cargo.toml` containing a `[workspace]` table.
///
/// # Errors
/// Fails if no such manifest is found or the search cannot read a file.
pub fn from_cargo_workspace() -> Result< Self >
{
let workspace_root = Self ::find_cargo_workspace()?;
let workspace_root = Self ::cleanup_path( workspace_root );
Ok( Self { root: workspace_root } )
}
/// Creates a workspace from a manifest path: either a `Cargo.toml` file
/// (its parent directory becomes the root) or a directory used as-is.
///
/// # Errors
/// Fails if the path does not exist or has no parent directory.
pub fn from_cargo_manifest< P: AsRef< Path > >( manifest_path: P ) -> Result< Self >
{
let manifest_path = manifest_path.as_ref();
if !manifest_path.exists()
{
return Err( WorkspaceError::PathNotFound( manifest_path.to_path_buf() ) );
}
let workspace_root = if manifest_path.file_name() == Some( std ::ffi ::OsStr ::new( "Cargo.toml" ) )
{
manifest_path.parent()
.ok_or_else( || WorkspaceError::ConfigurationError( "invalid manifest path".to_string() ) )?
.to_path_buf()
}
else
{
manifest_path.to_path_buf()
};
let workspace_root = Self ::cleanup_path( workspace_root );
Ok( Self { root: workspace_root } )
}
/// Runs `cargo metadata` for this workspace and extracts member packages
/// and `[workspace.metadata.dependencies]` string entries.
///
/// # Errors
/// Fails if `Cargo.toml` is missing or the `cargo metadata` call fails.
pub fn cargo_metadata( &self ) -> Result< CargoMetadata >
{
let cargo_toml = self.cargo_toml();
if !cargo_toml.exists()
{
return Err( WorkspaceError::CargoError( "not a cargo workspace".to_string() ) );
}
let metadata = cargo_metadata ::MetadataCommand ::new()
.manifest_path( &cargo_toml )
.exec()
.map_err( | e | WorkspaceError::CargoError( e.to_string() ) )?;
let mut members = Vec ::new();
let mut workspace_dependencies = HashMap ::new();
for package in metadata.workspace_packages()
{
members.push( CargoPackage {
name: package.name.clone(),
version: package.version.to_string(),
manifest_path: package.manifest_path.clone().into(),
package_root: package.manifest_path
.parent()
.unwrap_or( &package.manifest_path )
.into(),
} );
}
// Only string-valued entries are kept; tables/arrays are ignored.
if let Some( deps ) = metadata.workspace_metadata.get( "dependencies" )
{
if let Some( deps_map ) = deps.as_object()
{
for ( name, version ) in deps_map
{
if let Some( version_str ) = version.as_str()
{
workspace_dependencies.insert( name.clone(), version_str.to_string() );
}
}
}
}
Ok( CargoMetadata {
workspace_root: metadata.workspace_root.into(),
members,
workspace_dependencies,
} )
}
/// Whether the root's `Cargo.toml` exists and declares a `[workspace]` table.
/// Read/parse failures are treated as "not a workspace".
#[ must_use ]
pub fn is_cargo_workspace( &self ) -> bool
{
let cargo_toml = self.cargo_toml();
if !cargo_toml.exists()
{
return false;
}
if let Ok( content ) = std ::fs ::read_to_string( &cargo_toml )
{
if let Ok( parsed ) = toml ::from_str :: < toml ::Value >( &content )
{
return parsed.get( "workspace" ).is_some();
}
}
false
}
/// Package root directories of all workspace members.
///
/// # Errors
/// Propagates errors from [`Self::cargo_metadata`].
pub fn workspace_members( &self ) -> Result< Vec< PathBuf > >
{
let metadata = self.cargo_metadata()?;
Ok( metadata.members.into_iter().map( | pkg | pkg.package_root ).collect() )
}
// Walks from the current directory upward, returning the first directory
// whose `Cargo.toml` contains a `[workspace]` table. A plain package
// manifest without `[workspace]` is skipped and the walk continues upward.
fn find_cargo_workspace() -> Result< PathBuf >
{
let mut current = std ::env ::current_dir()
.map_err( | e | WorkspaceError::IoError( e.to_string() ) )?;
loop
{
let manifest = current.join( "Cargo.toml" );
if manifest.exists()
{
let content = std ::fs ::read_to_string( &manifest )
.map_err( | e | WorkspaceError::IoError( e.to_string() ) )?;
let parsed: toml ::Value = toml ::from_str( &content )
.map_err( | e | WorkspaceError::TomlError( e.to_string() ) )?;
if parsed.get( "workspace" ).is_some()
{
return Ok( current );
}
// A dead no-op branch that inspected `package.workspace` and did
// nothing was removed here.
}
match current.parent()
{
Some( parent ) => current = parent.to_path_buf(),
None => return Err( WorkspaceError::PathNotFound( current ) ),
}
}
}
}
#[ cfg( any( feature = "serde", feature = "validation", feature = "secure" ) ) ]
impl Workspace
{
// Reads a file to string, wrapping the error with the path for context.
fn read_file_to_string< P: AsRef< Path > >( path: P ) -> Result< String >
{
let path = path.as_ref();
std ::fs ::read_to_string( path )
.map_err( | e | WorkspaceError::IoError(
format!( "failed to read {}: {}", path.display(), e )
) )
}
// Derives a config format name from the file extension; files without a
// usable extension default to "toml".
fn detect_format< P: AsRef< Path > >( path: P ) -> String
{
path.as_ref()
.extension()
.and_then( | ext | ext.to_str() )
.unwrap_or( "toml" )
.to_string()
}
}
#[ cfg( feature = "serde" ) ]
impl Workspace
{
// Deserializes `content` as "toml", "json", or "yaml"/"yml" into T.
fn parse_content< T >( content: &str, format: &str ) -> Result< T >
where
T: serde ::de ::DeserializeOwned,
{
match format
{
"toml" => toml ::from_str( content )
.map_err( | e | WorkspaceError::SerdeError( format!( "toml error: {e}" ) ) ),
"json" => serde_json ::from_str( content )
.map_err( | e | WorkspaceError::SerdeError( format!( "json error: {e}" ) ) ),
"yaml" | "yml" => serde_yaml ::from_str( content )
.map_err( | e | WorkspaceError::SerdeError( format!( "yaml error: {e}" ) ) ),
_ => Err( WorkspaceError::ConfigurationError(
format!( "unsupported format: {format}" )
) ),
}
}
// Serializes `config` into the requested textual format.
fn serialize_content< T >( config: &T, format: &str ) -> Result< String >
where
T: serde ::Serialize,
{
match format
{
"toml" => toml ::to_string_pretty( config )
.map_err( | e | WorkspaceError::SerdeError( format!( "toml error: {e}" ) ) ),
"json" => serde_json ::to_string_pretty( config )
.map_err( | e | WorkspaceError::SerdeError( format!( "json error: {e}" ) ) ),
"yaml" | "yml" => serde_yaml ::to_string( config )
.map_err( | e | WorkspaceError::SerdeError( format!( "yaml error: {e}" ) ) ),
_ => Err( WorkspaceError::ConfigurationError(
format!( "unsupported format: {format}" )
) ),
}
}
/// Locates a config named `name` (see [`Self::find_config`]) and
/// deserializes it.
///
/// # Errors
/// Fails if no config file is found or it cannot be parsed.
pub fn load_config< T >( &self, name: &str ) -> Result< T >
where
T: serde ::de ::DeserializeOwned,
{
let config_path = self.find_config( name )?;
self.load_config_from( config_path )
}
/// Deserializes the config at `path`; format inferred from the extension.
///
/// # Errors
/// Fails if the file cannot be read or parsed.
pub fn load_config_from< T, P >( &self, path: P ) -> Result< T >
where
T: serde ::de ::DeserializeOwned,
P: AsRef< Path >,
{
let path = path.as_ref();
let content = Self::read_file_to_string( path )?;
let format = Self::detect_format( path );
Self::parse_content( &content, &format )
}
/// Saves `config` under `name`, reusing an existing config file if one is
/// found, otherwise defaulting to `config/<name>.toml`.
///
/// # Errors
/// Fails if serialization or the write fails.
pub fn save_config< T >( &self, name: &str, config: &T ) -> Result< () >
where
T: serde ::Serialize,
{
// The or_else always succeeds, so path selection itself never errors.
let config_path = self.find_config( name )
.or_else( |_| Ok( self.config_dir().join( format!( "{name}.toml" ) ) ) )?;
self.save_config_to( config_path, config )
}
/// Serializes `config` and writes it to `path` atomically (write to a
/// `.<format>.tmp` sibling, then rename), creating parent directories.
///
/// # Errors
/// Fails on serialization, directory creation, write, or rename errors.
pub fn save_config_to< T, P >( &self, path: P, config: &T ) -> Result< () >
where
T: serde ::Serialize,
P: AsRef< Path >,
{
let path = path.as_ref();
let format = Self::detect_format( path );
let content = Self::serialize_content( config, &format )?;
if let Some( parent ) = path.parent()
{
std ::fs ::create_dir_all( parent )
.map_err( | e | WorkspaceError::IoError( format!( "failed to create directory {}: {}", parent.display(), e ) ) )?;
}
// Temp-file-then-rename so readers never observe a half-written config.
let temp_path = path.with_extension( format!( "{format}.tmp" ) );
std ::fs ::write( &temp_path, content )
.map_err( | e | WorkspaceError::IoError( format!( "failed to write temporary file {}: {}", temp_path.display(), e ) ) )?;
std ::fs ::rename( &temp_path, path )
.map_err( | e | WorkspaceError::IoError( format!( "failed to rename {} to {}: {}", temp_path.display(), path.display(), e ) ) )?;
Ok( () )
}
/// Loads and merges several configs in order via [`ConfigMerge::merge`];
/// later names are merged onto the accumulated result.
/// NOTE(review): configs that fail to load for ANY reason (including parse
/// errors) are silently skipped — confirm this is intended.
///
/// # Errors
/// Fails only if none of the named configs could be loaded.
pub fn load_config_layered< T >( &self, names: &[ &str ] ) -> Result< T >
where
T: serde ::de ::DeserializeOwned + ConfigMerge,
{
let mut result: Option< T > = None;
for name in names
{
if let Ok( config ) = self.load_config :: < T >( name )
{
result = Some( match result
{
Some( existing ) => existing.merge( config ),
None => config,
} );
}
}
result.ok_or_else( || WorkspaceError::ConfigurationError( "no configuration files found".to_string() ) )
}
/// Loads config `name`, deep-merges `updates` over it (via a JSON
/// round-trip), saves the result, and returns the merged config.
///
/// # Errors
/// Fails on load, (de)serialization, merge, or save errors.
pub fn update_config< T, U >( &self, name: &str, updates: U ) -> Result< T >
where
T: serde ::de ::DeserializeOwned + serde ::Serialize,
U: serde ::Serialize,
{
let existing: T = self.load_config( name )?;
let existing_json = serde_json ::to_value( &existing )
.map_err( | e | WorkspaceError::SerdeError( format!( "failed to serialize existing config: {e}" ) ) )?;
let updates_json = serde_json ::to_value( updates )
.map_err( | e | WorkspaceError::SerdeError( format!( "failed to serialize updates: {e}" ) ) )?;
let merged = Self ::merge_json_objects( existing_json, updates_json )?;
let merged_config: T = serde_json ::from_value( merged )
.map_err( | e | WorkspaceError::SerdeError( format!( "failed to deserialize merged config: {e}" ) ) )?;
self.save_config( name, &merged_config )?;
Ok( merged_config )
}
// Recursively deep-merges two JSON values: objects merge key-by-key
// (nested objects recurse), everything else is replaced by `updates`.
fn merge_json_objects( mut base: serde_json ::Value, updates: serde_json ::Value ) -> Result< serde_json ::Value >
{
match ( &mut base, updates )
{
( serde_json ::Value ::Object( ref mut base_map ), serde_json ::Value ::Object( updates_map ) ) =>
{
for ( key, value ) in updates_map
{
match base_map.get_mut( &key )
{
Some( existing ) if existing.is_object() && value.is_object() =>
{
*existing = Self ::merge_json_objects( existing.clone(), value )?;
}
_ =>
{
base_map.insert( key, value );
}
}
}
}
// Non-object on either side: the update wins wholesale.
( _, updates_value ) =>
{
base = updates_value;
}
}
Ok( base )
}
}
// `WorkspacePath` serializes transparently as its inner `PathBuf`.
#[ cfg( feature = "serde" ) ]
impl serde ::Serialize for WorkspacePath
{
fn serialize< S >( &self, serializer: S ) -> core ::result ::Result< S ::Ok, S ::Error >
where
S: serde ::Serializer,
{
self.0.serialize( serializer )
}
}
// ...and deserializes from any representation `PathBuf` accepts.
#[ cfg( feature = "serde" ) ]
impl< 'de > serde ::Deserialize< 'de > for WorkspacePath
{
fn deserialize< D >( deserializer: D ) -> core ::result ::Result< Self, D ::Error >
where
D: serde ::Deserializer< 'de >,
{
let path = PathBuf ::deserialize( deserializer )?;
Ok( WorkspacePath( path ) )
}
}
#[ cfg( feature = "validation" ) ]
impl Workspace
{
// Parses config `content` in the given format into a `serde_json::Value`
// (TOML/YAML are converted through their own value types) so a single
// JSON-schema validator can handle all formats.
fn parse_to_json( content: &str, format: &str ) -> Result< serde_json ::Value >
{
match format
{
"toml" =>
{
let toml_value: toml ::Value = toml ::from_str( content )
.map_err( | e | WorkspaceError::SerdeError( format!( "toml parse: {e}" ) ) )?;
serde_json ::to_value( toml_value )
.map_err( | e | WorkspaceError::SerdeError( format!( "toml→json: {e}" ) ) )
}
"json" => serde_json ::from_str( content )
.map_err( | e | WorkspaceError::SerdeError( format!( "json parse: {e}" ) ) ),
"yaml" | "yml" =>
{
let yaml_value: serde_yaml ::Value = serde_yaml ::from_str( content )
.map_err( | e | WorkspaceError::SerdeError( format!( "yaml parse: {e}" ) ) )?;
serde_json ::to_value( yaml_value )
.map_err( | e | WorkspaceError::SerdeError( format!( "yaml→json: {e}" ) ) )
}
_ => Err( WorkspaceError::ConfigurationError(
format!( "unsupported format: {format}" )
) ),
}
}
// Validates `json_value` against the compiled schema, collecting all
// violations (with their instance paths) into a single error message.
fn validate_against_schema(
json_value: &serde_json ::Value,
schema: &Validator
) -> Result< () >
{
if let Err( validation_errors ) = schema.validate( json_value )
{
let errors: Vec< String > = validation_errors
.map( | error | format!( "{}: {}", error.instance_path, error ) )
.collect();
return Err( WorkspaceError::ValidationError(
format!( "validation failed: {}", errors.join( "; " ) )
) );
}
Ok( () )
}
/// Loads config `name`, validating it against the JSON schema derived from
/// `T` via `schemars` before deserializing.
///
/// # Errors
/// Fails on schema generation/compilation, lookup, parse, or validation errors.
pub fn load_config_with_validation< T >( &self, name: &str ) -> Result< T >
where
T: serde ::de ::DeserializeOwned + JsonSchema,
{
let schema = schemars ::schema_for!( T );
let schema_json = serde_json ::to_value( &schema )
.map_err( | e | WorkspaceError::ValidationError( format!( "failed to serialize schema: {e}" ) ) )?;
let compiled_schema = Validator ::new( &schema_json )
.map_err( | e | WorkspaceError::ValidationError( format!( "failed to compile schema: {e}" ) ) )?;
self.load_config_with_schema( name, &compiled_schema )
}
/// Loads config `name` and validates it against a caller-provided schema.
///
/// # Errors
/// Fails on lookup, parse, or validation errors.
pub fn load_config_with_schema< T >( &self, name: &str, schema: &Validator ) -> Result< T >
where
T: serde ::de ::DeserializeOwned,
{
let config_path = self.find_config( name )?;
self.load_config_from_with_schema( config_path, schema )
}
/// Loads the config at `path`, validates it against `schema`, then
/// deserializes it into `T`.
///
/// # Errors
/// Fails on read, parse, validation, or deserialization errors.
pub fn load_config_from_with_schema< T, P >( &self, path: P, schema: &Validator ) -> Result< T >
where
T: serde ::de ::DeserializeOwned,
P: AsRef< Path >,
{
let path = path.as_ref();
let content = Self::read_file_to_string( path )?;
let format = Self::detect_format( path );
let json_value = Self::parse_to_json( &content, &format )?;
Self::validate_against_schema( &json_value, schema )?;
serde_json ::from_value( json_value )
.map_err( | e | WorkspaceError::SerdeError( format!( "deserialization error: {e}" ) ) )
}
/// Validates raw config `content` (in `format`) against `schema` without
/// deserializing it into a concrete type.
///
/// # Errors
/// Fails on parse or validation errors.
pub fn validate_config_content( content: &str, schema: &Validator, format: &str ) -> Result< () >
{
let json_value = Self::parse_to_json( content, format )?;
Self::validate_against_schema( &json_value, schema )
}
}
/// Test-support helpers for creating disposable workspaces ("testing" feature).
#[ cfg( feature = "testing" ) ]
pub mod testing
{
use super ::Workspace;
use tempfile ::TempDir;
/// Creates a temporary directory and resolves a workspace rooted in it.
/// Keep the returned `TempDir` alive for the duration of the test: dropping
/// it deletes the directory.
/// NOTE(review): this mutates the process-global `WORKSPACE_PATH`, so tests
/// using this helper can race when run in parallel — confirm tests are
/// serialized.
///
/// # Panics
/// Panics if the temp directory cannot be created or resolution fails.
#[ must_use ]
#[ inline ]
pub fn create_test_workspace() -> ( TempDir, Workspace )
{
let temp_dir = TempDir ::new().unwrap_or_else( | e | panic!( "failed to create temp directory: {e}" ) );
std ::env ::set_var( "WORKSPACE_PATH", temp_dir.path() );
let workspace = Workspace ::resolve().unwrap_or_else( | e | panic!( "failed to resolve test workspace: {e}" ) );
( temp_dir, workspace )
}
/// Like [`create_test_workspace`] but also creates the standard directory
/// layout (config/data/logs/docs/tests/.workspace, plus secret/ when the
/// "secrets" feature is on).
///
/// # Panics
/// Panics if any directory cannot be created.
#[ must_use ]
#[ inline ]
pub fn create_test_workspace_with_structure() -> ( TempDir, Workspace )
{
let ( temp_dir, workspace ) = create_test_workspace();
let base_dirs = vec!
[
workspace.config_dir(),
workspace.data_dir(),
workspace.logs_dir(),
workspace.docs_dir(),
workspace.tests_dir(),
workspace.workspace_dir(),
];
#[ cfg( feature = "secrets" ) ]
let all_dirs = {
let mut dirs = base_dirs;
dirs.push( workspace.secret_dir() );
dirs
};
#[ cfg( not( feature = "secrets" ) ) ]
let all_dirs = base_dirs;
for dir in all_dirs
{
std ::fs ::create_dir_all( &dir )
.unwrap_or_else( | e | panic!( "failed to create directory {} : {}", dir.display(), e ) );
}
( temp_dir, workspace )
}
}
/// Convenience entry point: obtains a workspace via the extended fallback
/// chain ([`Workspace::resolve_with_extended_fallbacks`]).
///
/// # Errors
/// Currently never fails — the final fallback is infallible; the `Result`
/// return type is kept for forward compatibility.
#[ inline ]
pub fn workspace() -> Result< Workspace >
{
let resolved = Workspace ::resolve_with_extended_fallbacks();
Ok( resolved )
}