use crate::error::{Result, SammError};
use crate::performance::profiling;
use oxiarc_archive::{ZipCompressionLevel, ZipReader, ZipWriter};
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
use tokio::fs as async_fs;
/// Summary of a package import: models grouped by namespace plus counters.
#[derive(Debug, Clone)]
pub struct ImportResult {
    /// Discovered models keyed by their namespace string.
    pub namespaces: HashMap<String, Vec<ModelInfo>>,
    /// Total number of `.ttl` models found in the package.
    pub total_models: usize,
    /// Models counted as skipped because the target file already existed
    /// on disk and `force` was not set.
    pub skipped: usize,
}
/// Metadata for a single `.ttl` model file found inside a package.
#[derive(Debug, Clone)]
pub struct ModelInfo {
    /// Model name: the filename without its `.ttl` extension.
    pub name: String,
    /// Namespace segment (third-from-last path component of the ZIP entry).
    pub namespace: String,
    /// Version segment (second-from-last path component of the ZIP entry).
    pub version: String,
    /// Target path under the models root where the model is (or would be) written.
    pub path: PathBuf,
    /// Whether the target path already existed at import time.
    pub exists: bool,
}
/// Summary of a package export: what was packaged and how large the ZIP is.
#[derive(Debug, Clone)]
pub struct ExportResult {
    /// Namespace the exported models belong to.
    pub namespace: String,
    /// Version directory the models were taken from.
    pub version: String,
    /// Names of the models written into the package.
    pub models: Vec<String>,
    /// Size of the resulting ZIP file in bytes (0 if metadata was unavailable).
    pub size_bytes: u64,
}
/// Import all `.ttl` models from the ZIP package at `package_path` into
/// `models_root`, profiling the operation and logging a summary.
///
/// `dry_run` analyses the package without writing files; `force` overwrites
/// files that already exist on disk.
pub async fn import_package(
    package_path: &Path,
    models_root: &Path,
    dry_run: bool,
    force: bool,
) -> Result<ImportResult> {
    let work = import_package_impl(package_path, models_root, dry_run, force);
    let (result, duration) = profiling::profile_async("package_import", work).await;
    // On failure, log zero counts; the error itself is propagated to the caller.
    let (models, skipped) = match result.as_ref() {
        Ok(r) => (r.total_models, r.skipped),
        Err(_) => (0, 0),
    };
    tracing::info!(
        "Package import completed in {:?}: {} models ({} skipped)",
        duration,
        models,
        skipped
    );
    result
}
/// Extract `.ttl` models from the ZIP at `package_path` into `models_root`.
///
/// Entry paths are interpreted as `.../<namespace>/<version>/<name>.ttl`;
/// entries with fewer than three path segments are ignored. Every model is
/// recorded in the result grouped by namespace; files that already exist on
/// disk are counted as skipped when `force` is false. With `dry_run` set,
/// nothing is written to disk.
///
/// # Errors
/// Returns `SammError::ParseError` when the package cannot be opened, is not
/// a valid ZIP, or an entry fails to extract; filesystem write failures
/// propagate as I/O errors via `?`.
async fn import_package_impl(
    package_path: &Path,
    models_root: &Path,
    dry_run: bool,
    force: bool,
) -> Result<ImportResult> {
    let file = std::fs::File::open(package_path)
        .map_err(|e| SammError::ParseError(format!("Failed to open package file: {}", e)))?;
    let mut archive = ZipReader::new(file)
        .map_err(|e| SammError::ParseError(format!("Invalid ZIP package: {}", e)))?;
    let mut namespaces: HashMap<String, Vec<ModelInfo>> = HashMap::new();
    let mut total_models = 0;
    let mut skipped = 0;
    // Snapshot the entry list up front: `archive.extract` below needs
    // `&mut archive`, so we cannot iterate a borrow of `archive.entries()`.
    let entries_to_process: Vec<_> = archive
        .entries()
        .iter()
        .filter(|entry| !entry.is_dir() && entry.name.ends_with(".ttl"))
        .cloned()
        .collect();
    for entry in entries_to_process {
        let file_path = entry.name.clone();
        let parts: Vec<&str> = file_path.split('/').collect();
        if parts.len() < 3 {
            // Too shallow to carry namespace/version/filename — skip silently.
            continue;
        }
        // The last three path segments are namespace / version / filename.
        let namespace = parts[parts.len() - 3].to_string();
        let version = parts[parts.len() - 2].to_string();
        let filename = parts[parts.len() - 1];
        let model_name = filename.trim_end_matches(".ttl").to_string();
        let target_path = models_root.join(&namespace).join(&version).join(filename);
        let exists = target_path.exists();
        if exists && !force {
            // Counted as skipped, but the model is still reported below.
            skipped += 1;
        }
        let model_info = ModelInfo {
            name: model_name,
            namespace: namespace.clone(),
            version: version.clone(),
            path: target_path.clone(),
            exists,
        };
        namespaces
            .entry(namespace.clone())
            .or_default()
            .push(model_info);
        total_models += 1;
        // Write only on a real run, and only when the file is new or
        // overwriting was explicitly requested.
        if !dry_run && (force || !exists) {
            if let Some(parent) = target_path.parent() {
                async_fs::create_dir_all(parent).await?;
            }
            let content = archive.extract(&entry).map_err(|e| {
                SammError::ParseError(format!("Failed to extract file from ZIP: {}", e))
            })?;
            async_fs::write(&target_path, content).await?;
        }
    }
    Ok(ImportResult {
        namespaces,
        total_models,
        skipped,
    })
}
/// Package a single model file into a ZIP at `output_path`, profiling the
/// operation and logging the elapsed time.
pub async fn export_from_file(model_file: &str, output_path: &Path) -> Result<ExportResult> {
    let work = export_from_file_impl(model_file, output_path);
    let (result, duration) = profiling::profile_async("package_export_file", work).await;
    tracing::info!(
        "Package export from file completed in {:?}: {}",
        duration,
        model_file
    );
    result
}
/// Build a one-model ZIP package from `model_file` at `output_path`.
///
/// Namespace and version are derived from the model's own content; the
/// single entry is stored as `<namespace>/<version>/<name>.ttl`.
async fn export_from_file_impl(model_file: &str, output_path: &Path) -> Result<ExportResult> {
    let model_path = PathBuf::from(model_file);
    let content = async_fs::read_to_string(&model_path).await?;
    let (namespace, version) = extract_namespace_from_content(&content)?;
    // The model name is the filename stripped of its extension.
    let model_name = model_path
        .file_stem()
        .and_then(|s| s.to_str())
        .ok_or_else(|| SammError::ParseError("Invalid filename".to_string()))?
        .to_string();
    let zip_file = std::fs::File::create(output_path).map_err(SammError::Io)?;
    let mut zip = ZipWriter::new(zip_file);
    zip.set_compression(ZipCompressionLevel::Normal);
    let entry_name = format!("{}/{}/{}.ttl", namespace, version, model_name);
    zip.add_file(&entry_name, content.as_bytes())
        .map_err(|e| SammError::ParseError(format!("Failed to add ZIP entry: {}", e)))?;
    let finished = zip
        .into_inner()
        .map_err(|e| SammError::ParseError(format!("Failed to finalize ZIP: {}", e)))?;
    // Best-effort size reporting: 0 when metadata is unavailable.
    let size_bytes = finished.metadata().map(|m| m.len()).unwrap_or(0);
    Ok(ExportResult {
        namespace,
        version,
        models: vec![model_name],
        size_bytes,
    })
}
/// Export the models addressed by `urn` into a ZIP at `output_path`,
/// profiling the operation and logging a summary.
///
/// `version_filter`, when given, overrides the version parsed from the URN.
pub async fn export_from_urn(
    urn: &str,
    output_path: &Path,
    version_filter: Option<&str>,
) -> Result<ExportResult> {
    let work = export_from_urn_impl(urn, output_path, version_filter);
    let (result, duration) = profiling::profile_async("package_export_urn", work).await;
    // On failure, log a model count of zero; the error propagates unchanged.
    let model_count = match result.as_ref() {
        Ok(r) => r.models.len(),
        Err(_) => 0,
    };
    tracing::info!(
        "Package export from URN completed in {:?}: {} ({} models)",
        duration,
        urn,
        model_count
    );
    result
}
async fn export_from_urn_impl(
urn: &str,
output_path: &Path,
version_filter: Option<&str>,
) -> Result<ExportResult> {
use tokio::fs as async_fs;
let (namespace, version) = parse_urn(urn)?;
let final_version = version_filter.unwrap_or(&version);
let models_root = if let Ok(env_path) = std::env::var("SAMM_MODELS_ROOT") {
PathBuf::from(env_path)
} else if PathBuf::from("./models").exists() {
PathBuf::from("./models")
} else {
std::env::current_dir().map_err(SammError::Io)?
};
let namespace_dir = models_root.join(&namespace).join(final_version);
if !namespace_dir.exists() {
return Err(SammError::ParseError(format!(
"Namespace directory not found: {} (models root: {})",
namespace_dir.display(),
models_root.display()
)));
}
let mut entries = async_fs::read_dir(&namespace_dir)
.await
.map_err(SammError::Io)?;
let mut model_files: Vec<PathBuf> = Vec::new();
while let Some(entry) = entries.next_entry().await.map_err(SammError::Io)? {
let path = entry.path();
if path.is_file() && path.extension().and_then(|s| s.to_str()) == Some("ttl") {
model_files.push(path);
}
}
if model_files.is_empty() {
return Err(SammError::ParseError(format!(
"No .ttl files found in namespace directory: {}",
namespace_dir.display()
)));
}
let zip_file = std::fs::File::create(output_path).map_err(SammError::Io)?;
let mut zip = ZipWriter::new(zip_file);
zip.set_compression(ZipCompressionLevel::Normal);
let mut model_names = Vec::new();
for model_file in &model_files {
let content = async_fs::read_to_string(model_file)
.await
.map_err(SammError::Io)?;
let model_name = model_file
.file_stem()
.and_then(|s| s.to_str())
.ok_or_else(|| SammError::ParseError("Invalid filename".to_string()))?;
model_names.push(model_name.to_string());
let zip_path = format!("{}/{}/{}.ttl", namespace, final_version, model_name);
zip.add_file(&zip_path, content.as_bytes())
.map_err(|e| SammError::ParseError(format!("Failed to add ZIP entry: {}", e)))?;
}
let file = zip
.into_inner()
.map_err(|e| SammError::ParseError(format!("Failed to finalize ZIP: {}", e)))?;
let size_bytes = file.metadata().map(|m| m.len()).unwrap_or(0);
Ok(ExportResult {
namespace,
version: final_version.to_string(),
models: model_names,
size_bytes,
})
}
/// Derive `(namespace, version)` from Turtle model `content`.
///
/// Pass 1 prefers URNs attached to typed SAMM element declarations
/// (`a samm:Aspect` / `a samm:Property` / `a samm:Operation`) written in
/// angle brackets. Pass 2 falls back to the first non-`@prefix` line that
/// mentions any `urn:samm:` URN. Meta-model URNs (containing `esmf.samm`)
/// are ignored in both passes. When nothing usable is found, a default of
/// `org.example.default` / `1.0.0` is returned.
fn extract_namespace_from_content(content: &str) -> Result<(String, String)> {
    // Pass 1: typed element declarations carrying an angle-bracketed URN.
    for line in content.lines() {
        if line.trim().starts_with("@prefix") {
            continue;
        }
        let is_typed = line.contains("a samm:Aspect")
            || line.contains("a samm:Property")
            || line.contains("a samm:Operation");
        if !(line.contains("urn:samm:") && is_typed) {
            continue;
        }
        let Some(start) = line.find("<urn:samm:") else {
            continue;
        };
        let after_bracket = &line[start + 1..];
        let Some(end) = after_bracket.find('>') else {
            continue;
        };
        let urn = &after_bracket[..end];
        // Drop a trailing `#Element` fragment if present.
        let bare = match urn.find('#') {
            Some(hash_pos) => &urn[..hash_pos],
            None => urn,
        };
        if let Ok((namespace, version)) = parse_urn(bare) {
            if !namespace.contains("esmf.samm") {
                return Ok((namespace, version));
            }
        }
    }
    // Pass 2: any non-prefix line mentioning a SAMM URN at all.
    for line in content.lines() {
        if line.trim().starts_with("@prefix") || !line.contains("urn:samm:") {
            continue;
        }
        let Some(start) = line.find("urn:samm:") else {
            continue;
        };
        let tail = &line[start..];
        // The URN ends at the first closing bracket, fragment, or space.
        let Some(end) = tail.find(['>', '#', ' ']) else {
            continue;
        };
        if let Ok((namespace, version)) = parse_urn(&tail[..end]) {
            if !namespace.contains("esmf.samm") {
                return Ok((namespace, version));
            }
        }
    }
    // Nothing found: fall back to a stable default.
    Ok(("org.example.default".to_string(), "1.0.0".to_string()))
}
/// Split a `urn:samm:<namespace>:<version>[#Element]` URN into
/// `(namespace, version)`.
///
/// Any `#Element` fragment is ignored. When no version segment is present,
/// `"1.0.0"` is assumed. Returns a `ParseError` for strings that do not
/// start with `urn:samm:`.
fn parse_urn(urn: &str) -> Result<(String, String)> {
    let body = urn
        .strip_prefix("urn:samm:")
        .ok_or_else(|| SammError::ParseError(format!("Invalid URN format: {}", urn)))?;
    // Keep only the part before any `#Element` fragment.
    let body = body.split('#').next().unwrap_or(body);
    // The version is everything after the LAST `:`; no colon means no version.
    match body.rsplit_once(':') {
        Some((namespace, version)) => Ok((namespace.to_string(), version.to_string())),
        None => Ok((body.to_string(), "1.0.0".to_string())),
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// A full URN with version and element fragment parses cleanly.
    #[test]
    fn test_parse_urn_with_version() {
        let parsed = parse_urn("urn:samm:org.eclipse.example:1.0.0#Movement")
            .expect("parsing should succeed");
        assert_eq!(
            parsed,
            ("org.eclipse.example".to_string(), "1.0.0".to_string())
        );
    }

    /// A URN without an element fragment parses the same way.
    #[test]
    fn test_parse_urn_without_element() {
        let parsed =
            parse_urn("urn:samm:org.eclipse.example:2.1.0").expect("parsing should succeed");
        assert_eq!(
            parsed,
            ("org.eclipse.example".to_string(), "2.1.0".to_string())
        );
    }

    /// A missing version segment falls back to "1.0.0".
    #[test]
    fn test_parse_urn_without_version() {
        let parsed = parse_urn("urn:samm:org.eclipse.example").expect("parsing should succeed");
        assert_eq!(parsed.0, "org.eclipse.example");
        assert_eq!(parsed.1, "1.0.0");
    }

    /// Anything not starting with `urn:samm:` is rejected.
    #[test]
    fn test_parse_urn_invalid() {
        for bad in ["invalid:urn", "urn:other:namespace"] {
            assert!(parse_urn(bad).is_err());
        }
    }

    /// The meta-model `@prefix` line is skipped; the aspect URN wins.
    #[test]
    fn test_extract_namespace_from_content() {
        let content = r#"
@prefix samm: <urn:samm:org.eclipse.esmf.samm:meta-model:2.1.0#> .
<urn:samm:org.eclipse.example:1.0.0#Movement> a samm:Aspect .
"#;
        let (namespace, version) =
            extract_namespace_from_content(content).expect("operation should succeed");
        assert_eq!(namespace, "org.eclipse.example");
        assert_eq!(version, "1.0.0");
    }
}