use dynamo_llm::model_card::{ModelDeploymentCard, PromptFormatterArtifact, TokenizerKind};
use tempfile::tempdir;
const HF_PATH: &str = "tests/data/sample-models/TinyLlama_v1.1";
/// Loading a HF-style local repo must surface the model info parsed from
/// the fixture's `config.json`.
#[tokio::test]
async fn test_model_info_from_hf_like_local_repo() {
    let card = ModelDeploymentCard::load_from_disk(HF_PATH, None).unwrap();
    let info = card.model_info.unwrap().get_model_info().unwrap();
    // Expected values mirror the TinyLlama fixture's config.json.
    assert_eq!(info.model_type(), "llama");
    assert_eq!(info.bos_token_id(), Some(1));
    assert_eq!(info.eos_token_ids(), vec![2]);
    assert_eq!(info.max_position_embeddings(), Some(2048));
    assert_eq!(info.vocab_size(), Some(32000));
}
/// A path that does not exist must produce an `Err`, not a panic.
#[tokio::test]
async fn test_model_info_from_non_existent_local_repo() {
    let missing = "tests/data/sample-models/this-model-does-not-exist";
    let outcome = ModelDeploymentCard::load_from_disk(missing, None);
    assert!(outcome.is_err());
}
/// The HF-style fixture ships a `tokenizer.json`, so the card must carry
/// the `HfTokenizerJson` tokenizer variant.
#[tokio::test]
async fn test_tokenizer_from_hf_like_local_repo() {
    let card = ModelDeploymentCard::load_from_disk(HF_PATH, None).unwrap();
    let kind = card.tokenizer.unwrap();
    // Exhaustive match (no catch-all) so a new variant forces a revisit here.
    match kind {
        TokenizerKind::HfTokenizerJson(_) => (),
        TokenizerKind::TikTokenModel(_) => panic!("Expected HfTokenizerJson, got TikTokenModel"),
    }
}
/// The fixture's `tokenizer_config.json` must be picked up as the prompt
/// formatter artifact.
#[tokio::test]
async fn test_prompt_formatter_from_hf_like_local_repo() {
    let card = ModelDeploymentCard::load_from_disk(HF_PATH, None).unwrap();
    assert!(
        matches!(
            card.prompt_formatter,
            Some(PromptFormatterArtifact::HfTokenizerConfigJson(_))
        ),
        "Expected HfTokenizerConfigJson prompt formatter"
    );
}
/// An empty directory lacks every required artifact; loading must fail with
/// an error message mentioning the extraction failure.
#[tokio::test]
async fn test_missing_required_files() {
    let empty_dir = tempdir().unwrap();
    let message = ModelDeploymentCard::load_from_disk(empty_dir.path(), None)
        .expect_err("loading an empty directory should fail")
        .to_string();
    assert!(message.contains("unable to extract"));
}
/// A repo without `tokenizer.json` must still load; only the tokenizer field
/// is absent while model info remains available.
#[tokio::test]
async fn test_model_loads_without_tokenizer_json() {
    let repo = "tests/data/sample-models/mock-no-tokenizer-json";
    let card = ModelDeploymentCard::load_from_disk(repo, None).unwrap();
    assert!(
        card.tokenizer.is_none(),
        "Expected tokenizer to be None for model without tokenizer.json"
    );
    assert!(!card.has_tokenizer(), "has_tokenizer() should be false");
    // The rest of the card (config.json-derived info) must still be present.
    assert!(card.model_info.is_some());
}
/// When `tokenizer_config.json` carries no template, the loader falls back
/// to a standalone `chat_template.json`, and it must not be flagged custom.
#[tokio::test]
async fn test_chat_template_json_fallback() {
    let repo = "tests/data/sample-models/mock-no-tokenizer-json";
    let card = ModelDeploymentCard::load_from_disk(repo, None).unwrap();
    match &card.chat_template_file {
        Some(PromptFormatterArtifact::HfChatTemplateJson { file, is_custom }) => {
            // `is_custom` binds as &bool via match ergonomics; `!` derefs fine.
            assert!(!is_custom, "Should not be marked as custom template");
            let template_path = file.path().expect("Should be a local path");
            assert!(
                template_path.ends_with("chat_template.json"),
                "Expected chat_template.json, got {:?}",
                template_path
            );
        }
        other => panic!("Expected HfChatTemplateJson, got {:?}", other),
    }
}