// Temperature boundary tests: exercise sampling-temperature edge values.
// Every call below points at a nonexistent model path, so it should fail
// during model loading regardless of the sampling configuration.
#[test]
fn test_run_gguf_inference_config_temperature_boundary_0_01() {
    let result = inference::run_gguf_inference(
        "/nonexistent/model.gguf",
        &[],
        "boundary test",
        5,
        0.01,
        "text",
        false,
        false,
        None,
    );
    assert!(result.is_err());
}

#[test]
fn test_run_gguf_inference_config_temperature_just_above_boundary() {
    let result = inference::run_gguf_inference(
        "/nonexistent/model.gguf",
        &[],
        "sampling boundary",
        5,
        0.011,
        "text",
        false,
        false,
        None,
    );
    assert!(result.is_err());
}
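
// max_tokens edge values: a single token and a very large count. The path
// is still nonexistent, so the failure should occur before generation begins.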
#[test]
fn test_run_gguf_inference_max_tokens_one() {
    let result = inference::run_gguf_inference(
        "/nonexistent/model.gguf",
        &[],
        "single token",
        1,
        0.0,
        "text",
        false,
        false,
        None,
    );
    assert!(result.is_err());
}

#[test]
fn test_run_gguf_inference_max_tokens_very_large() {
    let result = inference::run_gguf_inference(
        "/nonexistent/model.gguf",
        &[],
        "large generation",
        usize::MAX / 2,
        0.0,
        "text",
        false,
        false,
        None,
    );
    assert!(result.is_err());
}
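
// Output-format handling: known formats ("json", "text"), an unrecognized
// format ("yaml"), and an empty string. Each call should return an error
// given the nonexistent model path.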
#[test]
fn test_run_gguf_inference_format_json_lowercase() {
    let result = inference::run_gguf_inference(
        "/nonexistent/model.gguf",
        &[],
        "json test",
        5,
        0.5,
        "json",
        false,
        false,
        None,
    );
    assert!(result.is_err());
}

#[test]
fn test_run_gguf_inference_format_text_lowercase() {
    let result = inference::run_gguf_inference(
        "/nonexistent/model.gguf",
        &[],
        "text test",
        5,
        0.5,
        "text",
        false,
        false,
        None,
    );
    assert!(result.is_err());
}

#[test]
fn test_run_gguf_inference_format_arbitrary_string() {
    let result = inference::run_gguf_inference(
        "/nonexistent/model.gguf",
        &[],
        "unknown format",
        5,
        0.5,
        "yaml",
        false,
        false,
        None,
    );
    assert!(result.is_err());
}

#[test]
fn test_run_gguf_inference_format_empty_string() {
    let result = inference::run_gguf_inference(
        "/nonexistent/model.gguf",
        &[],
        "empty format",
        5,
        0.5,
        "",
        false,
        false,
        None,
    );
    assert!(result.is_err());
}
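
// GPU flag tests: requesting GPU execution without the CUDA feature should
// not change the outcome; the nonexistent path still fails at load time.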
#[test]
fn test_run_gguf_inference_gpu_flag_no_cuda_feature() {
    let result = inference::run_gguf_inference(
        "/nonexistent/model.gguf",
        &[],
        "gpu warning test",
        5,
        0.5,
        "text",
        true,
        false,
        None,
    );
    assert!(result.is_err());
}

#[test]
fn test_run_apr_inference_gpu_flag_no_cuda_feature() {
    let result = inference::run_apr_inference(
        "/nonexistent/model.apr",
        &[],
        "gpu warning test",
        5,
        0.5,
        "text",
        true,
        false,
        None,
    );
    assert!(result.is_err());
}
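
// Verbose and trace flags, alone and combined: debug-output options must
// not mask the underlying load failure.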
#[test]
fn test_run_gguf_inference_verbose_only() {
    let result = inference::run_gguf_inference(
        "/nonexistent/model.gguf",
        &[],
        "verbose only",
        5,
        0.5,
        "text",
        false,
        true,
        None,
    );
    assert!(result.is_err());
}

#[test]
fn test_run_gguf_inference_trace_only() {
    let result = inference::run_gguf_inference(
        "/nonexistent/model.gguf",
        &[],
        "trace only",
        5,
        0.5,
        "text",
        false,
        false,
        Some(TraceConfig::enabled()),
    );
    assert!(result.is_err());
}

#[test]
fn test_run_gguf_inference_verbose_and_trace() {
    let result = inference::run_gguf_inference(
        "/nonexistent/model.gguf",
        &[],
        "both flags",
        5,
        0.5,
        "text",
        false,
        true,
        Some(TraceConfig::enabled()),
    );
    assert!(result.is_err());
}

#[test]
fn test_run_gguf_inference_all_debug_flags() {
    let result = inference::run_gguf_inference(
        "/nonexistent/model.gguf",
        &[],
        "all flags",
        5,
        0.5,
        "json",
        true,
        true,
        Some(TraceConfig::enabled()),
    );
    assert!(result.is_err());
}

#[test]
fn test_run_apr_inference_verbose_only() {
    let result = inference::run_apr_inference(
        "/nonexistent/model.apr",
        &[],
        "verbose apr",
        5,
        0.5,
        "text",
        false,
        true,
        None,
    );
    assert!(result.is_err());
}

#[test]
fn test_run_apr_inference_verbose_with_gpu() {
    let result = inference::run_apr_inference(
        "/nonexistent/model.apr",
        &[],
        "verbose gpu apr",
        5,
        0.5,
        "json",
        true,
        true,
        None,
    );
    assert!(result.is_err());
}
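
// SafeTensors parameter sweeps: iterate over format strings, temperatures
// (including extreme and negative values), and max_tokens counts. Every
// combination should fail identically on the nonexistent path.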
#[test]
fn test_run_safetensors_inference_format_variations() {
    for format in &["json", "text", "yaml", "", "markdown"] {
        let result = inference::run_safetensors_inference(
            "/nonexistent/model.safetensors",
            "format test",
            5,
            0.5,
            format,
            None,
        );
        assert!(result.is_err());
    }
}

#[test]
fn test_run_safetensors_inference_temperature_ignored() {
    for temp in &[0.0, 0.01, 0.5, 1.0, 2.0, -1.0, f32::MAX, f32::MIN] {
        let result = inference::run_safetensors_inference(
            "/nonexistent/model.safetensors",
            "temp test",
            5,
            *temp,
            "text",
            None,
        );
        assert!(result.is_err());
    }
}

#[test]
fn test_run_safetensors_inference_max_tokens_variations() {
    for max_tokens in &[0, 1, 5, 100, 1000, usize::MAX / 4] {
        let result = inference::run_safetensors_inference(
            "/nonexistent/model.safetensors",
            "tokens test",
            *max_tokens,
            0.5,
            "text",
            None,
        );
        assert!(result.is_err());
    }
}
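
// Error-message structure: beyond is_err(), check that the Display output
// carries format-specific diagnostic context (GGUF, APR, SafeTensors).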
#[test]
fn test_run_gguf_inference_error_message_structure() {
    let result = inference::run_gguf_inference(
        "/definitely/not/a/real/path/model.gguf",
        &[],
        "error structure test",
        5,
        0.5,
        "text",
        false,
        false,
        None,
    );
    assert!(result.is_err());
    let err = result.unwrap_err();
    let err_string = err.to_string();
    assert!(
        err_string.contains("mmap")
            || err_string.contains("GGUF")
            || err_string.contains("load"),
        "Error message should contain diagnostic info: {}",
        err_string
    );
}

#[test]
fn test_run_apr_inference_error_message_structure() {
    let result = inference::run_apr_inference(
        "/definitely/not/a/real/path/model.apr",
        &[0x00, 0x01, 0x02, 0x03],
        "error structure test",
        5,
        0.5,
        "text",
        false,
        false,
        None,
    );
    assert!(result.is_err());
    let err = result.unwrap_err();
    let err_string = err.to_string();
    assert!(
        err_string.contains("APR")
            || err_string.contains("parse")
            || err_string.contains("Failed"),
        "Error message should contain APR-related info: {}",
        err_string
    );
}

#[test]
fn test_run_safetensors_inference_error_message_structure() {
    let result = inference::run_safetensors_inference(
        "/definitely/not/a/real/path/model.safetensors",
        "error structure test",
        5,
        0.5,
        "text",
        None,
    );
    assert!(result.is_err());
    let err = result.unwrap_err();
    let err_string = err.to_string();
    assert!(
        err_string.contains("SafeTensors")
            || err_string.contains("convert")
            || err_string.contains("Failed")
            || err_string.contains("No such file"),
        "Error message should contain useful info: {}",
        err_string
    );
}
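
// Prompt robustness: an embedded NUL byte in the prompt must not panic;
// the call should still fail cleanly on the missing model file.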
#[test]
fn test_prompt_with_null_bytes() {
    let result = inference::run_gguf_inference(
        "/nonexistent/model.gguf",
        &[],
        "hello\x00world",
        5,
        0.5,
        "text",
        false,
        false,
        None,
    );
    assert!(result.is_err());
}