use ort_openrouter_core::cli;
/// End-to-end smoke test: runs the CLI with a simple "Hello" prompt and
/// checks that the reply looks like an assistant response and ends with a
/// stats line naming the requested model.
#[test]
fn test_hello() {
    // Free-tier model so the test is runnable without billing.
    const MODEL: &str = "openai/gpt-oss-20b:free";
    let mut out = Vec::new();
    let args = ["ort", "-m", MODEL, "-r", "low", "Hello"]
        .into_iter()
        .map(|s| s.to_string())
        .collect();
    // `false` presumably disables interactive/TTY behavior — TODO confirm
    // against cli::main's signature.
    let ret = cli::main(args, false, &mut out).unwrap();
    assert_eq!(ret, 0);
    let contents = String::from_utf8_lossy(&out);
    // assert! with a message instead of a manual if/panic (same message).
    assert!(
        !contents.is_empty(),
        "No output from 'ort'. Try it at the command line."
    );
    let mut lines = contents.lines();
    let first_line = lines
        .next()
        .expect("non-empty output must have a first line");
    assert!(
        first_line.contains("assist") || first_line.contains("help"),
        "Invalid line: '{first_line}'"
    );
    // `lines` has already been advanced past the first line, so `last()`
    // yields the final line of the remaining output.
    let last_line = lines
        .last()
        .expect("output should have a stats line after the reply");
    assert!(
        last_line.starts_with(&format!("Stats: {MODEL}")),
        "Invalid last line: '{last_line}'",
    );
}
/// Checks that `ort list` succeeds, prints a reasonably long model list,
/// and that the list includes a well-known model.
#[test]
fn test_list() {
    let mut out = Vec::new();
    let args = ["ort", "list"].into_iter().map(|s| s.to_string()).collect();
    let ret = cli::main(args, false, &mut out).unwrap();
    assert_eq!(ret, 0);
    let contents = String::from_utf8_lossy(&out);
    // Fix: the original early-returned as soon as the expected model was
    // found, so the line-count check only ran when the model was MISSING —
    // a shrinking list that still contained the model passed silently.
    // Check both conditions unconditionally.
    let count = contents.lines().count();
    assert!(count > 20, "Too few lines: {count}");
    assert!(
        contents
            .lines()
            .any(|line| line == "meta-llama/llama-3-70b-instruct"),
        "List did not include Llama 3 70B"
    );
}