// objectiveai_api/chat/completions/error.rs
/// Errors produced while handling a chat-completion request.
#[derive(thiserror::Error, Debug)]
pub enum Error {
    /// Failure reported by the selected upstream provider
    /// (auto-converted from `super::upstream::Error` via `#[from]`).
    #[error("upstream error: {0}")]
    UpstreamError(#[from] super::upstream::Error),
    /// No upstream was able to serve the given request.
    #[error("no upstreams found for request")]
    NoUpstreamsFound,
    /// Fetching the Ensemble LLM definition failed; wraps the API
    /// response error (no `#[from]` — constructed explicitly at call sites).
    #[error("fetch Ensemble LLM error: {0}")]
    FetchEnsembleLlm(objectiveai::error::ResponseError),
    /// The requested Ensemble LLM does not exist.
    #[error("Ensemble LLM not found")]
    EnsembleLlmNotFound,
    /// The Ensemble LLM definition was found but is invalid;
    /// the payload is a human-readable explanation.
    #[error("invalid Ensemble LLM: {0}")]
    InvalidEnsembleLlm(String),
    /// Aggregate of several independent failures (recursive variant).
    #[error("multiple errors: {0:?}")]
    MultipleErrors(Vec<Error>),
}
25
26impl objectiveai::error::StatusError for Error {
27 fn status(&self) -> u16 {
28 match self {
29 Error::UpstreamError(e) => e.status(),
30 Error::NoUpstreamsFound => 400,
31 Error::FetchEnsembleLlm(e) => e.status(),
32 Error::EnsembleLlmNotFound => 404,
33 Error::InvalidEnsembleLlm(_) => 400,
34 Error::MultipleErrors(_) => 500,
35 }
36 }
37
38 fn message(&self) -> Option<serde_json::Value> {
39 Some(serde_json::json!({
40 "kind": "chat_completion",
41 "error": match self {
42 Error::UpstreamError(e) => serde_json::json!({
43 "kind": "upstream_error",
44 "error": e.message(),
45 }),
46 Error::NoUpstreamsFound => serde_json::json!({
47 "kind": "no_upstreams_found",
48 "error": "no upstreams available for the given request",
49 }),
50 Error::FetchEnsembleLlm(e) => serde_json::json!({
51 "kind": "fetch_ensemble_llm",
52 "error": e.message(),
53 }),
54 Error::EnsembleLlmNotFound => serde_json::json!({
55 "kind": "ensemble_llm_not_found",
56 "error": "Ensemble LLM not found",
57 }),
58 Error::InvalidEnsembleLlm(msg) => serde_json::json!({
59 "kind": "invalid_ensemble_llm",
60 "error": msg,
61 }),
62 Error::MultipleErrors(errors) => serde_json::json!({
63 "kind": "multiple_errors",
64 "errors": errors.iter().map(|e| {
65 serde_json::json!({
66 "status": e.status(),
67 "message": e.message(),
68 })
69 }).collect::<Vec<_>>(),
70 }),
71 }
72 }))
73 }
74}