litellm-rs 0.4.16

A high-performance AI Gateway written in Rust, providing OpenAI-compatible APIs with intelligent routing, load balancing, and enterprise features.
Documentation
//! Predibase Provider
//!
//! Predibase provides fine-tuned LLM serving.
//! API Reference: <https://docs.predibase.com/>
//!
//! This module is pure plumbing: it declares the provider's submodules and
//! re-exports the public surface so callers can use
//! `…::predibase::PredibaseProvider` etc. without naming the submodules.

// Internal submodules (private; their public items are re-exported below).
mod config; // PredibaseConfig: provider configuration
mod error; // PredibaseError: provider-specific error type
mod model_info; // model catalog helpers
mod provider; // PredibaseProvider: the provider implementation

// Unit tests are compiled only under `cargo test`.
#[cfg(test)]
mod tests;

// Public API of this module — the flattened re-exports.
pub use config::PredibaseConfig;
pub use error::PredibaseError;
pub use model_info::{get_available_models, get_model_info};
pub use provider::PredibaseProvider;