use oris_runtime::llm::OpenAIConfig;
use oris_runtime::{language_models::llm::LLM, llm::openai::OpenAI};
#[tokio::main]
async fn main() {
    // Example 1: use the client with its default configuration
    // (reads credentials from the environment, e.g. OPENAI_API_KEY).
    let open_ai = OpenAI::default();
    let response = open_ai
        .invoke("hola")
        .await
        .expect("LLM request with default config failed");
    println!("{}", response);

    // Example 2: configure the client explicitly instead of relying
    // on environment defaults.
    let open_ai = OpenAI::default().with_config(
        OpenAIConfig::default()
            .with_api_base("xxx") // set a custom base URL (e.g. a proxy or self-hosted endpoint)
            .with_api_key("<your_api_key>"), // set your OpenAI API key directly
    );
    let response = open_ai
        .invoke("hola")
        .await
        .expect("LLM request with custom config failed");
    println!("{}", response);
}