mod code {
pub mod cli_error;
pub mod my_helper;
pub mod shared_state;
}
use code::shared_state::SharedState;
use chrono::Local;
use code::my_helper::MyHelper;
use directories::ProjectDirs;
use image::ImageFormat;
use llm_rs::model_mode::ModelMode;
use openai_interface::ApiInterface;
use rand::distributions::Alphanumeric;
use rand::Rng;
use regex::Regex;
use reqwest::blocking::get;
use rustyline::completion::FilenameCompleter;
use rustyline::highlight::MatchingBracketHighlighter;
use rustyline::hint::HistoryHinter;
use rustyline::history::FileHistory;
use rustyline::validate::MatchingBracketValidator;
use rustyline::{Cmd, CompletionType, Config, EditMode, Editor, Event, EventHandler, KeyEvent};
use std::collections::HashMap;
use std::env::current_dir;
use std::error::Error;
use std::fs::File;
use std::fs::OpenOptions;
use std::io::Read;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::str::FromStr;
use std::time::Instant;
use std::{env, fs};
extern crate llm_rs;
use clap::Parser;
use llm_rs::openai_interface;
// Defaults for the command-line arguments (see `Arguments` below).
const DEFAULT_MODEL: &str = "gpt-4";
// Maximum tokens to generate per request.
const DEFAULT_TOKENS: u32 = 2_000_u32;
// Sampling temperature passed to the API (valid range 0.0..=2.0, see `! t`).
const DEFAULT_TEMPERATURE: f32 = 0.9_f32;
// Start-up model mode (API endpoint); parsed by `ModelMode::from_str`.
const DEFAULT_MODE: &str = "chat";
// File the question/answer transcript is appended to.
const DEFAULT_RECORD_FILE: &str = "reply.txt";
// File the rustyline input history is saved to.
const DEFAULT_HISTORY_FILE: &str = "history.txt";
// Command-line arguments, parsed with clap's derive API.
// NOTE(review): plain `//` comments are used on purpose — `///` doc comments
// on clap fields would change the generated `--help` text.
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
struct Arguments {
// Model name sent to the API.
#[arg(long, short = 'm',default_value=DEFAULT_MODEL)]
model: String,
// Maximum tokens to generate per request.
#[arg(long, short = 't', default_value_t=DEFAULT_TOKENS)]
max_tokens: u32,
// Sampling temperature.
#[arg(long, short = 'T', default_value_t = DEFAULT_TEMPERATURE)]
temperature: f32,
// API key; when absent, main() falls back to the OPENAI_API_KEY env var.
#[arg(long)]
api_key: Option<String>,
// Initial model mode (API endpoint), e.g. "chat" or "completions".
#[arg(long, short='d', default_value=DEFAULT_MODE)]
mode: String,
// Path of the file the conversation transcript is appended to.
#[arg(long, short='r', default_value=DEFAULT_RECORD_FILE)]
record_file: String,
// Optional system prompt used in chat mode.
#[arg(long, short='p', default_value=None)]
system_prompt: Option<String>,
}
/// Mutable state for one interactive session.
struct CliInterface {
/// Verbosity level: 0 is quiet; > 0 prints response headers and
/// conversation statistics (see `after_request` and the main loop).
verbose: usize,
/// Path of the rustyline history file.
history_file: String,
/// Path the Q/A transcript is appended to.
record_file: String,
/// Audio file used in AudioTranscription mode, if one has been set.
audio_file: Option<String>,
/// Current API endpoint mode (chat, completions, image, ...).
model_mode: ModelMode,
/// Current model name.
model: String,
/// URL of the most recently generated image, if any.
pub focus_image_url: Option<String>,
/// Local copy of the downloaded image, used as the base for image edits.
pub image: Option<PathBuf>,
/// Mask file used in ImageEdit mode.
pub mask: Option<PathBuf>,
/// Last-seen response headers; `after_request` uses this to suppress
/// headers whose value has not changed since the previous request.
header_cache: HashMap<String, String>,
/// Cost of the most recent chat request (parsed from the "Cost" header).
cost: f64,
/// Name -> file-contents bindings (`! fl`), expanded by `expand_variables`.
local_data: HashMap<String, String>,
}
impl CliInterface {
/// Create a fresh, randomly named path in the per-user config directory
/// with the given extension (the file itself is not created here).
///
/// Returns an error if no home/config directory can be determined or the
/// config directory cannot be created.
fn make_file(suffix: &str) -> Result<PathBuf, Box<dyn Error>> {
    // BUG FIX: was `.unwrap()` — ProjectDirs::from returns None when no
    // home directory can be found; surface that as an error instead of
    // panicking.
    let project_dir = ProjectDirs::from("worik", "org", "llm-rs")
        .ok_or("Cannot determine a config directory (no home directory?)")?;
    println!("project_dir ({:?})", project_dir);
    std::fs::create_dir_all(project_dir.config_dir())?;
    // A random 10-character alphanumeric stem avoids collisions between runs.
    let rand_file_name: String = rand::thread_rng()
        .sample_iter(&Alphanumeric)
        .take(10)
        .map(char::from)
        .collect();
    Ok(project_dir
        .config_dir()
        .join(rand_file_name)
        .with_extension(suffix))
}
/// Download the image at `url`, store it locally as a PNG, remember the
/// path in `self.image`, and open it in the default web browser.
///
/// Errors propagate from the download, file I/O, image decode/encode and
/// browser launch.
fn process_image_url(&mut self, url: &str) -> Result<(), Box<dyn Error>> {
    println!("process_image_url({url})");
    let start = Instant::now();
    // Download the raw image bytes (blocking).
    // BUG FIX: was `.unwrap()` on both calls — network failures now
    // propagate as errors instead of panicking.
    let mut img_data: Vec<u8> = Vec::new();
    get(url)?.read_to_end(&mut img_data)?;
    println!("Down loaded URL: {} bytes", img_data.len());
    let incomming_image_file_path = Self::make_file("png")?;
    println!("incomming_image_file_path {:?}", incomming_image_file_path);
    // BUG FIX: added `truncate(true)` — without it a pre-existing longer
    // file would keep trailing bytes after the new image data.
    let mut incomming_image_file = OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(true)
        .open(&incomming_image_file_path)?;
    println!("Created {:?}", incomming_image_file_path);
    incomming_image_file.write_all(&img_data)?;
    // Decode the file we just wrote so it can be normalised to RGBA/PNG.
    let img = image::open(&incomming_image_file_path)?;
    println!("Opened {:?}", incomming_image_file);
    // BUG FIX: removed a second `write_all(&img_data)` here that appended
    // the image bytes a second time, corrupting the file.
    println!(
        "Wrote image: {:?} {:#?}",
        start.elapsed(),
        incomming_image_file_path,
    );
    let img_rgba = img.into_rgba8();
    self.image = Some(incomming_image_file_path.as_path().to_owned());
    // Re-encode as PNG at the stored path, then show it to the user.
    img_rgba.save_with_format(self.image.clone().unwrap(), ImageFormat::Png)?;
    webbrowser::open(self.image.clone().unwrap().as_os_str().to_str().unwrap())?;
    Ok(())
}
/// Build the rustyline editor used by the main loop: Emacs key bindings,
/// list-style completion, filename completion/hints/bracket matching via
/// `MyHelper`, plus Alt-n/Alt-p history search and Ctrl-q to interrupt.
/// Loads any existing history from `self.history_file`.
fn set_up_read_line(&self) -> rustyline::Result<Editor<MyHelper, FileHistory>> {
    let editor_config = Config::builder()
        .history_ignore_space(true)
        .completion_type(CompletionType::List)
        .edit_mode(EditMode::Emacs)
        .build();
    let mut editor: Editor<MyHelper, FileHistory> = Editor::with_config(editor_config)?;
    editor.set_helper(Some(MyHelper {
        completer: FilenameCompleter::new(),
        highlighter: MatchingBracketHighlighter::new(),
        hinter: HistoryHinter {},
        colored_prompt: "".to_owned(),
        validator: MatchingBracketValidator::new(),
    }));
    // Alt-n / Alt-p step through history search results.
    editor.bind_sequence(KeyEvent::alt('n'), Cmd::HistorySearchForward);
    editor.bind_sequence(KeyEvent::alt('p'), Cmd::HistorySearchBackward);
    if editor.load_history(self.history_file.as_str()).is_err() {
        println!("No previous history.");
    }
    // Ctrl-q aborts the current line.
    editor.bind_sequence(
        Event::KeySeq(vec![KeyEvent::ctrl('q')]),
        EventHandler::Simple(Cmd::Interrupt),
    );
    Ok(editor)
}
fn expand_variables(&self, input: String) -> Result<String, Box<dyn Error>> {
let re = Regex::new(r"\{(\w+)\}").unwrap();
let result = re
.replace_all(&input, |caps: ®ex::Captures| {
self.local_data
.get(&caps[1])
.unwrap_or(&caps[0].to_string())
.to_string()
})
.to_string();
Ok(result)
}
/// Execute a meta command: a prompt line that begins with `!`.
///
/// The first whitespace-separated token after the `!` selects the command
/// and any remaining tokens are its arguments.  Returns the text to show
/// the user.  Commands may mutate `self` (mode, model, image/mask/audio
/// paths, local data) and `api_interface` (tokens, temperature, context,
/// system prompt).  Type `! ?` for the command list.
fn process_meta(
    &mut self,
    prompt: &str,
    api_interface: &mut ApiInterface,
) -> Result<String, Box<dyn Error>> {
    let mut meta = prompt.split_whitespace();
    let response_text: String;
    // `nth(1)` skips the leading "!" token and yields the command word.
    if let Some(cmd) = meta.nth(1) {
        match cmd {
            // List the files stored on the server, sorted.
            "f" => {
                // BUG FIX: was `.unwrap()` — a network failure now reports
                // instead of panicking, matching the other arms.
                response_text = match api_interface.files_list() {
                    Ok(vl) => {
                        let mut sorted_vec = vl.body;
                        sorted_vec.sort();
                        format!(
                            ".....File ID...................Name{}",
                            sorted_vec
                                .iter()
                                .fold(String::new(), |a, b| format!("{a}\n{}: {}", b.1, b.0))
                        )
                    }
                    Err(err) => format!("{err}: Failed to list files"),
                };
            }
            // Upload a file of fine-tuning data.
            "fu" => {
                let file_name: String = meta.collect::<Vec<&str>>().join(" ");
                if file_name.is_empty() {
                    // BUG FIX: message wrongly asked for "an audio file to
                    // transcribe" (copy/paste from the "a" command).
                    response_text = format!(
                        "Enter a file of fine tuning data to upload: {}",
                        current_dir()?.display()
                    );
                } else if PathBuf::from(file_name.as_str()).exists() {
                    response_text = match api_interface
                        .files_upload_fine_tuning(Path::new(file_name.as_str()))
                    {
                        Ok(r) => r.body,
                        Err(err) => format!("{err}: Failed to upload {file_name}"),
                    };
                } else {
                    response_text = format!(
                        "{file_name} does not exist. Paths relative to {}",
                        current_dir()?.display()
                    );
                }
            }
            // Information about one stored file.
            "fi" => {
                if let Some(file_id) = meta.next() {
                    response_text = match api_interface.file_info(file_id.to_string()) {
                        Ok(s) => s.body,
                        // BUG FIX: error said "Failed to delete".
                        Err(err) => format!("{err} Failed to get file info"),
                    };
                } else {
                    response_text = "Enter a file ID".to_string();
                }
            }
            // Fetch a file's contents; optionally save them to a local file.
            "fc" => {
                if let Some(file_id) = meta.next() {
                    let local_file = meta.next();
                    response_text = match api_interface.file_contents(file_id.to_string()) {
                        Ok(s) => {
                            if let Some(local_file) = local_file {
                                let mut file = File::create(local_file)?;
                                file.write_all(s.body.as_bytes())?;
                                "success".to_string()
                            } else {
                                s.body
                            }
                        }
                        Err(err) => format!("{err} Failed to get contents"),
                    };
                } else {
                    response_text = "Enter a file ID".to_string();
                }
            }
            // Delete a stored file.
            "fd" => {
                let file_id: String = meta.collect::<Vec<&str>>().join(" ");
                response_text = match api_interface.files_delete(file_id) {
                    Ok(_) => "Deleted".to_string(),
                    Err(err) => format!("{err} Failed to delete"),
                };
            }
            // Display the current settings.
            "p" => {
                response_text = format!(
                    "OpenAI Interface: {api_interface}\n\
                     Record File:{}\n\
                     Model: {}\n\
                     Model Mode: {}\n\
                     Image: {:#?}\n\
                     mask: {:#?}\n\
                     audio file:{:#?}\n\
                     Completions{}",
                    self.record_file,
                    self.model,
                    self.model_mode,
                    self.image,
                    self.mask,
                    self.audio_file,
                    self.local_data
                        .keys()
                        .fold("".to_string(), |a, b| format!("{a}\n\t{b}")),
                );
            }
            // List the models available for the current mode.
            "md" => {
                let mut model_list: Vec<&str> = self.model_mode.models_available();
                model_list.sort();
                response_text = format!(
                    "Models for mode: {}: {}",
                    self.model_mode,
                    model_list
                        .iter()
                        .fold(String::new(), |a, b| format!("{a}\n\t{b}"))
                );
            }
            // Set the model.
            "ms" => {
                if let Some(model_name) = meta.next() {
                    response_text = format!("New model: {model_name}");
                    self.model = model_name.to_string();
                } else {
                    response_text = "No model".to_string();
                }
            }
            // List the available modes.
            "ml" => {
                response_text = "Modes\n\t\
                                 completions\n\t\
                                 chat\n\t\
                                 image\n\t\
                                 image_edit\n\t\
                                 audio_transcription\n\t\
                                 "
                .to_string()
            }
            // Switch mode (API endpoint).
            "m" => {
                match meta.next() {
                    Some(mode) => match mode {
                        "completions" => {
                            response_text = "Model mode => Completions\n".to_string();
                            self.model_mode = ModelMode::Completions;
                        }
                        "chat" => {
                            // Chat needs a system prompt, either from the
                            // command tail or set previously.
                            let system_prompt = meta.collect::<Vec<&str>>().join(" ");
                            if system_prompt.is_empty()
                                && api_interface.system_prompt.is_empty()
                            {
                                response_text =
                                    "Provide a system prompt for the chat".to_string();
                            } else {
                                self.model_mode = ModelMode::Chat;
                                response_text = "Model mode => Chat\n".to_string();
                                if !system_prompt.is_empty() {
                                    api_interface.system_prompt = system_prompt;
                                }
                            }
                        }
                        "image" => {
                            // An optional file argument jumps straight to
                            // ImageEdit using that file as the base image.
                            let file_name: String = meta.collect::<Vec<&str>>().join(" ");
                            if file_name.is_empty() {
                                self.model_mode = ModelMode::Image;
                                response_text = "Model mode => Image\n".to_string();
                            } else if PathBuf::from(file_name.as_str()).exists() {
                                self.image = Some(PathBuf::from(file_name));
                                self.model_mode = ModelMode::ImageEdit;
                                response_text = "Model mode => ImageEdit\n".to_string();
                            } else {
                                self.model_mode = ModelMode::Image;
                                // BUG FIX: was a plain string literal, so the
                                // user saw the text "{file_name}" verbatim.
                                response_text = format!(
                                    "File: {file_name} does not exist. Model mode => Image\n"
                                );
                            }
                        }
                        "image_edit" => {
                            match self.model_mode {
                                ModelMode::Image => {
                                    if self.image.is_none() && self.focus_image_url.is_none() {
                                        response_text = format!(
                                            "Cannot switch to ImageEdit mode \
                                             from {} mode until you have created \
                                             an image. Enter a prompt to create an image",
                                            self.model_mode
                                        );
                                    } else if self.mask.is_none() {
                                        response_text = format!(
                                            "Cannot switch to ImageEdit mode \
                                             from {} mode until you have created \
                                             a mask.",
                                            self.model_mode
                                        );
                                    } else {
                                        response_text = "Edit image".to_string();
                                        self.model_mode = ModelMode::ImageEdit;
                                    }
                                }
                                _ => {
                                    response_text = format!(
                                        "Cannot switch to ImageEdit mode from {} mode. \
                                         Must be in Image mode",
                                        self.model_mode
                                    );
                                }
                            };
                        }
                        "audio_transcription" => {
                            if self.audio_file.is_none() {
                                response_text =
                                    "Add an audio file before switching to \
                                     audio_transcription mode"
                                        .to_string();
                            } else {
                                self.model_mode = ModelMode::AudioTranscription;
                                response_text = "Audio Transcription mode".to_string();
                            }
                        }
                        _ => response_text = format!("{mode} not a Model Mode\n"),
                    },
                    None => {
                        response_text = "Model modes\n\
                                         completions\n\
                                         chat\n\
                                         image\n\
                                         image_edit\n\
                                         audio_transcription\n"
                            .to_string()
                    }
                }
            }
            // Display the chat context.
            "dx" => {
                response_text = api_interface.context.join("\n");
            }
            // Clear the chat context.
            "cx" => {
                response_text = "Clear context".to_string();
                api_interface.clear_context();
            }
            // Pretty print the conversation to a file.
            "ppx" => {
                let file_path: String = meta.collect::<Vec<&str>>().join(" ");
                response_text = match File::create(file_path.clone()) {
                    Ok(mut f) => {
                        let context: Vec<String> = api_interface.get_context()?;
                        // The context alternates question/answer entries.
                        assert!(context.len() % 2 == 0);
                        let context = CliInterface::pretty_print_conversation(context)?;
                        f.write_all(context.as_bytes())?;
                        format!("Wrote context to {file_path}")
                    }
                    Err(err) => {
                        format!("{err}: Failed to open file at: {file_path}")
                    }
                };
            }
            // Set verbosity.
            "v" => {
                if let Some(v) = meta.next() {
                    response_text = match v.parse::<usize>() {
                        Ok(v) => {
                            self.verbose = v;
                            format!("Verbosity set to {v}\n")
                        }
                        Err(err) => format!("Cannot make a usize from {v} because: {err}\n"),
                    }
                } else {
                    response_text = "No verbosity level passed".to_string();
                }
            }
            // Set max tokens.
            "k" => {
                if let Some(t) = meta.next() {
                    response_text = match t.parse::<u32>() {
                        Ok(t) => {
                            api_interface.tokens = t;
                            format!("New tokens: {t}\n")
                        }
                        // BUG FIX: message said "float" but this parses a u32.
                        Err(err) => format!("Cannot make a u32 from {t} because: {err}\n"),
                    };
                } else {
                    response_text = "No tokens".to_string();
                }
            }
            // Set the sampling temperature (0.0 ..= 2.0).
            "t" => {
                if let Some(t) = meta.next() {
                    response_text = match t.parse::<f32>() {
                        Ok(t) => {
                            if (0.0..=2.0).contains(&t) {
                                api_interface.temperature = t;
                                format!("New temperature: {t}\n")
                            } else {
                                "A float between 0 and 2\n".to_string()
                            }
                        }
                        Err(err) => format!("Cannot make a float from {t} because: {err}\n"),
                    }
                } else {
                    response_text = "No temperature".to_string();
                }
            }
            // Set or display the chat system prompt.
            "sp" => {
                if self.model_mode != ModelMode::Chat {
                    response_text = "This only makes sense in Chat mode".to_string();
                } else {
                    let system_prompt = meta.collect::<Vec<&str>>().join(" ");
                    if system_prompt.is_empty() {
                        if api_interface.system_prompt.is_empty() {
                            response_text =
                                "Provide a system prompt for the chat".to_string();
                        } else {
                            response_text =
                                format!("System Prompt {}", api_interface.system_prompt);
                        }
                    } else {
                        response_text = format!("System Prompt {system_prompt}");
                        api_interface.system_prompt = system_prompt;
                    }
                }
            }
            // Clear the stored image and image URL.
            "ci" => {
                self.image = None;
                self.focus_image_url = None;
                if self.model_mode == ModelMode::ImageEdit {
                    response_text = format!("Image cleared. Mode: {}", self.model_mode);
                } else {
                    response_text = "Image cleared".to_string();
                }
            }
            // Set an audio file and switch to transcription mode.
            "a" => {
                let file_name: String = meta.collect::<Vec<&str>>().join(" ");
                if file_name.is_empty() {
                    response_text = format!(
                        "Enter an audio file to transcribe: {}",
                        current_dir()?.display()
                    );
                } else if PathBuf::from(file_name.as_str()).exists() {
                    self.model_mode = ModelMode::AudioTranscription;
                    self.audio_file = Some(file_name.clone());
                    response_text = format!(
                        "Audio Transcription mode. \
                         File: {file_name}"
                    );
                } else {
                    response_text = format!(
                        "{file_name} does not exist. Paths relative to {}",
                        current_dir()?.display()
                    );
                }
            }
            // Set the mask used in image edit mode.
            "mask" => {
                let file_name: String = meta.collect::<Vec<&str>>().join(" ");
                if file_name.is_empty() {
                    response_text = format!(
                        "Enter the mask file path relative to: {}",
                        current_dir()?.display()
                    );
                } else if PathBuf::from(file_name.as_str()).exists() {
                    self.mask = Some(PathBuf::from(file_name));
                    response_text = format!("Mask set to: {:?}", self.mask.clone().unwrap());
                } else {
                    response_text = format!(
                        "{file_name} does not exist. Paths relative to {}",
                        current_dir()?.display()
                    );
                }
            }
            // Start a fine-tune from an uploaded training file.
            "ft" => {
                match meta.next() {
                    Some(name) => {
                        response_text =
                            match api_interface.fine_tune_create(name.to_string()) {
                                Ok(result) => format!("{:?}", result),
                                Err(err) => format!("{err}: Failed fine_tune_create{name}"),
                            };
                    }
                    None => response_text = "Cannot get name".to_string(),
                };
            }
            // Bind a file's contents to a name for `{name}` prompt expansion.
            "fl" => {
                match meta.next() {
                    Some(name) => {
                        let file_name: String = meta.collect::<Vec<&str>>().join(" ");
                        if file_name.is_empty() {
                            // BUG FIX: the line-continuations had no trailing
                            // spaces, so words ran together ("fileis bound").
                            response_text = format!(
                                "! fl <name> <file path>: The contents of a file \
                                 is bound to the name for use in prompts: {{name}} \
                                 expands to file content. The path is relative to: \
                                 {}",
                                current_dir()?.display()
                            );
                        } else if !PathBuf::from(file_name.as_str()).exists() {
                            response_text = format!(
                                "{file_name} does not exist. Paths relative to {}",
                                current_dir()?.display()
                            );
                        } else {
                            let contents = fs::read_to_string(Path::new(&file_name))?;
                            _ = self.local_data.insert(name.to_string(), contents);
                            response_text = format!("Loaded {name}");
                        }
                    }
                    None => response_text = "Cannot get name".to_string(),
                };
            }
            // Save the context to a file as JSON.
            "sx" => {
                let file_path: String = meta.collect::<Vec<&str>>().join(" ");
                let context = api_interface.get_context()?;
                let serialized_context = serde_json::to_string(&context)?;
                response_text = format!("Saved context to {}", file_path);
                let mut file = File::create(file_path)?;
                file.write_all(serialized_context.as_bytes())?;
            }
            // Restore the context from a JSON file.
            "rx" => {
                let file_path: String = meta.collect::<Vec<&str>>().join(" ");
                if file_path.is_empty() {
                    response_text = format!(
                        "Enter the path of the file containing the context: {}",
                        current_dir()?.display()
                    );
                } else if PathBuf::from(file_path.as_str()).exists() {
                    let file_contents = fs::read_to_string(Path::new(&file_path))?;
                    let context: Vec<String> = serde_json::from_str(&file_contents)?;
                    api_interface.set_context(context);
                    response_text = "Context loaded from file.".to_string();
                } else {
                    response_text = format!(
                        "{file_path} does not exist. Paths relative to {}",
                        current_dir()?.display()
                    );
                }
            }
            // Help text.
            // BUG FIXES: missing "\n" after the "ml" and first "ci" entries
            // (their lines ran into the next entry), unbalanced parentheses,
            // "JASONL" -> "JSONL", and "{{name}}" (doubled braces shown
            // verbatim in a non-format string) -> "{name}".
            "?" => {
                response_text = "\
                 p Display settings\n\
                 md Display all available models for the current mode\n\
                 ms <model> Change the current model\n\
                 ml List modes\n\
                 m <mode> Change mode (API endpoint)\n\
                 dx Display context (for chat)\n\
                 cx Clear context\n\
                 ppx <path> Pretty print conversation to path\n\
                 v Set verbosity\n\
                 k Set max tokens for completions\n\
                 t Set temperature for completions\n\
                 sp Set system prompt (after `! cc`)\n\
                 ci Clear image\n\
                 mask <path> Set the mask to use in image edit mode. A 1024x1024 PNG with transparent mask\n\
                 a <path> Audio file for transcription\n\
                 ci Clear the image stored for editing\n\
                 f List the files stored on the server\n\
                 fu <path> Upload a file of fine tuning data\n\
                 fd <file id> Delete a file\n\
                 fi <file id> Get information about file\n\
                 fc <file id> [destination_file] Get contents of file\n\
                 ft <file ID> Start a fine tune model using a JSONL training file \n\
                 fl <name> <path> Associate the contents of the `path` with `name` for use in prompts like: {name}\n\
                 sx <path> Save the context to a file at the specified path\n\
                 rx <path> Restore the context from a file at the specified path\n\
                 ? This text\n"
                    .to_string()
            }
            _ => response_text = format!("Unknown command: {cmd}\n"),
        };
    } else {
        response_text = "Enter a meta command".to_string();
    }
    Ok(response_text)
}
/// Report interesting response headers after an API call.
///
/// Returns one "key: value" line per header, but only when `self.verbose`
/// is non-zero, and only for headers whose value changed since the last
/// request (tracked in `self.header_cache`).  Quiet mode returns "".
/// Note: header order follows `HashMap` iteration and is not stable.
pub fn after_request(
    &mut self,
    response_headers: HashMap<String, String>,
) -> Result<String, Box<dyn std::error::Error>> {
    let mut result = String::new();
    if self.verbose > 0 {
        // Iterate (key, value) pairs once instead of re-looking each key up.
        for (k, v) in &response_headers {
            // Unchanged since last request => suppress the repeat.
            if self.header_cache.get(k) == Some(v) {
                continue;
            }
            self.header_cache.insert(k.clone(), v.clone());
            result += &format!("{k}: {v}\n");
        }
    }
    Ok(result)
}
/// Render a conversation context as human-readable text.
///
/// Entries alternate question/answer: even indices are labelled
/// "Question :" and odd indices "Answer:", one per line.
pub fn pretty_print_conversation(context: Vec<String>) -> Result<String, Box<dyn Error>> {
    // Single pass with enumerate replaces the original parallel
    // index-loop + iterator walk (which needed unwrap() on each step).
    let mut saved_context = String::new();
    for (i, entry) in context.iter().enumerate() {
        let label = if i % 2 == 0 { "Question :" } else { "Answer:" };
        saved_context.push_str(&format!("{label} {entry}\n"));
    }
    Ok(saved_context)
}
}
/// Entry point: parse the command line, build the line editor and API
/// interface, then run the read/eval/print loop until readline returns an
/// error (EOF or Ctrl-q).  Every question and answer is appended to the
/// record file; input history is saved on exit.
fn main() -> Result<(), Box<dyn Error>> {
    let cmd_line_opts = Arguments::parse();
    // The key comes from --api-key or, failing that, the environment.
    let _key_binding: String;
    let api_key = match cmd_line_opts.api_key.as_deref() {
        Some(key) => key,
        None => {
            // expect() with a message: a missing key is user error, so say
            // what to do about it instead of a bare unwrap panic.
            _key_binding = env::var("OPENAI_API_KEY")
                .expect("Pass --api-key or set the OPENAI_API_KEY environment variable");
            _key_binding.as_str()
        }
    };
    let model = cmd_line_opts.model.as_str();
    let tokens: u32 = cmd_line_opts.max_tokens;
    let temperature: f32 = cmd_line_opts.temperature;
    let mode: ModelMode = match ModelMode::from_str(cmd_line_opts.mode.as_str()) {
        Ok(m) => m,
        Err(_) => panic!("{} is an invalid mode", cmd_line_opts.mode.as_str()),
    };
    let mut cli_interface = CliInterface {
        // Set directly from the arguments instead of overwriting after init.
        record_file: cmd_line_opts.record_file.clone(),
        history_file: DEFAULT_HISTORY_FILE.to_string(),
        verbose: 0,
        audio_file: None,
        model: model.to_string(),
        model_mode: mode,
        focus_image_url: None,
        mask: None,
        image: None,
        header_cache: HashMap::new(),
        cost: 0.0,
        local_data: HashMap::new(),
    };
    // Transcript file; `append` implies write access, so the redundant
    // `.write(true)` is gone.
    let mut conversation_record_file: File = OpenOptions::new()
        .append(true)
        .create(true)
        .open(cli_interface.record_file.as_str())?;
    let mut read_line: Editor<MyHelper, FileHistory> = cli_interface.set_up_read_line()?;
    let mut prompt: String;
    let mut api_interface = ApiInterface::new(api_key, tokens, temperature);
    if let Some(sp) = cmd_line_opts.system_prompt {
        api_interface.system_prompt = sp;
    }
    loop {
        // BUG FIX: show the *current* mode. The prompt used the start-up
        // `mode` variable, so it went stale after a `! m <mode>` command.
        let p = format!("{}/{}:> ", cli_interface.model, cli_interface.model_mode);
        read_line.helper_mut().expect("No helper").colored_prompt =
            format!("\x1b[1;32m{p}\x1b[0m");
        let readline = read_line.readline(&p);
        let input = match readline {
            Ok(line) => line,
            Err(err) => {
                eprintln!("Readline {err}");
                break;
            }
        };
        read_line.add_history_entry(input.as_str())?;
        prompt = cli_interface.expand_variables(input.clone())?;
        // write_all: a short `write` could silently drop part of the record.
        conversation_record_file.write_all(
            format!(
                "Q: {}\n{}\n",
                Local::now().format("%Y-%m-%dT%H:%M:%S"),
                prompt
            )
            .as_bytes(),
        )?;
        let response_text: String;
        let prompt = prompt.as_str().trim();
        if prompt.is_empty() {
            response_text = "No prompt\n".to_string();
        } else if prompt.starts_with('!') {
            // Meta command: handled locally, no API round trip here.
            response_text = cli_interface.process_meta(prompt, &mut api_interface)?;
        } else {
            let start_time = Local::now();
            let response = match cli_interface.model_mode {
                ModelMode::AudioTranscription => {
                    // `prompt` is known non-empty here, but the API takes an
                    // Option so keep the defensive conversion.
                    let prompt_param: Option<&str> = if prompt.is_empty() {
                        None
                    } else {
                        Some(prompt)
                    };
                    match api_interface.audio_transcription(
                        Path::new(cli_interface.audio_file.as_ref().unwrap().as_str()),
                        prompt_param,
                    ) {
                        Ok(r) => {
                            format!("{}\n{}", cli_interface.after_request(r.headers)?, r.body,)
                        }
                        Err(err) => format!("{err}"),
                    }
                }
                ModelMode::Chat => {
                    match api_interface.chat(prompt, cli_interface.model.as_str()) {
                        Ok(apt_result) => {
                            cli_interface.cost = apt_result
                                .headers
                                .get("Cost")
                                .unwrap()
                                .parse::<f64>()
                                .unwrap();
                            // Fold this request's cost into the shared
                            // running total on disk.
                            fn update_spent(
                                cost: f64,
                            ) -> impl FnMut(SharedState) -> SharedState {
                                move |mut ss| {
                                    ss.spent += cost;
                                    ss
                                }
                            }
                            let ss: SharedState = match SharedState::read_write_atomic(
                                update_spent(cli_interface.cost),
                            ) {
                                Ok(ss) => ss,
                                Err(err) => panic!("{err}: Failed to update costs"),
                            };
                            let this_cost = cli_interface.cost;
                            let total_cost = ss.spent;
                            format!(
                                "{:.2}/{:.2}{}\n{}",
                                this_cost,
                                total_cost,
                                cli_interface.after_request(apt_result.headers)?,
                                apt_result.body,
                            )
                        }
                        Err(err) => format!("{err}"),
                    }
                }
                ModelMode::Completions => {
                    match api_interface.completion(prompt, cli_interface.model.as_str()) {
                        Ok(r) => {
                            format!("{}\n{}", cli_interface.after_request(r.headers)?, r.body,)
                        }
                        Err(err) => format!("{err}"),
                    }
                }
                ModelMode::Image => match api_interface.image(prompt) {
                    Ok(r) => {
                        cli_interface.focus_image_url = Some(r.body);
                        let url: String =
                            cli_interface.focus_image_url.as_ref().unwrap().clone();
                        match cli_interface.process_image_url(&url) {
                            Ok(_) => format!("Opened: {url}"),
                            Err(err) => format!("{err}: Failed to open: {url}"),
                        }
                    }
                    Err(err) => format!("{err}"),
                },
                ModelMode::ImageEdit => {
                    match api_interface.image_edit(
                        prompt,
                        cli_interface.image.clone().unwrap().as_path(),
                        cli_interface.mask.clone().unwrap().as_path(),
                    ) {
                        Ok(r) => match cli_interface.process_image_url(r.body.as_str()) {
                            Ok(_) => format!("Opened: {}", r.body),
                            Err(err) => format!("{err}: Failed to open: {}", r.body),
                        },
                        Err(err) => format!("{err}"),
                    }
                }
            };
            let end_time = Local::now();
            let duration = end_time.signed_duration_since(start_time);
            response_text = format!("{} seconds\n{response}", duration.num_seconds());
        }
        if cli_interface.verbose > 0 {
            eprintln!(
                "Conversation: {} turns and {} bytes",
                api_interface.context.len(),
                api_interface.context.iter().fold(0, |a, b| a + b.len())
            );
        }
        conversation_record_file.write_all(
            format!(
                "A: {}\n{response_text}\n",
                Local::now().format("%Y-%m-%dT%H:%M:%S"),
            )
            .as_bytes(),
        )?;
        println!("{response_text}");
    }
    // Persist this session's input history for the next run.
    read_line.append_history(cli_interface.history_file.as_str())?;
    read_line.clear_history()?;
    Ok(())
}