pub fn create_chat_completion_request(
model: String,
prompt: &Prompt,
is_streaming: bool,
) -> Result<CreateChatCompletionRequest, OpenAICompatibleInnerError>
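
// A minimal, self-contained sketch of the mapping this signature implies:
// copy the prompt's messages and the streaming flag onto the request struct,
// or fail with an error. Every type, field, and function below is a
// simplified stand-in defined locally for illustration; the crate's real
// Prompt, CreateChatCompletionRequest, and OpenAICompatibleInnerError are
// not shown in the source and will differ.

#[derive(Clone)]
struct SketchMessage {
    role: String,
    content: String,
}

struct SketchPrompt {
    messages: Vec<SketchMessage>,
}

struct SketchChatCompletionRequest {
    model: String,
    messages: Vec<SketchMessage>,
    stream: Option<bool>,
}

#[derive(Debug)]
enum SketchError {
    EmptyPrompt,
}

fn sketch_create_chat_completion_request(
    model: String,
    prompt: &SketchPrompt,
    is_streaming: bool,
) -> Result<SketchChatCompletionRequest, SketchError> {
    // Reject prompts with no messages; a real implementation would surface
    // this kind of validation failure through OpenAICompatibleInnerError.
    if prompt.messages.is_empty() {
        return Err(SketchError::EmptyPrompt);
    }

    // Carry the model name, the prompt's messages, and the streaming flag
    // over onto the request value.
    Ok(SketchChatCompletionRequest {
        model,
        messages: prompt.messages.clone(),
        stream: Some(is_streaming),
    })
}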