use crate::{
    client_utils, id_utils,
    openai::responses_api::{
        self, FunctionTool, ResponseCreateParams, ResponseFormatJSONObject, ResponseFormatText,
        ResponseFormatTextConfig, ResponseFormatTextJSONSchemaConfig, ResponseFunctionToolCall,
        ResponseIncludable, ResponseInputAudio, ResponseInputAudioInputAudio, ResponseInputContent,
        ResponseInputImage, ResponseInputItem, ResponseInputItemFunctionCallOutput,
        ResponseInputItemMessage, ResponseInputText, ResponseOutputContent, ResponseOutputItem,
        ResponseOutputItemImageGenerationCall, ResponseOutputMessage, ResponseOutputText,
        ResponseReasoningItem, ResponseReasoningItemSummary, ResponseReasoningItemSummaryUnion,
        ResponseStreamEvent, ResponseTextConfig, ResponseUsage, ToolChoiceFunction,
        ToolImageGeneration,
    },
    source_part_utils, AssistantMessage, AudioFormat, ContentDelta, ImagePart, ImagePartDelta,
    LanguageModel, LanguageModelError, LanguageModelInput, LanguageModelMetadata,
    LanguageModelResult, LanguageModelStream, Message, ModelResponse, ModelUsage, Part, PartDelta,
    PartialModelResponse, ReasoningOptions, ReasoningPart, ReasoningPartDelta, ResponseFormatJson,
    ResponseFormatOption, TextPartDelta, Tool, ToolCallPart, ToolCallPartDelta, ToolChoiceOption,
    ToolMessage, ToolResultPart, UserMessage,
};
use async_stream::try_stream;
use futures::{future::BoxFuture, StreamExt};
use reqwest::{
    header::{self, HeaderMap, HeaderName, HeaderValue},
    Client,
};
use std::{collections::HashMap, sync::Arc};

const PROVIDER: &str = "openai";

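/// A [`LanguageModel`] backed by the OpenAI Responses API.
///
/// Attaching pricing metadata via [`OpenAIModel::with_metadata`] enables cost
/// calculation on responses. A minimal construction sketch (the model id and
/// environment variable below are illustrative, not prescribed by this crate):
///
/// ```ignore
/// let model = OpenAIModel::new(
///     "gpt-4o-mini",
///     OpenAIModelOptions {
///         api_key: std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY must be set"),
///         ..Default::default()
///     },
/// );
/// ```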
pub struct OpenAIModel {
    model_id: String,
    api_key: String,
    base_url: String,
    client: Client,
    metadata: Option<Arc<LanguageModelMetadata>>,
    headers: HashMap<String, String>,
}

#[derive(Clone, Default)]
pub struct OpenAIModelOptions {
    pub base_url: Option<String>,
    pub api_key: String,
    pub headers: Option<HashMap<String, String>>,
    pub client: Option<Client>,
}

impl OpenAIModel {
    #[must_use]
    pub fn new(model_id: impl Into<String>, options: OpenAIModelOptions) -> Self {
        let OpenAIModelOptions {
            base_url,
            api_key,
            headers,
            client,
        } = options;

        let base_url = base_url
            .unwrap_or_else(|| "https://api.openai.com/v1".to_string())
            .trim_end_matches('/')
            .to_string();
        let client = client.unwrap_or_else(Client::new);
        let headers = headers.unwrap_or_default();

        Self {
            model_id: model_id.into(),
            api_key,
            base_url,
            client,
            metadata: None,
            headers,
        }
    }

    #[must_use]
    pub fn with_metadata(mut self, metadata: LanguageModelMetadata) -> Self {
        self.metadata = Some(Arc::new(metadata));
        self
    }

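    /// Builds the HTTP headers for a request: a `Bearer` authorization header
    /// derived from the configured API key, plus any user-supplied headers.
    /// Returns `InvalidInput` if a key or value is not a valid HTTP header.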
    fn request_headers(&self) -> LanguageModelResult<HeaderMap> {
        let mut headers = HeaderMap::new();

        let auth_header =
            HeaderValue::from_str(&format!("Bearer {}", self.api_key)).map_err(|error| {
                LanguageModelError::InvalidInput(format!(
                    "Invalid OpenAI API key header value: {error}"
                ))
            })?;
        headers.insert(header::AUTHORIZATION, auth_header);

        for (key, value) in &self.headers {
            let header_name = HeaderName::from_bytes(key.as_bytes()).map_err(|error| {
                LanguageModelError::InvalidInput(format!(
                    "Invalid OpenAI header name '{key}': {error}"
                ))
            })?;
            let header_value = HeaderValue::from_str(value).map_err(|error| {
                LanguageModelError::InvalidInput(format!(
                    "Invalid OpenAI header value for '{key}': {error}"
                ))
            })?;
            headers.insert(header_name, header_value);
        }

        Ok(headers)
    }
}

impl LanguageModel for OpenAIModel {
    fn provider(&self) -> &'static str {
        PROVIDER
    }

    fn model_id(&self) -> String {
        self.model_id.clone()
    }

    fn metadata(&self) -> Option<&LanguageModelMetadata> {
        self.metadata.as_deref()
    }

    fn generate(
        &self,
        input: LanguageModelInput,
    ) -> BoxFuture<'_, LanguageModelResult<ModelResponse>> {
        Box::pin(async move {
            crate::opentelemetry::trace_generate(
                self.provider(),
                &self.model_id(),
                input,
                |input| async move {
                    let params = convert_to_response_create_params(input, &self.model_id())?;
                    let header_map = self.request_headers()?;

                    let json: responses_api::Response = client_utils::send_json(
                        &self.client,
                        &format!("{}/responses", self.base_url),
                        &params,
                        header_map,
                    )
                    .await?;

                    let responses_api::Response { output, usage, .. } = json;

                    let content = map_openai_output_items(output)?;
                    let usage = usage.map(ModelUsage::from);

                    let cost = if let (Some(usage), Some(pricing)) = (
                        usage.as_ref(),
                        self.metadata().and_then(|m| m.pricing.as_ref()),
                    ) {
                        Some(usage.calculate_cost(pricing))
                    } else {
                        None
                    };

                    Ok(ModelResponse {
                        content,
                        usage,
                        cost,
                    })
                },
            )
            .await
        })
    }

    fn stream(
        &self,
        input: LanguageModelInput,
    ) -> BoxFuture<'_, LanguageModelResult<LanguageModelStream>> {
        Box::pin(async move {
            crate::opentelemetry::trace_stream(
                self.provider(),
                &self.model_id(),
                input,
                |input| async move {
                    let metadata = self.metadata.clone();
                    let mut params = convert_to_response_create_params(input, &self.model_id())?;
                    params.stream = Some(true);
                    let header_map = self.request_headers()?;

                    let mut chunk_stream = client_utils::send_sse_stream::<_, ResponseStreamEvent>(
                        &self.client,
                        &format!("{}/responses", self.base_url),
                        &params,
                        header_map,
                        self.provider(),
                    )
                    .await?;

                    let stream = try_stream! {
                        let mut refusal = String::new();

                        while let Some(event) = chunk_stream.next().await {
                            let event = event?;

                            if let ResponseStreamEvent::Completed(ref completed_event) = event {
                                if let Some(usage) = &completed_event.response.usage {
                                    let usage = ModelUsage::from(usage.clone());
                                    yield PartialModelResponse {
                                        delta: None,
                                        cost: metadata
                                            .as_ref()
                                            .and_then(|m| m.pricing.as_ref())
                                            .map(|pricing| usage.calculate_cost(pricing)),
                                        usage: Some(usage),
                                    }
                                }
                            }

                            if let ResponseStreamEvent::RefusalDelta(ref refusal_delta_event) = event {
                                refusal.push_str(&refusal_delta_event.delta);
                            }

                            let part_delta = map_openai_stream_event(event)?;
                            if let Some(part_delta) = part_delta {
                                yield PartialModelResponse {
                                    delta: Some(part_delta),
                                    ..Default::default()
                                }
                            }
                        }

                        if !refusal.is_empty() {
                            Err(LanguageModelError::Refusal(refusal))?;
                        }
                    };

                    Ok(LanguageModelStream::from_stream(stream))
                },
            )
            .await
        })
    }
}

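/// Maps a provider-agnostic [`LanguageModelInput`] onto Responses API request
/// parameters. Responses are never stored server-side (`store: false`); when
/// reasoning is enabled the request also asks for encrypted reasoning content
/// so it can be replayed on later turns, and requesting the image modality
/// appends the built-in image generation tool.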
fn convert_to_response_create_params(
    input: LanguageModelInput,
    model_id: &str,
) -> LanguageModelResult<ResponseCreateParams> {
    let LanguageModelInput {
        messages,
        system_prompt,
        max_tokens,
        temperature,
        top_p,
        response_format,
        tools,
        tool_choice,
        extra,
        modalities,
        reasoning,
        ..
    } = input;

    let mut params = ResponseCreateParams {
        store: Some(false),
        model: Some(model_id.to_string()),
        input: Some(convert_to_openai_inputs(messages)?),
        instructions: system_prompt,
        max_output_tokens: max_tokens,
        temperature,
        top_p,
        tools: tools.map(|ts| ts.into_iter().map(Into::into).collect()),
        tool_choice: tool_choice
            .map(convert_to_openai_response_tool_choice)
            .transpose()?,
        text: response_format.map(Into::into),
        include: if reasoning.as_ref().is_some_and(|r| r.enabled) {
            Some(vec![ResponseIncludable::ReasoningEncryptedContent])
        } else {
            None
        },
        reasoning: reasoning.map(TryInto::try_into).transpose()?,
        extra,
        ..Default::default()
    };

    if modalities.is_some_and(|m| m.contains(&crate::Modality::Image)) {
        params
            .tools
            .get_or_insert_with(Vec::new)
            .push(responses_api::Tool::ImageGeneration(ToolImageGeneration {
                ..Default::default()
            }));
    }

    Ok(params)
}

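/// Flattens a conversation into Responses API input items. A single user,
/// assistant, or tool message may expand into several input items.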
fn convert_to_openai_inputs(messages: Vec<Message>) -> LanguageModelResult<Vec<ResponseInputItem>> {
    messages
        .into_iter()
        .try_fold(Vec::new(), |mut acc, message| {
            let mut items = match message {
                Message::User(user_message) => vec![user_message.try_into()?],
                Message::Assistant(assistant_message) => {
                    convert_assistant_message_to_response_input_items(assistant_message)?
                }
                Message::Tool(tool_message) => {
                    convert_tool_message_to_response_input_items(tool_message)?
                }
            };
            acc.append(&mut items);
            Ok(acc)
        })
}

impl TryFrom<UserMessage> for ResponseInputItem {
    type Error = LanguageModelError;
    fn try_from(user_message: UserMessage) -> Result<Self, Self::Error> {
        let message_parts =
            source_part_utils::get_compatible_parts_without_source_parts(user_message.content);
        Ok(Self::Message(ResponseInputItemMessage {
            role: "user".to_string(),
            content: message_parts
                .into_iter()
                .map(|part| {
                    Ok(match part {
                        Part::Text(text_part) => {
                            ResponseInputContent::InputText(ResponseInputText {
                                text: text_part.text,
                            })
                        }
                        Part::Image(image_part) => {
                            ResponseInputContent::InputImage(ResponseInputImage {
                                file_id: None,
                                image_url: format!(
                                    "data:{};base64,{}",
                                    image_part.mime_type, image_part.data
                                )
                                .into(),
                                detail: "auto".to_string(),
                            })
                        }
                        Part::Audio(audio_part) => {
                            let format = match audio_part.format {
                                AudioFormat::Mp3 => Ok("mp3"),
                                AudioFormat::Wav => Ok("wav"),
                                _ => Err(LanguageModelError::Unsupported(
                                    PROVIDER,
                                    format!(
                                        "Cannot convert audio format to OpenAI InputAudio format \
                                         for format {:?}",
                                        audio_part.format
                                    ),
                                )),
                            }?;

                            ResponseInputContent::InputAudio(ResponseInputAudio {
                                input_audio: ResponseInputAudioInputAudio {
                                    data: audio_part.data,
                                    format: format.to_string(),
                                },
                            })
                        }
                        _ => Err(LanguageModelError::Unsupported(
                            PROVIDER,
                            format!(
                                "Cannot convert part to OpenAI input content for part {part:?}"
                            ),
                        ))?,
                    })
                })
                .collect::<LanguageModelResult<Vec<_>>>()?,
            status: None,
        }))
    }
}

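/// Replays a prior assistant message as Responses API input items: text
/// becomes a completed output message (with a locally generated `msg_` id),
/// reasoning parts round-trip their encrypted content, generated images are
/// re-attached as completed image generation calls, and tool calls become
/// function calls.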
fn convert_assistant_message_to_response_input_items(
    assistant_message: AssistantMessage,
) -> LanguageModelResult<Vec<ResponseInputItem>> {
    let message_parts =
        source_part_utils::get_compatible_parts_without_source_parts(assistant_message.content);

    message_parts
        .into_iter()
        .map(|part| {
            Ok(match part {
                Part::Text(text_part) => {
                    ResponseInputItem::OutputMessage(ResponseOutputMessage {
                        id: format!("msg_{}", id_utils::generate_string(15)),
                        role: "assistant".to_string(),
                        content: vec![ResponseOutputContent::OutputText(ResponseOutputText {
                            text: text_part.text,
                            annotations: vec![],
                        })],
                        status: "completed".to_string(),
                    })
                }
                Part::Reasoning(reasoning_part) => {
                    ResponseInputItem::Reasoning(ResponseReasoningItem {
                        id: reasoning_part.id.unwrap_or_default(),
                        summary: vec![ResponseReasoningItemSummaryUnion::SummaryText(
                            ResponseReasoningItemSummary {
                                text: reasoning_part.text,
                            },
                        )],
                        content: None,
                        encrypted_content: reasoning_part.signature,
                        status: None,
                    })
                }
                Part::Image(image_part) => {
                    ResponseInputItem::ImageGenerationCall(ResponseOutputItemImageGenerationCall {
                        id: image_part.id.unwrap_or_default(),
                        status: "completed".to_string(),
                        result: Some(format!(
                            "data:{};base64,{}",
                            image_part.mime_type, image_part.data
                        )),
                        output_format: image_part
                            .mime_type
                            .strip_prefix("image/")
                            .unwrap_or("png")
                            .to_string(),
                        size: if let (Some(width), Some(height)) =
                            (image_part.width, image_part.height)
                        {
                            Some(format!("{width}x{height}"))
                        } else {
                            None
                        },
                    })
                }
                Part::ToolCall(tool_call_part) => {
                    ResponseInputItem::FunctionCall(ResponseFunctionToolCall {
                        arguments: tool_call_part.args.to_string(),
                        call_id: tool_call_part.tool_call_id,
                        name: tool_call_part.tool_name,
                        id: tool_call_part.id,
                        status: None,
                    })
                }
                _ => Err(LanguageModelError::Unsupported(
                    PROVIDER,
                    format!("Cannot convert part to OpenAI input item for part {part:?}"),
                ))?,
            })
        })
        .collect::<LanguageModelResult<_>>()
}

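/// Converts a tool message into function-call outputs. Each text part of a
/// tool result becomes one `FunctionCallOutput` keyed by its `call_id`; any
/// non-tool-result part is rejected as invalid input.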
fn convert_tool_message_to_response_input_items(
    tool_message: ToolMessage,
) -> LanguageModelResult<Vec<ResponseInputItem>> {
    tool_message
        .content
        .into_iter()
        .try_fold(Vec::new(), |mut acc, part| {
            if let Part::ToolResult(ToolResultPart {
                content,
                tool_call_id,
                ..
            }) = part
            {
                let tool_result_part_content =
                    source_part_utils::get_compatible_parts_without_source_parts(content);

                let items = tool_result_part_content
                    .into_iter()
                    .map(|tool_result_part_part| {
                        Ok(match tool_result_part_part {
                            Part::Text(text_part) => ResponseInputItem::FunctionCallOutput(
                                ResponseInputItemFunctionCallOutput {
                                    call_id: tool_call_id.clone(),
                                    output: text_part.text,
                                    id: None,
                                    status: None,
                                },
                            ),
                            _ => Err(LanguageModelError::Unsupported(
                                PROVIDER,
                                format!(
                                    "Cannot convert tool result part to OpenAI input item for \
                                     part {tool_result_part_part:?}"
                                ),
                            ))?,
                        })
                    })
                    .collect::<LanguageModelResult<Vec<_>>>()?;

                acc.extend(items);

                Ok(acc)
            } else {
                Err(LanguageModelError::InvalidInput(
                    "Tool messages must contain only tool result parts".to_string(),
                ))
            }
        })
}

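/// Converts a provider-agnostic tool definition into an OpenAI function tool.
/// Strict mode is always enabled so the model must emit arguments that match
/// the declared JSON schema.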
impl From<Tool> for responses_api::Tool {
    fn from(tool: Tool) -> Self {
        Self::Function(FunctionTool {
            name: tool.name,
            description: Some(tool.description),
            parameters: Some(tool.parameters),
            strict: Some(true),
        })
    }
}

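/// Maps the tool-choice option onto the Responses API value: `"none"`,
/// `"auto"`, or `"required"` as plain strings, or an object such as
/// `{"type": "function", "name": "get_weather"}` when a specific tool is
/// forced (the tool name here is illustrative).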
fn convert_to_openai_response_tool_choice(
    tool_choice: ToolChoiceOption,
) -> LanguageModelResult<serde_json::Value> {
    match tool_choice {
        ToolChoiceOption::None => Ok("none".into()),
        ToolChoiceOption::Auto => Ok("auto".into()),
        ToolChoiceOption::Required => Ok("required".into()),
        ToolChoiceOption::Tool(tool) => serde_json::to_value(ToolChoiceFunction {
            choice_type: "function".into(),
            name: tool.tool_name,
        })
        .map_err(|e| {
            LanguageModelError::InvalidInput(format!(
                "Failed to convert tool choice to OpenAI format: {e}"
            ))
        }),
    }
}

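/// Maps the response-format option to the Responses API `text` config: a
/// strict JSON schema when one is provided, plain JSON object mode when only
/// JSON is requested, and the default text format otherwise.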
impl From<ResponseFormatOption> for ResponseTextConfig {
    fn from(value: ResponseFormatOption) -> Self {
        match value {
            ResponseFormatOption::Json(ResponseFormatJson {
                name,
                description,
                schema,
            }) => {
                if let Some(schema) = schema {
                    Self {
                        format: Some(ResponseFormatTextConfig::JsonSchema(
                            ResponseFormatTextJSONSchemaConfig {
                                name,
                                description,
                                schema,
                                strict: Some(true),
                            },
                        )),
                        verbosity: None,
                    }
                } else {
                    Self {
                        format: Some(ResponseFormatTextConfig::JsonObject(
                            ResponseFormatJSONObject {},
                        )),
                        verbosity: None,
                    }
                }
            }
            ResponseFormatOption::Text => Self {
                format: Some(ResponseFormatTextConfig::Text(ResponseFormatText {})),
                verbosity: None,
            },
        }
    }
}

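/// Maps reasoning options to the Responses API `reasoning` config. Enabling
/// reasoning requests an `"auto"` summary; a token budget, when present, is
/// translated to an effort level via its `TryInto` conversion.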
impl TryFrom<ReasoningOptions> for responses_api::Reasoning {
    type Error = LanguageModelError;

    fn try_from(value: ReasoningOptions) -> Result<Self, Self::Error> {
        Ok(Self {
            summary: value.enabled.then(|| "auto".to_string()),
            effort: value.budget_tokens.map(TryInto::try_into).transpose()?,
        })
    }
}

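/// Maps completed Responses API output items to provider-agnostic parts:
/// output text, function calls, generated images, and reasoning summaries.
/// A refusal surfaces as [`LanguageModelError::Refusal`]; web search calls
/// are intentionally dropped.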
fn map_openai_output_items(items: Vec<ResponseOutputItem>) -> LanguageModelResult<Vec<Part>> {
    items
        .into_iter()
        .try_fold(Vec::new(), |mut acc, item| match item {
            ResponseOutputItem::Message(msg) => {
                let parts = msg
                    .content
                    .into_iter()
                    .map(|content| match content {
                        ResponseOutputContent::OutputText(output_text) => {
                            Ok(Part::text(output_text.text))
                        }
                        ResponseOutputContent::Refusal(refusal) => {
                            Err(LanguageModelError::Refusal(refusal.refusal))
                        }
                    })
                    .collect::<LanguageModelResult<Vec<_>>>()?;

                acc.extend(parts);
                Ok(acc)
            }
            ResponseOutputItem::FunctionCall(function_tool_call) => {
                let args = serde_json::from_str(&function_tool_call.arguments).map_err(|e| {
                    LanguageModelError::Invariant(
                        PROVIDER,
                        format!("Failed to parse function tool call arguments: {e}"),
                    )
                })?;
                let mut tool_call_part =
                    ToolCallPart::new(function_tool_call.call_id, function_tool_call.name, args);

                if let Some(id) = function_tool_call.id {
                    tool_call_part.id = Some(id);
                }
                let part = Part::ToolCall(tool_call_part);

                acc.push(part);
                Ok(acc)
            }
            ResponseOutputItem::ImageGenerationCall(image_gen_call) => {
                let (width, height) = if let Some(size) = image_gen_call.size {
                    parse_openai_image_size(&size)
                } else {
                    (None, None)
                };

                let mut image_part = ImagePart::new(
                    image_gen_call.result.ok_or_else(|| {
                        LanguageModelError::Invariant(
                            PROVIDER,
                            "Image generation call did not return a result".to_string(),
                        )
                    })?,
                    format!("image/{}", image_gen_call.output_format),
                )
                .with_id(image_gen_call.id);
                if let Some(width) = width {
                    image_part = image_part.with_width(width);
                }
                if let Some(height) = height {
                    image_part = image_part.with_height(height);
                }
                let part: Part = image_part.into();

                acc.push(part);
                Ok(acc)
            }
            ResponseOutputItem::Reasoning(reasoning_item) => {
                let summary_text = reasoning_item
                    .summary
                    .into_iter()
                    .map(|summary_union| match summary_union {
                        ResponseReasoningItemSummaryUnion::SummaryText(summary_text) => {
                            summary_text.text
                        }
                    })
                    .collect::<Vec<_>>()
                    .join("\n");

                let mut reasoning_part =
                    ReasoningPart::new(summary_text).with_id(reasoning_item.id);
                if let Some(signature) = reasoning_item.encrypted_content {
                    reasoning_part = reasoning_part.with_signature(signature);
                }
                let part: Part = reasoning_part.into();

                acc.push(part);
                Ok(acc)
            }
            ResponseOutputItem::WebSearchCall(_) => Ok(acc),
        })
}

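/// Maps a single stream event to an optional content delta. Deltas are keyed
/// by `output_index` so the caller can accumulate them per output item;
/// events that carry no content (and the completion/refusal events handled in
/// the stream loop) map to `None`, while a failed stream becomes an invariant
/// error.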
fn map_openai_stream_event(
    event: ResponseStreamEvent,
) -> LanguageModelResult<Option<ContentDelta>> {
    match event {
        ResponseStreamEvent::Failed(_) => Err(LanguageModelError::Invariant(
            PROVIDER,
            "OpenAI stream event failed".to_string(),
        )),
        ResponseStreamEvent::OutputItemAdded(output_item_added_event) => {
            match output_item_added_event.item {
                ResponseOutputItem::FunctionCall(function_tool_call) => {
                    let tool_call_part = PartDelta::ToolCall(ToolCallPartDelta {
                        args: Some(function_tool_call.arguments),
                        tool_name: Some(function_tool_call.name),
                        tool_call_id: Some(function_tool_call.call_id),
                        id: function_tool_call.id,
                    });
                    Ok(Some(ContentDelta {
                        index: output_item_added_event.output_index,
                        part: tool_call_part,
                    }))
                }
                ResponseOutputItem::Reasoning(reasoning_item) => {
                    if let Some(encrypted_content) = reasoning_item.encrypted_content {
                        let reasoning_part = PartDelta::Reasoning(ReasoningPartDelta {
                            signature: Some(encrypted_content),
                            text: None,
                            id: Some(reasoning_item.id),
                        });
                        Ok(Some(ContentDelta {
                            index: output_item_added_event.output_index,
                            part: reasoning_part,
                        }))
                    } else {
                        Ok(None)
                    }
                }
                _ => Ok(None),
            }
        }
        ResponseStreamEvent::TextDelta(text_delta_event) => {
            let text_part = PartDelta::Text(TextPartDelta {
                text: text_delta_event.delta,
                citation: None,
            });
            Ok(Some(ContentDelta {
                index: text_delta_event.output_index,
                part: text_part,
            }))
        }
        ResponseStreamEvent::FunctionCallArgumentsDelta(function_call_arguments_delta_event) => {
            let tool_call_part = PartDelta::ToolCall(ToolCallPartDelta {
                args: Some(function_call_arguments_delta_event.delta),
                ..Default::default()
            });

            Ok(Some(ContentDelta {
                index: function_call_arguments_delta_event.output_index,
                part: tool_call_part,
            }))
        }
        ResponseStreamEvent::ImageGenCallPartialImage(partial_image_event) => {
            let (width, height) = if let Some(size) = partial_image_event.size {
                parse_openai_image_size(&size)
            } else {
                (None, None)
            };

            let image_part = PartDelta::Image(ImagePartDelta {
                width,
                height,
                mime_type: Some(format!("image/{}", partial_image_event.output_format)),
                data: Some(partial_image_event.partial_image_b64),
                id: Some(partial_image_event.item_id),
            });

            Ok(Some(ContentDelta {
                index: partial_image_event.output_index,
                part: image_part,
            }))
        }
        ResponseStreamEvent::ReasoningSummaryTextDelta(reasoning_summary_text_delta_event) => {
            let reasoning_part = PartDelta::Reasoning(ReasoningPartDelta {
                text: Some(reasoning_summary_text_delta_event.delta),
                ..Default::default()
            });
            Ok(Some(ContentDelta {
                index: reasoning_summary_text_delta_event.output_index,
                part: reasoning_part,
            }))
        }
        _ => Ok(None),
    }
}

impl From<ResponseUsage> for ModelUsage {
    fn from(value: ResponseUsage) -> Self {
        Self {
            input_tokens: value.input_tokens,
            output_tokens: value.output_tokens,
            ..Default::default()
        }
    }
}

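/// Parses an OpenAI image size string such as `"1024x1536"` into a
/// `(width, height)` pair. Either component is `None` when it is missing or
/// not a number (e.g. the `"auto"` size).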
fn parse_openai_image_size(size_dim: &str) -> (Option<u32>, Option<u32>) {
    let parts: Vec<&str> = size_dim.split('x').collect();
    let width = parts.first().and_then(|w| w.parse().ok());
    let height = parts.get(1).and_then(|h| h.parse().ok());
    (width, height)
}
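
#[cfg(test)]
mod tests {
    use super::parse_openai_image_size;

    // A minimal sketch of the size-parsing behavior, derived from the
    // function above; the sample inputs are illustrative, not taken from
    // recorded API responses.
    #[test]
    fn parses_well_formed_and_malformed_sizes() {
        assert_eq!(
            parse_openai_image_size("1024x1536"),
            (Some(1024), Some(1536))
        );
        // A non-numeric size such as "auto" yields no dimensions.
        assert_eq!(parse_openai_image_size("auto"), (None, None));
        // A missing height leaves only the width.
        assert_eq!(parse_openai_image_size("512"), (Some(512), None));
    }
}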