1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
//! # Embedding Module
//!
//! This module provides functionality for interacting with the OpenAI Embeddings API.
//! It allows you to convert text into numerical vector representations (embeddings)
//! that capture semantic meaning, enabling various NLP tasks such as semantic search,
//! clustering, and similarity comparison.
//!
//! ## Key Features
//!
//! - **Text Embedding Generation**: Convert single or multiple texts into vector embeddings
//! - **Multiple Input Formats**: Support for single text strings or arrays of texts
//! - **Flexible Encoding**: Support for both `float` and `base64` encoding formats
//! - **Various Model Support**: Compatible with OpenAI's embedding models (e.g., `text-embedding-3-small`, `text-embedding-3-large`)
//! - **Multi-dimensional Output**: Support for 1D, 2D, and 3D embedding vectors
//!
//! ## Quick Start
//!
//! ```rust,no_run
//! use openai_tools::embedding::request::Embedding;
//! use openai_tools::common::models::EmbeddingModel;
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     // Initialize the embedding client
//!     let mut embedding = Embedding::new()?;
//!
//!     // Configure the model and input text
//!     embedding
//!         .model(EmbeddingModel::TextEmbedding3Small)
//!         .input_text("Hello, world!");
//!
//!     // Generate embedding
//!     let response = embedding.embed().await?;
//!
//!     // Access the embedding vector
//!     let vector = response.data[0].embedding.as_1d().unwrap();
//!     println!("Embedding dimension: {}", vector.len());
//!     Ok(())
//! }
//! ```
//!
//! ## Usage Examples
//!
//! ### Single Text Embedding
//!
//! ```rust,no_run
//! use openai_tools::embedding::request::Embedding;
//! use openai_tools::common::models::EmbeddingModel;
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let mut embedding = Embedding::new()?;
//!
//!     embedding
//!         .model(EmbeddingModel::TextEmbedding3Small)
//!         .input_text("The quick brown fox jumps over the lazy dog.");
//!
//!     let response = embedding.embed().await?;
//!
//!     // The response contains embedding data
//!     assert_eq!(response.object, "list");
//!     assert_eq!(response.data.len(), 1);
//!
//!     let vector = response.data[0].embedding.as_1d().unwrap();
//!     println!("Generated embedding with {} dimensions", vector.len());
//!     Ok(())
//! }
//! ```
//!
//! ### Batch Text Embedding
//!
//! ```rust,no_run
//! use openai_tools::embedding::request::Embedding;
//! use openai_tools::common::models::EmbeddingModel;
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let mut embedding = Embedding::new()?;
//!
//!     // Embed multiple texts at once
//!     let texts = vec![
//!         "Hello, world!",
//!         "こんにちは、世界!",
//!         "Bonjour le monde!",
//!     ];
//!
//!     embedding
//!         .model(EmbeddingModel::TextEmbedding3Small)
//!         .input_text_array(texts);
//!
//!     let response = embedding.embed().await?;
//!
//!     // Each input text gets its own embedding
//!     for (i, data) in response.data.iter().enumerate() {
//!         let vector = data.embedding.as_1d().unwrap();
//!         println!("Text {}: {} dimensions", i, vector.len());
//!     }
//!     Ok(())
//! }
//! ```
//!
//! ### Using Different Encoding Formats
//!
//! ```rust,no_run
//! use openai_tools::embedding::request::Embedding;
//! use openai_tools::common::models::EmbeddingModel;
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let mut embedding = Embedding::new()?;
//!
//!     embedding
//!         .model(EmbeddingModel::TextEmbedding3Small)
//!         .input_text("Sample text for embedding")
//!         .encoding_format("float"); // or "base64"
//!
//!     let response = embedding.embed().await?;
//!     println!("Model used: {}", response.model);
//!     println!("Token usage: {:?}", response.usage);
//!     Ok(())
//! }
//! ```
//!
//! ## Supported Models
//!
//! | Model | Dimensions | Description |
//! |-------|------------|-------------|
//! | `text-embedding-3-small` | 1536 | Efficient model for most use cases |
//! | `text-embedding-3-large` | 3072 | Higher quality embeddings for demanding tasks |
//! | `text-embedding-ada-002` | 1536 | Legacy model (still supported) |
//!
//! ## Response Structure
//!
//! The embedding response contains:
//! - `object`: Always "list" for embedding responses
//! - `data`: Array of embedding objects, each containing:
//!   - `object`: Type identifier ("embedding")
//!   - `embedding`: The vector representation (1D, 2D, or 3D)
//!   - `index`: Position in the input array
//! - `model`: The model used for embedding
//! - `usage`: Token usage information