fine_tuning/fine_tuning.rs

#![allow(clippy::uninlined_format_args)]
#![allow(clippy::no_effect_underscore_binding)]
#![allow(clippy::doc_markdown)]
#![allow(clippy::cast_possible_wrap)]
#![allow(clippy::too_many_lines)]
#![allow(clippy::missing_docs_in_private_items)]
#![allow(clippy::cast_possible_truncation)]
#![allow(clippy::cast_lossless)]
#![allow(unused_variables)]
#![allow(missing_docs)]
#![allow(dead_code)]

use openai_ergonomic::{builders::fine_tuning::FineTuningJobBuilder, Client};

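/// Minimal summary of a fine-tuning job, defined locally for this example's
/// demo output (this struct is not a type provided by the crate).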
#[derive(Debug, Clone)]
pub struct JobInfo {
    pub id: String,
    pub model: String,
    pub status: String,
    pub training_file: String,
    pub created_at: i64,
}

impl JobInfo {
    pub fn new(
        id: impl Into<String>,
        model: impl Into<String>,
        status: impl Into<String>,
        training_file: impl Into<String>,
    ) -> Self {
        Self {
            id: id.into(),
            model: model.into(),
            status: status.into(),
            training_file: training_file.into(),
            created_at: std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .unwrap()
                .as_secs() as i64,
        }
    }

    pub fn display(&self) {
        println!(" ID: {}", self.id);
        println!(" Model: {}", self.model);
        println!(" Status: {}", self.status);
        println!(" Training File: {}", self.training_file);
        println!(" Created At: {}", self.created_at);
    }
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("OpenAI Ergonomic - Comprehensive Fine-tuning Example\n");

    println!("Initializing OpenAI client...");
    let client = match Client::from_env() {
        Ok(c) => {
            println!("Client initialized successfully\n");
            c.build()
        }
        Err(e) => {
            eprintln!("Failed to initialize client: {}", e);
            eprintln!("Make sure OPENAI_API_KEY is set");
            return Ok(());
        }
    };

    println!("========================================");
    println!("Example 1: Create Fine-tuning Job");
    println!("========================================\n");

    let training_file_id = "file-training-data";
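    // In a real run, `training_file_id` would be an ID returned by the Files API
    // after uploading JSONL training data. For chat models, each line of that file
    // is one training example shaped roughly like the sample below (wrapped here
    // for readability; in the actual file each example sits on a single line):
    //
    // {"messages": [
    //   {"role": "system", "content": "You are a helpful assistant."},
    //   {"role": "user", "content": "What is the capital of France?"},
    //   {"role": "assistant", "content": "Paris."}
    // ]}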

    println!("Creating fine-tuning job...");
    println!(" Base Model: gpt-3.5-turbo");
    println!(" Training File: {}", training_file_id);
    println!(" Suffix: my-custom-model");

    let builder = FineTuningJobBuilder::new("gpt-3.5-turbo", training_file_id)
        .suffix("my-custom-model")
        .epochs(3);

    println!("\nNote: This would create a real fine-tuning job with your API key.");
    println!(" Commented out to avoid accidental charges.\n");
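    // Hedged sketch of what submitting the builder could look like. The method
    // for sending a `FineTuningJobBuilder` through the client is an assumption
    // here (a hypothetical `create_fine_tuning_job`); check the crate's
    // fine-tuning API surface for the actual call before uncommenting.
    //
    // let job = client.create_fine_tuning_job(builder).await?;
    // println!("Created job: {}", job.id);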

    let demo_job = JobInfo::new(
        "ftjob-demo123",
        "gpt-3.5-turbo",
        "validating",
        training_file_id,
    );
    println!("Demo Job Created:");
    demo_job.display();

    println!("\n========================================");
    println!("Example 2: List Fine-tuning Jobs");
    println!("========================================\n");

    println!("Listing fine-tuning jobs (limit: 5)...\n");

    println!("Demo: Would list your fine-tuning jobs here");
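    // Hedged sketch of listing jobs. `list_fine_tuning_jobs` and its parameter
    // are hypothetical names used for illustration, not confirmed crate API;
    // adapt to the actual client surface before uncommenting.
    //
    // let jobs = client.list_fine_tuning_jobs(5).await?;
    // for job in jobs {
    //     println!(" {} ({})", job.id, job.status);
    // }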

    println!("\n========================================");
    println!("Example 3: Get Fine-tuning Job Details");
    println!("========================================\n");

    let job_id = "ftjob-demo123";
    println!("Retrieving job: {}\n", job_id);

    println!("Demo: Would show detailed job information");
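    // Hedged sketch of retrieving a single job by ID. `retrieve_fine_tuning_job`
    // and the field names are hypothetical, used here for illustration only.
    //
    // let job = client.retrieve_fine_tuning_job(job_id).await?;
    // println!(" Status: {}", job.status);
    // println!(" Fine-tuned model: {:?}", job.fine_tuned_model);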

    println!("\n========================================");
    println!("Example 4: List Fine-tuning Job Events");
    println!("========================================\n");

    println!("Listing events for job: {}\n", job_id);

    println!("Demo: Would show training events like:");
    println!(" - Job started");
    println!(" - Training step 1/100 complete");
    println!(" - Validation loss: 0.452");
    println!(" - Training complete");
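    // Hedged sketch of reading the event log. `list_fine_tuning_events` and the
    // event fields are hypothetical names; the real crate call may differ.
    //
    // let events = client.list_fine_tuning_events(job_id).await?;
    // for event in events {
    //     println!(" [{}] {}", event.created_at, event.message);
    // }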

    println!("\n========================================");
    println!("Example 5: List Fine-tuning Job Checkpoints");
    println!("========================================\n");

    println!("Listing checkpoints for job: {}\n", job_id);

    println!("Demo: Would show model checkpoints from training");
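    // Hedged sketch of inspecting checkpoints (intermediate fine-tuned models
    // saved during training). `list_fine_tuning_checkpoints` and the field names
    // are hypothetical, used for illustration only.
    //
    // let checkpoints = client.list_fine_tuning_checkpoints(job_id).await?;
    // for checkpoint in checkpoints {
    //     println!(" step {}: {}", checkpoint.step_number, checkpoint.fine_tuned_model_checkpoint);
    // }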

    println!("\n========================================");
    println!("Example 6: Cancel Fine-tuning Job");
    println!("========================================\n");

    println!("Cancelling job: {}\n", job_id);

    println!("Demo: Would cancel the running fine-tuning job");
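    // Hedged sketch of cancelling a running job. `cancel_fine_tuning_job` is a
    // hypothetical method name; only jobs that have not finished can be cancelled.
    //
    // let cancelled = client.cancel_fine_tuning_job(job_id).await?;
    // println!(" Job {} is now {}", cancelled.id, cancelled.status);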

    println!("\n========================================");
    println!("Example 7: Create Job with Validation File");
    println!("========================================\n");

    let validation_file_id = "file-validation-data";

    println!("Creating fine-tuning job with validation...");
    println!(" Base Model: gpt-3.5-turbo");
    println!(" Training File: {}", training_file_id);
    println!(" Validation File: {}", validation_file_id);
    println!(" Epochs: 5");
    println!(" Learning Rate Multiplier: 0.1");

    let builder_with_validation = FineTuningJobBuilder::new("gpt-3.5-turbo", training_file_id)
        .validation_file(validation_file_id)
        .epochs(5)
        .learning_rate_multiplier(0.1);

    println!("\nNote: Validation files help monitor overfitting during training");

    println!("\n========================================");
    println!("Example 8: Create Job with W&B Integration");
    println!("========================================\n");

    println!("Creating fine-tuning job with W&B...");
    println!(" Base Model: gpt-3.5-turbo");
    println!(" Training File: {}", training_file_id);
    println!(" W&B Project: my-finetuning-project");

    let builder_with_wandb = FineTuningJobBuilder::new("gpt-3.5-turbo", training_file_id)
        .with_wandb("my-finetuning-project");

    println!("\nNote: W&B integration provides detailed training metrics visualization");

    println!("\n========================================");
    println!("Summary");
    println!("========================================\n");

    println!("Fine-tuning API examples completed!");
    println!("\nKey Takeaways:");
    println!(" • Fine-tuning allows customizing models for specific tasks");
    println!(" • Jobs can be created with various hyperparameters");
    println!(" • Progress can be monitored through events and checkpoints");
    println!(" • Validation files help prevent overfitting");
    println!(" • Integrations like W&B provide detailed metrics");
    println!(" • Jobs can be cancelled if needed");

    println!("\nNext Steps:");
    println!(" 1. Prepare your training data in JSONL format");
    println!(" 2. Upload training data using the Files API");
    println!(" 3. Create a fine-tuning job with appropriate parameters");
    println!(" 4. Monitor progress through events");
    println!(" 5. Use the fine-tuned model in your applications");

    println!("\nExample completed successfully!");

    Ok(())
}