completions/completions.rs

#![allow(clippy::uninlined_format_args)]
#![allow(clippy::doc_markdown)]
#![allow(clippy::useless_vec)]
use openai_ergonomic::{Client, Result};

#[tokio::main]
async fn main() -> Result<()> {
    println!("=== Completions API Examples ===\n");

    let client = Client::from_env()?.build();

    println!("1. Basic Text Completion:");
    basic_completion(&client).await?;

    println!("\n2. Completion with Parameters:");
    completion_with_parameters(&client).await?;

    println!("\n3. Multiple Completions:");
    multiple_completions(&client).await?;

    println!("\n4. Completion with Stop Sequences:");
    completion_with_stop(&client).await?;

    println!("\n5. Completion with Suffix (Insert Mode):");
    completion_with_suffix(&client).await?;

    println!("\n=== All examples completed successfully ===");

    Ok(())
}

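/// Sends a single prompt with only `max_tokens` set, then prints the first
/// choice, its finish reason, and the token usage reported by the API.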
async fn basic_completion(client: &Client) -> Result<()> {
    let builder = client
        .completions()
        .builder("gpt-3.5-turbo-instruct")
        .prompt("Write a tagline for an ice cream shop")
        .max_tokens(60);

    let response = client.completions().create(builder).await?;

    println!("Prompt: Write a tagline for an ice cream shop");
    if let Some(choice) = response.choices.first() {
        println!("Completion: {}", choice.text);
        println!("Finish reason: {:?}", choice.finish_reason);
    }

    if let Some(usage) = response.usage {
        println!(
            "Tokens used: {} prompt + {} completion = {} total",
            usage.prompt_tokens, usage.completion_tokens, usage.total_tokens
        );
    }

    Ok(())
}

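/// Demonstrates the sampling knobs on the builder: `temperature`, `top_p`,
/// `frequency_penalty`, and `presence_penalty`.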
async fn completion_with_parameters(client: &Client) -> Result<()> {
    let builder = client
        .completions()
        .builder("gpt-3.5-turbo-instruct")
        .prompt("Explain quantum computing in simple terms:")
        .max_tokens(100)
        .temperature(0.7)
        .top_p(0.9)
        .frequency_penalty(0.5)
        .presence_penalty(0.0);

    let response = client.completions().create(builder).await?;

    println!("Parameters:");
    println!(" Temperature: 0.7");
    println!(" Top P: 0.9");
    println!(" Frequency Penalty: 0.5");
    println!(" Presence Penalty: 0.0");
    println!();

    if let Some(choice) = response.choices.first() {
        println!("Completion: {}", choice.text);
    }

    Ok(())
}

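/// Requests `n(3)` choices for a single prompt at a higher temperature and
/// prints each returned completion.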
async fn multiple_completions(client: &Client) -> Result<()> {
    let builder = client
        .completions()
        .builder("gpt-3.5-turbo-instruct")
        .prompt("Brainstorm three names for a pet cat:")
        .max_tokens(50)
        .n(3)
        .temperature(0.9);

    let response = client.completions().create(builder).await?;

    println!("Generating {} completions:", response.choices.len());
    for (i, choice) in response.choices.iter().enumerate() {
        println!(" {}. {}", i + 1, choice.text.trim());
    }

    Ok(())
}

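/// Uses stop sequences: generation halts when the model emits `"\n4."` or a
/// blank line, keeping the numbered list to three items.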
async fn completion_with_stop(client: &Client) -> Result<()> {
    let builder = client
        .completions()
        .builder("gpt-3.5-turbo-instruct")
        .prompt("List three programming languages:\n1.")
        .max_tokens(100)
        .temperature(0.0)
        .add_stop("\n4.")
        .add_stop("\n\n");

    let response = client.completions().create(builder).await?;

    println!("Prompt: List three programming languages:");
    if let Some(choice) = response.choices.first() {
        println!("Completion:\n1.{}", choice.text);
        println!("Stopped because: {:?}", choice.finish_reason);
    }

    Ok(())
}

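/// Insert mode: supplies both a `prompt` (the text before the gap) and a
/// `suffix` (the text after it) so the completion fills in the middle.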
async fn completion_with_suffix(client: &Client) -> Result<()> {
    let builder = client
        .completions()
        .builder("gpt-3.5-turbo-instruct")
        .prompt("def hello_world():\n print(\"Hello, ")
        .suffix("\")\n return True")
        .max_tokens(10)
        .temperature(0.0);

    let response = client.completions().create(builder).await?;

    println!("Insert mode example:");
    println!("Before: def hello_world():\\n print(\"Hello, ");
    if let Some(choice) = response.choices.first() {
        println!("Inserted: {}", choice.text);
    }
    println!("After: \")\\n return True");

    Ok(())
}

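/// With `echo(true)` the returned `choice.text` contains the prompt followed
/// by the completion. Not wired into `main`, hence the `dead_code` allow.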
#[allow(dead_code)]
async fn completion_with_echo(client: &Client) -> Result<()> {
    let builder = client
        .completions()
        .builder("gpt-3.5-turbo-instruct")
        .prompt("The capital of France is")
        .max_tokens(10)
        .echo(true)
        .temperature(0.0);

    let response = client.completions().create(builder).await?;

    println!("Echo enabled:");
    if let Some(choice) = response.choices.first() {
        println!("Full text (prompt + completion): {}", choice.text);
    }

    Ok(())
}