blackman_client/models/completion_request.rs

/*
 * Blackman AI API
 *
 * A transparent AI API proxy that optimizes token usage to reduce costs.
 *
 * ## Authentication
 *
 * Blackman AI supports two authentication methods:
 *
 * ### 1. API Key (Recommended for integrations)
 *
 * Use the API key created from your dashboard:
 *
 * ```bash
 * curl -X POST https://app.useblackman.ai/v1/completions \
 *   -H "Authorization: Bearer sk_your_api_key_here" \
 *   -H "Content-Type: application/json" \
 *   -d '{"provider": "OpenAI", "model": "gpt-4", "messages": [{"role": "user", "content": "Hello!"}]}'
 * ```
 *
 * ### 2. JWT Token (For web UI)
 *
 * Obtain a JWT token by logging in:
 *
 * ```bash
 * curl -X POST https://app.useblackman.ai/v1/auth/login \
 *   -H "Content-Type: application/json" \
 *   -d '{"email": "user@example.com", "password": "yourpassword"}'
 * ```
 *
 * Then use the token:
 *
 * ```bash
 * curl -X POST https://app.useblackman.ai/v1/completions \
 *   -H "Authorization: Bearer your_jwt_token" \
 *   -H "Content-Type: application/json" \
 *   -d '{...}'
 * ```
 *
 * ### Provider API Keys (Optional)
 *
 * You can optionally provide your own LLM provider API key via the `X-Provider-Api-Key` header, or store it in your account settings.
 *
 * ## Client SDKs
 *
 * Auto-generated SDKs are available for 10 languages:
 *
 * - **TypeScript**: [View Docs](https://github.com/blackman-ai/typescript-sdk)
 * - **Python**: [View Docs](https://github.com/blackman-ai/python-sdk)
 * - **Go**: [View Docs](https://github.com/blackman-ai/go-sdk)
 * - **Java**: [View Docs](https://github.com/blackman-ai/java-sdk)
 * - **Ruby**: [View Docs](https://github.com/blackman-ai/ruby-sdk)
 * - **PHP**: [View Docs](https://github.com/blackman-ai/php-sdk)
 * - **C#**: [View Docs](https://github.com/blackman-ai/csharp-sdk)
 * - **Rust**: [View Docs](https://github.com/blackman-ai/rust-sdk)
 * - **Swift**: [View Docs](https://github.com/blackman-ai/swift-sdk)
 * - **Kotlin**: [View Docs](https://github.com/blackman-ai/kotlin-sdk)
 *
 * All SDKs are generated from this OpenAPI spec using [openapi-generator](https://openapi-generator.tech).
 *
 * ## Quick Start
 *
 * ```python
 * # Python example with API key
 * import blackman_client
 * from blackman_client import CompletionRequest
 *
 * configuration = blackman_client.Configuration(
 *     host="http://localhost:8080",
 *     access_token="sk_your_api_key_here"  # Your Blackman API key
 * )
 *
 * with blackman_client.ApiClient(configuration) as api_client:
 *     api = blackman_client.CompletionsApi(api_client)
 *     response = api.completions(
 *         CompletionRequest(
 *             provider="OpenAI",
 *             model="gpt-4o",
 *             messages=[{"role": "user", "content": "Hello!"}]
 *         )
 *     )
 * ```
 *
 * The version of the OpenAPI document: 0.1.0
 *
 * Generated by: https://openapi-generator.tech
 */
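
// Rust quick-start sketch, mirroring the Python example in the header above.
// The module and item names used here (`apis::configuration::Configuration`,
// `apis::completions_api::completions`, `Provider::OpenAi`, `Message`) follow
// openapi-generator's reqwest-client conventions and are assumptions for
// illustration only; check the crate's generated `apis` module for the exact names.
//
// use blackman_client::apis::{completions_api, configuration::Configuration};
// use blackman_client::models::{CompletionRequest, Message, Provider};
//
// #[tokio::main]
// async fn main() {
//     let mut configuration = Configuration::new();
//     configuration.base_path = "https://app.useblackman.ai".to_string();
//     // Blackman API key, sent as a bearer token.
//     configuration.bearer_access_token = Some("sk_your_api_key_here".to_string());
//
//     let request = CompletionRequest::new(
//         vec![/* e.g. Message::new("user", "Hello!") */],
//         "gpt-4o".to_string(),
//         Provider::OpenAi,
//     );
//
//     match completions_api::completions(&configuration, request).await {
//         Ok(response) => println!("{:?}", response),
//         Err(err) => eprintln!("request failed: {}", err),
//     }
// }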

use crate::models;
use serde::{Deserialize, Serialize};

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct CompletionRequest {
    #[serde(rename = "max_tokens", default, with = "::serde_with::rust::double_option", skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<Option<i32>>,
    #[serde(rename = "stop", default, with = "::serde_with::rust::double_option", skip_serializing_if = "Option::is_none")]
    pub stop: Option<Option<Vec<String>>>,
    #[serde(rename = "stream", default, with = "::serde_with::rust::double_option", skip_serializing_if = "Option::is_none")]
    pub stream: Option<Option<bool>>,
    #[serde(rename = "temperature", default, with = "::serde_with::rust::double_option", skip_serializing_if = "Option::is_none")]
    pub temperature: Option<Option<f32>>,
    #[serde(rename = "top_p", default, with = "::serde_with::rust::double_option", skip_serializing_if = "Option::is_none")]
    pub top_p: Option<Option<f32>>,
    #[serde(rename = "messages")]
    pub messages: Vec<models::Message>,
    /// Optional metadata for tracking, analytics, and conditional processing. Can include session IDs, user context, feature flags, or any custom data. This metadata is logged with the request and can be used for filtering/analysis.
    #[serde(rename = "metadata", default, with = "::serde_with::rust::double_option", skip_serializing_if = "Option::is_none")]
    pub metadata: Option<Option<serde_json::Value>>,
    #[serde(rename = "model")]
    pub model: String,
    #[serde(rename = "provider")]
    pub provider: models::Provider,
}

impl CompletionRequest {
    pub fn new(messages: Vec<models::Message>, model: String, provider: models::Provider) -> CompletionRequest {
        CompletionRequest {
            max_tokens: None,
            stop: None,
            stream: None,
            temperature: None,
            top_p: None,
            messages,
            metadata: None,
            model,
            provider,
        }
    }
}
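
// Usage sketch for the double-`Option` fields above: with
// `serde_with::rust::double_option` plus `skip_serializing_if`, `None` omits a
// field from the JSON body, `Some(None)` serializes an explicit `null`, and
// `Some(Some(value))` sends the value. `models::Provider::OpenAi` and the
// `Message` constructor below are assumed names, shown for illustration only.
//
// let mut request = CompletionRequest::new(
//     vec![/* e.g. models::Message::new("user", "Hello!") */],
//     "gpt-4o".to_string(),
//     models::Provider::OpenAi,
// );
// request.max_tokens = Some(Some(256));   // serialized as "max_tokens": 256
// request.temperature = Some(Some(0.7));  // serialized as "temperature": 0.7
// request.stop = Some(None);              // serialized as "stop": null
// request.metadata = Some(Some(serde_json::json!({
//     "session_id": "abc-123",            // arbitrary tracking metadata
//     "feature_flag": "compression_v2"
// })));
// // `stream` and `top_p` remain `None`, so they are omitted entirely.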