//! # api_openai 0.3.0
//!
//! OpenAI's API for accessing large language models (LLMs).
//! See the crate-level documentation for details.
//! Structures related to embeddings, representing vector representations of text.

/// Define a private namespace for all its items.
/// Private namespace holding the embedding data structures.
mod private
{
  // `ResponseUsage` is shared with the other endpoint response types.
  use crate::components::common::ResponseUsage;
  use serde::{ Serialize, Deserialize };

  /// A single embedding vector produced by the embeddings endpoint.
  ///
  /// # Used By
  /// - `CreateEmbeddingResponse`
  #[ derive( Debug, Clone, PartialEq, Serialize, Deserialize ) ]
  pub struct Embedding
  {
    /// Position of this embedding within the response's list of embeddings.
    pub index : i32,
    /// The vector itself, as a list of floats; its length depends on the model.
    pub embedding : Vec< f64 >,
    /// Object type marker, always the string "embedding".
    pub object : String,
  }

  /// Full response body carrying a list of embeddings.
  ///
  /// # Used By
  /// - `/embeddings` (POST)
  #[ derive( Debug, Clone, PartialEq, Serialize, Deserialize ) ]
  pub struct CreateEmbeddingResponse
  {
    /// Every embedding the model generated for the request.
    pub data : Vec< Embedding >,
    /// Identifier of the model that produced the embeddings.
    pub model : String,
    /// Object type marker, always the string "list".
    pub object : String,
    /// Usage statistics for the request.
    pub usage : ResponseUsage,
  }
} // end mod private

// Wire the private items into the crate's layered namespace via the
// `mod_interface` macro. NOTE(review): per `mod_interface` conventions,
// `exposed` should make these names visible to parent modules as well as
// this one — confirm against the crate's other modules.
crate ::mod_interface!
{
  exposed use
  {
    Embedding,
    CreateEmbeddingResponse
  };
}