async_llm/types/chat_function.rs
use serde::{Deserialize, Serialize};
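
/// A function definition the model can be asked to call, as used in
/// OpenAI-compatible function-calling / tools requests (see the guide linked
/// on `parameters` below).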
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)]
pub struct ChatFunction {
    /// The name of the function to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64.
    pub name: String,
    /// A description of what the function does, used by the model to choose when and how to call the function.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    /// The parameters the function accepts, described as a JSON Schema object. See the [guide](https://platform.openai.com/docs/guides/text-generation/function-calling) for examples, and the [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for documentation about the format.
    ///
    /// Omitting `parameters` defines a function with an empty parameter list.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub parameters: Option<serde_json::Value>,
}
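
// A minimal usage sketch: it builds a `ChatFunction` with a hypothetical
// `get_weather` JSON Schema and checks that `None` fields are omitted on
// serialization (via `skip_serializing_if`). It relies only on `serde_json`,
// which this type already uses for `parameters`; the function name and schema
// are illustrative, not part of this crate's API.
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    #[test]
    fn serializes_chat_function_and_omits_none_fields() {
        // A fully specified function with a JSON Schema parameter object.
        let function = ChatFunction {
            name: "get_weather".to_string(),
            description: Some("Get the current weather for a city.".to_string()),
            parameters: Some(json!({
                "type": "object",
                "properties": {
                    "city": { "type": "string", "description": "City name, e.g. Hanoi" }
                },
                "required": ["city"]
            })),
        };
        let value = serde_json::to_value(&function).unwrap();
        assert_eq!(value["name"], "get_weather");
        assert_eq!(value["parameters"]["required"][0], "city");

        // With `description` and `parameters` left as `None`, only `name`
        // appears in the serialized JSON.
        let bare = ChatFunction {
            name: "ping".to_string(),
            ..Default::default()
        };
        let bare_value = serde_json::to_value(&bare).unwrap();
        assert_eq!(bare_value, json!({ "name": "ping" }));
    }
}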