vapi_client/models/anthropic_model.rs

/*
 * Vapi API
 *
 * API for building voice assistants
 *
 * The version of the OpenAPI document: 1.0
 *
 * Generated by: https://openapi-generator.tech
 */

use serde::{Deserialize, Serialize};
use utoipa::ToSchema;

use crate::models;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize, ToSchema)]
pub struct AnthropicModel {
    /// This is the starting state for the conversation.
    #[serde(rename = "messages", skip_serializing_if = "Option::is_none")]
    pub messages: Option<Vec<models::OpenAiMessage>>,
    /// These are the tools that the assistant can use during the call. To use existing tools, use `toolIds`.  Both `tools` and `toolIds` can be used together.
    #[serde(rename = "tools", skip_serializing_if = "Option::is_none")]
    pub tools: Option<Vec<models::AnyscaleModelToolsInner>>,
    /// These are the tools that the assistant can use during the call. To use transient tools, use `tools`.  Both `tools` and `toolIds` can be used together.
    #[serde(rename = "toolIds", skip_serializing_if = "Option::is_none")]
    pub tool_ids: Option<Vec<String>>,
    #[serde(rename = "knowledgeBase", skip_serializing_if = "Option::is_none")]
    pub knowledge_base: Option<models::AnyscaleModelKnowledgeBase>,
    /// This is the ID of the knowledge base the model will use.
    #[serde(rename = "knowledgeBaseId", skip_serializing_if = "Option::is_none")]
    pub knowledge_base_id: Option<String>,
    /// The specific Anthropic/Claude model that will be used.
    #[serde(rename = "model")]
    pub model: Model,
    /// The provider identifier for Anthropic.
    #[serde(rename = "provider")]
    pub provider: Provider,
    /// Optional configuration for Anthropic's thinking feature. Only applicable to the claude-3-7-sonnet-20250219 model. If provided, maxTokens must be greater than thinking.budgetTokens.
    #[serde(rename = "thinking", skip_serializing_if = "Option::is_none")]
    pub thinking: Option<models::AnthropicThinkingConfig>,
    /// This is the temperature that will be used for calls. Default is 0 to leverage caching for lower latency.
    #[serde(rename = "temperature", skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f64>,
    /// This is the max number of tokens that the assistant will be allowed to generate in each turn of the conversation. Default is 250.
    #[serde(rename = "maxTokens", skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<f64>,
    /// This determines whether we detect the user's emotion while they speak and send it as additional info to the model.  Default is `false` because the model is usually good at understanding the user's emotion from text.  @default false
    #[serde(
        rename = "emotionRecognitionEnabled",
        skip_serializing_if = "Option::is_none"
    )]
    pub emotion_recognition_enabled: Option<bool>,
    /// This sets how many turns at the start of the conversation use a smaller, faster model from the same provider before switching to the primary model. For example, gpt-3.5-turbo if the provider is openai.  Default is 0.  @default 0
    #[serde(rename = "numFastTurns", skip_serializing_if = "Option::is_none")]
    pub num_fast_turns: Option<f64>,
}

impl AnthropicModel {
    pub fn new(model: Model, provider: Provider) -> AnthropicModel {
        AnthropicModel {
            messages: None,
            tools: None,
            tool_ids: None,
            knowledge_base: None,
            knowledge_base_id: None,
            model,
            provider,
            thinking: None,
            temperature: None,
            max_tokens: None,
            emotion_recognition_enabled: None,
            num_fast_turns: None,
        }
    }
}
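
// Example usage (a minimal sketch using only items defined in this file):
// construct the required fields with `new`, then override optional fields
// through struct update syntax. The temperature and token values below are
// illustrative, not defaults from the API.
//
//     let model = AnthropicModel {
//         temperature: Some(0.2),
//         max_tokens: Some(500.0),
//         ..AnthropicModel::new(Model::Claude35Sonnet20241022, Provider::Anthropic)
//     };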
/// The specific Anthropic/Claude model that will be used.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize, ToSchema)]
pub enum Model {
    #[serde(rename = "claude-3-opus-20240229")]
    Claude3Opus20240229,
    #[serde(rename = "claude-3-sonnet-20240229")]
    Claude3Sonnet20240229,
    #[serde(rename = "claude-3-haiku-20240307")]
    Claude3Haiku20240307,
    #[serde(rename = "claude-3-5-sonnet-20240620")]
    Claude35Sonnet20240620,
    #[serde(rename = "claude-3-5-sonnet-20241022")]
    Claude35Sonnet20241022,
    #[serde(rename = "claude-3-5-haiku-20241022")]
    Claude35Haiku20241022,
    #[serde(rename = "claude-3-7-sonnet-20250219")]
    Claude37Sonnet20250219,
}

impl Default for Model {
    fn default() -> Model {
        Self::Claude3Opus20240229
    }
}
/// The provider identifier for Anthropic.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize, ToSchema)]
pub enum Provider {
    #[serde(rename = "anthropic")]
    Anthropic,
}

impl Default for Provider {
    fn default() -> Provider {
        Self::Anthropic
    }
}
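
// A serialization sketch, not part of the generated code: it assumes
// `serde_json` is available as a dev-dependency and only checks behavior
// visible in this file (field renames, `skip_serializing_if`, enum string
// values, and the `Default` impls).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn serializes_required_fields_with_wire_names() {
        let model = AnthropicModel::new(Model::Claude37Sonnet20250219, Provider::Anthropic);
        let json = serde_json::to_value(&model).expect("serialization should succeed");

        // Required fields are emitted under their renamed keys.
        assert_eq!(json["model"], "claude-3-7-sonnet-20250219");
        assert_eq!(json["provider"], "anthropic");

        // Optional fields left as `None` are skipped rather than emitted as null.
        assert!(json.get("maxTokens").is_none());
        assert!(json.get("emotionRecognitionEnabled").is_none());
    }

    #[test]
    fn enum_defaults_match_generated_impls() {
        assert_eq!(Model::default(), Model::Claude3Opus20240229);
        assert_eq!(Provider::default(), Provider::Anthropic);
    }
}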