// windmill_api/models/kafka_trigger.rs
/*
 * Windmill API
 *
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: 1.672.0
 * Contact: contact@windmill.dev
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::{Deserialize, Serialize};

/// A Kafka trigger: runs a script or flow whenever messages arrive on the
/// configured topics, using the connection details from a Kafka resource.
#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct KafkaTrigger {
    /// Path to the Kafka resource containing connection configuration
    #[serde(rename = "kafka_resource_path")]
    pub kafka_resource_path: String,
    /// Kafka consumer group ID for this trigger
    #[serde(rename = "group_id")]
    pub group_id: String,
    /// Array of Kafka topic names to subscribe to
    #[serde(rename = "topics")]
    pub topics: Vec<String>,
    /// Message filters as schemaless JSON values; combined according to `filter_logic`.
    #[serde(rename = "filters")]
    pub filters: Vec<serde_json::Value>,
    /// Logic to apply when evaluating filters. 'and' requires all filters to match, 'or' requires any filter to match.
    #[serde(rename = "filter_logic", skip_serializing_if = "Option::is_none")]
    pub filter_logic: Option<FilterLogic>,
    /// Initial offset behavior when consumer group has no committed offset. 'latest' starts from new messages only, 'earliest' starts from the beginning.
    #[serde(rename = "auto_offset_reset", skip_serializing_if = "Option::is_none")]
    pub auto_offset_reset: Option<AutoOffsetReset>,
    /// When true (default), offsets are committed automatically after receiving each message. When false, you must manually commit offsets using the commit_offsets endpoint.
    #[serde(rename = "auto_commit", skip_serializing_if = "Option::is_none")]
    pub auto_commit: Option<bool>,
    /// ID of the server currently handling this trigger (internal)
    #[serde(rename = "server_id", skip_serializing_if = "Option::is_none")]
    pub server_id: Option<String>,
    /// Timestamp of last server heartbeat (internal)
    #[serde(rename = "last_server_ping", skip_serializing_if = "Option::is_none")]
    pub last_server_ping: Option<String>,
    /// Last error message if the trigger failed
    #[serde(rename = "error", skip_serializing_if = "Option::is_none")]
    pub error: Option<String>,
    /// Path to a script or flow to run when the triggered job fails
    #[serde(rename = "error_handler_path", skip_serializing_if = "Option::is_none")]
    pub error_handler_path: Option<String>,
    /// The arguments to pass to the script or flow
    #[serde(rename = "error_handler_args", skip_serializing_if = "Option::is_none")]
    pub error_handler_args: Option<std::collections::HashMap<String, serde_json::Value>>,
    /// Retry configuration for failed jobs (see [`models::Retry`]); boxed to keep this struct small.
    #[serde(rename = "retry", skip_serializing_if = "Option::is_none")]
    pub retry: Option<Box<models::Retry>>,
    /// The unique path identifier for this trigger
    #[serde(rename = "path")]
    pub path: String,
    /// Path to the script or flow to execute when triggered
    #[serde(rename = "script_path")]
    pub script_path: String,
    /// The user or group this trigger runs as (permissioned_as)
    #[serde(rename = "permissioned_as")]
    pub permissioned_as: String,
    /// Additional permissions for this trigger
    #[serde(rename = "extra_perms")]
    pub extra_perms: std::collections::HashMap<String, bool>,
    /// The workspace this trigger belongs to
    #[serde(rename = "workspace_id")]
    pub workspace_id: String,
    /// Username of the last person who edited this trigger
    #[serde(rename = "edited_by")]
    pub edited_by: String,
    /// Timestamp of the last edit
    #[serde(rename = "edited_at")]
    pub edited_at: String,
    /// True if script_path points to a flow, false if it points to a script
    #[serde(rename = "is_flow")]
    pub is_flow: bool,
    /// Trigger mode (see [`models::TriggerMode`]).
    #[serde(rename = "mode")]
    pub mode: models::TriggerMode,
}

81impl KafkaTrigger {
82    pub fn new(kafka_resource_path: String, group_id: String, topics: Vec<String>, filters: Vec<serde_json::Value>, path: String, script_path: String, permissioned_as: String, extra_perms: std::collections::HashMap<String, bool>, workspace_id: String, edited_by: String, edited_at: String, is_flow: bool, mode: models::TriggerMode) -> KafkaTrigger {
83        KafkaTrigger {
84            kafka_resource_path,
85            group_id,
86            topics,
87            filters,
88            filter_logic: None,
89            auto_offset_reset: None,
90            auto_commit: None,
91            server_id: None,
92            last_server_ping: None,
93            error: None,
94            error_handler_path: None,
95            error_handler_args: None,
96            retry: None,
97            path,
98            script_path,
99            permissioned_as,
100            extra_perms,
101            workspace_id,
102            edited_by,
103            edited_at,
104            is_flow,
105            mode,
106        }
107    }
108}
109/// Logic to apply when evaluating filters. 'and' requires all filters to match, 'or' requires any filter to match.
110#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
111pub enum FilterLogic {
112    #[serde(rename = "and")]
113    And,
114    #[serde(rename = "or")]
115    Or,
116}
117
118impl Default for FilterLogic {
119    fn default() -> FilterLogic {
120        Self::And
121    }
122}
123/// Initial offset behavior when consumer group has no committed offset. 'latest' starts from new messages only, 'earliest' starts from the beginning.
124#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
125pub enum AutoOffsetReset {
126    #[serde(rename = "latest")]
127    Latest,
128    #[serde(rename = "earliest")]
129    Earliest,
130}
131
132impl Default for AutoOffsetReset {
133    fn default() -> AutoOffsetReset {
134        Self::Latest
135    }
136}
137