// windmill_api/models/kafka_trigger.rs
/*
 * Windmill API
 *
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: 1.655.0
 * Contact: contact@windmill.dev
 * Generated by: https://openapi-generator.tech
 */

11use crate::models;
12use serde::{Deserialize, Serialize};
13
14#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
15pub struct KafkaTrigger {
16    /// Path to the Kafka resource containing connection configuration
17    #[serde(rename = "kafka_resource_path")]
18    pub kafka_resource_path: String,
19    /// Kafka consumer group ID for this trigger
20    #[serde(rename = "group_id")]
21    pub group_id: String,
22    /// Array of Kafka topic names to subscribe to
23    #[serde(rename = "topics")]
24    pub topics: Vec<String>,
25    #[serde(rename = "filters")]
26    pub filters: Vec<serde_json::Value>,
27    /// Initial offset behavior when consumer group has no committed offset. 'latest' starts from new messages only, 'earliest' starts from the beginning.
28    #[serde(rename = "auto_offset_reset", skip_serializing_if = "Option::is_none")]
29    pub auto_offset_reset: Option<AutoOffsetReset>,
30    /// When true (default), offsets are committed automatically after receiving each message. When false, you must manually commit offsets using the commit_offsets endpoint.
31    #[serde(rename = "auto_commit", skip_serializing_if = "Option::is_none")]
32    pub auto_commit: Option<bool>,
33    /// ID of the server currently handling this trigger (internal)
34    #[serde(rename = "server_id", skip_serializing_if = "Option::is_none")]
35    pub server_id: Option<String>,
36    /// Timestamp of last server heartbeat (internal)
37    #[serde(rename = "last_server_ping", skip_serializing_if = "Option::is_none")]
38    pub last_server_ping: Option<String>,
39    /// Last error message if the trigger failed
40    #[serde(rename = "error", skip_serializing_if = "Option::is_none")]
41    pub error: Option<String>,
42    /// Path to a script or flow to run when the triggered job fails
43    #[serde(rename = "error_handler_path", skip_serializing_if = "Option::is_none")]
44    pub error_handler_path: Option<String>,
45    /// The arguments to pass to the script or flow
46    #[serde(rename = "error_handler_args", skip_serializing_if = "Option::is_none")]
47    pub error_handler_args: Option<std::collections::HashMap<String, serde_json::Value>>,
48    #[serde(rename = "retry", skip_serializing_if = "Option::is_none")]
49    pub retry: Option<Box<models::Retry>>,
50    /// The unique path identifier for this trigger
51    #[serde(rename = "path")]
52    pub path: String,
53    /// Path to the script or flow to execute when triggered
54    #[serde(rename = "script_path")]
55    pub script_path: String,
56    /// Email of the user who owns this trigger, used for permissioned_as
57    #[serde(rename = "email")]
58    pub email: String,
59    /// Additional permissions for this trigger
60    #[serde(rename = "extra_perms")]
61    pub extra_perms: std::collections::HashMap<String, bool>,
62    /// The workspace this trigger belongs to
63    #[serde(rename = "workspace_id")]
64    pub workspace_id: String,
65    /// Username of the last person who edited this trigger
66    #[serde(rename = "edited_by")]
67    pub edited_by: String,
68    /// Timestamp of the last edit
69    #[serde(rename = "edited_at")]
70    pub edited_at: String,
71    /// True if script_path points to a flow, false if it points to a script
72    #[serde(rename = "is_flow")]
73    pub is_flow: bool,
74    #[serde(rename = "mode")]
75    pub mode: models::TriggerMode,
76}
77
78impl KafkaTrigger {
79    pub fn new(kafka_resource_path: String, group_id: String, topics: Vec<String>, filters: Vec<serde_json::Value>, path: String, script_path: String, email: String, extra_perms: std::collections::HashMap<String, bool>, workspace_id: String, edited_by: String, edited_at: String, is_flow: bool, mode: models::TriggerMode) -> KafkaTrigger {
80        KafkaTrigger {
81            kafka_resource_path,
82            group_id,
83            topics,
84            filters,
85            auto_offset_reset: None,
86            auto_commit: None,
87            server_id: None,
88            last_server_ping: None,
89            error: None,
90            error_handler_path: None,
91            error_handler_args: None,
92            retry: None,
93            path,
94            script_path,
95            email,
96            extra_perms,
97            workspace_id,
98            edited_by,
99            edited_at,
100            is_flow,
101            mode,
102        }
103    }
104}
105/// Initial offset behavior when consumer group has no committed offset. 'latest' starts from new messages only, 'earliest' starts from the beginning.
106#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
107pub enum AutoOffsetReset {
108    #[serde(rename = "latest")]
109    Latest,
110    #[serde(rename = "earliest")]
111    Earliest,
112}
113
114impl Default for AutoOffsetReset {
115    fn default() -> AutoOffsetReset {
116        Self::Latest
117    }
118}
119