// windmill-api 1.683.1
//
// No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
// Documentation
/*
 * Windmill API
 *
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: 1.683.1
 * Contact: contact@windmill.dev
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::{Deserialize, Serialize};

/// A Kafka trigger: subscribes to the configured topics with the given
/// consumer group and runs the referenced script or flow when messages arrive.
///
/// Generated from the Windmill OpenAPI spec. The serde `rename` attributes
/// mirror the field names exactly, pinning the wire format independently of
/// any future field renames.
#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct KafkaTrigger {
    /// Path to the Kafka resource containing connection configuration
    #[serde(rename = "kafka_resource_path")]
    pub kafka_resource_path: String,
    /// Kafka consumer group ID for this trigger
    #[serde(rename = "group_id")]
    pub group_id: String,
    /// Array of Kafka topic names to subscribe to
    #[serde(rename = "topics")]
    pub topics: Vec<String>,
    /// Filter expressions evaluated against incoming messages.
    /// Free-form JSON here; exact filter schema is defined by the API
    /// spec and not visible in this file — see `filter_logic` for how
    /// multiple filters are combined.
    #[serde(rename = "filters")]
    pub filters: Vec<serde_json::Value>,
    /// Logic to apply when evaluating filters. 'and' requires all filters to match, 'or' requires any filter to match.
    #[serde(rename = "filter_logic", skip_serializing_if = "Option::is_none")]
    pub filter_logic: Option<FilterLogic>,
    /// Initial offset behavior when consumer group has no committed offset. 'latest' starts from new messages only, 'earliest' starts from the beginning.
    #[serde(rename = "auto_offset_reset", skip_serializing_if = "Option::is_none")]
    pub auto_offset_reset: Option<AutoOffsetReset>,
    /// When true (default), offsets are committed automatically after receiving each message. When false, you must manually commit offsets using the commit_offsets endpoint.
    #[serde(rename = "auto_commit", skip_serializing_if = "Option::is_none")]
    pub auto_commit: Option<bool>,
    /// ID of the server currently handling this trigger (internal)
    #[serde(rename = "server_id", skip_serializing_if = "Option::is_none")]
    pub server_id: Option<String>,
    /// Timestamp of last server heartbeat (internal)
    #[serde(rename = "last_server_ping", skip_serializing_if = "Option::is_none")]
    pub last_server_ping: Option<String>,
    /// Last error message if the trigger failed
    #[serde(rename = "error", skip_serializing_if = "Option::is_none")]
    pub error: Option<String>,
    /// Path to a script or flow to run when the triggered job fails
    #[serde(rename = "error_handler_path", skip_serializing_if = "Option::is_none")]
    pub error_handler_path: Option<String>,
    /// The arguments to pass to the script or flow
    #[serde(rename = "error_handler_args", skip_serializing_if = "Option::is_none")]
    pub error_handler_args: Option<std::collections::HashMap<String, serde_json::Value>>,
    /// Retry policy for failed triggered jobs; semantics live in
    /// [`models::Retry`] (defined elsewhere in this crate).
    #[serde(rename = "retry", skip_serializing_if = "Option::is_none")]
    pub retry: Option<Box<models::Retry>>,
    /// The unique path identifier for this trigger
    #[serde(rename = "path")]
    pub path: String,
    /// Path to the script or flow to execute when triggered
    #[serde(rename = "script_path")]
    pub script_path: String,
    /// The user or group this trigger runs as (permissioned_as)
    #[serde(rename = "permissioned_as")]
    pub permissioned_as: String,
    /// Additional permissions for this trigger
    #[serde(rename = "extra_perms")]
    pub extra_perms: std::collections::HashMap<String, bool>,
    /// The workspace this trigger belongs to
    #[serde(rename = "workspace_id")]
    pub workspace_id: String,
    /// Username of the last person who edited this trigger
    #[serde(rename = "edited_by")]
    pub edited_by: String,
    /// Timestamp of the last edit
    #[serde(rename = "edited_at")]
    pub edited_at: String,
    /// True if script_path points to a flow, false if it points to a script
    #[serde(rename = "is_flow")]
    pub is_flow: bool,
    /// Operating mode of the trigger; variants are declared in
    /// [`models::TriggerMode`] (not visible in this file).
    #[serde(rename = "mode")]
    pub mode: models::TriggerMode,
    /// Optional labels attached to this trigger.
    #[serde(rename = "labels", skip_serializing_if = "Option::is_none")]
    pub labels: Option<Vec<String>>,
}

impl KafkaTrigger {
    pub fn new(kafka_resource_path: String, group_id: String, topics: Vec<String>, filters: Vec<serde_json::Value>, path: String, script_path: String, permissioned_as: String, extra_perms: std::collections::HashMap<String, bool>, workspace_id: String, edited_by: String, edited_at: String, is_flow: bool, mode: models::TriggerMode) -> KafkaTrigger {
        KafkaTrigger {
            kafka_resource_path,
            group_id,
            topics,
            filters,
            filter_logic: None,
            auto_offset_reset: None,
            auto_commit: None,
            server_id: None,
            last_server_ping: None,
            error: None,
            error_handler_path: None,
            error_handler_args: None,
            retry: None,
            path,
            script_path,
            permissioned_as,
            extra_perms,
            workspace_id,
            edited_by,
            edited_at,
            is_flow,
            mode,
            labels: None,
        }
    }
}
/// Logic to apply when evaluating filters. 'and' requires all filters to match, 'or' requires any filter to match.
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum FilterLogic {
    /// All filters must match. This is the default logic.
    #[default]
    #[serde(rename = "and")]
    And,
    /// Any single matching filter is sufficient.
    #[serde(rename = "or")]
    Or,
}
/// Initial offset behavior when consumer group has no committed offset. 'latest' starts from new messages only, 'earliest' starts from the beginning.
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum AutoOffsetReset {
    /// Start from new messages only. This is the default behavior.
    #[default]
    #[serde(rename = "latest")]
    Latest,
    /// Start from the beginning of the topic.
    #[serde(rename = "earliest")]
    Earliest,
}