//! windmill-api 1.683.2
//!
//! No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
//! Documentation
/*
 * Windmill API
 *
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: 1.683.2
 * Contact: contact@windmill.dev
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::{Deserialize, Serialize};

/// Request body for editing an existing Kafka trigger.
///
/// Generated from the Windmill OpenAPI spec: required fields are plain values,
/// optional fields are `Option` and are omitted from the serialized JSON when
/// `None` (via `skip_serializing_if`).
#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct EditKafkaTrigger {
    /// Path to the Kafka resource containing connection configuration
    #[serde(rename = "kafka_resource_path")]
    pub kafka_resource_path: String,
    /// Kafka consumer group ID for this trigger
    #[serde(rename = "group_id")]
    pub group_id: String,
    /// Array of Kafka topic names to subscribe to
    #[serde(rename = "topics")]
    pub topics: Vec<String>,
    /// Filters applied to incoming messages. Reuses the websocket trigger
    /// filter shape; NOTE(review): presumably evaluated per the `filter_logic`
    /// field below — confirm against the API spec.
    #[serde(rename = "filters")]
    pub filters: Vec<models::NewWebsocketTriggerFiltersInner>,
    /// Logic to apply when evaluating filters. 'and' requires all filters to match, 'or' requires any filter to match.
    #[serde(rename = "filter_logic", skip_serializing_if = "Option::is_none")]
    pub filter_logic: Option<FilterLogic>,
    /// Initial offset behavior when consumer group has no committed offset.
    #[serde(rename = "auto_offset_reset", skip_serializing_if = "Option::is_none")]
    pub auto_offset_reset: Option<AutoOffsetReset>,
    /// When true (default), offsets are committed automatically after receiving each message. When false, you must manually commit offsets using the commit_offsets endpoint.
    #[serde(rename = "auto_commit", skip_serializing_if = "Option::is_none")]
    pub auto_commit: Option<bool>,
    /// The unique path identifier for this trigger
    #[serde(rename = "path")]
    pub path: String,
    /// Path to the script or flow to execute when a message is received
    #[serde(rename = "script_path")]
    pub script_path: String,
    /// True if script_path points to a flow, false if it points to a script
    #[serde(rename = "is_flow")]
    pub is_flow: bool,
    /// Path to a script or flow to run when the triggered job fails
    #[serde(rename = "error_handler_path", skip_serializing_if = "Option::is_none")]
    pub error_handler_path: Option<String>,
    /// The arguments to pass to the script or flow
    #[serde(rename = "error_handler_args", skip_serializing_if = "Option::is_none")]
    pub error_handler_args: Option<std::collections::HashMap<String, serde_json::Value>>,
    /// Retry configuration for the triggered job; NOTE(review): semantics live
    /// in `models::Retry` — confirm there. Boxed to keep this struct small.
    #[serde(rename = "retry", skip_serializing_if = "Option::is_none")]
    pub retry: Option<Box<models::Retry>>,
    /// The user or group this trigger runs as. Used during deployment to preserve the original trigger owner.
    #[serde(rename = "permissioned_as", skip_serializing_if = "Option::is_none")]
    pub permissioned_as: Option<String>,
    /// When true and the caller is a member of the 'wm_deployers' group, preserves the original permissioned_as value instead of overwriting it.
    #[serde(rename = "preserve_permissioned_as", skip_serializing_if = "Option::is_none")]
    pub preserve_permissioned_as: Option<bool>,
    /// Free-form labels attached to this trigger.
    #[serde(rename = "labels", skip_serializing_if = "Option::is_none")]
    pub labels: Option<Vec<String>>,
}

impl EditKafkaTrigger {
    /// Builds an `EditKafkaTrigger` with every required field set and every
    /// optional field left unset.
    pub fn new(kafka_resource_path: String, group_id: String, topics: Vec<String>, filters: Vec<models::NewWebsocketTriggerFiltersInner>, path: String, script_path: String, is_flow: bool) -> EditKafkaTrigger {
        // Spell out only the mandatory fields; the derived `Default` (see the
        // struct's derive list) supplies `None` for every optional field.
        EditKafkaTrigger {
            kafka_resource_path,
            group_id,
            topics,
            filters,
            path,
            script_path,
            is_flow,
            ..Default::default()
        }
    }
}
/// Logic to apply when evaluating filters. 'and' requires all filters to match, 'or' requires any filter to match.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum FilterLogic {
    /// Every filter must match.
    #[serde(rename = "and")]
    And,
    /// A single matching filter is enough.
    #[serde(rename = "or")]
    Or,
}

impl Default for FilterLogic {
    /// The API defaults to requiring all filters to match.
    fn default() -> Self {
        FilterLogic::And
    }
}
/// Initial offset behavior when consumer group has no committed offset.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum AutoOffsetReset {
    /// Start consuming from the end of the topic.
    #[serde(rename = "latest")]
    Latest,
    /// Start consuming from the beginning of the topic.
    #[serde(rename = "earliest")]
    Earliest,
}

impl Default for AutoOffsetReset {
    /// New consumer groups begin at the latest offset by default.
    fn default() -> Self {
        AutoOffsetReset::Latest
    }
}