//! opensearch-client 0.3.1
//!
//! Strongly typed OpenSearch client — see the crate documentation for usage.
/*
 * opensearch-client
 *
 * Rust Client for OpenSearch
 *
 * The version of the OpenAPI document: 3.1.0
 * Contact: alberto.paro@gmail.com
 * Generated by Paro OpenAPI Generator
 */

use crate::common;
use crate::indices;
use serde::{Deserialize, Serialize};

/// AnalyzeRequestBodyJson
/// Define analyzer/tokenizer parameters and the text on which the analysis should be performed

#[derive(Clone, Default, Debug, Serialize, Deserialize)]
pub struct AnalyzeRequestBodyJson {
    /// The name of the analyzer that should be applied to the provided `text`.
    /// This could be a built-in analyzer, or an analyzer that's been configured in the index.
    // NOTE: `rename` attributes were dropped throughout — every rename string was
    // byte-identical to the field name, so the serialized JSON is unchanged.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub analyzer: Option<String>,
    /// The path to a field or an array of paths. Some APIs support wildcards in the path, which allows you to select multiple fields.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub field: Option<String>,
    /// Array of token attributes used to filter the output of the `explain` parameter.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub attributes: Option<Vec<String>>,
    /// Array of character filters used to preprocess characters before the tokenizer.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub char_filter: Option<Vec<common::analysis::CharFilter>>,
    /// Tokenizer to use to convert the text into tokens.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tokenizer: Option<common::analysis::Tokenizer>,
    /// Array of token filters used to apply after the tokenizer.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub filter: Option<Vec<common::analysis::TokenFilter>>,
    /// The text on which the analysis should be performed (a string or an array of strings).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub text: Option<indices::analyze::TextToAnalyze>,
    /// If `true`, the response includes token attributes and additional details.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub explain: Option<bool>,
    /// Normalizer to use to convert text into a single token.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub normalizer: Option<String>,
}

impl AnalyzeRequestBodyJson {
    /// Define analyzer/tokenizer parameters and the text on which the analysis should be performed
    pub fn new() -> AnalyzeRequestBodyJson {
        AnalyzeRequestBodyJson {
            analyzer: None,
            field: None,
            attributes: None,
            char_filter: None,
            tokenizer: None,
            filter: None,
            text: None,
            explain: None,
            normalizer: None,
        }
    }
}