Struct rust_bert::pipelines::zero_shot_classification::ZeroShotClassificationConfig[src]

pub struct ZeroShotClassificationConfig {
    pub model_type: ModelType,
    pub model_resource: Resource,
    pub config_resource: Resource,
    pub vocab_resource: Resource,
    pub merges_resource: Option<Resource>,
    pub lower_case: bool,
    pub strip_accents: Option<bool>,
    pub add_prefix_space: Option<bool>,
    pub device: Device,
}

Configuration for ZeroShotClassificationModel

Contains information regarding the model to load and device to place the model on.

Fields

model_type: ModelType

Model type

model_resource: Resource

Model weights resource (default: pretrained BART model fine-tuned on MNLI)

config_resource: Resource

Config resource (default: pretrained BART model fine-tuned on MNLI)

vocab_resource: Resource

Vocab resource (default: pretrained BART model fine-tuned on MNLI)

merges_resource: Option<Resource>

Merges resource (default: None)

lower_case: bool

Automatically lower case all input upon tokenization (assumes a lower-cased model)

strip_accents: Option<bool>

Flag indicating if the tokenizer should strip accents (normalization). Only used for BERT / ALBERT models

add_prefix_space: Option<bool>

Flag indicating if the tokenizer should add a whitespace before each tokenized input (needed for some RoBERTa models)

device: Device

Device to place the model on (default: CUDA/GPU when available)
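
All fields are public, so a configuration can also be built by taking the defaults and overriding individual fields. Below is a minimal sketch of that pattern; it assumes only the Device type from the tch crate (the type of the device field) and is not tied to a particular model:

use rust_bert::pipelines::zero_shot_classification::ZeroShotClassificationConfig;
use tch::Device;

fn build_config() -> ZeroShotClassificationConfig {
    // Start from the default pretrained resources and force CPU execution.
    let mut config = ZeroShotClassificationConfig::default();
    config.device = Device::Cpu;
    // Lower-case inputs at tokenization time (only appropriate for a lower-cased model).
    config.lower_case = true;
    config
}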

Implementations

impl ZeroShotClassificationConfig[src]

pub fn new(
    model_type: ModelType,
    model_resource: Resource,
    config_resource: Resource,
    vocab_resource: Resource,
    merges_resource: Option<Resource>,
    lower_case: bool,
    strip_accents: impl Into<Option<bool>>,
    add_prefix_space: impl Into<Option<bool>>
) -> ZeroShotClassificationConfig
[src]

Instantiate a new zero-shot classification configuration of the supplied type.

Arguments

  • model_type - ModelType indicating the model type to load (must match the actual data to be loaded!)
  • model_resource - The Resource pointing to the model weights to load (e.g. model.ot)
  • config_resource - The Resource pointing to the model configuration to load (e.g. config.json)
  • vocab_resource - The Resource pointing to the tokenizer's vocabulary to load (e.g. vocab.txt/vocab.json)
  • merges_resource - An optional Resource (Option<Resource>) pointing to the tokenizer's merges file to load (e.g. merges.txt), needed only for BPE-based tokenizers such as RoBERTa or BART
  • lower_case - A bool indicating whether the tokenizer should lower-case all input (in case of a lower-cased model)
  • strip_accents - An optional bool indicating whether the tokenizer should strip accents (normalization); only used for BERT / ALBERT models
  • add_prefix_space - An optional bool indicating whether the tokenizer should add a whitespace before each tokenized input (needed for some RoBERTa models)
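
As an illustration, the call below builds a configuration for a BART model fine-tuned on MNLI. This is a sketch rather than the canonical setup: it assumes the BART_MNLI resource constants exposed in rust_bert::bart and the Resource::Remote / RemoteResource::from_pretrained pattern used elsewhere in the crate.

use rust_bert::bart::{
    BartConfigResources, BartMergesResources, BartModelResources, BartVocabResources,
};
use rust_bert::pipelines::common::ModelType;
use rust_bert::pipelines::zero_shot_classification::ZeroShotClassificationConfig;
use rust_bert::resources::{RemoteResource, Resource};

fn bart_mnli_config() -> ZeroShotClassificationConfig {
    // Point all resources at the remote BART-MNLI checkpoint; arguments follow the
    // order of the constructor signature above.
    ZeroShotClassificationConfig::new(
        ModelType::Bart,
        Resource::Remote(RemoteResource::from_pretrained(BartModelResources::BART_MNLI)),
        Resource::Remote(RemoteResource::from_pretrained(BartConfigResources::BART_MNLI)),
        Resource::Remote(RemoteResource::from_pretrained(BartVocabResources::BART_MNLI)),
        Some(Resource::Remote(RemoteResource::from_pretrained(
            BartMergesResources::BART_MNLI,
        ))),
        false, // lower_case
        None,  // strip_accents
        None,  // add_prefix_space
    )
}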

Trait Implementations

impl Default for ZeroShotClassificationConfig[src]

fn default() -> ZeroShotClassificationConfig[src]

Provides a default zero-shot classification model (BART model fine-tuned on MNLI, English)
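
The default configuration can be passed directly to ZeroShotClassificationModel::new. The sketch below shows that flow; error handling via Box<dyn Error>, the example sentence, and the candidate labels are illustrative, and the predict call follows the pipeline's predict API with a 128-token maximum length.

use rust_bert::pipelines::zero_shot_classification::{
    ZeroShotClassificationConfig, ZeroShotClassificationModel,
};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // default() downloads the pretrained MNLI-based model resources on first use.
    let config = ZeroShotClassificationConfig::default();
    let model = ZeroShotClassificationModel::new(config)?;

    let input = "Who are you voting for in 2020?";
    let candidate_labels = &["politics", "public health", "economics", "sports"];
    // Passing None uses the pipeline's default hypothesis template.
    let output = model.predict(&[input], candidate_labels, None, 128);
    println!("{:?}", output);
    Ok(())
}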

Auto Trait Implementations

Blanket Implementations

impl<T> Any for T where
    T: 'static + ?Sized
[src]

impl<T> Borrow<T> for T where
    T: ?Sized
[src]

impl<T> BorrowMut<T> for T where
    T: ?Sized
[src]

impl<T> From<T> for T[src]

impl<T> Instrument for T[src]

impl<T, U> Into<U> for T where
    U: From<T>, 
[src]

impl<T> Pointable for T

type Init = T

The type for initializers.

impl<T> Same<T> for T

type Output = T

Should always be Self

impl<T, U> TryFrom<U> for T where
    U: Into<T>, 
[src]

type Error = Infallible

The type returned in the event of a conversion error.

impl<T, U> TryInto<U> for T where
    U: TryFrom<T>, 
[src]

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.

impl<V, T> VZip<V> for T where
    V: MultiLane<T>,