Struct rust_bert::pipelines::translation::TranslationConfig
Configuration for text translation
Contains information regarding the model to load. It mirrors GenerationConfig, but with a different set of default parameters, and additionally sets the device to place the model on.
Fields
model_resource: Resource - Model weights resource (default: pretrained BART model on CNN-DM)
config_resource: Resource - Config resource (default: pretrained BART model on CNN-DM)
vocab_resource: Resource - Vocab resource (default: pretrained BART model on CNN-DM)
merges_resource: Resource - Merges resource (default: pretrained BART model on CNN-DM)
min_length: u64 - Minimum sequence length (default: 0)
max_length: u64 - Maximum sequence length (default: 20)
do_sample: bool - Sampling flag. If true, will perform top-k and/or nucleus sampling on generated tokens, otherwise greedy (deterministic) decoding (default: true)
early_stopping: bool - Early stopping flag indicating if the beam search should stop as soon as num_beams hypotheses have been generated (default: false)
num_beams: u64 - Number of beams for beam search (default: 5)
temperature: f64 - Temperature setting. Values higher than 1 will improve originality at the risk of reducing relevance (default: 1.0)
top_k: u64 - Top_k value for sampling tokens. Values higher than 0 will enable the feature (default: 0)
top_p: f64 - Top_p value for nucleus sampling (Holtzman et al.). Keep top tokens until their cumulative probability reaches top_p (default: 0.9)
repetition_penalty: f64 - Repetition penalty (mostly useful for CTRL decoders). Values higher than 1 will penalize tokens that have already been generated (default: 1.0)
length_penalty: f64 - Exponential penalty based on the length of the hypotheses generated (default: 1.0)
no_repeat_ngram_size: u64 - Number of allowed repetitions of n-grams. Values higher than 0 turn on this feature (default: 3)
num_return_sequences: u64 - Number of sequences to return for each prompt text (default: 1)
device: Device - Device to place the model on (default: CUDA/GPU when available)
prefix: Option<String> - Prefix to prepend to translation inputs
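These fields are public, so the generation parameters can be overridden after a configuration has been constructed. A minimal sketch, assuming the fields are adjusted before the configuration is handed to a translation model; the values shown are purely illustrative, not recommended settings:

use rust_bert::pipelines::translation::{Language, TranslationConfig};
use tch::Device;

// Start from the defaults for a given language pair, then tweak decoding parameters.
let mut translation_config =
    TranslationConfig::new(Language::EnglishToFrench, Device::cuda_if_available());
translation_config.num_beams = 3;      // fewer beams for faster decoding
translation_config.max_length = 64;    // allow longer output sequences
translation_config.do_sample = false;  // keep decoding deterministic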
Implementations
impl TranslationConfig
pub fn new(language: Language, device: Device) -> TranslationConfig
Create a new TranslationConfig from an available language.
Arguments
language - Language enum value (e.g. Language::EnglishToFrench)
device - Device to place the model on (CPU/GPU)
Example
use rust_bert::pipelines::translation::{TranslationConfig, Language};
use tch::Device;

let translation_config =
    TranslationConfig::new(Language::FrenchToEnglish, Device::cuda_if_available());
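Once built, the configuration is typically consumed by the translation pipeline in this module. A minimal sketch, assuming the TranslationModel::new constructor and translate method exposed alongside this struct in this crate version:

use rust_bert::pipelines::translation::{Language, TranslationConfig, TranslationModel};
use tch::Device;

let translation_config =
    TranslationConfig::new(Language::FrenchToEnglish, Device::cuda_if_available());

// Load the pretrained weights described by the configuration and translate a batch.
let model = TranslationModel::new(translation_config).expect("model loading failed");
let output = model.translate(&["Il fait beau aujourd'hui."]);
// output contains one translated String per input sentence.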
pub fn new_from_resources(
model_resource: Resource,
config_resource: Resource,
vocab_resource: Resource,
sentence_piece_resource: Resource,
prefix: Option<String>,
device: Device
) -> TranslationConfig
Create a new TranslationConfig from custom (e.g. local) resources.
Arguments
model_resource - Resource pointing to the model
config_resource - Resource pointing to the configuration
vocab_resource - Resource pointing to the vocabulary
sentence_piece_resource - Resource pointing to the sentence piece model of the source language
prefix - Optional prefix to prepend to translation inputs
device - Device to place the model on (CPU/GPU)
Example
use rust_bert::pipelines::translation::TranslationConfig;
use tch::Device;
use rust_bert::resources::{Resource, LocalResource};
use std::path::PathBuf;

let config_resource = Resource::Local(LocalResource {
    local_path: PathBuf::from("path/to/config.json"),
});
let model_resource = Resource::Local(LocalResource {
    local_path: PathBuf::from("path/to/model.ot"),
});
let vocab_resource = Resource::Local(LocalResource {
    local_path: PathBuf::from("path/to/vocab.json"),
});
let sentence_piece_resource = Resource::Local(LocalResource {
    local_path: PathBuf::from("path/to/spiece.model"),
});

let translation_config = TranslationConfig::new_from_resources(
    model_resource,
    config_resource,
    vocab_resource,
    sentence_piece_resource,
    Some(">>fr<<".to_string()),
    Device::cuda_if_available(),
);
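A configuration built from custom resources is used in the same way as one created with new: pass it to TranslationModel::new and call translate on the resulting model. The optional prefix (here the Marian-style ">>fr<<" target-language token, shown as an illustrative value) is prepended to each input so that multilingual models generate text in the requested target language; models trained for a single language pair typically take None.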
Auto Trait Implementations
impl RefUnwindSafe for TranslationConfig
impl Send for TranslationConfig
impl Sync for TranslationConfig
impl Unpin for TranslationConfig
impl UnwindSafe for TranslationConfig
Blanket Implementations
impl<T> Any for T where
    T: 'static + ?Sized,
impl<T> Borrow<T> for T where
    T: ?Sized,
impl<T> BorrowMut<T> for T where
    T: ?Sized,
fn borrow_mut(&mut self) -> &mut T
impl<T> From<T> for T
impl<T, U> Into<U> for T where
    U: From<T>,
impl<T, U> TryFrom<U> for T where
    U: Into<T>,
type Error = Infallible
The type returned in the event of a conversion error.
fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>
impl<T, U> TryInto<U> for T where
    U: TryFrom<T>,