Struct edge_transformers::pipelines::sequence_classification::SequenceClassificationPipeline
pub struct SequenceClassificationPipeline<'a> { /* private fields */ }
Wraps a Hugging Face Optimum pipeline exported to ONNX with the sequence-classification task.
Export docs: https://huggingface.co/docs/optimum/exporters/onnx/usage_guides/export_a_model
Example
use ort::{GraphOptimizationLevel, LoggingLevel};
use ort::environment::Environment;
use edge_transformers::{SequenceClassificationPipeline, Device};
let environment = Environment::builder()
.with_name("test")
.with_log_level(LoggingLevel::Verbose)
.build()
.unwrap();
let pipeline = SequenceClassificationPipeline::from_pretrained(
environment.into_arc(),
"npc-engine/deberta-v3-small-finetuned-hate_speech18".to_string(),
Device::CPU,
GraphOptimizationLevel::Level3,
).unwrap();
let input = "This is a test";
println!("Best label {:?}", pipeline.classify(input).unwrap().best.label);Implementations§
impl<'a> SequenceClassificationPipeline<'a>
pub fn from_pretrained( env: Arc<Environment>, model_id: String, device: Device, optimization_level: GraphOptimizationLevel ) -> Result<Self, Error>
Creates a new pipeline from a Hugging Face Hub model id (see the example above).
pub fn new_from_files(
environment: Arc<Environment>,
model_path: PathBuf,
tokenizer_config: PathBuf,
special_tokens_map: PathBuf,
device: Device,
optimization_level: GraphOptimizationLevel,
labels: Option<Vec<String>>
) -> Result<Self, Error>
Creates a new pipeline from model and tokenizer configuration files; a usage sketch follows the argument list below.
Arguments
environment - ONNX Runtime environment.
model_path - Path to the ONNX model file.
tokenizer_config - Path to the tokenizer configuration file.
special_tokens_map - Path to the special tokens map file. Maps token names to their string values.
device - Device to run the model on.
optimization_level - ONNX Runtime graph optimization level.
labels - Optional list of class labels.
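Example

A minimal sketch of building the pipeline from a local Optimum ONNX export. The directory and file names below (model/model.onnx, model/tokenizer.json, model/special_tokens_map.json) are assumed placeholders for whatever your export produced, and labels is left as None.

use std::path::PathBuf;
use ort::{GraphOptimizationLevel, LoggingLevel};
use ort::environment::Environment;
use edge_transformers::{SequenceClassificationPipeline, Device};

let environment = Environment::builder()
    .with_name("example")
    .with_log_level(LoggingLevel::Verbose)
    .build()
    .unwrap();

// Paths below are placeholders for a local Optimum ONNX export.
let pipeline = SequenceClassificationPipeline::new_from_files(
    environment.into_arc(),
    PathBuf::from("model/model.onnx"),
    PathBuf::from("model/tokenizer.json"),
    PathBuf::from("model/special_tokens_map.json"),
    Device::CPU,
    GraphOptimizationLevel::Level3,
    None,
).unwrap();

println!("Best label {:?}", pipeline.classify("This is a test").unwrap().best.label);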
pub fn new_from_memory(
environment: Arc<Environment>,
model: &'a [u8],
tokenizer_config: String,
special_tokens_map: String,
device: Device,
optimization_level: GraphOptimizationLevel,
labels: Option<Vec<String>>
) -> Result<Self, Error>
Creates a new pipeline from an in-memory ONNX model and tokenizer configuration contents; a usage sketch follows the argument list below.
Arguments
environment - ONNX Runtime environment.
model - ONNX model file contents (bytes).
tokenizer_config - Tokenizer configuration file contents.
special_tokens_map - Special tokens map file contents.
device - Device to run the model on.
optimization_level - ONNX Runtime graph optimization level.
labels - Optional list of class labels.
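Example

A minimal sketch of building the pipeline from in-memory buffers. The file paths read here and the label names passed in labels are illustrative assumptions, not values mandated by the crate.

use std::fs;
use ort::{GraphOptimizationLevel, LoggingLevel};
use ort::environment::Environment;
use edge_transformers::{SequenceClassificationPipeline, Device};

let environment = Environment::builder()
    .with_name("example")
    .with_log_level(LoggingLevel::Verbose)
    .build()
    .unwrap();

// Read a local export into memory; these file names are placeholders.
let model_bytes = fs::read("model/model.onnx").unwrap();
let tokenizer_config = fs::read_to_string("model/tokenizer.json").unwrap();
let special_tokens_map = fs::read_to_string("model/special_tokens_map.json").unwrap();

let pipeline = SequenceClassificationPipeline::new_from_memory(
    environment.into_arc(),
    &model_bytes,
    tokenizer_config,
    special_tokens_map,
    Device::CPU,
    GraphOptimizationLevel::Level3,
    // Hypothetical label names supplied explicitly; None is also accepted.
    Some(vec!["no_hate".to_string(), "hate".to_string()]),
).unwrap();

println!("Best label {:?}", pipeline.classify("This is a test").unwrap().best.label);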