// gliclass/lib.rs

//! An inference engine for [GLiClass](https://github.com/Knowledgator/GLiClass) models.
//!
//! These language models are efficient for zero-shot topic classification or derivatives like sentiment analysis.
//! They can also be used for efficient re-ranking.
//!
//! GLiClass stands for "Generalist and Lightweight Model for Sequence Classification", after an original work from
//! [Knowledgator](https://knowledgator.com), which was itself inspired by [GLiNER](https://github.com/urchade/GLiNER).
8
9pub mod util;
10pub mod params;
11pub mod tokenizer;
12pub mod input;
13pub mod output;
14pub mod pipeline;
15
16
/// Convenience front-end for easy use with default runtime parameters (CPU).
/// For more advanced use, see examples and the `orp` crate.
pub struct GLiClass {
    // Inference parameters; passed to the pipeline constructor and to every
    // `inference` call. NOTE(review): exact semantics live in `params::Parameters`.
    params: params::Parameters,
    // Tokenization / pre- and post-processing pipeline, built from the tokenizer file.
    pipeline: pipeline::ClassificationPipeline,
    // The loaded ONNX model handle (see the `orp` crate).
    model: orp::model::Model,
}
24
25impl GLiClass {
26    /// Loads the model given a tokenizer, an ONNX model, and the required parameters
27    pub fn new<P: AsRef<std::path::Path>>(tokenizer_path: P, model_path: P, params: params::Parameters) -> crate::util::result::Result<Self> {
28        Ok(Self {
29            pipeline: pipeline::ClassificationPipeline::new(tokenizer_path, &params)?,
30            model: orp::model::Model::new(model_path, orp::params::RuntimeParameters::default())?,
31            params,            
32        })
33    }
34
35    /// Performs classification on the given output
36    pub fn inference(&self, input: input::text::TextInput) -> crate::util::result::Result<output::classes::Classes> {
37        self.model.inference(input, &self.pipeline, &self.params)
38    }
39}
40