TextInput

Struct TextInput 

Source
pub struct TextInput {
    pub texts: Vec<String>,
    pub entities: Vec<String>,
}
Expand description

Represents the raw text input, as a list of text chunks and a list of entity classes

Fields§

§texts: Vec<String>

§entities: Vec<String>

Implementations§

Source§

impl TextInput

Source

pub fn new(texts: Vec<String>, entities: Vec<String>) -> Result<Self>

Default constructor that takes ownership of the input data: a vector of text sequences to be analyzed and a vector of entity classes.

Source

pub fn from_str(texts: &[&str], entities: &[&str]) -> Result<Self>

This constructor is mostly intended for testing with plain arrays of static string slices (&str).

Examples found in repository?
examples/multilingual.rs (line 39)
34fn main() -> Result<()> {    
35    println!("Loading model...");
36    let model = GLiNER::<SpanMode>::new(Parameters::default(), RuntimeParameters::default(), TOKENIZER, MODEL)?;    
37
38    println!("Inferencing...");
39    let input = TextInput::from_str(&[TEXT], &LABELS)?;
40    let output = model.inference(input)?;
41
42    println!("Results:\n{output}");    
43
44    Ok(check_results(&output))
45}
More examples
Hide additional examples
examples/span_mode.rs (lines 17-30)
7fn main() -> Result<()> {    
8    
9    println!("Loading model...");
10    let model = GLiNER::<SpanMode>::new(
11        Parameters::default(),
12        RuntimeParameters::default(),
13        "models/gliner_small-v2.1/tokenizer.json",
14        "models/gliner_small-v2.1/onnx/model.onnx",
15    )?;
16
17    let input = TextInput::from_str(
18        &[ 
19            "I am James Bond",
20            "This is James and I live in Chelsea, London.",
21            "My name is Bond, James Bond.",
22            "I like to drive my Aston Martin.",
23            "The villain in the movie is Auric Goldfinger."
24        ],
25        &[
26            "person", 
27            "location",
28            "vehicle",
29        ]
30    )?;
31
32    println!("Inferencing...");
33    let output = model.inference(input)?;
34
35    println!("Results:");
36    for spans in output.spans {
37        for span in spans {
38            println!("{:3} | {:16} | {:10} | {:.1}%", span.sequence(), span.text(), span.class(), span.probability() * 100.0);
39        }
40    }
41
42    Ok(())
43
44}
examples/token_mode.rs (lines 17-30)
7fn main() -> Result<()> {    
8    
9    println!("Loading model...");
10    let model = GLiNER::<TokenMode>::new(
11        Parameters::default(),
12        RuntimeParameters::default(),
13        "models/gliner-multitask-large-v0.5/tokenizer.json",
14        "models/gliner-multitask-large-v0.5/onnx/model.onnx",
15    )?;
16    
17    let input = TextInput::from_str(
18        &[ 
19            "I am James Bond",
20            "This is James and I live in Chelsea, London.",
21            "My name is Bond, James Bond.",
22            "I like to drive my Aston Martin.",
23            "The villain in the movie is Auric Goldfinger."
24        ],
25        &[
26            "person", 
27            "location",
28            "vehicle",
29        ]
30    )?;
31
32    println!("Inferencing...");
33    let output = model.inference(input)?;
34
35    println!("Results:");
36    for spans in output.spans {
37        for span in spans {
38            println!("{:3} | {:16} | {:10} | {:.1}%", span.sequence(), span.text(), span.class(), span.probability() * 100.0);
39        }
40    }
41
42    Ok(())
43
44}
examples/relation_extraction.rs (lines 39-42)
24fn main() -> Result<()> {
25    // Set model and tokenizer paths    
26    const MODEL_PATH: &str = "models/gliner-multitask-large-v0.5/onnx/model.onnx";
27    const TOKENIZER_PATH: &str = "models/gliner-multitask-large-v0.5/tokenizer.json";
28    
29    // Use default parameters
30    let params: Parameters = Parameters::default();
31    let runtime_params = RuntimeParameters::default();
32
33    // Define a relation schema.
34    // We declare a "founded" relation which subject has to be a "person" and object has to be a "company"
35    let mut relation_schema = RelationSchema::new();
36    relation_schema.push_with_allowed_labels("founded", &["person"], &["company"]);
37
38    // Sample input text and entity labels
39    let input = TextInput::from_str(
40        &["Bill Gates is an American businessman who co-founded Microsoft."],
41        &["person", "company"],
42    )?;
43    
44    // Load the model that will be leveraged for the pipeline below
45    println!("Loading model...");      
46    let model = Model::new(MODEL_PATH, runtime_params)?;
47    
48    // Relation Extraction needs Named Entity Recognition to be applied first.
49    // Here we combine the two pipelines: one for NER, and one for RE.
50    // For testing purposes we also insert printing functions.
51    let pipeline = composed![
52        TokenPipeline::new(TOKENIZER_PATH)?.to_composable(&model, &params),
53        Print::new(Some("Entities:\n"), None),
54        RelationPipeline::default(TOKENIZER_PATH, &relation_schema)?.to_composable(&model, &params),
55        Print::new(Some("Relations:\n"), None)
56    ];
57
58    // Actually perform inferences using the pipeline defined above
59    pipeline.apply(input)?;
60    
61    Ok(())
62}
Source

pub fn new_from_csv<P: AsRef<Path>>( path: P, column: usize, limit: usize, entities: Vec<String>, ) -> Result<Self>

For testing purposes. Panics if the specified column does not exist.

Examples found in repository?
examples/benchmark_gpu.rs (line 33)
21fn main() -> Result<()> {    
22
23    const MAX_SAMPLES: usize = 1000;
24    const CSV_PATH: &str = "data/nuner-sample-1k.csv";
25
26    let entities = [
27        "person", 
28        "location",
29        "vehicle",
30    ];
31
32    println!("Loading data...");
33    let input = TextInput::new_from_csv(CSV_PATH, 0, MAX_SAMPLES, entities.map(|x| x.to_string()).to_vec())?;
34    let nb_samples = input.texts.len();
35    
36    println!("Loading model...");
37    let model = GLiNER::<TokenMode>::new(
38        Parameters::default(),
39        RuntimeParameters::default().with_execution_providers([
40            CUDAExecutionProvider::default().build(),
41            CoreMLExecutionProvider::default().build(),            
42        ]),
43        "models/gliner-multitask-large-v0.5/tokenizer.json",
44        "models/gliner-multitask-large-v0.5/onnx/model.onnx",
45    )?;
46
47    println!("Inferencing...");
48    let inference_start = std::time::Instant::now();
49    let _output = model.inference(input)?;
50    
51    let inference_time = inference_start.elapsed();
52    println!("Inference took {} seconds on {} samples ({:.2} samples/sec)", inference_time.as_secs(), nb_samples, nb_samples as f32 / inference_time.as_secs() as f32);
53
54    Ok(())
55}
More examples
Hide additional examples
examples/benchmark_cpu.rs (line 23)
15fn main() -> Result<()> {
16    let entities = [
17        "person", 
18        "location",
19        "vehicle",
20    ];
21
22    println!("Loading data...");
23    let input = input::text::TextInput::new_from_csv(CSV_PATH, 0, MAX_SAMPLES, entities.map(|x| x.to_string()).to_vec())?;
24    let nb_samples = input.texts.len();
25    
26    println!("Loading model...");
27    let model = GLiNER::<TokenMode>::new(
28        Parameters::default(),
29        RuntimeParameters::default().with_threads(THREADS),
30        std::path::Path::new("models/gliner-multitask-large-v0.5/tokenizer.json"),
31        std::path::Path::new("models/gliner-multitask-large-v0.5/onnx/model.onnx")
32    )?;
33    
34    let global_inference_start = std::time::Instant::now();
35
36    for i in 0..REPEAT {
37        println!("Inferencing ({})...", i + 1);
38        let inference_start = std::time::Instant::now();
39        let _output = model.inference(input.clone())?;
40        
41        let inference_time = inference_start.elapsed();
42        println!("Took {} seconds on {} samples ({:.2} samples/sec)", inference_time.as_secs(), nb_samples, nb_samples as f32 / inference_time.as_secs() as f32);
43
44        #[cfg(feature = "memprof")] 
45        print_memory_usage();
46    }
47
48    let global_inference_time = global_inference_start.elapsed();
49    let global_nb_samples = nb_samples * REPEAT;
50    println!("All {} inferences took {} seconds on {} samples total ({:.2} samples/sec)", REPEAT, global_inference_time.as_secs(), global_nb_samples, global_nb_samples as f32 / global_inference_time.as_secs() as f32);
51
52    Ok(())
53}

Trait Implementations§

Source§

impl Clone for TextInput

Source§

fn clone(&self) -> TextInput

Returns a duplicate of the value. Read more
1.0.0 · Source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
Source§

impl<S: Splitter> Composable<TextInput, TokenizedInput> for RawToTokenized<'_, S>

Source§

fn apply(&self, input: TextInput) -> Result<TokenizedInput>

Source§

fn compose<T, P>(self, other: T) -> impl Composable<I, P>
where Self: Sized, T: Composable<O, P>,

Source§

impl Debug for TextInput

Source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> CloneToUninit for T
where T: Clone,

Source§

unsafe fn clone_to_uninit(&self, dest: *mut u8)

🔬This is a nightly-only experimental API. (clone_to_uninit)
Performs copy-assignment from self to dest. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T> Instrument for T

Source§

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper. Read more
Source§

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper. Read more
Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T> IntoEither for T

Source§

fn into_either(self, into_left: bool) -> Either<Self, Self>

Converts self into a Left variant of Either<Self, Self> if into_left is true. Converts self into a Right variant of Either<Self, Self> otherwise. Read more
Source§

fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
where F: FnOnce(&Self) -> bool,

Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true. Converts self into a Right variant of Either<Self, Self> otherwise. Read more
Source§

impl<T> Pointable for T

Source§

const ALIGN: usize

The alignment of the pointer.
Source§

type Init = T

The type for initializers.
Source§

unsafe fn init(init: <T as Pointable>::Init) -> usize

Initializes a pointer with the given initializer. Read more
Source§

unsafe fn deref<'a>(ptr: usize) -> &'a T

Dereferences the given pointer. Read more
Source§

unsafe fn deref_mut<'a>(ptr: usize) -> &'a mut T

Mutably dereferences the given pointer. Read more
Source§

unsafe fn drop(ptr: usize)

Drops the object pointed to by the given pointer. Read more
Source§

impl<T> ToOwned for T
where T: Clone,

Source§

type Owned = T

The resulting type after obtaining ownership.
Source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
Source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
Source§

impl<V, T> VZip<V> for T
where V: MultiLane<T>,

Source§

fn vzip(self) -> V

Source§

impl<T> WithSubscriber for T

Source§

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper. Read more
Source§

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper. Read more