Struct elasticlunr::Index
pub struct Index {
    pub fields: Vec<String>,
    pub pipeline: Pipeline,
    pub ref_field: String,
    pub version: &'static str,
    pub document_store: DocumentStore,
    // some fields omitted
}
An elasticlunr search index.
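For orientation, a minimal end-to-end sketch using the constructor and methods documented below (the document content is illustrative):

use elasticlunr::Index;

// Create an index over two fields, add one document, and serialize the
// result to JSON for a client-side search library to consume.
let mut index = Index::new(&["title", "body"]);
index.add_doc("1", &["this is a title", "this is body text"]);
let json = index.to_json_pretty();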
Fields
fields: Vec<String>
pipeline: Pipeline
ref_field: String
version: &'static str
document_store: DocumentStore
Implementations
impl Index
pub fn with_language<I>(lang: Language, fields: I) -> Self where
    I: IntoIterator,
    I::Item: AsRef<str>,
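A minimal usage sketch for the language-aware constructor; this assumes the Language enum exposes an English variant:

use elasticlunr::{Index, Language};

// Build an index whose processing pipeline is configured for English text.
let mut index = Index::with_language(Language::English, &["title", "body"]);
index.add_doc("1", &["this is a title", "this is body text"]);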
pub fn add_doc<I>(&mut self, doc_ref: &str, data: I) where
    I: IntoIterator,
    I::Item: AsRef<str>,

Add the data from a document to the index.

NOTE: The elements of data should be provided in the same order as the fields used to create the index.
Example
let mut index = Index::new(&["title", "body"]);
index.add_doc("1", &["this is a title", "this is body text"]);
pub fn add_doc_with_tokenizer<I>(
    &mut self,
    doc_ref: &str,
    data: I,
    tokenizer: TokenizerFn
) where
    I: IntoIterator,
    I::Item: AsRef<str>,

Add the data from a document to the index.

NOTE: The elements of data should be provided in the same order as the fields used to create the index.
Example
fn css_tokenizer(text: &str) -> Vec<String> {
    text.split(|c: char| c.is_whitespace())
        .filter(|s| !s.is_empty())
        .map(|s| s.trim().to_lowercase())
        .collect()
}

let mut index = Index::new(&["title", "body"]);
index.add_doc_with_tokenizer("1", &["this is a title", "this is body text"], css_tokenizer);
pub fn add_doc_with_tokenizers<I, T>(
    &mut self,
    doc_ref: &str,
    data: I,
    tokenizers: T
) where
    I: IntoIterator,
    I::Item: AsRef<str>,
    T: IntoIterator<Item = TokenizerFn>,

Add the data from a document to the index.

NOTE: The elements of data and tokenizers should be provided in the same order as the fields used to create the index.
Example
use elasticlunr::pipeline::{tokenize, TokenizerFn};

fn css_tokenizer(text: &str) -> Vec<String> {
    text.split(|c: char| c.is_whitespace())
        .filter(|s| !s.is_empty())
        .map(|s| s.trim().to_lowercase())
        .collect()
}

let mut index = Index::new(&["title", "body"]);
let tokenizers: Vec<TokenizerFn> = vec![tokenize, css_tokenizer];
index.add_doc_with_tokenizers("1", &["this is a title", "this is body text"], tokenizers);
pub fn get_fields(&self) -> &[String]
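A small sketch of the expected behaviour, assuming the fields are returned in the order they were passed to the constructor:

let index = Index::new(&["title", "body"]);
assert_eq!(index.get_fields(), &["title", "body"]);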
pub fn to_json_pretty(&self) -> String

Returns the index, serialized to pretty-printed JSON.
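A short sketch of a typical use, writing the serialized index to a file for a client-side search script to load (the file name is illustrative):

use std::fs;

let mut index = Index::new(&["title", "body"]);
index.add_doc("1", &["this is a title", "this is body text"]);
fs::write("searchindex.json", index.to_json_pretty()).unwrap();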
Trait Implementations
impl Deserialize<'static> for Index

fn deserialize<__D>(__deserializer: __D) -> Result<Self, __D::Error> where
    __D: Deserializer<'static>,

Deserialize this value from the given Serde deserializer.