pub struct BatchBuilder<'a> { /* private fields */ }
Expand description
Used for building configuration for a Batch. Created by calling Consumer::batch on a Consumer.
§Examples
use async_nats::jetstream::consumer::PullConsumer;
use futures::StreamExt;
let client = async_nats::connect("localhost:4222").await?;
let jetstream = async_nats::jetstream::new(client);
let consumer: PullConsumer = jetstream
.get_stream("events")
.await?
.get_consumer("pull")
.await?;
let mut messages = consumer
.batch()
.max_messages(100)
.max_bytes(1024)
.messages()
.await?;
while let Some(message) = messages.next().await {
let message = message?;
println!("message: {:?}", message);
message.ack().await?;
}
Implementations§
impl<'a> BatchBuilder<'a>
pub fn new(consumer: &'a Consumer<Config>) -> Self
pub fn max_bytes(self, max_bytes: usize) -> Self
Sets the maximum number of bytes that can be buffered on the Client while processing already received messages. Higher values will yield better performance, but also potentially increase memory usage if the application is acknowledging messages much slower than they arrive.
Default values should provide reasonable balance between performance and memory usage.
§Examples
use async_nats::jetstream::consumer::PullConsumer;
use futures::StreamExt;
let client = async_nats::connect("localhost:4222").await?;
let jetstream = async_nats::jetstream::new(client);
let consumer: PullConsumer = jetstream
.get_stream("events")
.await?
.get_consumer("pull")
.await?;
let mut messages = consumer.batch().max_bytes(1024).messages().await?;
while let Some(message) = messages.next().await {
let message = message?;
println!("message: {:?}", message);
message.ack().await?;
}
pub fn max_messages(self, batch: usize) -> Self
Sets the maximum number of messages that can be buffered on the Client while processing already received messages. Higher values will yield better performance, but also potentially increase memory usage if the application is acknowledging messages much slower than they arrive.
Default values should provide reasonable balance between performance and memory usage.
§Examples
use async_nats::jetstream::consumer::PullConsumer;
use futures::StreamExt;
let client = async_nats::connect("localhost:4222").await?;
let jetstream = async_nats::jetstream::new(client);
let consumer: PullConsumer = jetstream
.get_stream("events")
.await?
.get_consumer("pull")
.await?;
let mut messages = consumer.batch().max_messages(100).messages().await?;
while let Some(message) = messages.next().await {
let message = message?;
println!("message: {:?}", message);
message.ack().await?;
}
pub fn heartbeat(self, heartbeat: Duration) -> Self
Sets the heartbeat which will be sent by the server if there are no messages pending for a given Consumer.
§Examples
use async_nats::jetstream::consumer::PullConsumer;
use futures::StreamExt;
let client = async_nats::connect("localhost:4222").await?;
let jetstream = async_nats::jetstream::new(client);
let consumer: PullConsumer = jetstream
.get_stream("events")
.await?
.get_consumer("pull")
.await?;
let mut messages = consumer
.batch()
.heartbeat(std::time::Duration::from_secs(10))
.messages()
.await?;
while let Some(message) = messages.next().await {
let message = message?;
println!("message: {:?}", message);
message.ack().await?;
}
pub fn min_pending(self, min_pending: usize) -> Self
Sets the overflow threshold for the minimum number of pending messages before this stream will start getting messages. To use overflow, the Consumer needs to have Config::priority_groups enabled and PriorityPolicy::Overflow set.
§Examples
use async_nats::jetstream::consumer::PullConsumer;
use futures::StreamExt;
let client = async_nats::connect("localhost:4222").await?;
let jetstream = async_nats::jetstream::new(client);
let consumer: PullConsumer = jetstream
.get_stream("events")
.await?
.get_consumer("pull")
.await?;
let mut messages = consumer
.batch()
.expires(std::time::Duration::from_secs(30))
.group("A")
.min_pending(100)
.messages()
.await?;
while let Some(message) = messages.next().await {
let message = message?;
println!("message: {:?}", message);
message.ack().await?;
}
pub fn min_ack_pending(self, min_ack_pending: usize) -> Self
Sets the overflow threshold for the minimum number of pending acknowledgments before this stream will start getting messages. To use overflow, the Consumer needs to have Config::priority_groups enabled and PriorityPolicy::Overflow set.
§Examples
use async_nats::jetstream::consumer::PullConsumer;
use futures::StreamExt;
let client = async_nats::connect("localhost:4222").await?;
let jetstream = async_nats::jetstream::new(client);
let consumer: PullConsumer = jetstream
.get_stream("events")
.await?
.get_consumer("pull")
.await?;
let mut messages = consumer
.batch()
.expires(std::time::Duration::from_secs(30))
.group("A")
.min_ack_pending(100)
.messages()
.await?;
while let Some(message) = messages.next().await {
let message = message?;
println!("message: {:?}", message);
message.ack().await?;
}
pub fn group<T: Into<String>>(self, group: T) -> Self
Sets the group when using a Consumer with a PriorityPolicy.
§Examples
use async_nats::jetstream::consumer::PullConsumer;
use futures::StreamExt;
let client = async_nats::connect("localhost:4222").await?;
let jetstream = async_nats::jetstream::new(client);
let consumer: PullConsumer = jetstream
.get_stream("events")
.await?
.get_consumer("pull")
.await?;
let mut messages = consumer
.batch()
.expires(std::time::Duration::from_secs(30))
.group("A")
.min_ack_pending(100)
.messages()
.await?;
while let Some(message) = messages.next().await {
let message = message?;
println!("message: {:?}", message);
message.ack().await?;
}
pub fn expires(self, expires: Duration) -> Self
Low level API that does not need tweaking for most use cases. Sets how long each batch request waits for the whole batch of messages before timing out.
§Examples
use async_nats::jetstream::consumer::PullConsumer;
use futures::StreamExt;
let client = async_nats::connect("localhost:4222").await?;
let jetstream = async_nats::jetstream::new(client);
let consumer: PullConsumer = jetstream
.get_stream("events")
.await?
.get_consumer("pull")
.await?;
let mut messages = consumer
.batch()
.expires(std::time::Duration::from_secs(30))
.messages()
.await?;
while let Some(message) = messages.next().await {
let message = message?;
println!("message: {:?}", message);
message.ack().await?;
}
pub async fn messages(self) -> Result<Batch, BatchError>
Creates the actual Stream with the provided configuration.
§Examples
use async_nats::jetstream::consumer::PullConsumer;
use futures::StreamExt;
let client = async_nats::connect("localhost:4222").await?;
let jetstream = async_nats::jetstream::new(client);
let consumer: PullConsumer = jetstream
.get_stream("events")
.await?
.get_consumer("pull")
.await?;
let mut messages = consumer.batch().max_messages(100).messages().await?;
while let Some(message) = messages.next().await {
let message = message?;
println!("message: {:?}", message);
message.ack().await?;
}