use core::mem;
use core::cmp::Ordering;
extern crate flatbuffers;
use self::flatbuffers::{EndianScalar, Follow};
// Legacy module-level metadata for `BoundType`, kept only for pre-2.0
// flatbuffers API compatibility; prefer the associated constants on the type.
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_BOUND_TYPE: i8 = 0;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MAX_BOUND_TYPE: i8 = 3;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_BOUND_TYPE: [BoundType; 4] = [
BoundType::Unknown,
BoundType::Unbounded,
BoundType::Included,
BoundType::Excluded,
];
/// FlatBuffers enum `BoundType`, stored on the wire as an `i8`.
/// Newtype wrapper so discriminants outside the known variants stay
/// representable (see `variant_name`, which returns `None` for them).
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct BoundType(pub i8);
#[allow(non_upper_case_globals)]
impl BoundType {
    /// Known variants.
    pub const Unknown: Self = Self(0);
    pub const Unbounded: Self = Self(1);
    pub const Included: Self = Self(2);
    pub const Excluded: Self = Self(3);
    /// Smallest and largest known discriminants.
    pub const ENUM_MIN: i8 = 0;
    pub const ENUM_MAX: i8 = 3;
    /// Every known variant, in discriminant order.
    pub const ENUM_VALUES: &'static [Self] = &[
        Self::Unknown,
        Self::Unbounded,
        Self::Included,
        Self::Excluded,
    ];
    /// Human-readable variant name, or `None` for a discriminant not in
    /// this schema version.
    pub fn variant_name(self) -> Option<&'static str> {
        match self.0 {
            0 => Some("Unknown"),
            1 => Some("Unbounded"),
            2 => Some("Included"),
            3 => Some("Excluded"),
            _ => None,
        }
    }
}
impl core::fmt::Debug for BoundType {
    /// Prints the variant name, or `<UNKNOWN n>` for an unrecognized value.
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        match self.variant_name() {
            Some(name) => f.write_str(name),
            None => write!(f, "<UNKNOWN {:?}>", self.0),
        }
    }
}
// Deserialization: read the raw i8 at `loc` and wrap it without validation.
impl<'a> flatbuffers::Follow<'a> for BoundType {
type Inner = Self;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
let b = flatbuffers::read_scalar_at::<i8>(buf, loc);
Self(b)
}
}
// Serialization: write the raw i8 into the destination buffer.
impl flatbuffers::Push for BoundType {
type Output = BoundType;
#[inline]
unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
flatbuffers::emplace_scalar::<i8>(dst, self.0);
}
}
// Byte-order conversion delegates to i8; for a single-byte value this is
// effectively an identity, kept for trait uniformity.
impl flatbuffers::EndianScalar for BoundType {
type Scalar = i8;
#[inline]
fn to_little_endian(self) -> i8 {
self.0.to_le()
}
#[inline]
#[allow(clippy::wrong_self_convention)]
fn from_little_endian(v: i8) -> Self {
let b = i8::from_le(v);
Self(b)
}
}
// Verification defers to i8's verifier (bounds only; any byte value is accepted).
impl<'a> flatbuffers::Verifiable for BoundType {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
i8::run_verifier(v, pos)
}
}
// Marker: vectors of BoundType need only a length/bounds check to verify.
impl flatbuffers::SimpleToVerifyInSlice for BoundType {}
// Legacy module-level metadata for `CompressionFormat` (pre-2.0 API shape).
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_COMPRESSION_FORMAT: i8 = 0;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MAX_COMPRESSION_FORMAT: i8 = 4;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_COMPRESSION_FORMAT: [CompressionFormat; 5] = [
CompressionFormat::None,
CompressionFormat::Snappy,
CompressionFormat::Zlib,
CompressionFormat::Lz4,
CompressionFormat::Zstd,
];
/// FlatBuffers enum `CompressionFormat`, stored on the wire as an `i8`.
/// Unknown discriminants remain representable (see `variant_name`).
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct CompressionFormat(pub i8);
#[allow(non_upper_case_globals)]
impl CompressionFormat {
    /// Known variants.
    pub const None: Self = Self(0);
    pub const Snappy: Self = Self(1);
    pub const Zlib: Self = Self(2);
    pub const Lz4: Self = Self(3);
    pub const Zstd: Self = Self(4);
    /// Smallest and largest known discriminants.
    pub const ENUM_MIN: i8 = 0;
    pub const ENUM_MAX: i8 = 4;
    /// Every known variant, in discriminant order.
    pub const ENUM_VALUES: &'static [Self] = &[
        Self::None,
        Self::Snappy,
        Self::Zlib,
        Self::Lz4,
        Self::Zstd,
    ];
    /// Human-readable variant name, or `None` for a discriminant not in
    /// this schema version.
    pub fn variant_name(self) -> Option<&'static str> {
        match self.0 {
            0 => Some("None"),
            1 => Some("Snappy"),
            2 => Some("Zlib"),
            3 => Some("Lz4"),
            4 => Some("Zstd"),
            _ => None,
        }
    }
}
impl core::fmt::Debug for CompressionFormat {
    /// Prints the variant name, or `<UNKNOWN n>` for an unrecognized value.
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        match self.variant_name() {
            Some(name) => f.write_str(name),
            None => write!(f, "<UNKNOWN {:?}>", self.0),
        }
    }
}
// Deserialization: read the raw i8 at `loc` and wrap it without validation.
impl<'a> flatbuffers::Follow<'a> for CompressionFormat {
type Inner = Self;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
let b = flatbuffers::read_scalar_at::<i8>(buf, loc);
Self(b)
}
}
// Serialization: write the raw i8 into the destination buffer.
impl flatbuffers::Push for CompressionFormat {
type Output = CompressionFormat;
#[inline]
unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
flatbuffers::emplace_scalar::<i8>(dst, self.0);
}
}
// Byte-order conversion delegates to i8 (single byte, effectively identity).
impl flatbuffers::EndianScalar for CompressionFormat {
type Scalar = i8;
#[inline]
fn to_little_endian(self) -> i8 {
self.0.to_le()
}
#[inline]
#[allow(clippy::wrong_self_convention)]
fn from_little_endian(v: i8) -> Self {
let b = i8::from_le(v);
Self(b)
}
}
// Verification defers to i8's verifier; any byte value is accepted.
impl<'a> flatbuffers::Verifiable for CompressionFormat {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
i8::run_verifier(v, pos)
}
}
// Marker: vectors of CompressionFormat verify by bounds check alone.
impl flatbuffers::SimpleToVerifyInSlice for CompressionFormat {}
// Legacy module-level metadata for `CheckpointMetadata` (pre-2.0 API shape).
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_CHECKPOINT_METADATA: u8 = 0;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MAX_CHECKPOINT_METADATA: u8 = 1;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_CHECKPOINT_METADATA: [CheckpointMetadata; 2] = [
CheckpointMetadata::NONE,
CheckpointMetadata::WriterCheckpoint,
];
/// FlatBuffers union discriminant `CheckpointMetadata`, stored as a `u8`.
/// `NONE` (0) means no union value is present.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct CheckpointMetadata(pub u8);
#[allow(non_upper_case_globals)]
impl CheckpointMetadata {
    /// Known variants.
    pub const NONE: Self = Self(0);
    pub const WriterCheckpoint: Self = Self(1);
    /// Smallest and largest known discriminants.
    pub const ENUM_MIN: u8 = 0;
    pub const ENUM_MAX: u8 = 1;
    /// Every known variant, in discriminant order.
    pub const ENUM_VALUES: &'static [Self] = &[
        Self::NONE,
        Self::WriterCheckpoint,
    ];
    /// Human-readable variant name, or `None` for a discriminant not in
    /// this schema version.
    pub fn variant_name(self) -> Option<&'static str> {
        match self.0 {
            0 => Some("NONE"),
            1 => Some("WriterCheckpoint"),
            _ => None,
        }
    }
}
impl core::fmt::Debug for CheckpointMetadata {
    /// Prints the variant name, or `<UNKNOWN n>` for an unrecognized value.
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        match self.variant_name() {
            Some(name) => f.write_str(name),
            None => write!(f, "<UNKNOWN {:?}>", self.0),
        }
    }
}
// Deserialization: read the raw u8 at `loc` and wrap it without validation.
impl<'a> flatbuffers::Follow<'a> for CheckpointMetadata {
type Inner = Self;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
let b = flatbuffers::read_scalar_at::<u8>(buf, loc);
Self(b)
}
}
// Serialization: write the raw u8 into the destination buffer.
impl flatbuffers::Push for CheckpointMetadata {
type Output = CheckpointMetadata;
#[inline]
unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
flatbuffers::emplace_scalar::<u8>(dst, self.0);
}
}
// Byte-order conversion delegates to u8 (single byte, effectively identity).
impl flatbuffers::EndianScalar for CheckpointMetadata {
type Scalar = u8;
#[inline]
fn to_little_endian(self) -> u8 {
self.0.to_le()
}
#[inline]
#[allow(clippy::wrong_self_convention)]
fn from_little_endian(v: u8) -> Self {
let b = u8::from_le(v);
Self(b)
}
}
// Verification defers to u8's verifier; any byte value is accepted.
impl<'a> flatbuffers::Verifiable for CheckpointMetadata {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
u8::run_verifier(v, pos)
}
}
// Marker: vectors of CheckpointMetadata verify by bounds check alone.
impl flatbuffers::SimpleToVerifyInSlice for CheckpointMetadata {}
// Marker type for offsets to tables stored inside the CheckpointMetadata union.
pub struct CheckpointMetadataUnionTableOffset {}
// Marker type for offsets pointing at a `Uuid` table.
pub enum UuidOffset {}
/// FlatBuffers table `Uuid`: two `u64` fields, `high` and `low`.
#[derive(Copy, Clone, PartialEq)]
pub struct Uuid<'a> {
pub _tab: flatbuffers::Table<'a>,
}
// Resolve a Uuid table at `loc` in `buf` (no validation; see the Verifiable impl).
impl<'a> flatbuffers::Follow<'a> for Uuid<'a> {
type Inner = Uuid<'a>;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> Uuid<'a> {
// Vtable slot offsets for each field.
pub const VT_HIGH: flatbuffers::VOffsetT = 4;
pub const VT_LOW: flatbuffers::VOffsetT = 6;
/// Wraps an already-located table. Caller must ensure `table` actually
/// refers to a valid `Uuid` table.
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
Uuid { _tab: table }
}
/// Serializes a `Uuid` into `_fbb` from `args` and returns its offset.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args UuidArgs
) -> flatbuffers::WIPOffset<Uuid<'bldr>> {
let mut builder = UuidBuilder::new(_fbb);
builder.add_low(args.low);
builder.add_high(args.high);
builder.finish()
}
/// Field `high`; 0 when the slot is absent from the table.
#[inline]
pub fn high(&self) -> u64 {
unsafe { self._tab.get::<u64>(Uuid::VT_HIGH, Some(0)).unwrap()}
}
/// Field `low`; 0 when the slot is absent from the table.
#[inline]
pub fn low(&self) -> u64 {
unsafe { self._tab.get::<u64>(Uuid::VT_LOW, Some(0)).unwrap()}
}
}
// Structural verifier: both fields are optional (`false`) scalar u64 slots.
impl flatbuffers::Verifiable for Uuid<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<u64>("high", Self::VT_HIGH, false)?
.visit_field::<u64>("low", Self::VT_LOW, false)?
.finish();
Ok(())
}
}
/// Owned arguments for [`Uuid::create`].
pub struct UuidArgs {
pub high: u64,
pub low: u64,
}
/// Default arguments: both halves zero, matching the schema defaults read
/// back by `Uuid::high` / `Uuid::low`.
// Fix: the generated impl carried an unused, unconstrained lifetime
// parameter `'a` (`impl<'a> Default for UuidArgs`) even though `UuidArgs`
// has no lifetime; removed (clippy::extra_unused_lifetimes).
impl Default for UuidArgs {
    #[inline]
    fn default() -> Self {
        UuidArgs {
            high: 0,
            low: 0,
        }
    }
}
/// Incremental builder for a `Uuid` table.
pub struct UuidBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> UuidBuilder<'a, 'b, A> {
// push_slot's third argument is the schema default; flatbuffers elides
// slots whose value equals it.
#[inline]
pub fn add_high(&mut self, high: u64) {
self.fbb_.push_slot::<u64>(Uuid::VT_HIGH, high, 0);
}
#[inline]
pub fn add_low(&mut self, low: u64) {
self.fbb_.push_slot::<u64>(Uuid::VT_LOW, low, 0);
}
/// Starts a new in-progress table in `_fbb`.
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> UuidBuilder<'a, 'b, A> {
let start = _fbb.start_table();
UuidBuilder {
fbb_: _fbb,
start_: start,
}
}
/// Finalizes the table and returns its offset.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<Uuid<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for Uuid<'_> {
    /// Struct-style debug output with both field accessors.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("Uuid")
            .field("high", &self.high())
            .field("low", &self.low())
            .finish()
    }
}
// Marker type for offsets pointing at a `Ulid` table.
pub enum UlidOffset {}
/// FlatBuffers table `Ulid`: two `u64` fields, `high` and `low`.
#[derive(Copy, Clone, PartialEq)]
pub struct Ulid<'a> {
pub _tab: flatbuffers::Table<'a>,
}
// Resolve a Ulid table at `loc` in `buf` (no validation; see the Verifiable impl).
impl<'a> flatbuffers::Follow<'a> for Ulid<'a> {
type Inner = Ulid<'a>;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> Ulid<'a> {
// Vtable slot offsets for each field.
pub const VT_HIGH: flatbuffers::VOffsetT = 4;
pub const VT_LOW: flatbuffers::VOffsetT = 6;
/// Wraps an already-located table. Caller must ensure `table` actually
/// refers to a valid `Ulid` table.
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
Ulid { _tab: table }
}
/// Serializes a `Ulid` into `_fbb` from `args` and returns its offset.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args UlidArgs
) -> flatbuffers::WIPOffset<Ulid<'bldr>> {
let mut builder = UlidBuilder::new(_fbb);
builder.add_low(args.low);
builder.add_high(args.high);
builder.finish()
}
/// Field `high`; 0 when the slot is absent from the table.
#[inline]
pub fn high(&self) -> u64 {
unsafe { self._tab.get::<u64>(Ulid::VT_HIGH, Some(0)).unwrap()}
}
/// Field `low`; 0 when the slot is absent from the table.
#[inline]
pub fn low(&self) -> u64 {
unsafe { self._tab.get::<u64>(Ulid::VT_LOW, Some(0)).unwrap()}
}
}
// Structural verifier: both fields are optional (`false`) scalar u64 slots.
impl flatbuffers::Verifiable for Ulid<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<u64>("high", Self::VT_HIGH, false)?
.visit_field::<u64>("low", Self::VT_LOW, false)?
.finish();
Ok(())
}
}
/// Owned arguments for [`Ulid::create`].
pub struct UlidArgs {
pub high: u64,
pub low: u64,
}
/// Default arguments: both halves zero, matching the schema defaults read
/// back by `Ulid::high` / `Ulid::low`.
// Fix: the generated impl carried an unused, unconstrained lifetime
// parameter `'a` (`impl<'a> Default for UlidArgs`) even though `UlidArgs`
// has no lifetime; removed (clippy::extra_unused_lifetimes).
impl Default for UlidArgs {
    #[inline]
    fn default() -> Self {
        UlidArgs {
            high: 0,
            low: 0,
        }
    }
}
/// Incremental builder for a `Ulid` table.
pub struct UlidBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> UlidBuilder<'a, 'b, A> {
// push_slot elides slots whose value equals the schema default (0).
#[inline]
pub fn add_high(&mut self, high: u64) {
self.fbb_.push_slot::<u64>(Ulid::VT_HIGH, high, 0);
}
#[inline]
pub fn add_low(&mut self, low: u64) {
self.fbb_.push_slot::<u64>(Ulid::VT_LOW, low, 0);
}
/// Starts a new in-progress table in `_fbb`.
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> UlidBuilder<'a, 'b, A> {
let start = _fbb.start_table();
UlidBuilder {
fbb_: _fbb,
start_: start,
}
}
/// Finalizes the table and returns its offset.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<Ulid<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for Ulid<'_> {
    /// Struct-style debug output with both field accessors.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("Ulid")
            .field("high", &self.high())
            .field("low", &self.low())
            .finish()
    }
}
// Marker type for offsets pointing at a `BytesBound` table.
pub enum BytesBoundOffset {}
/// FlatBuffers table `BytesBound`: an optional `key` byte vector plus a
/// `BoundType` discriminant.
#[derive(Copy, Clone, PartialEq)]
pub struct BytesBound<'a> {
pub _tab: flatbuffers::Table<'a>,
}
// Resolve a BytesBound table at `loc` in `buf` (no validation here).
impl<'a> flatbuffers::Follow<'a> for BytesBound<'a> {
type Inner = BytesBound<'a>;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> BytesBound<'a> {
// Vtable slot offsets for each field.
pub const VT_KEY: flatbuffers::VOffsetT = 4;
pub const VT_BOUND_TYPE: flatbuffers::VOffsetT = 6;
/// Wraps an already-located table. Caller must ensure `table` actually
/// refers to a valid `BytesBound` table.
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
BytesBound { _tab: table }
}
/// Serializes a `BytesBound` into `_fbb` from `args` and returns its offset.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args BytesBoundArgs<'args>
) -> flatbuffers::WIPOffset<BytesBound<'bldr>> {
let mut builder = BytesBoundBuilder::new(_fbb);
if let Some(x) = args.key { builder.add_key(x); }
builder.add_bound_type(args.bound_type);
builder.finish()
}
/// Optional field `key`; `None` when the slot is absent.
#[inline]
pub fn key(&self) -> Option<flatbuffers::Vector<'a, u8>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u8>>>(BytesBound::VT_KEY, None)}
}
/// Field `bound_type`; defaults to `BoundType::Unknown` when absent.
#[inline]
pub fn bound_type(&self) -> BoundType {
unsafe { self._tab.get::<BoundType>(BytesBound::VT_BOUND_TYPE, Some(BoundType::Unknown)).unwrap()}
}
}
// Structural verifier: both fields are optional (`false`).
impl flatbuffers::Verifiable for BytesBound<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, u8>>>("key", Self::VT_KEY, false)?
.visit_field::<BoundType>("bound_type", Self::VT_BOUND_TYPE, false)?
.finish();
Ok(())
}
}
/// Arguments for [`BytesBound::create`].
pub struct BytesBoundArgs<'a> {
pub key: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, u8>>>,
pub bound_type: BoundType,
}
// Defaults mirror the reader defaults: no key, Unknown bound type.
impl<'a> Default for BytesBoundArgs<'a> {
#[inline]
fn default() -> Self {
BytesBoundArgs {
key: None,
bound_type: BoundType::Unknown,
}
}
}
/// Incremental builder for a `BytesBound` table.
pub struct BytesBoundBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> BytesBoundBuilder<'a, 'b, A> {
// push_slot_always: offset fields are written unconditionally when set.
#[inline]
pub fn add_key(&mut self, key: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u8>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(BytesBound::VT_KEY, key);
}
// push_slot elides the slot when the value equals BoundType::Unknown.
#[inline]
pub fn add_bound_type(&mut self, bound_type: BoundType) {
self.fbb_.push_slot::<BoundType>(BytesBound::VT_BOUND_TYPE, bound_type, BoundType::Unknown);
}
/// Starts a new in-progress table in `_fbb`.
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> BytesBoundBuilder<'a, 'b, A> {
let start = _fbb.start_table();
BytesBoundBuilder {
fbb_: _fbb,
start_: start,
}
}
/// Finalizes the table and returns its offset.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<BytesBound<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for BytesBound<'_> {
    /// Struct-style debug output with both field accessors.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("BytesBound")
            .field("key", &self.key())
            .field("bound_type", &self.bound_type())
            .finish()
    }
}
// Marker type for offsets pointing at a `BytesRange` table.
pub enum BytesRangeOffset {}
/// FlatBuffers table `BytesRange`: two required `BytesBound` sub-tables.
#[derive(Copy, Clone, PartialEq)]
pub struct BytesRange<'a> {
pub _tab: flatbuffers::Table<'a>,
}
// Resolve a BytesRange table at `loc` in `buf` (no validation here).
impl<'a> flatbuffers::Follow<'a> for BytesRange<'a> {
type Inner = BytesRange<'a>;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> BytesRange<'a> {
// Vtable slot offsets for each field.
pub const VT_START_BOUND: flatbuffers::VOffsetT = 4;
pub const VT_END_BOUND: flatbuffers::VOffsetT = 6;
/// Wraps an already-located table. Caller must ensure `table` actually
/// refers to a valid `BytesRange` table.
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
BytesRange { _tab: table }
}
/// Serializes a `BytesRange` into `_fbb` from `args` and returns its offset.
/// Both bounds are required; `BytesRangeBuilder::finish` enforces this.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args BytesRangeArgs<'args>
) -> flatbuffers::WIPOffset<BytesRange<'bldr>> {
let mut builder = BytesRangeBuilder::new(_fbb);
if let Some(x) = args.end_bound { builder.add_end_bound(x); }
if let Some(x) = args.start_bound { builder.add_start_bound(x); }
builder.finish()
}
/// Required field `start_bound` (unwrap is safe for verified buffers).
#[inline]
pub fn start_bound(&self) -> BytesBound<'a> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<BytesBound>>(BytesRange::VT_START_BOUND, None).unwrap()}
}
/// Required field `end_bound` (unwrap is safe for verified buffers).
#[inline]
pub fn end_bound(&self) -> BytesBound<'a> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<BytesBound>>(BytesRange::VT_END_BOUND, None).unwrap()}
}
}
// Structural verifier: both sub-table fields are required (`true`).
impl flatbuffers::Verifiable for BytesRange<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<BytesBound>>("start_bound", Self::VT_START_BOUND, true)?
.visit_field::<flatbuffers::ForwardsUOffset<BytesBound>>("end_bound", Self::VT_END_BOUND, true)?
.finish();
Ok(())
}
}
/// Arguments for [`BytesRange::create`]. Both fields are required at
/// serialization time even though they are modeled as `Option` here.
pub struct BytesRangeArgs<'a> {
pub start_bound: Option<flatbuffers::WIPOffset<BytesBound<'a>>>,
pub end_bound: Option<flatbuffers::WIPOffset<BytesBound<'a>>>,
}
impl<'a> Default for BytesRangeArgs<'a> {
#[inline]
fn default() -> Self {
BytesRangeArgs {
start_bound: None, end_bound: None, }
}
}
/// Incremental builder for a `BytesRange` table.
pub struct BytesRangeBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> BytesRangeBuilder<'a, 'b, A> {
#[inline]
pub fn add_start_bound(&mut self, start_bound: flatbuffers::WIPOffset<BytesBound<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<BytesBound>>(BytesRange::VT_START_BOUND, start_bound);
}
#[inline]
pub fn add_end_bound(&mut self, end_bound: flatbuffers::WIPOffset<BytesBound<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<BytesBound>>(BytesRange::VT_END_BOUND, end_bound);
}
/// Starts a new in-progress table in `_fbb`.
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> BytesRangeBuilder<'a, 'b, A> {
let start = _fbb.start_table();
BytesRangeBuilder {
fbb_: _fbb,
start_: start,
}
}
/// Finalizes the table; asserts that both required fields were set.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<BytesRange<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, BytesRange::VT_START_BOUND,"start_bound");
self.fbb_.required(o, BytesRange::VT_END_BOUND,"end_bound");
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for BytesRange<'_> {
    /// Struct-style debug output with both field accessors.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("BytesRange")
            .field("start_bound", &self.start_bound())
            .field("end_bound", &self.end_bound())
            .finish()
    }
}
// Marker type for offsets pointing at an `SsTableInfo` table.
pub enum SsTableInfoOffset {}
/// FlatBuffers table `SsTableInfo`: optional `first_key` bytes, index and
/// filter extents (offset/len pairs), and the `compression_format`.
#[derive(Copy, Clone, PartialEq)]
pub struct SsTableInfo<'a> {
pub _tab: flatbuffers::Table<'a>,
}
// Resolve an SsTableInfo table at `loc` in `buf` (no validation here).
impl<'a> flatbuffers::Follow<'a> for SsTableInfo<'a> {
type Inner = SsTableInfo<'a>;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> SsTableInfo<'a> {
// Vtable slot offsets for each field.
pub const VT_FIRST_KEY: flatbuffers::VOffsetT = 4;
pub const VT_INDEX_OFFSET: flatbuffers::VOffsetT = 6;
pub const VT_INDEX_LEN: flatbuffers::VOffsetT = 8;
pub const VT_FILTER_OFFSET: flatbuffers::VOffsetT = 10;
pub const VT_FILTER_LEN: flatbuffers::VOffsetT = 12;
pub const VT_COMPRESSION_FORMAT: flatbuffers::VOffsetT = 14;
/// Wraps an already-located table. Caller must ensure `table` actually
/// refers to a valid `SsTableInfo` table.
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
SsTableInfo { _tab: table }
}
/// Serializes an `SsTableInfo` into `_fbb` from `args` and returns its offset.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args SsTableInfoArgs<'args>
) -> flatbuffers::WIPOffset<SsTableInfo<'bldr>> {
let mut builder = SsTableInfoBuilder::new(_fbb);
builder.add_filter_len(args.filter_len);
builder.add_filter_offset(args.filter_offset);
builder.add_index_len(args.index_len);
builder.add_index_offset(args.index_offset);
if let Some(x) = args.first_key { builder.add_first_key(x); }
builder.add_compression_format(args.compression_format);
builder.finish()
}
/// Optional field `first_key`; `None` when the slot is absent.
#[inline]
pub fn first_key(&self) -> Option<flatbuffers::Vector<'a, u8>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u8>>>(SsTableInfo::VT_FIRST_KEY, None)}
}
/// Field `index_offset`; 0 when absent.
#[inline]
pub fn index_offset(&self) -> u64 {
unsafe { self._tab.get::<u64>(SsTableInfo::VT_INDEX_OFFSET, Some(0)).unwrap()}
}
/// Field `index_len`; 0 when absent.
#[inline]
pub fn index_len(&self) -> u64 {
unsafe { self._tab.get::<u64>(SsTableInfo::VT_INDEX_LEN, Some(0)).unwrap()}
}
/// Field `filter_offset`; 0 when absent.
#[inline]
pub fn filter_offset(&self) -> u64 {
unsafe { self._tab.get::<u64>(SsTableInfo::VT_FILTER_OFFSET, Some(0)).unwrap()}
}
/// Field `filter_len`; 0 when absent.
#[inline]
pub fn filter_len(&self) -> u64 {
unsafe { self._tab.get::<u64>(SsTableInfo::VT_FILTER_LEN, Some(0)).unwrap()}
}
/// Field `compression_format`; `CompressionFormat::None` when absent.
#[inline]
pub fn compression_format(&self) -> CompressionFormat {
unsafe { self._tab.get::<CompressionFormat>(SsTableInfo::VT_COMPRESSION_FORMAT, Some(CompressionFormat::None)).unwrap()}
}
}
// Structural verifier: every field is optional (`false`).
impl flatbuffers::Verifiable for SsTableInfo<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, u8>>>("first_key", Self::VT_FIRST_KEY, false)?
.visit_field::<u64>("index_offset", Self::VT_INDEX_OFFSET, false)?
.visit_field::<u64>("index_len", Self::VT_INDEX_LEN, false)?
.visit_field::<u64>("filter_offset", Self::VT_FILTER_OFFSET, false)?
.visit_field::<u64>("filter_len", Self::VT_FILTER_LEN, false)?
.visit_field::<CompressionFormat>("compression_format", Self::VT_COMPRESSION_FORMAT, false)?
.finish();
Ok(())
}
}
/// Arguments for [`SsTableInfo::create`].
pub struct SsTableInfoArgs<'a> {
pub first_key: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, u8>>>,
pub index_offset: u64,
pub index_len: u64,
pub filter_offset: u64,
pub filter_len: u64,
pub compression_format: CompressionFormat,
}
// Defaults mirror the reader defaults above.
impl<'a> Default for SsTableInfoArgs<'a> {
#[inline]
fn default() -> Self {
SsTableInfoArgs {
first_key: None,
index_offset: 0,
index_len: 0,
filter_offset: 0,
filter_len: 0,
compression_format: CompressionFormat::None,
}
}
}
/// Incremental builder for an `SsTableInfo` table.
pub struct SsTableInfoBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> SsTableInfoBuilder<'a, 'b, A> {
// Offset field: written unconditionally when set.
#[inline]
pub fn add_first_key(&mut self, first_key: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u8>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(SsTableInfo::VT_FIRST_KEY, first_key);
}
// Scalar fields: push_slot elides slots equal to the default (0 / None).
#[inline]
pub fn add_index_offset(&mut self, index_offset: u64) {
self.fbb_.push_slot::<u64>(SsTableInfo::VT_INDEX_OFFSET, index_offset, 0);
}
#[inline]
pub fn add_index_len(&mut self, index_len: u64) {
self.fbb_.push_slot::<u64>(SsTableInfo::VT_INDEX_LEN, index_len, 0);
}
#[inline]
pub fn add_filter_offset(&mut self, filter_offset: u64) {
self.fbb_.push_slot::<u64>(SsTableInfo::VT_FILTER_OFFSET, filter_offset, 0);
}
#[inline]
pub fn add_filter_len(&mut self, filter_len: u64) {
self.fbb_.push_slot::<u64>(SsTableInfo::VT_FILTER_LEN, filter_len, 0);
}
#[inline]
pub fn add_compression_format(&mut self, compression_format: CompressionFormat) {
self.fbb_.push_slot::<CompressionFormat>(SsTableInfo::VT_COMPRESSION_FORMAT, compression_format, CompressionFormat::None);
}
/// Starts a new in-progress table in `_fbb`.
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> SsTableInfoBuilder<'a, 'b, A> {
let start = _fbb.start_table();
SsTableInfoBuilder {
fbb_: _fbb,
start_: start,
}
}
/// Finalizes the table and returns its offset.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<SsTableInfo<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for SsTableInfo<'_> {
    /// Struct-style debug output with every field accessor.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("SsTableInfo")
            .field("first_key", &self.first_key())
            .field("index_offset", &self.index_offset())
            .field("index_len", &self.index_len())
            .field("filter_offset", &self.filter_offset())
            .field("filter_len", &self.filter_len())
            .field("compression_format", &self.compression_format())
            .finish()
    }
}
// Marker type for offsets pointing at a `BlockMeta` table.
pub enum BlockMetaOffset {}
/// FlatBuffers table `BlockMeta`: a block `offset` plus its required
/// `first_key` byte vector.
#[derive(Copy, Clone, PartialEq)]
pub struct BlockMeta<'a> {
pub _tab: flatbuffers::Table<'a>,
}
// Resolve a BlockMeta table at `loc` in `buf` (no validation here).
impl<'a> flatbuffers::Follow<'a> for BlockMeta<'a> {
type Inner = BlockMeta<'a>;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> BlockMeta<'a> {
// Vtable slot offsets for each field.
pub const VT_OFFSET: flatbuffers::VOffsetT = 4;
pub const VT_FIRST_KEY: flatbuffers::VOffsetT = 6;
/// Wraps an already-located table. Caller must ensure `table` actually
/// refers to a valid `BlockMeta` table.
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
BlockMeta { _tab: table }
}
/// Serializes a `BlockMeta` into `_fbb` from `args` and returns its offset.
/// `first_key` is required; `BlockMetaBuilder::finish` enforces this.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args BlockMetaArgs<'args>
) -> flatbuffers::WIPOffset<BlockMeta<'bldr>> {
let mut builder = BlockMetaBuilder::new(_fbb);
builder.add_offset(args.offset);
if let Some(x) = args.first_key { builder.add_first_key(x); }
builder.finish()
}
/// Field `offset`; 0 when the slot is absent.
#[inline]
pub fn offset(&self) -> u64 {
unsafe { self._tab.get::<u64>(BlockMeta::VT_OFFSET, Some(0)).unwrap()}
}
/// Required field `first_key` (unwrap is safe for verified buffers).
#[inline]
pub fn first_key(&self) -> flatbuffers::Vector<'a, u8> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u8>>>(BlockMeta::VT_FIRST_KEY, None).unwrap()}
}
}
// Structural verifier: `offset` optional, `first_key` required (`true`).
impl flatbuffers::Verifiable for BlockMeta<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<u64>("offset", Self::VT_OFFSET, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, u8>>>("first_key", Self::VT_FIRST_KEY, true)?
.finish();
Ok(())
}
}
/// Arguments for [`BlockMeta::create`]; `first_key` is required at
/// serialization time even though it is modeled as `Option` here.
pub struct BlockMetaArgs<'a> {
pub offset: u64,
pub first_key: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, u8>>>,
}
impl<'a> Default for BlockMetaArgs<'a> {
#[inline]
fn default() -> Self {
BlockMetaArgs {
offset: 0,
first_key: None, }
}
}
/// Incremental builder for a `BlockMeta` table.
pub struct BlockMetaBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> BlockMetaBuilder<'a, 'b, A> {
// push_slot elides the slot when the value equals the default (0).
#[inline]
pub fn add_offset(&mut self, offset: u64) {
self.fbb_.push_slot::<u64>(BlockMeta::VT_OFFSET, offset, 0);
}
// Offset field: written unconditionally when set.
#[inline]
pub fn add_first_key(&mut self, first_key: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u8>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(BlockMeta::VT_FIRST_KEY, first_key);
}
/// Starts a new in-progress table in `_fbb`.
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> BlockMetaBuilder<'a, 'b, A> {
let start = _fbb.start_table();
BlockMetaBuilder {
fbb_: _fbb,
start_: start,
}
}
/// Finalizes the table; asserts that required `first_key` was set.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<BlockMeta<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, BlockMeta::VT_FIRST_KEY,"first_key");
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for BlockMeta<'_> {
    /// Struct-style debug output with both field accessors.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("BlockMeta")
            .field("offset", &self.offset())
            .field("first_key", &self.first_key())
            .finish()
    }
}
// Marker type for offsets pointing at an `SsTableIndex` table.
pub enum SsTableIndexOffset {}
/// FlatBuffers table `SsTableIndex`: a required vector of `BlockMeta` tables.
#[derive(Copy, Clone, PartialEq)]
pub struct SsTableIndex<'a> {
pub _tab: flatbuffers::Table<'a>,
}
// Resolve an SsTableIndex table at `loc` in `buf` (no validation here).
impl<'a> flatbuffers::Follow<'a> for SsTableIndex<'a> {
type Inner = SsTableIndex<'a>;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> SsTableIndex<'a> {
// Vtable slot offset for the single field.
pub const VT_BLOCK_META: flatbuffers::VOffsetT = 4;
/// Wraps an already-located table. Caller must ensure `table` actually
/// refers to a valid `SsTableIndex` table.
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
SsTableIndex { _tab: table }
}
/// Serializes an `SsTableIndex` into `_fbb` from `args` and returns its
/// offset. `block_meta` is required; the builder's `finish` enforces this.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args SsTableIndexArgs<'args>
) -> flatbuffers::WIPOffset<SsTableIndex<'bldr>> {
let mut builder = SsTableIndexBuilder::new(_fbb);
if let Some(x) = args.block_meta { builder.add_block_meta(x); }
builder.finish()
}
/// Required field `block_meta` (unwrap is safe for verified buffers).
#[inline]
pub fn block_meta(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<BlockMeta<'a>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<BlockMeta>>>>(SsTableIndex::VT_BLOCK_META, None).unwrap()}
}
}
// Structural verifier: the block_meta vector is required (`true`).
impl flatbuffers::Verifiable for SsTableIndex<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<BlockMeta>>>>("block_meta", Self::VT_BLOCK_META, true)?
.finish();
Ok(())
}
}
/// Arguments for [`SsTableIndex::create`]; `block_meta` is required at
/// serialization time even though it is modeled as `Option` here.
pub struct SsTableIndexArgs<'a> {
pub block_meta: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<BlockMeta<'a>>>>>,
}
impl<'a> Default for SsTableIndexArgs<'a> {
#[inline]
fn default() -> Self {
SsTableIndexArgs {
block_meta: None, }
}
}
/// Incremental builder for an `SsTableIndex` table.
pub struct SsTableIndexBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> SsTableIndexBuilder<'a, 'b, A> {
// Offset field: written unconditionally when set.
#[inline]
pub fn add_block_meta(&mut self, block_meta: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<BlockMeta<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(SsTableIndex::VT_BLOCK_META, block_meta);
}
/// Starts a new in-progress table in `_fbb`.
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> SsTableIndexBuilder<'a, 'b, A> {
let start = _fbb.start_table();
SsTableIndexBuilder {
fbb_: _fbb,
start_: start,
}
}
/// Finalizes the table; asserts that required `block_meta` was set.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<SsTableIndex<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, SsTableIndex::VT_BLOCK_META,"block_meta");
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for SsTableIndex<'_> {
    /// Struct-style debug output for the single field.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("SsTableIndex")
            .field("block_meta", &self.block_meta())
            .finish()
    }
}
// Marker type for offsets pointing at a `CompactedSsTable` table.
pub enum CompactedSsTableOffset {}
/// FlatBuffers table `CompactedSsTable`: a required `Ulid` id, a required
/// `SsTableInfo`, and an optional `visible_range`.
#[derive(Copy, Clone, PartialEq)]
pub struct CompactedSsTable<'a> {
pub _tab: flatbuffers::Table<'a>,
}
// Resolve a CompactedSsTable table at `loc` in `buf` (no validation here).
impl<'a> flatbuffers::Follow<'a> for CompactedSsTable<'a> {
type Inner = CompactedSsTable<'a>;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> CompactedSsTable<'a> {
// Vtable slot offsets for each field.
pub const VT_ID: flatbuffers::VOffsetT = 4;
pub const VT_INFO: flatbuffers::VOffsetT = 6;
pub const VT_VISIBLE_RANGE: flatbuffers::VOffsetT = 8;
/// Wraps an already-located table. Caller must ensure `table` actually
/// refers to a valid `CompactedSsTable` table.
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
CompactedSsTable { _tab: table }
}
/// Serializes a `CompactedSsTable` into `_fbb` from `args` and returns
/// its offset. `id` and `info` are required per the Verifiable impl below.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args CompactedSsTableArgs<'args>
) -> flatbuffers::WIPOffset<CompactedSsTable<'bldr>> {
let mut builder = CompactedSsTableBuilder::new(_fbb);
if let Some(x) = args.visible_range { builder.add_visible_range(x); }
if let Some(x) = args.info { builder.add_info(x); }
if let Some(x) = args.id { builder.add_id(x); }
builder.finish()
}
/// Required field `id` (unwrap is safe for verified buffers).
#[inline]
pub fn id(&self) -> Ulid<'a> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<Ulid>>(CompactedSsTable::VT_ID, None).unwrap()}
}
/// Required field `info` (unwrap is safe for verified buffers).
#[inline]
pub fn info(&self) -> SsTableInfo<'a> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<SsTableInfo>>(CompactedSsTable::VT_INFO, None).unwrap()}
}
/// Optional field `visible_range`; `None` when the slot is absent.
#[inline]
pub fn visible_range(&self) -> Option<BytesRange<'a>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<BytesRange>>(CompactedSsTable::VT_VISIBLE_RANGE, None)}
}
}
impl flatbuffers::Verifiable for CompactedSsTable<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<Ulid>>("id", Self::VT_ID, true)?
.visit_field::<flatbuffers::ForwardsUOffset<SsTableInfo>>("info", Self::VT_INFO, true)?
.visit_field::<flatbuffers::ForwardsUOffset<BytesRange>>("visible_range", Self::VT_VISIBLE_RANGE, false)?
.finish();
Ok(())
}
}
/// Arguments for `CompactedSsTable::create`; `None` leaves a slot unset.
pub struct CompactedSsTableArgs<'a> {
pub id: Option<flatbuffers::WIPOffset<Ulid<'a>>>,
pub info: Option<flatbuffers::WIPOffset<SsTableInfo<'a>>>,
pub visible_range: Option<flatbuffers::WIPOffset<BytesRange<'a>>>,
}
impl<'a> Default for CompactedSsTableArgs<'a> {
#[inline]
fn default() -> Self {
// NOTE: `id` and `info` are required by the schema; using these defaults
// unchanged will trip the required-field check in `finish`.
CompactedSsTableArgs {
id: None, info: None, visible_range: None,
}
}
}
/// In-progress builder for a `CompactedSsTable` table.
pub struct CompactedSsTableBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> CompactedSsTableBuilder<'a, 'b, A> {
// Offset fields use `push_slot_always` (never elided as a default value).
#[inline]
pub fn add_id(&mut self, id: flatbuffers::WIPOffset<Ulid<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Ulid>>(CompactedSsTable::VT_ID, id);
}
#[inline]
pub fn add_info(&mut self, info: flatbuffers::WIPOffset<SsTableInfo<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<SsTableInfo>>(CompactedSsTable::VT_INFO, info);
}
#[inline]
pub fn add_visible_range(&mut self, visible_range: flatbuffers::WIPOffset<BytesRange<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<BytesRange>>(CompactedSsTable::VT_VISIBLE_RANGE, visible_range);
}
// Starts a new table; pair with `finish`.
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> CompactedSsTableBuilder<'a, 'b, A> {
let start = _fbb.start_table();
CompactedSsTableBuilder {
fbb_: _fbb,
start_: start,
}
}
// Closes the table and runtime-checks that the required `id` and `info`
// slots were written.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<CompactedSsTable<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, CompactedSsTable::VT_ID,"id");
self.fbb_.required(o, CompactedSsTable::VT_INFO,"info");
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for CompactedSsTable<'_> {
    /// Renders the table as a struct, reading each field through its accessor.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("CompactedSsTable")
            .field("id", &self.id())
            .field("info", &self.info())
            .field("visible_range", &self.visible_range())
            .finish()
    }
}
// Marker type used by generated code to name offsets of this table kind.
pub enum SortedRunOffset {}
/// Zero-copy view over a `SortedRun` flatbuffers table.
#[derive(Copy, Clone, PartialEq)]
pub struct SortedRun<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for SortedRun<'a> {
type Inner = SortedRun<'a>;
// Safety contract: `loc` must point at a valid table of this type in `buf`.
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> SortedRun<'a> {
// Vtable slot offsets, fixed by the schema's field order.
pub const VT_ID: flatbuffers::VOffsetT = 4;
pub const VT_SSTS: flatbuffers::VOffsetT = 6;
// Safety: `table` must already be verified (or trusted) as this table type.
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
SortedRun { _tab: table }
}
// Convenience constructor; the `add_*` order is generator-chosen (offsets
// before smaller scalars) and determines the serialized layout.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args SortedRunArgs<'args>
) -> flatbuffers::WIPOffset<SortedRun<'bldr>> {
let mut builder = SortedRunBuilder::new(_fbb);
if let Some(x) = args.ssts { builder.add_ssts(x); }
builder.add_id(args.id);
builder.finish()
}
// Scalar field with schema default 0.
#[inline]
pub fn id(&self) -> u32 {
unsafe { self._tab.get::<u32>(SortedRun::VT_ID, Some(0)).unwrap()}
}
// `ssts` is a required field, hence the `unwrap`.
#[inline]
pub fn ssts(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTable<'a>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTable>>>>(SortedRun::VT_SSTS, None).unwrap()}
}
}
impl flatbuffers::Verifiable for SortedRun<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
// The bool on each `visit_field` marks whether the field is required.
v.visit_table(pos)?
.visit_field::<u32>("id", Self::VT_ID, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<CompactedSsTable>>>>("ssts", Self::VT_SSTS, true)?
.finish();
Ok(())
}
}
/// Arguments for `SortedRun::create`; `None` leaves the `ssts` slot unset.
pub struct SortedRunArgs<'a> {
pub id: u32,
pub ssts: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTable<'a>>>>>,
}
impl<'a> Default for SortedRunArgs<'a> {
#[inline]
fn default() -> Self {
// NOTE: `ssts` is required by the schema; the `None` default will trip
// the required-field check in `SortedRunBuilder::finish`.
SortedRunArgs {
id: 0,
ssts: None, }
}
}
/// In-progress builder for a `SortedRun` table.
pub struct SortedRunBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> SortedRunBuilder<'a, 'b, A> {
// Scalar slot: elided from the table when it equals the default (0).
#[inline]
pub fn add_id(&mut self, id: u32) {
self.fbb_.push_slot::<u32>(SortedRun::VT_ID, id, 0);
}
// Required offset slot: always written.
#[inline]
pub fn add_ssts(&mut self, ssts: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<CompactedSsTable<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(SortedRun::VT_SSTS, ssts);
}
// Starts a new table; pair with `finish`.
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> SortedRunBuilder<'a, 'b, A> {
let start = _fbb.start_table();
SortedRunBuilder {
fbb_: _fbb,
start_: start,
}
}
// Closes the table and runtime-checks the required `ssts` slot.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<SortedRun<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, SortedRun::VT_SSTS,"ssts");
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for SortedRun<'_> {
    /// Renders the table as a struct, reading each field through its accessor.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("SortedRun")
            .field("id", &self.id())
            .field("ssts", &self.ssts())
            .finish()
    }
}
// Marker type used by generated code to name offsets of this table kind.
pub enum ExternalDbOffset {}
/// Zero-copy view over an `ExternalDb` flatbuffers table.
#[derive(Copy, Clone, PartialEq)]
pub struct ExternalDb<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for ExternalDb<'a> {
type Inner = ExternalDb<'a>;
// Safety contract: `loc` must point at a valid table of this type in `buf`.
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> ExternalDb<'a> {
// Vtable slot offsets, fixed by the schema's field order.
pub const VT_PATH: flatbuffers::VOffsetT = 4;
pub const VT_SOURCE_CHECKPOINT_ID: flatbuffers::VOffsetT = 6;
pub const VT_FINAL_CHECKPOINT_ID: flatbuffers::VOffsetT = 8;
pub const VT_SST_IDS: flatbuffers::VOffsetT = 10;
// Safety: `table` must already be verified (or trusted) as this table type.
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
ExternalDb { _tab: table }
}
// Convenience constructor; `add_*` order is generator-chosen and determines
// the serialized layout — do not reorder.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args ExternalDbArgs<'args>
) -> flatbuffers::WIPOffset<ExternalDb<'bldr>> {
let mut builder = ExternalDbBuilder::new(_fbb);
if let Some(x) = args.sst_ids { builder.add_sst_ids(x); }
if let Some(x) = args.final_checkpoint_id { builder.add_final_checkpoint_id(x); }
if let Some(x) = args.source_checkpoint_id { builder.add_source_checkpoint_id(x); }
if let Some(x) = args.path { builder.add_path(x); }
builder.finish()
}
// `path` is a required field, hence the `unwrap`.
#[inline]
pub fn path(&self) -> &'a str {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(ExternalDb::VT_PATH, None).unwrap()}
}
// `source_checkpoint_id` is a required field, hence the `unwrap`.
#[inline]
pub fn source_checkpoint_id(&self) -> Uuid<'a> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<Uuid>>(ExternalDb::VT_SOURCE_CHECKPOINT_ID, None).unwrap()}
}
// Optional field: `None` means the slot is absent.
#[inline]
pub fn final_checkpoint_id(&self) -> Option<Uuid<'a>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<Uuid>>(ExternalDb::VT_FINAL_CHECKPOINT_ID, None)}
}
// `sst_ids` is a required field, hence the `unwrap`.
#[inline]
pub fn sst_ids(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Ulid<'a>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Ulid>>>>(ExternalDb::VT_SST_IDS, None).unwrap()}
}
}
impl flatbuffers::Verifiable for ExternalDb<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
// The bool on each `visit_field` marks whether the field is required.
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<&str>>("path", Self::VT_PATH, true)?
.visit_field::<flatbuffers::ForwardsUOffset<Uuid>>("source_checkpoint_id", Self::VT_SOURCE_CHECKPOINT_ID, true)?
.visit_field::<flatbuffers::ForwardsUOffset<Uuid>>("final_checkpoint_id", Self::VT_FINAL_CHECKPOINT_ID, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<Ulid>>>>("sst_ids", Self::VT_SST_IDS, true)?
.finish();
Ok(())
}
}
/// Arguments for `ExternalDb::create`; `None` leaves a slot unset.
pub struct ExternalDbArgs<'a> {
pub path: Option<flatbuffers::WIPOffset<&'a str>>,
pub source_checkpoint_id: Option<flatbuffers::WIPOffset<Uuid<'a>>>,
pub final_checkpoint_id: Option<flatbuffers::WIPOffset<Uuid<'a>>>,
pub sst_ids: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Ulid<'a>>>>>,
}
impl<'a> Default for ExternalDbArgs<'a> {
#[inline]
fn default() -> Self {
// NOTE: `path`, `source_checkpoint_id` and `sst_ids` are required by the
// schema; these defaults will trip the required-field check in `finish`.
ExternalDbArgs {
path: None, source_checkpoint_id: None, final_checkpoint_id: None,
sst_ids: None, }
}
}
/// In-progress builder for an `ExternalDb` table.
pub struct ExternalDbBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> ExternalDbBuilder<'a, 'b, A> {
// Offset fields use `push_slot_always` (never elided as a default value).
#[inline]
pub fn add_path(&mut self, path: flatbuffers::WIPOffset<&'b str>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ExternalDb::VT_PATH, path);
}
#[inline]
pub fn add_source_checkpoint_id(&mut self, source_checkpoint_id: flatbuffers::WIPOffset<Uuid<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Uuid>>(ExternalDb::VT_SOURCE_CHECKPOINT_ID, source_checkpoint_id);
}
#[inline]
pub fn add_final_checkpoint_id(&mut self, final_checkpoint_id: flatbuffers::WIPOffset<Uuid<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Uuid>>(ExternalDb::VT_FINAL_CHECKPOINT_ID, final_checkpoint_id);
}
#[inline]
pub fn add_sst_ids(&mut self, sst_ids: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<Ulid<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ExternalDb::VT_SST_IDS, sst_ids);
}
// Starts a new table; pair with `finish`.
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> ExternalDbBuilder<'a, 'b, A> {
let start = _fbb.start_table();
ExternalDbBuilder {
fbb_: _fbb,
start_: start,
}
}
// Closes the table and runtime-checks all three required slots.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<ExternalDb<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, ExternalDb::VT_PATH,"path");
self.fbb_.required(o, ExternalDb::VT_SOURCE_CHECKPOINT_ID,"source_checkpoint_id");
self.fbb_.required(o, ExternalDb::VT_SST_IDS,"sst_ids");
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for ExternalDb<'_> {
    /// Renders the table as a struct, reading each field through its accessor.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("ExternalDb")
            .field("path", &self.path())
            .field("source_checkpoint_id", &self.source_checkpoint_id())
            .field("final_checkpoint_id", &self.final_checkpoint_id())
            .field("sst_ids", &self.sst_ids())
            .finish()
    }
}
// Marker type used by generated code to name offsets of this table kind.
pub enum ManifestV1Offset {}
/// Zero-copy view over a `ManifestV1` flatbuffers table — the top-level
/// manifest record (epochs, WAL ids, L0/compacted SST lists, checkpoints).
#[derive(Copy, Clone, PartialEq)]
pub struct ManifestV1<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for ManifestV1<'a> {
type Inner = ManifestV1<'a>;
// Safety contract: `loc` must point at a valid table of this type in `buf`.
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> ManifestV1<'a> {
// Vtable slot offsets, fixed by the schema's field order.
pub const VT_MANIFEST_ID: flatbuffers::VOffsetT = 4;
pub const VT_EXTERNAL_DBS: flatbuffers::VOffsetT = 6;
pub const VT_INITIALIZED: flatbuffers::VOffsetT = 8;
pub const VT_WRITER_EPOCH: flatbuffers::VOffsetT = 10;
pub const VT_COMPACTOR_EPOCH: flatbuffers::VOffsetT = 12;
pub const VT_REPLAY_AFTER_WAL_ID: flatbuffers::VOffsetT = 14;
pub const VT_WAL_ID_LAST_SEEN: flatbuffers::VOffsetT = 16;
pub const VT_L0_LAST_COMPACTED: flatbuffers::VOffsetT = 18;
pub const VT_L0: flatbuffers::VOffsetT = 20;
pub const VT_COMPACTED: flatbuffers::VOffsetT = 22;
pub const VT_LAST_L0_CLOCK_TICK: flatbuffers::VOffsetT = 24;
pub const VT_CHECKPOINTS: flatbuffers::VOffsetT = 26;
pub const VT_LAST_L0_SEQ: flatbuffers::VOffsetT = 28;
pub const VT_WAL_OBJECT_STORE_URI: flatbuffers::VOffsetT = 30;
pub const VT_RECENT_SNAPSHOT_MIN_SEQ: flatbuffers::VOffsetT = 32;
pub const VT_SEQUENCE_TRACKER: flatbuffers::VOffsetT = 34;
// Safety: `table` must already be verified (or trusted) as this table type.
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
ManifestV1 { _tab: table }
}
// Convenience constructor. The generator pushes 64-bit scalars first, then
// offsets, then the bool — this ordering determines the serialized layout
// and must not be changed by hand.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args ManifestV1Args<'args>
) -> flatbuffers::WIPOffset<ManifestV1<'bldr>> {
let mut builder = ManifestV1Builder::new(_fbb);
builder.add_recent_snapshot_min_seq(args.recent_snapshot_min_seq);
builder.add_last_l0_seq(args.last_l0_seq);
builder.add_last_l0_clock_tick(args.last_l0_clock_tick);
builder.add_wal_id_last_seen(args.wal_id_last_seen);
builder.add_replay_after_wal_id(args.replay_after_wal_id);
builder.add_compactor_epoch(args.compactor_epoch);
builder.add_writer_epoch(args.writer_epoch);
builder.add_manifest_id(args.manifest_id);
if let Some(x) = args.sequence_tracker { builder.add_sequence_tracker(x); }
if let Some(x) = args.wal_object_store_uri { builder.add_wal_object_store_uri(x); }
if let Some(x) = args.checkpoints { builder.add_checkpoints(x); }
if let Some(x) = args.compacted { builder.add_compacted(x); }
if let Some(x) = args.l0 { builder.add_l0(x); }
if let Some(x) = args.l0_last_compacted { builder.add_l0_last_compacted(x); }
if let Some(x) = args.external_dbs { builder.add_external_dbs(x); }
builder.add_initialized(args.initialized);
builder.finish()
}
// Scalar accessors below return the schema default when the slot is absent;
// required fields (`l0`, `compacted`, `checkpoints`) unwrap their lookups.
#[inline]
pub fn manifest_id(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV1::VT_MANIFEST_ID, Some(0)).unwrap()}
}
#[inline]
pub fn external_dbs(&self) -> Option<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<ExternalDb<'a>>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<ExternalDb>>>>(ManifestV1::VT_EXTERNAL_DBS, None)}
}
#[inline]
pub fn initialized(&self) -> bool {
unsafe { self._tab.get::<bool>(ManifestV1::VT_INITIALIZED, Some(false)).unwrap()}
}
#[inline]
pub fn writer_epoch(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV1::VT_WRITER_EPOCH, Some(0)).unwrap()}
}
#[inline]
pub fn compactor_epoch(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV1::VT_COMPACTOR_EPOCH, Some(0)).unwrap()}
}
#[inline]
pub fn replay_after_wal_id(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV1::VT_REPLAY_AFTER_WAL_ID, Some(0)).unwrap()}
}
#[inline]
pub fn wal_id_last_seen(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV1::VT_WAL_ID_LAST_SEEN, Some(0)).unwrap()}
}
#[inline]
pub fn l0_last_compacted(&self) -> Option<Ulid<'a>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<Ulid>>(ManifestV1::VT_L0_LAST_COMPACTED, None)}
}
#[inline]
pub fn l0(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTable<'a>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTable>>>>(ManifestV1::VT_L0, None).unwrap()}
}
#[inline]
pub fn compacted(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<SortedRun<'a>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<SortedRun>>>>(ManifestV1::VT_COMPACTED, None).unwrap()}
}
#[inline]
pub fn last_l0_clock_tick(&self) -> i64 {
unsafe { self._tab.get::<i64>(ManifestV1::VT_LAST_L0_CLOCK_TICK, Some(0)).unwrap()}
}
#[inline]
pub fn checkpoints(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Checkpoint<'a>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Checkpoint>>>>(ManifestV1::VT_CHECKPOINTS, None).unwrap()}
}
#[inline]
pub fn last_l0_seq(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV1::VT_LAST_L0_SEQ, Some(0)).unwrap()}
}
#[inline]
pub fn wal_object_store_uri(&self) -> Option<&'a str> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(ManifestV1::VT_WAL_OBJECT_STORE_URI, None)}
}
#[inline]
pub fn recent_snapshot_min_seq(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV1::VT_RECENT_SNAPSHOT_MIN_SEQ, Some(0)).unwrap()}
}
#[inline]
pub fn sequence_tracker(&self) -> Option<flatbuffers::Vector<'a, u8>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u8>>>(ManifestV1::VT_SEQUENCE_TRACKER, None)}
}
}
impl flatbuffers::Verifiable for ManifestV1<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
// The bool on each `visit_field` marks whether the field is required.
v.visit_table(pos)?
.visit_field::<u64>("manifest_id", Self::VT_MANIFEST_ID, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<ExternalDb>>>>("external_dbs", Self::VT_EXTERNAL_DBS, false)?
.visit_field::<bool>("initialized", Self::VT_INITIALIZED, false)?
.visit_field::<u64>("writer_epoch", Self::VT_WRITER_EPOCH, false)?
.visit_field::<u64>("compactor_epoch", Self::VT_COMPACTOR_EPOCH, false)?
.visit_field::<u64>("replay_after_wal_id", Self::VT_REPLAY_AFTER_WAL_ID, false)?
.visit_field::<u64>("wal_id_last_seen", Self::VT_WAL_ID_LAST_SEEN, false)?
.visit_field::<flatbuffers::ForwardsUOffset<Ulid>>("l0_last_compacted", Self::VT_L0_LAST_COMPACTED, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<CompactedSsTable>>>>("l0", Self::VT_L0, true)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<SortedRun>>>>("compacted", Self::VT_COMPACTED, true)?
.visit_field::<i64>("last_l0_clock_tick", Self::VT_LAST_L0_CLOCK_TICK, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<Checkpoint>>>>("checkpoints", Self::VT_CHECKPOINTS, true)?
.visit_field::<u64>("last_l0_seq", Self::VT_LAST_L0_SEQ, false)?
.visit_field::<flatbuffers::ForwardsUOffset<&str>>("wal_object_store_uri", Self::VT_WAL_OBJECT_STORE_URI, false)?
.visit_field::<u64>("recent_snapshot_min_seq", Self::VT_RECENT_SNAPSHOT_MIN_SEQ, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, u8>>>("sequence_tracker", Self::VT_SEQUENCE_TRACKER, false)?
.finish();
Ok(())
}
}
/// Arguments for `ManifestV1::create`; `None` leaves an offset slot unset and
/// scalars fall back to their schema defaults on read.
pub struct ManifestV1Args<'a> {
pub manifest_id: u64,
pub external_dbs: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<ExternalDb<'a>>>>>,
pub initialized: bool,
pub writer_epoch: u64,
pub compactor_epoch: u64,
pub replay_after_wal_id: u64,
pub wal_id_last_seen: u64,
pub l0_last_compacted: Option<flatbuffers::WIPOffset<Ulid<'a>>>,
pub l0: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTable<'a>>>>>,
pub compacted: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<SortedRun<'a>>>>>,
pub last_l0_clock_tick: i64,
pub checkpoints: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Checkpoint<'a>>>>>,
pub last_l0_seq: u64,
pub wal_object_store_uri: Option<flatbuffers::WIPOffset<&'a str>>,
pub recent_snapshot_min_seq: u64,
pub sequence_tracker: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, u8>>>,
}
impl<'a> Default for ManifestV1Args<'a> {
#[inline]
fn default() -> Self {
// NOTE: `l0`, `compacted` and `checkpoints` are required by the schema;
// their `None` defaults will trip the required-field check in `finish`.
ManifestV1Args {
manifest_id: 0,
external_dbs: None,
initialized: false,
writer_epoch: 0,
compactor_epoch: 0,
replay_after_wal_id: 0,
wal_id_last_seen: 0,
l0_last_compacted: None,
l0: None, compacted: None, last_l0_clock_tick: 0,
checkpoints: None, last_l0_seq: 0,
wal_object_store_uri: None,
recent_snapshot_min_seq: 0,
sequence_tracker: None,
}
}
}
/// In-progress builder for a `ManifestV1` table.
pub struct ManifestV1Builder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
// Scalar setters use `push_slot` (the slot is elided when the value equals
// the schema default); offset setters use `push_slot_always`.
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> ManifestV1Builder<'a, 'b, A> {
#[inline]
pub fn add_manifest_id(&mut self, manifest_id: u64) {
self.fbb_.push_slot::<u64>(ManifestV1::VT_MANIFEST_ID, manifest_id, 0);
}
#[inline]
pub fn add_external_dbs(&mut self, external_dbs: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<ExternalDb<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ManifestV1::VT_EXTERNAL_DBS, external_dbs);
}
#[inline]
pub fn add_initialized(&mut self, initialized: bool) {
self.fbb_.push_slot::<bool>(ManifestV1::VT_INITIALIZED, initialized, false);
}
#[inline]
pub fn add_writer_epoch(&mut self, writer_epoch: u64) {
self.fbb_.push_slot::<u64>(ManifestV1::VT_WRITER_EPOCH, writer_epoch, 0);
}
#[inline]
pub fn add_compactor_epoch(&mut self, compactor_epoch: u64) {
self.fbb_.push_slot::<u64>(ManifestV1::VT_COMPACTOR_EPOCH, compactor_epoch, 0);
}
#[inline]
pub fn add_replay_after_wal_id(&mut self, replay_after_wal_id: u64) {
self.fbb_.push_slot::<u64>(ManifestV1::VT_REPLAY_AFTER_WAL_ID, replay_after_wal_id, 0);
}
#[inline]
pub fn add_wal_id_last_seen(&mut self, wal_id_last_seen: u64) {
self.fbb_.push_slot::<u64>(ManifestV1::VT_WAL_ID_LAST_SEEN, wal_id_last_seen, 0);
}
#[inline]
pub fn add_l0_last_compacted(&mut self, l0_last_compacted: flatbuffers::WIPOffset<Ulid<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Ulid>>(ManifestV1::VT_L0_LAST_COMPACTED, l0_last_compacted);
}
#[inline]
pub fn add_l0(&mut self, l0: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<CompactedSsTable<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ManifestV1::VT_L0, l0);
}
#[inline]
pub fn add_compacted(&mut self, compacted: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<SortedRun<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ManifestV1::VT_COMPACTED, compacted);
}
#[inline]
pub fn add_last_l0_clock_tick(&mut self, last_l0_clock_tick: i64) {
self.fbb_.push_slot::<i64>(ManifestV1::VT_LAST_L0_CLOCK_TICK, last_l0_clock_tick, 0);
}
#[inline]
pub fn add_checkpoints(&mut self, checkpoints: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<Checkpoint<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ManifestV1::VT_CHECKPOINTS, checkpoints);
}
#[inline]
pub fn add_last_l0_seq(&mut self, last_l0_seq: u64) {
self.fbb_.push_slot::<u64>(ManifestV1::VT_LAST_L0_SEQ, last_l0_seq, 0);
}
#[inline]
pub fn add_wal_object_store_uri(&mut self, wal_object_store_uri: flatbuffers::WIPOffset<&'b str>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ManifestV1::VT_WAL_OBJECT_STORE_URI, wal_object_store_uri);
}
#[inline]
pub fn add_recent_snapshot_min_seq(&mut self, recent_snapshot_min_seq: u64) {
self.fbb_.push_slot::<u64>(ManifestV1::VT_RECENT_SNAPSHOT_MIN_SEQ, recent_snapshot_min_seq, 0);
}
#[inline]
pub fn add_sequence_tracker(&mut self, sequence_tracker: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u8>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ManifestV1::VT_SEQUENCE_TRACKER, sequence_tracker);
}
// Starts a new table; pair with `finish`.
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> ManifestV1Builder<'a, 'b, A> {
let start = _fbb.start_table();
ManifestV1Builder {
fbb_: _fbb,
start_: start,
}
}
// Closes the table and runtime-checks the three required slots.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<ManifestV1<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, ManifestV1::VT_L0,"l0");
self.fbb_.required(o, ManifestV1::VT_COMPACTED,"compacted");
self.fbb_.required(o, ManifestV1::VT_CHECKPOINTS,"checkpoints");
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for ManifestV1<'_> {
    /// Renders the table as a struct, reading every field through its accessor
    /// in schema order.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("ManifestV1")
            .field("manifest_id", &self.manifest_id())
            .field("external_dbs", &self.external_dbs())
            .field("initialized", &self.initialized())
            .field("writer_epoch", &self.writer_epoch())
            .field("compactor_epoch", &self.compactor_epoch())
            .field("replay_after_wal_id", &self.replay_after_wal_id())
            .field("wal_id_last_seen", &self.wal_id_last_seen())
            .field("l0_last_compacted", &self.l0_last_compacted())
            .field("l0", &self.l0())
            .field("compacted", &self.compacted())
            .field("last_l0_clock_tick", &self.last_l0_clock_tick())
            .field("checkpoints", &self.checkpoints())
            .field("last_l0_seq", &self.last_l0_seq())
            .field("wal_object_store_uri", &self.wal_object_store_uri())
            .field("recent_snapshot_min_seq", &self.recent_snapshot_min_seq())
            .field("sequence_tracker", &self.sequence_tracker())
            .finish()
    }
}
// Marker type used by generated code to name offsets of this table kind.
pub enum WriterCheckpointOffset {}
/// Zero-copy view over a `WriterCheckpoint` flatbuffers table; also the
/// `WriterCheckpoint` variant payload of the `CheckpointMetadata` union.
#[derive(Copy, Clone, PartialEq)]
pub struct WriterCheckpoint<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for WriterCheckpoint<'a> {
type Inner = WriterCheckpoint<'a>;
// Safety contract: `loc` must point at a valid table of this type in `buf`.
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> WriterCheckpoint<'a> {
// Vtable slot offset of the single field.
pub const VT_EPOCH: flatbuffers::VOffsetT = 4;
// Safety: `table` must already be verified (or trusted) as this table type.
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
WriterCheckpoint { _tab: table }
}
// Convenience constructor for the single-field table.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args WriterCheckpointArgs
) -> flatbuffers::WIPOffset<WriterCheckpoint<'bldr>> {
let mut builder = WriterCheckpointBuilder::new(_fbb);
builder.add_epoch(args.epoch);
builder.finish()
}
// Scalar field with schema default 0.
#[inline]
pub fn epoch(&self) -> u64 {
unsafe { self._tab.get::<u64>(WriterCheckpoint::VT_EPOCH, Some(0)).unwrap()}
}
}
impl flatbuffers::Verifiable for WriterCheckpoint<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<u64>("epoch", Self::VT_EPOCH, false)?
.finish();
Ok(())
}
}
/// Arguments for `WriterCheckpoint::create`.
pub struct WriterCheckpointArgs {
pub epoch: u64,
}
/// Default arguments for `WriterCheckpoint::create`: `epoch` starts at 0,
/// matching the schema default the accessor falls back to.
//
// Fix: the generated impl carried an unused lifetime parameter
// (`impl<'a> Default for WriterCheckpointArgs`) even though
// `WriterCheckpointArgs` has no lifetime. The parameter is dropped here;
// behavior is unchanged (clippy: `extra_unused_lifetimes`).
impl Default for WriterCheckpointArgs {
    #[inline]
    fn default() -> Self {
        WriterCheckpointArgs {
            epoch: 0,
        }
    }
}
/// In-progress builder for a `WriterCheckpoint` table.
pub struct WriterCheckpointBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> WriterCheckpointBuilder<'a, 'b, A> {
// Scalar slot: elided from the table when it equals the default (0).
#[inline]
pub fn add_epoch(&mut self, epoch: u64) {
self.fbb_.push_slot::<u64>(WriterCheckpoint::VT_EPOCH, epoch, 0);
}
// Starts a new table; pair with `finish`.
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> WriterCheckpointBuilder<'a, 'b, A> {
let start = _fbb.start_table();
WriterCheckpointBuilder {
fbb_: _fbb,
start_: start,
}
}
// Closes the table; this table has no required fields to check.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<WriterCheckpoint<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for WriterCheckpoint<'_> {
    /// Renders the table as a struct, reading the field through its accessor.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("WriterCheckpoint")
            .field("epoch", &self.epoch())
            .finish()
    }
}
// Marker type used by generated code to name offsets of this table kind.
pub enum CheckpointOffset {}
/// Zero-copy view over a `Checkpoint` flatbuffers table, including its
/// `metadata` union (discriminated by `metadata_type`).
#[derive(Copy, Clone, PartialEq)]
pub struct Checkpoint<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for Checkpoint<'a> {
type Inner = Checkpoint<'a>;
// Safety contract: `loc` must point at a valid table of this type in `buf`.
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> Checkpoint<'a> {
// Vtable slot offsets, fixed by the schema's field order. The union takes
// two slots: the discriminant (`METADATA_TYPE`) and the payload (`METADATA`).
pub const VT_ID: flatbuffers::VOffsetT = 4;
pub const VT_MANIFEST_ID: flatbuffers::VOffsetT = 6;
pub const VT_CHECKPOINT_EXPIRE_TIME_S: flatbuffers::VOffsetT = 8;
pub const VT_CHECKPOINT_CREATE_TIME_S: flatbuffers::VOffsetT = 10;
pub const VT_METADATA_TYPE: flatbuffers::VOffsetT = 12;
pub const VT_METADATA: flatbuffers::VOffsetT = 14;
pub const VT_NAME: flatbuffers::VOffsetT = 16;
// Safety: `table` must already be verified (or trusted) as this table type.
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
Checkpoint { _tab: table }
}
// Convenience constructor; `add_*` order is generator-chosen and determines
// the serialized layout — do not reorder.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args CheckpointArgs<'args>
) -> flatbuffers::WIPOffset<Checkpoint<'bldr>> {
let mut builder = CheckpointBuilder::new(_fbb);
builder.add_manifest_id(args.manifest_id);
if let Some(x) = args.name { builder.add_name(x); }
if let Some(x) = args.metadata { builder.add_metadata(x); }
builder.add_checkpoint_create_time_s(args.checkpoint_create_time_s);
builder.add_checkpoint_expire_time_s(args.checkpoint_expire_time_s);
if let Some(x) = args.id { builder.add_id(x); }
builder.add_metadata_type(args.metadata_type);
builder.finish()
}
// `id` is a required field, hence the `unwrap`.
#[inline]
pub fn id(&self) -> Uuid<'a> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<Uuid>>(Checkpoint::VT_ID, None).unwrap()}
}
#[inline]
pub fn manifest_id(&self) -> u64 {
unsafe { self._tab.get::<u64>(Checkpoint::VT_MANIFEST_ID, Some(0)).unwrap()}
}
#[inline]
pub fn checkpoint_expire_time_s(&self) -> u32 {
unsafe { self._tab.get::<u32>(Checkpoint::VT_CHECKPOINT_EXPIRE_TIME_S, Some(0)).unwrap()}
}
#[inline]
pub fn checkpoint_create_time_s(&self) -> u32 {
unsafe { self._tab.get::<u32>(Checkpoint::VT_CHECKPOINT_CREATE_TIME_S, Some(0)).unwrap()}
}
// Union discriminant; defaults to `NONE` when the slot is absent.
#[inline]
pub fn metadata_type(&self) -> CheckpointMetadata {
unsafe { self._tab.get::<CheckpointMetadata>(Checkpoint::VT_METADATA_TYPE, Some(CheckpointMetadata::NONE)).unwrap()}
}
// Raw union payload; interpret via `metadata_as_*` after checking
// `metadata_type`.
#[inline]
pub fn metadata(&self) -> Option<flatbuffers::Table<'a>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Table<'a>>>(Checkpoint::VT_METADATA, None)}
}
#[inline]
pub fn name(&self) -> Option<&'a str> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(Checkpoint::VT_NAME, None)}
}
// Typed union accessor: returns the payload only when the discriminant says
// it is a `WriterCheckpoint`.
#[inline]
#[allow(non_snake_case)]
pub fn metadata_as_writer_checkpoint(&self) -> Option<WriterCheckpoint<'a>> {
if self.metadata_type() == CheckpointMetadata::WriterCheckpoint {
self.metadata().map(|t| {
// Safety: the discriminant guarantees the payload table's type.
unsafe { WriterCheckpoint::init_from_table(t) }
})
} else {
None
}
}
}
impl flatbuffers::Verifiable for Checkpoint<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
// The bool on each `visit_field` marks whether the field is required.
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<Uuid>>("id", Self::VT_ID, true)?
.visit_field::<u64>("manifest_id", Self::VT_MANIFEST_ID, false)?
.visit_field::<u32>("checkpoint_expire_time_s", Self::VT_CHECKPOINT_EXPIRE_TIME_S, false)?
.visit_field::<u32>("checkpoint_create_time_s", Self::VT_CHECKPOINT_CREATE_TIME_S, false)?
// Union verification: dispatch on the discriminant; unknown variants
// are accepted (forward compatibility).
.visit_union::<CheckpointMetadata, _>("metadata_type", Self::VT_METADATA_TYPE, "metadata", Self::VT_METADATA, false, |key, v, pos| {
match key {
CheckpointMetadata::WriterCheckpoint => v.verify_union_variant::<flatbuffers::ForwardsUOffset<WriterCheckpoint>>("CheckpointMetadata::WriterCheckpoint", pos),
_ => Ok(()),
}
})?
.visit_field::<flatbuffers::ForwardsUOffset<&str>>("name", Self::VT_NAME, false)?
.finish();
Ok(())
}
}
/// Arguments for `Checkpoint::create`; `metadata_type` and `metadata` must be
/// set consistently (discriminant plus matching union payload).
pub struct CheckpointArgs<'a> {
pub id: Option<flatbuffers::WIPOffset<Uuid<'a>>>,
pub manifest_id: u64,
pub checkpoint_expire_time_s: u32,
pub checkpoint_create_time_s: u32,
pub metadata_type: CheckpointMetadata,
pub metadata: Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>>,
pub name: Option<flatbuffers::WIPOffset<&'a str>>,
}
impl<'a> Default for CheckpointArgs<'a> {
#[inline]
fn default() -> Self {
// NOTE: `id` is required by the schema; the `None` default will trip the
// required-field check in `CheckpointBuilder::finish`.
CheckpointArgs {
id: None, manifest_id: 0,
checkpoint_expire_time_s: 0,
checkpoint_create_time_s: 0,
metadata_type: CheckpointMetadata::NONE,
metadata: None,
name: None,
}
}
}
/// In-progress builder for a `Checkpoint` table. Created by
/// `CheckpointBuilder::new`, populated with the `add_*` methods, and
/// consumed by `finish`, which yields the table's offset.
pub struct CheckpointBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
// Underlying builder the table is written into.
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
// Marker for the table started in `new`; finalized by `finish`.
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> CheckpointBuilder<'a, 'b, A> {
#[inline]
pub fn add_id(&mut self, id: flatbuffers::WIPOffset<Uuid<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Uuid>>(Checkpoint::VT_ID, id);
}
#[inline]
pub fn add_manifest_id(&mut self, manifest_id: u64) {
self.fbb_.push_slot::<u64>(Checkpoint::VT_MANIFEST_ID, manifest_id, 0);
}
#[inline]
pub fn add_checkpoint_expire_time_s(&mut self, checkpoint_expire_time_s: u32) {
self.fbb_.push_slot::<u32>(Checkpoint::VT_CHECKPOINT_EXPIRE_TIME_S, checkpoint_expire_time_s, 0);
}
#[inline]
pub fn add_checkpoint_create_time_s(&mut self, checkpoint_create_time_s: u32) {
self.fbb_.push_slot::<u32>(Checkpoint::VT_CHECKPOINT_CREATE_TIME_S, checkpoint_create_time_s, 0);
}
#[inline]
pub fn add_metadata_type(&mut self, metadata_type: CheckpointMetadata) {
self.fbb_.push_slot::<CheckpointMetadata>(Checkpoint::VT_METADATA_TYPE, metadata_type, CheckpointMetadata::NONE);
}
#[inline]
pub fn add_metadata(&mut self, metadata: flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Checkpoint::VT_METADATA, metadata);
}
#[inline]
pub fn add_name(&mut self, name: flatbuffers::WIPOffset<&'b str>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Checkpoint::VT_NAME, name);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> CheckpointBuilder<'a, 'b, A> {
let start = _fbb.start_table();
CheckpointBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<Checkpoint<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, Checkpoint::VT_ID,"id");
flatbuffers::WIPOffset::new(o.value())
}
}
/// Debug formatting that resolves the `metadata` union through its typed
/// accessor, flagging a mismatched discriminant instead of panicking.
impl core::fmt::Debug for Checkpoint<'_> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
let mut s = f.debug_struct("Checkpoint");
s.field("id", &self.id())
.field("manifest_id", &self.manifest_id())
.field("checkpoint_expire_time_s", &self.checkpoint_expire_time_s())
.field("checkpoint_create_time_s", &self.checkpoint_create_time_s())
.field("metadata_type", &self.metadata_type());
if self.metadata_type() == CheckpointMetadata::WriterCheckpoint {
match self.metadata_as_writer_checkpoint() {
Some(writer_checkpoint) => s.field("metadata", &writer_checkpoint),
// Discriminant said WriterCheckpoint but the payload didn't resolve.
None => s.field("metadata", &"InvalidFlatbuffer: Union discriminant does not match value."),
};
} else {
s.field("metadata", &None::<()>);
}
s.field("name", &self.name());
s.finish()
}
}