use core::mem;
use core::cmp::Ordering;
extern crate flatbuffers;
use self::flatbuffers::{EndianScalar, Follow};
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_BOUND_TYPE: i8 = 0;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MAX_BOUND_TYPE: i8 = 3;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_BOUND_TYPE: [BoundType; 4] = [
    BoundType::Unknown,
    BoundType::Unbounded,
    BoundType::Included,
    BoundType::Excluded,
];
/// Range-bound kind, stored on the wire as a single `i8` discriminant.
///
/// The inner value is public, so out-of-range discriminants are representable;
/// they are reported as `None` by [`BoundType::variant_name`].
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct BoundType(pub i8);
#[allow(non_upper_case_globals)]
impl BoundType {
    pub const Unknown: Self = Self(0);
    pub const Unbounded: Self = Self(1);
    pub const Included: Self = Self(2);
    pub const Excluded: Self = Self(3);
    pub const ENUM_MIN: i8 = 0;
    pub const ENUM_MAX: i8 = 3;
    pub const ENUM_VALUES: &'static [Self] =
        &[Self::Unknown, Self::Unbounded, Self::Included, Self::Excluded];
    /// Human-readable variant name, or `None` for an unrecognized discriminant.
    pub fn variant_name(self) -> Option<&'static str> {
        let name = match self.0 {
            0 => "Unknown",
            1 => "Unbounded",
            2 => "Included",
            3 => "Excluded",
            _ => return None,
        };
        Some(name)
    }
}
impl core::fmt::Debug for BoundType {
    /// Prints the variant name, or `<UNKNOWN n>` for unrecognized values.
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        match self.variant_name() {
            Some(name) => f.write_str(name),
            None => write!(f, "<UNKNOWN {:?}>", self.0),
        }
    }
}
// --- flatbuffers integration for BoundType (generated glue) ---
// Deserialization: read the i8 discriminant directly out of the buffer.
impl<'a> flatbuffers::Follow<'a> for BoundType {
type Inner = Self;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
// SAFETY contract inherited from read_scalar_at: `loc` must point at a
// valid i8 inside `buf` (flatbuffers `Follow` invariant).
let b = flatbuffers::read_scalar_at::<i8>(buf, loc);
Self(b)
}
}
// Serialization: write the single discriminant byte into `dst`.
impl flatbuffers::Push for BoundType {
type Output = BoundType;
#[inline]
unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
flatbuffers::emplace_scalar::<i8>(dst, self.0);
}
}
// Endianness conversion; i8 is one byte, so both directions are no-ops.
impl flatbuffers::EndianScalar for BoundType {
type Scalar = i8;
#[inline]
fn to_little_endian(self) -> i8 {
self.0.to_le()
}
#[inline]
#[allow(clippy::wrong_self_convention)]
fn from_little_endian(v: i8) -> Self {
let b = i8::from_le(v);
Self(b)
}
}
// Verification delegates to i8, so any readable byte passes; out-of-range
// discriminants surface later as `variant_name() == None`.
impl<'a> flatbuffers::Verifiable for BoundType {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
i8::run_verifier(v, pos)
}
}
// Marker: a vector of BoundType can be verified as a flat run of i8s.
impl flatbuffers::SimpleToVerifyInSlice for BoundType {}
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_COMPRESSION_FORMAT: i8 = 0;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MAX_COMPRESSION_FORMAT: i8 = 4;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_COMPRESSION_FORMAT: [CompressionFormat; 5] = [
    CompressionFormat::None,
    CompressionFormat::Snappy,
    CompressionFormat::Zlib,
    CompressionFormat::Lz4,
    CompressionFormat::Zstd,
];
/// Block-compression codec identifier, stored on the wire as an `i8`.
///
/// Unrecognized discriminants are representable and yield `None` from
/// [`CompressionFormat::variant_name`].
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct CompressionFormat(pub i8);
#[allow(non_upper_case_globals)]
impl CompressionFormat {
    pub const None: Self = Self(0);
    pub const Snappy: Self = Self(1);
    pub const Zlib: Self = Self(2);
    pub const Lz4: Self = Self(3);
    pub const Zstd: Self = Self(4);
    pub const ENUM_MIN: i8 = 0;
    pub const ENUM_MAX: i8 = 4;
    pub const ENUM_VALUES: &'static [Self] =
        &[Self::None, Self::Snappy, Self::Zlib, Self::Lz4, Self::Zstd];
    /// Human-readable variant name, or `None` for an unrecognized discriminant.
    pub fn variant_name(self) -> Option<&'static str> {
        let name = match self.0 {
            0 => "None",
            1 => "Snappy",
            2 => "Zlib",
            3 => "Lz4",
            4 => "Zstd",
            _ => return None,
        };
        Some(name)
    }
}
impl core::fmt::Debug for CompressionFormat {
    /// Prints the variant name, or `<UNKNOWN n>` for unrecognized values.
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        match self.variant_name() {
            Some(name) => f.write_str(name),
            None => write!(f, "<UNKNOWN {:?}>", self.0),
        }
    }
}
// --- flatbuffers integration for CompressionFormat (generated glue) ---
// Deserialization: read the i8 discriminant directly out of the buffer.
impl<'a> flatbuffers::Follow<'a> for CompressionFormat {
type Inner = Self;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
// SAFETY contract inherited from read_scalar_at: `loc` must point at a
// valid i8 inside `buf`.
let b = flatbuffers::read_scalar_at::<i8>(buf, loc);
Self(b)
}
}
// Serialization: write the single discriminant byte into `dst`.
impl flatbuffers::Push for CompressionFormat {
type Output = CompressionFormat;
#[inline]
unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
flatbuffers::emplace_scalar::<i8>(dst, self.0);
}
}
// Endianness conversion; i8 is one byte, so both directions are no-ops.
impl flatbuffers::EndianScalar for CompressionFormat {
type Scalar = i8;
#[inline]
fn to_little_endian(self) -> i8 {
self.0.to_le()
}
#[inline]
#[allow(clippy::wrong_self_convention)]
fn from_little_endian(v: i8) -> Self {
let b = i8::from_le(v);
Self(b)
}
}
// Verification delegates to i8, so any readable byte passes; unknown
// discriminants surface later as `variant_name() == None`.
impl<'a> flatbuffers::Verifiable for CompressionFormat {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
i8::run_verifier(v, pos)
}
}
// Marker: a vector of CompressionFormat can be verified as a flat run of i8s.
impl flatbuffers::SimpleToVerifyInSlice for CompressionFormat {}
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_SST_TYPE: i8 = 0;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MAX_SST_TYPE: i8 = 1;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_SST_TYPE: [SstType; 2] = [
    SstType::Compacted,
    SstType::Wal,
];
/// SST provenance kind (compacted table vs. WAL-derived), stored as an `i8`.
///
/// Unrecognized discriminants are representable and yield `None` from
/// [`SstType::variant_name`].
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct SstType(pub i8);
#[allow(non_upper_case_globals)]
impl SstType {
    pub const Compacted: Self = Self(0);
    pub const Wal: Self = Self(1);
    pub const ENUM_MIN: i8 = 0;
    pub const ENUM_MAX: i8 = 1;
    pub const ENUM_VALUES: &'static [Self] = &[Self::Compacted, Self::Wal];
    /// Human-readable variant name, or `None` for an unrecognized discriminant.
    pub fn variant_name(self) -> Option<&'static str> {
        let name = match self.0 {
            0 => "Compacted",
            1 => "Wal",
            _ => return None,
        };
        Some(name)
    }
}
impl core::fmt::Debug for SstType {
    /// Prints the variant name, or `<UNKNOWN n>` for unrecognized values.
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        match self.variant_name() {
            Some(name) => f.write_str(name),
            None => write!(f, "<UNKNOWN {:?}>", self.0),
        }
    }
}
// --- flatbuffers integration for SstType (generated glue) ---
// Deserialization: read the i8 discriminant directly out of the buffer.
impl<'a> flatbuffers::Follow<'a> for SstType {
type Inner = Self;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
// SAFETY contract inherited from read_scalar_at: `loc` must point at a
// valid i8 inside `buf`.
let b = flatbuffers::read_scalar_at::<i8>(buf, loc);
Self(b)
}
}
// Serialization: write the single discriminant byte into `dst`.
impl flatbuffers::Push for SstType {
type Output = SstType;
#[inline]
unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
flatbuffers::emplace_scalar::<i8>(dst, self.0);
}
}
// Endianness conversion; i8 is one byte, so both directions are no-ops.
impl flatbuffers::EndianScalar for SstType {
type Scalar = i8;
#[inline]
fn to_little_endian(self) -> i8 {
self.0.to_le()
}
#[inline]
#[allow(clippy::wrong_self_convention)]
fn from_little_endian(v: i8) -> Self {
let b = i8::from_le(v);
Self(b)
}
}
// Verification delegates to i8, so any readable byte passes.
impl<'a> flatbuffers::Verifiable for SstType {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
i8::run_verifier(v, pos)
}
}
// Marker: a vector of SstType can be verified as a flat run of i8s.
impl flatbuffers::SimpleToVerifyInSlice for SstType {}
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_COMPACTION_SPEC: u8 = 0;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MAX_COMPACTION_SPEC: u8 = 1;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_COMPACTION_SPEC: [CompactionSpec; 2] = [
    CompactionSpec::NONE,
    CompactionSpec::TieredCompactionSpec,
];
/// Union discriminant for the `CompactionSpec` flatbuffers union, stored as a `u8`.
///
/// `NONE` (0) means no union member is present. Unrecognized discriminants
/// are representable and yield `None` from [`CompactionSpec::variant_name`].
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct CompactionSpec(pub u8);
#[allow(non_upper_case_globals)]
impl CompactionSpec {
    pub const NONE: Self = Self(0);
    pub const TieredCompactionSpec: Self = Self(1);
    pub const ENUM_MIN: u8 = 0;
    pub const ENUM_MAX: u8 = 1;
    pub const ENUM_VALUES: &'static [Self] = &[Self::NONE, Self::TieredCompactionSpec];
    /// Human-readable variant name, or `None` for an unrecognized discriminant.
    pub fn variant_name(self) -> Option<&'static str> {
        let name = match self.0 {
            0 => "NONE",
            1 => "TieredCompactionSpec",
            _ => return None,
        };
        Some(name)
    }
}
impl core::fmt::Debug for CompactionSpec {
    /// Prints the variant name, or `<UNKNOWN n>` for unrecognized values.
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        match self.variant_name() {
            Some(name) => f.write_str(name),
            None => write!(f, "<UNKNOWN {:?}>", self.0),
        }
    }
}
// --- flatbuffers integration for CompactionSpec (generated glue) ---
// Deserialization: read the u8 union discriminant directly out of the buffer.
impl<'a> flatbuffers::Follow<'a> for CompactionSpec {
type Inner = Self;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
// SAFETY contract inherited from read_scalar_at: `loc` must point at a
// valid u8 inside `buf`.
let b = flatbuffers::read_scalar_at::<u8>(buf, loc);
Self(b)
}
}
// Serialization: write the single discriminant byte into `dst`.
impl flatbuffers::Push for CompactionSpec {
type Output = CompactionSpec;
#[inline]
unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
flatbuffers::emplace_scalar::<u8>(dst, self.0);
}
}
// Endianness conversion; u8 is one byte, so both directions are no-ops.
impl flatbuffers::EndianScalar for CompactionSpec {
type Scalar = u8;
#[inline]
fn to_little_endian(self) -> u8 {
self.0.to_le()
}
#[inline]
#[allow(clippy::wrong_self_convention)]
fn from_little_endian(v: u8) -> Self {
let b = u8::from_le(v);
Self(b)
}
}
// Verification delegates to u8, so any readable byte passes.
impl<'a> flatbuffers::Verifiable for CompactionSpec {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
u8::run_verifier(v, pos)
}
}
// Marker: a vector of CompactionSpec can be verified as a flat run of u8s.
impl flatbuffers::SimpleToVerifyInSlice for CompactionSpec {}
// Phantom marker type used as the WIPOffset target for CompactionSpec union tables.
pub struct CompactionSpecUnionTableOffset {}
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_COMPACTION_STATUS: i8 = 0;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MAX_COMPACTION_STATUS: i8 = 3;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_COMPACTION_STATUS: [CompactionStatus; 4] = [
    CompactionStatus::Submitted,
    CompactionStatus::Running,
    CompactionStatus::Completed,
    CompactionStatus::Failed,
];
/// Lifecycle state of a compaction job, stored on the wire as an `i8`.
///
/// Unrecognized discriminants are representable and yield `None` from
/// [`CompactionStatus::variant_name`].
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct CompactionStatus(pub i8);
#[allow(non_upper_case_globals)]
impl CompactionStatus {
    pub const Submitted: Self = Self(0);
    pub const Running: Self = Self(1);
    pub const Completed: Self = Self(2);
    pub const Failed: Self = Self(3);
    pub const ENUM_MIN: i8 = 0;
    pub const ENUM_MAX: i8 = 3;
    pub const ENUM_VALUES: &'static [Self] =
        &[Self::Submitted, Self::Running, Self::Completed, Self::Failed];
    /// Human-readable variant name, or `None` for an unrecognized discriminant.
    pub fn variant_name(self) -> Option<&'static str> {
        let name = match self.0 {
            0 => "Submitted",
            1 => "Running",
            2 => "Completed",
            3 => "Failed",
            _ => return None,
        };
        Some(name)
    }
}
impl core::fmt::Debug for CompactionStatus {
    /// Prints the variant name, or `<UNKNOWN n>` for unrecognized values.
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        match self.variant_name() {
            Some(name) => f.write_str(name),
            None => write!(f, "<UNKNOWN {:?}>", self.0),
        }
    }
}
// --- flatbuffers integration for CompactionStatus (generated glue) ---
// Deserialization: read the i8 discriminant directly out of the buffer.
impl<'a> flatbuffers::Follow<'a> for CompactionStatus {
type Inner = Self;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
// SAFETY contract inherited from read_scalar_at: `loc` must point at a
// valid i8 inside `buf`.
let b = flatbuffers::read_scalar_at::<i8>(buf, loc);
Self(b)
}
}
// Serialization: write the single discriminant byte into `dst`.
impl flatbuffers::Push for CompactionStatus {
type Output = CompactionStatus;
#[inline]
unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
flatbuffers::emplace_scalar::<i8>(dst, self.0);
}
}
// Endianness conversion; i8 is one byte, so both directions are no-ops.
impl flatbuffers::EndianScalar for CompactionStatus {
type Scalar = i8;
#[inline]
fn to_little_endian(self) -> i8 {
self.0.to_le()
}
#[inline]
#[allow(clippy::wrong_self_convention)]
fn from_little_endian(v: i8) -> Self {
let b = i8::from_le(v);
Self(b)
}
}
// Verification delegates to i8, so any readable byte passes.
impl<'a> flatbuffers::Verifiable for CompactionStatus {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
i8::run_verifier(v, pos)
}
}
// Marker: a vector of CompactionStatus can be verified as a flat run of i8s.
impl flatbuffers::SimpleToVerifyInSlice for CompactionStatus {}
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_CHECKPOINT_METADATA: u8 = 0;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MAX_CHECKPOINT_METADATA: u8 = 1;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_CHECKPOINT_METADATA: [CheckpointMetadata; 2] = [
    CheckpointMetadata::NONE,
    CheckpointMetadata::WriterCheckpoint,
];
/// Union discriminant for the `CheckpointMetadata` flatbuffers union, stored as a `u8`.
///
/// `NONE` (0) means no union member is present. Unrecognized discriminants
/// are representable and yield `None` from [`CheckpointMetadata::variant_name`].
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct CheckpointMetadata(pub u8);
#[allow(non_upper_case_globals)]
impl CheckpointMetadata {
    pub const NONE: Self = Self(0);
    pub const WriterCheckpoint: Self = Self(1);
    pub const ENUM_MIN: u8 = 0;
    pub const ENUM_MAX: u8 = 1;
    pub const ENUM_VALUES: &'static [Self] = &[Self::NONE, Self::WriterCheckpoint];
    /// Human-readable variant name, or `None` for an unrecognized discriminant.
    pub fn variant_name(self) -> Option<&'static str> {
        let name = match self.0 {
            0 => "NONE",
            1 => "WriterCheckpoint",
            _ => return None,
        };
        Some(name)
    }
}
impl core::fmt::Debug for CheckpointMetadata {
    /// Prints the variant name, or `<UNKNOWN n>` for unrecognized values.
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        match self.variant_name() {
            Some(name) => f.write_str(name),
            None => write!(f, "<UNKNOWN {:?}>", self.0),
        }
    }
}
// --- flatbuffers integration for CheckpointMetadata (generated glue) ---
// Deserialization: read the u8 union discriminant directly out of the buffer.
impl<'a> flatbuffers::Follow<'a> for CheckpointMetadata {
type Inner = Self;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
// SAFETY contract inherited from read_scalar_at: `loc` must point at a
// valid u8 inside `buf`.
let b = flatbuffers::read_scalar_at::<u8>(buf, loc);
Self(b)
}
}
// Serialization: write the single discriminant byte into `dst`.
impl flatbuffers::Push for CheckpointMetadata {
type Output = CheckpointMetadata;
#[inline]
unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
flatbuffers::emplace_scalar::<u8>(dst, self.0);
}
}
// Endianness conversion; u8 is one byte, so both directions are no-ops.
impl flatbuffers::EndianScalar for CheckpointMetadata {
type Scalar = u8;
#[inline]
fn to_little_endian(self) -> u8 {
self.0.to_le()
}
#[inline]
#[allow(clippy::wrong_self_convention)]
fn from_little_endian(v: u8) -> Self {
let b = u8::from_le(v);
Self(b)
}
}
// Verification delegates to u8, so any readable byte passes.
impl<'a> flatbuffers::Verifiable for CheckpointMetadata {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
u8::run_verifier(v, pos)
}
}
// Marker: a vector of CheckpointMetadata can be verified as a flat run of u8s.
impl flatbuffers::SimpleToVerifyInSlice for CheckpointMetadata {}
// Phantom marker type used as the WIPOffset target for CheckpointMetadata union tables.
pub struct CheckpointMetadataUnionTableOffset {}
// Phantom offset marker type for `Uuid` tables (never instantiated).
pub enum UuidOffset {}
// Zero-copy read view over a serialized `Uuid` table: two u64 halves.
#[derive(Copy, Clone, PartialEq)]
pub struct Uuid<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for Uuid<'a> {
type Inner = Uuid<'a>;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
// SAFETY: caller must guarantee `loc` is the start of a valid table in `buf`.
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> Uuid<'a> {
// Vtable slot offsets for the two fields.
pub const VT_HIGH: flatbuffers::VOffsetT = 4;
pub const VT_LOW: flatbuffers::VOffsetT = 6;
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
Uuid { _tab: table }
}
// Serializes `args` into `_fbb` and returns the finished table's offset.
// The add_* call order is the generator's choice — keep it unchanged so the
// produced buffer layout stays identical to flatc's output.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args UuidArgs
) -> flatbuffers::WIPOffset<Uuid<'bldr>> {
let mut builder = UuidBuilder::new(_fbb);
builder.add_low(args.low);
builder.add_high(args.high);
builder.finish()
}
#[inline]
pub fn high(&self) -> u64 {
// A default of Some(0) is supplied, so `get` always yields a value and the
// unwrap cannot panic even when the field is absent from the buffer.
unsafe { self._tab.get::<u64>(Uuid::VT_HIGH, Some(0)).unwrap()}
}
#[inline]
pub fn low(&self) -> u64 {
// Same infallible-unwrap pattern as high(): missing field reads as 0.
unsafe { self._tab.get::<u64>(Uuid::VT_LOW, Some(0)).unwrap()}
}
}
// Structural verification: both fields are optional (`false`) u64 scalars.
impl flatbuffers::Verifiable for Uuid<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<u64>("high", Self::VT_HIGH, false)?
.visit_field::<u64>("low", Self::VT_LOW, false)?
.finish();
Ok(())
}
}
/// Owned argument bundle consumed by [`Uuid::create`].
///
/// Both halves default to 0, matching the flatbuffers scalar default used by
/// the `high()`/`low()` accessors.
pub struct UuidArgs {
    pub high: u64,
    pub low: u64,
}
// Fix: the generated impl carried an unconstrained, unused lifetime parameter
// (`impl<'a> Default for UuidArgs`); `UuidArgs` has no lifetime, so it is dropped.
impl Default for UuidArgs {
    #[inline]
    fn default() -> Self {
        Self { high: 0, low: 0 }
    }
}
// Incremental builder for `Uuid` tables: holds the builder handle and the
// offset of the in-progress (unfinished) table.
pub struct UuidBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> UuidBuilder<'a, 'b, A> {
#[inline]
pub fn add_high(&mut self, high: u64) {
// push_slot elides the field entirely when the value equals the default (0).
self.fbb_.push_slot::<u64>(Uuid::VT_HIGH, high, 0);
}
#[inline]
pub fn add_low(&mut self, low: u64) {
self.fbb_.push_slot::<u64>(Uuid::VT_LOW, low, 0);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> UuidBuilder<'a, 'b, A> {
let start = _fbb.start_table();
UuidBuilder {
fbb_: _fbb,
start_: start,
}
}
// Closes the table and converts the offset to the typed Uuid offset.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<Uuid<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
// Debug goes through the accessors, so missing fields print as their default (0).
impl core::fmt::Debug for Uuid<'_> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
let mut ds = f.debug_struct("Uuid");
ds.field("high", &self.high());
ds.field("low", &self.low());
ds.finish()
}
}
// Phantom offset marker type for `Ulid` tables (never instantiated).
pub enum UlidOffset {}
// Zero-copy read view over a serialized `Ulid` table: two u64 halves.
#[derive(Copy, Clone, PartialEq)]
pub struct Ulid<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for Ulid<'a> {
type Inner = Ulid<'a>;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
// SAFETY: caller must guarantee `loc` is the start of a valid table in `buf`.
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> Ulid<'a> {
// Vtable slot offsets for the two fields.
pub const VT_HIGH: flatbuffers::VOffsetT = 4;
pub const VT_LOW: flatbuffers::VOffsetT = 6;
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
Ulid { _tab: table }
}
// Serializes `args` into `_fbb`; keep the generated add_* order unchanged so
// the produced buffer layout stays identical to flatc's output.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args UlidArgs
) -> flatbuffers::WIPOffset<Ulid<'bldr>> {
let mut builder = UlidBuilder::new(_fbb);
builder.add_low(args.low);
builder.add_high(args.high);
builder.finish()
}
#[inline]
pub fn high(&self) -> u64 {
// Some(0) default makes the unwrap infallible; a missing field reads as 0.
unsafe { self._tab.get::<u64>(Ulid::VT_HIGH, Some(0)).unwrap()}
}
#[inline]
pub fn low(&self) -> u64 {
unsafe { self._tab.get::<u64>(Ulid::VT_LOW, Some(0)).unwrap()}
}
}
// Structural verification: both fields are optional (`false`) u64 scalars.
impl flatbuffers::Verifiable for Ulid<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<u64>("high", Self::VT_HIGH, false)?
.visit_field::<u64>("low", Self::VT_LOW, false)?
.finish();
Ok(())
}
}
/// Owned argument bundle consumed by [`Ulid::create`].
///
/// Both halves default to 0, matching the flatbuffers scalar default used by
/// the `high()`/`low()` accessors.
pub struct UlidArgs {
    pub high: u64,
    pub low: u64,
}
// Fix: the generated impl carried an unconstrained, unused lifetime parameter
// (`impl<'a> Default for UlidArgs`); `UlidArgs` has no lifetime, so it is dropped.
impl Default for UlidArgs {
    #[inline]
    fn default() -> Self {
        Self { high: 0, low: 0 }
    }
}
// Incremental builder for `Ulid` tables: holds the builder handle and the
// offset of the in-progress (unfinished) table.
pub struct UlidBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> UlidBuilder<'a, 'b, A> {
#[inline]
pub fn add_high(&mut self, high: u64) {
// push_slot elides the field entirely when the value equals the default (0).
self.fbb_.push_slot::<u64>(Ulid::VT_HIGH, high, 0);
}
#[inline]
pub fn add_low(&mut self, low: u64) {
self.fbb_.push_slot::<u64>(Ulid::VT_LOW, low, 0);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> UlidBuilder<'a, 'b, A> {
let start = _fbb.start_table();
UlidBuilder {
fbb_: _fbb,
start_: start,
}
}
// Closes the table and converts the offset to the typed Ulid offset.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<Ulid<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
// Debug goes through the accessors, so missing fields print as their default (0).
impl core::fmt::Debug for Ulid<'_> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
let mut ds = f.debug_struct("Ulid");
ds.field("high", &self.high());
ds.field("low", &self.low());
ds.finish()
}
}
// Phantom offset marker type for `BytesBound` tables (never instantiated).
pub enum BytesBoundOffset {}
// Zero-copy read view over a serialized `BytesBound` table: an optional key
// byte vector plus a BoundType discriminant.
#[derive(Copy, Clone, PartialEq)]
pub struct BytesBound<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for BytesBound<'a> {
type Inner = BytesBound<'a>;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
// SAFETY: caller must guarantee `loc` is the start of a valid table in `buf`.
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> BytesBound<'a> {
// Vtable slot offsets for the two fields.
pub const VT_KEY: flatbuffers::VOffsetT = 4;
pub const VT_BOUND_TYPE: flatbuffers::VOffsetT = 6;
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
BytesBound { _tab: table }
}
// Serializes `args` into `_fbb`; the key is written only when present.
// Keep the generated add_* order unchanged so the produced buffer layout
// stays identical to flatc's output.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args BytesBoundArgs<'args>
) -> flatbuffers::WIPOffset<BytesBound<'bldr>> {
let mut builder = BytesBoundBuilder::new(_fbb);
if let Some(x) = args.key { builder.add_key(x); }
builder.add_bound_type(args.bound_type);
builder.finish()
}
#[inline]
pub fn key(&self) -> Option<flatbuffers::Vector<'a, u8>> {
// No default: an absent key field yields None.
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u8>>>(BytesBound::VT_KEY, None)}
}
#[inline]
pub fn bound_type(&self) -> BoundType {
// Some(Unknown) default makes the unwrap infallible; an absent field reads
// as BoundType::Unknown.
unsafe { self._tab.get::<BoundType>(BytesBound::VT_BOUND_TYPE, Some(BoundType::Unknown)).unwrap()}
}
}
// Structural verification: both fields are optional (`false`).
impl flatbuffers::Verifiable for BytesBound<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, u8>>>("key", Self::VT_KEY, false)?
.visit_field::<BoundType>("bound_type", Self::VT_BOUND_TYPE, false)?
.finish();
Ok(())
}
}
/// Owned argument bundle consumed by [`BytesBound::create`]; fields mirror the
/// table slots and default to the schema defaults (`None` key, `Unknown` bound).
pub struct BytesBoundArgs<'a> {
    pub key: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, u8>>>,
    pub bound_type: BoundType,
}
impl<'a> Default for BytesBoundArgs<'a> {
    #[inline]
    fn default() -> Self {
        Self {
            key: None,
            bound_type: BoundType::Unknown,
        }
    }
}
// Incremental builder for `BytesBound` tables.
pub struct BytesBoundBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> BytesBoundBuilder<'a, 'b, A> {
#[inline]
pub fn add_key(&mut self, key: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u8>>) {
// push_slot_always: offsets have no elision default, so the slot is always written.
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(BytesBound::VT_KEY, key);
}
#[inline]
pub fn add_bound_type(&mut self, bound_type: BoundType) {
// Elided when the value equals the default (Unknown).
self.fbb_.push_slot::<BoundType>(BytesBound::VT_BOUND_TYPE, bound_type, BoundType::Unknown);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> BytesBoundBuilder<'a, 'b, A> {
let start = _fbb.start_table();
BytesBoundBuilder {
fbb_: _fbb,
start_: start,
}
}
// Closes the table and converts the offset to the typed BytesBound offset.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<BytesBound<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
// Debug goes through the accessors (key may print as None).
impl core::fmt::Debug for BytesBound<'_> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
let mut ds = f.debug_struct("BytesBound");
ds.field("key", &self.key());
ds.field("bound_type", &self.bound_type());
ds.finish()
}
}
// Phantom offset marker type for `BytesRange` tables (never instantiated).
pub enum BytesRangeOffset {}
// Zero-copy read view over a serialized `BytesRange` table: two required
// BytesBound sub-tables (start and end).
#[derive(Copy, Clone, PartialEq)]
pub struct BytesRange<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for BytesRange<'a> {
type Inner = BytesRange<'a>;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
// SAFETY: caller must guarantee `loc` is the start of a valid table in `buf`.
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> BytesRange<'a> {
// Vtable slot offsets for the two fields.
pub const VT_START_BOUND: flatbuffers::VOffsetT = 4;
pub const VT_END_BOUND: flatbuffers::VOffsetT = 6;
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
BytesRange { _tab: table }
}
// Serializes `args` into `_fbb`. Both bounds are declared required; the
// builder's finish() asserts their presence. Keep the generated add_* order
// unchanged so the produced buffer layout stays identical to flatc's output.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args BytesRangeArgs<'args>
) -> flatbuffers::WIPOffset<BytesRange<'bldr>> {
let mut builder = BytesRangeBuilder::new(_fbb);
if let Some(x) = args.end_bound { builder.add_end_bound(x); }
if let Some(x) = args.start_bound { builder.add_start_bound(x); }
builder.finish()
}
#[inline]
pub fn start_bound(&self) -> BytesBound<'a> {
// Required field: unwrap relies on the buffer having been built/verified with
// the field present; it panics on a malformed buffer that omits it.
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<BytesBound>>(BytesRange::VT_START_BOUND, None).unwrap()}
}
#[inline]
pub fn end_bound(&self) -> BytesBound<'a> {
// Required field: same panic caveat as start_bound().
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<BytesBound>>(BytesRange::VT_END_BOUND, None).unwrap()}
}
}
// Structural verification: both bound fields are required (`true`).
impl flatbuffers::Verifiable for BytesRange<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<BytesBound>>("start_bound", Self::VT_START_BOUND, true)?
.visit_field::<flatbuffers::ForwardsUOffset<BytesBound>>("end_bound", Self::VT_END_BOUND, true)?
.finish();
Ok(())
}
}
/// Owned argument bundle consumed by [`BytesRange::create`].
///
/// Both bounds default to `None`, but the schema marks them required, so
/// `create` with defaulted args will fail the builder's required-field check.
pub struct BytesRangeArgs<'a> {
    pub start_bound: Option<flatbuffers::WIPOffset<BytesBound<'a>>>,
    pub end_bound: Option<flatbuffers::WIPOffset<BytesBound<'a>>>,
}
impl<'a> Default for BytesRangeArgs<'a> {
    #[inline]
    fn default() -> Self {
        Self {
            start_bound: None,
            end_bound: None,
        }
    }
}
// Incremental builder for `BytesRange` tables.
pub struct BytesRangeBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> BytesRangeBuilder<'a, 'b, A> {
#[inline]
pub fn add_start_bound(&mut self, start_bound: flatbuffers::WIPOffset<BytesBound<'b >>) {
// push_slot_always: offsets have no elision default, so the slot is always written.
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<BytesBound>>(BytesRange::VT_START_BOUND, start_bound);
}
#[inline]
pub fn add_end_bound(&mut self, end_bound: flatbuffers::WIPOffset<BytesBound<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<BytesBound>>(BytesRange::VT_END_BOUND, end_bound);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> BytesRangeBuilder<'a, 'b, A> {
let start = _fbb.start_table();
BytesRangeBuilder {
fbb_: _fbb,
start_: start,
}
}
// Closes the table; `required` enforces that both bounds were added before
// returning the typed offset.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<BytesRange<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, BytesRange::VT_START_BOUND,"start_bound");
self.fbb_.required(o, BytesRange::VT_END_BOUND,"end_bound");
flatbuffers::WIPOffset::new(o.value())
}
}
// Debug goes through the accessors; panics on a buffer missing a required bound.
impl core::fmt::Debug for BytesRange<'_> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
let mut ds = f.debug_struct("BytesRange");
ds.field("start_bound", &self.start_bound());
ds.field("end_bound", &self.end_bound());
ds.finish()
}
}
// Phantom offset marker type for `SsTableInfo` tables (never instantiated).
pub enum SsTableInfoOffset {}
// Zero-copy read view over a serialized `SsTableInfo` table: key range hints
// (first/last entry), index/filter/stats block locations, codec and SST kind.
#[derive(Copy, Clone, PartialEq)]
pub struct SsTableInfo<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for SsTableInfo<'a> {
type Inner = SsTableInfo<'a>;
#[inline]
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
// SAFETY: caller must guarantee `loc` is the start of a valid table in `buf`.
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> SsTableInfo<'a> {
// Vtable slot offsets, in schema declaration order.
pub const VT_FIRST_ENTRY: flatbuffers::VOffsetT = 4;
pub const VT_INDEX_OFFSET: flatbuffers::VOffsetT = 6;
pub const VT_INDEX_LEN: flatbuffers::VOffsetT = 8;
pub const VT_FILTER_OFFSET: flatbuffers::VOffsetT = 10;
pub const VT_FILTER_LEN: flatbuffers::VOffsetT = 12;
pub const VT_COMPRESSION_FORMAT: flatbuffers::VOffsetT = 14;
pub const VT_SST_TYPE: flatbuffers::VOffsetT = 16;
pub const VT_LAST_ENTRY: flatbuffers::VOffsetT = 18;
pub const VT_STATS_OFFSET: flatbuffers::VOffsetT = 20;
pub const VT_STATS_LEN: flatbuffers::VOffsetT = 22;
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
SsTableInfo { _tab: table }
}
// Serializes `args` into `_fbb`. The add_* call order is the generator's
// (u64 scalars first, then vector offsets, then the one-byte enums) — keep
// it unchanged so the produced buffer layout matches flatc's output.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args SsTableInfoArgs<'args>
) -> flatbuffers::WIPOffset<SsTableInfo<'bldr>> {
let mut builder = SsTableInfoBuilder::new(_fbb);
builder.add_stats_len(args.stats_len);
builder.add_stats_offset(args.stats_offset);
builder.add_filter_len(args.filter_len);
builder.add_filter_offset(args.filter_offset);
builder.add_index_len(args.index_len);
builder.add_index_offset(args.index_offset);
if let Some(x) = args.last_entry { builder.add_last_entry(x); }
if let Some(x) = args.first_entry { builder.add_first_entry(x); }
builder.add_sst_type(args.sst_type);
builder.add_compression_format(args.compression_format);
builder.finish()
}
#[inline]
pub fn first_entry(&self) -> Option<flatbuffers::Vector<'a, u8>> {
// Optional byte vector: absent field yields None.
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u8>>>(SsTableInfo::VT_FIRST_ENTRY, None)}
}
// Scalar accessors below supply Some(0) as the default, so the unwrap is
// infallible and a missing field reads as 0.
#[inline]
pub fn index_offset(&self) -> u64 {
unsafe { self._tab.get::<u64>(SsTableInfo::VT_INDEX_OFFSET, Some(0)).unwrap()}
}
#[inline]
pub fn index_len(&self) -> u64 {
unsafe { self._tab.get::<u64>(SsTableInfo::VT_INDEX_LEN, Some(0)).unwrap()}
}
#[inline]
pub fn filter_offset(&self) -> u64 {
unsafe { self._tab.get::<u64>(SsTableInfo::VT_FILTER_OFFSET, Some(0)).unwrap()}
}
#[inline]
pub fn filter_len(&self) -> u64 {
unsafe { self._tab.get::<u64>(SsTableInfo::VT_FILTER_LEN, Some(0)).unwrap()}
}
#[inline]
pub fn compression_format(&self) -> CompressionFormat {
// Missing field reads as the schema default, CompressionFormat::None.
unsafe { self._tab.get::<CompressionFormat>(SsTableInfo::VT_COMPRESSION_FORMAT, Some(CompressionFormat::None)).unwrap()}
}
#[inline]
pub fn sst_type(&self) -> SstType {
// Missing field reads as the schema default, SstType::Compacted.
unsafe { self._tab.get::<SstType>(SsTableInfo::VT_SST_TYPE, Some(SstType::Compacted)).unwrap()}
}
#[inline]
pub fn last_entry(&self) -> Option<flatbuffers::Vector<'a, u8>> {
// Optional byte vector: absent field yields None.
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u8>>>(SsTableInfo::VT_LAST_ENTRY, None)}
}
#[inline]
pub fn stats_offset(&self) -> u64 {
unsafe { self._tab.get::<u64>(SsTableInfo::VT_STATS_OFFSET, Some(0)).unwrap()}
}
#[inline]
pub fn stats_len(&self) -> u64 {
unsafe { self._tab.get::<u64>(SsTableInfo::VT_STATS_LEN, Some(0)).unwrap()}
}
}
// Structural verification: every field is optional (`false`).
impl flatbuffers::Verifiable for SsTableInfo<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, u8>>>("first_entry", Self::VT_FIRST_ENTRY, false)?
.visit_field::<u64>("index_offset", Self::VT_INDEX_OFFSET, false)?
.visit_field::<u64>("index_len", Self::VT_INDEX_LEN, false)?
.visit_field::<u64>("filter_offset", Self::VT_FILTER_OFFSET, false)?
.visit_field::<u64>("filter_len", Self::VT_FILTER_LEN, false)?
.visit_field::<CompressionFormat>("compression_format", Self::VT_COMPRESSION_FORMAT, false)?
.visit_field::<SstType>("sst_type", Self::VT_SST_TYPE, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, u8>>>("last_entry", Self::VT_LAST_ENTRY, false)?
.visit_field::<u64>("stats_offset", Self::VT_STATS_OFFSET, false)?
.visit_field::<u64>("stats_len", Self::VT_STATS_LEN, false)?
.finish();
Ok(())
}
}
/// Owned argument bundle consumed by [`SsTableInfo::create`]; every field
/// mirrors one table slot and defaults to its flatbuffers schema default.
pub struct SsTableInfoArgs<'a> {
    pub first_entry: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, u8>>>,
    pub index_offset: u64,
    pub index_len: u64,
    pub filter_offset: u64,
    pub filter_len: u64,
    pub compression_format: CompressionFormat,
    pub sst_type: SstType,
    pub last_entry: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, u8>>>,
    pub stats_offset: u64,
    pub stats_len: u64,
}
impl<'a> Default for SsTableInfoArgs<'a> {
    #[inline]
    fn default() -> Self {
        Self {
            first_entry: None,
            index_offset: 0,
            index_len: 0,
            filter_offset: 0,
            filter_len: 0,
            compression_format: CompressionFormat::None,
            sst_type: SstType::Compacted,
            last_entry: None,
            stats_offset: 0,
            stats_len: 0,
        }
    }
}
/// Low-level builder that writes `SsTableInfo` fields into an in-progress
/// table inside `fbb_`. `push_slot` skips a field when it equals its default;
/// `push_slot_always` writes unconditionally (used for offset-valued fields).
pub struct SsTableInfoBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> SsTableInfoBuilder<'a, 'b, A> {
#[inline]
pub fn add_first_entry(&mut self, first_entry: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u8>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(SsTableInfo::VT_FIRST_ENTRY, first_entry);
}
#[inline]
pub fn add_index_offset(&mut self, index_offset: u64) {
self.fbb_.push_slot::<u64>(SsTableInfo::VT_INDEX_OFFSET, index_offset, 0);
}
#[inline]
pub fn add_index_len(&mut self, index_len: u64) {
self.fbb_.push_slot::<u64>(SsTableInfo::VT_INDEX_LEN, index_len, 0);
}
#[inline]
pub fn add_filter_offset(&mut self, filter_offset: u64) {
self.fbb_.push_slot::<u64>(SsTableInfo::VT_FILTER_OFFSET, filter_offset, 0);
}
#[inline]
pub fn add_filter_len(&mut self, filter_len: u64) {
self.fbb_.push_slot::<u64>(SsTableInfo::VT_FILTER_LEN, filter_len, 0);
}
#[inline]
// Enum fields are elided when equal to their schema defaults below.
pub fn add_compression_format(&mut self, compression_format: CompressionFormat) {
self.fbb_.push_slot::<CompressionFormat>(SsTableInfo::VT_COMPRESSION_FORMAT, compression_format, CompressionFormat::None);
}
#[inline]
pub fn add_sst_type(&mut self, sst_type: SstType) {
self.fbb_.push_slot::<SstType>(SsTableInfo::VT_SST_TYPE, sst_type, SstType::Compacted);
}
#[inline]
pub fn add_last_entry(&mut self, last_entry: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u8>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(SsTableInfo::VT_LAST_ENTRY, last_entry);
}
#[inline]
pub fn add_stats_offset(&mut self, stats_offset: u64) {
self.fbb_.push_slot::<u64>(SsTableInfo::VT_STATS_OFFSET, stats_offset, 0);
}
#[inline]
pub fn add_stats_len(&mut self, stats_len: u64) {
self.fbb_.push_slot::<u64>(SsTableInfo::VT_STATS_LEN, stats_len, 0);
}
#[inline]
/// Starts a new table in `_fbb`; must be paired with `finish`.
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> SsTableInfoBuilder<'a, 'b, A> {
let start = _fbb.start_table();
SsTableInfoBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
/// Ends the table and returns a typed offset to it.
pub fn finish(self) -> flatbuffers::WIPOffset<SsTableInfo<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for SsTableInfo<'_> {
    /// Renders every field via the lazy accessors in declaration order.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("SsTableInfo")
            .field("first_entry", &self.first_entry())
            .field("index_offset", &self.index_offset())
            .field("index_len", &self.index_len())
            .field("filter_offset", &self.filter_offset())
            .field("filter_len", &self.filter_len())
            .field("compression_format", &self.compression_format())
            .field("sst_type", &self.sst_type())
            .field("last_entry", &self.last_entry())
            .field("stats_offset", &self.stats_offset())
            .field("stats_len", &self.stats_len())
            .finish()
    }
}
/// Marker type parameter for `WIPOffset`s pointing at a `BlockStats` table.
pub enum BlockStatsOffset {}
#[derive(Copy, Clone, PartialEq)]
/// Zero-copy read-only view over a serialized `BlockStats` table.
pub struct BlockStats<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for BlockStats<'a> {
type Inner = BlockStats<'a>;
#[inline]
// SAFETY contract: caller guarantees `loc` is a valid table position in `buf`.
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> BlockStats<'a> {
// Vtable slot indices for each field (first field starts at slot 4).
pub const VT_NUM_PUTS: flatbuffers::VOffsetT = 4;
pub const VT_NUM_DELETES: flatbuffers::VOffsetT = 6;
pub const VT_NUM_MERGES: flatbuffers::VOffsetT = 8;
#[inline]
/// Wraps an already-located table. Caller must guarantee `table` points at a
/// valid `BlockStats` (hence `unsafe`).
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
BlockStats { _tab: table }
}
#[allow(unused_mut)]
/// Serializes a `BlockStats` table from `args` into `_fbb` and returns its offset.
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args BlockStatsArgs
) -> flatbuffers::WIPOffset<BlockStats<'bldr>> {
let mut builder = BlockStatsBuilder::new(_fbb);
builder.add_num_merges(args.num_merges);
builder.add_num_deletes(args.num_deletes);
builder.add_num_puts(args.num_puts);
builder.finish()
}
#[inline]
/// Count of put records in the block (0 when absent).
pub fn num_puts(&self) -> u16 {
unsafe { self._tab.get::<u16>(BlockStats::VT_NUM_PUTS, Some(0)).unwrap()}
}
#[inline]
/// Count of delete records in the block (0 when absent).
pub fn num_deletes(&self) -> u16 {
unsafe { self._tab.get::<u16>(BlockStats::VT_NUM_DELETES, Some(0)).unwrap()}
}
#[inline]
/// Count of merge records in the block (0 when absent).
pub fn num_merges(&self) -> u16 {
unsafe { self._tab.get::<u16>(BlockStats::VT_NUM_MERGES, Some(0)).unwrap()}
}
}
/// Structural verifier for `BlockStats`; all three scalar fields are optional.
impl flatbuffers::Verifiable for BlockStats<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<u16>("num_puts", Self::VT_NUM_PUTS, false)?
.visit_field::<u16>("num_deletes", Self::VT_NUM_DELETES, false)?
.visit_field::<u16>("num_merges", Self::VT_NUM_MERGES, false)?
.finish();
Ok(())
}
}
/// Argument bundle for `BlockStats::create`; plain scalar counters only.
pub struct BlockStatsArgs {
pub num_puts: u16,
pub num_deletes: u16,
pub num_merges: u16,
}
// NOTE: the impl previously declared a lifetime parameter `'a` that
// `BlockStatsArgs` (which has no lifetime) never uses; rustc's
// `unused_lifetimes` lint flags it, so it is dropped here.
impl Default for BlockStatsArgs {
    /// All counters default to 0, matching the schema defaults used by the
    /// builder's `push_slot` calls.
    #[inline]
    fn default() -> Self {
        BlockStatsArgs {
            num_puts: 0,
            num_deletes: 0,
            num_merges: 0,
        }
    }
}
/// Low-level builder writing `BlockStats` fields; `push_slot` elides a field
/// when it equals the default (0).
pub struct BlockStatsBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> BlockStatsBuilder<'a, 'b, A> {
#[inline]
pub fn add_num_puts(&mut self, num_puts: u16) {
self.fbb_.push_slot::<u16>(BlockStats::VT_NUM_PUTS, num_puts, 0);
}
#[inline]
pub fn add_num_deletes(&mut self, num_deletes: u16) {
self.fbb_.push_slot::<u16>(BlockStats::VT_NUM_DELETES, num_deletes, 0);
}
#[inline]
pub fn add_num_merges(&mut self, num_merges: u16) {
self.fbb_.push_slot::<u16>(BlockStats::VT_NUM_MERGES, num_merges, 0);
}
#[inline]
/// Starts a new table; must be paired with `finish`.
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> BlockStatsBuilder<'a, 'b, A> {
let start = _fbb.start_table();
BlockStatsBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
/// Ends the table and returns a typed offset to it.
pub fn finish(self) -> flatbuffers::WIPOffset<BlockStats<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for BlockStats<'_> {
    /// Renders the three counters via the lazy accessors.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("BlockStats")
            .field("num_puts", &self.num_puts())
            .field("num_deletes", &self.num_deletes())
            .field("num_merges", &self.num_merges())
            .finish()
    }
}
/// Marker type parameter for `WIPOffset`s pointing at an `SstStats` table.
pub enum SstStatsOffset {}
#[derive(Copy, Clone, PartialEq)]
/// Zero-copy read-only view over a serialized `SstStats` table.
pub struct SstStats<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for SstStats<'a> {
type Inner = SstStats<'a>;
#[inline]
// SAFETY contract: caller guarantees `loc` is a valid table position in `buf`.
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> SstStats<'a> {
// Vtable slot indices for each field (first field starts at slot 4).
pub const VT_NUM_PUTS: flatbuffers::VOffsetT = 4;
pub const VT_NUM_DELETES: flatbuffers::VOffsetT = 6;
pub const VT_NUM_MERGES: flatbuffers::VOffsetT = 8;
pub const VT_RAW_KEY_SIZE: flatbuffers::VOffsetT = 10;
pub const VT_RAW_VAL_SIZE: flatbuffers::VOffsetT = 12;
pub const VT_BLOCK_STATS: flatbuffers::VOffsetT = 14;
#[inline]
/// Wraps an already-located table. Caller must guarantee `table` points at a
/// valid `SstStats` (hence `unsafe`).
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
SstStats { _tab: table }
}
#[allow(unused_mut)]
/// Serializes an `SstStats` table from `args` into `_fbb` and returns its
/// offset. Scalar fields are pushed first, then the optional vector offset.
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args SstStatsArgs<'args>
) -> flatbuffers::WIPOffset<SstStats<'bldr>> {
let mut builder = SstStatsBuilder::new(_fbb);
builder.add_raw_val_size(args.raw_val_size);
builder.add_raw_key_size(args.raw_key_size);
builder.add_num_merges(args.num_merges);
builder.add_num_deletes(args.num_deletes);
builder.add_num_puts(args.num_puts);
if let Some(x) = args.block_stats { builder.add_block_stats(x); }
builder.finish()
}
#[inline]
/// Total put records across the SST (0 when absent).
pub fn num_puts(&self) -> u64 {
unsafe { self._tab.get::<u64>(SstStats::VT_NUM_PUTS, Some(0)).unwrap()}
}
#[inline]
/// Total delete records across the SST (0 when absent).
pub fn num_deletes(&self) -> u64 {
unsafe { self._tab.get::<u64>(SstStats::VT_NUM_DELETES, Some(0)).unwrap()}
}
#[inline]
/// Total merge records across the SST (0 when absent).
pub fn num_merges(&self) -> u64 {
unsafe { self._tab.get::<u64>(SstStats::VT_NUM_MERGES, Some(0)).unwrap()}
}
#[inline]
/// `raw_key_size` scalar (0 when absent).
pub fn raw_key_size(&self) -> u64 {
unsafe { self._tab.get::<u64>(SstStats::VT_RAW_KEY_SIZE, Some(0)).unwrap()}
}
#[inline]
/// `raw_val_size` scalar (0 when absent).
pub fn raw_val_size(&self) -> u64 {
unsafe { self._tab.get::<u64>(SstStats::VT_RAW_VAL_SIZE, Some(0)).unwrap()}
}
#[inline]
/// Optional per-block statistics vector; `None` when the field is absent.
pub fn block_stats(&self) -> Option<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<BlockStats<'a>>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<BlockStats>>>>(SstStats::VT_BLOCK_STATS, None)}
}
}
/// Structural verifier for `SstStats`; every field is optional.
impl flatbuffers::Verifiable for SstStats<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<u64>("num_puts", Self::VT_NUM_PUTS, false)?
.visit_field::<u64>("num_deletes", Self::VT_NUM_DELETES, false)?
.visit_field::<u64>("num_merges", Self::VT_NUM_MERGES, false)?
.visit_field::<u64>("raw_key_size", Self::VT_RAW_KEY_SIZE, false)?
.visit_field::<u64>("raw_val_size", Self::VT_RAW_VAL_SIZE, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<BlockStats>>>>("block_stats", Self::VT_BLOCK_STATS, false)?
.finish();
Ok(())
}
}
/// Argument bundle for `SstStats::create`; `block_stats` is an optional
/// offset to a previously serialized vector of `BlockStats` tables.
pub struct SstStatsArgs<'a> {
pub num_puts: u64,
pub num_deletes: u64,
pub num_merges: u64,
pub raw_key_size: u64,
pub raw_val_size: u64,
pub block_stats: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<BlockStats<'a>>>>>,
}
impl<'a> Default for SstStatsArgs<'a> {
#[inline]
fn default() -> Self {
SstStatsArgs {
num_puts: 0,
num_deletes: 0,
num_merges: 0,
raw_key_size: 0,
raw_val_size: 0,
block_stats: None,
}
}
}
/// Low-level builder writing `SstStats` fields; scalar slots are elided when
/// equal to 0, the vector offset is always written when provided.
pub struct SstStatsBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> SstStatsBuilder<'a, 'b, A> {
#[inline]
pub fn add_num_puts(&mut self, num_puts: u64) {
self.fbb_.push_slot::<u64>(SstStats::VT_NUM_PUTS, num_puts, 0);
}
#[inline]
pub fn add_num_deletes(&mut self, num_deletes: u64) {
self.fbb_.push_slot::<u64>(SstStats::VT_NUM_DELETES, num_deletes, 0);
}
#[inline]
pub fn add_num_merges(&mut self, num_merges: u64) {
self.fbb_.push_slot::<u64>(SstStats::VT_NUM_MERGES, num_merges, 0);
}
#[inline]
pub fn add_raw_key_size(&mut self, raw_key_size: u64) {
self.fbb_.push_slot::<u64>(SstStats::VT_RAW_KEY_SIZE, raw_key_size, 0);
}
#[inline]
pub fn add_raw_val_size(&mut self, raw_val_size: u64) {
self.fbb_.push_slot::<u64>(SstStats::VT_RAW_VAL_SIZE, raw_val_size, 0);
}
#[inline]
pub fn add_block_stats(&mut self, block_stats: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<BlockStats<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(SstStats::VT_BLOCK_STATS, block_stats);
}
#[inline]
/// Starts a new table; must be paired with `finish`.
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> SstStatsBuilder<'a, 'b, A> {
let start = _fbb.start_table();
SstStatsBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
/// Ends the table and returns a typed offset to it.
pub fn finish(self) -> flatbuffers::WIPOffset<SstStats<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for SstStats<'_> {
    /// Renders every field via the lazy accessors in declaration order.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("SstStats")
            .field("num_puts", &self.num_puts())
            .field("num_deletes", &self.num_deletes())
            .field("num_merges", &self.num_merges())
            .field("raw_key_size", &self.raw_key_size())
            .field("raw_val_size", &self.raw_val_size())
            .field("block_stats", &self.block_stats())
            .finish()
    }
}
/// Marker type parameter for `WIPOffset`s pointing at a `BlockMeta` table.
pub enum BlockMetaOffset {}
#[derive(Copy, Clone, PartialEq)]
/// Zero-copy read-only view over a serialized `BlockMeta` table.
pub struct BlockMeta<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for BlockMeta<'a> {
type Inner = BlockMeta<'a>;
#[inline]
// SAFETY contract: caller guarantees `loc` is a valid table position in `buf`.
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> BlockMeta<'a> {
// Vtable slot indices for each field.
pub const VT_OFFSET: flatbuffers::VOffsetT = 4;
pub const VT_FIRST_KEY: flatbuffers::VOffsetT = 6;
#[inline]
/// Wraps an already-located table. Caller must guarantee `table` points at a
/// valid `BlockMeta` (hence `unsafe`).
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
BlockMeta { _tab: table }
}
#[allow(unused_mut)]
/// Serializes a `BlockMeta` table from `args`. `first_key` is a required
/// field: the builder's `finish` asserts it was set.
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args BlockMetaArgs<'args>
) -> flatbuffers::WIPOffset<BlockMeta<'bldr>> {
let mut builder = BlockMetaBuilder::new(_fbb);
builder.add_offset(args.offset);
if let Some(x) = args.first_key { builder.add_first_key(x); }
builder.finish()
}
#[inline]
/// `offset` scalar (0 when absent).
pub fn offset(&self) -> u64 {
unsafe { self._tab.get::<u64>(BlockMeta::VT_OFFSET, Some(0)).unwrap()}
}
#[inline]
/// Required `first_key` bytes. The `unwrap` relies on the field being marked
/// required in the verifier, so a verified buffer always contains it.
pub fn first_key(&self) -> flatbuffers::Vector<'a, u8> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u8>>>(BlockMeta::VT_FIRST_KEY, None).unwrap()}
}
}
/// Structural verifier for `BlockMeta`; note `first_key` passes `true`
/// (required), so verification fails if it is missing.
impl flatbuffers::Verifiable for BlockMeta<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<u64>("offset", Self::VT_OFFSET, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, u8>>>("first_key", Self::VT_FIRST_KEY, true)?
.finish();
Ok(())
}
}
/// Argument bundle for `BlockMeta::create`; `first_key` must ultimately be
/// provided since the builder enforces it as required.
pub struct BlockMetaArgs<'a> {
pub offset: u64,
pub first_key: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, u8>>>,
}
impl<'a> Default for BlockMetaArgs<'a> {
#[inline]
fn default() -> Self {
BlockMetaArgs {
offset: 0,
first_key: None, }
}
}
/// Low-level builder writing `BlockMeta` fields; `finish` asserts the
/// required `first_key` slot was populated.
pub struct BlockMetaBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> BlockMetaBuilder<'a, 'b, A> {
#[inline]
pub fn add_offset(&mut self, offset: u64) {
self.fbb_.push_slot::<u64>(BlockMeta::VT_OFFSET, offset, 0);
}
#[inline]
pub fn add_first_key(&mut self, first_key: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u8>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(BlockMeta::VT_FIRST_KEY, first_key);
}
#[inline]
/// Starts a new table; must be paired with `finish`.
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> BlockMetaBuilder<'a, 'b, A> {
let start = _fbb.start_table();
BlockMetaBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
/// Ends the table; panics (via `required`) if `first_key` was never added.
pub fn finish(self) -> flatbuffers::WIPOffset<BlockMeta<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, BlockMeta::VT_FIRST_KEY,"first_key");
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for BlockMeta<'_> {
    /// Renders both fields via the lazy accessors.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("BlockMeta")
            .field("offset", &self.offset())
            .field("first_key", &self.first_key())
            .finish()
    }
}
/// Marker type parameter for `WIPOffset`s pointing at an `SsTableIndex` table.
pub enum SsTableIndexOffset {}
#[derive(Copy, Clone, PartialEq)]
/// Zero-copy read-only view over a serialized `SsTableIndex` table.
pub struct SsTableIndex<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for SsTableIndex<'a> {
type Inner = SsTableIndex<'a>;
#[inline]
// SAFETY contract: caller guarantees `loc` is a valid table position in `buf`.
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> SsTableIndex<'a> {
// Vtable slot index of the single field.
pub const VT_BLOCK_META: flatbuffers::VOffsetT = 4;
#[inline]
/// Wraps an already-located table. Caller must guarantee `table` points at a
/// valid `SsTableIndex` (hence `unsafe`).
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
SsTableIndex { _tab: table }
}
#[allow(unused_mut)]
/// Serializes an `SsTableIndex` from `args`. `block_meta` is required; the
/// builder's `finish` asserts it was set.
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args SsTableIndexArgs<'args>
) -> flatbuffers::WIPOffset<SsTableIndex<'bldr>> {
let mut builder = SsTableIndexBuilder::new(_fbb);
if let Some(x) = args.block_meta { builder.add_block_meta(x); }
builder.finish()
}
#[inline]
/// Required vector of `BlockMeta` entries. The `unwrap` relies on the field
/// being marked required in the verifier.
pub fn block_meta(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<BlockMeta<'a>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<BlockMeta>>>>(SsTableIndex::VT_BLOCK_META, None).unwrap()}
}
}
/// Structural verifier for `SsTableIndex`; `block_meta` passes `true`
/// (required), so verification fails if it is missing.
impl flatbuffers::Verifiable for SsTableIndex<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<BlockMeta>>>>("block_meta", Self::VT_BLOCK_META, true)?
.finish();
Ok(())
}
}
/// Argument bundle for `SsTableIndex::create`; `block_meta` must ultimately
/// be provided since the builder enforces it as required.
pub struct SsTableIndexArgs<'a> {
pub block_meta: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<BlockMeta<'a>>>>>,
}
impl<'a> Default for SsTableIndexArgs<'a> {
#[inline]
fn default() -> Self {
SsTableIndexArgs {
block_meta: None, }
}
}
/// Low-level builder writing the `SsTableIndex` field; `finish` asserts the
/// required `block_meta` slot was populated.
pub struct SsTableIndexBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> SsTableIndexBuilder<'a, 'b, A> {
#[inline]
pub fn add_block_meta(&mut self, block_meta: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<BlockMeta<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(SsTableIndex::VT_BLOCK_META, block_meta);
}
#[inline]
/// Starts a new table; must be paired with `finish`.
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> SsTableIndexBuilder<'a, 'b, A> {
let start = _fbb.start_table();
SsTableIndexBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
/// Ends the table; panics (via `required`) if `block_meta` was never added.
pub fn finish(self) -> flatbuffers::WIPOffset<SsTableIndex<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, SsTableIndex::VT_BLOCK_META,"block_meta");
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for SsTableIndex<'_> {
    /// Renders the single field via the lazy accessor.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("SsTableIndex")
            .field("block_meta", &self.block_meta())
            .finish()
    }
}
/// Marker type parameter for `WIPOffset`s pointing at a `CompactedSsTable` table.
pub enum CompactedSsTableOffset {}
#[derive(Copy, Clone, PartialEq)]
/// Zero-copy read-only view over a serialized `CompactedSsTable` table.
pub struct CompactedSsTable<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for CompactedSsTable<'a> {
type Inner = CompactedSsTable<'a>;
#[inline]
// SAFETY contract: caller guarantees `loc` is a valid table position in `buf`.
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> CompactedSsTable<'a> {
// Vtable slot indices for each field.
pub const VT_ID: flatbuffers::VOffsetT = 4;
pub const VT_INFO: flatbuffers::VOffsetT = 6;
pub const VT_VISIBLE_RANGE: flatbuffers::VOffsetT = 8;
pub const VT_FORMAT_VERSION: flatbuffers::VOffsetT = 10;
#[inline]
/// Wraps an already-located table. Caller must guarantee `table` points at a
/// valid `CompactedSsTable` (hence `unsafe`).
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
CompactedSsTable { _tab: table }
}
#[allow(unused_mut)]
/// Serializes a `CompactedSsTable` from `args`. `id` and `info` are required
/// fields; the builder's `finish` asserts both were set.
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args CompactedSsTableArgs<'args>
) -> flatbuffers::WIPOffset<CompactedSsTable<'bldr>> {
let mut builder = CompactedSsTableBuilder::new(_fbb);
if let Some(x) = args.visible_range { builder.add_visible_range(x); }
if let Some(x) = args.info { builder.add_info(x); }
if let Some(x) = args.id { builder.add_id(x); }
if let Some(x) = args.format_version { builder.add_format_version(x); }
builder.finish()
}
#[inline]
/// Required `id` (a `Ulid` table). `unwrap` is safe on verified buffers.
pub fn id(&self) -> Ulid<'a> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<Ulid>>(CompactedSsTable::VT_ID, None).unwrap()}
}
#[inline]
/// Required nested `SsTableInfo` table. `unwrap` is safe on verified buffers.
pub fn info(&self) -> SsTableInfo<'a> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<SsTableInfo>>(CompactedSsTable::VT_INFO, None).unwrap()}
}
#[inline]
/// Optional `visible_range`; `None` when absent.
pub fn visible_range(&self) -> Option<BytesRange<'a>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<BytesRange>>(CompactedSsTable::VT_VISIBLE_RANGE, None)}
}
#[inline]
/// Optional scalar with no schema default: `None` when the field was never written.
pub fn format_version(&self) -> Option<u16> {
unsafe { self._tab.get::<u16>(CompactedSsTable::VT_FORMAT_VERSION, None)}
}
}
/// Structural verifier for `CompactedSsTable`; `id` and `info` are required
/// (`true`), the remaining fields optional.
impl flatbuffers::Verifiable for CompactedSsTable<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<Ulid>>("id", Self::VT_ID, true)?
.visit_field::<flatbuffers::ForwardsUOffset<SsTableInfo>>("info", Self::VT_INFO, true)?
.visit_field::<flatbuffers::ForwardsUOffset<BytesRange>>("visible_range", Self::VT_VISIBLE_RANGE, false)?
.visit_field::<u16>("format_version", Self::VT_FORMAT_VERSION, false)?
.finish();
Ok(())
}
}
/// Argument bundle for `CompactedSsTable::create`; `id` and `info` must
/// ultimately be provided since the builder enforces them as required.
pub struct CompactedSsTableArgs<'a> {
pub id: Option<flatbuffers::WIPOffset<Ulid<'a>>>,
pub info: Option<flatbuffers::WIPOffset<SsTableInfo<'a>>>,
pub visible_range: Option<flatbuffers::WIPOffset<BytesRange<'a>>>,
pub format_version: Option<u16>,
}
impl<'a> Default for CompactedSsTableArgs<'a> {
#[inline]
fn default() -> Self {
CompactedSsTableArgs {
id: None, info: None, visible_range: None,
format_version: None,
}
}
}
/// Low-level builder writing `CompactedSsTable` fields; `finish` asserts the
/// required `id` and `info` slots were populated.
pub struct CompactedSsTableBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> CompactedSsTableBuilder<'a, 'b, A> {
#[inline]
pub fn add_id(&mut self, id: flatbuffers::WIPOffset<Ulid<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Ulid>>(CompactedSsTable::VT_ID, id);
}
#[inline]
pub fn add_info(&mut self, info: flatbuffers::WIPOffset<SsTableInfo<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<SsTableInfo>>(CompactedSsTable::VT_INFO, info);
}
#[inline]
pub fn add_visible_range(&mut self, visible_range: flatbuffers::WIPOffset<BytesRange<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<BytesRange>>(CompactedSsTable::VT_VISIBLE_RANGE, visible_range);
}
#[inline]
// `push_slot_always` because the scalar has no schema default (optional scalar).
pub fn add_format_version(&mut self, format_version: u16) {
self.fbb_.push_slot_always::<u16>(CompactedSsTable::VT_FORMAT_VERSION, format_version);
}
#[inline]
/// Starts a new table; must be paired with `finish`.
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> CompactedSsTableBuilder<'a, 'b, A> {
let start = _fbb.start_table();
CompactedSsTableBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
/// Ends the table; panics (via `required`) if `id` or `info` was never added.
pub fn finish(self) -> flatbuffers::WIPOffset<CompactedSsTable<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, CompactedSsTable::VT_ID,"id");
self.fbb_.required(o, CompactedSsTable::VT_INFO,"info");
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for CompactedSsTable<'_> {
    /// Renders every field via the lazy accessors in declaration order.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("CompactedSsTable")
            .field("id", &self.id())
            .field("info", &self.info())
            .field("visible_range", &self.visible_range())
            .field("format_version", &self.format_version())
            .finish()
    }
}
/// Marker type parameter for `WIPOffset`s pointing at a `CompactedSsTableV2` table.
pub enum CompactedSsTableV2Offset {}
#[derive(Copy, Clone, PartialEq)]
/// Zero-copy read-only view over a serialized `CompactedSsTableV2` table.
pub struct CompactedSsTableV2<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for CompactedSsTableV2<'a> {
type Inner = CompactedSsTableV2<'a>;
#[inline]
// SAFETY contract: caller guarantees `loc` is a valid table position in `buf`.
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> CompactedSsTableV2<'a> {
// Vtable slot indices for each field.
pub const VT_ID: flatbuffers::VOffsetT = 4;
pub const VT_INFO: flatbuffers::VOffsetT = 6;
pub const VT_FORMAT_VERSION: flatbuffers::VOffsetT = 8;
#[inline]
/// Wraps an already-located table. Caller must guarantee `table` points at a
/// valid `CompactedSsTableV2` (hence `unsafe`).
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
CompactedSsTableV2 { _tab: table }
}
#[allow(unused_mut)]
/// Serializes a `CompactedSsTableV2` from `args`. `id` and `info` are
/// required; the builder's `finish` asserts both were set.
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args CompactedSsTableV2Args<'args>
) -> flatbuffers::WIPOffset<CompactedSsTableV2<'bldr>> {
let mut builder = CompactedSsTableV2Builder::new(_fbb);
if let Some(x) = args.info { builder.add_info(x); }
if let Some(x) = args.id { builder.add_id(x); }
if let Some(x) = args.format_version { builder.add_format_version(x); }
builder.finish()
}
#[inline]
/// Required `id` (a `Ulid` table). `unwrap` is safe on verified buffers.
pub fn id(&self) -> Ulid<'a> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<Ulid>>(CompactedSsTableV2::VT_ID, None).unwrap()}
}
#[inline]
/// Required nested `SsTableInfo` table. `unwrap` is safe on verified buffers.
pub fn info(&self) -> SsTableInfo<'a> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<SsTableInfo>>(CompactedSsTableV2::VT_INFO, None).unwrap()}
}
#[inline]
/// Optional scalar with no schema default: `None` when the field was never written.
pub fn format_version(&self) -> Option<u16> {
unsafe { self._tab.get::<u16>(CompactedSsTableV2::VT_FORMAT_VERSION, None)}
}
}
/// Structural verifier for `CompactedSsTableV2`; `id` and `info` are required
/// (`true`), `format_version` optional.
impl flatbuffers::Verifiable for CompactedSsTableV2<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<Ulid>>("id", Self::VT_ID, true)?
.visit_field::<flatbuffers::ForwardsUOffset<SsTableInfo>>("info", Self::VT_INFO, true)?
.visit_field::<u16>("format_version", Self::VT_FORMAT_VERSION, false)?
.finish();
Ok(())
}
}
/// Argument bundle for `CompactedSsTableV2::create`; `id` and `info` must
/// ultimately be provided since the builder enforces them as required.
pub struct CompactedSsTableV2Args<'a> {
pub id: Option<flatbuffers::WIPOffset<Ulid<'a>>>,
pub info: Option<flatbuffers::WIPOffset<SsTableInfo<'a>>>,
pub format_version: Option<u16>,
}
impl<'a> Default for CompactedSsTableV2Args<'a> {
#[inline]
fn default() -> Self {
CompactedSsTableV2Args {
id: None, info: None, format_version: None,
}
}
}
/// Low-level builder writing `CompactedSsTableV2` fields; `finish` asserts
/// the required `id` and `info` slots were populated.
pub struct CompactedSsTableV2Builder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> CompactedSsTableV2Builder<'a, 'b, A> {
#[inline]
pub fn add_id(&mut self, id: flatbuffers::WIPOffset<Ulid<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Ulid>>(CompactedSsTableV2::VT_ID, id);
}
#[inline]
pub fn add_info(&mut self, info: flatbuffers::WIPOffset<SsTableInfo<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<SsTableInfo>>(CompactedSsTableV2::VT_INFO, info);
}
#[inline]
// `push_slot_always` because the scalar has no schema default (optional scalar).
pub fn add_format_version(&mut self, format_version: u16) {
self.fbb_.push_slot_always::<u16>(CompactedSsTableV2::VT_FORMAT_VERSION, format_version);
}
#[inline]
/// Starts a new table; must be paired with `finish`.
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> CompactedSsTableV2Builder<'a, 'b, A> {
let start = _fbb.start_table();
CompactedSsTableV2Builder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
/// Ends the table; panics (via `required`) if `id` or `info` was never added.
pub fn finish(self) -> flatbuffers::WIPOffset<CompactedSsTableV2<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, CompactedSsTableV2::VT_ID,"id");
self.fbb_.required(o, CompactedSsTableV2::VT_INFO,"info");
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for CompactedSsTableV2<'_> {
    /// Renders every field via the lazy accessors in declaration order.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("CompactedSsTableV2")
            .field("id", &self.id())
            .field("info", &self.info())
            .field("format_version", &self.format_version())
            .finish()
    }
}
/// Marker type parameter for `WIPOffset`s pointing at a `CompactedSsTableView` table.
pub enum CompactedSsTableViewOffset {}
#[derive(Copy, Clone, PartialEq)]
/// Zero-copy read-only view over a serialized `CompactedSsTableView` table.
pub struct CompactedSsTableView<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for CompactedSsTableView<'a> {
type Inner = CompactedSsTableView<'a>;
#[inline]
// SAFETY contract: caller guarantees `loc` is a valid table position in `buf`.
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> CompactedSsTableView<'a> {
// Vtable slot indices for each field.
pub const VT_ID: flatbuffers::VOffsetT = 4;
pub const VT_SST_ID: flatbuffers::VOffsetT = 6;
pub const VT_VISIBLE_RANGE: flatbuffers::VOffsetT = 8;
#[inline]
/// Wraps an already-located table. Caller must guarantee `table` points at a
/// valid `CompactedSsTableView` (hence `unsafe`).
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
CompactedSsTableView { _tab: table }
}
#[allow(unused_mut)]
/// Serializes a `CompactedSsTableView` from `args`. `id` and `sst_id` are
/// required; the builder's `finish` asserts both were set.
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args CompactedSsTableViewArgs<'args>
) -> flatbuffers::WIPOffset<CompactedSsTableView<'bldr>> {
let mut builder = CompactedSsTableViewBuilder::new(_fbb);
if let Some(x) = args.visible_range { builder.add_visible_range(x); }
if let Some(x) = args.sst_id { builder.add_sst_id(x); }
if let Some(x) = args.id { builder.add_id(x); }
builder.finish()
}
#[inline]
/// Required `id` (a `Ulid` table). `unwrap` is safe on verified buffers.
pub fn id(&self) -> Ulid<'a> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<Ulid>>(CompactedSsTableView::VT_ID, None).unwrap()}
}
#[inline]
/// Required `sst_id` (a `Ulid` table). `unwrap` is safe on verified buffers.
pub fn sst_id(&self) -> Ulid<'a> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<Ulid>>(CompactedSsTableView::VT_SST_ID, None).unwrap()}
}
#[inline]
/// Optional `visible_range`; `None` when absent.
pub fn visible_range(&self) -> Option<BytesRange<'a>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<BytesRange>>(CompactedSsTableView::VT_VISIBLE_RANGE, None)}
}
}
/// Structural verifier for `CompactedSsTableView`; `id` and `sst_id` are
/// required (`true`), `visible_range` optional.
impl flatbuffers::Verifiable for CompactedSsTableView<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<Ulid>>("id", Self::VT_ID, true)?
.visit_field::<flatbuffers::ForwardsUOffset<Ulid>>("sst_id", Self::VT_SST_ID, true)?
.visit_field::<flatbuffers::ForwardsUOffset<BytesRange>>("visible_range", Self::VT_VISIBLE_RANGE, false)?
.finish();
Ok(())
}
}
/// Argument bundle for `CompactedSsTableView::create`; `id` and `sst_id` must
/// ultimately be provided since the builder enforces them as required.
pub struct CompactedSsTableViewArgs<'a> {
pub id: Option<flatbuffers::WIPOffset<Ulid<'a>>>,
pub sst_id: Option<flatbuffers::WIPOffset<Ulid<'a>>>,
pub visible_range: Option<flatbuffers::WIPOffset<BytesRange<'a>>>,
}
impl<'a> Default for CompactedSsTableViewArgs<'a> {
#[inline]
fn default() -> Self {
CompactedSsTableViewArgs {
id: None, sst_id: None, visible_range: None,
}
}
}
/// Low-level builder writing `CompactedSsTableView` fields; `finish` asserts
/// the required `id` and `sst_id` slots were populated.
pub struct CompactedSsTableViewBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> CompactedSsTableViewBuilder<'a, 'b, A> {
#[inline]
pub fn add_id(&mut self, id: flatbuffers::WIPOffset<Ulid<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Ulid>>(CompactedSsTableView::VT_ID, id);
}
#[inline]
pub fn add_sst_id(&mut self, sst_id: flatbuffers::WIPOffset<Ulid<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Ulid>>(CompactedSsTableView::VT_SST_ID, sst_id);
}
#[inline]
pub fn add_visible_range(&mut self, visible_range: flatbuffers::WIPOffset<BytesRange<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<BytesRange>>(CompactedSsTableView::VT_VISIBLE_RANGE, visible_range);
}
#[inline]
/// Starts a new table; must be paired with `finish`.
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> CompactedSsTableViewBuilder<'a, 'b, A> {
let start = _fbb.start_table();
CompactedSsTableViewBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
/// Ends the table; panics (via `required`) if `id` or `sst_id` was never added.
pub fn finish(self) -> flatbuffers::WIPOffset<CompactedSsTableView<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, CompactedSsTableView::VT_ID,"id");
self.fbb_.required(o, CompactedSsTableView::VT_SST_ID,"sst_id");
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for CompactedSsTableView<'_> {
    /// Renders every field via the lazy accessors in declaration order.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("CompactedSsTableView")
            .field("id", &self.id())
            .field("sst_id", &self.sst_id())
            .field("visible_range", &self.visible_range())
            .finish()
    }
}
/// Marker type parameter for `WIPOffset`s pointing at a `SortedRun` table.
pub enum SortedRunOffset {}
#[derive(Copy, Clone, PartialEq)]
/// Zero-copy read-only view over a serialized `SortedRun` table.
pub struct SortedRun<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for SortedRun<'a> {
type Inner = SortedRun<'a>;
#[inline]
// SAFETY contract: caller guarantees `loc` is a valid table position in `buf`.
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> SortedRun<'a> {
// Vtable slot indices for each field.
pub const VT_ID: flatbuffers::VOffsetT = 4;
pub const VT_SSTS: flatbuffers::VOffsetT = 6;
#[inline]
/// Wraps an already-located table. Caller must guarantee `table` points at a
/// valid `SortedRun` (hence `unsafe`).
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
SortedRun { _tab: table }
}
#[allow(unused_mut)]
/// Serializes a `SortedRun` from `args`. `ssts` is required per the verifier;
/// note this table's builder is declared below.
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args SortedRunArgs<'args>
) -> flatbuffers::WIPOffset<SortedRun<'bldr>> {
let mut builder = SortedRunBuilder::new(_fbb);
if let Some(x) = args.ssts { builder.add_ssts(x); }
builder.add_id(args.id);
builder.finish()
}
#[inline]
/// `id` scalar (0 when absent).
pub fn id(&self) -> u32 {
unsafe { self._tab.get::<u32>(SortedRun::VT_ID, Some(0)).unwrap()}
}
#[inline]
/// Required vector of `CompactedSsTable` entries. The `unwrap` relies on the
/// field being marked required in the verifier.
pub fn ssts(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTable<'a>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTable>>>>(SortedRun::VT_SSTS, None).unwrap()}
}
}
impl flatbuffers::Verifiable for SortedRun<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<u32>("id", Self::VT_ID, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<CompactedSsTable>>>>("ssts", Self::VT_SSTS, true)?
.finish();
Ok(())
}
}
pub struct SortedRunArgs<'a> {
pub id: u32,
pub ssts: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTable<'a>>>>>,
}
impl<'a> Default for SortedRunArgs<'a> {
#[inline]
fn default() -> Self {
SortedRunArgs {
id: 0,
ssts: None, }
}
}
/// Incremental builder for a `SortedRun` table; wraps an in-progress table
/// started by `new` and sealed by `finish`.
pub struct SortedRunBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> SortedRunBuilder<'a, 'b, A> {
#[inline]
/// Pushes `id`; elided from the buffer when equal to the default 0.
pub fn add_id(&mut self, id: u32) {
self.fbb_.push_slot::<u32>(SortedRun::VT_ID, id, 0);
}
#[inline]
/// Pushes the required `ssts` vector offset.
pub fn add_ssts(&mut self, ssts: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<CompactedSsTable<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(SortedRun::VT_SSTS, ssts);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> SortedRunBuilder<'a, 'b, A> {
let start = _fbb.start_table();
SortedRunBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
/// Ends the table; panics if the required `ssts` slot was never pushed.
pub fn finish(self) -> flatbuffers::WIPOffset<SortedRun<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, SortedRun::VT_SSTS,"ssts");
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for SortedRun<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // Chained debug-struct rendering; same output as the generated form.
        f.debug_struct("SortedRun")
            .field("id", &self.id())
            .field("ssts", &self.ssts())
            .finish()
    }
}
/// Marker type naming `SortedRunV2` offsets in generated signatures; never instantiated.
pub enum SortedRunV2Offset {}
#[derive(Copy, Clone, PartialEq)]
/// Zero-copy view over a `SortedRunV2` table. Identical layout to `SortedRun`
/// except `ssts` holds `CompactedSsTableView` entries instead of `CompactedSsTable`.
pub struct SortedRunV2<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for SortedRunV2<'a> {
type Inner = SortedRunV2<'a>;
#[inline]
// Safety: `buf`/`loc` must point at a valid `SortedRunV2` table.
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> SortedRunV2<'a> {
// Vtable byte offsets, in schema declaration order.
pub const VT_ID: flatbuffers::VOffsetT = 4;
pub const VT_SSTS: flatbuffers::VOffsetT = 6;
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
SortedRunV2 { _tab: table }
}
#[allow(unused_mut)]
/// Builds a complete `SortedRunV2` table from `args` into `_fbb`.
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args SortedRunV2Args<'args>
) -> flatbuffers::WIPOffset<SortedRunV2<'bldr>> {
let mut builder = SortedRunV2Builder::new(_fbb);
if let Some(x) = args.ssts { builder.add_ssts(x); }
builder.add_id(args.id);
builder.finish()
}
#[inline]
/// Scalar field; defaults to 0 when absent from the buffer.
pub fn id(&self) -> u32 {
unsafe { self._tab.get::<u32>(SortedRunV2::VT_ID, Some(0)).unwrap()}
}
#[inline]
/// Required field: the `unwrap` is sound only for verified buffers.
pub fn ssts(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTableView<'a>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTableView>>>>(SortedRunV2::VT_SSTS, None).unwrap()}
}
}
impl flatbuffers::Verifiable for SortedRunV2<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
// `true` marks `ssts` as required, matching the accessor's `unwrap`.
v.visit_table(pos)?
.visit_field::<u32>("id", Self::VT_ID, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<CompactedSsTableView>>>>("ssts", Self::VT_SSTS, true)?
.finish();
Ok(())
}
}
/// Argument bundle for `SortedRunV2::create`.
pub struct SortedRunV2Args<'a> {
pub id: u32,
pub ssts: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTableView<'a>>>>>,
}
impl<'a> Default for SortedRunV2Args<'a> {
#[inline]
fn default() -> Self {
SortedRunV2Args {
id: 0,
ssts: None, }
}
}
/// Incremental builder for a `SortedRunV2` table.
pub struct SortedRunV2Builder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> SortedRunV2Builder<'a, 'b, A> {
#[inline]
/// Pushes `id`; elided from the buffer when equal to the default 0.
pub fn add_id(&mut self, id: u32) {
self.fbb_.push_slot::<u32>(SortedRunV2::VT_ID, id, 0);
}
#[inline]
/// Pushes the required `ssts` vector offset.
pub fn add_ssts(&mut self, ssts: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<CompactedSsTableView<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(SortedRunV2::VT_SSTS, ssts);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> SortedRunV2Builder<'a, 'b, A> {
let start = _fbb.start_table();
SortedRunV2Builder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
/// Ends the table; panics if the required `ssts` slot was never pushed.
pub fn finish(self) -> flatbuffers::WIPOffset<SortedRunV2<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, SortedRunV2::VT_SSTS,"ssts");
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for SortedRunV2<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // Chained debug-struct rendering; same output as the generated form.
        f.debug_struct("SortedRunV2")
            .field("id", &self.id())
            .field("ssts", &self.ssts())
            .finish()
    }
}
/// Marker type naming `TieredCompactionSpec` offsets; never instantiated.
pub enum TieredCompactionSpecOffset {}
#[derive(Copy, Clone, PartialEq)]
/// Zero-copy view over a `TieredCompactionSpec` table. All three fields are
/// optional vectors (none is marked required in the verifier below).
pub struct TieredCompactionSpec<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for TieredCompactionSpec<'a> {
type Inner = TieredCompactionSpec<'a>;
#[inline]
// Safety: `buf`/`loc` must point at a valid `TieredCompactionSpec` table.
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> TieredCompactionSpec<'a> {
// Vtable byte offsets, in schema declaration order.
pub const VT_SSTS: flatbuffers::VOffsetT = 4;
pub const VT_SORTED_RUNS: flatbuffers::VOffsetT = 6;
pub const VT_L0_VIEW_IDS: flatbuffers::VOffsetT = 8;
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
TieredCompactionSpec { _tab: table }
}
#[allow(unused_mut)]
/// Builds a complete `TieredCompactionSpec` table from `args` into `_fbb`.
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args TieredCompactionSpecArgs<'args>
) -> flatbuffers::WIPOffset<TieredCompactionSpec<'bldr>> {
let mut builder = TieredCompactionSpecBuilder::new(_fbb);
if let Some(x) = args.l0_view_ids { builder.add_l0_view_ids(x); }
if let Some(x) = args.sorted_runs { builder.add_sorted_runs(x); }
if let Some(x) = args.ssts { builder.add_ssts(x); }
builder.finish()
}
#[inline]
/// Optional vector of SST ids; `None` when absent.
pub fn ssts(&self) -> Option<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Ulid<'a>>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Ulid>>>>(TieredCompactionSpec::VT_SSTS, None)}
}
#[inline]
/// Optional vector of sorted-run ids; `None` when absent.
pub fn sorted_runs(&self) -> Option<flatbuffers::Vector<'a, u32>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u32>>>(TieredCompactionSpec::VT_SORTED_RUNS, None)}
}
#[inline]
/// Optional vector of L0 view ids; `None` when absent.
pub fn l0_view_ids(&self) -> Option<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Ulid<'a>>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Ulid>>>>(TieredCompactionSpec::VT_L0_VIEW_IDS, None)}
}
}
impl flatbuffers::Verifiable for TieredCompactionSpec<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<Ulid>>>>("ssts", Self::VT_SSTS, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, u32>>>("sorted_runs", Self::VT_SORTED_RUNS, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<Ulid>>>>("l0_view_ids", Self::VT_L0_VIEW_IDS, false)?
.finish();
Ok(())
}
}
/// Argument bundle for `TieredCompactionSpec::create`; all fields optional.
pub struct TieredCompactionSpecArgs<'a> {
pub ssts: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Ulid<'a>>>>>,
pub sorted_runs: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, u32>>>,
pub l0_view_ids: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Ulid<'a>>>>>,
}
impl<'a> Default for TieredCompactionSpecArgs<'a> {
#[inline]
fn default() -> Self {
TieredCompactionSpecArgs {
ssts: None,
sorted_runs: None,
l0_view_ids: None,
}
}
}
/// Incremental builder for a `TieredCompactionSpec` table.
pub struct TieredCompactionSpecBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> TieredCompactionSpecBuilder<'a, 'b, A> {
#[inline]
/// Pushes the optional `ssts` vector offset.
pub fn add_ssts(&mut self, ssts: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<Ulid<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(TieredCompactionSpec::VT_SSTS, ssts);
}
#[inline]
/// Pushes the optional `sorted_runs` vector offset.
pub fn add_sorted_runs(&mut self, sorted_runs: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u32>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(TieredCompactionSpec::VT_SORTED_RUNS, sorted_runs);
}
#[inline]
/// Pushes the optional `l0_view_ids` vector offset.
pub fn add_l0_view_ids(&mut self, l0_view_ids: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<Ulid<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(TieredCompactionSpec::VT_L0_VIEW_IDS, l0_view_ids);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> TieredCompactionSpecBuilder<'a, 'b, A> {
let start = _fbb.start_table();
TieredCompactionSpecBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
/// Ends the table; no required-field checks since every field is optional.
pub fn finish(self) -> flatbuffers::WIPOffset<TieredCompactionSpec<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for TieredCompactionSpec<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // Chained debug-struct rendering; same output as the generated form.
        f.debug_struct("TieredCompactionSpec")
            .field("ssts", &self.ssts())
            .field("sorted_runs", &self.sorted_runs())
            .field("l0_view_ids", &self.l0_view_ids())
            .finish()
    }
}
/// Marker type naming `Compaction` offsets; never instantiated.
pub enum CompactionOffset {}
#[derive(Copy, Clone, PartialEq)]
/// Zero-copy view over a `Compaction` table. Carries a `CompactionSpec`
/// union (`spec_type` discriminant + `spec` payload table).
pub struct Compaction<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for Compaction<'a> {
type Inner = Compaction<'a>;
#[inline]
// Safety: `buf`/`loc` must point at a valid `Compaction` table.
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> Compaction<'a> {
// Vtable byte offsets, in schema declaration order. Note the union takes
// two slots: discriminant (VT_SPEC_TYPE) and payload (VT_SPEC).
pub const VT_ID: flatbuffers::VOffsetT = 4;
pub const VT_SPEC_TYPE: flatbuffers::VOffsetT = 6;
pub const VT_SPEC: flatbuffers::VOffsetT = 8;
pub const VT_STATUS: flatbuffers::VOffsetT = 10;
pub const VT_OUTPUT_SSTS: flatbuffers::VOffsetT = 12;
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
Compaction { _tab: table }
}
#[allow(unused_mut)]
/// Builds a complete `Compaction` table from `args` into `_fbb`.
/// Offsets are pushed first, then the small enum scalars.
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args CompactionArgs<'args>
) -> flatbuffers::WIPOffset<Compaction<'bldr>> {
let mut builder = CompactionBuilder::new(_fbb);
if let Some(x) = args.output_ssts { builder.add_output_ssts(x); }
if let Some(x) = args.spec { builder.add_spec(x); }
if let Some(x) = args.id { builder.add_id(x); }
builder.add_status(args.status);
builder.add_spec_type(args.spec_type);
builder.finish()
}
#[inline]
/// Required field: the `unwrap` is sound only for verified buffers.
pub fn id(&self) -> Ulid<'a> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<Ulid>>(Compaction::VT_ID, None).unwrap()}
}
#[inline]
/// Union discriminant; defaults to `NONE` when absent.
pub fn spec_type(&self) -> CompactionSpec {
unsafe { self._tab.get::<CompactionSpec>(Compaction::VT_SPEC_TYPE, Some(CompactionSpec::NONE)).unwrap()}
}
#[inline]
/// Raw union payload table; interpret via `spec_as_*` after checking `spec_type`.
pub fn spec(&self) -> flatbuffers::Table<'a> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Table<'a>>>(Compaction::VT_SPEC, None).unwrap()}
}
#[inline]
/// Enum scalar; defaults to `Submitted` when absent.
pub fn status(&self) -> CompactionStatus {
unsafe { self._tab.get::<CompactionStatus>(Compaction::VT_STATUS, Some(CompactionStatus::Submitted)).unwrap()}
}
#[inline]
/// Optional vector; `None` when absent.
pub fn output_ssts(&self) -> Option<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTable<'a>>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTable>>>>(Compaction::VT_OUTPUT_SSTS, None)}
}
#[inline]
#[allow(non_snake_case)]
/// Typed union accessor: `Some` only when the discriminant matches.
pub fn spec_as_tiered_compaction_spec(&self) -> Option<TieredCompactionSpec<'a>> {
if self.spec_type() == CompactionSpec::TieredCompactionSpec {
let u = self.spec();
Some(unsafe { TieredCompactionSpec::init_from_table(u) })
} else {
None
}
}
}
impl flatbuffers::Verifiable for Compaction<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<Ulid>>("id", Self::VT_ID, true)?
.visit_union::<CompactionSpec, _>("spec_type", Self::VT_SPEC_TYPE, "spec", Self::VT_SPEC, true, |key, v, pos| {
match key {
CompactionSpec::TieredCompactionSpec => v.verify_union_variant::<flatbuffers::ForwardsUOffset<TieredCompactionSpec>>("CompactionSpec::TieredCompactionSpec", pos),
// Unknown discriminants are accepted for forward compatibility.
_ => Ok(()),
}
})?
.visit_field::<CompactionStatus>("status", Self::VT_STATUS, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<CompactedSsTable>>>>("output_ssts", Self::VT_OUTPUT_SSTS, false)?
.finish();
Ok(())
}
}
/// Argument bundle for `Compaction::create`. `spec_type` and `spec` must be
/// set together to form a coherent union value.
pub struct CompactionArgs<'a> {
pub id: Option<flatbuffers::WIPOffset<Ulid<'a>>>,
pub spec_type: CompactionSpec,
pub spec: Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>>,
pub status: CompactionStatus,
pub output_ssts: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTable<'a>>>>>,
}
impl<'a> Default for CompactionArgs<'a> {
    #[inline]
    fn default() -> Self {
        // Offsets start unset; the union discriminant starts at NONE and
        // `status` at its schema default of Submitted.
        Self {
            id: None,
            spec_type: CompactionSpec::NONE,
            spec: None,
            status: CompactionStatus::Submitted,
            output_ssts: None,
        }
    }
}
/// Incremental builder for a `Compaction` table.
pub struct CompactionBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> CompactionBuilder<'a, 'b, A> {
#[inline]
/// Pushes the required `id` table offset.
pub fn add_id(&mut self, id: flatbuffers::WIPOffset<Ulid<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Ulid>>(Compaction::VT_ID, id);
}
#[inline]
/// Pushes the union discriminant; elided when equal to the default NONE.
pub fn add_spec_type(&mut self, spec_type: CompactionSpec) {
self.fbb_.push_slot::<CompactionSpec>(Compaction::VT_SPEC_TYPE, spec_type, CompactionSpec::NONE);
}
#[inline]
/// Pushes the union payload offset; must match the pushed `spec_type`.
pub fn add_spec(&mut self, spec: flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Compaction::VT_SPEC, spec);
}
#[inline]
/// Pushes `status`; elided when equal to the default Submitted.
pub fn add_status(&mut self, status: CompactionStatus) {
self.fbb_.push_slot::<CompactionStatus>(Compaction::VT_STATUS, status, CompactionStatus::Submitted);
}
#[inline]
/// Pushes the optional `output_ssts` vector offset.
pub fn add_output_ssts(&mut self, output_ssts: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<CompactedSsTable<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Compaction::VT_OUTPUT_SSTS, output_ssts);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> CompactionBuilder<'a, 'b, A> {
let start = _fbb.start_table();
CompactionBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
/// Ends the table; panics if required `id` or `spec` was never pushed.
pub fn finish(self) -> flatbuffers::WIPOffset<Compaction<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, Compaction::VT_ID,"id");
self.fbb_.required(o, Compaction::VT_SPEC,"spec");
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for Compaction<'_> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
let mut ds = f.debug_struct("Compaction");
ds.field("id", &self.id());
ds.field("spec_type", &self.spec_type());
// Render the union payload according to its discriminant; a mismatch
// between discriminant and payload is surfaced as an explicit marker
// string rather than a panic.
match self.spec_type() {
CompactionSpec::TieredCompactionSpec => {
if let Some(x) = self.spec_as_tiered_compaction_spec() {
ds.field("spec", &x)
} else {
ds.field("spec", &"InvalidFlatbuffer: Union discriminant does not match value.")
}
},
// NONE or unrecognized discriminants print as `None`.
_ => {
let x: Option<()> = None;
ds.field("spec", &x)
},
};
ds.field("status", &self.status());
ds.field("output_ssts", &self.output_ssts());
ds.finish()
}
}
/// Marker type naming `CompactionsV1` offsets; never instantiated.
pub enum CompactionsV1Offset {}
#[derive(Copy, Clone, PartialEq)]
/// Zero-copy view over a `CompactionsV1` table: a compactor epoch plus the
/// required list of recent compactions.
pub struct CompactionsV1<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for CompactionsV1<'a> {
type Inner = CompactionsV1<'a>;
#[inline]
// Safety: `buf`/`loc` must point at a valid `CompactionsV1` table.
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> CompactionsV1<'a> {
// Vtable byte offsets, in schema declaration order.
pub const VT_COMPACTOR_EPOCH: flatbuffers::VOffsetT = 4;
pub const VT_RECENT_COMPACTIONS: flatbuffers::VOffsetT = 6;
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
CompactionsV1 { _tab: table }
}
#[allow(unused_mut)]
/// Builds a complete `CompactionsV1` table from `args` into `_fbb`.
/// The u64 scalar is pushed before the offset (largest-alignment first).
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args CompactionsV1Args<'args>
) -> flatbuffers::WIPOffset<CompactionsV1<'bldr>> {
let mut builder = CompactionsV1Builder::new(_fbb);
builder.add_compactor_epoch(args.compactor_epoch);
if let Some(x) = args.recent_compactions { builder.add_recent_compactions(x); }
builder.finish()
}
#[inline]
/// Scalar field; defaults to 0 when absent from the buffer.
pub fn compactor_epoch(&self) -> u64 {
unsafe { self._tab.get::<u64>(CompactionsV1::VT_COMPACTOR_EPOCH, Some(0)).unwrap()}
}
#[inline]
/// Required field: the `unwrap` is sound only for verified buffers.
pub fn recent_compactions(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Compaction<'a>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Compaction>>>>(CompactionsV1::VT_RECENT_COMPACTIONS, None).unwrap()}
}
}
impl flatbuffers::Verifiable for CompactionsV1<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
// `true` marks `recent_compactions` as required.
v.visit_table(pos)?
.visit_field::<u64>("compactor_epoch", Self::VT_COMPACTOR_EPOCH, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<Compaction>>>>("recent_compactions", Self::VT_RECENT_COMPACTIONS, true)?
.finish();
Ok(())
}
}
/// Argument bundle for `CompactionsV1::create`.
pub struct CompactionsV1Args<'a> {
pub compactor_epoch: u64,
pub recent_compactions: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Compaction<'a>>>>>,
}
impl<'a> Default for CompactionsV1Args<'a> {
#[inline]
fn default() -> Self {
CompactionsV1Args {
compactor_epoch: 0,
recent_compactions: None, }
}
}
/// Incremental builder for a `CompactionsV1` table.
pub struct CompactionsV1Builder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> CompactionsV1Builder<'a, 'b, A> {
#[inline]
/// Pushes `compactor_epoch`; elided when equal to the default 0.
pub fn add_compactor_epoch(&mut self, compactor_epoch: u64) {
self.fbb_.push_slot::<u64>(CompactionsV1::VT_COMPACTOR_EPOCH, compactor_epoch, 0);
}
#[inline]
/// Pushes the required `recent_compactions` vector offset.
pub fn add_recent_compactions(&mut self, recent_compactions: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<Compaction<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(CompactionsV1::VT_RECENT_COMPACTIONS, recent_compactions);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> CompactionsV1Builder<'a, 'b, A> {
let start = _fbb.start_table();
CompactionsV1Builder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
/// Ends the table; panics if the required `recent_compactions` slot was never pushed.
pub fn finish(self) -> flatbuffers::WIPOffset<CompactionsV1<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, CompactionsV1::VT_RECENT_COMPACTIONS,"recent_compactions");
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for CompactionsV1<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // Chained debug-struct rendering; same output as the generated form.
        f.debug_struct("CompactionsV1")
            .field("compactor_epoch", &self.compactor_epoch())
            .field("recent_compactions", &self.recent_compactions())
            .finish()
    }
}
/// Marker type naming `ExternalDb` offsets; never instantiated.
pub enum ExternalDbOffset {}
#[derive(Copy, Clone, PartialEq)]
/// Zero-copy view over an `ExternalDb` table. `path`, `source_checkpoint_id`
/// and `sst_ids` are required; `final_checkpoint_id` is optional.
pub struct ExternalDb<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for ExternalDb<'a> {
type Inner = ExternalDb<'a>;
#[inline]
// Safety: `buf`/`loc` must point at a valid `ExternalDb` table.
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> ExternalDb<'a> {
// Vtable byte offsets, in schema declaration order.
pub const VT_PATH: flatbuffers::VOffsetT = 4;
pub const VT_SOURCE_CHECKPOINT_ID: flatbuffers::VOffsetT = 6;
pub const VT_FINAL_CHECKPOINT_ID: flatbuffers::VOffsetT = 8;
pub const VT_SST_IDS: flatbuffers::VOffsetT = 10;
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
ExternalDb { _tab: table }
}
#[allow(unused_mut)]
/// Builds a complete `ExternalDb` table from `args` into `_fbb`.
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args ExternalDbArgs<'args>
) -> flatbuffers::WIPOffset<ExternalDb<'bldr>> {
let mut builder = ExternalDbBuilder::new(_fbb);
if let Some(x) = args.sst_ids { builder.add_sst_ids(x); }
if let Some(x) = args.final_checkpoint_id { builder.add_final_checkpoint_id(x); }
if let Some(x) = args.source_checkpoint_id { builder.add_source_checkpoint_id(x); }
if let Some(x) = args.path { builder.add_path(x); }
builder.finish()
}
#[inline]
/// Required string field; `unwrap` is sound only for verified buffers.
pub fn path(&self) -> &'a str {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(ExternalDb::VT_PATH, None).unwrap()}
}
#[inline]
/// Required table field.
pub fn source_checkpoint_id(&self) -> Uuid<'a> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<Uuid>>(ExternalDb::VT_SOURCE_CHECKPOINT_ID, None).unwrap()}
}
#[inline]
/// Optional table field; `None` when absent.
pub fn final_checkpoint_id(&self) -> Option<Uuid<'a>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<Uuid>>(ExternalDb::VT_FINAL_CHECKPOINT_ID, None)}
}
#[inline]
/// Required vector field.
pub fn sst_ids(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Ulid<'a>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Ulid>>>>(ExternalDb::VT_SST_IDS, None).unwrap()}
}
}
impl flatbuffers::Verifiable for ExternalDb<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
// `true` flags match the `unwrap`s in the accessors above.
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<&str>>("path", Self::VT_PATH, true)?
.visit_field::<flatbuffers::ForwardsUOffset<Uuid>>("source_checkpoint_id", Self::VT_SOURCE_CHECKPOINT_ID, true)?
.visit_field::<flatbuffers::ForwardsUOffset<Uuid>>("final_checkpoint_id", Self::VT_FINAL_CHECKPOINT_ID, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<Ulid>>>>("sst_ids", Self::VT_SST_IDS, true)?
.finish();
Ok(())
}
}
/// Argument bundle for `ExternalDb::create`.
pub struct ExternalDbArgs<'a> {
pub path: Option<flatbuffers::WIPOffset<&'a str>>,
pub source_checkpoint_id: Option<flatbuffers::WIPOffset<Uuid<'a>>>,
pub final_checkpoint_id: Option<flatbuffers::WIPOffset<Uuid<'a>>>,
pub sst_ids: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Ulid<'a>>>>>,
}
impl<'a> Default for ExternalDbArgs<'a> {
#[inline]
fn default() -> Self {
ExternalDbArgs {
path: None, source_checkpoint_id: None, final_checkpoint_id: None,
sst_ids: None, }
}
}
/// Incremental builder for an `ExternalDb` table.
pub struct ExternalDbBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> ExternalDbBuilder<'a, 'b, A> {
#[inline]
/// Pushes the required `path` string offset.
pub fn add_path(&mut self, path: flatbuffers::WIPOffset<&'b str>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ExternalDb::VT_PATH, path);
}
#[inline]
/// Pushes the required `source_checkpoint_id` table offset.
pub fn add_source_checkpoint_id(&mut self, source_checkpoint_id: flatbuffers::WIPOffset<Uuid<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Uuid>>(ExternalDb::VT_SOURCE_CHECKPOINT_ID, source_checkpoint_id);
}
#[inline]
/// Pushes the optional `final_checkpoint_id` table offset.
pub fn add_final_checkpoint_id(&mut self, final_checkpoint_id: flatbuffers::WIPOffset<Uuid<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Uuid>>(ExternalDb::VT_FINAL_CHECKPOINT_ID, final_checkpoint_id);
}
#[inline]
/// Pushes the required `sst_ids` vector offset.
pub fn add_sst_ids(&mut self, sst_ids: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<Ulid<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ExternalDb::VT_SST_IDS, sst_ids);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> ExternalDbBuilder<'a, 'b, A> {
let start = _fbb.start_table();
ExternalDbBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
/// Ends the table; panics if any of the three required slots was never pushed.
pub fn finish(self) -> flatbuffers::WIPOffset<ExternalDb<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, ExternalDb::VT_PATH,"path");
self.fbb_.required(o, ExternalDb::VT_SOURCE_CHECKPOINT_ID,"source_checkpoint_id");
self.fbb_.required(o, ExternalDb::VT_SST_IDS,"sst_ids");
flatbuffers::WIPOffset::new(o.value())
}
}
impl core::fmt::Debug for ExternalDb<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // Chained debug-struct rendering; same output as the generated form.
        f.debug_struct("ExternalDb")
            .field("path", &self.path())
            .field("source_checkpoint_id", &self.source_checkpoint_id())
            .field("final_checkpoint_id", &self.final_checkpoint_id())
            .field("sst_ids", &self.sst_ids())
            .finish()
    }
}
/// Marker type naming `ManifestV1` offsets; never instantiated.
pub enum ManifestV1Offset {}
#[derive(Copy, Clone, PartialEq)]
/// Zero-copy view over a `ManifestV1` table — the root manifest record
/// (epochs, WAL ids, L0/compacted state, checkpoints, sequence tracking).
pub struct ManifestV1<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for ManifestV1<'a> {
type Inner = ManifestV1<'a>;
#[inline]
// Safety: `buf`/`loc` must point at a valid `ManifestV1` table.
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> ManifestV1<'a> {
// Vtable byte offsets, one per schema field in declaration order.
pub const VT_MANIFEST_ID: flatbuffers::VOffsetT = 4;
pub const VT_EXTERNAL_DBS: flatbuffers::VOffsetT = 6;
pub const VT_INITIALIZED: flatbuffers::VOffsetT = 8;
pub const VT_WRITER_EPOCH: flatbuffers::VOffsetT = 10;
pub const VT_COMPACTOR_EPOCH: flatbuffers::VOffsetT = 12;
pub const VT_REPLAY_AFTER_WAL_ID: flatbuffers::VOffsetT = 14;
pub const VT_WAL_ID_LAST_SEEN: flatbuffers::VOffsetT = 16;
pub const VT_L0_LAST_COMPACTED: flatbuffers::VOffsetT = 18;
pub const VT_L0: flatbuffers::VOffsetT = 20;
pub const VT_COMPACTED: flatbuffers::VOffsetT = 22;
pub const VT_LAST_L0_CLOCK_TICK: flatbuffers::VOffsetT = 24;
pub const VT_CHECKPOINTS: flatbuffers::VOffsetT = 26;
pub const VT_LAST_L0_SEQ: flatbuffers::VOffsetT = 28;
pub const VT_WAL_OBJECT_STORE_URI: flatbuffers::VOffsetT = 30;
pub const VT_RECENT_SNAPSHOT_MIN_SEQ: flatbuffers::VOffsetT = 32;
pub const VT_SEQUENCE_TRACKER: flatbuffers::VOffsetT = 34;
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
ManifestV1 { _tab: table }
}
#[allow(unused_mut)]
/// Builds a complete `ManifestV1` table from `args` into `_fbb`.
/// Push order: 8-byte scalars, then 4-byte offsets, then the 1-byte bool.
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args ManifestV1Args<'args>
) -> flatbuffers::WIPOffset<ManifestV1<'bldr>> {
let mut builder = ManifestV1Builder::new(_fbb);
builder.add_recent_snapshot_min_seq(args.recent_snapshot_min_seq);
builder.add_last_l0_seq(args.last_l0_seq);
builder.add_last_l0_clock_tick(args.last_l0_clock_tick);
builder.add_wal_id_last_seen(args.wal_id_last_seen);
builder.add_replay_after_wal_id(args.replay_after_wal_id);
builder.add_compactor_epoch(args.compactor_epoch);
builder.add_writer_epoch(args.writer_epoch);
builder.add_manifest_id(args.manifest_id);
if let Some(x) = args.sequence_tracker { builder.add_sequence_tracker(x); }
if let Some(x) = args.wal_object_store_uri { builder.add_wal_object_store_uri(x); }
if let Some(x) = args.checkpoints { builder.add_checkpoints(x); }
if let Some(x) = args.compacted { builder.add_compacted(x); }
if let Some(x) = args.l0 { builder.add_l0(x); }
if let Some(x) = args.l0_last_compacted { builder.add_l0_last_compacted(x); }
if let Some(x) = args.external_dbs { builder.add_external_dbs(x); }
builder.add_initialized(args.initialized);
builder.finish()
}
#[inline]
/// Scalar field; defaults to 0 when absent from the buffer.
pub fn manifest_id(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV1::VT_MANIFEST_ID, Some(0)).unwrap()}
}
#[inline]
/// Optional vector; `None` when absent.
pub fn external_dbs(&self) -> Option<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<ExternalDb<'a>>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<ExternalDb>>>>(ManifestV1::VT_EXTERNAL_DBS, None)}
}
#[inline]
/// Bool scalar; defaults to false when absent.
pub fn initialized(&self) -> bool {
unsafe { self._tab.get::<bool>(ManifestV1::VT_INITIALIZED, Some(false)).unwrap()}
}
#[inline]
pub fn writer_epoch(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV1::VT_WRITER_EPOCH, Some(0)).unwrap()}
}
#[inline]
pub fn compactor_epoch(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV1::VT_COMPACTOR_EPOCH, Some(0)).unwrap()}
}
#[inline]
pub fn replay_after_wal_id(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV1::VT_REPLAY_AFTER_WAL_ID, Some(0)).unwrap()}
}
#[inline]
pub fn wal_id_last_seen(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV1::VT_WAL_ID_LAST_SEEN, Some(0)).unwrap()}
}
#[inline]
/// Optional table field; `None` when absent.
pub fn l0_last_compacted(&self) -> Option<Ulid<'a>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<Ulid>>(ManifestV1::VT_L0_LAST_COMPACTED, None)}
}
#[inline]
/// Required vector; `unwrap` is sound only for verified buffers.
pub fn l0(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTable<'a>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTable>>>>(ManifestV1::VT_L0, None).unwrap()}
}
#[inline]
/// Required vector; `unwrap` is sound only for verified buffers.
pub fn compacted(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<SortedRun<'a>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<SortedRun>>>>(ManifestV1::VT_COMPACTED, None).unwrap()}
}
#[inline]
pub fn last_l0_clock_tick(&self) -> i64 {
unsafe { self._tab.get::<i64>(ManifestV1::VT_LAST_L0_CLOCK_TICK, Some(0)).unwrap()}
}
#[inline]
/// Required vector; `unwrap` is sound only for verified buffers.
pub fn checkpoints(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Checkpoint<'a>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Checkpoint>>>>(ManifestV1::VT_CHECKPOINTS, None).unwrap()}
}
#[inline]
pub fn last_l0_seq(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV1::VT_LAST_L0_SEQ, Some(0)).unwrap()}
}
#[inline]
/// Optional string field; `None` when absent.
pub fn wal_object_store_uri(&self) -> Option<&'a str> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(ManifestV1::VT_WAL_OBJECT_STORE_URI, None)}
}
#[inline]
pub fn recent_snapshot_min_seq(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV1::VT_RECENT_SNAPSHOT_MIN_SEQ, Some(0)).unwrap()}
}
#[inline]
/// Optional byte vector; `None` when absent.
pub fn sequence_tracker(&self) -> Option<flatbuffers::Vector<'a, u8>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u8>>>(ManifestV1::VT_SEQUENCE_TRACKER, None)}
}
}
impl flatbuffers::Verifiable for ManifestV1<'_> {
#[inline]
// Verifies every field; `true` flags mark the required vectors
// (`l0`, `compacted`, `checkpoints`) matching the `unwrap`s in the accessors.
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<u64>("manifest_id", Self::VT_MANIFEST_ID, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<ExternalDb>>>>("external_dbs", Self::VT_EXTERNAL_DBS, false)?
.visit_field::<bool>("initialized", Self::VT_INITIALIZED, false)?
.visit_field::<u64>("writer_epoch", Self::VT_WRITER_EPOCH, false)?
.visit_field::<u64>("compactor_epoch", Self::VT_COMPACTOR_EPOCH, false)?
.visit_field::<u64>("replay_after_wal_id", Self::VT_REPLAY_AFTER_WAL_ID, false)?
.visit_field::<u64>("wal_id_last_seen", Self::VT_WAL_ID_LAST_SEEN, false)?
.visit_field::<flatbuffers::ForwardsUOffset<Ulid>>("l0_last_compacted", Self::VT_L0_LAST_COMPACTED, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<CompactedSsTable>>>>("l0", Self::VT_L0, true)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<SortedRun>>>>("compacted", Self::VT_COMPACTED, true)?
.visit_field::<i64>("last_l0_clock_tick", Self::VT_LAST_L0_CLOCK_TICK, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<Checkpoint>>>>("checkpoints", Self::VT_CHECKPOINTS, true)?
.visit_field::<u64>("last_l0_seq", Self::VT_LAST_L0_SEQ, false)?
.visit_field::<flatbuffers::ForwardsUOffset<&str>>("wal_object_store_uri", Self::VT_WAL_OBJECT_STORE_URI, false)?
.visit_field::<u64>("recent_snapshot_min_seq", Self::VT_RECENT_SNAPSHOT_MIN_SEQ, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, u8>>>("sequence_tracker", Self::VT_SEQUENCE_TRACKER, false)?
.finish();
Ok(())
}
}
/// Argument bundle consumed by `ManifestV1::create`. Scalar fields carry their
/// schema defaults; offset fields are `Option`s that are only written when
/// `Some`. `l0`, `compacted` and `checkpoints` must be `Some` or
/// `ManifestV1Builder::finish` will fail its required-field checks.
pub struct ManifestV1Args<'a> {
pub manifest_id: u64,
pub external_dbs: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<ExternalDb<'a>>>>>,
pub initialized: bool,
pub writer_epoch: u64,
pub compactor_epoch: u64,
pub replay_after_wal_id: u64,
pub wal_id_last_seen: u64,
pub l0_last_compacted: Option<flatbuffers::WIPOffset<Ulid<'a>>>,
pub l0: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTable<'a>>>>>,
pub compacted: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<SortedRun<'a>>>>>,
pub last_l0_clock_tick: i64,
pub checkpoints: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Checkpoint<'a>>>>>,
pub last_l0_seq: u64,
pub wal_object_store_uri: Option<flatbuffers::WIPOffset<&'a str>>,
pub recent_snapshot_min_seq: u64,
pub sequence_tracker: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, u8>>>,
}
impl<'a> Default for ManifestV1Args<'a> {
#[inline]
fn default() -> Self {
ManifestV1Args {
manifest_id: 0,
external_dbs: None,
initialized: false,
writer_epoch: 0,
compactor_epoch: 0,
replay_after_wal_id: 0,
wal_id_last_seen: 0,
l0_last_compacted: None,
l0: None, compacted: None, last_l0_clock_tick: 0,
checkpoints: None, last_l0_seq: 0,
wal_object_store_uri: None,
recent_snapshot_min_seq: 0,
sequence_tracker: None,
}
}
}
/// In-progress `ManifestV1` table writer: borrows the `FlatBufferBuilder` and
/// remembers where the table started so `finish` can close it.
pub struct ManifestV1Builder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
// Setter methods for each ManifestV1 field. Scalars use `push_slot`, which
// skips the slot entirely when the value equals the schema default (the third
// argument); offsets use `push_slot_always`, which writes unconditionally.
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> ManifestV1Builder<'a, 'b, A> {
#[inline]
pub fn add_manifest_id(&mut self, manifest_id: u64) {
self.fbb_.push_slot::<u64>(ManifestV1::VT_MANIFEST_ID, manifest_id, 0);
}
#[inline]
pub fn add_external_dbs(&mut self, external_dbs: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<ExternalDb<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ManifestV1::VT_EXTERNAL_DBS, external_dbs);
}
#[inline]
pub fn add_initialized(&mut self, initialized: bool) {
self.fbb_.push_slot::<bool>(ManifestV1::VT_INITIALIZED, initialized, false);
}
#[inline]
pub fn add_writer_epoch(&mut self, writer_epoch: u64) {
self.fbb_.push_slot::<u64>(ManifestV1::VT_WRITER_EPOCH, writer_epoch, 0);
}
#[inline]
pub fn add_compactor_epoch(&mut self, compactor_epoch: u64) {
self.fbb_.push_slot::<u64>(ManifestV1::VT_COMPACTOR_EPOCH, compactor_epoch, 0);
}
#[inline]
pub fn add_replay_after_wal_id(&mut self, replay_after_wal_id: u64) {
self.fbb_.push_slot::<u64>(ManifestV1::VT_REPLAY_AFTER_WAL_ID, replay_after_wal_id, 0);
}
#[inline]
pub fn add_wal_id_last_seen(&mut self, wal_id_last_seen: u64) {
self.fbb_.push_slot::<u64>(ManifestV1::VT_WAL_ID_LAST_SEEN, wal_id_last_seen, 0);
}
#[inline]
pub fn add_l0_last_compacted(&mut self, l0_last_compacted: flatbuffers::WIPOffset<Ulid<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Ulid>>(ManifestV1::VT_L0_LAST_COMPACTED, l0_last_compacted);
}
#[inline]
pub fn add_l0(&mut self, l0: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<CompactedSsTable<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ManifestV1::VT_L0, l0);
}
#[inline]
pub fn add_compacted(&mut self, compacted: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<SortedRun<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ManifestV1::VT_COMPACTED, compacted);
}
#[inline]
pub fn add_last_l0_clock_tick(&mut self, last_l0_clock_tick: i64) {
self.fbb_.push_slot::<i64>(ManifestV1::VT_LAST_L0_CLOCK_TICK, last_l0_clock_tick, 0);
}
#[inline]
pub fn add_checkpoints(&mut self, checkpoints: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<Checkpoint<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ManifestV1::VT_CHECKPOINTS, checkpoints);
}
#[inline]
pub fn add_last_l0_seq(&mut self, last_l0_seq: u64) {
self.fbb_.push_slot::<u64>(ManifestV1::VT_LAST_L0_SEQ, last_l0_seq, 0);
}
#[inline]
pub fn add_wal_object_store_uri(&mut self, wal_object_store_uri: flatbuffers::WIPOffset<&'b str>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ManifestV1::VT_WAL_OBJECT_STORE_URI, wal_object_store_uri);
}
#[inline]
pub fn add_recent_snapshot_min_seq(&mut self, recent_snapshot_min_seq: u64) {
self.fbb_.push_slot::<u64>(ManifestV1::VT_RECENT_SNAPSHOT_MIN_SEQ, recent_snapshot_min_seq, 0);
}
#[inline]
pub fn add_sequence_tracker(&mut self, sequence_tracker: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u8>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ManifestV1::VT_SEQUENCE_TRACKER, sequence_tracker);
}
/// Opens a new table in the underlying builder; pair with `finish`.
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> ManifestV1Builder<'a, 'b, A> {
let start = _fbb.start_table();
ManifestV1Builder {
fbb_: _fbb,
start_: start,
}
}
/// Closes the table and asserts that every required field (`l0`, `compacted`,
/// `checkpoints`) was written; the required set must mirror `run_verifier`.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<ManifestV1<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, ManifestV1::VT_L0,"l0");
self.fbb_.required(o, ManifestV1::VT_COMPACTED,"compacted");
self.fbb_.required(o, ManifestV1::VT_CHECKPOINTS,"checkpoints");
flatbuffers::WIPOffset::new(o.value())
}
}
/// Debug output that reads each field through its typed accessor, in schema
/// declaration order.
impl core::fmt::Debug for ManifestV1<'_> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
f.debug_struct("ManifestV1")
.field("manifest_id", &self.manifest_id())
.field("external_dbs", &self.external_dbs())
.field("initialized", &self.initialized())
.field("writer_epoch", &self.writer_epoch())
.field("compactor_epoch", &self.compactor_epoch())
.field("replay_after_wal_id", &self.replay_after_wal_id())
.field("wal_id_last_seen", &self.wal_id_last_seen())
.field("l0_last_compacted", &self.l0_last_compacted())
.field("l0", &self.l0())
.field("compacted", &self.compacted())
.field("last_l0_clock_tick", &self.last_l0_clock_tick())
.field("checkpoints", &self.checkpoints())
.field("last_l0_seq", &self.last_l0_seq())
.field("wal_object_store_uri", &self.wal_object_store_uri())
.field("recent_snapshot_min_seq", &self.recent_snapshot_min_seq())
.field("sequence_tracker", &self.sequence_tracker())
.finish()
}
}
/// Uninhabited marker type for `ManifestV2` offsets (flatc-generated convention;
/// carries no runtime values).
pub enum ManifestV2Offset {}
/// Zero-copy view over a `ManifestV2` table inside a flatbuffer; wraps the raw
/// table handle and borrows the underlying buffer for `'a`.
#[derive(Copy, Clone, PartialEq)]
pub struct ManifestV2<'a> {
pub _tab: flatbuffers::Table<'a>,
}
// Lets `ManifestV2` be resolved from a buffer offset (used by `root`, vectors,
// and nested-table accessors).
impl<'a> flatbuffers::Follow<'a> for ManifestV2<'a> {
type Inner = ManifestV2<'a>;
#[inline]
// SAFETY contract (caller-upheld): `loc` must point at a valid ManifestV2
// table inside `buf` — normally guaranteed by prior verification.
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> ManifestV2<'a> {
// Vtable slot offsets, fixed by the schema's field order. Do not renumber:
// they define the wire format.
pub const VT_MANIFEST_ID: flatbuffers::VOffsetT = 4;
pub const VT_EXTERNAL_DBS: flatbuffers::VOffsetT = 6;
pub const VT_INITIALIZED: flatbuffers::VOffsetT = 8;
pub const VT_WRITER_EPOCH: flatbuffers::VOffsetT = 10;
pub const VT_COMPACTOR_EPOCH: flatbuffers::VOffsetT = 12;
pub const VT_REPLAY_AFTER_WAL_ID: flatbuffers::VOffsetT = 14;
pub const VT_WAL_ID_LAST_SEEN: flatbuffers::VOffsetT = 16;
pub const VT_LAST_COMPACTED_L0_SST_VIEW_ID: flatbuffers::VOffsetT = 18;
pub const VT_SSTS: flatbuffers::VOffsetT = 20;
pub const VT_L0: flatbuffers::VOffsetT = 22;
pub const VT_COMPACTED: flatbuffers::VOffsetT = 24;
pub const VT_LAST_L0_CLOCK_TICK: flatbuffers::VOffsetT = 26;
pub const VT_CHECKPOINTS: flatbuffers::VOffsetT = 28;
pub const VT_LAST_L0_SEQ: flatbuffers::VOffsetT = 30;
pub const VT_RECENT_SNAPSHOT_MIN_SEQ: flatbuffers::VOffsetT = 32;
pub const VT_SEQUENCE_TRACKER: flatbuffers::VOffsetT = 34;
/// Wraps an already-located table handle.
/// # Safety
/// `table` must reference a valid `ManifestV2` table (e.g. verified input).
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
ManifestV2 { _tab: table }
}
/// Serializes `args` as a new `ManifestV2` table into `_fbb` and returns its
/// offset. Fields are pushed largest-scalar-first (the generator's layout
/// order); `None` offset fields are simply omitted from the table.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args ManifestV2Args<'args>
) -> flatbuffers::WIPOffset<ManifestV2<'bldr>> {
let mut builder = ManifestV2Builder::new(_fbb);
builder.add_recent_snapshot_min_seq(args.recent_snapshot_min_seq);
builder.add_last_l0_seq(args.last_l0_seq);
builder.add_last_l0_clock_tick(args.last_l0_clock_tick);
builder.add_wal_id_last_seen(args.wal_id_last_seen);
builder.add_replay_after_wal_id(args.replay_after_wal_id);
builder.add_compactor_epoch(args.compactor_epoch);
builder.add_writer_epoch(args.writer_epoch);
builder.add_manifest_id(args.manifest_id);
if let Some(x) = args.sequence_tracker { builder.add_sequence_tracker(x); }
if let Some(x) = args.checkpoints { builder.add_checkpoints(x); }
if let Some(x) = args.compacted { builder.add_compacted(x); }
if let Some(x) = args.l0 { builder.add_l0(x); }
if let Some(x) = args.ssts { builder.add_ssts(x); }
if let Some(x) = args.last_compacted_l0_sst_view_id { builder.add_last_compacted_l0_sst_view_id(x); }
if let Some(x) = args.external_dbs { builder.add_external_dbs(x); }
builder.add_initialized(args.initialized);
builder.finish()
}
/// Scalar `manifest_id`; defaults to 0 when the slot is absent.
#[inline]
pub fn manifest_id(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV2::VT_MANIFEST_ID, Some(0)).unwrap()}
}
/// Optional `external_dbs` vector; `None` when unset.
#[inline]
pub fn external_dbs(&self) -> Option<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<ExternalDb<'a>>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<ExternalDb>>>>(ManifestV2::VT_EXTERNAL_DBS, None)}
}
/// Scalar `initialized` flag; defaults to `false` when the slot is absent.
#[inline]
pub fn initialized(&self) -> bool {
unsafe { self._tab.get::<bool>(ManifestV2::VT_INITIALIZED, Some(false)).unwrap()}
}
/// Scalar `writer_epoch`; defaults to 0.
#[inline]
pub fn writer_epoch(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV2::VT_WRITER_EPOCH, Some(0)).unwrap()}
}
/// Scalar `compactor_epoch`; defaults to 0.
#[inline]
pub fn compactor_epoch(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV2::VT_COMPACTOR_EPOCH, Some(0)).unwrap()}
}
/// Scalar `replay_after_wal_id`; defaults to 0.
#[inline]
pub fn replay_after_wal_id(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV2::VT_REPLAY_AFTER_WAL_ID, Some(0)).unwrap()}
}
/// Scalar `wal_id_last_seen`; defaults to 0.
#[inline]
pub fn wal_id_last_seen(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV2::VT_WAL_ID_LAST_SEEN, Some(0)).unwrap()}
}
/// Optional `last_compacted_l0_sst_view_id`; `None` when unset.
#[inline]
pub fn last_compacted_l0_sst_view_id(&self) -> Option<Ulid<'a>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<Ulid>>(ManifestV2::VT_LAST_COMPACTED_L0_SST_VIEW_ID, None)}
}
/// Required `ssts` vector; `.unwrap()` is backed by the verifier and by
/// `ManifestV2Builder::finish` both marking the field required.
#[inline]
pub fn ssts(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTableV2<'a>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTableV2>>>>(ManifestV2::VT_SSTS, None).unwrap()}
}
/// Required `l0` vector (of SST views in V2, unlike V1's direct SSTs).
#[inline]
pub fn l0(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTableView<'a>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTableView>>>>(ManifestV2::VT_L0, None).unwrap()}
}
/// Required `compacted` vector of V2 sorted runs.
#[inline]
pub fn compacted(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<SortedRunV2<'a>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<SortedRunV2>>>>(ManifestV2::VT_COMPACTED, None).unwrap()}
}
/// Scalar `last_l0_clock_tick`; defaults to 0.
#[inline]
pub fn last_l0_clock_tick(&self) -> i64 {
unsafe { self._tab.get::<i64>(ManifestV2::VT_LAST_L0_CLOCK_TICK, Some(0)).unwrap()}
}
/// Required `checkpoints` vector.
#[inline]
pub fn checkpoints(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Checkpoint<'a>>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Checkpoint>>>>(ManifestV2::VT_CHECKPOINTS, None).unwrap()}
}
/// Scalar `last_l0_seq`; defaults to 0.
#[inline]
pub fn last_l0_seq(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV2::VT_LAST_L0_SEQ, Some(0)).unwrap()}
}
/// Scalar `recent_snapshot_min_seq`; defaults to 0.
#[inline]
pub fn recent_snapshot_min_seq(&self) -> u64 {
unsafe { self._tab.get::<u64>(ManifestV2::VT_RECENT_SNAPSHOT_MIN_SEQ, Some(0)).unwrap()}
}
/// Optional raw-byte `sequence_tracker` payload; `None` when unset.
#[inline]
pub fn sequence_tracker(&self) -> Option<flatbuffers::Vector<'a, u8>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u8>>>(ManifestV2::VT_SEQUENCE_TRACKER, None)}
}
}
// Buffer verification for `ManifestV2`. The required flags (`true` on ssts,
// l0, compacted, checkpoints) must stay in sync with the `.unwrap()` accessors
// and with `ManifestV2Builder::finish`.
impl flatbuffers::Verifiable for ManifestV2<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<u64>("manifest_id", Self::VT_MANIFEST_ID, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<ExternalDb>>>>("external_dbs", Self::VT_EXTERNAL_DBS, false)?
.visit_field::<bool>("initialized", Self::VT_INITIALIZED, false)?
.visit_field::<u64>("writer_epoch", Self::VT_WRITER_EPOCH, false)?
.visit_field::<u64>("compactor_epoch", Self::VT_COMPACTOR_EPOCH, false)?
.visit_field::<u64>("replay_after_wal_id", Self::VT_REPLAY_AFTER_WAL_ID, false)?
.visit_field::<u64>("wal_id_last_seen", Self::VT_WAL_ID_LAST_SEEN, false)?
.visit_field::<flatbuffers::ForwardsUOffset<Ulid>>("last_compacted_l0_sst_view_id", Self::VT_LAST_COMPACTED_L0_SST_VIEW_ID, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<CompactedSsTableV2>>>>("ssts", Self::VT_SSTS, true)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<CompactedSsTableView>>>>("l0", Self::VT_L0, true)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<SortedRunV2>>>>("compacted", Self::VT_COMPACTED, true)?
.visit_field::<i64>("last_l0_clock_tick", Self::VT_LAST_L0_CLOCK_TICK, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<Checkpoint>>>>("checkpoints", Self::VT_CHECKPOINTS, true)?
.visit_field::<u64>("last_l0_seq", Self::VT_LAST_L0_SEQ, false)?
.visit_field::<u64>("recent_snapshot_min_seq", Self::VT_RECENT_SNAPSHOT_MIN_SEQ, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, u8>>>("sequence_tracker", Self::VT_SEQUENCE_TRACKER, false)?
.finish();
Ok(())
}
}
/// Argument bundle consumed by `ManifestV2::create`. `ssts`, `l0`, `compacted`
/// and `checkpoints` must be `Some` or `ManifestV2Builder::finish` will fail
/// its required-field checks.
pub struct ManifestV2Args<'a> {
pub manifest_id: u64,
pub external_dbs: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<ExternalDb<'a>>>>>,
pub initialized: bool,
pub writer_epoch: u64,
pub compactor_epoch: u64,
pub replay_after_wal_id: u64,
pub wal_id_last_seen: u64,
pub last_compacted_l0_sst_view_id: Option<flatbuffers::WIPOffset<Ulid<'a>>>,
pub ssts: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTableV2<'a>>>>>,
pub l0: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<CompactedSsTableView<'a>>>>>,
pub compacted: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<SortedRunV2<'a>>>>>,
pub last_l0_clock_tick: i64,
pub checkpoints: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Checkpoint<'a>>>>>,
pub last_l0_seq: u64,
pub recent_snapshot_min_seq: u64,
pub sequence_tracker: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, u8>>>,
}
impl<'a> Default for ManifestV2Args<'a> {
#[inline]
fn default() -> Self {
ManifestV2Args {
manifest_id: 0,
external_dbs: None,
initialized: false,
writer_epoch: 0,
compactor_epoch: 0,
replay_after_wal_id: 0,
wal_id_last_seen: 0,
last_compacted_l0_sst_view_id: None,
ssts: None, l0: None, compacted: None, last_l0_clock_tick: 0,
checkpoints: None, last_l0_seq: 0,
recent_snapshot_min_seq: 0,
sequence_tracker: None,
}
}
}
/// In-progress `ManifestV2` table writer: borrows the `FlatBufferBuilder` and
/// remembers where the table started so `finish` can close it.
pub struct ManifestV2Builder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
// Setter methods for each ManifestV2 field. Scalars use `push_slot` (slot is
// omitted when the value equals the schema default); offsets use
// `push_slot_always` (written unconditionally).
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> ManifestV2Builder<'a, 'b, A> {
#[inline]
pub fn add_manifest_id(&mut self, manifest_id: u64) {
self.fbb_.push_slot::<u64>(ManifestV2::VT_MANIFEST_ID, manifest_id, 0);
}
#[inline]
pub fn add_external_dbs(&mut self, external_dbs: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<ExternalDb<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ManifestV2::VT_EXTERNAL_DBS, external_dbs);
}
#[inline]
pub fn add_initialized(&mut self, initialized: bool) {
self.fbb_.push_slot::<bool>(ManifestV2::VT_INITIALIZED, initialized, false);
}
#[inline]
pub fn add_writer_epoch(&mut self, writer_epoch: u64) {
self.fbb_.push_slot::<u64>(ManifestV2::VT_WRITER_EPOCH, writer_epoch, 0);
}
#[inline]
pub fn add_compactor_epoch(&mut self, compactor_epoch: u64) {
self.fbb_.push_slot::<u64>(ManifestV2::VT_COMPACTOR_EPOCH, compactor_epoch, 0);
}
#[inline]
pub fn add_replay_after_wal_id(&mut self, replay_after_wal_id: u64) {
self.fbb_.push_slot::<u64>(ManifestV2::VT_REPLAY_AFTER_WAL_ID, replay_after_wal_id, 0);
}
#[inline]
pub fn add_wal_id_last_seen(&mut self, wal_id_last_seen: u64) {
self.fbb_.push_slot::<u64>(ManifestV2::VT_WAL_ID_LAST_SEEN, wal_id_last_seen, 0);
}
#[inline]
pub fn add_last_compacted_l0_sst_view_id(&mut self, last_compacted_l0_sst_view_id: flatbuffers::WIPOffset<Ulid<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Ulid>>(ManifestV2::VT_LAST_COMPACTED_L0_SST_VIEW_ID, last_compacted_l0_sst_view_id);
}
#[inline]
pub fn add_ssts(&mut self, ssts: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<CompactedSsTableV2<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ManifestV2::VT_SSTS, ssts);
}
#[inline]
pub fn add_l0(&mut self, l0: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<CompactedSsTableView<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ManifestV2::VT_L0, l0);
}
#[inline]
pub fn add_compacted(&mut self, compacted: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<SortedRunV2<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ManifestV2::VT_COMPACTED, compacted);
}
#[inline]
pub fn add_last_l0_clock_tick(&mut self, last_l0_clock_tick: i64) {
self.fbb_.push_slot::<i64>(ManifestV2::VT_LAST_L0_CLOCK_TICK, last_l0_clock_tick, 0);
}
#[inline]
pub fn add_checkpoints(&mut self, checkpoints: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<Checkpoint<'b >>>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ManifestV2::VT_CHECKPOINTS, checkpoints);
}
#[inline]
pub fn add_last_l0_seq(&mut self, last_l0_seq: u64) {
self.fbb_.push_slot::<u64>(ManifestV2::VT_LAST_L0_SEQ, last_l0_seq, 0);
}
#[inline]
pub fn add_recent_snapshot_min_seq(&mut self, recent_snapshot_min_seq: u64) {
self.fbb_.push_slot::<u64>(ManifestV2::VT_RECENT_SNAPSHOT_MIN_SEQ, recent_snapshot_min_seq, 0);
}
#[inline]
pub fn add_sequence_tracker(&mut self, sequence_tracker: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u8>>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(ManifestV2::VT_SEQUENCE_TRACKER, sequence_tracker);
}
/// Opens a new table in the underlying builder; pair with `finish`.
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> ManifestV2Builder<'a, 'b, A> {
let start = _fbb.start_table();
ManifestV2Builder {
fbb_: _fbb,
start_: start,
}
}
/// Closes the table and asserts that every required field (`ssts`, `l0`,
/// `compacted`, `checkpoints`) was written; must mirror `run_verifier`.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<ManifestV2<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, ManifestV2::VT_SSTS,"ssts");
self.fbb_.required(o, ManifestV2::VT_L0,"l0");
self.fbb_.required(o, ManifestV2::VT_COMPACTED,"compacted");
self.fbb_.required(o, ManifestV2::VT_CHECKPOINTS,"checkpoints");
flatbuffers::WIPOffset::new(o.value())
}
}
/// Debug output that reads each field through its typed accessor, in schema
/// declaration order.
impl core::fmt::Debug for ManifestV2<'_> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
f.debug_struct("ManifestV2")
.field("manifest_id", &self.manifest_id())
.field("external_dbs", &self.external_dbs())
.field("initialized", &self.initialized())
.field("writer_epoch", &self.writer_epoch())
.field("compactor_epoch", &self.compactor_epoch())
.field("replay_after_wal_id", &self.replay_after_wal_id())
.field("wal_id_last_seen", &self.wal_id_last_seen())
.field("last_compacted_l0_sst_view_id", &self.last_compacted_l0_sst_view_id())
.field("ssts", &self.ssts())
.field("l0", &self.l0())
.field("compacted", &self.compacted())
.field("last_l0_clock_tick", &self.last_l0_clock_tick())
.field("checkpoints", &self.checkpoints())
.field("last_l0_seq", &self.last_l0_seq())
.field("recent_snapshot_min_seq", &self.recent_snapshot_min_seq())
.field("sequence_tracker", &self.sequence_tracker())
.finish()
}
}
/// Uninhabited marker type for `WriterCheckpoint` offsets (flatc-generated
/// convention; carries no runtime values).
pub enum WriterCheckpointOffset {}
/// Zero-copy view over a `WriterCheckpoint` table — the writer-specific
/// checkpoint metadata payload used by the `CheckpointMetadata` union.
#[derive(Copy, Clone, PartialEq)]
pub struct WriterCheckpoint<'a> {
pub _tab: flatbuffers::Table<'a>,
}
// Lets `WriterCheckpoint` be resolved from a buffer offset.
impl<'a> flatbuffers::Follow<'a> for WriterCheckpoint<'a> {
type Inner = WriterCheckpoint<'a>;
#[inline]
// SAFETY contract (caller-upheld): `loc` must point at a valid
// WriterCheckpoint table inside `buf`.
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> WriterCheckpoint<'a> {
// Single vtable slot: the table has one field.
pub const VT_EPOCH: flatbuffers::VOffsetT = 4;
/// Wraps an already-located table handle.
/// # Safety
/// `table` must reference a valid `WriterCheckpoint` table.
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
WriterCheckpoint { _tab: table }
}
/// Serializes `args` as a new `WriterCheckpoint` table and returns its offset.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args WriterCheckpointArgs
) -> flatbuffers::WIPOffset<WriterCheckpoint<'bldr>> {
let mut builder = WriterCheckpointBuilder::new(_fbb);
builder.add_epoch(args.epoch);
builder.finish()
}
/// Scalar `epoch`; defaults to 0 when the slot is absent.
#[inline]
pub fn epoch(&self) -> u64 {
unsafe { self._tab.get::<u64>(WriterCheckpoint::VT_EPOCH, Some(0)).unwrap()}
}
}
// Buffer verification for `WriterCheckpoint`: a single optional scalar field.
impl flatbuffers::Verifiable for WriterCheckpoint<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<u64>("epoch", Self::VT_EPOCH, false)?
.finish();
Ok(())
}
}
/// Argument bundle consumed by `WriterCheckpoint::create`.
pub struct WriterCheckpointArgs {
pub epoch: u64,
}
/// Schema default: `epoch = 0`.
///
/// The generated impl declared a lifetime parameter `'a` that was never used
/// (`WriterCheckpointArgs` has no lifetime, unlike the other `*Args` types);
/// the needless parameter is dropped here.
impl Default for WriterCheckpointArgs {
#[inline]
fn default() -> Self {
WriterCheckpointArgs {
epoch: 0,
}
}
}
/// In-progress `WriterCheckpoint` table writer.
pub struct WriterCheckpointBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> WriterCheckpointBuilder<'a, 'b, A> {
/// Writes `epoch`; `push_slot` omits the slot when the value equals the
/// schema default 0.
#[inline]
pub fn add_epoch(&mut self, epoch: u64) {
self.fbb_.push_slot::<u64>(WriterCheckpoint::VT_EPOCH, epoch, 0);
}
/// Opens a new table in the underlying builder; pair with `finish`.
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> WriterCheckpointBuilder<'a, 'b, A> {
let start = _fbb.start_table();
WriterCheckpointBuilder {
fbb_: _fbb,
start_: start,
}
}
/// Closes the table. No required fields to check for this table.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<WriterCheckpoint<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
/// Debug output for the single-field `WriterCheckpoint` table.
impl core::fmt::Debug for WriterCheckpoint<'_> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
f.debug_struct("WriterCheckpoint")
.field("epoch", &self.epoch())
.finish()
}
}
/// Uninhabited marker type for `Checkpoint` offsets (flatc-generated
/// convention; carries no runtime values).
pub enum CheckpointOffset {}
/// Zero-copy view over a `Checkpoint` table: a named, optionally expiring
/// reference to a specific manifest id, with an optional metadata union.
#[derive(Copy, Clone, PartialEq)]
pub struct Checkpoint<'a> {
pub _tab: flatbuffers::Table<'a>,
}
// Lets `Checkpoint` be resolved from a buffer offset (used by the
// `checkpoints` vectors in ManifestV1/V2).
impl<'a> flatbuffers::Follow<'a> for Checkpoint<'a> {
type Inner = Checkpoint<'a>;
#[inline]
// SAFETY contract (caller-upheld): `loc` must point at a valid Checkpoint
// table inside `buf`.
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self { _tab: flatbuffers::Table::new(buf, loc) }
}
}
impl<'a> Checkpoint<'a> {
// Vtable slot offsets, fixed by the schema's field order. `metadata_type` and
// `metadata` form a flatbuffers union (discriminant + payload) pair.
pub const VT_ID: flatbuffers::VOffsetT = 4;
pub const VT_MANIFEST_ID: flatbuffers::VOffsetT = 6;
pub const VT_CHECKPOINT_EXPIRE_TIME_S: flatbuffers::VOffsetT = 8;
pub const VT_CHECKPOINT_CREATE_TIME_S: flatbuffers::VOffsetT = 10;
pub const VT_METADATA_TYPE: flatbuffers::VOffsetT = 12;
pub const VT_METADATA: flatbuffers::VOffsetT = 14;
pub const VT_NAME: flatbuffers::VOffsetT = 16;
/// Wraps an already-located table handle.
/// # Safety
/// `table` must reference a valid `Checkpoint` table.
#[inline]
pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
Checkpoint { _tab: table }
}
/// Serializes `args` as a new `Checkpoint` table and returns its offset.
/// `args.metadata` must be an offset of the variant named by
/// `args.metadata_type` — the pairing is not checked here.
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
args: &'args CheckpointArgs<'args>
) -> flatbuffers::WIPOffset<Checkpoint<'bldr>> {
let mut builder = CheckpointBuilder::new(_fbb);
builder.add_manifest_id(args.manifest_id);
if let Some(x) = args.name { builder.add_name(x); }
if let Some(x) = args.metadata { builder.add_metadata(x); }
builder.add_checkpoint_create_time_s(args.checkpoint_create_time_s);
builder.add_checkpoint_expire_time_s(args.checkpoint_expire_time_s);
if let Some(x) = args.id { builder.add_id(x); }
builder.add_metadata_type(args.metadata_type);
builder.finish()
}
/// Required `id` field; `.unwrap()` is backed by the verifier and by
/// `CheckpointBuilder::finish` both marking it required.
#[inline]
pub fn id(&self) -> Uuid<'a> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<Uuid>>(Checkpoint::VT_ID, None).unwrap()}
}
/// Scalar `manifest_id` the checkpoint refers to; defaults to 0.
#[inline]
pub fn manifest_id(&self) -> u64 {
unsafe { self._tab.get::<u64>(Checkpoint::VT_MANIFEST_ID, Some(0)).unwrap()}
}
/// Scalar `checkpoint_expire_time_s`; defaults to 0 (units per the name are
/// seconds — epoch basis not visible here).
#[inline]
pub fn checkpoint_expire_time_s(&self) -> u32 {
unsafe { self._tab.get::<u32>(Checkpoint::VT_CHECKPOINT_EXPIRE_TIME_S, Some(0)).unwrap()}
}
/// Scalar `checkpoint_create_time_s`; defaults to 0.
#[inline]
pub fn checkpoint_create_time_s(&self) -> u32 {
unsafe { self._tab.get::<u32>(Checkpoint::VT_CHECKPOINT_CREATE_TIME_S, Some(0)).unwrap()}
}
/// Union discriminant for `metadata`; `NONE` when no payload is present.
#[inline]
pub fn metadata_type(&self) -> CheckpointMetadata {
unsafe { self._tab.get::<CheckpointMetadata>(Checkpoint::VT_METADATA_TYPE, Some(CheckpointMetadata::NONE)).unwrap()}
}
/// Raw, untyped union payload. Must only be interpreted according to
/// `metadata_type` — use `metadata_as_writer_checkpoint` instead of this.
#[inline]
pub fn metadata(&self) -> Option<flatbuffers::Table<'a>> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Table<'a>>>(Checkpoint::VT_METADATA, None)}
}
/// Optional human-readable `name`; `None` when unset.
#[inline]
pub fn name(&self) -> Option<&'a str> {
unsafe { self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(Checkpoint::VT_NAME, None)}
}
/// Typed union accessor: returns the payload as a `WriterCheckpoint` only
/// when the discriminant says that is the active variant.
#[inline]
#[allow(non_snake_case)]
pub fn metadata_as_writer_checkpoint(&self) -> Option<WriterCheckpoint<'a>> {
if self.metadata_type() == CheckpointMetadata::WriterCheckpoint {
self.metadata().map(|t| {
// SAFETY: discriminant checked above, so `t` is a WriterCheckpoint table
// (guaranteed for buffers that passed verification).
unsafe { WriterCheckpoint::init_from_table(t) }
})
} else {
None
}
}
}
// Buffer verification for `Checkpoint`. `id` is required (`true` flag),
// matching the `.unwrap()` accessor and the builder's `finish`. The
// metadata_type/metadata pair is verified as a union: the payload is checked
// against whichever variant the discriminant names; unknown discriminants are
// accepted as-is (forward compatibility).
impl flatbuffers::Verifiable for Checkpoint<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<flatbuffers::ForwardsUOffset<Uuid>>("id", Self::VT_ID, true)?
.visit_field::<u64>("manifest_id", Self::VT_MANIFEST_ID, false)?
.visit_field::<u32>("checkpoint_expire_time_s", Self::VT_CHECKPOINT_EXPIRE_TIME_S, false)?
.visit_field::<u32>("checkpoint_create_time_s", Self::VT_CHECKPOINT_CREATE_TIME_S, false)?
.visit_union::<CheckpointMetadata, _>("metadata_type", Self::VT_METADATA_TYPE, "metadata", Self::VT_METADATA, false, |key, v, pos| {
match key {
CheckpointMetadata::WriterCheckpoint => v.verify_union_variant::<flatbuffers::ForwardsUOffset<WriterCheckpoint>>("CheckpointMetadata::WriterCheckpoint", pos),
_ => Ok(()),
}
})?
.visit_field::<flatbuffers::ForwardsUOffset<&str>>("name", Self::VT_NAME, false)?
.finish();
Ok(())
}
}
/// Argument bundle consumed by `Checkpoint::create`. `id` must be `Some` or
/// `CheckpointBuilder::finish` will fail its required-field check; when
/// `metadata` is set, `metadata_type` must name the matching union variant.
pub struct CheckpointArgs<'a> {
pub id: Option<flatbuffers::WIPOffset<Uuid<'a>>>,
pub manifest_id: u64,
pub checkpoint_expire_time_s: u32,
pub checkpoint_create_time_s: u32,
pub metadata_type: CheckpointMetadata,
pub metadata: Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>>,
pub name: Option<flatbuffers::WIPOffset<&'a str>>,
}
/// Schema defaults: unset offsets, zero scalars, `NONE` union discriminant.
/// The required `id` field also starts as `None` and must be populated before
/// building.
impl<'a> Default for CheckpointArgs<'a> {
#[inline]
fn default() -> Self {
Self {
name: None,
metadata: None,
metadata_type: CheckpointMetadata::NONE,
checkpoint_create_time_s: 0,
checkpoint_expire_time_s: 0,
manifest_id: 0,
id: None,
}
}
}
/// In-progress `Checkpoint` table writer.
pub struct CheckpointBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
// Setter methods for each Checkpoint field. Scalars use `push_slot` (slot is
// omitted when the value equals the schema default); offsets use
// `push_slot_always`. Callers setting `add_metadata` must also set the
// matching `add_metadata_type` — the pairing is not enforced here.
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> CheckpointBuilder<'a, 'b, A> {
#[inline]
pub fn add_id(&mut self, id: flatbuffers::WIPOffset<Uuid<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Uuid>>(Checkpoint::VT_ID, id);
}
#[inline]
pub fn add_manifest_id(&mut self, manifest_id: u64) {
self.fbb_.push_slot::<u64>(Checkpoint::VT_MANIFEST_ID, manifest_id, 0);
}
#[inline]
pub fn add_checkpoint_expire_time_s(&mut self, checkpoint_expire_time_s: u32) {
self.fbb_.push_slot::<u32>(Checkpoint::VT_CHECKPOINT_EXPIRE_TIME_S, checkpoint_expire_time_s, 0);
}
#[inline]
pub fn add_checkpoint_create_time_s(&mut self, checkpoint_create_time_s: u32) {
self.fbb_.push_slot::<u32>(Checkpoint::VT_CHECKPOINT_CREATE_TIME_S, checkpoint_create_time_s, 0);
}
#[inline]
pub fn add_metadata_type(&mut self, metadata_type: CheckpointMetadata) {
self.fbb_.push_slot::<CheckpointMetadata>(Checkpoint::VT_METADATA_TYPE, metadata_type, CheckpointMetadata::NONE);
}
#[inline]
pub fn add_metadata(&mut self, metadata: flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Checkpoint::VT_METADATA, metadata);
}
#[inline]
pub fn add_name(&mut self, name: flatbuffers::WIPOffset<&'b str>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Checkpoint::VT_NAME, name);
}
/// Opens a new table in the underlying builder; pair with `finish`.
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> CheckpointBuilder<'a, 'b, A> {
let start = _fbb.start_table();
CheckpointBuilder {
fbb_: _fbb,
start_: start,
}
}
/// Closes the table and asserts the required `id` field was written; must
/// mirror `run_verifier`.
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<Checkpoint<'a>> {
let o = self.fbb_.end_table(self.start_);
self.fbb_.required(o, Checkpoint::VT_ID,"id");
flatbuffers::WIPOffset::new(o.value())
}
}
// Debug output for `Checkpoint`. Unlike the other Debug impls, the `metadata`
// union payload is rendered via its typed accessor, with a diagnostic string
// when the discriminant and payload disagree (possible only for unverified
// buffers).
impl core::fmt::Debug for Checkpoint<'_> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
let mut ds = f.debug_struct("Checkpoint");
ds.field("id", &self.id());
ds.field("manifest_id", &self.manifest_id());
ds.field("checkpoint_expire_time_s", &self.checkpoint_expire_time_s());
ds.field("checkpoint_create_time_s", &self.checkpoint_create_time_s());
ds.field("metadata_type", &self.metadata_type());
match self.metadata_type() {
CheckpointMetadata::WriterCheckpoint => {
if let Some(x) = self.metadata_as_writer_checkpoint() {
ds.field("metadata", &x)
} else {
ds.field("metadata", &"InvalidFlatbuffer: Union discriminant does not match value.")
}
},
// NONE or an unknown future variant: render the payload as absent.
_ => {
let x: Option<()> = None;
ds.field("metadata", &x)
},
};
ds.field("name", &self.name());
ds.finish()
}
}