#![allow(dead_code)]
#![allow(unused_imports)]
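//! Automatically generated FlatBuffers bindings for the Arrow `SparseTensor`
//! IPC message: the sparse-index union (`SparseTensorIndex`), its COO and CSR
//! member tables, and the `SparseTensor` root table with its builder and
//! root/finish helpers. Regenerate with `flatc` from the Arrow format schema
//! rather than editing by hand.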
use crate::ipc::gen::Schema::*;
use crate::ipc::gen::Tensor::*;
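/// FlatBuffers union discriminant for the sparse index attached to a
/// `SparseTensor`: either coordinate-format (`SparseTensorIndexCOO`) or
/// compressed-sparse-row (`SparseMatrixIndexCSR`) indices, with `NONE`
/// meaning no index is present.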
#[allow(non_camel_case_types)]
#[repr(u8)]
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum SparseTensorIndex {
NONE = 0,
SparseTensorIndexCOO = 1,
SparseMatrixIndexCSR = 2,
}
const ENUM_MIN_SPARSE_TENSOR_INDEX: u8 = 0;
const ENUM_MAX_SPARSE_TENSOR_INDEX: u8 = 2;
impl<'a> flatbuffers::Follow<'a> for SparseTensorIndex {
type Inner = Self;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
flatbuffers::read_scalar_at::<Self>(buf, loc)
}
}
impl flatbuffers::EndianScalar for SparseTensorIndex {
#[inline]
fn to_little_endian(self) -> Self {
let n = u8::to_le(self as u8);
let p = &n as *const u8 as *const SparseTensorIndex;
unsafe { *p }
}
#[inline]
fn from_little_endian(self) -> Self {
let n = u8::from_le(self as u8);
let p = &n as *const u8 as *const SparseTensorIndex;
unsafe { *p }
}
}
impl flatbuffers::Push for SparseTensorIndex {
type Output = SparseTensorIndex;
#[inline]
fn push(&self, dst: &mut [u8], _rest: &[u8]) {
flatbuffers::emplace_scalar::<SparseTensorIndex>(dst, *self);
}
}
#[allow(non_camel_case_types)]
const ENUM_VALUES_SPARSE_TENSOR_INDEX: [SparseTensorIndex; 3] = [
SparseTensorIndex::NONE,
SparseTensorIndex::SparseTensorIndexCOO,
SparseTensorIndex::SparseMatrixIndexCSR,
];
#[allow(non_camel_case_types)]
const ENUM_NAMES_SPARSE_TENSOR_INDEX: [&'static str; 3] =
["NONE", "SparseTensorIndexCOO", "SparseMatrixIndexCSR"];
pub fn enum_name_sparse_tensor_index(e: SparseTensorIndex) -> &'static str {
let index: usize = e as usize;
ENUM_NAMES_SPARSE_TENSOR_INDEX[index]
}
pub struct SparseTensorIndexUnionTableOffset {}
pub enum SparseTensorIndexCOOOffset {}
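/// Coordinate-format (COO) sparse index. `indicesBuffer` locates the buffer in
/// the message body that holds the coordinates of every non-zero value; per the
/// Arrow format this is a row-major `[non_zero_length, ndim]` matrix of 64-bit
/// integer coordinates.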
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct SparseTensorIndexCOO<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for SparseTensorIndexCOO<'a> {
type Inner = SparseTensorIndexCOO<'a>;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self {
_tab: flatbuffers::Table { buf, loc },
}
}
}
impl<'a> SparseTensorIndexCOO<'a> {
#[inline]
pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
SparseTensorIndexCOO { _tab: table }
}
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
args: &'args SparseTensorIndexCOOArgs<'args>,
) -> flatbuffers::WIPOffset<SparseTensorIndexCOO<'bldr>> {
let mut builder = SparseTensorIndexCOOBuilder::new(_fbb);
if let Some(x) = args.indicesBuffer {
builder.add_indicesBuffer(x);
}
builder.finish()
}
pub const VT_INDICESBUFFER: flatbuffers::VOffsetT = 4;
#[inline]
pub fn indicesBuffer(&self) -> Option<&'a Buffer> {
self._tab
.get::<Buffer>(SparseTensorIndexCOO::VT_INDICESBUFFER, None)
}
}
pub struct SparseTensorIndexCOOArgs<'a> {
pub indicesBuffer: Option<&'a Buffer>,
}
impl<'a> Default for SparseTensorIndexCOOArgs<'a> {
#[inline]
fn default() -> Self {
SparseTensorIndexCOOArgs {
indicesBuffer: None,
}
}
}
pub struct SparseTensorIndexCOOBuilder<'a: 'b, 'b> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> SparseTensorIndexCOOBuilder<'a, 'b> {
#[inline]
pub fn add_indicesBuffer(&mut self, indicesBuffer: &'b Buffer) {
self.fbb_.push_slot_always::<&Buffer>(
SparseTensorIndexCOO::VT_INDICESBUFFER,
indicesBuffer,
);
}
#[inline]
pub fn new(
_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
) -> SparseTensorIndexCOOBuilder<'a, 'b> {
let start = _fbb.start_table();
SparseTensorIndexCOOBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<SparseTensorIndexCOO<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
pub enum SparseMatrixIndexCSROffset {}
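/// Compressed-sparse-row (CSR) index for a rank-2 tensor. `indptrBuffer`
/// locates the row-pointer array (one entry per row plus one), and
/// `indicesBuffer` locates the column indices of the non-zero values.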
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct SparseMatrixIndexCSR<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for SparseMatrixIndexCSR<'a> {
type Inner = SparseMatrixIndexCSR<'a>;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self {
_tab: flatbuffers::Table { buf, loc },
}
}
}
impl<'a> SparseMatrixIndexCSR<'a> {
#[inline]
pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
SparseMatrixIndexCSR { _tab: table }
}
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
args: &'args SparseMatrixIndexCSRArgs<'args>,
) -> flatbuffers::WIPOffset<SparseMatrixIndexCSR<'bldr>> {
let mut builder = SparseMatrixIndexCSRBuilder::new(_fbb);
if let Some(x) = args.indicesBuffer {
builder.add_indicesBuffer(x);
}
if let Some(x) = args.indptrBuffer {
builder.add_indptrBuffer(x);
}
builder.finish()
}
pub const VT_INDPTRBUFFER: flatbuffers::VOffsetT = 4;
pub const VT_INDICESBUFFER: flatbuffers::VOffsetT = 6;
#[inline]
pub fn indptrBuffer(&self) -> Option<&'a Buffer> {
self._tab
.get::<Buffer>(SparseMatrixIndexCSR::VT_INDPTRBUFFER, None)
}
#[inline]
pub fn indicesBuffer(&self) -> Option<&'a Buffer> {
self._tab
.get::<Buffer>(SparseMatrixIndexCSR::VT_INDICESBUFFER, None)
}
}
pub struct SparseMatrixIndexCSRArgs<'a> {
pub indptrBuffer: Option<&'a Buffer>,
pub indicesBuffer: Option<&'a Buffer>,
}
impl<'a> Default for SparseMatrixIndexCSRArgs<'a> {
#[inline]
fn default() -> Self {
SparseMatrixIndexCSRArgs {
indptrBuffer: None,
indicesBuffer: None,
}
}
}
pub struct SparseMatrixIndexCSRBuilder<'a: 'b, 'b> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> SparseMatrixIndexCSRBuilder<'a, 'b> {
#[inline]
pub fn add_indptrBuffer(&mut self, indptrBuffer: &'b Buffer) {
self.fbb_.push_slot_always::<&Buffer>(
SparseMatrixIndexCSR::VT_INDPTRBUFFER,
indptrBuffer,
);
}
#[inline]
pub fn add_indicesBuffer(&mut self, indicesBuffer: &'b Buffer) {
self.fbb_.push_slot_always::<&Buffer>(
SparseMatrixIndexCSR::VT_INDICESBUFFER,
indicesBuffer,
);
}
#[inline]
pub fn new(
_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
) -> SparseMatrixIndexCSRBuilder<'a, 'b> {
let start = _fbb.start_table();
SparseMatrixIndexCSRBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<SparseMatrixIndexCSR<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
pub enum SparseTensorOffset {}
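/// Root table of an Arrow sparse tensor message: the value type (`type_` union
/// plus its `type_type` discriminant), the `shape` as a vector of `TensorDim`,
/// the number of non-zero values, the sparse index (`sparseIndex` union plus
/// `sparseIndex_type`), and the `data` buffer holding the non-zero values.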
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct SparseTensor<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for SparseTensor<'a> {
type Inner = SparseTensor<'a>;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self {
_tab: flatbuffers::Table { buf, loc },
}
}
}
impl<'a> SparseTensor<'a> {
#[inline]
pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
SparseTensor { _tab: table }
}
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
args: &'args SparseTensorArgs<'args>,
) -> flatbuffers::WIPOffset<SparseTensor<'bldr>> {
let mut builder = SparseTensorBuilder::new(_fbb);
builder.add_non_zero_length(args.non_zero_length);
if let Some(x) = args.data {
builder.add_data(x);
}
if let Some(x) = args.sparseIndex {
builder.add_sparseIndex(x);
}
if let Some(x) = args.shape {
builder.add_shape(x);
}
if let Some(x) = args.type_ {
builder.add_type_(x);
}
builder.add_sparseIndex_type(args.sparseIndex_type);
builder.add_type_type(args.type_type);
builder.finish()
}
pub const VT_TYPE_TYPE: flatbuffers::VOffsetT = 4;
pub const VT_TYPE_: flatbuffers::VOffsetT = 6;
pub const VT_SHAPE: flatbuffers::VOffsetT = 8;
pub const VT_NON_ZERO_LENGTH: flatbuffers::VOffsetT = 10;
pub const VT_SPARSEINDEX_TYPE: flatbuffers::VOffsetT = 12;
pub const VT_SPARSEINDEX: flatbuffers::VOffsetT = 14;
pub const VT_DATA: flatbuffers::VOffsetT = 16;
#[inline]
pub fn type_type(&self) -> Type {
self._tab
.get::<Type>(SparseTensor::VT_TYPE_TYPE, Some(Type::NONE))
.unwrap()
}
#[inline]
pub fn type_(&self) -> Option<flatbuffers::Table<'a>> {
self._tab
.get::<flatbuffers::ForwardsUOffset<flatbuffers::Table<'a>>>(
SparseTensor::VT_TYPE_,
None,
)
}
#[inline]
pub fn shape(
&self,
) -> Option<flatbuffers::Vector<flatbuffers::ForwardsUOffset<TensorDim<'a>>>> {
self._tab.get::<flatbuffers::ForwardsUOffset<
flatbuffers::Vector<flatbuffers::ForwardsUOffset<TensorDim<'a>>>,
>>(SparseTensor::VT_SHAPE, None)
}
#[inline]
pub fn non_zero_length(&self) -> i64 {
self._tab
.get::<i64>(SparseTensor::VT_NON_ZERO_LENGTH, Some(0))
.unwrap()
}
#[inline]
pub fn sparseIndex_type(&self) -> SparseTensorIndex {
self._tab
.get::<SparseTensorIndex>(
SparseTensor::VT_SPARSEINDEX_TYPE,
Some(SparseTensorIndex::NONE),
)
.unwrap()
}
#[inline]
pub fn sparseIndex(&self) -> Option<flatbuffers::Table<'a>> {
self._tab
.get::<flatbuffers::ForwardsUOffset<flatbuffers::Table<'a>>>(
SparseTensor::VT_SPARSEINDEX,
None,
)
}
#[inline]
pub fn data(&self) -> Option<&'a Buffer> {
self._tab.get::<Buffer>(SparseTensor::VT_DATA, None)
}
#[inline]
#[allow(non_snake_case)]
pub fn type__as_null(&'a self) -> Option<Null> {
if self.type_type() == Type::Null {
self.type_().map(|u| Null::init_from_table(u))
} else {
None
}
}
#[inline]
#[allow(non_snake_case)]
pub fn type__as_int(&'a self) -> Option<Int> {
if self.type_type() == Type::Int {
self.type_().map(|u| Int::init_from_table(u))
} else {
None
}
}
#[inline]
#[allow(non_snake_case)]
pub fn type__as_floating_point(&'a self) -> Option<FloatingPoint> {
if self.type_type() == Type::FloatingPoint {
self.type_().map(|u| FloatingPoint::init_from_table(u))
} else {
None
}
}
#[inline]
#[allow(non_snake_case)]
pub fn type__as_binary(&'a self) -> Option<Binary> {
if self.type_type() == Type::Binary {
self.type_().map(|u| Binary::init_from_table(u))
} else {
None
}
}
#[inline]
#[allow(non_snake_case)]
pub fn type__as_utf_8(&'a self) -> Option<Utf8> {
if self.type_type() == Type::Utf8 {
self.type_().map(|u| Utf8::init_from_table(u))
} else {
None
}
}
#[inline]
#[allow(non_snake_case)]
pub fn type__as_bool(&'a self) -> Option<Bool> {
if self.type_type() == Type::Bool {
self.type_().map(|u| Bool::init_from_table(u))
} else {
None
}
}
#[inline]
#[allow(non_snake_case)]
pub fn type__as_decimal(&'a self) -> Option<Decimal> {
if self.type_type() == Type::Decimal {
self.type_().map(|u| Decimal::init_from_table(u))
} else {
None
}
}
#[inline]
#[allow(non_snake_case)]
pub fn type__as_date(&'a self) -> Option<Date> {
if self.type_type() == Type::Date {
self.type_().map(|u| Date::init_from_table(u))
} else {
None
}
}
#[inline]
#[allow(non_snake_case)]
pub fn type__as_time(&'a self) -> Option<Time> {
if self.type_type() == Type::Time {
self.type_().map(|u| Time::init_from_table(u))
} else {
None
}
}
#[inline]
#[allow(non_snake_case)]
pub fn type__as_timestamp(&'a self) -> Option<Timestamp> {
if self.type_type() == Type::Timestamp {
self.type_().map(|u| Timestamp::init_from_table(u))
} else {
None
}
}
#[inline]
#[allow(non_snake_case)]
pub fn type__as_interval(&'a self) -> Option<Interval> {
if self.type_type() == Type::Interval {
self.type_().map(|u| Interval::init_from_table(u))
} else {
None
}
}
#[inline]
#[allow(non_snake_case)]
pub fn type__as_list(&'a self) -> Option<List> {
if self.type_type() == Type::List {
self.type_().map(|u| List::init_from_table(u))
} else {
None
}
}
#[inline]
#[allow(non_snake_case)]
pub fn type__as_struct_(&'a self) -> Option<Struct_> {
if self.type_type() == Type::Struct_ {
self.type_().map(|u| Struct_::init_from_table(u))
} else {
None
}
}
#[inline]
#[allow(non_snake_case)]
pub fn type__as_union(&'a self) -> Option<Union> {
if self.type_type() == Type::Union {
self.type_().map(|u| Union::init_from_table(u))
} else {
None
}
}
#[inline]
#[allow(non_snake_case)]
pub fn type__as_fixed_size_binary(&'a self) -> Option<FixedSizeBinary> {
if self.type_type() == Type::FixedSizeBinary {
self.type_().map(|u| FixedSizeBinary::init_from_table(u))
} else {
None
}
}
#[inline]
#[allow(non_snake_case)]
pub fn type__as_fixed_size_list(&'a self) -> Option<FixedSizeList> {
if self.type_type() == Type::FixedSizeList {
self.type_().map(|u| FixedSizeList::init_from_table(u))
} else {
None
}
}
#[inline]
#[allow(non_snake_case)]
pub fn type__as_map(&'a self) -> Option<Map> {
if self.type_type() == Type::Map {
self.type_().map(|u| Map::init_from_table(u))
} else {
None
}
}
#[inline]
#[allow(non_snake_case)]
pub fn sparseIndex_as_sparse_tensor_index_coo(
&'a self,
) -> Option<SparseTensorIndexCOO> {
if self.sparseIndex_type() == SparseTensorIndex::SparseTensorIndexCOO {
self.sparseIndex()
.map(|u| SparseTensorIndexCOO::init_from_table(u))
} else {
None
}
}
#[inline]
#[allow(non_snake_case)]
pub fn sparseIndex_as_sparse_matrix_index_csr(
&'a self,
) -> Option<SparseMatrixIndexCSR> {
if self.sparseIndex_type() == SparseTensorIndex::SparseMatrixIndexCSR {
self.sparseIndex()
.map(|u| SparseMatrixIndexCSR::init_from_table(u))
} else {
None
}
}
}
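/// Arguments for `SparseTensor::create`. Union fields are expected to be set
/// in pairs: the `type_` and `sparseIndex` offsets together with their
/// `type_type` and `sparseIndex_type` discriminants.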
pub struct SparseTensorArgs<'a> {
pub type_type: Type,
pub type_: Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>>,
pub shape: Option<
flatbuffers::WIPOffset<
flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<TensorDim<'a>>>,
>,
>,
pub non_zero_length: i64,
pub sparseIndex_type: SparseTensorIndex,
pub sparseIndex: Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>>,
pub data: Option<&'a Buffer>,
}
impl<'a> Default for SparseTensorArgs<'a> {
#[inline]
fn default() -> Self {
SparseTensorArgs {
type_type: Type::NONE,
type_: None,
shape: None,
non_zero_length: 0,
sparseIndex_type: SparseTensorIndex::NONE,
sparseIndex: None,
data: None,
}
}
}
pub struct SparseTensorBuilder<'a: 'b, 'b> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> SparseTensorBuilder<'a, 'b> {
#[inline]
pub fn add_type_type(&mut self, type_type: Type) {
self.fbb_
.push_slot::<Type>(SparseTensor::VT_TYPE_TYPE, type_type, Type::NONE);
}
#[inline]
pub fn add_type_(
&mut self,
type_: flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>,
) {
self.fbb_
.push_slot_always::<flatbuffers::WIPOffset<_>>(SparseTensor::VT_TYPE_, type_);
}
#[inline]
pub fn add_shape(
&mut self,
shape: flatbuffers::WIPOffset<
flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset<TensorDim<'b>>>,
>,
) {
self.fbb_
.push_slot_always::<flatbuffers::WIPOffset<_>>(SparseTensor::VT_SHAPE, shape);
}
#[inline]
pub fn add_non_zero_length(&mut self, non_zero_length: i64) {
self.fbb_
.push_slot::<i64>(SparseTensor::VT_NON_ZERO_LENGTH, non_zero_length, 0);
}
#[inline]
pub fn add_sparseIndex_type(&mut self, sparseIndex_type: SparseTensorIndex) {
self.fbb_.push_slot::<SparseTensorIndex>(
SparseTensor::VT_SPARSEINDEX_TYPE,
sparseIndex_type,
SparseTensorIndex::NONE,
);
}
#[inline]
pub fn add_sparseIndex(
&mut self,
sparseIndex: flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>,
) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(
SparseTensor::VT_SPARSEINDEX,
sparseIndex,
);
}
#[inline]
pub fn add_data(&mut self, data: &'b Buffer) {
self.fbb_
.push_slot_always::<&Buffer>(SparseTensor::VT_DATA, data);
}
#[inline]
pub fn new(
_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
) -> SparseTensorBuilder<'a, 'b> {
let start = _fbb.start_table();
SparseTensorBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<SparseTensor<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
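/// Interprets `buf` as a finished FlatBuffer whose root table is a
/// `SparseTensor`.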
#[inline]
pub fn get_root_as_sparse_tensor<'a>(buf: &'a [u8]) -> SparseTensor<'a> {
flatbuffers::get_root::<SparseTensor<'a>>(buf)
}
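/// Like `get_root_as_sparse_tensor`, but for a buffer that begins with a
/// 4-byte size prefix.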
#[inline]
pub fn get_size_prefixed_root_as_sparse_tensor<'a>(buf: &'a [u8]) -> SparseTensor<'a> {
flatbuffers::get_size_prefixed_root::<SparseTensor<'a>>(buf)
}
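/// Finishes the builder with `root` as the buffer's root table (no file
/// identifier is written).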
#[inline]
pub fn finish_sparse_tensor_buffer<'a, 'b>(
fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
root: flatbuffers::WIPOffset<SparseTensor<'a>>,
) {
fbb.finish(root, None);
}
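/// Like `finish_sparse_tensor_buffer`, but additionally writes a 4-byte size
/// prefix ahead of the root.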
#[inline]
pub fn finish_size_prefixed_sparse_tensor_buffer<'a, 'b>(
fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
root: flatbuffers::WIPOffset<SparseTensor<'a>>,
) {
fbb.finish_size_prefixed(root, None);
}
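#[cfg(test)]
mod tests {
    use super::*;

    // Illustrative round-trip sketch: build a minimal `SparseTensor` carrying
    // an (empty) COO index, finish the buffer, and read it back through the
    // generated accessors. Only types defined in this module plus the core
    // `flatbuffers` builder API are used; a real message would also populate
    // `type_`, `shape`, `data`, and the index buffers.
    #[test]
    fn sparse_tensor_coo_round_trip() {
        let mut fbb = flatbuffers::FlatBufferBuilder::new();

        // An empty COO index table (`indicesBuffer` left unset for brevity).
        let coo =
            SparseTensorIndexCOO::create(&mut fbb, &SparseTensorIndexCOOArgs::default());

        // Root table: only the union discriminant, the union value, and
        // `non_zero_length` are set here.
        let root = SparseTensor::create(
            &mut fbb,
            &SparseTensorArgs {
                non_zero_length: 0,
                sparseIndex_type: SparseTensorIndex::SparseTensorIndexCOO,
                sparseIndex: Some(coo.as_union_value()),
                ..Default::default()
            },
        );
        finish_sparse_tensor_buffer(&mut fbb, root);

        // Read the finished bytes back through the generated reader.
        let tensor = get_root_as_sparse_tensor(fbb.finished_data());
        assert_eq!(tensor.non_zero_length(), 0);
        assert_eq!(
            tensor.sparseIndex_type(),
            SparseTensorIndex::SparseTensorIndexCOO
        );
        assert!(tensor.sparseIndex_as_sparse_tensor_index_coo().is_some());
    }
}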