use crate::bitwidth::{align, BitWidth};
use bytes::{BufMut, BytesMut};
use smallvec::SmallVec;
mod value;
use crate::FlexBufferType;
use std::cmp::max;
use value::{find_vector_type, store_value, Value};
mod map;
mod push;
mod ser;
mod vector;
use map::sort_map_by_keys;
pub use map::MapBuilder;
pub use push::Pushable;
pub use ser::{Error, FlexbufferSerializer};
pub use vector::VectorBuilder;
/// Generates `push_uints` / `push_ints` / `push_floats`: serializes a slice of
/// scalars into the buffer as a (possibly fixed-length) typed vector and pushes
/// the resulting vector value onto the builder's value stack.
macro_rules! push_slice {
    ($push_name: ident, $scalar: ty, $Val: ident, $new_vec: ident) => {
        fn $push_name<T, S>(&mut self, xs: S)
        where
            T: Into<$scalar> + Copy,
            S: AsRef<[T]>,
        {
            let mut value = Value::$new_vec(xs.as_ref().len());
            // Element stride: wide enough to hold the widest element.
            // `unwrap_or_default()` covers the empty-slice case.
            let mut width = xs
                .as_ref()
                .iter()
                .map(|x| BitWidth::from((*x).into()))
                .max()
                .unwrap_or_default();
            if !value.is_fixed_length_vector() {
                // Variable-length vectors carry an explicit length prefix,
                // stored at the same stride as the elements, so the stride
                // must also accommodate the length value.
                let length = Value::UInt(xs.as_ref().len() as u64);
                width = std::cmp::max(width, length.width_or_child_width());
                align(&mut self.buffer, width);
                store_value(&mut self.buffer, length, width);
            } else {
                // Fixed-length vectors encode their length in the type;
                // no prefix is written.
                align(&mut self.buffer, width);
            }
            // The vector's address is where the elements begin (after the
            // optional length prefix).
            let address = self.buffer.len();
            for &x in xs.as_ref().iter() {
                store_value(&mut self.buffer, Value::$Val(x.into()), width);
            }
            value.set_address_or_panic(address);
            value.set_child_width_or_panic(width);
            self.values.push(value);
        }
    };
}
/// Generates `push_indirect_int` / `push_indirect_uint` / `push_indirect_float`:
/// stores the scalar out-of-line in the buffer immediately and pushes a
/// `Value::Reference` to it, so a containing vector or map only needs to hold
/// an offset instead of widening every sibling element to this scalar's width.
macro_rules! push_indirect {
    ($push_name: ident, $scalar: ty, $Direct: ident, $Indirect: ident) => {
        fn $push_name<T: Into<$scalar>>(&mut self, x: T) {
            let x = Value::$Direct(x.into());
            let child_width = x.width_or_child_width();
            let address = self.buffer.len();
            store_value(&mut self.buffer, x, child_width);
            self.values.push(Value::Reference {
                address,
                child_width,
                fxb_type: FlexBufferType::$Indirect,
            });
        }
    };
}
bitflags! {
    /// Options controlling how the builder deduplicates data within the
    /// output flexbuffer to reduce its size.
    pub struct BuilderOptions: u8 {
        const SHARE_NONE = 0;
        /// Deduplicate map keys via the builder's key pool (the default;
        /// see `Builder::new` and `Builder::push_key`).
        const SHARE_KEYS = 1;
        /// NOTE(review): not consulted anywhere in this module chunk —
        /// presumably string deduplication; confirm where it is read.
        const SHARE_STRINGS = 2;
        const SHARE_KEYS_AND_STRINGS = 3;
        /// NOTE(review): not consulted in this chunk either — verify usage.
        const SHARE_KEY_VECTORS = 4;
        const SHARE_ALL = 7;
    }
}
impl Default for BuilderOptions {
fn default() -> Self {
Self::SHARE_KEYS
}
}
/// Buffer address of a previously written, null-terminated map key.
/// Entries are kept sorted by the key bytes they point at so that
/// `Builder::push_key` can binary-search and share identical keys.
#[derive(Debug, Clone, Copy)]
struct CachedKey(usize);
/// Incrementally builds a serialized flexbuffer.
#[derive(Debug, Clone)]
pub struct Builder {
    /// The output buffer holding the serialized flexbuffer bytes.
    buffer: BytesMut,
    /// Stack of values pushed but not yet committed into `buffer` as part of
    /// a vector, map, or root.
    values: SmallVec<[Value; 8]>,
    /// Sorted cache of written key addresses; `Some` only when
    /// `BuilderOptions::SHARE_KEYS` is set.
    key_pool: Option<SmallVec<[CachedKey; 8]>>,
}
impl Default for Builder {
    /// Equivalent to `Builder::new(&BuilderOptions::default())`.
    fn default() -> Self {
        Builder::new(&BuilderOptions::default())
    }
}
impl<'a> Builder {
    /// Creates a new `Builder` with the given data-sharing options.
    #[must_use]
    pub fn new(opts: &BuilderOptions) -> Self {
        // The key pool is only allocated when key sharing is requested;
        // otherwise every key is written to the buffer verbatim.
        let key_pool = if opts.contains(BuilderOptions::SHARE_KEYS) {
            Some(SmallVec::new())
        } else {
            None
        };
        Builder {
            key_pool,
            values: SmallVec::new(),
            buffer: BytesMut::new(),
        }
    }
    /// Shows the internal buffer: empty, or the most recently built flexbuffer.
    #[must_use]
    pub fn view(&self) -> &[u8] {
        &self.buffer
    }
    /// Hands the internal buffer to the caller, leaving an empty one behind.
    pub fn take_buffer(&mut self) -> BytesMut {
        // Idiom: `mem::take` replaces the manual swap-with-fresh-BytesMut.
        std::mem::take(&mut self.buffer)
    }
    /// Clears the buffer, the value stack, and the key pool (if any),
    /// retaining their allocations for reuse.
    pub fn reset(&mut self) {
        self.buffer.clear();
        self.values.clear();
        if let Some(pool) = self.key_pool.as_mut() {
            pool.clear();
        }
    }
    /// Writes a map key (null-terminated), sharing an identical previously
    /// written key when the key pool is enabled.
    fn push_key(&mut self, key: &str) {
        debug_assert!(
            key.bytes().all(|b| b != b'\0'),
            "Keys must not have internal nulls."
        );
        // The pool is kept sorted by the key bytes it points at, so an
        // existing key (or the insertion point for a new one) is found by
        // binary search against bytes already in the buffer.
        let found = self.key_pool.as_ref().map(|pool| {
            pool.binary_search_by(|&CachedKey(addr)| {
                let old_key = map::get_key(&self.buffer, addr);
                old_key.copied().cmp(key.bytes())
            })
        });
        let address = if let Some(Ok(idx)) = found {
            // Exact match: reuse the already-written key's address.
            self.key_pool.as_ref().unwrap()[idx].0
        } else {
            // New key, or sharing disabled: append key bytes + terminator.
            let address = self.buffer.len();
            self.buffer.extend_from_slice(key.as_bytes());
            self.buffer.put_u8(b'\0');
            address
        };
        if let Some(Err(idx)) = found {
            // Keep the pool sorted by inserting at the search's miss point.
            let pool = self.key_pool.as_mut().unwrap();
            pool.insert(idx, CachedKey(address));
        }
        self.values.push(Value::Key(address));
    }
    fn push_uint<T: Into<u64>>(&mut self, x: T) {
        self.values.push(Value::UInt(x.into()));
    }
    fn push_int<T: Into<i64>>(&mut self, x: T) {
        self.values.push(Value::Int(x.into()));
    }
    fn push_float<T: Into<f64>>(&mut self, x: T) {
        self.values.push(Value::Float(x.into()));
    }
    fn push_null(&mut self) {
        self.values.push(Value::Null);
    }
    fn push_bool(&mut self, x: bool) {
        self.values.push(Value::Bool(x));
    }
    /// Writes `xs` as a length-prefixed blob and returns a reference to it.
    /// The reference is NOT pushed onto the value stack (callers do that).
    fn store_blob(&mut self, xs: &[u8]) -> Value {
        let length = Value::UInt(xs.len() as u64);
        let width = length.width_or_child_width();
        align(&mut self.buffer, width);
        store_value(&mut self.buffer, length, width);
        let address = self.buffer.len();
        self.buffer.extend_from_slice(xs);
        Value::Reference {
            fxb_type: FlexBufferType::Blob,
            address,
            child_width: width,
        }
    }
    /// Strings are stored like blobs plus a null terminator.
    fn push_str(&mut self, x: &str) {
        let mut string = self.store_blob(x.as_bytes());
        self.buffer.put_u8(b'\0');
        string.set_fxb_type_or_panic(FlexBufferType::String);
        self.values.push(string);
    }
    fn push_blob(&mut self, x: &[u8]) {
        let blob = self.store_blob(x);
        self.values.push(blob);
    }
    /// Writes a bool vector: a length prefix followed by one element per bool.
    fn push_bools(&mut self, xs: &[bool]) {
        let length = Value::UInt(xs.len() as u64);
        let width = length.width_or_child_width();
        align(&mut self.buffer, width);
        store_value(&mut self.buffer, length, width);
        let address = self.buffer.len();
        for &b in xs {
            self.buffer.put_u8(u8::from(b));
            // Pad each element up to the vector's stride (assumes
            // `BitWidth as usize` == n_bytes - 1 — TODO confirm in bitwidth).
            self.buffer.resize(self.buffer.len() + width as usize, 0);
        }
        self.values.push(Value::Reference {
            fxb_type: FlexBufferType::VectorBool,
            address,
            child_width: width,
        });
    }
    push_slice!(push_uints, u64, UInt, new_uint_vector);
    push_slice!(push_ints, i64, Int, new_int_vector);
    push_slice!(push_floats, f64, Float, new_float_vector);
    push_indirect!(push_indirect_int, i64, Int, IndirectInt);
    push_indirect!(push_indirect_uint, u64, UInt, IndirectUInt);
    push_indirect!(push_indirect_float, f64, Float, IndirectFloat);
    /// Resets the builder and starts a flexbuffer with a vector at the root.
    pub fn start_vector(&'a mut self) -> VectorBuilder<'a> {
        self.reset();
        VectorBuilder {
            builder: self,
            start: None,
        }
    }
    /// Resets the builder and starts a flexbuffer with a map at the root.
    pub fn start_map(&'a mut self) -> MapBuilder<'a> {
        self.reset();
        MapBuilder {
            builder: self,
            start: None,
        }
    }
    /// Resets the builder and builds a flexbuffer whose root is the single
    /// value `p`.
    pub fn build_singleton<P: Pushable>(&mut self, p: P) {
        self.reset();
        p.push_to_builder(self);
        let root = self.values.pop().unwrap();
        store_root(&mut self.buffer, root);
    }
    fn push<P: Pushable>(&mut self, p: P) {
        p.push_to_builder(self);
    }
    /// Commits the values pushed since `previous_end` as a map or a vector.
    /// With `previous_end == None` this is the root container and the buffer
    /// is finalized; otherwise a reference to the new container is pushed
    /// back onto the value stack for the enclosing container.
    fn end_map_or_vector(&mut self, is_map: bool, previous_end: Option<usize>) {
        let split = previous_end.unwrap_or(0);
        let value = if is_map {
            // Maps hold alternating key/value stack entries; they are sorted
            // by key, then the keys vector is stored first so the values
            // vector can reference it.
            let key_vals = &mut self.values[split..];
            sort_map_by_keys(key_vals, &self.buffer);
            let key_vector = store_vector(&mut self.buffer, key_vals, &StoreOption::MapKeys);
            store_vector(&mut self.buffer, key_vals, &StoreOption::Map(key_vector))
        } else {
            store_vector(
                &mut self.buffer,
                &self.values[split..],
                &StoreOption::Vector,
            )
        };
        self.values.truncate(split);
        if previous_end.is_some() {
            self.values.push(value);
        } else {
            store_root(&mut self.buffer, value);
        }
    }
}
/// Builds a flexbuffer whose root is the single value `p` and returns the
/// serialized buffer.
pub fn singleton<P: Pushable>(p: P) -> BytesMut {
    let mut builder = Builder::default();
    builder.build_singleton(p);
    builder.buffer
}
/// Finalizes `buffer`: writes the root value followed by its packed-type byte
/// and its byte-width byte — the trailer every flexbuffer ends with.
#[allow(clippy::cast_possible_truncation)]
fn store_root(buffer: &mut BytesMut, root: Value) {
    let root_width = root.width_in_vector(buffer.len(), 0);
    align(buffer, root_width);
    store_value(buffer, root, root_width);
    buffer.put_u8(root.packed_type(root_width));
    // Cast is safe in practice: widths are small byte counts — hence the
    // clippy allow above.
    buffer.put_u8(root_width.n_bytes() as u8);
}
/// Selects which layout `store_vector` writes from a slice of values.
pub enum StoreOption {
    /// Store all entries as a vector (typed/fixed-length when possible).
    Vector,
    /// Store the odd-indexed entries of an interleaved key/value slice as a
    /// map's values vector; the inner `Value` is the previously stored keys
    /// vector it references.
    Map(Value),
    /// Store the even-indexed entries (the keys) of an interleaved
    /// key/value slice.
    MapKeys,
}
/// Writes `values` to `buffer` in one of three layouts (see [`StoreOption`]):
/// all entries as a vector, only the even-indexed entries as a map's keys
/// vector, or only the odd-indexed entries as a map's values vector that
/// references a previously stored keys vector. Returns the `Value`
/// describing the stored container.
#[allow(clippy::too_many_lines)]
#[inline]
pub fn store_vector(buffer: &mut BytesMut, values: &[Value], opt: &StoreOption) -> Value {
    match opt {
        StoreOption::Vector => {
            // Choose a typed / fixed-length vector type when the elements allow.
            let mut result = find_vector_type(values.iter());
            // Fixed-length vectors encode the length in the type: no prefix.
            let length_slot = if result.is_fixed_length_vector() {
                None
            } else {
                Some(Value::UInt(values.len() as u64))
            };
            // Stride must fit the length prefix and every element (references
            // measure their width from the position they will occupy).
            let mut width = BitWidth::W8;
            if let Some(l) = length_slot {
                width = max(width, l.width_or_child_width());
            }
            let prefix_length = result.prefix_length();
            for (i, &val) in values.iter().enumerate() {
                width = max(width, val.width_in_vector(buffer.len(), i + prefix_length));
            }
            align(buffer, width);
            if let Some(len) = length_slot {
                store_value(buffer, len, width);
            }
            let address = buffer.len();
            for &v in values {
                store_value(buffer, v, width);
            }
            // Untyped vectors (and maps) append one packed-type byte per
            // element after the elements.
            if result.is_typed_vector_or_map() {
                for v in values {
                    buffer.put_u8(v.packed_type(width));
                }
            }
            result.set_address_or_panic(address);
            result.set_child_width_or_panic(width);
            result
        }
        StoreOption::MapKeys => {
            // Only the even-indexed entries (the keys) participate.
            // Fix: type detection must also look only at the keys. It
            // previously scanned the full interleaved key/value slice,
            // which is inconsistent with the `.step_by(2)` used for the
            // length, widths, elements, and type bytes below, and could
            // misclassify the keys vector.
            let mut result = find_vector_type(values.iter().step_by(2));
            let length_slot = if result.is_fixed_length_vector() {
                None
            } else {
                Some(Value::UInt(values.iter().step_by(2).count() as u64))
            };
            let mut width = BitWidth::W8;
            if let Some(l) = length_slot {
                width = max(width, l.width_or_child_width());
            }
            let prefix_length = result.prefix_length();
            for (i, &val) in values.iter().step_by(2).enumerate() {
                width = max(width, val.width_in_vector(buffer.len(), i + prefix_length));
            }
            align(buffer, width);
            if let Some(len) = length_slot {
                store_value(buffer, len, width);
            }
            let address = buffer.len();
            for &v in values.iter().step_by(2) {
                store_value(buffer, v, width);
            }
            if result.is_typed_vector_or_map() {
                for v in values.iter().step_by(2) {
                    buffer.put_u8(v.packed_type(width));
                }
            }
            result.set_address_or_panic(address);
            result.set_child_width_or_panic(width);
            result
        }
        StoreOption::Map(keys) => {
            // Only the odd-indexed entries (the values) participate; `keys`
            // is the already-stored keys vector this map references.
            let mut result = Value::new_map();
            let length_slot = if result.is_fixed_length_vector() {
                None
            } else {
                let length = values.iter().skip(1).step_by(2).count();
                Some(Value::UInt(length as u64))
            };
            // Stride must also fit the reference to the keys vector, which
            // is stored in the map's prefix.
            let mut width = BitWidth::W8;
            width = max(width, keys.width_in_vector(buffer.len(), 0));
            if let Some(l) = length_slot {
                width = max(width, l.width_or_child_width());
            }
            let prefix_length = result.prefix_length();
            for (i, &val) in values.iter().skip(1).step_by(2).enumerate() {
                width = max(width, val.width_in_vector(buffer.len(), i + prefix_length));
            }
            align(buffer, width);
            // Map prefix: keys-vector offset, key byte width, then length.
            let key_width = Value::UInt(keys.width_or_child_width().n_bytes() as u64);
            store_value(buffer, *keys, width);
            store_value(buffer, key_width, width);
            if let Some(len) = length_slot {
                store_value(buffer, len, width);
            }
            let address = buffer.len();
            for &v in values.iter().skip(1).step_by(2) {
                store_value(buffer, v, width);
            }
            if result.is_typed_vector_or_map() {
                for v in values.iter().skip(1).step_by(2) {
                    buffer.put_u8(v.packed_type(width));
                }
            }
            result.set_address_or_panic(address);
            result.set_child_width_or_panic(width);
            result
        }
    }
}