// dharitri_sc/types/managed/basic/managed_buffer.rs
use crate::{
2 abi::{TypeAbi, TypeAbiFrom, TypeName},
3 api::{
4 use_raw_handle, ErrorApiImpl, HandleConstraints, InvalidSliceError, ManagedBufferApiImpl,
5 ManagedTypeApi, RawHandle, StaticVarApiImpl,
6 },
7 codec::{
8 DecodeErrorHandler, Empty, EncodeErrorHandler, NestedDecode, NestedDecodeInput,
9 NestedEncode, NestedEncodeOutput, TopDecode, TopDecodeInput, TopEncode, TopEncodeOutput,
10 TryStaticCast,
11 },
12 formatter::{
13 hex_util::encode_bytes_as_hex, FormatBuffer, FormatByteReceiver, SCBinary, SCDisplay,
14 SCLowerHex,
15 },
16 types::{
17 heap::BoxedBytes, ManagedBufferCachedBuilder, ManagedRef, ManagedRefMut, ManagedType,
18 StaticBufferRef,
19 },
20};
21
/// A byte buffer owned by the VM; the contract side stores only a handle to it.
///
/// `#[repr(transparent)]` guarantees the struct is layout-identical to its
/// handle, which the `transmute_from_handle_ref*` methods rely on.
#[repr(transparent)]
pub struct ManagedBuffer<M: ManagedTypeApi> {
    pub(crate) handle: M::ManagedBufferHandle,
}
27
impl<M: ManagedTypeApi> ManagedType<M> for ManagedBuffer<M> {
    type OwnHandle = M::ManagedBufferHandle;

    #[inline]
    unsafe fn from_handle(handle: M::ManagedBufferHandle) -> Self {
        ManagedBuffer { handle }
    }

    fn get_handle(&self) -> M::ManagedBufferHandle {
        self.handle.clone()
    }

    unsafe fn forget_into_handle(self) -> Self::OwnHandle {
        unsafe {
            // Move the handle out without running `self`'s destructor:
            // read it raw, then forget `self`, so ownership of the handle
            // is transferred to the caller exactly once.
            let handle = core::ptr::read(&self.handle);
            core::mem::forget(self);
            handle
        }
    }

    fn transmute_from_handle_ref(handle_ref: &M::ManagedBufferHandle) -> &Self {
        // Sound because of #[repr(transparent)] on ManagedBuffer: the struct
        // has the same layout as the handle it wraps.
        unsafe { core::mem::transmute(handle_ref) }
    }

    fn transmute_from_handle_ref_mut(handle_ref: &mut M::ManagedBufferHandle) -> &mut Self {
        // Same #[repr(transparent)] argument as above, mutable flavor.
        unsafe { core::mem::transmute(handle_ref) }
    }
}
56
impl<M: ManagedTypeApi> ManagedBuffer<M> {
    /// Creates a new, empty managed buffer.
    #[inline]
    pub fn new() -> Self {
        Self::new_from_bytes(&[])
    }

    /// Creates a new managed buffer holding a copy of `bytes`.
    #[inline]
    pub fn new_from_bytes(bytes: &[u8]) -> Self {
        unsafe {
            // The uninitialized handle is given contents on the very next
            // line, so it never escapes in an uninitialized state.
            let result = Self::new_uninit();
            M::managed_type_impl().mb_overwrite(result.get_handle(), bytes);
            result
        }
    }

    /// Creates a new managed buffer filled with `nr_bytes` random bytes,
    /// produced by the VM.
    #[inline]
    pub fn new_random(nr_bytes: usize) -> Self {
        unsafe {
            let result = Self::new_uninit();
            M::managed_type_impl().mb_set_random(result.get_handle(), nr_bytes);
            result
        }
    }

    /// Allocates a fresh handle without giving the buffer any contents.
    ///
    /// # Safety
    ///
    /// The caller must initialize the buffer (e.g. via `mb_overwrite` or
    /// `mb_set_random`) before its contents are read.
    pub unsafe fn new_uninit() -> Self {
        let new_handle: M::ManagedBufferHandle =
            use_raw_handle(M::static_var_api_impl().next_handle());
        ManagedBuffer::from_handle(new_handle)
    }

    /// Wraps a raw handle in a shared reference without taking ownership.
    ///
    /// # Safety
    ///
    /// The caller must guarantee `raw_handle` refers to a live buffer for
    /// as long as the returned reference is used; the `'static` lifetime
    /// here is not enforced by the type system.
    pub unsafe fn temp_const_ref(
        raw_handle: RawHandle,
    ) -> ManagedRef<'static, M, ManagedBuffer<M>> {
        ManagedRef::wrap_handle(use_raw_handle(raw_handle))
    }

    /// Mutable variant of [`ManagedBuffer::temp_const_ref`].
    ///
    /// # Safety
    ///
    /// Same requirements as `temp_const_ref`, and the caller must also
    /// ensure no other reference to the same buffer is in use.
    pub unsafe fn temp_const_ref_mut(
        raw_handle: RawHandle,
    ) -> ManagedRefMut<'static, M, ManagedBuffer<M>> {
        ManagedRefMut::wrap_handle(use_raw_handle(raw_handle))
    }

    /// Copies the buffer contents into the static buffer cache.
    ///
    /// Signals a contract error if the cache is too small for the contents
    /// or is already occupied.
    fn load_static_cache(&self) -> StaticBufferRef<M>
    where
        M: ManagedTypeApi,
    {
        StaticBufferRef::try_new_from_copy_bytes(self.len(), |dest_slice| {
            let _ = self.load_slice(0, dest_slice);
        })
        .unwrap_or_else(|| {
            M::error_api_impl().signal_error(b"static cache too small or already in use")
        })
    }

    /// Runs `f` over the buffer contents as a byte slice (copied into the
    /// static cache) and returns `f`'s result.
    pub fn with_buffer_contents<R, F>(&self, f: F) -> R
    where
        M: ManagedTypeApi,
        F: FnOnce(&[u8]) -> R,
    {
        let static_cache = self.load_static_cache();
        static_cache.with_buffer_contents(f)
    }

    /// Runs `f` over a mutable copy of the contents; the slice `f` returns
    /// is written back as the new buffer contents.
    pub fn with_buffer_contents_mut<F>(&mut self, f: F)
    where
        M: ManagedTypeApi,
        F: FnOnce(&mut [u8]) -> &[u8],
    {
        let static_cache = self.load_static_cache();
        static_cache.with_buffer_contents_mut(|buffer| {
            let result = f(buffer);
            self.overwrite(result);
        });
    }
}
147
148impl<M> From<&[u8]> for ManagedBuffer<M>
149where
150 M: ManagedTypeApi,
151{
152 #[inline]
153 fn from(bytes: &[u8]) -> Self {
154 Self::new_from_bytes(bytes)
155 }
156}
157
158impl<M> From<&str> for ManagedBuffer<M>
159where
160 M: ManagedTypeApi,
161{
162 #[inline]
163 fn from(s: &str) -> Self {
164 Self::new_from_bytes(s.as_bytes())
165 }
166}
167
168impl<M> From<BoxedBytes> for ManagedBuffer<M>
169where
170 M: ManagedTypeApi,
171{
172 #[inline]
173 fn from(bytes: BoxedBytes) -> Self {
174 Self::new_from_bytes(bytes.as_slice())
175 }
176}
177
178impl<M> From<Empty> for ManagedBuffer<M>
179where
180 M: ManagedTypeApi,
181{
182 #[inline]
183 fn from(_: Empty) -> Self {
184 Self::new()
185 }
186}
187
188impl<M, const N: usize> From<&[u8; N]> for ManagedBuffer<M>
190where
191 M: ManagedTypeApi,
192{
193 #[inline]
194 fn from(bytes: &[u8; N]) -> Self {
195 Self::new_from_bytes(bytes)
196 }
197}
198
199impl<M> From<crate::types::heap::Vec<u8>> for ManagedBuffer<M>
200where
201 M: ManagedTypeApi,
202{
203 #[inline]
204 fn from(bytes: crate::types::heap::Vec<u8>) -> Self {
205 Self::new_from_bytes(bytes.as_slice())
206 }
207}
208
209impl<M> From<crate::types::heap::String> for ManagedBuffer<M>
210where
211 M: ManagedTypeApi,
212{
213 #[inline]
214 fn from(s: crate::types::heap::String) -> Self {
215 Self::new_from_bytes(s.as_bytes())
216 }
217}
218
219impl<M> From<&crate::types::heap::String> for ManagedBuffer<M>
220where
221 M: ManagedTypeApi,
222{
223 #[inline]
224 fn from(s: &crate::types::heap::String) -> Self {
225 Self::new_from_bytes(s.as_bytes())
226 }
227}
228
229impl<M: ManagedTypeApi> Default for ManagedBuffer<M> {
230 #[inline]
231 fn default() -> Self {
232 Self::new()
233 }
234}
235
236impl<M: ManagedTypeApi> ManagedBuffer<M> {
237 #[inline]
238 pub fn len(&self) -> usize {
239 M::managed_type_impl().mb_len(self.handle.clone())
240 }
241
242 #[inline]
243 pub fn is_empty(&self) -> bool {
244 self.len() == 0
245 }
246
247 pub fn to_boxed_bytes(&self) -> BoxedBytes {
249 M::managed_type_impl().mb_to_boxed_bytes(self.handle.clone())
250 }
251
252 #[cfg(feature = "alloc")]
254 pub fn to_vec(&self) -> alloc::vec::Vec<u8> {
255 self.to_boxed_bytes().into_vec()
256 }
257
258 #[inline]
260 pub fn load_slice(
261 &self,
262 starting_position: usize,
263 dest_slice: &mut [u8],
264 ) -> Result<(), InvalidSliceError> {
265 M::managed_type_impl().mb_load_slice(self.handle.clone(), starting_position, dest_slice)
266 }
267
268 pub fn copy_slice(
269 &self,
270 starting_position: usize,
271 slice_len: usize,
272 ) -> Option<ManagedBuffer<M>> {
273 let api = M::managed_type_impl();
274 let result_handle = api.mb_new_empty();
275 let err_result = api.mb_copy_slice(
276 self.handle.clone(),
277 starting_position,
278 slice_len,
279 result_handle.clone(),
280 );
281 if err_result.is_ok() {
282 Some(unsafe { ManagedBuffer::from_handle(result_handle) })
283 } else {
284 None
285 }
286 }
287
288 pub fn load_to_byte_array<'a, const N: usize>(&self, array: &'a mut [u8; N]) -> &'a [u8] {
289 let len = self.len();
290 if len > N {
291 M::error_api_impl().signal_error(&b"failed to load to byte array"[..]);
292 }
293 let byte_slice = &mut array[..len];
294 let _ = self.load_slice(0, byte_slice);
295 byte_slice
296 }
297
298 pub fn for_each_batch<const BATCH_SIZE: usize, F: FnMut(&[u8])>(&self, mut f: F) {
300 let mut buffer = [0u8; BATCH_SIZE];
301 let arg_len = self.len();
302 let mut current_arg_index = 0;
303 while current_arg_index < arg_len {
304 let bytes_remaining = arg_len - current_arg_index;
305 let bytes_to_load = core::cmp::min(bytes_remaining, BATCH_SIZE);
306 let loaded_slice = &mut buffer[0..bytes_to_load];
307 let _ = self.load_slice(current_arg_index, loaded_slice);
308 f(loaded_slice);
309 current_arg_index += BATCH_SIZE;
310 }
311 }
312
313 #[inline]
314 pub fn overwrite(&mut self, value: &[u8]) {
315 M::managed_type_impl().mb_overwrite(self.handle.clone(), value);
316 }
317
318 pub fn set_slice(
319 &mut self,
320 starting_position: usize,
321 source_slice: &[u8],
322 ) -> Result<(), InvalidSliceError> {
323 if let Ok(()) = M::managed_type_impl().mb_set_slice(
324 self.handle.clone(),
325 starting_position,
326 source_slice,
327 ) {
328 Ok(())
329 } else {
330 Err(InvalidSliceError)
331 }
332 }
333
334 pub fn set_random(&mut self, nr_bytes: usize) {
335 M::managed_type_impl().mb_set_random(self.handle.clone(), nr_bytes);
336 }
337
338 #[inline]
339 pub fn append(&mut self, other: &ManagedBuffer<M>) {
340 M::managed_type_impl().mb_append(self.handle.clone(), other.handle.clone());
341 }
342
343 #[inline(always)]
344 pub fn append_bytes(&mut self, slice: &[u8]) {
345 M::managed_type_impl().mb_append_bytes(self.handle.clone(), slice);
346 }
347
348 pub fn append_u32_be(&mut self, item: u32) {
350 M::managed_type_impl().mb_append_bytes(self.handle.clone(), &item.to_be_bytes()[..]);
351 }
352
353 #[inline]
355 #[must_use]
356 pub fn concat(mut self, other: ManagedBuffer<M>) -> Self {
357 self.append(&other);
358 self
359 }
360
361 pub fn parse_as_u64(&self) -> Option<u64> {
365 const U64_NUM_BYTES: usize = 8;
366 let l = self.len();
367 if l > U64_NUM_BYTES {
368 return None;
369 }
370 let mut bytes = [0u8; U64_NUM_BYTES];
371 if M::managed_type_impl()
372 .mb_load_slice(self.handle.clone(), 0, &mut bytes[U64_NUM_BYTES - l..])
373 .is_err()
374 {
375 None
376 } else {
377 Some(u64::from_be_bytes(bytes))
378 }
379 }
380
381 pub fn hex_expr(&self) -> ManagedBuffer<M> {
384 let mut result = ManagedBufferCachedBuilder::new_from_slice(b"0x");
385 result.append_lower_hex(self);
386 result.into_managed_buffer()
387 }
388}
389
390impl<M: ManagedTypeApi> Clone for ManagedBuffer<M> {
391 fn clone(&self) -> Self {
392 let api = M::managed_type_impl();
393 let clone_handle = api.mb_new_empty();
394 api.mb_append(clone_handle.clone(), self.handle.clone());
395 unsafe { ManagedBuffer::from_handle(clone_handle) }
396 }
397}
398
399impl<M: ManagedTypeApi> PartialEq for ManagedBuffer<M> {
400 #[inline]
401 fn eq(&self, other: &Self) -> bool {
402 M::managed_type_impl().mb_eq(self.handle.clone(), other.handle.clone())
403 }
404}
405
406impl<M: ManagedTypeApi> Eq for ManagedBuffer<M> {}
407
408impl<M: ManagedTypeApi, const N: usize> PartialEq<&[u8; N]> for ManagedBuffer<M> {
409 fn eq(&self, other: &&[u8; N]) -> bool {
410 if self.len() != N {
411 return false;
412 }
413 let mut self_bytes = [0u8; N];
414 let _ = M::managed_type_impl().mb_load_slice(self.handle.clone(), 0, &mut self_bytes[..]);
415 self_bytes[..] == other[..]
416 }
417}
418
419impl<M: ManagedTypeApi> PartialEq<[u8]> for ManagedBuffer<M> {
420 fn eq(&self, other: &[u8]) -> bool {
421 let other_mb = ManagedBuffer::new_from_bytes(other);
423 self == &other_mb
424 }
425}
426
427impl<M: ManagedTypeApi> TryStaticCast for ManagedBuffer<M> {}
428
impl<M: ManagedTypeApi> NestedEncode for ManagedBuffer<M> {
    /// Nested encoding: a 4-byte big-endian length prefix, then the contents.
    ///
    /// When the output supports managed buffers natively, the contents are
    /// passed through as a specialized value (no byte copy into contract
    /// memory); otherwise the bytes are copied to the heap and encoded via
    /// the generic `BoxedBytes` path.
    fn dep_encode_or_handle_err<O, H>(&self, dest: &mut O, h: H) -> Result<(), H::HandledErr>
    where
        O: NestedEncodeOutput,
        H: EncodeErrorHandler,
    {
        if O::supports_specialized_type::<Self>() {
            let len_bytes = (self.len() as u32).to_be_bytes();
            dest.write(&len_bytes[..]);
            dest.push_specialized((), self, h)
        } else {
            self.to_boxed_bytes().dep_encode_or_handle_err(dest, h)
        }
    }
}
444
impl<M: ManagedTypeApi> TopEncode for ManagedBuffer<M> {
    /// Top-level encoding: the raw contents, with no length prefix.
    ///
    /// Uses the output's specialized managed-buffer path when available;
    /// otherwise copies the bytes to the heap and writes them as a slice.
    #[inline]
    fn top_encode_or_handle_err<O, H>(&self, output: O, h: H) -> Result<(), H::HandledErr>
    where
        O: TopEncodeOutput,
        H: EncodeErrorHandler,
    {
        if O::supports_specialized_type::<Self>() {
            output.set_specialized(self, h)
        } else {
            output.set_slice_u8(self.to_boxed_bytes().as_slice());
            Ok(())
        }
    }
}
460
// ABI: a ManagedBuffer argument may be populated from plain byte slices,
// string slices, and fixed-size byte arrays.
impl<M> TypeAbiFrom<&[u8]> for ManagedBuffer<M> where M: ManagedTypeApi {}
impl<M> TypeAbiFrom<&str> for ManagedBuffer<M> where M: ManagedTypeApi {}
impl<M, const N: usize> TypeAbiFrom<&[u8; N]> for ManagedBuffer<M> where M: ManagedTypeApi {}

/// Declares `$other_ty` and `ManagedBuffer` ABI-interchangeable in both
/// directions, both by value and by reference.
macro_rules! managed_buffer_codec_from_impl_bi_di {
    ($other_ty:ty) => {
        impl<M: ManagedTypeApi> TypeAbiFrom<$other_ty> for ManagedBuffer<M> {}
        impl<M: ManagedTypeApi> TypeAbiFrom<&$other_ty> for ManagedBuffer<M> {}
        impl<M: ManagedTypeApi> TypeAbiFrom<ManagedBuffer<M>> for $other_ty {}
        impl<M: ManagedTypeApi> TypeAbiFrom<&ManagedBuffer<M>> for $other_ty {}
    };
}

managed_buffer_codec_from_impl_bi_di! {crate::types::heap::Vec<u8>}
managed_buffer_codec_from_impl_bi_di! {crate::types::heap::BoxedBytes}
managed_buffer_codec_from_impl_bi_di! {crate::types::heap::String}
477
impl<M: ManagedTypeApi> NestedDecode for ManagedBuffer<M> {
    /// Nested decoding, the mirror of `dep_encode_or_handle_err`.
    ///
    /// Uses the input's specialized managed-buffer path when available;
    /// otherwise decodes to heap bytes first, then copies them into a new
    /// managed buffer.
    fn dep_decode_or_handle_err<I, H>(input: &mut I, h: H) -> Result<Self, H::HandledErr>
    where
        I: NestedDecodeInput,
        H: DecodeErrorHandler,
    {
        if I::supports_specialized_type::<Self>() {
            input.read_specialized((), h)
        } else {
            let boxed_bytes = BoxedBytes::dep_decode_or_handle_err(input, h)?;
            Ok(Self::new_from_bytes(boxed_bytes.as_slice()))
        }
    }
}
492
impl<M: ManagedTypeApi> TopDecode for ManagedBuffer<M> {
    /// Top-level decoding: the raw input bytes become the buffer contents.
    ///
    /// Uses the input's specialized managed-buffer path when available;
    /// otherwise goes through a heap byte copy.
    fn top_decode_or_handle_err<I, H>(input: I, h: H) -> Result<Self, H::HandledErr>
    where
        I: TopDecodeInput,
        H: DecodeErrorHandler,
    {
        if I::supports_specialized_type::<Self>() {
            input.into_specialized(h)
        } else {
            Ok(ManagedBuffer::new_from_bytes(&input.into_boxed_slice_u8()))
        }
    }
}
506
impl<M> TypeAbiFrom<Self> for ManagedBuffer<M> where M: ManagedTypeApi {}
impl<M> TypeAbiFrom<&Self> for ManagedBuffer<M> where M: ManagedTypeApi {}

impl<M: ManagedTypeApi> TypeAbi for ManagedBuffer<M> {
    type Unmanaged = dharitri_sc_codec::Vec<u8>;

    /// ABI-facing type name.
    fn type_name() -> TypeName {
        "bytes".into()
    }

    /// Rust type name as emitted in generated code; `$API` is a placeholder
    /// substituted by the code generator.
    fn type_name_rust() -> TypeName {
        "ManagedBuffer<$API>".into()
    }
}
521
impl<M: ManagedTypeApi> SCDisplay for ManagedBuffer<M> {
    /// Appends the raw buffer contents to the format receiver.
    fn fmt<F: FormatByteReceiver>(&self, f: &mut F) {
        // The receiver may use a different handle type; cast (signalling a
        // contract error on mismatch) and pass by reference — no byte copy.
        let cast_handle = self.get_handle().cast_or_signal_error::<M, _>();
        let wrap_cast = unsafe { ManagedRef::wrap_handle(cast_handle) };
        f.append_managed_buffer(&wrap_cast);
    }
}
529
impl<M: ManagedTypeApi> SCLowerHex for ManagedBuffer<M> {
    /// Appends the buffer contents, hex-encoded (lowercase), to the
    /// format receiver.
    fn fmt<F: FormatByteReceiver>(&self, f: &mut F) {
        // Hex-encodes into the MBUF_TEMPORARY_1 scratch handle, avoiding a
        // fresh handle allocation per format call.
        // NOTE(review): assumes nothing else holds MBUF_TEMPORARY_1 live
        // across this call — confirm against other users of the constant.
        let hex_handle: M::ManagedBufferHandle =
            use_raw_handle(crate::api::const_handles::MBUF_TEMPORARY_1);
        M::managed_type_impl().mb_to_hex(self.handle.clone(), hex_handle.clone());
        let cast_handle = hex_handle.cast_or_signal_error::<M, _>();
        let wrap_cast = unsafe { ManagedRef::wrap_handle(cast_handle) };
        f.append_managed_buffer(&wrap_cast);
    }
}
540
impl<M: ManagedTypeApi> SCBinary for ManagedBuffer<M> {
    /// Appends the buffer contents in binary form to the format receiver.
    fn fmt<F: FormatByteReceiver>(&self, f: &mut F) {
        // Same handle-cast pattern as SCDisplay; the receiver performs the
        // binary rendering.
        let cast_handle = self.get_handle().cast_or_signal_error::<M, _>();
        let wrap_cast = unsafe { ManagedRef::wrap_handle(cast_handle) };
        f.append_managed_buffer_binary(&wrap_cast);
    }
}
549
550impl<M: ManagedTypeApi> core::fmt::Debug for ManagedBuffer<M> {
551 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
552 f.debug_struct("ManagedBuffer")
553 .field("handle", &self.handle.clone())
554 .field(
555 "hex-value",
556 &encode_bytes_as_hex(self.to_boxed_bytes().as_slice()),
557 )
558 .finish()
559 }
560}
561
impl<M: ManagedTypeApi> core::fmt::Display for ManagedBuffer<M> {
    /// Formats the contents as UTF-8 text.
    ///
    /// Signals a contract error (carrying the original raw bytes as the
    /// message) if the contents are not valid UTF-8.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        use crate::contract_base::ErrorHelper;

        let s = alloc::string::String::from_utf8(self.to_boxed_bytes().into_vec())
            .unwrap_or_else(|err| ErrorHelper::<M>::signal_error_with_message(err.as_bytes()));

        s.fmt(f)
    }
}