1use core::cell::UnsafeCell;
4
5use zencan_common::{sdo::AbortCode, AtomicCell, TimeDifference, TimeOfDay};
6
/// Abstraction over reading and writing the raw bytes of a single sub-object.
///
/// Implementors expose a sub-object's value as little-endian bytes for object
/// dictionary / SDO access. The `*_partial` methods support segmented
/// (multi-call) writes; their default implementations reject the operation
/// with [`AbortCode::UnsupportedAccess`].
pub trait SubObjectAccess: Sync + Send {
    /// Read bytes starting at `offset` into `buf`, returning how many bytes
    /// were copied. Most implementations in this file return `Ok(0)` when
    /// `offset` is past the end of the value rather than an error.
    fn read(&self, offset: usize, buf: &mut [u8]) -> Result<usize, AbortCode>;

    /// Current size of the readable value, in bytes.
    fn read_size(&self) -> usize;

    /// Overwrite the value with `data` in a single operation.
    fn write(&self, data: &[u8]) -> Result<(), AbortCode>;

    /// Begin a segmented write. Default: unsupported.
    fn begin_partial(&self) -> Result<(), AbortCode> {
        Err(AbortCode::UnsupportedAccess)
    }

    /// Append one segment to an in-progress segmented write. Default: unsupported.
    fn write_partial(&self, _buf: &[u8]) -> Result<(), AbortCode> {
        Err(AbortCode::UnsupportedAccess)
    }

    /// Finish a segmented write started with [`Self::begin_partial`]. Default: unsupported.
    fn end_partial(&self) -> Result<(), AbortCode> {
        Err(AbortCode::UnsupportedAccess)
    }
}
104
/// A single scalar value (integer, float, bool, or time type) held in an
/// [`AtomicCell`] so it can be shared between execution contexts.
#[allow(missing_debug_implementations)]
pub struct ScalarField<T: Copy> {
    // The stored value; all access goes through AtomicCell load/store.
    value: AtomicCell<T>,
}
110
impl<T: Send + Copy + PartialEq> ScalarField<T> {
    /// Return a copy of the stored value.
    pub fn load(&self) -> T {
        self.value.load()
    }

    /// Replace the stored value.
    pub fn store(&self, value: T) {
        self.value.store(value);
    }
}
122
impl<T: Copy + Default> Default for ScalarField<T> {
    /// Create a field holding `T::default()`.
    fn default() -> Self {
        Self {
            value: AtomicCell::default(),
        }
    }
}
130
/// Implement a `const fn new` constructor and [`SubObjectAccess`] for
/// `ScalarField<T>` for a primitive scalar type, using the type's
/// little-endian byte encoding.
macro_rules! impl_scalar_field {
    ($rust_type: ty) => {
        impl ScalarField<$rust_type> {
            /// Create a field initialized to `value`.
            pub const fn new(value: $rust_type) -> Self {
                Self {
                    value: AtomicCell::new(value),
                }
            }
        }
        impl SubObjectAccess for ScalarField<$rust_type> {
            fn read(&self, offset: usize, buf: &mut [u8]) -> Result<usize, AbortCode> {
                // Snapshot the value once, then serve the requested window.
                let bytes = self.value.load().to_le_bytes();
                if offset < bytes.len() {
                    let read_len = buf.len().min(bytes.len() - offset);
                    buf[0..read_len].copy_from_slice(&bytes[offset..offset + read_len]);
                    Ok(read_len)
                } else {
                    // Offset past the end is not an error; report 0 bytes.
                    Ok(0)
                }
            }

            fn read_size(&self) -> usize {
                core::mem::size_of::<$rust_type>()
            }

            fn write(&self, data: &[u8]) -> Result<(), AbortCode> {
                // `try_into` only succeeds when `data` is exactly the right
                // length; report which direction the mismatch went.
                // Fully qualify `size_of` here for consistency with
                // `read_size` above (bare `size_of` is prelude-only on
                // Rust >= 1.80).
                let value = <$rust_type>::from_le_bytes(data.try_into().map_err(|_| {
                    if data.len() < core::mem::size_of::<$rust_type>() {
                        AbortCode::DataTypeMismatchLengthLow
                    } else {
                        AbortCode::DataTypeMismatchLengthHigh
                    }
                })?);
                self.value.store(value);
                Ok(())
            }
        }
    };
}
171
// Instantiate SubObjectAccess for every primitive scalar type that provides
// to_le_bytes/from_le_bytes. bool and the time types need bespoke impls below.
impl_scalar_field!(u8);
impl_scalar_field!(u16);
impl_scalar_field!(u32);
impl_scalar_field!(u64);
impl_scalar_field!(i8);
impl_scalar_field!(i16);
impl_scalar_field!(i32);
impl_scalar_field!(i64);
impl_scalar_field!(f32);
impl_scalar_field!(f64);
182
183impl SubObjectAccess for ScalarField<bool> {
185 fn read(&self, offset: usize, buf: &mut [u8]) -> Result<usize, AbortCode> {
186 let value = self.value.load();
187 if offset != 0 || buf.len() > 1 {
188 return Err(AbortCode::DataTypeMismatchLengthHigh);
189 }
190 buf[0] = if value { 1 } else { 0 };
191 Ok(1)
192 }
193
194 fn read_size(&self) -> usize {
195 1
196 }
197
198 fn write(&self, data: &[u8]) -> Result<(), AbortCode> {
199 if data.len() != 1 {
200 return Err(AbortCode::DataTypeMismatchLengthHigh);
201 }
202 let value = data[0] != 0;
203 self.value.store(value);
204 Ok(())
205 }
206}
207
208impl SubObjectAccess for ScalarField<TimeDifference> {
209 fn read(&self, offset: usize, buf: &mut [u8]) -> Result<usize, AbortCode> {
210 let value = self.value.load();
211 let bytes = value.to_le_bytes();
212 if offset < bytes.len() {
213 let read_len = buf.len().min(bytes.len() - offset);
214 buf[0..read_len].copy_from_slice(&bytes[offset..offset + read_len]);
215 Ok(read_len)
216 } else {
217 Ok(0)
218 }
219 }
220
221 fn read_size(&self) -> usize {
222 6
223 }
224
225 fn write(&self, data: &[u8]) -> Result<(), AbortCode> {
226 let value = TimeDifference::from_le_bytes(data.try_into().map_err(|_| {
227 if data.len() < 6 {
228 AbortCode::DataTypeMismatchLengthLow
229 } else {
230 AbortCode::DataTypeMismatchLengthHigh
231 }
232 })?);
233 self.value.store(value);
234 Ok(())
235 }
236}
237
impl ScalarField<TimeDifference> {
    /// Create a field initialized to `value`.
    pub const fn new(value: TimeDifference) -> Self {
        Self {
            value: AtomicCell::new(value),
        }
    }
}
246
247impl SubObjectAccess for ScalarField<TimeOfDay> {
248 fn read(&self, offset: usize, buf: &mut [u8]) -> Result<usize, AbortCode> {
249 let value = self.value.load();
250 let bytes = value.to_le_bytes();
251 if offset < bytes.len() {
252 let read_len = buf.len().min(bytes.len() - offset);
253 buf[0..read_len].copy_from_slice(&bytes[offset..offset + read_len]);
254 Ok(read_len)
255 } else {
256 Ok(0)
257 }
258 }
259
260 fn read_size(&self) -> usize {
261 6
262 }
263
264 fn write(&self, data: &[u8]) -> Result<(), AbortCode> {
265 let value = TimeOfDay::from_le_bytes(data.try_into().map_err(|_| {
266 if data.len() < 6 {
267 AbortCode::DataTypeMismatchLengthLow
268 } else {
269 AbortCode::DataTypeMismatchLengthHigh
270 }
271 })?);
272 self.value.store(value);
273 Ok(())
274 }
275}
276
impl ScalarField<TimeOfDay> {
    /// Create a field initialized to `value`.
    pub const fn new(value: TimeOfDay) -> Self {
        Self {
            value: AtomicCell::new(value),
        }
    }
}
285
/// A fixed-size byte-array sub-object supporting whole and segmented writes.
///
/// The bytes live in an `UnsafeCell` and are only ever accessed inside a
/// `critical_section`, which provides the mutual exclusion that makes the
/// manual `Sync` impl below sound.
#[allow(clippy::len_without_is_empty, missing_debug_implementations)]
pub struct ByteField<const N: usize> {
    // Backing storage; guarded by critical_section in every access path.
    value: UnsafeCell<[u8; N]>,
    // Byte offset of an in-flight partial (segmented) write, or None when no
    // partial write is active.
    write_offset: AtomicCell<Option<usize>>,
}
294
// SAFETY: the inner `[u8; N]` is only read or written inside
// `critical_section::with`, which serializes access; `write_offset` is an
// AtomicCell and is safe to share on its own.
unsafe impl<const N: usize> Sync for ByteField<N> {}
296
impl<const N: usize> ByteField<N> {
    /// Create a field with the given initial contents.
    pub const fn new(value: [u8; N]) -> Self {
        Self {
            value: UnsafeCell::new(value),
            write_offset: AtomicCell::new(None),
        }
    }

    /// Fixed capacity of the field, in bytes.
    pub fn len(&self) -> usize {
        N
    }

    /// Replace the entire contents, cancelling any in-progress partial write.
    pub fn store(&self, value: [u8; N]) {
        // Clear the partial-write state first so the copy below starts from
        // a clean slate.
        self.write_offset.store(None);
        critical_section::with(|_| {
            // SAFETY: exclusive access is guaranteed by the critical section.
            let bytes = unsafe { &mut *self.value.get() };
            bytes.copy_from_slice(&value);
        });
    }

    /// Return a copy of the current contents.
    pub fn load(&self) -> [u8; N] {
        // SAFETY: exclusive access is guaranteed by the critical section.
        critical_section::with(|_| unsafe { *self.value.get() })
    }
}
326
327impl<const N: usize> Default for ByteField<N> {
328 fn default() -> Self {
329 Self {
330 value: UnsafeCell::new([0; N]),
331 write_offset: AtomicCell::new(None),
332 }
333 }
334}
335
impl<const N: usize> SubObjectAccess for ByteField<N> {
    fn read(&self, offset: usize, buf: &mut [u8]) -> Result<usize, AbortCode> {
        critical_section::with(|_| {
            // SAFETY: exclusive access is guaranteed by the critical section.
            let bytes = unsafe { &*self.value.get() };
            if bytes.len() > offset {
                // Copy as much of the tail as the caller's buffer can hold.
                let read_len = buf.len().min(bytes.len() - offset);
                buf[..read_len].copy_from_slice(&bytes[offset..offset + read_len]);
                Ok(read_len)
            } else {
                // Offset past the end: nothing to read.
                Ok(0)
            }
        })
    }

    fn read_size(&self) -> usize {
        N
    }

    fn write(&self, data: &[u8]) -> Result<(), AbortCode> {
        critical_section::with(|_| {
            // SAFETY: exclusive access is guaranteed by the critical section.
            let bytes = unsafe { &mut *self.value.get() };
            if data.len() > bytes.len() {
                return Err(AbortCode::DataTypeMismatchLengthHigh);
            }
            // NOTE(review): a short write only overwrites the leading bytes;
            // the tail keeps its previous contents.
            bytes[..data.len()].copy_from_slice(data);
            Ok(())
        })
    }

    fn begin_partial(&self) -> Result<(), AbortCode> {
        // Start a new segmented download at offset 0.
        self.write_offset.store(Some(0));
        Ok(())
    }

    fn write_partial(&self, buf: &[u8]) -> Result<(), AbortCode> {
        // Atomically claim [offset, offset + buf.len()) for this segment.
        // The closure always returns Some, so fetch_update cannot fail and
        // the unwrap is safe; the returned value is the pre-update offset.
        let offset = self
            .write_offset
            .fetch_update(|old| Some(old.map(|x| x + buf.len())))
            .unwrap();
        if offset.is_none() {
            // write_partial without a preceding begin_partial.
            return Err(AbortCode::GeneralError);
        }
        let offset = offset.unwrap();
        // NOTE(review): the offset was already advanced above, so a rejected
        // over-long segment leaves write_offset past N until the next
        // begin_partial or store resets it — confirm that aborted transfers
        // always restart with begin_partial.
        if offset + buf.len() > N {
            return Err(AbortCode::DataTypeMismatchLengthHigh);
        }
        critical_section::with(|_| {
            // SAFETY: exclusive access is guaranteed by the critical section;
            // the bounds check above keeps the slice in range.
            let bytes = unsafe { &mut *self.value.get() };
            bytes[offset..offset + buf.len()].copy_from_slice(buf);
        });
        Ok(())
    }

    fn end_partial(&self) -> Result<(), AbortCode> {
        // Close out the segmented download.
        self.write_offset.store(None);
        Ok(())
    }
}
396
/// A byte field holding a null-terminated (visible-string-style) value.
///
/// Wraps a [`ByteField`]; reads stop at the first 0 byte, and writes shorter
/// than the capacity are automatically null-terminated.
#[allow(clippy::len_without_is_empty, missing_debug_implementations)]
pub struct NullTermByteField<const N: usize>(ByteField<N>);
402
403impl<const N: usize> NullTermByteField<N> {
404 pub const fn new(value: [u8; N]) -> Self {
406 Self(ByteField::new(value))
407 }
408
409 pub fn len(&self) -> usize {
411 N
412 }
413
414 pub fn load(&self) -> [u8; N] {
419 self.0.load()
420 }
421
422 pub fn store(&self, value: [u8; N]) {
424 self.0.store(value);
425 }
426
427 pub fn set_str(&self, value: &[u8]) -> Result<(), AbortCode> {
432 self.0.begin_partial()?;
433 self.0.write_partial(value)?;
434 if value.len() < N {
435 self.0.write_partial(&[0])?;
436 }
437 self.end_partial()?;
438 Ok(())
439 }
440}
441
442impl<const N: usize> Default for NullTermByteField<N> {
443 fn default() -> Self {
444 Self(ByteField::default())
445 }
446}
447
impl<const N: usize> SubObjectAccess for NullTermByteField<N> {
    fn read(&self, offset: usize, buf: &mut [u8]) -> Result<usize, AbortCode> {
        let size = self.0.read(offset, buf)?;
        // Truncate the reported size at the first null byte within the
        // window that was just read.
        let size = buf[0..size].iter().position(|b| *b == 0).unwrap_or(size);
        Ok(size)
    }

    fn read_size(&self) -> usize {
        // String length up to (not including) the terminator, or N when no
        // terminator is present.
        critical_section::with(|_| {
            // SAFETY: exclusive access is guaranteed by the critical section.
            let bytes = unsafe { &*self.0.value.get() };
            bytes.iter().position(|b| *b == 0).unwrap_or(bytes.len())
        })
    }

    fn write(&self, data: &[u8]) -> Result<(), AbortCode> {
        // Whole-value write: store the data and terminate it when shorter
        // than the capacity.
        self.0.begin_partial()?;
        self.0.write_partial(data)?;
        if data.len() < N {
            self.0.write_partial(&[0])?;
        }
        self.0.end_partial()?;
        Ok(())
    }

    fn begin_partial(&self) -> Result<(), AbortCode> {
        self.0.begin_partial()
    }

    fn write_partial(&self, data: &[u8]) -> Result<(), AbortCode> {
        self.0.write_partial(data)
    }

    fn end_partial(&self) -> Result<(), AbortCode> {
        // Append a terminator when the segmented write did not fill the
        // field.
        // NOTE(review): if no partial write is active, write_offset is None,
        // unwrap_or(0) < N holds, and the inner write_partial fails with
        // GeneralError — unlike ByteField::end_partial, which succeeds.
        // Confirm this difference is intended.
        if self.0.write_offset.load().unwrap_or(0) < N {
            self.0.write_partial(&[0])?;
        }
        self.0.end_partial()
    }
}
489
/// A read-only sub-object backed by a `'static` byte slice.
#[derive(Clone, Copy, Debug)]
pub struct ConstByteRefField {
    value: &'static [u8],
}
495
impl ConstByteRefField {
    /// Create a field referencing the given static bytes.
    pub const fn new(value: &'static [u8]) -> Self {
        Self { value }
    }
}
502
503impl SubObjectAccess for ConstByteRefField {
504 fn read(&self, offset: usize, buf: &mut [u8]) -> Result<usize, AbortCode> {
505 let read_len = buf.len().min(self.value.len() - offset);
506 buf[..read_len].copy_from_slice(&self.value[offset..offset + read_len]);
507 Ok(read_len)
508 }
509
510 fn read_size(&self) -> usize {
511 self.value.len()
512 }
513
514 fn write(&self, _data: &[u8]) -> Result<(), AbortCode> {
515 Err(AbortCode::ReadOnly)
516 }
517}
518
/// A read-only sub-object owning a fixed byte array.
#[derive(Debug)]
pub struct ConstField<const N: usize> {
    bytes: [u8; N],
}
527
impl<const N: usize> ConstField<N> {
    /// Create a field holding the given constant bytes.
    pub const fn new(bytes: [u8; N]) -> Self {
        Self { bytes }
    }
}
534
535impl<const N: usize> SubObjectAccess for ConstField<N> {
536 fn read(&self, offset: usize, buf: &mut [u8]) -> Result<usize, AbortCode> {
537 if offset < self.bytes.len() {
538 let read_len = buf.len().min(self.bytes.len() - offset);
539 buf[..read_len].copy_from_slice(&self.bytes[offset..offset + read_len]);
540 Ok(read_len)
541 } else {
542 Ok(0)
543 }
544 }
545
546 fn read_size(&self) -> usize {
547 N
548 }
549
550 fn write(&self, _data: &[u8]) -> Result<(), AbortCode> {
551 Err(AbortCode::ReadOnly)
552 }
553}
554
/// A sub-object whose access is delegated to a runtime-registered handler.
///
/// Until a handler is registered, reads/writes fail with
/// `ResourceNotAvailable` and `read_size` reports 0.
#[allow(missing_debug_implementations)]
pub struct CallbackSubObject {
    handler: AtomicCell<Option<&'static dyn SubObjectAccess>>,
}
560
561impl Default for CallbackSubObject {
562 fn default() -> Self {
563 Self::new()
564 }
565}
566
impl CallbackSubObject {
    /// Create an object with no handler registered.
    pub const fn new() -> Self {
        Self {
            handler: AtomicCell::new(None),
        }
    }

    /// Register the handler that will service all subsequent accesses.
    pub fn register_handler(&self, handler: &'static dyn SubObjectAccess) {
        self.handler.store(Some(handler));
    }
}
580
581impl SubObjectAccess for CallbackSubObject {
582 fn read(&self, offset: usize, buf: &mut [u8]) -> Result<usize, AbortCode> {
583 if let Some(handler) = self.handler.load() {
584 handler.read(offset, buf)
585 } else {
586 Err(AbortCode::ResourceNotAvailable)
587 }
588 }
589
590 fn read_size(&self) -> usize {
591 if let Some(handler) = self.handler.load() {
592 handler.read_size()
593 } else {
594 0
595 }
596 }
597
598 fn write(&self, data: &[u8]) -> Result<(), AbortCode> {
599 if let Some(handler) = self.handler.load() {
600 handler.write(data)
601 } else {
602 Err(AbortCode::ResourceNotAvailable)
603 }
604 }
605
606 fn begin_partial(&self) -> Result<(), AbortCode> {
607 if let Some(handler) = self.handler.load() {
608 handler.begin_partial()
609 } else {
610 Err(AbortCode::ResourceNotAvailable)
611 }
612 }
613
614 fn write_partial(&self, buf: &[u8]) -> Result<(), AbortCode> {
615 if let Some(handler) = self.handler.load() {
616 handler.write_partial(buf)
617 } else {
618 Err(AbortCode::ResourceNotAvailable)
619 }
620 }
621
622 fn end_partial(&self) -> Result<(), AbortCode> {
623 if let Some(handler) = self.handler.load() {
624 handler.end_partial()
625 } else {
626 Err(AbortCode::ResourceNotAvailable)
627 }
628 }
629}
630
#[cfg(test)]
mod tests {
    use zencan_common::objects::{ObjectCode, SubInfo};

    use crate::object_dict::{ObjectAccess, ProvidesSubObjects};

    use super::*;

    // A record object with a u32, a bool, and a 10-byte string sub-object,
    // used to exercise dispatch through ProvidesSubObjects.
    #[derive(Default)]
    struct ExampleRecord {
        val1: ScalarField<u32>,
        val2: ScalarField<bool>,
        val3: NullTermByteField<10>,
    }

    impl ProvidesSubObjects for ExampleRecord {
        fn get_sub_object(&self, sub: u8) -> Option<(SubInfo, &dyn SubObjectAccess)> {
            match sub {
                // Sub 0 is the conventional "highest sub-index" entry.
                0 => Some((
                    SubInfo::MAX_SUB_NUMBER,
                    const { &ConstField::new(3u8.to_le_bytes()) },
                )),
                1 => Some((SubInfo::new_u32().rw_access(), &self.val1)),
                2 => Some((SubInfo::new_u8().rw_access(), &self.val2)),
                3 => Some((
                    SubInfo::new_visibile_str(self.val3.len()).rw_access(),
                    &self.val3,
                )),
                _ => None,
            }
        }

        fn object_code(&self) -> ObjectCode {
            ObjectCode::Record
        }
    }

    #[test]
    fn test_record_with_provides_sub_objects() {
        let record = ExampleRecord::default();

        // Sub 0 reports the highest sub-index (3).
        assert_eq!(3, record.read_u8(0).unwrap());
        record.write(1, &42u32.to_le_bytes()).unwrap();
        assert_eq!(42, record.read_u32(1).unwrap());

        // Fill the string sub-object completely via a partial write.
        record.begin_partial(3).unwrap();
        record
            .write_partial(3, &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
            .unwrap();
        let mut buf = [0; 10];
        record.read(3, 0, &mut buf).unwrap();
        assert_eq!([0, 1, 2, 3, 4, 5, 6, 7, 8, 9], buf);
        // A shorter segmented write must be null-terminated automatically.
        record.begin_partial(3).unwrap();
        record.write_partial(3, &[0, 1, 2, 3]).unwrap();
        record.write_partial(3, &[4, 5, 6, 7]).unwrap();
        record.end_partial(3).unwrap();
        let mut buf = [0; 9];
        record.read(3, 0, &mut buf).unwrap();
        assert_eq!([0u8, 1, 2, 3, 4, 5, 6, 7, 0], buf)
    }

    // Shared read checks for any SubObjectAccess impl: full reads, reads at
    // an offset, and reads into a short buffer must all report the correct
    // size and bytes.
    fn sub_read_test_helper(field: &dyn SubObjectAccess, expected_bytes: &[u8]) {
        let n = expected_bytes.len();

        assert!(n > 2, "Expected bytes cannot be shorted than 2 bytes");

        assert_eq!(n, field.read_size());

        // Oversized buffer: read must stop at the value's length.
        let mut read_buf = vec![0xffu8; n + 10];
        let read_size = field.read(0, &mut read_buf).unwrap();
        assert_eq!(n, read_size);
        assert_eq!(expected_bytes, &read_buf[0..n]);

        // Repeat to confirm the read is non-destructive.
        let mut read_buf = vec![0xffu8; n + 10];
        let read_size = field.read(0, &mut read_buf).unwrap();
        assert_eq!(n, read_size);
        assert_eq!(expected_bytes, &read_buf[0..n]);

        // Read starting at an offset of 2.
        let mut read_buf = vec![0xffu8; n + 10];
        let read_size = field.read(2, &mut read_buf).unwrap();
        assert_eq!(n - 2, read_size);
        assert_eq!(&expected_bytes[2..], &read_buf[0..n - 2]);

        // Short buffer: read must be limited by the buffer length.
        let mut read_buf = vec![0xffu8; n - 2];
        let read_size = field.read(1, &mut read_buf).unwrap();
        assert_eq!(n - 2, read_size);
        assert_eq!(expected_bytes[1..n - 1], read_buf);
    }

    #[test]
    fn test_scalar_field() {
        let field = ScalarField::<u32>::new(42u32);

        let exp_bytes = 42u32.to_le_bytes();

        sub_read_test_helper(&field, &exp_bytes);
    }

    #[test]
    fn test_byte_field() {
        const N: usize = 10;
        let field = ByteField::new([0; N]);

        let write_data = Vec::from_iter(0u8..N as u8);
        field.write(&write_data).unwrap();

        sub_read_test_helper(&field, &write_data);
    }

    #[test]
    fn test_null_term_byte_field() {
        let field = NullTermByteField::new([0; 10]);
        // A full-capacity write has no terminator; reads return all N bytes.
        field.write(&[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]).unwrap();
        sub_read_test_helper(&field, &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
        // A shorter write is terminated, so reads stop after 4 bytes.
        field.write(&[1, 2, 3, 4]).unwrap();
        sub_read_test_helper(&field, &[1, 2, 3, 4]);
    }

    #[test]
    fn test_const_field() {
        let field = ConstField::new([1, 2, 3, 4, 5]);
        sub_read_test_helper(&field, &[1, 2, 3, 4, 5]);
    }

    #[test]
    fn test_const_byte_ref_field() {
        let field = ConstByteRefField::new(&[1, 2, 3, 4, 5]);
        sub_read_test_helper(&field, &[1, 2, 3, 4, 5]);
    }
}