1use super::*;
2
3impl<T: ArenaTypes> Arena<T> {
4 pub fn new() -> Self {
5 Arena {
6 young_entries: Vec::with_capacity(256),
7 yard_entries: Vec::with_capacity(64),
8 handoff_entries: Vec::with_capacity(64),
9 stable_entries: Vec::with_capacity(64),
10 scratch_young: Vec::new(),
11 scratch_yard: Vec::new(),
12 scratch_handoff: Vec::new(),
13 scratch_stable: Vec::new(),
14 peak_usage: ArenaUsage::default(),
15 alloc_space: AllocSpace::Young,
16 type_names: Vec::new(),
17 type_field_names: Vec::new(),
18 type_variant_names: Vec::new(),
19 type_variant_ctor_ids: Vec::new(),
20 ctor_to_type_variant: Vec::new(),
21 symbol_entries: Vec::new(),
22 type_aliases: Vec::new(),
23 }
24 }
25
26 pub fn clone_static(&self) -> Self {
31 Arena {
32 young_entries: Vec::with_capacity(64),
33 yard_entries: Vec::new(),
34 handoff_entries: Vec::new(),
35 stable_entries: self.stable_entries.clone(),
36 scratch_young: Vec::new(),
37 scratch_yard: Vec::new(),
38 scratch_handoff: Vec::new(),
39 scratch_stable: Vec::new(),
40 peak_usage: ArenaUsage::default(),
41 alloc_space: AllocSpace::Young,
42 type_names: self.type_names.clone(),
43 type_field_names: self.type_field_names.clone(),
44 type_variant_names: self.type_variant_names.clone(),
45 type_variant_ctor_ids: self.type_variant_ctor_ids.clone(),
46 ctor_to_type_variant: self.ctor_to_type_variant.clone(),
47 symbol_entries: self.symbol_entries.clone(),
48 type_aliases: self.type_aliases.clone(),
49 }
50 }
51
    /// Recursively copies `value` (and everything it references) out of
    /// `source` into this arena, returning the re-encoded value.
    ///
    /// Non-NaN-boxed values and values without a heap index are returned
    /// unchanged — they carry no arena reference to translate. Symbol-like
    /// entries (`Fn`, `Builtin`, `Namespace`) are also returned as-is;
    /// presumably their symbol indices are meaningful across arenas —
    /// TODO(review): confirm symbol tables are shared/compatible between
    /// `self` and `source`.
    pub fn deep_import(&mut self, value: NanValue, source: &Arena<T>) -> NanValue {
        if !value.is_nan_boxed() {
            return value;
        }
        let heap_idx = match value.heap_index() {
            Some(idx) => idx,
            None => return value,
        };

        // Clone the source entry up front so we can recurse into `self`
        // without holding a borrow of `source`'s storage.
        let entry = source.get(heap_idx).clone();
        match entry {
            ArenaEntry::Int(i) => NanValue::new_int(i, self),
            ArenaEntry::String(s) => {
                let idx = self.push(ArenaEntry::String(s));
                NanValue::new_string(idx)
            }
            ArenaEntry::Tuple(items) => {
                // Import each element, then re-push as a tuple in this arena.
                let imported: Vec<NanValue> =
                    items.iter().map(|v| self.deep_import(*v, source)).collect();
                let idx = self.push_tuple(imported);
                NanValue::new_tuple(idx)
            }
            ArenaEntry::List(_) => {
                // Lists are flattened via the source arena (so chained/segmented
                // list representations collapse), then rebuilt as a Flat list.
                let flat = source.list_to_vec_value(value);
                let imported: Vec<NanValue> =
                    flat.iter().map(|v| self.deep_import(*v, source)).collect();
                if imported.is_empty() {
                    // Empty lists use the immediate encoding, no heap entry.
                    NanValue::EMPTY_LIST
                } else {
                    let rc_items = Rc::new(imported);
                    let idx = self.push(ArenaEntry::List(ArenaList::Flat {
                        items: rc_items,
                        start: 0,
                    }));
                    NanValue::new_list(idx)
                }
            }
            ArenaEntry::Map(map) => {
                // Rebuild the map entry-by-entry, importing both keys and
                // values. The stored hash is reused unchanged — NOTE(review):
                // this assumes hashes are stable across arenas; confirm.
                let mut new_map = T::Map::new();
                for (hash, (k, v)) in map.iter() {
                    let ik = self.deep_import(*k, source);
                    let iv = self.deep_import(*v, source);
                    new_map = new_map.insert(*hash, (ik, iv));
                }
                let idx = self.push(ArenaEntry::Map(new_map));
                NanValue::new_map(idx)
            }
            ArenaEntry::Vector(items) => {
                let imported: Vec<NanValue> =
                    items.iter().map(|v| self.deep_import(*v, source)).collect();
                let idx = self.push(ArenaEntry::Vector(imported));
                NanValue::new_vector(idx)
            }
            ArenaEntry::Record { type_id, fields } => {
                // `type_id` is carried over verbatim — assumes both arenas
                // share the same type table (see `clone_static`).
                let imported: Vec<NanValue> = fields
                    .iter()
                    .map(|v| self.deep_import(*v, source))
                    .collect();
                let idx = self.push(ArenaEntry::Record {
                    type_id,
                    fields: imported,
                });
                NanValue::new_record(idx)
            }
            ArenaEntry::Variant {
                type_id,
                variant_id,
                fields,
            } => {
                let imported: Vec<NanValue> = fields
                    .iter()
                    .map(|v| self.deep_import(*v, source))
                    .collect();
                let idx = self.push(ArenaEntry::Variant {
                    type_id,
                    variant_id,
                    fields: imported,
                });
                NanValue::new_variant(idx)
            }
            ArenaEntry::Boxed(inner) => {
                let imported = self.deep_import(inner, source);
                let idx = self.push(ArenaEntry::Boxed(imported));
                // Preserve the original value's tag while pointing the
                // payload at the new arena slot.
                NanValue::encode(value.tag(), ARENA_REF_BIT | (idx as u64))
            }
            // Symbol-like entries are not copied; the original value is
            // returned untouched.
            ArenaEntry::Fn(_) | ArenaEntry::Builtin(_) | ArenaEntry::Namespace { .. } => value,
        }
    }
147
148 #[inline]
149 pub fn push(&mut self, entry: ArenaEntry<T>) -> u32 {
150 match &entry {
151 ArenaEntry::Fn(_) | ArenaEntry::Builtin(_) | ArenaEntry::Namespace { .. } => {}
152 _ => {
153 return match self.alloc_space {
154 AllocSpace::Young => {
155 let idx = self.young_entries.len() as u32;
156 self.young_entries.push(entry);
157 self.note_peak_usage();
158 Self::encode_index(HeapSpace::Young, idx)
159 }
160 AllocSpace::Yard => {
161 let idx = self.yard_entries.len() as u32;
162 self.yard_entries.push(entry);
163 self.note_peak_usage();
164 Self::encode_index(HeapSpace::Yard, idx)
165 }
166 AllocSpace::Handoff => {
167 let idx = self.handoff_entries.len() as u32;
168 self.handoff_entries.push(entry);
169 self.note_peak_usage();
170 Self::encode_index(HeapSpace::Handoff, idx)
171 }
172 };
173 }
174 }
175 match entry {
176 ArenaEntry::Fn(f) => self.push_symbol(ArenaSymbol::Fn(f)),
177 ArenaEntry::Builtin(name) => self.push_symbol(ArenaSymbol::Builtin(name)),
178 ArenaEntry::Namespace { name, members } => {
179 self.push_symbol(ArenaSymbol::Namespace { name, members })
180 }
181 _ => unreachable!("non-symbol entry already returned above"),
182 }
183 }
184
185 #[inline]
186 pub fn push_symbol(&mut self, symbol: ArenaSymbol<T>) -> u32 {
187 let idx = self.symbol_entries.len() as u32;
188 self.symbol_entries.push(symbol);
189 idx
190 }
191
192 #[inline]
193 pub fn get(&self, index: u32) -> &ArenaEntry<T> {
194 let (space, raw_index) = Self::decode_index(index);
195 match space {
196 HeapSpace::Young => &self.young_entries[raw_index as usize],
197 HeapSpace::Yard => &self.yard_entries[raw_index as usize],
198 HeapSpace::Handoff => &self.handoff_entries[raw_index as usize],
199 HeapSpace::Stable => &self.stable_entries[raw_index as usize],
200 }
201 }
202
203 #[inline]
204 pub fn get_mut(&mut self, index: u32) -> &mut ArenaEntry<T> {
205 let (space, raw_index) = Self::decode_index(index);
206 match space {
207 HeapSpace::Young => &mut self.young_entries[raw_index as usize],
208 HeapSpace::Yard => &mut self.yard_entries[raw_index as usize],
209 HeapSpace::Handoff => &mut self.handoff_entries[raw_index as usize],
210 HeapSpace::Stable => &mut self.stable_entries[raw_index as usize],
211 }
212 }
213
214 #[inline]
215 pub(crate) fn encode_index(space: HeapSpace, index: u32) -> u32 {
216 ((space as u32) << HEAP_SPACE_SHIFT) | index
217 }
218
219 #[inline]
220 pub(crate) fn encode_yard_index(index: u32) -> u32 {
221 Self::encode_index(HeapSpace::Yard, index)
222 }
223
224 #[inline]
225 pub(crate) fn encode_stable_index(index: u32) -> u32 {
226 Self::encode_index(HeapSpace::Stable, index)
227 }
228
229 #[inline]
230 pub(crate) fn encode_handoff_index(index: u32) -> u32 {
231 Self::encode_index(HeapSpace::Handoff, index)
232 }
233
234 #[inline]
235 pub(crate) fn decode_index(index: u32) -> (HeapSpace, u32) {
236 let space = match (index & HEAP_SPACE_MASK_U32) >> HEAP_SPACE_SHIFT {
237 0 => HeapSpace::Young,
238 1 => HeapSpace::Yard,
239 2 => HeapSpace::Handoff,
240 3 => HeapSpace::Stable,
241 _ => unreachable!("invalid heap space bits"),
242 };
243 (space, index & HEAP_INDEX_MASK_U32)
244 }
245
246 #[inline]
247 pub fn is_stable_index(index: u32) -> bool {
248 matches!(Self::decode_index(index).0, HeapSpace::Stable)
249 }
250
251 #[inline]
252 pub fn is_yard_index_in_region(&self, index: u32, mark: u32) -> bool {
253 let (space, raw_index) = Self::decode_index(index);
254 matches!(space, HeapSpace::Yard)
255 && raw_index >= mark
256 && raw_index < self.yard_entries.len() as u32
257 }
258
259 #[inline]
260 pub fn is_handoff_index_in_region(&self, index: u32, mark: u32) -> bool {
261 let (space, raw_index) = Self::decode_index(index);
262 matches!(space, HeapSpace::Handoff)
263 && raw_index >= mark
264 && raw_index < self.handoff_entries.len() as u32
265 }
266
267 #[inline]
268 pub fn is_young_index_in_region(&self, index: u32, mark: u32) -> bool {
269 let (space, raw_index) = Self::decode_index(index);
270 matches!(space, HeapSpace::Young)
271 && raw_index >= mark
272 && raw_index < self.young_entries.len() as u32
273 }
274
275 #[inline]
276 pub fn young_len(&self) -> usize {
277 self.young_entries.len()
278 }
279
280 #[inline]
281 pub fn yard_len(&self) -> usize {
282 self.yard_entries.len()
283 }
284
285 #[inline]
286 pub fn handoff_len(&self) -> usize {
287 self.handoff_entries.len()
288 }
289
290 #[inline]
291 pub fn stable_len(&self) -> usize {
292 self.stable_entries.len()
293 }
294
295 #[inline]
296 pub fn usage(&self) -> ArenaUsage {
297 ArenaUsage {
298 young: self.young_entries.len(),
299 yard: self.yard_entries.len(),
300 handoff: self.handoff_entries.len(),
301 stable: self.stable_entries.len(),
302 }
303 }
304
305 #[inline]
306 pub fn peak_usage(&self) -> ArenaUsage {
307 self.peak_usage
308 }
309
310 #[inline]
311 pub(crate) fn note_peak_usage(&mut self) {
312 let usage = self.usage();
313 self.peak_usage.young = self.peak_usage.young.max(usage.young);
314 self.peak_usage.yard = self.peak_usage.yard.max(usage.yard);
315 self.peak_usage.handoff = self.peak_usage.handoff.max(usage.handoff);
316 self.peak_usage.stable = self.peak_usage.stable.max(usage.stable);
317 }
318
319 #[inline]
320 pub(crate) fn take_u32_scratch(slot: &mut Vec<u32>, len: usize) -> Vec<u32> {
321 let mut scratch = core::mem::take(slot);
322 scratch.clear();
323 scratch.resize(len, u32::MAX);
324 scratch
325 }
326
327 #[inline]
328 pub(crate) fn recycle_u32_scratch(slot: &mut Vec<u32>, mut scratch: Vec<u32>) {
329 scratch.clear();
330 *slot = scratch;
331 }
332
333 #[inline]
334 pub fn is_frame_local_index(
335 &self,
336 index: u32,
337 arena_mark: u32,
338 yard_mark: u32,
339 handoff_mark: u32,
340 ) -> bool {
341 self.is_young_index_in_region(index, arena_mark)
342 || self.is_yard_index_in_region(index, yard_mark)
343 || self.is_handoff_index_in_region(index, handoff_mark)
344 }
345
346 pub fn with_alloc_space<R>(
347 &mut self,
348 space: AllocSpace,
349 f: impl FnOnce(&mut Arena<T>) -> R,
350 ) -> R {
351 let prev = self.alloc_space;
352 self.alloc_space = space;
353 let out = f(self);
354 self.alloc_space = prev;
355 out
356 }
357
358 pub fn push_i64(&mut self, val: i64) -> u32 {
361 self.push(ArenaEntry::Int(val))
362 }
363 pub fn push_string(&mut self, s: &str) -> u32 {
364 self.push(ArenaEntry::String(Rc::from(s)))
365 }
366 pub fn push_boxed(&mut self, val: NanValue) -> u32 {
367 self.push(ArenaEntry::Boxed(val))
368 }
369 pub fn push_record(&mut self, type_id: u32, fields: Vec<NanValue>) -> u32 {
370 self.push(ArenaEntry::Record { type_id, fields })
371 }
372 pub fn push_variant(&mut self, type_id: u32, variant_id: u16, fields: Vec<NanValue>) -> u32 {
373 self.push(ArenaEntry::Variant {
374 type_id,
375 variant_id,
376 fields,
377 })
378 }
379 pub fn push_list(&mut self, items: Vec<NanValue>) -> u32 {
380 self.push(ArenaEntry::List(ArenaList::Flat {
381 items: Rc::new(items),
382 start: 0,
383 }))
384 }
385 pub fn push_map(&mut self, map: T::Map) -> u32 {
386 self.push(ArenaEntry::Map(map))
387 }
388 pub fn push_tuple(&mut self, items: Vec<NanValue>) -> u32 {
389 self.push(ArenaEntry::Tuple(items))
390 }
391 pub fn push_vector(&mut self, items: Vec<NanValue>) -> u32 {
392 self.push(ArenaEntry::Vector(items))
393 }
394 pub fn push_fn(&mut self, f: Rc<T::Fn>) -> u32 {
395 self.push_symbol(ArenaSymbol::Fn(f))
396 }
397 pub fn push_builtin(&mut self, name: &str) -> u32 {
398 self.push_symbol(ArenaSymbol::Builtin(Rc::from(name)))
399 }
400 pub fn push_nullary_variant_symbol(&mut self, ctor_id: u32) -> u32 {
401 self.push_symbol(ArenaSymbol::NullaryVariant { ctor_id })
402 }
403
404 pub fn get_i64(&self, index: u32) -> i64 {
407 match self.get(index) {
408 ArenaEntry::Int(i) => *i,
409 _ => panic!("Arena: expected Int at {}", index),
410 }
411 }
412 pub fn get_string(&self, index: u32) -> &str {
413 match self.get(index) {
414 ArenaEntry::String(s) => s,
415 other => panic!("Arena: expected String at {} but found {:?}", index, other),
416 }
417 }
418 pub fn get_string_value(&self, value: NanValue) -> NanString<'_> {
419 if let Some(s) = value.small_string() {
420 s
421 } else {
422 NanString::Borrowed(self.get_string(value.arena_index()))
423 }
424 }
425 pub fn get_boxed(&self, index: u32) -> NanValue {
426 match self.get(index) {
427 ArenaEntry::Boxed(v) => *v,
428 _ => panic!("Arena: expected Boxed at {}", index),
429 }
430 }
431 pub fn get_record(&self, index: u32) -> (u32, &[NanValue]) {
432 match self.get(index) {
433 ArenaEntry::Record { type_id, fields } => (*type_id, fields),
434 _ => panic!("Arena: expected Record at {}", index),
435 }
436 }
437 pub fn get_variant(&self, index: u32) -> (u32, u16, &[NanValue]) {
438 match self.get(index) {
439 ArenaEntry::Variant {
440 type_id,
441 variant_id,
442 fields,
443 } => (*type_id, *variant_id, fields),
444 other => panic!("Arena: expected Variant at {} but found {:?}", index, other),
445 }
446 }
447 pub fn get_list(&self, index: u32) -> &ArenaList {
448 match self.get(index) {
449 ArenaEntry::List(items) => items,
450 _ => panic!("Arena: expected List at {}", index),
451 }
452 }
453 pub fn get_tuple(&self, index: u32) -> &[NanValue] {
454 match self.get(index) {
455 ArenaEntry::Tuple(items) => items,
456 _ => panic!("Arena: expected Tuple at {}", index),
457 }
458 }
459 pub fn get_vector(&self, index: u32) -> &[NanValue] {
460 match self.get(index) {
461 ArenaEntry::Vector(items) => items,
462 _ => panic!("Arena: expected Vector at {}", index),
463 }
464 }
465 pub fn get_vector_mut(&mut self, index: u32) -> &mut Vec<NanValue> {
466 match self.get_mut(index) {
467 ArenaEntry::Vector(items) => items,
468 _ => panic!("Arena: expected Vector at {}", index),
469 }
470 }
471 pub fn vector_ref_value(&self, value: NanValue) -> &[NanValue] {
472 if value.is_empty_vector_immediate() {
473 return &[];
474 }
475 self.get_vector(value.arena_index())
476 }
477 pub fn clone_vector_value(&self, value: NanValue) -> Vec<NanValue> {
478 if value.is_empty_vector_immediate() {
479 Vec::new()
480 } else {
481 self.get_vector(value.arena_index()).to_vec()
482 }
483 }
484 pub fn take_vector_value(&mut self, value: NanValue) -> Vec<NanValue> {
486 if value.is_empty_vector_immediate() {
487 Vec::new()
488 } else {
489 let index = value.arena_index();
490 std::mem::take(self.get_vector_mut(index))
491 }
492 }
493 pub fn get_map(&self, index: u32) -> &T::Map {
494 match self.get(index) {
495 ArenaEntry::Map(map) => map,
496 _ => panic!("Arena: expected Map at {}", index),
497 }
498 }
499 pub fn get_map_mut(&mut self, index: u32) -> &mut T::Map {
500 match self.get_mut(index) {
501 ArenaEntry::Map(map) => map,
502 _ => panic!("Arena: expected Map at {}", index),
503 }
504 }
    /// Borrows the map behind `map`. The immediate empty-map encoding has no
    /// arena entry, so it is served from a lazily-allocated, intentionally
    /// leaked `'static` empty `T::Map` shared by all callers.
    ///
    /// NOTE(review): a `static` inside a generic function is shared across
    /// ALL monomorphizations of `T` (unlike C++ templates). If `Arena` is
    /// ever instantiated with two different `T::Map` types, the second
    /// instantiation will reinterpret the first's leaked map as its own
    /// `T::Map` — type confusion / UB. Confirm only one `ArenaTypes`
    /// instantiation exists, or key the cache per type.
    ///
    /// NOTE(review): the null-check + store pair is not atomic as a unit;
    /// two threads racing here may each leak one box (last store wins).
    /// That is a bounded memory leak, not a correctness issue per se.
    pub fn map_ref_value(&self, map: NanValue) -> &T::Map {
        if map.is_empty_map_immediate() {
            use core::sync::atomic::{AtomicPtr, Ordering as AtomicOrdering};
            // Type-erased pointer to the shared leaked empty map.
            static EMPTY_MAP_PTR: AtomicPtr<()> = AtomicPtr::new(core::ptr::null_mut());

            let ptr = EMPTY_MAP_PTR.load(AtomicOrdering::Acquire);
            if !ptr.is_null() {
                // SAFETY: `ptr` was produced below from a leaked
                // `Box<T::Map>` and is never freed, so it is valid for
                // `'static` — but see the monomorphization caveat above:
                // this is only sound if every caller's `T::Map` is the same
                // concrete type.
                return unsafe { &*(ptr as *const T::Map) };
            }
            // First caller: allocate and leak an empty map for reuse.
            let boxed = alloc::boxed::Box::new(T::Map::new());
            let leaked: &'static T::Map = alloc::boxed::Box::leak(boxed);
            let new_ptr = leaked as *const T::Map as *mut ();
            EMPTY_MAP_PTR.store(new_ptr, AtomicOrdering::Release);
            leaked
        } else {
            self.get_map(map.arena_index())
        }
    }
527 pub fn clone_map_value(&self, map: NanValue) -> T::Map {
528 if map.is_empty_map_immediate() {
529 T::Map::new()
530 } else {
531 self.get_map(map.arena_index()).clone()
532 }
533 }
534 pub fn take_map_value(&mut self, map: NanValue) -> T::Map {
538 if map.is_empty_map_immediate() {
539 T::Map::new()
540 } else {
541 let index = map.arena_index();
542 std::mem::replace(self.get_map_mut(index), T::Map::new())
543 }
544 }
545 pub fn get_fn(&self, index: u32) -> &T::Fn {
546 match &self.symbol_entries[index as usize] {
547 ArenaSymbol::Fn(f) => f,
548 _ => panic!("Arena: expected Fn symbol at {}", index),
549 }
550 }
551 pub fn get_fn_rc(&self, index: u32) -> &Rc<T::Fn> {
552 match &self.symbol_entries[index as usize] {
553 ArenaSymbol::Fn(f) => f,
554 _ => panic!("Arena: expected Fn symbol at {}", index),
555 }
556 }
557 pub fn get_builtin(&self, index: u32) -> &str {
558 match &self.symbol_entries[index as usize] {
559 ArenaSymbol::Builtin(s) => s,
560 _ => panic!("Arena: expected Builtin symbol at {}", index),
561 }
562 }
563 pub fn get_namespace(&self, index: u32) -> (&str, &[(Rc<str>, NanValue)]) {
564 match &self.symbol_entries[index as usize] {
565 ArenaSymbol::Namespace { name, members } => (name, members),
566 _ => panic!("Arena: expected Namespace symbol at {}", index),
567 }
568 }
569 pub fn get_nullary_variant_ctor(&self, index: u32) -> u32 {
570 match &self.symbol_entries[index as usize] {
571 ArenaSymbol::NullaryVariant { ctor_id } => *ctor_id,
572 _ => panic!("Arena: expected NullaryVariant symbol at {}", index),
573 }
574 }
575
576 pub fn register_record_type(&mut self, name: &str, field_names: Vec<String>) -> u32 {
579 let id = self.type_names.len() as u32;
580 self.type_names.push(String::from(name));
581 self.type_field_names.push(field_names);
582 self.type_variant_names.push(Vec::new());
583 self.type_variant_ctor_ids.push(Vec::new());
584 id
585 }
586
587 pub fn register_sum_type(&mut self, name: &str, variant_names: Vec<String>) -> u32 {
588 let id = self.type_names.len() as u32;
589 self.type_names.push(String::from(name));
590 self.type_field_names.push(Vec::new());
591 let ctor_ids: Vec<u32> = (0..variant_names.len())
592 .map(|variant_idx| {
593 let ctor_id = self.ctor_to_type_variant.len() as u32;
594 self.ctor_to_type_variant.push((id, variant_idx as u16));
595 ctor_id
596 })
597 .collect();
598 self.type_variant_names.push(variant_names);
599 self.type_variant_ctor_ids.push(ctor_ids);
600 id
601 }
602
603 pub fn register_variant_name(&mut self, type_id: u32, variant_name: String) -> u16 {
604 let variants = &mut self.type_variant_names[type_id as usize];
605 let variant_id = variants.len() as u16;
606 variants.push(variant_name);
607
608 let ctor_id = self.ctor_to_type_variant.len() as u32;
609 self.ctor_to_type_variant.push((type_id, variant_id));
610 self.type_variant_ctor_ids[type_id as usize].push(ctor_id);
611
612 variant_id
613 }
614
615 pub fn get_type_name(&self, type_id: u32) -> &str {
616 &self.type_names[type_id as usize]
617 }
618 pub fn type_count(&self) -> u32 {
619 self.type_names.len() as u32
620 }
621 pub fn get_field_names(&self, type_id: u32) -> &[String] {
622 &self.type_field_names[type_id as usize]
623 }
624 pub fn get_variant_name(&self, type_id: u32, variant_id: u16) -> &str {
625 &self.type_variant_names[type_id as usize][variant_id as usize]
626 }
627 pub fn register_type_alias(&mut self, alias: &str, type_id: u32) {
628 self.type_aliases.push((alias.to_string(), type_id));
629 }
630
631 pub fn find_type_id(&self, name: &str) -> Option<u32> {
632 self.type_names
633 .iter()
634 .position(|n| n == name)
635 .map(|i| i as u32)
636 .or_else(|| {
637 self.type_aliases
638 .iter()
639 .find(|(alias, _)| alias == name)
640 .map(|(_, id)| *id)
641 })
642 }
643 pub fn find_variant_id(&self, type_id: u32, variant_name: &str) -> Option<u16> {
644 self.type_variant_names
645 .get(type_id as usize)?
646 .iter()
647 .position(|n| n == variant_name)
648 .map(|i| i as u16)
649 }
650
651 pub fn find_ctor_id(&self, type_id: u32, variant_id: u16) -> Option<u32> {
652 self.type_variant_ctor_ids
653 .get(type_id as usize)?
654 .get(variant_id as usize)
655 .copied()
656 }
657
658 pub fn get_ctor_parts(&self, ctor_id: u32) -> (u32, u16) {
659 self.ctor_to_type_variant
660 .get(ctor_id as usize)
661 .copied()
662 .unwrap_or_else(|| panic!("Arena: expected ctor id {} to be registered", ctor_id))
663 }
664
665 pub fn len(&self) -> usize {
666 self.young_entries.len()
667 + self.yard_entries.len()
668 + self.handoff_entries.len()
669 + self.stable_entries.len()
670 }
671 pub fn is_empty(&self) -> bool {
672 self.young_entries.is_empty()
673 && self.yard_entries.is_empty()
674 && self.handoff_entries.is_empty()
675 && self.stable_entries.is_empty()
676 }
677}
678
679impl<T: ArenaTypes> Default for Arena<T> {
680 fn default() -> Self {
681 Self::new()
682 }
683}