// tree_sitter_c2rust_core/stack.rs — c2rust translation of tree-sitter's lib/src/stack.c

1use crate::util::*;
2use crate::*;
3use c2rust_bitfields;
4use std::os;
// Fixed-width integer aliases mirroring the C typedefs the translator emitted.
pub type __u8 = os::raw::c_uchar;
pub type __int16_t = os::raw::c_short;
pub type __u16 = os::raw::c_ushort;
pub type __int32_t = os::raw::c_int;
pub type __u32 = os::raw::c_uint;
pub type __off_t = isize;
pub type __off64_t = isize;
// Opaque stand-in for C stdio's FILE; never dereferenced in this module.
type _IO_FILE = u8;
pub type _IO_lock_t = ();
pub type FILE = _IO_FILE;
pub type int16_t = __int16_t;
pub type int32_t = __int32_t;
// Grammar-level identifiers: symbols, field ids, and parse states are 16-bit.
pub type TSSymbol = u16;
pub type TSFieldId = u16;
pub type TSStateId = u16;
// Function table for an external (user-supplied) scanner.
// NOTE(review): field roles inferred from the create/destroy/scan/serialize
// naming — confirm against upstream tree-sitter's TSLanguage layout.
#[derive(Copy, Clone)]
#[repr(C)]
pub struct C2RustUnnamed {
    pub states: *const bool,
    pub symbol_map: *const TSSymbol,
    pub create: Option<unsafe extern "C" fn() -> *mut os::raw::c_void>,
    pub destroy: Option<unsafe extern "C" fn(*mut os::raw::c_void) -> ()>,
    pub scan: Option<unsafe extern "C" fn(*mut os::raw::c_void, *mut TSLexer, *const bool) -> bool>,
    pub serialize:
        Option<unsafe extern "C" fn(*mut os::raw::c_void, *mut os::raw::c_char) -> os::raw::c_uint>,
    pub deserialize: Option<
        unsafe extern "C" fn(*mut os::raw::c_void, *const os::raw::c_char, os::raw::c_uint) -> (),
    >,
}
// Small C-layout record (count + reusable flag).
// NOTE(review): matches the shape of a parse-action entry header — confirm.
#[derive(Copy, Clone)]
#[repr(C)]
pub struct C2RustUnnamed_0 {
    pub count: u8,
    pub reusable: bool,
}
// C-layout parse-action variant carrying a symbol, child count, dynamic
// precedence and production id. NOTE(review): looks like a reduce action —
// confirm against the upstream TSParseAction union.
#[derive(Copy, Clone)]
#[repr(C)]
pub struct C2RustUnnamed_1 {
    pub type_: u8,
    pub child_count: u8,
    pub symbol: TSSymbol,
    pub dynamic_precedence: int16_t,
    pub production_id: u16,
}
// C-layout parse-action variant carrying a target state plus extra/repetition
// flags. NOTE(review): looks like a shift action — confirm against upstream.
#[derive(Copy, Clone)]
#[repr(C)]
pub struct C2RustUnnamed_2 {
    pub type_: u8,
    pub state: TSStateId,
    pub extra: bool,
    pub repetition: bool,
}
// Type-erased growable array. The typed array structs below (StackNodeArray,
// StackSliceArray, ...) share this layout and are cast to *mut VoidArray when
// passed to the generic array__* helpers, which take the element size
// explicitly.
#[derive(Copy, Clone)]
#[repr(C)]
pub struct VoidArray {
    pub contents: *mut os::raw::c_void,
    pub size: u32,
    pub capacity: u32,
}
// Aliases onto shared helper types in crate::util; the anonymous names were
// generated by c2rust for nested anonymous C structs/unions.
type C2RustUnnamed_3 = crate::util::LongShortData;
type C2RustUnnamed_4 = crate::util::ScannerStateWithLookahead;
type C2RustUnnamed_5 = crate::util::ScannerStateLookaheadMeta;
type C2RustUnnamed_6 = crate::util::ScannerStateLookaheadFirstLeaf;
// The parse stack: a graph of reference-counted StackNodes shared between
// multiple concurrently-active "versions" (heads).
#[derive(Copy, Clone)]
#[repr(C)]
pub struct Stack {
    pub heads: C2RustUnnamed_8,     // one StackHead per live version (see ts_stack__add_version)
    pub slices: StackSliceArray,    // scratch output written by ts_stack__add_slice
    pub iterators: C2RustUnnamed_7, // scratch iterator array reset by stack__iter
    pub node_pool: StackNodeArray,  // recycled nodes (filled by stack_node_release)
    pub base_node: *mut StackNode,
    pub subtree_pool: *mut SubtreePool,
}
// One node in the stack graph. A node may have up to 8 outgoing links to
// predecessor nodes (forks merge here), and is shared via ref_count.
#[derive(Copy, Clone)]
#[repr(C)]
pub struct StackNode {
    pub state: TSStateId,                 // parse state at this node
    pub position: Length,                 // absolute position (see stack_node_new)
    pub links: [StackLink; 8],            // only links[..link_count] are valid
    pub link_count: os::raw::c_ushort,
    pub ref_count: u32,                   // managed by stack_node_retain/release
    pub error_cost: os::raw::c_uint,      // accumulated subtree error cost
    pub node_count: os::raw::c_uint,      // accumulated subtree node count
    pub dynamic_precedence: os::raw::c_int,
}
// Edge from a StackNode back to a predecessor, labeled with the subtree that
// was pushed between them (subtree.ptr may be null for the base link).
#[derive(Copy, Clone)]
#[repr(C)]
pub struct StackLink {
    pub node: *mut StackNode,
    pub subtree: Subtree,
    pub is_pending: bool,
}
// Array of node pointers; used as the recycling pool (VoidArray-compatible
// layout, cast for the generic array__* helpers).
#[derive(Copy, Clone)]
#[repr(C)]
pub struct StackNodeArray {
    pub contents: *mut *mut StackNode,
    pub size: u32,
    pub capacity: u32,
}
// Element type of Stack.iterators (generic array helper from crate::util).
type C2RustUnnamed_7 = crate::util::StackElement<*mut StackIterator>;
// Cursor used while walking the node graph from a head, accumulating the
// subtrees encountered along the current path.
#[derive(Copy, Clone)]
#[repr(C)]
pub struct StackIterator {
    pub node: *mut StackNode,
    pub subtrees: SubtreeArray,
    pub subtree_count: u32,
    pub is_pending: bool,
}
// Array of StackSlice (VoidArray-compatible layout).
#[derive(Copy, Clone)]
#[repr(C)]
pub struct StackSliceArray {
    pub contents: *mut StackSlice,
    pub size: u32,
    pub capacity: u32,
}
// A run of subtrees popped from one stack version (see ts_stack__add_slice).
#[derive(Copy, Clone)]
#[repr(C)]
pub struct StackSlice {
    pub subtrees: SubtreeArray,
    pub version: StackVersion, // index into Stack.heads
}
// A version is just an index into the heads array.
pub type StackVersion = os::raw::c_uint;
// Element type of Stack.heads (generic array helper from crate::util).
type C2RustUnnamed_8 = crate::util::StackElement<*mut StackHead>;
// One active version of the stack: its top node plus per-version bookkeeping.
// Owned resources (summary, the two subtrees, the node reference) are
// released by stack_head_delete.
#[derive(Copy, Clone)]
#[repr(C)]
pub struct StackHead {
    pub node: *mut StackNode,       // top of this version's stack
    pub summary: *mut StackSummary, // optional, heap-allocated when present
    pub node_count_at_last_error: os::raw::c_uint,
    pub last_external_token: Subtree,
    pub lookahead_when_paused: Subtree,
    pub status: StackStatus,
}
// C-style enum: status of a stack version.
pub type StackStatus = os::raw::c_uint;
pub const StackStatusHalted: StackStatus = 2;
pub const StackStatusPaused: StackStatus = 1;
pub const StackStatusActive: StackStatus = 0;
// Array of StackSummaryEntry (VoidArray-compatible layout).
#[derive(Copy, Clone)]
#[repr(C)]
pub struct StackSummary {
    pub contents: *mut StackSummaryEntry,
    pub size: u32,
    pub capacity: u32,
}
// Snapshot of one node encountered while summarizing a stack version:
// its position, depth below the head, and parse state.
#[derive(Copy, Clone)]
#[repr(C)]
pub struct StackSummaryEntry {
    pub position: Length,
    pub depth: os::raw::c_uint,
    pub state: TSStateId,
}
// Result of a StackCallback. NOTE(review): Stop=1 / Pop=2 suggests these are
// combinable bit flags — confirm against the callers of stack__iter.
pub type StackAction = os::raw::c_uint;
pub const StackActionNone: C2RustUnnamed_10 = 0;
pub const StackActionStop: C2RustUnnamed_10 = 1;
pub const StackActionPop: C2RustUnnamed_10 = 2;
// Visitor invoked by stack__iter at each iterator position.
pub type StackCallback =
    Option<unsafe extern "C" fn(*mut os::raw::c_void, *const StackIterator) -> StackAction>;
// Payload threaded through a summarize traversal: the summary being built and
// the depth cutoff.
#[derive(Copy, Clone)]
#[repr(C)]
pub struct SummarizeStackSession {
    pub summary: *mut StackSummary,
    pub max_depth: os::raw::c_uint,
}
// Generated aliases for anonymous C types.
type C2RustUnnamed_9 = crate::util::StackElement<*mut *mut StackNode>;
pub type C2RustUnnamed_10 = os::raw::c_uint;
170#[inline]
171unsafe extern "C" fn ts_subtree_alloc_size(mut child_count: u32) -> size_t {
172    return (child_count as usize)
173        .wrapping_mul(::std::mem::size_of::<Subtree>() as usize)
174        .wrapping_add(::std::mem::size_of::<SubtreeHeapData>() as usize);
175}
176#[inline]
177unsafe extern "C" fn length_add(mut len1: Length, mut len2: Length) -> Length {
178    let mut result: Length = Length {
179        bytes: 0,
180        extent: TSPoint { row: 0, column: 0 },
181    };
182    result.bytes = (len1.bytes).wrapping_add(len2.bytes);
183    result.extent = point_add(len1.extent, len2.extent);
184    return result;
185}
186#[inline]
187unsafe extern "C" fn length_zero() -> Length {
188    let mut result: Length = {
189        let mut init = Length {
190            bytes: 0 as os::raw::c_int as u32,
191            extent: {
192                let mut init = TSPoint {
193                    row: 0 as os::raw::c_int as u32,
194                    column: 0 as os::raw::c_int as u32,
195                };
196                init
197            },
198        };
199        init
200    };
201    return result;
202}
203#[inline]
204unsafe extern "C" fn array__delete(mut self_0: *mut VoidArray) {
205    crate::alloc::ts_free((*self_0).contents);
206    let ref mut fresh0 = (*self_0).contents;
207    *fresh0 = 0 as *mut os::raw::c_void;
208    (*self_0).size = 0 as os::raw::c_int as u32;
209    (*self_0).capacity = 0 as os::raw::c_int as u32;
210}
211#[inline]
212unsafe extern "C" fn array__erase(
213    mut self_0: *mut VoidArray,
214    mut element_size: size_t,
215    mut index: u32,
216) {
217    if index < (*self_0).size {
218    } else {
219        panic!();
220    }
221    let mut contents: *mut os::raw::c_char = (*self_0).contents as *mut os::raw::c_char;
222    std::ptr::copy(
223        contents.offset(
224            (index.wrapping_add(1 as os::raw::c_int as os::raw::c_uint) as usize)
225                .wrapping_mul(element_size) as isize,
226        ) as *const os::raw::c_void,
227        contents.offset((index as usize).wrapping_mul(element_size) as isize)
228            as *mut os::raw::c_void,
229        ((((*self_0).size)
230            .wrapping_sub(index)
231            .wrapping_sub(1 as os::raw::c_int as os::raw::c_uint) as usize)
232            .wrapping_mul(element_size)) as usize,
233    );
234    let ref mut fresh1 = (*self_0).size;
235    *fresh1 = (*fresh1).wrapping_sub(1);
236}
237#[inline]
238unsafe extern "C" fn array__reserve(
239    mut self_0: *mut VoidArray,
240    mut element_size: size_t,
241    mut new_capacity: u32,
242) {
243    if new_capacity > (*self_0).capacity {
244        if !((*self_0).contents).is_null() {
245            let ref mut fresh2 = (*self_0).contents;
246            *fresh2 = crate::alloc::ts_realloc(
247                (*self_0).contents,
248                (new_capacity as usize).wrapping_mul(element_size),
249            );
250        } else {
251            let ref mut fresh3 = (*self_0).contents;
252            *fresh3 = crate::alloc::ts_malloc((new_capacity as usize).wrapping_mul(element_size));
253        }
254        (*self_0).capacity = new_capacity;
255    }
256}
257#[inline]
258unsafe extern "C" fn array__grow(
259    mut self_0: *mut VoidArray,
260    mut count: size_t,
261    mut element_size: size_t,
262) {
263    let mut new_size: size_t = ((*self_0).size as usize).wrapping_add(count);
264    if new_size > (*self_0).capacity as usize {
265        let mut new_capacity: size_t =
266            ((*self_0).capacity).wrapping_mul(2 as os::raw::c_int as os::raw::c_uint) as size_t;
267        if new_capacity < 8 as os::raw::c_int as usize {
268            new_capacity = 8 as os::raw::c_int as size_t;
269        }
270        if new_capacity < new_size {
271            new_capacity = new_size;
272        }
273        array__reserve(self_0, element_size, new_capacity as u32);
274    }
275}
// Replace `old_count` elements starting at `index` with `new_count` elements
// taken from `elements` (or zero-filled when `elements` is null). Grows the
// array as needed; panics if the replaced range extends past the end.
#[inline]
unsafe extern "C" fn array__splice(
    mut self_0: *mut VoidArray,
    mut element_size: size_t,
    mut index: u32,
    mut old_count: u32,
    mut new_count: u32,
    mut elements: *const os::raw::c_void,
) {
    let mut new_size: u32 = ((*self_0).size)
        .wrapping_add(new_count)
        .wrapping_sub(old_count);
    let mut old_end: u32 = index.wrapping_add(old_count);
    let mut new_end: u32 = index.wrapping_add(new_count);
    // The replaced range must lie entirely within the current contents.
    if old_end <= (*self_0).size {
    } else {
        panic!();
    }
    array__reserve(self_0, element_size, new_size);
    let mut contents: *mut os::raw::c_char = (*self_0).contents as *mut os::raw::c_char;
    if (*self_0).size > old_end {
        // Shift the tail so it starts at new_end; source and destination may
        // overlap, hence ptr::copy (memmove semantics).
        std::ptr::copy(
            contents.offset((old_end as usize).wrapping_mul(element_size) as isize)
                as *const os::raw::c_void,
            contents.offset((new_end as usize).wrapping_mul(element_size) as isize)
                as *mut os::raw::c_void,
            ((((*self_0).size).wrapping_sub(old_end) as usize).wrapping_mul(element_size)) as usize,
        );
    }
    if new_count > 0 as os::raw::c_int as os::raw::c_uint {
        if !elements.is_null() {
            // Copy the replacement elements into the gap.
            std::ptr::copy_nonoverlapping(
                elements,
                contents.offset((index as usize).wrapping_mul(element_size) as isize)
                    as *mut os::raw::c_void,
                (new_count as usize).wrapping_mul(element_size),
            );
        } else {
            // No source supplied: zero-initialize the inserted slots.
            std::ptr::write_bytes(
                contents.offset((index as usize).wrapping_mul(element_size) as isize)
                    as *mut os::raw::c_void,
                (0 as os::raw::c_int) as u8,
                ((new_count as usize).wrapping_mul(element_size)) as usize,
            );
        }
    }
    let ref mut fresh4 = (*self_0).size;
    *fresh4 =
        (*fresh4 as os::raw::c_uint).wrapping_add(new_count.wrapping_sub(old_count)) as u32 as u32;
}
326#[inline]
327unsafe extern "C" fn ts_subtree_symbol(mut self_0: Subtree) -> TSSymbol {
328    return (if (self_0.data).is_inline() as os::raw::c_int != 0 {
329        self_0.data.symbol as os::raw::c_int
330    } else {
331        (*self_0.ptr).symbol as os::raw::c_int
332    }) as TSSymbol;
333}
334#[inline]
335unsafe extern "C" fn ts_subtree_visible(mut self_0: Subtree) -> bool {
336    return if (self_0.data).is_inline() as os::raw::c_int != 0 {
337        (self_0.data).visible() as os::raw::c_int
338    } else {
339        (*self_0.ptr).visible() as os::raw::c_int
340    } != 0;
341}
342#[inline]
343unsafe extern "C" fn ts_subtree_named(mut self_0: Subtree) -> bool {
344    return if (self_0.data).is_inline() as os::raw::c_int != 0 {
345        (self_0.data).named() as os::raw::c_int
346    } else {
347        (*self_0.ptr).named() as os::raw::c_int
348    } != 0;
349}
350#[inline]
351unsafe extern "C" fn ts_subtree_extra(mut self_0: Subtree) -> bool {
352    return if (self_0.data).is_inline() as os::raw::c_int != 0 {
353        (self_0.data).extra() as os::raw::c_int
354    } else {
355        (*self_0.ptr).extra() as os::raw::c_int
356    } != 0;
357}
358#[inline]
359unsafe extern "C" fn ts_subtree_missing(mut self_0: Subtree) -> bool {
360    return if (self_0.data).is_inline() as os::raw::c_int != 0 {
361        (self_0.data).is_missing() as os::raw::c_int
362    } else {
363        (*self_0.ptr).is_missing() as os::raw::c_int
364    } != 0;
365}
// Walk the name of `symbol` character by character, classifying characters
// that would need escaping in DOT output ('"', '\\', '\n', '\t').
// NOTE(review): every match arm is an empty `()` — the output calls (fprintf
// to `f` in the original C) appear to have been elided by the c2rust
// translation, so this function currently produces no output and `f` is
// unused. Confirm against upstream tree-sitter's stack.c before relying on it.
#[inline]
unsafe extern "C" fn ts_language_write_symbol_as_dot_string(
    mut self_0: *const TSLanguage,
    mut f: *mut FILE,
    mut symbol: TSSymbol,
) {
    let mut name: *const os::raw::c_char = ts_language_symbol_name(self_0, symbol);
    let mut c: *const os::raw::c_char = name;
    // Iterate until the C string's NUL terminator.
    while *c != 0 {
        match *c as os::raw::c_int {
            34 | 92 => {
                // '"' or '\\' — would be backslash-escaped.
                ();
                ();
            }
            10 => {
                // '\n'
                ();
            }
            9 => {
                // '\t'
                ();
            }
            _ => {
                ();
            }
        }
        c = c.offset(1);
    }
}
393#[inline]
394unsafe extern "C" fn point_add(mut a: TSPoint, mut b: TSPoint) -> TSPoint {
395    if b.row > 0 as os::raw::c_int as os::raw::c_uint {
396        return point__new((a.row).wrapping_add(b.row), b.column);
397    } else {
398        return point__new(a.row, (a.column).wrapping_add(b.column));
399    };
400}
401#[inline]
402unsafe extern "C" fn point__new(mut row: os::raw::c_uint, mut column: os::raw::c_uint) -> TSPoint {
403    let mut result: TSPoint = {
404        let mut init = TSPoint {
405            row: row,
406            column: column,
407        };
408        init
409    };
410    return result;
411}
412#[inline]
413unsafe extern "C" fn ts_subtree_size(mut self_0: Subtree) -> Length {
414    if (self_0.data).is_inline() {
415        let mut result: Length = {
416            let mut init = Length {
417                bytes: self_0.data.size_bytes as u32,
418                extent: {
419                    let mut init = TSPoint {
420                        row: 0 as os::raw::c_int as u32,
421                        column: self_0.data.size_bytes as u32,
422                    };
423                    init
424                },
425            };
426            init
427        };
428        return result;
429    } else {
430        return (*self_0.ptr).size;
431    };
432}
433#[inline]
434unsafe extern "C" fn ts_subtree_child_count(mut self_0: Subtree) -> u32 {
435    return (if (self_0.data).is_inline() as os::raw::c_int != 0 {
436        0 as os::raw::c_int
437    } else {
438        (*self_0.ptr).child_count as os::raw::c_int
439    }) as u32;
440}
441#[inline]
442unsafe extern "C" fn ts_subtree_total_size(mut self_0: Subtree) -> Length {
443    return length_add(ts_subtree_padding(self_0), ts_subtree_size(self_0));
444}
445#[inline]
446unsafe extern "C" fn ts_subtree_total_bytes(mut self_0: Subtree) -> u32 {
447    return (ts_subtree_total_size(self_0)).bytes;
448}
449#[inline]
450unsafe extern "C" fn ts_subtree_padding(mut self_0: Subtree) -> Length {
451    if (self_0.data).is_inline() {
452        let mut result: Length = {
453            let mut init = Length {
454                bytes: self_0.data.padding_bytes as u32,
455                extent: {
456                    let mut init = TSPoint {
457                        row: (self_0.data).padding_rows() as u32,
458                        column: self_0.data.padding_columns as u32,
459                    };
460                    init
461                },
462            };
463            init
464        };
465        return result;
466    } else {
467        return (*self_0.ptr).padding;
468    };
469}
470#[inline]
471unsafe extern "C" fn ts_subtree_node_count(mut self_0: Subtree) -> u32 {
472    return if (self_0.data).is_inline() as os::raw::c_int != 0
473        || (*self_0.ptr).child_count as os::raw::c_int == 0 as os::raw::c_int
474    {
475        1 as os::raw::c_int as os::raw::c_uint
476    } else {
477        (*self_0.ptr).c2rust_unnamed.c2rust_unnamed.node_count
478    };
479}
480#[inline]
481unsafe extern "C" fn ts_subtree_error_cost(mut self_0: Subtree) -> u32 {
482    if ts_subtree_missing(self_0) {
483        return (110 as os::raw::c_int + 500 as os::raw::c_int) as u32;
484    } else {
485        return if (self_0.data).is_inline() as os::raw::c_int != 0 {
486            0 as os::raw::c_int as os::raw::c_uint
487        } else {
488            (*self_0.ptr).error_cost
489        };
490    };
491}
492#[inline]
493unsafe extern "C" fn ts_subtree_dynamic_precedence(mut self_0: Subtree) -> int32_t {
494    return if (self_0.data).is_inline() as os::raw::c_int != 0
495        || (*self_0.ptr).child_count as os::raw::c_int == 0 as os::raw::c_int
496    {
497        0 as os::raw::c_int
498    } else {
499        (*self_0.ptr)
500            .c2rust_unnamed
501            .c2rust_unnamed
502            .dynamic_precedence
503    };
504}
505#[inline]
506unsafe extern "C" fn ts_subtree_is_error(mut self_0: Subtree) -> bool {
507    return ts_subtree_symbol(self_0) as os::raw::c_int
508        == -(1 as os::raw::c_int) as TSSymbol as os::raw::c_int;
509}
510unsafe extern "C" fn stack_node_retain(mut self_0: *mut StackNode) {
511    if self_0.is_null() {
512        return;
513    }
514    if (*self_0).ref_count > 0 as os::raw::c_int as os::raw::c_uint {
515    } else {
516        panic!();
517    }
518    let ref mut fresh5 = (*self_0).ref_count;
519    *fresh5 = (*fresh5).wrapping_add(1);
520    if (*self_0).ref_count != 0 as os::raw::c_int as os::raw::c_uint {
521    } else {
522        panic!();
523    };
524}
// Drop one reference to `self_0`. When the count reaches zero, release the
// node's links and recycle the node into `pool` (or free it once the pool
// holds 50 nodes). The first predecessor chain is walked iteratively via the
// outer `loop` (tail-recursion elimination); only links[1..] recurse.
unsafe extern "C" fn stack_node_release(
    mut self_0: *mut StackNode,
    mut pool: *mut StackNodeArray,
    mut subtree_pool: *mut SubtreePool,
) {
    loop {
        // Releasing an already-dead node is a bug.
        if (*self_0).ref_count != 0 as os::raw::c_int as os::raw::c_uint {
        } else {
            panic!();
        }
        let ref mut fresh6 = (*self_0).ref_count;
        *fresh6 = (*fresh6).wrapping_sub(1);
        if (*self_0).ref_count > 0 as os::raw::c_int as os::raw::c_uint {
            return;
        }
        let mut first_predecessor: *mut StackNode = 0 as *mut StackNode;
        if (*self_0).link_count as os::raw::c_int > 0 as os::raw::c_int {
            // Recursively release links[link_count-1 ..= 1] ...
            let mut i: os::raw::c_uint =
                ((*self_0).link_count as os::raw::c_int - 1 as os::raw::c_int) as os::raw::c_uint;
            while i > 0 as os::raw::c_int as os::raw::c_uint {
                let mut link: StackLink = (*self_0).links[i as usize];
                if !(link.subtree.ptr).is_null() {
                    ts_subtree_release(subtree_pool, link.subtree);
                }
                stack_node_release(link.node, pool, subtree_pool);
                i = i.wrapping_sub(1);
            }
            // ... but only release links[0]'s subtree here; its node becomes
            // the next iteration's target instead of recursing.
            let mut link_0: StackLink = (*self_0).links[0 as os::raw::c_int as usize];
            if !(link_0.subtree.ptr).is_null() {
                ts_subtree_release(subtree_pool, link_0.subtree);
            }
            first_predecessor = (*self_0).links[0 as os::raw::c_int as usize].node;
        }
        if (*pool).size < 50 as os::raw::c_int as os::raw::c_uint {
            // Pool not full: keep the node for reuse by stack_node_new.
            array__grow(
                pool as *mut VoidArray,
                1 as os::raw::c_int as size_t,
                ::std::mem::size_of::<*mut StackNode>() as usize,
            );
            let ref mut fresh7 = (*pool).size;
            let fresh8 = *fresh7;
            *fresh7 = (*fresh7).wrapping_add(1);
            let ref mut fresh9 = *((*pool).contents).offset(fresh8 as isize);
            *fresh9 = self_0;
        } else {
            crate::alloc::ts_free(self_0 as *mut os::raw::c_void);
        }
        if first_predecessor.is_null() {
            break;
        }
        self_0 = first_predecessor;
    }
}
// Allocate a stack node in `state`, optionally linked back to `previous_node`
// through `subtree`. Accumulated totals (position, error cost, node count,
// dynamic precedence) are inherited from the predecessor and extended by the
// pushed subtree. Nodes are reused from `pool` when available.
unsafe extern "C" fn stack_node_new(
    mut previous_node: *mut StackNode,
    mut subtree: Subtree,
    mut is_pending: bool,
    mut state: TSStateId,
    mut pool: *mut StackNodeArray,
) -> *mut StackNode {
    // Pop a recycled node from the pool, or heap-allocate a fresh one.
    let mut node: *mut StackNode = (if (*pool).size > 0 as os::raw::c_int as os::raw::c_uint {
        let ref mut fresh10 = (*pool).size;
        *fresh10 = (*fresh10).wrapping_sub(1);
        *((*pool).contents).offset(*fresh10 as isize) as *mut os::raw::c_void
    } else {
        crate::alloc::ts_malloc(::std::mem::size_of::<StackNode>() as usize)
    }) as *mut StackNode;
    // Fully (re)initialize: zeroed links, ref_count starts at 1.
    *node = {
        let mut init = StackNode {
            state: state,
            position: Length {
                bytes: 0,
                extent: TSPoint { row: 0, column: 0 },
            },
            links: [StackLink {
                node: 0 as *mut StackNode,
                subtree: Subtree {
                    data: SubtreeInlineData {
                        is_inline_visible_named_extra_has_changes_is_missing_is_keyword: [0; 1],
                        symbol: 0,
                        parse_state: 0,
                        padding_columns: 0,
                        padding_rows_lookahead_bytes: [0; 1],
                        padding_bytes: 0,
                        size_bytes: 0,
                    },
                },
                is_pending: false,
            }; 8],
            link_count: 0 as os::raw::c_int as os::raw::c_ushort,
            ref_count: 1 as os::raw::c_int as u32,
            error_cost: 0,
            node_count: 0,
            dynamic_precedence: 0,
        };
        init
    };
    if !previous_node.is_null() {
        // Single back-link to the predecessor, labeled with `subtree`.
        (*node).link_count = 1 as os::raw::c_int as os::raw::c_ushort;
        (*node).links[0 as os::raw::c_int as usize] = {
            let mut init = StackLink {
                node: previous_node,
                subtree: subtree,
                is_pending: is_pending,
            };
            init
        };
        // Inherit the predecessor's accumulated totals.
        (*node).position = (*previous_node).position;
        (*node).error_cost = (*previous_node).error_cost;
        (*node).dynamic_precedence = (*previous_node).dynamic_precedence;
        (*node).node_count = (*previous_node).node_count;
        if !(subtree.ptr).is_null() {
            // Fold the pushed subtree's cost/size/count/precedence in.
            let ref mut fresh11 = (*node).error_cost;
            *fresh11 = (*fresh11).wrapping_add(ts_subtree_error_cost(subtree));
            (*node).position = length_add((*node).position, ts_subtree_total_size(subtree));
            let ref mut fresh12 = (*node).node_count;
            *fresh12 = (*fresh12).wrapping_add(ts_subtree_node_count(subtree));
            (*node).dynamic_precedence += ts_subtree_dynamic_precedence(subtree);
        }
    } else {
        // Root node: zero position, no error cost.
        (*node).position = length_zero();
        (*node).error_cost = 0 as os::raw::c_int as os::raw::c_uint;
    }
    return node;
}
650unsafe extern "C" fn stack__subtree_is_equivalent(mut left: Subtree, mut right: Subtree) -> bool {
651    if left.ptr == right.ptr {
652        return true;
653    }
654    if (left.ptr).is_null() || (right.ptr).is_null() {
655        return false;
656    }
657    if ts_subtree_symbol(left) as os::raw::c_int != ts_subtree_symbol(right) as os::raw::c_int {
658        return false;
659    }
660    if ts_subtree_error_cost(left) > 0 as os::raw::c_int as os::raw::c_uint
661        && ts_subtree_error_cost(right) > 0 as os::raw::c_int as os::raw::c_uint
662    {
663        return true;
664    }
665    return (ts_subtree_padding(left)).bytes == (ts_subtree_padding(right)).bytes
666        && (ts_subtree_size(left)).bytes == (ts_subtree_size(right)).bytes
667        && ts_subtree_child_count(left) == ts_subtree_child_count(right)
668        && ts_subtree_extra(left) as os::raw::c_int == ts_subtree_extra(right) as os::raw::c_int
669        && ts_subtree_external_scanner_state_eq(left, right) as os::raw::c_int != 0;
670}
// Add `link` as an outgoing edge of `self_0`, deduplicating against existing
// links: an equivalent link to the same node keeps the higher-precedence
// subtree; an equivalent link to a distinct-but-matching node (same state and
// byte position) is merged by recursively re-adding that node's links.
// Otherwise the link is appended, up to the hard cap of 8 links.
unsafe extern "C" fn stack_node_add_link(
    mut self_0: *mut StackNode,
    mut link: StackLink,
    mut subtree_pool: *mut SubtreePool,
) {
    // Never create a self-loop.
    if link.node == self_0 {
        return;
    }
    let mut i: os::raw::c_int = 0 as os::raw::c_int;
    while i < (*self_0).link_count as os::raw::c_int {
        let mut existing_link: *mut StackLink =
            &mut *((*self_0).links).as_mut_ptr().offset(i as isize) as *mut StackLink;
        if stack__subtree_is_equivalent((*existing_link).subtree, link.subtree) {
            if (*existing_link).node == link.node {
                // Duplicate edge: keep whichever subtree has the higher
                // dynamic precedence, swapping refcounts accordingly.
                if ts_subtree_dynamic_precedence(link.subtree)
                    > ts_subtree_dynamic_precedence((*existing_link).subtree)
                {
                    ts_subtree_retain(link.subtree);
                    ts_subtree_release(subtree_pool, (*existing_link).subtree);
                    (*existing_link).subtree = link.subtree;
                    (*self_0).dynamic_precedence = (*link.node).dynamic_precedence
                        + ts_subtree_dynamic_precedence(link.subtree);
                }
                return;
            }
            if (*(*existing_link).node).state as os::raw::c_int
                == (*link.node).state as os::raw::c_int
                && (*(*existing_link).node).position.bytes == (*link.node).position.bytes
            {
                // Distinct predecessor nodes that are indistinguishable:
                // merge by grafting the new node's links onto the existing one.
                let mut j: os::raw::c_int = 0 as os::raw::c_int;
                while j < (*link.node).link_count as os::raw::c_int {
                    stack_node_add_link(
                        (*existing_link).node,
                        (*link.node).links[j as usize],
                        subtree_pool,
                    );
                    j += 1;
                }
                // Keep the maximum dynamic precedence seen on any path.
                let mut dynamic_precedence: int32_t = (*link.node).dynamic_precedence;
                if !(link.subtree.ptr).is_null() {
                    dynamic_precedence += ts_subtree_dynamic_precedence(link.subtree);
                }
                if dynamic_precedence > (*self_0).dynamic_precedence {
                    (*self_0).dynamic_precedence = dynamic_precedence;
                }
                return;
            }
        }
        i += 1;
    }
    // Hard cap: silently drop links beyond the 8-slot array.
    if (*self_0).link_count as os::raw::c_int == 8 as os::raw::c_int {
        return;
    }
    // Genuinely new edge: append it and take references.
    stack_node_retain(link.node);
    let mut node_count: os::raw::c_uint = (*link.node).node_count;
    let mut dynamic_precedence_0: os::raw::c_int = (*link.node).dynamic_precedence;
    let ref mut fresh13 = (*self_0).link_count;
    let fresh14 = *fresh13;
    *fresh13 = (*fresh13).wrapping_add(1);
    (*self_0).links[fresh14 as usize] = link;
    if !(link.subtree.ptr).is_null() {
        ts_subtree_retain(link.subtree);
        node_count = node_count.wrapping_add(ts_subtree_node_count(link.subtree));
        dynamic_precedence_0 += ts_subtree_dynamic_precedence(link.subtree);
    }
    // Totals track the maximum over all incoming paths.
    if node_count > (*self_0).node_count {
        (*self_0).node_count = node_count;
    }
    if dynamic_precedence_0 > (*self_0).dynamic_precedence {
        (*self_0).dynamic_precedence = dynamic_precedence_0;
    }
}
743unsafe extern "C" fn stack_head_delete(
744    mut self_0: *mut StackHead,
745    mut pool: *mut StackNodeArray,
746    mut subtree_pool: *mut SubtreePool,
747) {
748    if !((*self_0).node).is_null() {
749        if !((*self_0).last_external_token.ptr).is_null() {
750            ts_subtree_release(subtree_pool, (*self_0).last_external_token);
751        }
752        if !((*self_0).lookahead_when_paused.ptr).is_null() {
753            ts_subtree_release(subtree_pool, (*self_0).lookahead_when_paused);
754        }
755        if !((*self_0).summary).is_null() {
756            array__delete((*self_0).summary as *mut VoidArray);
757            crate::alloc::ts_free((*self_0).summary as *mut os::raw::c_void);
758        }
759        stack_node_release((*self_0).node, pool, subtree_pool);
760    }
761}
// Append a new stack version whose head is `node`, copying the error
// watermark and last external token from `original_version`. Retains both the
// node and the copied token. Returns the new version's index.
unsafe extern "C" fn ts_stack__add_version(
    mut self_0: *mut Stack,
    mut original_version: StackVersion,
    mut node: *mut StackNode,
) -> StackVersion {
    let mut head: StackHead = {
        let mut init = StackHead {
            node: node,
            summary: 0 as *mut StackSummary,
            node_count_at_last_error: (*((*self_0).heads.contents)
                .offset(original_version as isize))
            .node_count_at_last_error,
            last_external_token: (*((*self_0).heads.contents).offset(original_version as isize))
                .last_external_token,
            lookahead_when_paused: Subtree {
                ptr: 0 as *const SubtreeHeapData,
            },
            status: StackStatusActive,
        };
        init
    };
    // Append the head to self_0.heads.
    array__grow(
        &mut (*self_0).heads as *mut C2RustUnnamed_8 as *mut VoidArray,
        1 as os::raw::c_int as size_t,
        ::std::mem::size_of::<StackHead>() as usize,
    );
    let ref mut fresh15 = (*self_0).heads.size;
    let fresh16 = *fresh15;
    *fresh15 = (*fresh15).wrapping_add(1);
    *((*self_0).heads.contents).offset(fresh16 as isize) = head;
    // The new head shares the node and the external token, so take references.
    stack_node_retain(node);
    if !(head.last_external_token.ptr).is_null() {
        ts_subtree_retain(head.last_external_token);
    }
    return ((*self_0).heads.size).wrapping_sub(1 as os::raw::c_int as os::raw::c_uint);
}
// Record a popped slice (`subtrees`, ending at `node`) in self_0.slices.
// The reverse scan (u32 countdown from size-1; the `i+1 > 0` wrapping test is
// the unsigned idiom for `i >= 0`) looks for an existing slice whose version
// already has `node` as its head, so slices of the same version stay adjacent;
// otherwise a fresh version is created and the slice appended.
unsafe extern "C" fn ts_stack__add_slice(
    mut self_0: *mut Stack,
    mut original_version: StackVersion,
    mut node: *mut StackNode,
    mut subtrees: *mut SubtreeArray,
) {
    let mut i: u32 = ((*self_0).slices.size).wrapping_sub(1 as os::raw::c_int as os::raw::c_uint);
    while i.wrapping_add(1 as os::raw::c_int as os::raw::c_uint)
        > 0 as os::raw::c_int as os::raw::c_uint
    {
        let mut version: StackVersion = (*((*self_0).slices.contents).offset(i as isize)).version;
        if (*((*self_0).heads.contents).offset(version as isize)).node == node {
            // Reuse this version: insert the new slice right after slot i.
            let mut slice: StackSlice = {
                let mut init = StackSlice {
                    subtrees: *subtrees,
                    version: version,
                };
                init
            };
            array__splice(
                &mut (*self_0).slices as *mut StackSliceArray as *mut VoidArray,
                ::std::mem::size_of::<StackSlice>() as usize,
                i.wrapping_add(1 as os::raw::c_int as os::raw::c_uint),
                0 as os::raw::c_int as u32,
                1 as os::raw::c_int as u32,
                &mut slice as *mut StackSlice as *const os::raw::c_void,
            );
            return;
        }
        i = i.wrapping_sub(1);
    }
    // No existing version ends at `node`: create one and append the slice.
    let mut version_0: StackVersion = ts_stack__add_version(self_0, original_version, node);
    let mut slice_0: StackSlice = {
        let mut init = StackSlice {
            subtrees: *subtrees,
            version: version_0,
        };
        init
    };
    array__grow(
        &mut (*self_0).slices as *mut StackSliceArray as *mut VoidArray,
        1 as os::raw::c_int as size_t,
        ::std::mem::size_of::<StackSlice>() as usize,
    );
    let ref mut fresh17 = (*self_0).slices.size;
    let fresh18 = *fresh17;
    *fresh17 = (*fresh17).wrapping_add(1);
    *((*self_0).slices.contents).offset(fresh18 as isize) = slice_0;
}
#[inline(always)]
/// Core stack traversal used by every pop/summary operation.
///
/// Starting from the head at `version`, walks backward through the graph of
/// stack nodes, maintaining one `StackIterator` per path being explored. At
/// each step `callback` decides whether a path should be popped (recorded as
/// a slice), stopped, or continued. Branching nodes (link_count > 1) fork the
/// iterator, up to a cap of 64 concurrent iterators. Returns the resulting
/// slice array (owned by `self_0`, reset on the next call).
///
/// `goal_subtree_count >= 0` means subtrees are being collected; a negative
/// value (used by summaries) walks without accumulating subtrees.
unsafe extern "C" fn stack__iter(
    mut self_0: *mut Stack,
    mut version: StackVersion,
    mut callback: StackCallback,
    mut payload: *mut os::raw::c_void,
    mut goal_subtree_count: os::raw::c_int,
) -> StackSliceArray {
    // Reuse the stack-owned scratch arrays; previous contents are discarded.
    (*self_0).slices.size = 0 as os::raw::c_int as u32;
    (*self_0).iterators.size = 0 as os::raw::c_int as u32;
    // Bounds assertion (c2rust rendering of `assert(version < size)`).
    if version < (*self_0).heads.size {
    } else {
        panic!();
    }
    let mut head: *mut StackHead =
        &mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead;
    // Seed a single iterator positioned at the head node, with no subtrees
    // collected yet and the pending flag optimistically true.
    let mut iterator: StackIterator = {
        let mut init = StackIterator {
            node: (*head).node,
            subtrees: {
                let mut init = SubtreeArray {
                    contents: 0 as *mut Subtree,
                    size: 0 as os::raw::c_int as u32,
                    capacity: 0 as os::raw::c_int as u32,
                };
                init
            },
            subtree_count: 0 as os::raw::c_int as u32,
            is_pending: true,
        };
        init
    };
    let mut include_subtrees: bool = false;
    if goal_subtree_count >= 0 as os::raw::c_int {
        include_subtrees = true;
        // Pre-size the subtree buffer for the expected pop depth to avoid
        // repeated growth while walking.
        array__reserve(
            &mut iterator.subtrees as *mut SubtreeArray as *mut VoidArray,
            ::std::mem::size_of::<Subtree>() as usize,
            (ts_subtree_alloc_size(goal_subtree_count as u32))
                .wrapping_div(::std::mem::size_of::<Subtree>() as usize) as u32,
        );
    }
    // Manual push of the seed iterator (grow, then write at old size).
    array__grow(
        &mut (*self_0).iterators as *mut C2RustUnnamed_7 as *mut VoidArray,
        1 as os::raw::c_int as size_t,
        ::std::mem::size_of::<StackIterator>() as usize,
    );
    let ref mut fresh19 = (*self_0).iterators.size;
    let fresh20 = *fresh19;
    *fresh19 = (*fresh19).wrapping_add(1);
    *((*self_0).iterators.contents).offset(fresh20 as isize) = iterator;
    // Outer loop: keep stepping until every iterator has been retired.
    while (*self_0).iterators.size > 0 as os::raw::c_int as os::raw::c_uint {
        let mut i: u32 = 0 as os::raw::c_int as u32;
        let mut size: u32 = (*self_0).iterators.size;
        while i < size {
            let mut iterator_0: *mut StackIterator =
                &mut *((*self_0).iterators.contents).offset(i as isize) as *mut StackIterator;
            let mut node: *mut StackNode = (*iterator_0).node;
            let mut action: StackAction =
                callback.expect("non-null function pointer")(payload, iterator_0);
            let mut should_pop: bool =
                action & StackActionPop as os::raw::c_int as os::raw::c_uint != 0;
            // A path also stops implicitly when its node has no links left
            // (it reached the base of the stack).
            let mut should_stop: bool =
                action & StackActionStop as os::raw::c_int as os::raw::c_uint != 0
                    || (*node).link_count as os::raw::c_int == 0 as os::raw::c_int;
            if should_pop {
                let mut subtrees: SubtreeArray = (*iterator_0).subtrees;
                // If the path continues after popping, it keeps its own
                // subtree array, so the slice gets a copy instead.
                if !should_stop {
                    ts_subtree_array_copy(subtrees, &mut subtrees);
                }
                // Subtrees were collected bottom-up; slices expose them
                // top-down.
                ts_subtree_array_reverse(&mut subtrees);
                ts_stack__add_slice(self_0, version, node, &mut subtrees);
            }
            if should_stop {
                // Retire this iterator; if it didn't pop, its collected
                // subtrees are no longer needed.
                if !should_pop {
                    ts_subtree_array_delete((*self_0).subtree_pool, &mut (*iterator_0).subtrees);
                }
                array__erase(
                    &mut (*self_0).iterators as *mut C2RustUnnamed_7 as *mut VoidArray,
                    ::std::mem::size_of::<StackIterator>() as usize,
                    i,
                );
                // Compensate indices for the erased slot.
                i = i.wrapping_sub(1);
                size = size.wrapping_sub(1);
            } else {
                let mut current_block_41: u64;
                // Advance along every link; links[0] is taken last, in place,
                // by the current iterator, while links[1..] fork new ones.
                let mut j: u32 = 1 as os::raw::c_int as u32;
                while j <= (*node).link_count as os::raw::c_uint {
                    let mut next_iterator: *mut StackIterator = 0 as *mut StackIterator;
                    let mut link: StackLink = StackLink {
                        node: 0 as *mut StackNode,
                        subtree: Subtree {
                            data: SubtreeInlineData {
                                is_inline_visible_named_extra_has_changes_is_missing_is_keyword: [0;
                                    1],
                                symbol: 0,
                                parse_state: 0,
                                padding_columns: 0,
                                padding_rows_lookahead_bytes: [0; 1],
                                padding_bytes: 0,
                                size_bytes: 0,
                            },
                        },
                        is_pending: false,
                    };
                    if j == (*node).link_count as os::raw::c_uint {
                        // Last link: reuse the current iterator slot.
                        link = (*node).links[0 as os::raw::c_int as usize];
                        next_iterator = &mut *((*self_0).iterators.contents).offset(i as isize)
                            as *mut StackIterator;
                        current_block_41 = 7746103178988627676;
                    } else if (*self_0).iterators.size >= 64 as os::raw::c_int as os::raw::c_uint {
                        // Iterator cap reached: drop this branch entirely.
                        current_block_41 = 15925075030174552612;
                    } else {
                        // Fork: duplicate the current iterator for link j.
                        link = (*node).links[j as usize];
                        let mut current_iterator: StackIterator =
                            *((*self_0).iterators.contents).offset(i as isize);
                        array__grow(
                            &mut (*self_0).iterators as *mut C2RustUnnamed_7 as *mut VoidArray,
                            1 as os::raw::c_int as size_t,
                            ::std::mem::size_of::<StackIterator>() as usize,
                        );
                        let ref mut fresh21 = (*self_0).iterators.size;
                        let fresh22 = *fresh21;
                        *fresh21 = (*fresh21).wrapping_add(1);
                        *((*self_0).iterators.contents).offset(fresh22 as isize) = current_iterator;
                        if ((*self_0).iterators.size)
                            .wrapping_sub(1 as os::raw::c_int as os::raw::c_uint)
                            < (*self_0).iterators.size
                        {
                        } else {
                            panic!();
                        }
                        next_iterator = &mut *((*self_0).iterators.contents).offset(
                            ((*self_0).iterators.size)
                                .wrapping_sub(1 as os::raw::c_int as os::raw::c_uint)
                                as isize,
                        ) as *mut StackIterator;
                        // The fork needs its own copy of the subtree array.
                        ts_subtree_array_copy(
                            (*next_iterator).subtrees,
                            &mut (*next_iterator).subtrees,
                        );
                        current_block_41 = 7746103178988627676;
                    }
                    match current_block_41 {
                        7746103178988627676 => {
                            // Step the chosen iterator across `link`.
                            let ref mut fresh23 = (*next_iterator).node;
                            *fresh23 = link.node;
                            if !(link.subtree.ptr).is_null() {
                                if include_subtrees {
                                    array__grow(
                                        &mut (*next_iterator).subtrees as *mut SubtreeArray
                                            as *mut VoidArray,
                                        1 as os::raw::c_int as size_t,
                                        ::std::mem::size_of::<Subtree>() as usize,
                                    );
                                    let ref mut fresh24 = (*next_iterator).subtrees.size;
                                    let fresh25 = *fresh24;
                                    *fresh24 = (*fresh24).wrapping_add(1);
                                    *((*next_iterator).subtrees.contents)
                                        .offset(fresh25 as isize) = link.subtree;
                                    ts_subtree_retain(link.subtree);
                                }
                                // Extra subtrees don't count toward the goal
                                // and don't affect pending status.
                                if !ts_subtree_extra(link.subtree) {
                                    let ref mut fresh26 = (*next_iterator).subtree_count;
                                    *fresh26 = (*fresh26).wrapping_add(1);
                                    if !link.is_pending {
                                        (*next_iterator).is_pending = false;
                                    }
                                }
                            } else {
                                // Null subtree link still counts as one step.
                                let ref mut fresh27 = (*next_iterator).subtree_count;
                                *fresh27 = (*fresh27).wrapping_add(1);
                                (*next_iterator).is_pending = false;
                            }
                        }
                        _ => {}
                    }
                    j = j.wrapping_add(1);
                }
            }
            i = i.wrapping_add(1);
        }
    }
    return (*self_0).slices;
}
#[no_mangle]
/// Allocate and initialize a new parse stack.
///
/// Zero-initializes the four internal arrays, reserves initial capacity for
/// each, creates the shared base node (state 1, null subtree), and clears the
/// stack so it starts with a single version pointing at the base node.
/// The `subtree_pool` is borrowed, not owned.
pub unsafe extern "C" fn ts_stack_new(mut subtree_pool: *mut SubtreePool) -> *mut Stack {
    let mut self_0: *mut Stack = crate::alloc::ts_calloc(
        1 as os::raw::c_int as size_t,
        ::std::mem::size_of::<Stack>() as usize,
    ) as *mut Stack;
    // Explicit empty-array init (c2rust rendering of `array_init`).
    (*self_0).heads.size = 0 as os::raw::c_int as u32;
    (*self_0).heads.capacity = 0 as os::raw::c_int as u32;
    let ref mut fresh28 = (*self_0).heads.contents;
    *fresh28 = 0 as *mut StackHead;
    (*self_0).slices.size = 0 as os::raw::c_int as u32;
    (*self_0).slices.capacity = 0 as os::raw::c_int as u32;
    let ref mut fresh29 = (*self_0).slices.contents;
    *fresh29 = 0 as *mut StackSlice;
    (*self_0).iterators.size = 0 as os::raw::c_int as u32;
    (*self_0).iterators.capacity = 0 as os::raw::c_int as u32;
    let ref mut fresh30 = (*self_0).iterators.contents;
    *fresh30 = 0 as *mut StackIterator;
    (*self_0).node_pool.size = 0 as os::raw::c_int as u32;
    (*self_0).node_pool.capacity = 0 as os::raw::c_int as u32;
    let ref mut fresh31 = (*self_0).node_pool.contents;
    *fresh31 = 0 as *mut *mut StackNode;
    // Small up-front reservations; the node pool is larger because nodes
    // are recycled through it constantly during parsing.
    array__reserve(
        &mut (*self_0).heads as *mut C2RustUnnamed_8 as *mut VoidArray,
        ::std::mem::size_of::<StackHead>() as usize,
        4 as os::raw::c_int as u32,
    );
    array__reserve(
        &mut (*self_0).slices as *mut StackSliceArray as *mut VoidArray,
        ::std::mem::size_of::<StackSlice>() as usize,
        4 as os::raw::c_int as u32,
    );
    array__reserve(
        &mut (*self_0).iterators as *mut C2RustUnnamed_7 as *mut VoidArray,
        ::std::mem::size_of::<StackIterator>() as usize,
        4 as os::raw::c_int as u32,
    );
    array__reserve(
        &mut (*self_0).node_pool as *mut StackNodeArray as *mut VoidArray,
        ::std::mem::size_of::<*mut StackNode>() as usize,
        50 as os::raw::c_int as u32,
    );
    let ref mut fresh32 = (*self_0).subtree_pool;
    *fresh32 = subtree_pool;
    // The base node anchors every version; no predecessor, no subtree.
    let ref mut fresh33 = (*self_0).base_node;
    *fresh33 = stack_node_new(
        0 as *mut StackNode,
        Subtree {
            ptr: 0 as *const SubtreeHeapData,
        },
        false,
        1 as os::raw::c_int as TSStateId,
        &mut (*self_0).node_pool,
    );
    ts_stack_clear(self_0);
    return self_0;
}
#[no_mangle]
/// Free a stack and everything it owns.
///
/// Releases the scratch arrays, the base node, every remaining head, then
/// all pooled nodes, the heads array, and finally the `Stack` itself.
/// The borrowed `subtree_pool` is not freed.
pub unsafe extern "C" fn ts_stack_delete(mut self_0: *mut Stack) {
    if !((*self_0).slices.contents).is_null() {
        array__delete(&mut (*self_0).slices as *mut StackSliceArray as *mut VoidArray);
    }
    if !((*self_0).iterators.contents).is_null() {
        array__delete(&mut (*self_0).iterators as *mut C2RustUnnamed_7 as *mut VoidArray);
    }
    stack_node_release(
        (*self_0).base_node,
        &mut (*self_0).node_pool,
        (*self_0).subtree_pool,
    );
    // Heads must be deleted before the node pool is drained, since deleting
    // a head may return its nodes to the pool.
    let mut i: u32 = 0 as os::raw::c_int as u32;
    while i < (*self_0).heads.size {
        stack_head_delete(
            &mut *((*self_0).heads.contents).offset(i as isize),
            &mut (*self_0).node_pool,
            (*self_0).subtree_pool,
        );
        i = i.wrapping_add(1);
    }
    (*self_0).heads.size = 0 as os::raw::c_int as u32;
    if !((*self_0).node_pool.contents).is_null() {
        // Pooled nodes are raw allocations; free each, then the pool array.
        let mut i_0: u32 = 0 as os::raw::c_int as u32;
        while i_0 < (*self_0).node_pool.size {
            crate::alloc::ts_free(
                *((*self_0).node_pool.contents).offset(i_0 as isize) as *mut os::raw::c_void
            );
            i_0 = i_0.wrapping_add(1);
        }
        array__delete(&mut (*self_0).node_pool as *mut StackNodeArray as *mut VoidArray);
    }
    array__delete(&mut (*self_0).heads as *mut C2RustUnnamed_8 as *mut VoidArray);
    crate::alloc::ts_free(self_0 as *mut os::raw::c_void);
}
1125#[no_mangle]
1126pub unsafe extern "C" fn ts_stack_version_count(mut self_0: *const Stack) -> u32 {
1127    return (*self_0).heads.size;
1128}
1129#[no_mangle]
1130pub unsafe extern "C" fn ts_stack_state(
1131    mut self_0: *const Stack,
1132    mut version: StackVersion,
1133) -> TSStateId {
1134    if version < (*self_0).heads.size {
1135    } else {
1136        panic!();
1137    }
1138    return (*(*(&mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead))
1139        .node)
1140        .state;
1141}
1142#[no_mangle]
1143pub unsafe extern "C" fn ts_stack_position(
1144    mut self_0: *const Stack,
1145    mut version: StackVersion,
1146) -> Length {
1147    if version < (*self_0).heads.size {
1148    } else {
1149        panic!();
1150    }
1151    return (*(*(&mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead))
1152        .node)
1153        .position;
1154}
1155#[no_mangle]
1156pub unsafe extern "C" fn ts_stack_last_external_token(
1157    mut self_0: *const Stack,
1158    mut version: StackVersion,
1159) -> Subtree {
1160    if version < (*self_0).heads.size {
1161    } else {
1162        panic!();
1163    }
1164    return (*(&mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead))
1165        .last_external_token;
1166}
1167#[no_mangle]
1168pub unsafe extern "C" fn ts_stack_set_last_external_token(
1169    mut self_0: *mut Stack,
1170    mut version: StackVersion,
1171    mut token: Subtree,
1172) {
1173    if version < (*self_0).heads.size {
1174    } else {
1175        panic!();
1176    }
1177    let mut head: *mut StackHead =
1178        &mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead;
1179    if !(token.ptr).is_null() {
1180        ts_subtree_retain(token);
1181    }
1182    if !((*head).last_external_token.ptr).is_null() {
1183        ts_subtree_release((*self_0).subtree_pool, (*head).last_external_token);
1184    }
1185    (*head).last_external_token = token;
1186}
#[no_mangle]
/// Error cost of the head at `version`, used to rank competing versions.
///
/// Adds a fixed penalty of 500 when the head is paused, or when it sits in
/// the error state (state 0) with a null subtree on its first link — i.e.
/// an error that has not yet consumed anything.
pub unsafe extern "C" fn ts_stack_error_cost(
    mut self_0: *const Stack,
    mut version: StackVersion,
) -> os::raw::c_uint {
    if version < (*self_0).heads.size {
    } else {
        panic!();
    }
    let mut head: *mut StackHead =
        &mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead;
    let mut result: os::raw::c_uint = (*(*head).node).error_cost;
    // NOTE: `&&` binds tighter than `||` — the penalty applies when paused,
    // OR when (state == 0 AND links[0].subtree is null).
    if (*head).status as os::raw::c_uint == StackStatusPaused as os::raw::c_int as os::raw::c_uint
        || (*(*head).node).state as os::raw::c_int == 0 as os::raw::c_int
            && ((*(*head).node).links[0 as os::raw::c_int as usize]
                .subtree
                .ptr)
                .is_null()
    {
        result = result.wrapping_add(500 as os::raw::c_int as os::raw::c_uint);
    }
    return result;
}
1210#[no_mangle]
1211pub unsafe extern "C" fn ts_stack_node_count_since_error(
1212    mut self_0: *const Stack,
1213    mut version: StackVersion,
1214) -> os::raw::c_uint {
1215    if version < (*self_0).heads.size {
1216    } else {
1217        panic!();
1218    }
1219    let mut head: *mut StackHead =
1220        &mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead;
1221    if (*(*head).node).node_count < (*head).node_count_at_last_error {
1222        (*head).node_count_at_last_error = (*(*head).node).node_count;
1223    }
1224    return ((*(*head).node).node_count).wrapping_sub((*head).node_count_at_last_error);
1225}
1226#[no_mangle]
1227pub unsafe extern "C" fn ts_stack_push(
1228    mut self_0: *mut Stack,
1229    mut version: StackVersion,
1230    mut subtree: Subtree,
1231    mut pending: bool,
1232    mut state: TSStateId,
1233) {
1234    if version < (*self_0).heads.size {
1235    } else {
1236        panic!();
1237    }
1238    let mut head: *mut StackHead =
1239        &mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead;
1240    let mut new_node: *mut StackNode = stack_node_new(
1241        (*head).node,
1242        subtree,
1243        pending,
1244        state,
1245        &mut (*self_0).node_pool,
1246    );
1247    if (subtree.ptr).is_null() {
1248        (*head).node_count_at_last_error = (*new_node).node_count;
1249    }
1250    let ref mut fresh34 = (*head).node;
1251    *fresh34 = new_node;
1252}
1253#[inline(always)]
1254unsafe extern "C" fn pop_count_callback(
1255    mut payload: *mut os::raw::c_void,
1256    mut iterator: *const StackIterator,
1257) -> StackAction {
1258    let mut goal_subtree_count: *mut os::raw::c_uint = payload as *mut os::raw::c_uint;
1259    if (*iterator).subtree_count == *goal_subtree_count {
1260        return (StackActionPop as os::raw::c_int | StackActionStop as os::raw::c_int)
1261            as StackAction;
1262    } else {
1263        return StackActionNone as os::raw::c_int as StackAction;
1264    };
1265}
1266#[no_mangle]
1267pub unsafe extern "C" fn ts_stack_pop_count(
1268    mut self_0: *mut Stack,
1269    mut version: StackVersion,
1270    mut count: u32,
1271) -> StackSliceArray {
1272    return stack__iter(
1273        self_0,
1274        version,
1275        Some(
1276            pop_count_callback
1277                as unsafe extern "C" fn(*mut os::raw::c_void, *const StackIterator) -> StackAction,
1278        ),
1279        &mut count as *mut u32 as *mut os::raw::c_void,
1280        count as os::raw::c_int,
1281    );
1282}
1283#[inline(always)]
1284unsafe extern "C" fn pop_pending_callback(
1285    mut payload: *mut os::raw::c_void,
1286    mut iterator: *const StackIterator,
1287) -> StackAction {
1288    if (*iterator).subtree_count >= 1 as os::raw::c_int as os::raw::c_uint {
1289        if (*iterator).is_pending {
1290            return (StackActionPop as os::raw::c_int | StackActionStop as os::raw::c_int)
1291                as StackAction;
1292        } else {
1293            return StackActionStop as os::raw::c_int as StackAction;
1294        }
1295    } else {
1296        return StackActionNone as os::raw::c_int as StackAction;
1297    };
1298}
1299#[no_mangle]
1300pub unsafe extern "C" fn ts_stack_pop_pending(
1301    mut self_0: *mut Stack,
1302    mut version: StackVersion,
1303) -> StackSliceArray {
1304    let mut pop: StackSliceArray = stack__iter(
1305        self_0,
1306        version,
1307        Some(
1308            pop_pending_callback
1309                as unsafe extern "C" fn(*mut os::raw::c_void, *const StackIterator) -> StackAction,
1310        ),
1311        0 as *mut os::raw::c_void,
1312        0 as os::raw::c_int,
1313    );
1314    if pop.size > 0 as os::raw::c_int as os::raw::c_uint {
1315        ts_stack_renumber_version(
1316            self_0,
1317            (*(pop.contents).offset(0 as os::raw::c_int as isize)).version,
1318            version,
1319        );
1320        (*(pop.contents).offset(0 as os::raw::c_int as isize)).version = version;
1321    }
1322    return pop;
1323}
1324#[inline(always)]
1325unsafe extern "C" fn pop_error_callback(
1326    mut payload: *mut os::raw::c_void,
1327    mut iterator: *const StackIterator,
1328) -> StackAction {
1329    if (*iterator).subtrees.size > 0 as os::raw::c_int as os::raw::c_uint {
1330        let mut found_error: *mut bool = payload as *mut bool;
1331        if !*found_error
1332            && ts_subtree_is_error(
1333                *((*iterator).subtrees.contents).offset(0 as os::raw::c_int as isize),
1334            ) as os::raw::c_int
1335                != 0
1336        {
1337            *found_error = true;
1338            return (StackActionPop as os::raw::c_int | StackActionStop as os::raw::c_int)
1339                as StackAction;
1340        } else {
1341            return StackActionStop as os::raw::c_int as StackAction;
1342        }
1343    } else {
1344        return StackActionNone as os::raw::c_int as StackAction;
1345    };
1346}
#[no_mangle]
/// If the head node at `version` has an ERROR subtree on any of its links,
/// pop it and return its subtrees; otherwise return an empty array.
///
/// Only runs the (expensive) traversal when a quick scan of the head node's
/// links actually finds an ERROR subtree.
pub unsafe extern "C" fn ts_stack_pop_error(
    mut self_0: *mut Stack,
    mut version: StackVersion,
) -> SubtreeArray {
    if version < (*self_0).heads.size {
    } else {
        panic!();
    }
    let mut node: *mut StackNode =
        (*(&mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead)).node;
    // Cheap pre-check: is any link's subtree an ERROR?
    let mut i: os::raw::c_uint = 0 as os::raw::c_int as os::raw::c_uint;
    while i < (*node).link_count as os::raw::c_uint {
        if !((*node).links[i as usize].subtree.ptr).is_null()
            && ts_subtree_is_error((*node).links[i as usize].subtree) as os::raw::c_int != 0
        {
            let mut found_error: bool = false;
            let mut pop: StackSliceArray = stack__iter(
                self_0,
                version,
                Some(
                    pop_error_callback
                        as unsafe extern "C" fn(
                            *mut os::raw::c_void,
                            *const StackIterator,
                        ) -> StackAction,
                ),
                &mut found_error as *mut bool as *mut os::raw::c_void,
                1 as os::raw::c_int,
            );
            if pop.size > 0 as os::raw::c_int as os::raw::c_uint {
                // The callback pops at most one path, so exactly one slice.
                if pop.size == 1 as os::raw::c_int as os::raw::c_uint {
                } else {
                    panic!();
                }
                // Fold the popped path's head back into `version`.
                ts_stack_renumber_version(
                    self_0,
                    (*(pop.contents).offset(0 as os::raw::c_int as isize)).version,
                    version,
                );
                return (*(pop.contents).offset(0 as os::raw::c_int as isize)).subtrees;
            }
            // Error found but nothing popped (e.g. iterator cap): give up.
            break;
        } else {
            i = i.wrapping_add(1);
        }
    }
    // No ERROR subtree on the head node's links.
    return {
        let mut init = SubtreeArray {
            contents: 0 as *mut Subtree,
            size: 0 as os::raw::c_int as u32,
            capacity: 0,
        };
        init
    };
}
1403#[inline(always)]
1404unsafe extern "C" fn pop_all_callback(
1405    mut payload: *mut os::raw::c_void,
1406    mut iterator: *const StackIterator,
1407) -> StackAction {
1408    return (if (*(*iterator).node).link_count as os::raw::c_int == 0 as os::raw::c_int {
1409        StackActionPop as os::raw::c_int
1410    } else {
1411        StackActionNone as os::raw::c_int
1412    }) as StackAction;
1413}
1414#[no_mangle]
1415pub unsafe extern "C" fn ts_stack_pop_all(
1416    mut self_0: *mut Stack,
1417    mut version: StackVersion,
1418) -> StackSliceArray {
1419    return stack__iter(
1420        self_0,
1421        version,
1422        Some(
1423            pop_all_callback
1424                as unsafe extern "C" fn(*mut os::raw::c_void, *const StackIterator) -> StackAction,
1425        ),
1426        0 as *mut os::raw::c_void,
1427        0 as os::raw::c_int,
1428    );
1429}
#[inline(always)]
/// Iteration callback for `ts_stack_record_summary`.
///
/// Appends one `(position, depth, state)` entry per distinct state observed
/// at each depth, stopping a path once it exceeds `max_depth`. Duplicate
/// suppression only scans the tail of the summary down to the first entry
/// with a smaller depth, since entries are appended in nondecreasing depth.
unsafe extern "C" fn summarize_stack_callback(
    mut payload: *mut os::raw::c_void,
    mut iterator: *const StackIterator,
) -> StackAction {
    let mut session: *mut SummarizeStackSession = payload as *mut SummarizeStackSession;
    let mut state: TSStateId = (*(*iterator).node).state;
    let mut depth: os::raw::c_uint = (*iterator).subtree_count;
    if depth > (*session).max_depth {
        return StackActionStop as os::raw::c_int as StackAction;
    }
    // Scan backward; `i` is unsigned, so the `i + 1 > 0` condition fails
    // once `i` wraps below zero.
    let mut i: os::raw::c_uint =
        ((*(*session).summary).size).wrapping_sub(1 as os::raw::c_int as os::raw::c_uint);
    while i.wrapping_add(1 as os::raw::c_int as os::raw::c_uint)
        > 0 as os::raw::c_int as os::raw::c_uint
    {
        let mut entry: StackSummaryEntry = *((*(*session).summary).contents).offset(i as isize);
        if entry.depth < depth {
            // Entries at smaller depths can't be duplicates of this one.
            break;
        }
        if entry.depth == depth && entry.state as os::raw::c_int == state as os::raw::c_int {
            // Already recorded this (depth, state) pair.
            return StackActionNone as os::raw::c_int as StackAction;
        }
        i = i.wrapping_sub(1);
    }
    // Manual push of the new entry (grow, then write at the old size).
    array__grow(
        (*session).summary as *mut VoidArray,
        1 as os::raw::c_int as size_t,
        ::std::mem::size_of::<StackSummaryEntry>() as usize,
    );
    let ref mut fresh35 = (*(*session).summary).size;
    let fresh36 = *fresh35;
    *fresh35 = (*fresh35).wrapping_add(1);
    *((*(*session).summary).contents).offset(fresh36 as isize) = {
        let mut init = StackSummaryEntry {
            position: (*(*iterator).node).position,
            depth: depth,
            state: state,
        };
        init
    };
    return StackActionNone as os::raw::c_int as StackAction;
}
#[no_mangle]
/// Build and store a summary of the states reachable from the head at
/// `version`, down to `max_depth` subtrees.
///
/// The summary is heap-allocated and attached to the head, replacing (and
/// freeing) any previous summary. Passing -1 as the goal subtree count makes
/// `stack__iter` walk without collecting subtrees.
pub unsafe extern "C" fn ts_stack_record_summary(
    mut self_0: *mut Stack,
    mut version: StackVersion,
    mut max_depth: os::raw::c_uint,
) {
    let mut session: SummarizeStackSession = {
        let mut init = SummarizeStackSession {
            summary: crate::alloc::ts_malloc(::std::mem::size_of::<StackSummary>() as usize)
                as *mut StackSummary,
            max_depth: max_depth,
        };
        init
    };
    // ts_malloc doesn't zero: initialize the freshly allocated array header.
    (*session.summary).size = 0 as os::raw::c_int as u32;
    (*session.summary).capacity = 0 as os::raw::c_int as u32;
    let ref mut fresh37 = (*session.summary).contents;
    *fresh37 = 0 as *mut StackSummaryEntry;
    stack__iter(
        self_0,
        version,
        Some(
            summarize_stack_callback
                as unsafe extern "C" fn(*mut os::raw::c_void, *const StackIterator) -> StackAction,
        ),
        &mut session as *mut SummarizeStackSession as *mut os::raw::c_void,
        -(1 as os::raw::c_int),
    );
    let mut head: *mut StackHead =
        &mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead;
    // Free any summary recorded earlier for this head (array, then holder).
    if !((*head).summary).is_null() {
        array__delete((*head).summary as *mut VoidArray);
        crate::alloc::ts_free((*head).summary as *mut os::raw::c_void);
    }
    let ref mut fresh38 = (*head).summary;
    *fresh38 = session.summary;
}
1510#[no_mangle]
1511pub unsafe extern "C" fn ts_stack_get_summary(
1512    mut self_0: *mut Stack,
1513    mut version: StackVersion,
1514) -> *mut StackSummary {
1515    if version < (*self_0).heads.size {
1516    } else {
1517        panic!();
1518    }
1519    return (*(&mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead))
1520        .summary;
1521}
1522#[no_mangle]
1523pub unsafe extern "C" fn ts_stack_dynamic_precedence(
1524    mut self_0: *mut Stack,
1525    mut version: StackVersion,
1526) -> os::raw::c_int {
1527    if version < (*self_0).heads.size {
1528    } else {
1529        panic!();
1530    }
1531    return (*(*(&mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead))
1532        .node)
1533        .dynamic_precedence;
1534}
#[no_mangle]
/// Whether the head at `version` has made real progress since its last error.
///
/// True immediately if the head's error cost is zero. Otherwise walks back
/// along `links[0]` through the nodes created since the last error, looking
/// for any subtree that consumed source bytes.
pub unsafe extern "C" fn ts_stack_has_advanced_since_error(
    mut self_0: *const Stack,
    mut version: StackVersion,
) -> bool {
    if version < (*self_0).heads.size {
    } else {
        panic!();
    }
    let mut head: *const StackHead =
        &mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead;
    let mut node: *const StackNode = (*head).node;
    if (*node).error_cost == 0 as os::raw::c_int as os::raw::c_uint {
        return true;
    }
    while !node.is_null() {
        // Stop at the base of the stack.
        if !((*node).link_count as os::raw::c_int > 0 as os::raw::c_int) {
            break;
        }
        let mut subtree: Subtree = (*node).links[0 as os::raw::c_int as usize].subtree;
        // A null subtree marks an error boundary: no progress beyond it.
        if (subtree.ptr).is_null() {
            break;
        }
        if ts_subtree_total_bytes(subtree) > 0 as os::raw::c_int as os::raw::c_uint {
            return true;
        } else {
            // Only keep walking through zero-width, error-free subtrees that
            // were pushed after the last error.
            if !((*node).node_count > (*head).node_count_at_last_error
                && ts_subtree_error_cost(subtree) == 0 as os::raw::c_int as os::raw::c_uint)
            {
                break;
            }
            node = (*node).links[0 as os::raw::c_int as usize].node;
        }
    }
    return false;
}
1571#[no_mangle]
1572pub unsafe extern "C" fn ts_stack_remove_version(
1573    mut self_0: *mut Stack,
1574    mut version: StackVersion,
1575) {
1576    if version < (*self_0).heads.size {
1577    } else {
1578        panic!();
1579    }
1580    stack_head_delete(
1581        &mut *((*self_0).heads.contents).offset(version as isize),
1582        &mut (*self_0).node_pool,
1583        (*self_0).subtree_pool,
1584    );
1585    array__erase(
1586        &mut (*self_0).heads as *mut C2RustUnnamed_8 as *mut VoidArray,
1587        ::std::mem::size_of::<StackHead>() as usize,
1588        version,
1589    );
1590}
#[no_mangle]
/// Move version `v1` into slot `v2` (which must be smaller), deleting the
/// head previously stored at `v2` and erasing slot `v1`.
///
/// If the target head carried a recorded summary and the source head did
/// not, the summary is transferred to the source first so it survives the
/// target head's deletion.
pub unsafe extern "C" fn ts_stack_renumber_version(
    mut self_0: *mut Stack,
    mut v1: StackVersion,
    mut v2: StackVersion,
) {
    if v1 == v2 {
        return;
    }
    if v2 < v1 {
    } else {
        panic!();
    }
    if v1 < (*self_0).heads.size {
    } else {
        panic!();
    }
    let mut source_head: *mut StackHead =
        &mut *((*self_0).heads.contents).offset(v1 as isize) as *mut StackHead;
    let mut target_head: *mut StackHead =
        &mut *((*self_0).heads.contents).offset(v2 as isize) as *mut StackHead;
    // Preserve the target's summary by handing it to the source before the
    // target head is deleted below.
    if !((*target_head).summary).is_null() && ((*source_head).summary).is_null() {
        let ref mut fresh39 = (*source_head).summary;
        *fresh39 = (*target_head).summary;
        let ref mut fresh40 = (*target_head).summary;
        *fresh40 = 0 as *mut StackSummary;
    }
    stack_head_delete(
        target_head,
        &mut (*self_0).node_pool,
        (*self_0).subtree_pool,
    );
    // Shallow copy: ownership of the source head's contents moves to v2.
    *target_head = *source_head;
    array__erase(
        &mut (*self_0).heads as *mut C2RustUnnamed_8 as *mut VoidArray,
        ::std::mem::size_of::<StackHead>() as usize,
        v1,
    );
}
1630#[no_mangle]
1631pub unsafe extern "C" fn ts_stack_swap_versions(
1632    mut self_0: *mut Stack,
1633    mut v1: StackVersion,
1634    mut v2: StackVersion,
1635) {
1636    let mut temporary_head: StackHead = *((*self_0).heads.contents).offset(v1 as isize);
1637    *((*self_0).heads.contents).offset(v1 as isize) =
1638        *((*self_0).heads.contents).offset(v2 as isize);
1639    *((*self_0).heads.contents).offset(v2 as isize) = temporary_head;
1640}
1641#[no_mangle]
1642pub unsafe extern "C" fn ts_stack_copy_version(
1643    mut self_0: *mut Stack,
1644    mut version: StackVersion,
1645) -> StackVersion {
1646    if version < (*self_0).heads.size {
1647    } else {
1648        panic!();
1649    }
1650    array__grow(
1651        &mut (*self_0).heads as *mut C2RustUnnamed_8 as *mut VoidArray,
1652        1 as os::raw::c_int as size_t,
1653        ::std::mem::size_of::<StackHead>() as usize,
1654    );
1655    let ref mut fresh41 = (*self_0).heads.size;
1656    let fresh42 = *fresh41;
1657    *fresh41 = (*fresh41).wrapping_add(1);
1658    *((*self_0).heads.contents).offset(fresh42 as isize) =
1659        *((*self_0).heads.contents).offset(version as isize);
1660    if ((*self_0).heads.size).wrapping_sub(1 as os::raw::c_int as os::raw::c_uint)
1661        < (*self_0).heads.size
1662    {
1663    } else {
1664        panic!();
1665    }
1666    let mut head: *mut StackHead = &mut *((*self_0).heads.contents).offset(
1667        ((*self_0).heads.size).wrapping_sub(1 as os::raw::c_int as os::raw::c_uint) as isize,
1668    ) as *mut StackHead;
1669    stack_node_retain((*head).node);
1670    if !((*head).last_external_token.ptr).is_null() {
1671        ts_subtree_retain((*head).last_external_token);
1672    }
1673    let ref mut fresh43 = (*head).summary;
1674    *fresh43 = 0 as *mut StackSummary;
1675    return ((*self_0).heads.size).wrapping_sub(1 as os::raw::c_int as os::raw::c_uint);
1676}
1677#[no_mangle]
1678pub unsafe extern "C" fn ts_stack_merge(
1679    mut self_0: *mut Stack,
1680    mut version1: StackVersion,
1681    mut version2: StackVersion,
1682) -> bool {
1683    if !ts_stack_can_merge(self_0, version1, version2) {
1684        return false;
1685    }
1686    let mut head1: *mut StackHead =
1687        &mut *((*self_0).heads.contents).offset(version1 as isize) as *mut StackHead;
1688    let mut head2: *mut StackHead =
1689        &mut *((*self_0).heads.contents).offset(version2 as isize) as *mut StackHead;
1690    let mut i: u32 = 0 as os::raw::c_int as u32;
1691    while i < (*(*head2).node).link_count as os::raw::c_uint {
1692        stack_node_add_link(
1693            (*head1).node,
1694            (*(*head2).node).links[i as usize],
1695            (*self_0).subtree_pool,
1696        );
1697        i = i.wrapping_add(1);
1698    }
1699    if (*(*head1).node).state as os::raw::c_int == 0 as os::raw::c_int {
1700        (*head1).node_count_at_last_error = (*(*head1).node).node_count;
1701    }
1702    ts_stack_remove_version(self_0, version2);
1703    return true;
1704}
1705#[no_mangle]
1706pub unsafe extern "C" fn ts_stack_can_merge(
1707    mut self_0: *mut Stack,
1708    mut version1: StackVersion,
1709    mut version2: StackVersion,
1710) -> bool {
1711    let mut head1: *mut StackHead =
1712        &mut *((*self_0).heads.contents).offset(version1 as isize) as *mut StackHead;
1713    let mut head2: *mut StackHead =
1714        &mut *((*self_0).heads.contents).offset(version2 as isize) as *mut StackHead;
1715    return (*head1).status as os::raw::c_uint
1716        == StackStatusActive as os::raw::c_int as os::raw::c_uint
1717        && (*head2).status as os::raw::c_uint
1718            == StackStatusActive as os::raw::c_int as os::raw::c_uint
1719        && (*(*head1).node).state as os::raw::c_int == (*(*head2).node).state as os::raw::c_int
1720        && (*(*head1).node).position.bytes == (*(*head2).node).position.bytes
1721        && (*(*head1).node).error_cost == (*(*head2).node).error_cost
1722        && ts_subtree_external_scanner_state_eq(
1723            (*head1).last_external_token,
1724            (*head2).last_external_token,
1725        ) as os::raw::c_int
1726            != 0;
1727}
1728#[no_mangle]
1729pub unsafe extern "C" fn ts_stack_halt(mut self_0: *mut Stack, mut version: StackVersion) {
1730    if version < (*self_0).heads.size {
1731    } else {
1732        panic!();
1733    }
1734    (*(&mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead)).status =
1735        StackStatusHalted;
1736}
1737#[no_mangle]
1738pub unsafe extern "C" fn ts_stack_pause(
1739    mut self_0: *mut Stack,
1740    mut version: StackVersion,
1741    mut lookahead: Subtree,
1742) {
1743    if version < (*self_0).heads.size {
1744    } else {
1745        panic!();
1746    }
1747    let mut head: *mut StackHead =
1748        &mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead;
1749    (*head).status = StackStatusPaused;
1750    (*head).lookahead_when_paused = lookahead;
1751    (*head).node_count_at_last_error = (*(*head).node).node_count;
1752}
1753#[no_mangle]
1754pub unsafe extern "C" fn ts_stack_is_active(
1755    mut self_0: *const Stack,
1756    mut version: StackVersion,
1757) -> bool {
1758    if version < (*self_0).heads.size {
1759    } else {
1760        panic!();
1761    }
1762    return (*(&mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead)).status
1763        as os::raw::c_uint
1764        == StackStatusActive as os::raw::c_int as os::raw::c_uint;
1765}
1766#[no_mangle]
1767pub unsafe extern "C" fn ts_stack_is_halted(
1768    mut self_0: *const Stack,
1769    mut version: StackVersion,
1770) -> bool {
1771    if version < (*self_0).heads.size {
1772    } else {
1773        panic!();
1774    }
1775    return (*(&mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead)).status
1776        as os::raw::c_uint
1777        == StackStatusHalted as os::raw::c_int as os::raw::c_uint;
1778}
1779#[no_mangle]
1780pub unsafe extern "C" fn ts_stack_is_paused(
1781    mut self_0: *const Stack,
1782    mut version: StackVersion,
1783) -> bool {
1784    if version < (*self_0).heads.size {
1785    } else {
1786        panic!();
1787    }
1788    return (*(&mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead)).status
1789        as os::raw::c_uint
1790        == StackStatusPaused as os::raw::c_int as os::raw::c_uint;
1791}
1792#[no_mangle]
1793pub unsafe extern "C" fn ts_stack_resume(
1794    mut self_0: *mut Stack,
1795    mut version: StackVersion,
1796) -> Subtree {
1797    if version < (*self_0).heads.size {
1798    } else {
1799        panic!();
1800    }
1801    let mut head: *mut StackHead =
1802        &mut *((*self_0).heads.contents).offset(version as isize) as *mut StackHead;
1803    if (*head).status as os::raw::c_uint == StackStatusPaused as os::raw::c_int as os::raw::c_uint {
1804    } else {
1805        panic!();
1806    }
1807    let mut result: Subtree = (*head).lookahead_when_paused;
1808    (*head).status = StackStatusActive;
1809    (*head).lookahead_when_paused = Subtree {
1810        ptr: 0 as *const SubtreeHeapData,
1811    };
1812    return result;
1813}
1814#[no_mangle]
1815pub unsafe extern "C" fn ts_stack_clear(mut self_0: *mut Stack) {
1816    stack_node_retain((*self_0).base_node);
1817    let mut i: u32 = 0 as os::raw::c_int as u32;
1818    while i < (*self_0).heads.size {
1819        stack_head_delete(
1820            &mut *((*self_0).heads.contents).offset(i as isize),
1821            &mut (*self_0).node_pool,
1822            (*self_0).subtree_pool,
1823        );
1824        i = i.wrapping_add(1);
1825    }
1826    (*self_0).heads.size = 0 as os::raw::c_int as u32;
1827    array__grow(
1828        &mut (*self_0).heads as *mut C2RustUnnamed_8 as *mut VoidArray,
1829        1 as os::raw::c_int as size_t,
1830        ::std::mem::size_of::<StackHead>() as usize,
1831    );
1832    let ref mut fresh44 = (*self_0).heads.size;
1833    let fresh45 = *fresh44;
1834    *fresh44 = (*fresh44).wrapping_add(1);
1835    *((*self_0).heads.contents).offset(fresh45 as isize) = {
1836        let mut init = StackHead {
1837            node: (*self_0).base_node,
1838            summary: 0 as *mut StackSummary,
1839            node_count_at_last_error: 0,
1840            last_external_token: Subtree {
1841                ptr: 0 as *const SubtreeHeapData,
1842            },
1843            lookahead_when_paused: Subtree {
1844                ptr: 0 as *const SubtreeHeapData,
1845            },
1846            status: StackStatusActive,
1847        };
1848        init
1849    };
1850}
// Walks every stack version and emits the stack as a GraphViz "dot" graph.
// NOTE(review): in the C original this printed DOT text to `f`; the
// transpile stripped the fprintf calls, leaving the bare `();` statements
// below, so this version only performs the traversal (plus the symbol
// write call) and always returns true.
#[no_mangle]
pub unsafe extern "C" fn ts_stack_print_dot_graph(
    mut self_0: *mut Stack,
    mut language: *const TSLanguage,
    mut f: *mut FILE,
) -> bool {
    // Reuse the stack's scratch iterator array for the breadth-first walk.
    array__reserve(
        &mut (*self_0).iterators as *mut C2RustUnnamed_7 as *mut VoidArray,
        ::std::mem::size_of::<StackIterator>() as usize,
        32 as os::raw::c_int as u32,
    );
    // Degenerate transpile of `if (!f) f = stderr;` — now a no-op
    // (null stays null). TODO confirm against the C original.
    if f.is_null() {
        f = core::ptr::null_mut();
    }
    // Stripped fprintf calls (graph header).
    ();
    ();
    ();
    // Set of stack nodes already visited, so shared nodes in the graph
    // are only expanded once.
    let mut visited_nodes: C2RustUnnamed_9 = {
        let mut init = C2RustUnnamed_9 {
            contents: 0 as *mut *mut StackNode,
            size: 0 as os::raw::c_int as u32,
            capacity: 0 as os::raw::c_int as u32,
        };
        init
    };
    (*self_0).iterators.size = 0 as os::raw::c_int as u32;
    // Phase 1: seed one iterator per non-halted head.
    let mut i: u32 = 0 as os::raw::c_int as u32;
    while i < (*self_0).heads.size {
        let mut head: *mut StackHead =
            &mut *((*self_0).heads.contents).offset(i as isize) as *mut StackHead;
        if !((*head).status as os::raw::c_uint
            == StackStatusHalted as os::raw::c_int as os::raw::c_uint)
        {
            // Stripped fprintf calls (head label / status).
            ();
            ();
            if (*head).status as os::raw::c_uint
                == StackStatusPaused as os::raw::c_int as os::raw::c_uint
            {
                ();
            }
            ();
            // Stripped printing of the head's summary entries.
            if !((*head).summary).is_null() {
                ();
                let mut j: u32 = 0 as os::raw::c_int as u32;
                while j < (*(*head).summary).size {
                    ();
                    j = j.wrapping_add(1);
                }
            }
            // Stripped printing of the external scanner state bytes.
            if !((*head).last_external_token.ptr).is_null() {
                let mut state: *const ExternalScannerState = &(*(*head).last_external_token.ptr)
                    .c2rust_unnamed
                    .external_scanner_state;
                let mut data: *const os::raw::c_char = ts_external_scanner_state_data(state);
                ();
                let mut j_0: u32 = 0 as os::raw::c_int as u32;
                while j_0 < (*state).length {
                    ();
                    j_0 = j_0.wrapping_add(1);
                }
            }
            ();
            // array_push an iterator positioned at this head's node.
            array__grow(
                &mut (*self_0).iterators as *mut C2RustUnnamed_7 as *mut VoidArray,
                1 as os::raw::c_int as size_t,
                ::std::mem::size_of::<StackIterator>() as usize,
            );
            let ref mut fresh46 = (*self_0).iterators.size;
            let fresh47 = *fresh46;
            *fresh46 = (*fresh46).wrapping_add(1);
            *((*self_0).iterators.contents).offset(fresh47 as isize) = {
                let mut init = StackIterator {
                    node: (*head).node,
                    subtrees: SubtreeArray {
                        contents: 0 as *mut Subtree,
                        size: 0,
                        capacity: 0,
                    },
                    subtree_count: 0,
                    is_pending: false,
                };
                init
            };
        }
        i = i.wrapping_add(1);
    }
    // Phase 2: repeatedly advance every iterator one node toward the base
    // of the stack until none of them reaches an unvisited node.
    let mut all_iterators_done: bool = false;
    while !all_iterators_done {
        all_iterators_done = true;
        let mut i_0: u32 = 0 as os::raw::c_int as u32;
        while i_0 < (*self_0).iterators.size {
            let mut iterator: StackIterator = *((*self_0).iterators.contents).offset(i_0 as isize);
            let mut node: *mut StackNode = iterator.node;
            // Skip nodes that were already expanded on a previous pass.
            let mut j_1: u32 = 0 as os::raw::c_int as u32;
            while j_1 < visited_nodes.size {
                if *(visited_nodes.contents).offset(j_1 as isize) == node {
                    node = 0 as *mut StackNode;
                    break;
                } else {
                    j_1 = j_1.wrapping_add(1);
                }
            }
            if !node.is_null() {
                all_iterators_done = false;
                // Stripped fprintf calls (node shape: error node, extra
                // token, or ordinary state node).
                ();
                if (*node).state as os::raw::c_int == 0 as os::raw::c_int {
                    ();
                } else if (*node).link_count as os::raw::c_int == 1 as os::raw::c_int
                    && !((*node).links[0 as os::raw::c_int as usize].subtree.ptr).is_null()
                    && ts_subtree_extra((*node).links[0 as os::raw::c_int as usize].subtree)
                        as os::raw::c_int
                        != 0
                {
                    ();
                } else {
                    ();
                }
                ();
                // Emit one edge per link; each additional link forks a
                // new iterator so every path is followed.
                let mut j_2: os::raw::c_int = 0 as os::raw::c_int;
                while j_2 < (*node).link_count as os::raw::c_int {
                    let mut link: StackLink = (*node).links[j_2 as usize];
                    ();
                    if link.is_pending {
                        ();
                    }
                    if !(link.subtree.ptr).is_null()
                        && ts_subtree_extra(link.subtree) as os::raw::c_int != 0
                    {
                        ();
                    }
                    if (link.subtree.ptr).is_null() {
                        ();
                    } else {
                        ();
                        // Visible-but-unnamed symbols are quoted in DOT
                        // output (the quoting fprintf was stripped).
                        let mut quoted: bool = ts_subtree_visible(link.subtree) as os::raw::c_int
                            != 0
                            && !ts_subtree_named(link.subtree);
                        if quoted {
                            ();
                        }
                        ts_language_write_symbol_as_dot_string(
                            language,
                            f,
                            ts_subtree_symbol(link.subtree),
                        );
                        if quoted {
                            ();
                        }
                        ();
                        ();
                    }
                    ();
                    // The first link reuses this iterator; later links
                    // push a copy so the sibling paths are also walked.
                    let mut next_iterator: *mut StackIterator = 0 as *mut StackIterator;
                    if j_2 == 0 as os::raw::c_int {
                        next_iterator = &mut *((*self_0).iterators.contents).offset(i_0 as isize)
                            as *mut StackIterator;
                    } else {
                        array__grow(
                            &mut (*self_0).iterators as *mut C2RustUnnamed_7 as *mut VoidArray,
                            1 as os::raw::c_int as size_t,
                            ::std::mem::size_of::<StackIterator>() as usize,
                        );
                        let ref mut fresh48 = (*self_0).iterators.size;
                        let fresh49 = *fresh48;
                        *fresh48 = (*fresh48).wrapping_add(1);
                        *((*self_0).iterators.contents).offset(fresh49 as isize) = iterator;
                        if ((*self_0).iterators.size)
                            .wrapping_sub(1 as os::raw::c_int as os::raw::c_uint)
                            < (*self_0).iterators.size
                        {
                        } else {
                            panic!();
                        }
                        next_iterator = &mut *((*self_0).iterators.contents).offset(
                            ((*self_0).iterators.size)
                                .wrapping_sub(1 as os::raw::c_int as os::raw::c_uint)
                                as isize,
                        ) as *mut StackIterator;
                    }
                    let ref mut fresh50 = (*next_iterator).node;
                    *fresh50 = link.node;
                    j_2 += 1;
                }
                // Mark this node as expanded.
                array__grow(
                    &mut visited_nodes as *mut C2RustUnnamed_9 as *mut VoidArray,
                    1 as os::raw::c_int as size_t,
                    ::std::mem::size_of::<*mut StackNode>() as usize,
                );
                let fresh51 = visited_nodes.size;
                visited_nodes.size = (visited_nodes.size).wrapping_add(1);
                let ref mut fresh52 = *(visited_nodes.contents).offset(fresh51 as isize);
                *fresh52 = node;
            }
            i_0 = i_0.wrapping_add(1);
        }
    }
    ();
    array__delete(&mut visited_nodes as *mut C2RustUnnamed_9 as *mut VoidArray);
    return true;
}