use std::borrow::Borrow;
use std::cmp::Reverse;
use std::collections::BinaryHeap;
use std::fmt::Debug;
use std::marker::PhantomData;

use crate::lexicon::{Feature, FeatureOrLemma, LexemeId, Lexicon, ParsingError};
use crate::parsing::rules::StolenInfo;
use crate::parsing::trees::{GornIterator, StolenHead};
use crate::{Direction, ParseHeap, ParsingConfig};
use beam::Scanner;
use itertools::{Either, Itertools};
use logprob::LogProb;
use petgraph::graph::NodeIndex;
use thin_vec::{ThinVec, thin_vec};
pub use trees::GornIndex;
use trees::{FutureTree, ParseMoment};

use rules::Rule;
pub use rules::RulePool;
pub(crate) use rules::{PartialRulePool, RuleHolder, RuleIndex};

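/// One hypothesis on the parse heap: the running log-probability of the
/// derivation so far, a min-queue of pending [`ParseMoment`]s (via [`Reverse`]),
/// head-movement bookkeeping, the partially built rule pool, and the scanner
/// state `beam`.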
#[derive(Debug, Clone)]
pub(crate) struct BeamWrapper<T, B: Scanner<T>> {
    log_prob: LogProb<f64>,
    queue: BinaryHeap<Reverse<ParseMoment>>,
    heads: Vec<PossibleHeads>,
    rules: PartialRulePool,
    n_consec_empties: usize,
    pub beam: B,
    phantom: PhantomData<T>,
}

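/// Head-movement state for one "stealer" slot: either a set of still-viable
/// head trees, a single resolved [`HeadTree`] together with the rule that fixed
/// it, or a `PlaceHolder` used only while the slot is temporarily taken with
/// `std::mem::take`.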
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub(crate) enum PossibleHeads {
    Possibles(PossibleTree),
    FoundTree(HeadTree, RuleIndex),
    #[default]
    PlaceHolder,
}

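/// A compact disjunction of head trees: each node holds its candidate head
/// lexemes plus optional alternatives for a left or right child. Concrete
/// [`HeadTree`]s are enumerated by `to_trees`.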
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub(crate) struct PossibleTree {
    head: Vec<LexemeId>,
    left_child: Option<Vec<PossibleTree>>,
    right_child: Option<Vec<PossibleTree>>,
}

impl PossibleTree {
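    /// Follows the Gorn path `d`, pruning the sibling on the other side at each
    /// step, and finally keeps only candidate heads whose parent in `lexicon`
    /// is `node`. Returns whether any candidate survives.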
    fn filter_node<T: Eq, C: Eq>(
        &mut self,
        node: NodeIndex,
        mut d: GornIterator,
        lexicon: &Lexicon<T, C>,
    ) -> bool {
        match d.next() {
            Some(Direction::Left) => {
                self.right_child = None;
                if let Some(x) = self.left_child.as_mut() {
                    x.iter_mut().any(|h| h.filter_node(node, d, lexicon))
                } else {
                    false
                }
            }
            Some(Direction::Right) => {
                self.left_child = None;

                if let Some(x) = self.right_child.as_mut() {
                    x.iter_mut().any(|h| h.filter_node(node, d, lexicon))
                } else {
                    false
                }
            }
            None => {
                self.head
                    .retain(|x| node == lexicon.parent_of(x.0).unwrap());
                !self.head.is_empty()
            }
        }
    }

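    /// Builds a tree whose root candidates are `nodes` and which has no children yet.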
    pub(crate) fn just_heads(nodes: Vec<LexemeId>) -> Self {
        Self {
            head: nodes,
            left_child: None,
            right_child: None,
        }
    }

    pub(crate) fn merge(mut self, children: Vec<PossibleTree>, dir: Direction) -> Self {
        match dir {
            Direction::Left => self.left_child = Some(children),
            Direction::Right => self.right_child = Some(children),
        }
        self
    }

    pub(crate) fn empty() -> Self {
        PossibleTree {
            head: vec![],
            left_child: None,
            right_child: None,
        }
    }

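    /// Enumerates every concrete [`HeadTree`] this node encodes: one tree per
    /// candidate head if there are no children, otherwise the cartesian product
    /// of the candidate heads with the trees of either child (tagged with its
    /// direction).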
    fn to_trees(&self) -> impl Iterator<Item = HeadTree> {
        match (&self.left_child, &self.right_child) {
            (None, None) => Either::Left(self.head.iter().map(|x| HeadTree {
                head: *x,
                child: None,
            })),
            _ => {
                let children = self
                    .left_child
                    .iter()
                    .flat_map(|x| x.iter())
                    .flat_map(|x| x.to_trees())
                    .map(|x| (Direction::Left, x));

                let children = children.chain(
                    self.right_child
                        .iter()
                        .flat_map(|x| x.iter())
                        .flat_map(|x| x.to_trees())
                        .map(|x| (Direction::Right, x)),
                );

                Either::Right(
                    self.head
                        .iter()
                        .cartesian_product(children.collect_vec())
                        .map(|(x, (d, h))| HeadTree {
                            head: *x,
                            child: Some((Box::new(h), d)),
                        }),
                )
            }
        }
    }
}

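/// A single resolved head-movement chain: one lexeme per node and at most one
/// child, on the left or the right.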
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub(crate) struct HeadTree {
    head: LexemeId,
    child: Option<(Box<HeadTree>, Direction)>,
}

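/// A [`HeadTree`] whose lexeme ids have been replaced by references to the
/// lemmas stored in the lexicon (`None` where the lexicon has no overt lemma).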
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub(crate) struct FilledHeadTree<'a, T> {
    head: Option<&'a T>,
    child: Option<(Box<FilledHeadTree<'a, T>>, Direction)>,
}

impl<'a, T> FilledHeadTree<'a, T> {
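    /// Flattens the tree left-to-right: a left child is emitted before its
    /// head, a right child after it.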
    pub fn inorder(&self) -> Vec<&'a T> {
        let mut order = vec![];
        self.inorder_inner(&mut order);
        order
    }

    fn inorder_inner(&self, v: &mut Vec<&'a T>) {
        if let Some((child, d)) = &self.child {
            match d {
                Direction::Left => {
                    child.inorder_inner(v);
                    if let Some(x) = self.head {
                        v.push(x);
                    }
                }
                Direction::Right => {
                    if let Some(x) = self.head {
                        v.push(x);
                    }
                    child.inorder_inner(v);
                }
            }
        } else if let Some(x) = self.head {
            v.push(x);
        }
    }
}

impl HeadTree {
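    /// Looks up each head's lemma in `lex`; panics if a head node is not a lemma.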
    fn to_filled_head<'a, T: Eq, C: Eq>(&self, lex: &'a Lexicon<T, C>) -> FilledHeadTree<'a, T> {
        let child = self
            .child
            .as_ref()
            .map(|(child, dir)| (Box::new(child.to_filled_head(lex)), *dir));

        FilledHeadTree {
            head: lex
                .leaf_to_lemma(self.head)
                .expect("Head nodes must be lemmas!")
                .as_ref(),
            child,
        }
    }

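    /// Walks `index` from the root and returns the lexeme at that position, or
    /// `None` if some step does not match the direction of the (single) child.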
    pub fn find_node(&self, index: GornIndex) -> Option<LexemeId> {
        let mut pos = self;
        let mut node = self.head;
        for d in index {
            if let Some((child, child_d)) = &pos.child
                && *child_d == d
            {
                node = child.head;
                pos = child;
            } else {
                return None;
            }
        }
        Some(node)
    }
}

impl<T: Eq + std::fmt::Debug, B: Scanner<T> + Eq> PartialEq for BeamWrapper<T, B> {
    fn eq(&self, other: &Self) -> bool {
        self.beam == other.beam
            && self.log_prob == other.log_prob
            && self.queue.clone().into_sorted_vec() == other.queue.clone().into_sorted_vec()
    }
}

impl<T: Eq + std::fmt::Debug, B: Scanner<T> + Eq> Eq for BeamWrapper<T, B> {}

impl<T: Eq + std::fmt::Debug, B: Scanner<T> + Eq> PartialOrd for BeamWrapper<T, B> {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl<T: Eq + std::fmt::Debug, B: Scanner<T> + Eq> Ord for BeamWrapper<T, B> {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.log_prob.cmp(&other.log_prob)
    }
}

impl<T: Clone + Eq + std::fmt::Debug, B: Scanner<T> + Eq + Clone> BeamWrapper<T, B> {
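    /// Returns true if the node found at `index` in `headtree` is exactly `node`.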
    fn check_node(&self, index: GornIndex, node: LexemeId, headtree: &HeadTree) -> bool {
        if let Some(found_node) = headtree.find_node(index) {
            found_node == node
        } else {
            false
        }
    }

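    /// Consumes the lemma `child_node` at `moment`. Depending on the moment's
    /// head-movement state this either resolves a stealer slot by trying every
    /// compatible head tree against the scanner (`multiscan`), checks a stolen
    /// head against the already-resolved tree, or performs an ordinary scan;
    /// every successful branch pushes an extended beam with a [`Rule::Scan`].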
    fn scan<Category: Eq + std::fmt::Debug + Clone>(
        mut self,
        v: &mut ParseHeap<T, B>,
        moment: &ParseMoment,
        s: &Option<T>,
        child_node: LexemeId,
        child_prob: LogProb<f64>,
        lexicon: &Lexicon<T, Category>,
    ) {
        match s {
            Some(_) => self.n_consec_empties = 0,
            None => self.n_consec_empties += 1,
        }
        let rule = match moment.stolen_head {
            Some(StolenHead::Stealer(pos)) => {
                let trees = std::mem::take(&mut self.heads[pos]);

                let PossibleHeads::Possibles(possible_trees) = trees else {
                    panic!()
                };

                for (filled_head_tree, id_tree) in possible_trees
                    .to_trees()
                    .filter(|x| x.head == child_node)
                    .map(|x| (x.to_filled_head(lexicon), x))
                {
                    let mut new_beam = self.clone();
                    if new_beam.beam.multiscan(&filled_head_tree) {
                        new_beam.heads[pos] = PossibleHeads::FoundTree(id_tree, moment.tree.id);
                        new_beam.log_prob += child_prob;
                        new_beam.rules.push_rule(
                            v.rules_mut(),
                            Rule::Scan {
                                lexeme: child_node,
                                stolen: StolenInfo::Stealer,
                            },
                            moment.tree.id,
                        );
                        v.push(new_beam);
                    }
                }
                None
            }
            Some(StolenHead::StolenHead(pos, index)) => {
                let PossibleHeads::FoundTree(headtree, rule_id) = &self.heads[pos] else {
                    panic!()
                };

                if self.check_node(index, child_node, headtree) {
                    Some(Rule::Scan {
                        lexeme: child_node,
                        stolen: StolenInfo::Stolen(*rule_id, index),
                    })
                } else {
                    None
                }
            }
            None => {
                if self.beam.scan(s) {
                    Some(Rule::Scan {
                        lexeme: child_node,
                        stolen: StolenInfo::Normal,
                    })
                } else {
                    None
                }
            }
        };

        if let Some(rule) = rule {
            self.log_prob += child_prob;
            self.rules.push_rule(v.rules_mut(), rule, moment.tree.id);
            v.push(self);
        }
    }

    fn new_future_tree(&mut self, node: NodeIndex, index: GornIndex) -> (FutureTree, RuleIndex) {
        let id = self.rules.fresh();
        (FutureTree { node, index, id }, id)
    }

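    /// Allocates a fresh rule index for a new [`ParseMoment`] at `node`, queues
    /// the moment, and returns its [`RuleIndex`].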
    fn push_moment(
        &mut self,
        node: NodeIndex,
        index: GornIndex,
        movers: ThinVec<FutureTree>,
        stolen_head: Option<StolenHead>,
    ) -> RuleIndex {
        let (tree, id) = self.new_future_tree(node, index);
        self.queue.push(Reverse(ParseMoment {
            tree,
            movers,
            stolen_head,
        }));
        id
    }

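    /// Creates the initial beam: a single moment at `category_index` with no
    /// movers and a probability of one.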
    pub(super) fn new(beam: B, category_index: NodeIndex) -> Self {
        let mut queue = BinaryHeap::<Reverse<ParseMoment>>::with_capacity(5);
        queue.push(Reverse(ParseMoment::new(
            FutureTree {
                node: category_index,
                index: GornIndex::default(),
                id: RuleIndex::one(),
            },
            thin_vec![],
            None,
        )));
        BeamWrapper {
            beam,
            heads: vec![],
            n_consec_empties: 0,
            queue,
            log_prob: LogProb::prob_of_one(),
            rules: PartialRulePool::default(),
            phantom: PhantomData,
        }
    }

    pub fn log_prob(&self) -> LogProb<f64> {
        self.log_prob
    }

    pub fn n_steps(&self) -> usize {
        self.rules.n_steps()
    }

    pub fn n_consecutive_empty(&self) -> usize {
        self.n_consec_empties
    }

    pub fn pop_moment(&mut self) -> Option<ParseMoment> {
        self.queue.pop().map(|Reverse(x)| x)
    }

    pub fn is_empty(&self) -> bool {
        self.queue.is_empty()
    }
}

fn clone_push<T: Clone>(v: &[T], x: T) -> ThinVec<T> {
    let mut v: ThinVec<T> = ThinVec::from(v);
    v.push(x);
    v
}

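/// Unmerge where the selected category `cat` is supplied by one of the current
/// movers. For every mover whose stored subtree has category `cat`, a new beam
/// is pushed in which that mover is consumed: the remaining movers stay with
/// the child unless `child_node` is a complement (then they travel with the
/// stored subtree), and a [`Rule::UnmergeFromMover`] is recorded. Returns
/// `true` if at least one beam was pushed.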
#[allow(clippy::too_many_arguments)]
#[inline]
fn unmerge_from_mover<
    T: Eq + std::fmt::Debug + Clone,
    U: Eq + std::fmt::Debug + Clone,
    Category: Eq + std::fmt::Debug + Clone,
    B: Scanner<T> + Clone + Eq,
>(
    v: &mut ParseHeap<T, B>,
    lexicon: &Lexicon<U, Category>,
    moment: &ParseMoment,
    beam: &BeamWrapper<T, B>,
    cat: &Category,
    child_node: NodeIndex,
    dir: Direction,
    child_prob: LogProb<f64>,
) -> bool {
    let mut new_beam = false;
    for mover in moment.movers.iter() {
        for stored_child_node in lexicon.children_of(mover.node) {
            let (stored, stored_prob) = lexicon.get(stored_child_node).unwrap();
            match stored {
                FeatureOrLemma::Feature(Feature::Category(stored)) if stored == cat => {
                    let mut beam = beam.clone();
                    let movers = moment
                        .movers
                        .iter()
                        .filter(|&v| v != mover)
                        .cloned()
                        .collect();

                    let (stored_movers, child_movers) = if lexicon.is_complement(child_node) {
                        (movers, thin_vec![])
                    } else {
                        (thin_vec![], movers)
                    };

                    let child_id = beam.push_moment(
                        child_node,
                        moment.tree.index,
                        child_movers,
                        moment.stolen_head,
                    );
                    let stored_id =
                        beam.push_moment(stored_child_node, mover.index, stored_movers, None);

                    beam.log_prob += stored_prob + child_prob;

                    let trace_id = beam.rules.fresh_trace();

                    beam.rules.push_rule(
                        v.rules_mut(),
                        Rule::UnmergeFromMover {
                            child: child_node,
                            child_id,
                            stored_id,
                            trace_id,
                            destination_id: mover.id,
                            dir,
                        },
                        moment.tree.id,
                    );
                    v.push(beam);
                    new_beam = true;
                }
                _ => (),
            }
        }
    }
    new_beam
}

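/// Ordinary unmerge: schedules the selected complement of category `cat` on the
/// `dir` side and the selecting child on the opposite side. The current movers
/// stay with the child unless `child_node` is a complement, in which case they
/// pass to the selected complement. Records a [`Rule::Unmerge`], marked as an
/// affix merge when head movement is involved.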
#[allow(clippy::too_many_arguments)]
#[inline]
fn unmerge<
    T: Eq + std::fmt::Debug + Clone,
    U: Eq + std::fmt::Debug + Clone,
    Category: Eq + std::fmt::Debug + Clone,
    B: Scanner<T> + Eq + Clone,
>(
    v: &mut ParseHeap<T, B>,
    lexicon: &Lexicon<U, Category>,
    moment: &ParseMoment,
    mut beam: BeamWrapper<T, B>,
    cat: &Category,
    dir: Direction,
    child_node: NodeIndex,
    child_prob: LogProb<f64>,
    head_info: HeadMovement,
) -> Result<(), ParsingError<Category>> {
    let complement = lexicon.find_category(cat)?;

    let (complement_movers, child_movers) = if lexicon.is_complement(child_node) {
        (moment.movers.clone(), thin_vec![])
    } else {
        (thin_vec![], moment.movers.clone())
    };

    let (child_head, comp_head) = match head_info {
        HeadMovement::HeadMovement { stealer, stolen } => (Some(stealer), Some(stolen)),
        HeadMovement::Inherit => (moment.stolen_head, None),
    };

    let complement_id = beam.push_moment(
        complement,
        moment.tree.index.clone_push(dir),
        complement_movers,
        comp_head,
    );
    let child_id = beam.push_moment(
        child_node,
        moment.tree.index.clone_push(dir.flip()),
        child_movers,
        child_head,
    );

    beam.log_prob += child_prob;
    beam.rules.push_rule(
        v.rules_mut(),
        Rule::Unmerge {
            child: child_node,
            child_id,
            complement_id,
            dir,
            affix: head_info != HeadMovement::Inherit,
        },
        moment.tree.id,
    );
    v.push(beam);
    Ok(())
}

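/// Unmove where the licensee for `cat` is supplied by one of the current
/// movers rather than by a fresh node. The matching mover is replaced by its
/// stored licensee subtree, the child is rescheduled with the updated mover
/// set, and a [`Rule::UnmoveFromMover`] is recorded. Returns `true` if at
/// least one beam was pushed.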
#[allow(clippy::too_many_arguments)]
#[inline]
fn unmove_from_mover<
    T: Eq + std::fmt::Debug + Clone,
    U: Eq + std::fmt::Debug + Clone,
    Category: Eq + std::fmt::Debug + Clone,
    B: Scanner<T> + Clone + Eq,
>(
    v: &mut ParseHeap<T, B>,
    lexicon: &Lexicon<U, Category>,
    moment: &ParseMoment,
    beam: &BeamWrapper<T, B>,
    cat: &Category,
    child_node: NodeIndex,
    child_prob: LogProb<f64>,
    already_mover_of_this_cat: bool,
) -> bool {
    let mut new_beam_found = false;

    for mover in moment
        .movers
        .iter()
        .filter(|x| lexicon.get_feature_category(x.node) == Some(cat) || !already_mover_of_this_cat)
    {
        for stored_child_node in lexicon.children_of(mover.node) {
            let (stored, stored_prob) = lexicon.get(stored_child_node).unwrap();
            match stored {
                FeatureOrLemma::Feature(Feature::Licensee(s)) if cat == s => {
                    let mut beam = beam.clone();
                    let (stored_tree, stored_id) =
                        beam.new_future_tree(stored_child_node, mover.index);

                    let movers = moment
                        .movers
                        .iter()
                        .filter(|&v| v != mover)
                        .cloned()
                        .chain(std::iter::once(stored_tree))
                        .collect();

                    let child_id =
                        beam.push_moment(child_node, moment.tree.index, movers, moment.stolen_head);
                    beam.log_prob += stored_prob + child_prob;

                    let trace_id = beam.rules.fresh_trace();
                    beam.rules.push_rule(
                        v.rules_mut(),
                        Rule::UnmoveFromMover {
                            child_id,
                            stored_id,
                            trace_id,
                            destination_id: mover.id,
                        },
                        moment.tree.id,
                    );
                    v.push(beam);
                    new_beam_found = true;
                }
                _ => (),
            }
        }
    }
    new_beam_found
}

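/// Ordinary unmove: creates a fresh licensee subtree for `cat` on the left of
/// the current position and reschedules the child on the right with that
/// subtree added to its movers, recording a [`Rule::Unmove`].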
#[allow(clippy::too_many_arguments)]
#[inline]
fn unmove<
    T: Eq + std::fmt::Debug + Clone,
    U: Eq + std::fmt::Debug + Clone,
    Category: Eq + std::fmt::Debug + Clone,
    B: Scanner<T> + Eq + Clone,
>(
    v: &mut ParseHeap<T, B>,
    lexicon: &Lexicon<U, Category>,
    moment: &ParseMoment,
    mut beam: BeamWrapper<T, B>,
    cat: &Category,
    child_node: NodeIndex,
    child_prob: LogProb<f64>,
) -> Result<(), ParsingError<Category>> {
    let stored = lexicon.find_licensee(cat)?;
    let (stored, stored_id) =
        beam.new_future_tree(stored, moment.tree.index.clone_push(Direction::Left));

    let child_id = beam.push_moment(
        child_node,
        moment.tree.index.clone_push(Direction::Right),
        clone_push(&moment.movers, stored),
        moment.stolen_head,
    );

    beam.log_prob += child_prob;
    beam.rules.push_rule(
        v.rules_mut(),
        Rule::Unmove {
            child_id,
            stored_id,
        },
        moment.tree.id,
    );
    v.push(beam);
    Ok(())
}

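/// Whether an unmerge performs head movement (producing a stealer/stolen pair
/// of [`StolenHead`] markers) or simply inherits the moment's existing
/// head-movement state.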
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
enum HeadMovement {
    HeadMovement {
        stealer: StolenHead,
        stolen: StolenHead,
    },
    Inherit,
}

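/// Prepares head-movement bookkeeping for an affix feature. If the moment is
/// already a stolen head, the candidate head trees are narrowed (or the
/// resolved tree is checked) against `child_node`; otherwise a new stealer
/// slot is allocated from `lexicon.possible_heads`. Returns `Ok(None)` when
/// the candidates are incompatible.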
#[inline]
fn set_beam_head<
    T: Eq + std::fmt::Debug + Clone,
    U: Eq + std::fmt::Debug + Clone,
    Category: Eq + std::fmt::Debug + Clone,
    B: Scanner<T> + Eq + Clone,
>(
    moment: &ParseMoment,
    dir: Direction,
    lexicon: &Lexicon<U, Category>,
    child_node: NodeIndex,
    beam: &mut BeamWrapper<T, B>,
) -> Result<Option<HeadMovement>, ParsingError<Category>> {
    match moment.stolen_head {
        Some(StolenHead::Stealer(_)) => panic!(
            "Only possible if there are multiple head movements to one lemma which is not yet supported"
        ),
        Some(StolenHead::StolenHead(pos, index)) => {
            match &mut beam.heads[pos] {
                PossibleHeads::Possibles(head_trees) => {
                    if !head_trees.filter_node(child_node, index.into_iter(), lexicon) {
                        return Ok(None);
                    }
                }
                PossibleHeads::FoundTree(heads, _) => {
                    let Some(node) = heads.find_node(index) else {
                        return Ok(None);
                    };
                    if child_node != lexicon.parent_of(node.0).unwrap() {
                        return Ok(None);
                    }
                }
                PossibleHeads::PlaceHolder => {
                    panic!("This enum variant is just used for swapping!")
                }
            }
            Ok(Some(HeadMovement::HeadMovement {
                stealer: StolenHead::StolenHead(pos, index),
                stolen: StolenHead::StolenHead(pos, index.clone_push(dir)),
            }))
        }
        None => {
            let head_pos = beam.heads.len();
            let possible_heads = PossibleHeads::Possibles(lexicon.possible_heads(child_node, 0)?);
            beam.heads.push(possible_heads);
            Ok(Some(HeadMovement::HeadMovement {
                stealer: StolenHead::Stealer(head_pos),
                stolen: StolenHead::new_stolen(head_pos, dir),
            }))
        }
    }
}

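/// Expands one [`ParseMoment`] of a beam: for every child of the moment's node
/// in the lexicon, dispatches to scan, unmerge, unmove, or affix handling
/// (splitting probability mass with `config.move_prob` / `config.dont_move_prob`
/// where both a move and a non-move continuation exist) and pushes each
/// resulting beam onto `extender`.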
pub(crate) fn expand<
    T: Eq + std::fmt::Debug + Clone,
    Category: Eq + std::fmt::Debug + Clone,
    B: Scanner<T> + Eq + Clone,
    L: Borrow<Lexicon<T, Category>>,
>(
    extender: &mut ParseHeap<T, B>,
    moment: ParseMoment,
    beam: BeamWrapper<T, B>,
    lexicon: L,
    config: &ParsingConfig,
) {
    let lexicon = lexicon.borrow();
    let n_children = lexicon.n_children(moment.tree.node);
    let new_beams = itertools::repeat_n(beam, n_children);

    new_beams
        .zip(lexicon.children_of(moment.tree.node))
        .for_each(
            |(mut beam, child_node)| match lexicon.get(child_node).unwrap() {
                (FeatureOrLemma::Lemma(s), p) if moment.no_movers() => {
                    beam.scan(extender, &moment, s, LexemeId(child_node), p, lexicon)
                }
                (FeatureOrLemma::Complement(cat, dir), mut p)
                | (FeatureOrLemma::Feature(Feature::Selector(cat, dir)), mut p) => {
                    if unmerge_from_mover(
                        extender,
                        lexicon,
                        &moment,
                        &beam,
                        cat,
                        child_node,
                        *dir,
                        p + config.move_prob,
                    ) {
                        p += config.dont_move_prob
                    }
                    let _ = unmerge(
                        extender,
                        lexicon,
                        &moment,
                        beam,
                        cat,
                        *dir,
                        child_node,
                        p,
                        HeadMovement::Inherit,
                    );
                }
                (FeatureOrLemma::Feature(Feature::Licensor(cat)), mut p) => {
                    let already_mover_of_this_cat = moment.movers.iter().any(|x| {
                        lexicon
                            .get_feature_category(x.node)
                            .map(|x| x == cat)
                            .unwrap_or(false)
                    });
                    if unmove_from_mover(
                        extender,
                        lexicon,
                        &moment,
                        &beam,
                        cat,
                        child_node,
                        p + config.move_prob,
                        already_mover_of_this_cat,
                    ) {
                        p += config.dont_move_prob
                    }
                    if !already_mover_of_this_cat {
                        let _ = unmove(extender, lexicon, &moment, beam, cat, child_node, p);
                    }
                }
                (FeatureOrLemma::Feature(Feature::Affix(cat, dir)), p) => {
                    if let Ok(Some(head_info)) =
                        set_beam_head(&moment, *dir, lexicon, child_node, &mut beam)
                    {
                        let _ = unmerge(
                            extender,
                            lexicon,
                            &moment,
                            beam,
                            cat,
                            Direction::Right,
                            child_node,
                            p,
                            head_info,
                        );
                    }
                }
                (FeatureOrLemma::Lemma(_), _)
                | (FeatureOrLemma::Feature(Feature::Category(_)), _)
                | (FeatureOrLemma::Feature(Feature::Licensee(_)), _) => (),
                (FeatureOrLemma::Root, _) => unimplemented!("Impossible to parse the root node"),
            },
        );
}

pub mod beam;
pub mod rules;

mod trees;
pub use trees::MAX_STEPS;

#[cfg(test)]
mod tests;