1use std::{collections::BTreeMap, fmt::Display};
3
4use crate::lexicon::{LexemeId, SemanticLexicon};
5use itertools::Itertools;
6use simple_semantics::{
7 lambda::{RootedLambdaPool, types::LambdaType},
8 language::{ConjoiningError, Expr},
9};
10
11use super::{Rule, RuleIndex, RulePool, TraceId};
12
13#[cfg(feature = "pretty")]
14use serde::{Serialize, ser::SerializeMap, ser::SerializeStruct};
15
/// The semantic composition rule applied at one node of a derivation.
#[derive(Debug, Clone, PartialEq, Copy, Eq, Hash)]
#[cfg_attr(feature = "pretty", derive(Serialize))]
pub enum SemanticRule {
    /// Combine two daughters by applying one to the other (`merge`).
    FunctionalApplication,
    /// Conjoin two same-typed daughters (`conjoin`).
    PredicateModification,
    /// Conjoin daughters via raised conjunction (`raised_conjoin`).
    EventIdentification,
    /// Apply a trace variable to the head and store the complement,
    /// recording the type it was applied at.
    Store,
    /// Store the complement without applying a trace variable (untyped).
    OnlyStore,
    /// Pass the daughter through unchanged.
    Identity,
    /// Pop a stored mover and merge it into the expression.
    ApplyFromStorage,
    /// Re-key a stored mover from an old trace id to a new one.
    UpdateTrace,
    /// Placeholder for a trace node; carries no state of its own.
    Trace,
    /// Lexical lookup for the given lexeme.
    Scan(LexemeId),
}
41
42impl std::fmt::Display for SemanticRule {
43 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
44 write!(
45 f,
46 "{}",
47 match self {
48 SemanticRule::FunctionalApplication => "FA",
49 SemanticRule::PredicateModification => "PM",
50 SemanticRule::EventIdentification => "EI",
51 SemanticRule::Store => "Store",
52 SemanticRule::OnlyStore => "OnlyStore",
53 SemanticRule::Identity => "Id",
54 SemanticRule::ApplyFromStorage => "ApplyFromStorage",
55 SemanticRule::UpdateTrace => "UpdateTrace",
56 SemanticRule::Trace => "Trace",
57 SemanticRule::Scan(_) => "LexicalEntry",
58 }
59 )
60 }
61}
62
63impl RulePool {
64 pub fn to_interpretation<'a, 'src, T, C>(
66 &'a self,
67 lex: &'a SemanticLexicon<'src, T, C>,
68 ) -> impl Iterator<Item = (RootedLambdaPool<'src, Expr<'src>>, SemanticHistory<'static>)> + 'a
69 where
70 T: Eq + std::fmt::Debug + std::clone::Clone,
71 C: Eq + std::fmt::Debug + std::clone::Clone,
72 {
73 SemanticDerivation::interpret(self, lex).filter_map(|(mut pool, history)| {
74 if pool.reduce().is_ok() {
75 Some((pool, history))
76 } else {
77 None
78 }
79 })
80 }
81}
82
/// Index into `SemanticDerivation::semantic_history` (the step arena).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct HistoryId(usize);
85
/// One recorded derivation step: the syntactic rule it annotates, the
/// semantic rule that was applied there, and up to two daughter steps.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct HistoryNode {
    // Which rule in the `RulePool` this step interprets.
    rule_id: RuleIndex,
    // The semantic operation applied at that rule.
    rule: SemanticRule,
    // Daughter steps in the arena; unary steps leave the second slot `None`.
    children: [Option<HistoryId>; 2],
}
92
/// A record of the semantic rules used in one derivation, indexed in
/// parallel with the `RulePool`'s rules.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SemanticHistory<'a> {
    /// Each rule paired with the intermediate state it produced
    /// (`None` where no state was recorded, e.g. trace nodes).
    Rich(Vec<(SemanticRule, Option<SemanticState<'a>>)>),
    /// Rules only, with no intermediate states.
    Simple(Vec<SemanticRule>),
}
101
/// A single node of a `SemanticHistory`, as handed out by
/// `SemanticHistory::semantic_node`.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub(crate) enum SemanticNode<'a> {
    /// Rule plus the state it produced, if one was recorded.
    Rich(SemanticRule, Option<SemanticState<'a>>),
    /// Rule only.
    Simple(SemanticRule),
}
107
#[cfg(feature = "pretty")]
impl Serialize for SemanticNode<'_> {
    /// Serializes a node as a struct with a `rule` field and, for rich
    /// nodes, a `state` field.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        match self {
            SemanticNode::Rich(semantic_rule, semantic_state) => {
                // Two fields are emitted in this arm, so the length hint
                // must be 2 — a wrong hint breaks length-strict serializers
                // (it was previously 1).
                let mut s = serializer.serialize_struct("SemanticNode", 2)?;
                s.serialize_field("rule", semantic_rule)?;
                s.serialize_field("state", semantic_state)?;
                s.end()
            }
            SemanticNode::Simple(semantic_rule) => {
                let mut s = serializer.serialize_struct("SemanticNode", 1)?;
                s.serialize_field("rule", semantic_rule)?;
                s.end()
            }
        }
    }
}
129
130impl<'a> SemanticHistory<'a> {
131 pub(super) fn semantic_node(&self, i: RuleIndex) -> Option<SemanticNode<'a>> {
132 match self {
133 SemanticHistory::Rich(items) => items
134 .get(i.0)
135 .map(|(rule, interp)| SemanticNode::Rich(*rule, interp.clone())),
136 SemanticHistory::Simple(items) => {
137 items.get(i.0).map(|rule| SemanticNode::Simple(*rule))
138 }
139 }
140 }
141
142 pub fn constituents(
145 &self,
146 ) -> Option<impl Iterator<Item = (SemanticRule, &RootedLambdaPool<'a, Expr<'a>>)>> {
147 match self {
148 SemanticHistory::Rich(items) => Some(
149 items
150 .iter()
151 .skip(1)
152 .filter_map(|(b, x)| x.as_ref().map(|x| (*b, &x.expr))),
153 ),
154 SemanticHistory::Simple(_) => None,
155 }
156 }
157
158 pub fn into_rich<T, C>(self, lexicon: &SemanticLexicon<'a, T, C>, rules: &RulePool) -> Self
160 where
161 T: Eq + std::fmt::Debug + std::clone::Clone,
162 C: Eq + std::fmt::Debug + std::clone::Clone,
163 {
164 match self {
165 SemanticHistory::Rich(items) => SemanticHistory::Rich(items),
166 SemanticHistory::Simple(semantic_rules) => {
167 let mut items = semantic_rules.into_iter().map(|x| (x, None)).collect_vec();
168
169 let mut derivation = SemanticDerivation {
170 rules,
171 lexicon,
172 semantic_history: vec![],
173 };
174
175 derivation.redo_history(RuleIndex(0), &mut items);
176
177 SemanticHistory::Rich(items)
178 }
179 }
180 }
181}
182
183impl Display for SemanticNode<'_> {
184 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
185 match self {
186 SemanticNode::Rich(_, Some(interp)) => {
187 write!(f, "{interp}")
188 }
189 SemanticNode::Rich(semantic_rule, None) => {
190 write!(f, "{semantic_rule}")
191 }
192 SemanticNode::Simple(semantic_rule) => write!(f, "{semantic_rule}"),
193 }
194 }
195}
196
197impl Display for SemanticState<'_> {
198 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
199 write!(f, "{}", self.expr)
200 }
201}
202
/// An intermediate result of semantic composition: the expression built
/// so far plus the movers currently held in storage.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct SemanticState<'src> {
    // The (possibly still unreduced) lambda expression for this constituent.
    expr: RootedLambdaPool<'src, Expr<'src>>,
    // Stored movers keyed by trace id.  The optional type is the one the
    // trace variable was applied at when the mover was stored (`None` for
    // movers stored via `OnlyStore`).
    movers: BTreeMap<TraceId, (RootedLambdaPool<'src, Expr<'src>>, Option<LambdaType>)>,
}
210
/// Serialization helper: borrows a state's mover map so it can be
/// serialized as a map keyed by trace id.
#[cfg(feature = "pretty")]
#[derive(Debug, Clone, PartialEq, Eq)]
struct Movers<'a, 'src>(
    &'a BTreeMap<TraceId, (RootedLambdaPool<'src, Expr<'src>>, Option<LambdaType>)>,
);

/// Serialization helper: borrows a single stored mover (its expression
/// plus the type recorded at storage time, if any).
#[cfg(feature = "pretty")]
#[derive(Debug, Clone, PartialEq, Eq)]
struct Mover<'a, 'src>(&'a (RootedLambdaPool<'src, Expr<'src>>, Option<LambdaType>));
220
#[cfg(feature = "pretty")]
impl Serialize for Mover<'_, '_> {
    /// Serializes a stored mover as `{ expr, tokens[, type] }`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        // The "type" field is only emitted when a stored type is present,
        // so the length hint must be 2 or 3 accordingly — it was previously
        // a fixed 3, which lies to length-strict serializers.
        let len = if self.0.1.is_some() { 3 } else { 2 };
        let mut s = serializer.serialize_struct("Mover", len)?;
        s.serialize_field("expr", self.0.0.to_string().as_str())?;
        s.serialize_field("tokens", &self.0.0)?;
        if let Some(t) = self.0.1.as_ref() {
            s.serialize_field("type", t.to_string().as_str())?;
        }

        s.end()
    }
}
237
#[cfg(feature = "pretty")]
impl Serialize for Movers<'_, '_> {
    /// Serializes the mover store as a map with one entry per trace id.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let mut map = serializer.serialize_map(Some(self.0.len()))?;
        self.0
            .iter()
            .try_for_each(|(trace, mover)| map.serialize_entry(trace, &Mover(mover)))?;
        map.end()
    }
}
251
#[cfg(feature = "pretty")]
impl Serialize for SemanticState<'_> {
    /// Serializes a state as `{ expr, tokens, movers }`: the rendered
    /// expression, its token pool, and the stored movers.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let mut s = serializer.serialize_struct("SemanticState", 3)?;
        s.serialize_field("expr", self.expr.to_string().as_str())?;
        s.serialize_field("tokens", &self.expr)?;
        s.serialize_field("movers", &Movers(&self.movers))?;

        s.end()
    }
}
266
267impl<'src> SemanticState<'src> {
268 fn new(alpha: RootedLambdaPool<'src, Expr<'src>>) -> Self {
269 SemanticState {
270 expr: alpha,
271 movers: BTreeMap::default(),
272 }
273 }
274 fn event_identification(alpha: Self, beta: Self) -> Option<Self> {
275 let overlapping_traces = alpha.movers.keys().any(|k| beta.movers.contains_key(k));
276 if overlapping_traces {
277 return None;
278 }
279
280 let SemanticState {
281 expr: alpha,
282 movers: mut alpha_movers,
283 ..
284 } = alpha;
285
286 let SemanticState {
287 expr: beta,
288 movers: beta_movers,
289 ..
290 } = beta;
291 let alpha = match alpha.raised_conjoin(beta) {
292 Ok(x) => x,
293 Err(ConjoiningError::ReductionError(e)) => {
294 panic!("Reduction error in predicate_modification {e}")
295 }
296 Err(_) => return None,
297 };
298
299 alpha_movers.extend(beta_movers);
300 Some(SemanticState {
301 expr: alpha,
302 movers: alpha_movers,
303 })
304 }
305
306 fn predicate_modification(alpha: Self, beta: Self) -> Option<Self> {
307 let overlapping_traces = alpha.movers.keys().any(|k| beta.movers.contains_key(k));
308 if overlapping_traces {
309 return None;
310 }
311
312 let SemanticState {
313 expr: alpha,
314 movers: mut alpha_movers,
315 ..
316 } = alpha;
317
318 let SemanticState {
319 expr: beta,
320 movers: beta_movers,
321 ..
322 } = beta;
323 let alpha = match alpha.conjoin(beta) {
324 Ok(x) => x,
325 Err(ConjoiningError::ReductionError(e)) => {
326 panic!("Reduction error in predicate_modification {e}")
327 }
328 Err(_) => return None,
329 };
330
331 alpha_movers.extend(beta_movers);
332 Some(SemanticState {
333 expr: alpha,
334 movers: alpha_movers,
335 })
336 }
337
338 fn merge(alpha: Self, beta: Self) -> Option<Self> {
339 let overlapping_traces = alpha.movers.keys().any(|k| beta.movers.contains_key(k));
340 if overlapping_traces {
341 return None;
342 }
343
344 let SemanticState {
345 expr: alpha,
346 movers: mut alpha_movers,
347 ..
348 } = alpha;
349 let SemanticState {
350 expr: beta,
351 movers: beta_movers,
352 ..
353 } = beta;
354 if let Some(alpha) = alpha.merge(beta) {
355 alpha_movers.extend(beta_movers);
356 Some(SemanticState {
357 expr: alpha,
358 movers: alpha_movers,
359 })
360 } else {
361 None
362 }
363 }
364}
365
/// Working state for interpreting a `RulePool` against a lexicon.
#[derive(Debug, Clone)]
struct SemanticDerivation<'a, 'src, T: Eq, C: Eq> {
    // Source of lexical interpretations for scanned lexemes.
    lexicon: &'a SemanticLexicon<'src, T, C>,
    // The syntactic derivation being interpreted.
    rules: &'a RulePool,
    // Arena of every attempted step; successful derivations are read back
    // out of it by following `HistoryId` links from a root.
    semantic_history: Vec<HistoryNode>,
}
372
/// Outcome of `SemanticDerivation::apply_from_storage`.
///
/// NOTE(review): `SuccesfulMerge` is missing an `s` ("Successful");
/// renaming would touch every match site, so the name is left as is.
#[derive(Debug, Clone)]
enum ApplyFromStorageResult<T> {
    /// A stored mover was found and merged into the expression.
    SuccesfulMerge(T),
    /// A stored mover was found but could not be merged.
    FailedMerge,
    /// Nothing was stored under the requested trace; payload is unchanged.
    NoTrace(T),
}
379
impl<'a, 'src, T, C> SemanticDerivation<'a, 'src, T, C>
where
    T: Eq + std::fmt::Debug + std::clone::Clone,
    C: Eq + std::fmt::Debug + std::clone::Clone,
{
    /// Enumerates the candidate interpretations licensed by `rules`,
    /// yielding each expression whose mover storage is empty together
    /// with a `Simple` history of the rules that produced it.
    fn interpret(
        rules: &'a RulePool,
        lex: &'a SemanticLexicon<'src, T, C>,
    ) -> impl Iterator<Item = (RootedLambdaPool<'src, Expr<'src>>, SemanticHistory<'static>)> + 'a
    {
        let mut derivation = SemanticDerivation {
            rules,
            lexicon: lex,
            semantic_history: vec![],
        };

        // Interpretation starts at RuleIndex(1): index 0 is the Start
        // rule, which `get_previous_rules` deliberately refuses to visit.
        let last_derivation = derivation.get_previous_rules(RuleIndex(1));

        last_derivation.into_iter().filter_map(move |(x, root)| {
            if x.movers.is_empty() {
                Some((
                    x.expr,
                    SemanticHistory::Simple(derivation.get_history(root)),
                ))
            } else {
                // A reading that never discharged a stored mover is
                // incomplete and is dropped.
                None
            }
        })
    }

    /// Reads the semantic rule applied at every syntactic rule back out
    /// of the history arena, for the single derivation rooted at `root`.
    fn get_history(&self, root: HistoryId) -> Vec<SemanticRule> {
        let mut stack = vec![root];
        // Pre-fill the slots that never receive arena nodes: Start maps
        // to Identity and trace rules map to Trace.
        let mut history: Vec<Option<SemanticRule>> = self
            .rules
            .0
            .iter()
            .map(|rule| match rule {
                Rule::Start { .. } => Some(SemanticRule::Identity),
                Rule::UnmoveTrace(_) => Some(SemanticRule::Trace),
                _ => None,
            })
            .collect();

        // Depth-first walk from the root, recording each step's rule in
        // the slot of the syntactic rule it annotates.
        while let Some(node) = stack.pop() {
            let HistoryNode {
                rule_id,
                rule,
                children,
            } = self.semantic_history.get(node.0).unwrap();
            history[rule_id.0] = Some(*rule);
            stack.extend(children.iter().filter_map(|x| *x));
        }

        // Every slot must be filled by now; a `None` would mean the
        // derivation failed to cover some rule, which is a bug.
        history.into_iter().collect::<Option<Vec<_>>>().unwrap()
    }

    /// Appends one step to the history arena and returns its id.
    fn history_node(
        &mut self,
        rule_id: RuleIndex,
        semantic: SemanticRule,
        child_a: Option<HistoryId>,
        child_b: Option<HistoryId>,
    ) -> HistoryId {
        self.semantic_history.push(HistoryNode {
            rule_id,
            rule: semantic,
            children: [child_a, child_b],
        });
        HistoryId(self.semantic_history.len() - 1)
    }

    /// Passes a daughter through unchanged, recording an Identity step.
    fn identity(
        &mut self,
        rule_id: RuleIndex,
        child: (SemanticState<'src>, HistoryId),
    ) -> (SemanticState<'src>, HistoryId) {
        let (alpha, child_a) = child;
        (
            alpha,
            self.history_node(rule_id, SemanticRule::Identity, Some(child_a), None),
        )
    }

    /// Attempts functional application of the two daughters; `None` when
    /// their types do not apply (or their movers clash).
    fn functional_application(
        &mut self,
        rule_id: RuleIndex,
        child: (SemanticState<'src>, HistoryId),
        complement: (SemanticState<'src>, HistoryId),
    ) -> Option<(SemanticState<'src>, HistoryId)> {
        let (alpha, alpha_id) = child;
        let (beta, beta_id) = complement;
        SemanticState::merge(alpha, beta).map(|x| {
            (
                x,
                self.history_node(
                    rule_id,
                    SemanticRule::FunctionalApplication,
                    Some(alpha_id),
                    Some(beta_id),
                ),
            )
        })
    }

    /// Attempts predicate modification (conjunction) of the daughters.
    fn predicate_modification(
        &mut self,
        rule_id: RuleIndex,
        child: (SemanticState<'src>, HistoryId),
        complement: (SemanticState<'src>, HistoryId),
    ) -> Option<(SemanticState<'src>, HistoryId)> {
        let (alpha, alpha_id) = child;
        let (beta, beta_id) = complement;
        SemanticState::predicate_modification(alpha, beta).map(|x| {
            (
                x,
                self.history_node(
                    rule_id,
                    SemanticRule::PredicateModification,
                    Some(alpha_id),
                    Some(beta_id),
                ),
            )
        })
    }

    /// Attempts event identification (raised conjunction) of the daughters.
    fn event_identification(
        &mut self,
        rule_id: RuleIndex,
        child: (SemanticState<'src>, HistoryId),
        complement: (SemanticState<'src>, HistoryId),
    ) -> Option<(SemanticState<'src>, HistoryId)> {
        let (alpha, alpha_id) = child;
        let (beta, beta_id) = complement;
        SemanticState::event_identification(alpha, beta).map(|x| {
            (
                x,
                self.history_node(
                    rule_id,
                    SemanticRule::EventIdentification,
                    Some(alpha_id),
                    Some(beta_id),
                ),
            )
        })
    }

    /// Stores the complement under `trace_id` WITHOUT applying a trace
    /// variable to the head (so no type is recorded for it).  Always
    /// succeeds; the `Option` return mirrors its sibling combinators.
    fn only_store(
        &mut self,
        rule_id: RuleIndex,
        child: (SemanticState<'src>, HistoryId),
        complement: (SemanticState<'src>, HistoryId),
        trace_id: TraceId,
    ) -> Option<(SemanticState<'src>, HistoryId)> {
        let (mut alpha, alpha_id) = child;
        let (beta, beta_id) = complement;
        alpha.movers.extend(beta.movers);
        alpha.movers.insert(trace_id, (beta.expr.clone(), None));
        Some((
            alpha,
            self.history_node(
                rule_id,
                SemanticRule::OnlyStore,
                Some(alpha_id),
                Some(beta_id),
            ),
        ))
    }

    /// Applies a fresh trace variable to the head and stores the
    /// complement under `trace_id` together with the type the variable
    /// was applied at; `None` if the head cannot take the variable.
    fn store(
        &mut self,
        rule_id: RuleIndex,
        child: (SemanticState<'src>, HistoryId),
        complement: (SemanticState<'src>, HistoryId),
        trace_id: TraceId,
    ) -> Option<(SemanticState<'src>, HistoryId)> {
        let (mut alpha, alpha_id) = child;
        let (beta, beta_id) = complement;
        if let Ok(trace_type) = alpha.expr.apply_new_free_variable(trace_id.0.into()) {
            alpha.movers.extend(beta.movers);
            alpha
                .movers
                .insert(trace_id, (beta.expr.clone(), Some(trace_type)));
            Some((
                alpha,
                self.history_node(rule_id, SemanticRule::Store, Some(alpha_id), Some(beta_id)),
            ))
        } else {
            None
        }
    }

    /// Re-keys the mover stored under `old_trace_id` to `trace_id`.  For
    /// typed movers the expression is re-abstracted over the old trace
    /// variable and re-applied at the new one.  If nothing is stored
    /// under `old_trace_id`, the state passes through unchanged.
    fn update_trace(
        &mut self,
        rule_id: RuleIndex,
        child: (SemanticState<'src>, HistoryId),
        old_trace_id: TraceId,
        trace_id: TraceId,
    ) -> (SemanticState<'src>, HistoryId) {
        let (mut alpha, alpha_child) = child;
        if let Some((stored_value, stored_type)) = alpha.movers.remove(&old_trace_id) {
            alpha
                .movers
                .insert(trace_id, (stored_value, stored_type.clone()));

            if let Some(stored_type) = stored_type {
                // Swap the old trace variable for the new one in the
                // expression itself; failure here would be a broken
                // invariant, hence unwrap.
                alpha
                    .expr
                    .lambda_abstract_free_variable(old_trace_id.0.into(), stored_type, true)
                    .unwrap();
                alpha
                    .expr
                    .apply_new_free_variable(trace_id.0.into())
                    .unwrap();
            }
        }
        (
            alpha,
            self.history_node(rule_id, SemanticRule::UpdateTrace, Some(alpha_child), None),
        )
    }

    /// Pops the mover stored under `trace_id` (if any), abstracts the
    /// expression over the trace variable when a type was recorded, and
    /// merges the stored value back in.
    fn apply_from_storage(
        &mut self,
        rule_id: RuleIndex,
        child: (SemanticState<'src>, HistoryId),
        trace_id: TraceId,
    ) -> ApplyFromStorageResult<(SemanticState<'src>, HistoryId)> {
        let (mut alpha, alpha_id) = child;
        if let Some((stored_value, stored_type)) = alpha.movers.remove(&trace_id) {
            if let Some(stored_type) = stored_type {
                alpha
                    .expr
                    .lambda_abstract_free_variable(trace_id.0.into(), stored_type, true)
                    .unwrap();
            }
            let SemanticState { expr, movers } = alpha;
            match expr.merge(stored_value).map(|expr| {
                (
                    SemanticState { expr, movers },
                    self.history_node(
                        rule_id,
                        SemanticRule::ApplyFromStorage,
                        Some(alpha_id),
                        None,
                    ),
                )
            }) {
                Some(x) => ApplyFromStorageResult::SuccesfulMerge(x),
                None => ApplyFromStorageResult::FailedMerge,
            }
        } else {
            ApplyFromStorageResult::NoTrace((alpha, alpha_id))
        }
    }

    /// Reads the trace id out of an `UnmoveTrace` rule; any other rule
    /// kind at `trace_id` is a malformed tree.
    fn get_trace(&mut self, trace_id: RuleIndex) -> TraceId {
        match self.rules.get(trace_id) {
            Rule::UnmoveTrace(trace_id) => *trace_id,
            _ => panic!("Ill-formed tree"),
        }
    }

    /// Recursively interprets the subtree rooted at `rule_id`, returning
    /// every semantic state it can derive (semantic ambiguity makes this
    /// a Vec), each paired with the arena id of its derivation step.
    fn get_previous_rules(&mut self, rule_id: RuleIndex) -> Vec<(SemanticState<'src>, HistoryId)> {
        let rule = self.rules.get(rule_id);
        match rule {
            // Leaf: look the lexeme's interpretation up in the lexicon.
            Rule::Scan { lexeme, stolen: _ } => [(
                SemanticState::new(self.lexicon.interpretation(*lexeme).clone()),
                self.history_node(rule_id, SemanticRule::Scan(*lexeme), None, None),
            )]
            .into(),
            Rule::UnmoveTrace(_) => panic!("Traces shouldn't directly be accessed"),
            Rule::Start { .. } => panic!("The start rule must always be skipped"),
            // Binary branch: pick a combinator per pair of daughter
            // readings — PM for equal types, FA when one side can apply
            // to the other, EI otherwise.
            Rule::Unmerge {
                child_id,
                complement_id,
                ..
            } => {
                let complements = self.get_previous_rules(*complement_id);
                let children = self.get_previous_rules(*child_id);

                children
                    .into_iter()
                    .cartesian_product(complements)
                    .filter_map(|(child, complement)| {
                        let child_type = child.0.expr.get_type().unwrap();
                        let complement_type = complement.0.expr.get_type().unwrap();
                        if child_type == complement_type {
                            self.predicate_modification(rule_id, child, complement)
                        } else if child_type.can_apply(&complement_type)
                            || complement_type.can_apply(&child_type)
                        {
                            self.functional_application(rule_id, child, complement)
                        } else {
                            self.event_identification(rule_id, child, complement)
                        }
                    })
                    .collect()
            }
            // Merge with a mover: try every option for each pair of
            // readings — typed store, untyped store, and immediate FA.
            Rule::UnmergeFromMover {
                child_id,
                stored_id,
                trace_id,
                ..
            } => {
                let stored = self.get_previous_rules(*stored_id);
                let children = self.get_previous_rules(*child_id);
                let product = children.into_iter().cartesian_product(stored);
                let mut new_states = product
                    .clone()
                    .filter_map(|(child, complement)| {
                        self.store(rule_id, child, complement, *trace_id)
                    })
                    .collect::<Vec<_>>();

                new_states.extend(
                    product
                        .clone()
                        .filter_map(|(child, complement)| {
                            self.only_store(rule_id, child, complement, *trace_id)
                        })
                        .collect::<Vec<_>>(),
                );

                new_states.extend(product.filter_map(|(child, complement)| {
                    self.functional_application(rule_id, child, complement)
                }));
                new_states
            }
            // Movement landing site: discharge the stored mover; if
            // nothing was stored under this trace, pass through (Identity).
            Rule::Unmove {
                child_id,
                stored_id,
            } =>
            {
                let trace_id = self.get_trace(*stored_id);
                let children = self.get_previous_rules(*child_id);
                children
                    .into_iter()
                    .filter_map(
                        |child| match self.apply_from_storage(rule_id, child, trace_id) {
                            ApplyFromStorageResult::SuccesfulMerge(x) => Some(x),
                            ApplyFromStorageResult::FailedMerge => None,
                            ApplyFromStorageResult::NoTrace(child) => {
                                Some(self.identity(rule_id, child))
                            }
                        },
                    )
                    .collect()
            }

            // Intermediate movement step: either re-key the mover to the
            // new trace, or discharge it here (no pass-through in this
            // case — a missing trace yields no reading).
            Rule::UnmoveFromMover {
                child_id,
                stored_id,
                trace_id,
                ..
            } => {
                let children = self.get_previous_rules(*child_id);
                let old_trace_id = self.get_trace(*stored_id);
                let mut states = children
                    .clone()
                    .into_iter()
                    .map(|child| self.update_trace(rule_id, child, old_trace_id, *trace_id))
                    .collect::<Vec<_>>();
                states.extend(children.into_iter().filter_map(
                    |child| match self.apply_from_storage(rule_id, child, old_trace_id) {
                        ApplyFromStorageResult::SuccesfulMerge(x) => Some(x),
                        ApplyFromStorageResult::FailedMerge
                        | ApplyFromStorageResult::NoTrace(_) => None,
                    },
                ));
                states
            }
        }
    }

    /// Recomputes the intermediate `SemanticState` for every rule in
    /// `history` (in place), replaying the previously recorded semantic
    /// rules over the syntax tree.  Used by `SemanticHistory::into_rich`.
    fn redo_history(
        &mut self,
        rule_id: RuleIndex,
        history: &mut [(SemanticRule, Option<SemanticState<'src>>)],
    ) {
        let rule = *self.rules.get(rule_id);
        let semantic_rule = history.get(rule_id.0).unwrap().0;
        let children: Vec<_> = self.rules.get(rule_id).children().collect();

        // Fill in the daughters first (post-order replay).
        for child in children.iter() {
            self.redo_history(*child, history);
        }
        // Fetch a daughter's recomputed state.  Arena ids are irrelevant
        // when replaying, so a dummy HistoryId(0) stands in.
        let get_child = |i: usize| {
            (
                history
                    .get(children.get(i).unwrap().0)
                    .unwrap()
                    .1
                    .clone()
                    .unwrap(),
                HistoryId(0),
            )
        };

        let trace_id = match &rule {
            Rule::UnmergeFromMover { trace_id, .. } | Rule::UnmoveFromMover { trace_id, .. } => {
                Some(*trace_id)
            }
            _ => None,
        };

        // Re-apply exactly the semantic rule that was recorded — no
        // search this time, since the choice is already known.
        let value = match semantic_rule {
            SemanticRule::FunctionalApplication => {
                let child = get_child(0);
                let complement = get_child(1);

                self.functional_application(rule_id, child, complement)
            }
            SemanticRule::PredicateModification => {
                let child = get_child(0);
                let complement = get_child(1);

                self.predicate_modification(rule_id, child, complement)
            }
            SemanticRule::EventIdentification => {
                let child = get_child(0);
                let complement = get_child(1);

                self.event_identification(rule_id, child, complement)
            }
            SemanticRule::Store => {
                let child = get_child(0);
                let complement = get_child(1);
                self.store(rule_id, child, complement, trace_id.unwrap())
            }
            SemanticRule::OnlyStore => {
                let child = get_child(0);
                let complement = get_child(1);
                self.only_store(rule_id, child, complement, trace_id.unwrap())
            }
            SemanticRule::Identity => {
                let child = get_child(0);
                Some(self.identity(rule_id, child))
            }
            SemanticRule::ApplyFromStorage => {
                let child = get_child(0);
                let trace_id = self.get_trace(children[1]);
                match self.apply_from_storage(rule_id, child, trace_id) {
                    ApplyFromStorageResult::SuccesfulMerge(x) => Some(x),
                    ApplyFromStorageResult::FailedMerge | ApplyFromStorageResult::NoTrace(_) => {
                        None
                    }
                }
            }
            SemanticRule::UpdateTrace => {
                let child = get_child(0);
                let old_trace_id = self.get_trace(children[1]);
                Some(self.update_trace(rule_id, child, old_trace_id, trace_id.unwrap()))
            }
            SemanticRule::Trace => {
                // Trace nodes carry no state; their slot stays `None`.
                return;
            }
            SemanticRule::Scan(_) => {
                let node = match rule {
                    Rule::Scan { lexeme, stolen: _ } => lexeme,
                    _ => panic!(
                        "The scan semantic rule should only happen with scanning when parsing"
                    ),
                };
                Some((
                    SemanticState::new(self.lexicon.interpretation(node).clone()),
                    HistoryId(0),
                ))
            }
        };

        let s = history.get_mut(rule_id.0).unwrap();
        let state = &mut s.1;

        // Replaying a previously successful derivation is expected to
        // succeed, so failure here is treated as a bug.
        let mut value = value.unwrap().0;
        value.expr.reduce().unwrap();

        *state = Some(value);
    }
}
864
#[cfg(test)]
mod tests {

    use crate::lexicon::SemanticLexicon;
    use crate::{ParsingConfig, PhonContent};

    /// Regression test: interpreting a parse with ill-typed double
    /// movement must not panic (readings may simply be discarded).
    #[test]
    fn doesnt_crash_with_bad_typed_double_movement() -> anyhow::Result<()> {
        let lexicon = SemanticLexicon::parse(
            "mary::0 -1 -1::a_0\n::=0 +1 0::lambda <e,e> x (a_1)\nran::=0 +1 0::a_1",
        )?;
        for (_, _, r) in lexicon.lexicon().parse(
            &PhonContent::from(["mary", "ran"]),
            "0",
            &ParsingConfig::default(),
        )? {
            // Only a bounded number of interpretations is drawn; the test
            // passes as long as none of them panics.
            for (x, _h) in r.to_interpretation(&lexicon).take(10) {
                println!("{x}");
            }
        }
        Ok(())
    }

    /// Two same-typed predicates ("tall", "man") should combine by
    /// predicate modification into a conjunction.
    #[test]
    fn predicate_modification() -> anyhow::Result<()> {
        let lexicon = SemanticLexicon::parse(
            "tall::n= n::lambda a x pa_tall(x)\nman::n::lambda a x pa_man(x)",
        )?;
        for (_, _, r) in lexicon.lexicon().parse(
            &PhonContent::from(["tall", "man"]),
            "n",
            &ParsingConfig::default(),
        )? {
            let (x, _h) = r.to_interpretation(&lexicon).next().unwrap();

            assert_eq!(x.to_string(), "lambda a x pa_tall(x) & pa_man(x)");
        }
        Ok(())
    }
    /// A voice head and a verbal predicate of mismatched-but-compatible
    /// types should combine by event identification.
    #[test]
    fn event_identification() -> anyhow::Result<()> {
        let lexicon = SemanticLexicon::parse(
            "voice::v= V::lambda a x lambda e y AgentOf(x, y)\nrun::v::lambda e x pe_runs(x)",
        )?;
        for (_, _, r) in lexicon.lexicon().parse(
            &PhonContent::from(["voice", "run"]),
            "V",
            &ParsingConfig::default(),
        )? {
            let (x, _h) = r.to_interpretation(&lexicon).next().unwrap();

            assert_eq!(
                x.to_string(),
                "lambda a x lambda e y AgentOf(x, y) & pe_runs(y)"
            )
        }
        Ok(())
    }
}