minimalist_grammar_parser/parsing/rules/semantics.rs

//! This module defines the basic semantic rules which allow for semantic derivations.
use super::{Rule, RuleIndex, RulePool, TraceId};
use crate::lexicon::{LexemeId, SemanticLexicon};
use itertools::{Either, Itertools};
use simple_semantics::{
    lambda::{RootedLambdaPool, types::LambdaType},
    language::{ConjoiningError, Expr},
};
use std::{collections::BTreeMap, fmt::Display};

#[cfg(feature = "pretty")]
use serde::{Serialize, ser::SerializeMap, ser::SerializeStruct};

#[derive(Debug, Clone, PartialEq, Copy, Eq, Hash)]
#[cfg_attr(feature = "pretty", derive(Serialize))]
///Enum to define possible semantic rules
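///
///The [`Display`] impl renders each rule as a short label (e.g. `FA` for functional
///application). A minimal sketch of what that looks like:
///```ignore
///assert_eq!(SemanticRule::FunctionalApplication.to_string(), "FA");
///assert_eq!(SemanticRule::PredicateModification.to_string(), "PM");
///```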
pub enum SemanticRule {
    ///Apply one function to another.
    FunctionalApplication,
    ///Conjoin two `<x,t>` functions into a new `<x,t>` with logical and.
    PredicateModification,
    ///Conjoin a `<y,<x,t>>` function and an `<x,t>` function into a `<y,<x,t>>`.
    EventIdentification,
    ///Store a meaning for later, leaving behind a free variable as in QR.
    Store,
    ///Apply the identity function
    Identity,
    ///Retrieve something from storage and apply it.
    ApplyFromStorage,
    ///Change a trace's ID (for multiple movements)
    UpdateTrace,
    ///This is the landing site of a movement.
    Trace,
    ///Retrieve a meaning from a word.
    Scan(LexemeId),
}

impl std::fmt::Display for SemanticRule {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "{}",
            match self {
                SemanticRule::FunctionalApplication => "FA",
                SemanticRule::PredicateModification => "PM",
                SemanticRule::EventIdentification => "EI",
                SemanticRule::Store => "Store",
                SemanticRule::Identity => "Id",
                SemanticRule::ApplyFromStorage => "ApplyFromStorage",
                SemanticRule::UpdateTrace => "UpdateTrace",
                SemanticRule::Trace => "Trace",
                SemanticRule::Scan(_) => "LexicalEntry",
            }
        )
    }
}

impl RulePool {
    ///Converts the [`RulePool`] into all possible semantic interpretations (as an iterator).
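    ///
    ///Only interpretations whose lambda pools reduce successfully are yielded. A minimal usage
    ///sketch, based on this module's tests (the crate-level paths are assumed, so the example is
    ///not compiled):
    ///```ignore
    ///use minimalist_grammar_parser::lexicon::SemanticLexicon;
    ///use minimalist_grammar_parser::{ParsingConfig, PhonContent};
    ///
    ///let lexicon = SemanticLexicon::parse(
    ///    "tall::n= n::lambda a x pa_tall(x)\nman::n::lambda a x pa_man(x)",
    ///)?;
    ///for (_, _, rules) in lexicon.lexicon().parse(
    ///    &PhonContent::from(["tall", "man"]),
    ///    "n",
    ///    &ParsingConfig::default(),
    ///)? {
    ///    for (interpretation, _history) in rules.to_interpretation(&lexicon) {
    ///        println!("{interpretation}");
    ///    }
    ///}
    ///```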
    pub fn to_interpretation<'a, 'src, T, C>(
        &'a self,
        lex: &'a SemanticLexicon<'src, T, C>,
    ) -> impl Iterator<Item = (RootedLambdaPool<'src, Expr<'src>>, SemanticHistory<'static>)> + 'a
    where
        T: Eq + std::fmt::Debug + std::clone::Clone,
        C: Eq + std::fmt::Debug + std::clone::Clone,
    {
        SemanticDerivation::interpret(self, lex).filter_map(|(mut pool, history)| {
            if pool.reduce().is_ok() {
                Some((pool, history))
            } else {
                None
            }
        })
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct HistoryId(usize);

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct HistoryNode {
    rule_id: RuleIndex,
    rule: SemanticRule,
    children: [Option<HistoryId>; 2],
}

#[derive(Debug, Clone, PartialEq, Eq)]
///The history of a semantic derivation, i.e. what meant what, and when
pub enum SemanticHistory<'a> {
    ///Semantic history including partial semantic interpretations
    Rich(Vec<(SemanticRule, Option<SemanticState<'a>>)>),
    ///Semantic history with only the rules.
    Simple(Vec<SemanticRule>),
}

#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub(crate) enum SemanticNode<'a> {
    Rich(SemanticRule, Option<SemanticState<'a>>),
    Simple(SemanticRule),
}

#[cfg(feature = "pretty")]
impl Serialize for SemanticNode<'_> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        match self {
            SemanticNode::Rich(semantic_rule, semantic_state) => {
                let mut s = serializer.serialize_struct("SemanticNode", 2)?;
                s.serialize_field("rule", semantic_rule)?;
                s.serialize_field("state", semantic_state)?;
                s.end()
            }
            SemanticNode::Simple(semantic_rule) => {
                let mut s = serializer.serialize_struct("SemanticNode", 1)?;
                s.serialize_field("rule", semantic_rule)?;
                s.end()
            }
        }
    }
}

impl<'a> SemanticHistory<'a> {
    pub(super) fn semantic_node(&self, i: RuleIndex) -> Option<SemanticNode<'a>> {
        match self {
            SemanticHistory::Rich(items) => items
                .get(i.0)
                .map(|(rule, interp)| SemanticNode::Rich(*rule, interp.clone())),
            SemanticHistory::Simple(items) => {
                items.get(i.0).map(|rule| SemanticNode::Simple(*rule))
            }
        }
    }

    ///Get the interpretation of each constituent in the semantic history. Returns [`None`] if the
    ///semantic history is [`SemanticHistory::Simple`].
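    ///
    ///A sketch of the intended use, assuming a rich history was produced first (e.g. via
    ///[`SemanticHistory::into_rich`]):
    ///```ignore
    ///if let Some(constituents) = history.constituents() {
    ///    for (rule, expr) in constituents {
    ///        println!("{rule}: {expr}");
    ///    }
    ///}
    ///```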
    #[must_use]
    pub fn constituents(
        &self,
    ) -> Option<impl Iterator<Item = (SemanticRule, &RootedLambdaPool<'a, Expr<'a>>)>> {
        match self {
            SemanticHistory::Rich(items) => Some(
                items
                    .iter()
                    .skip(1)
                    .filter_map(|(b, x)| x.as_ref().map(|x| (*b, &x.expr))),
            ),
            SemanticHistory::Simple(_) => None,
        }
    }

    ///Calculates the semantic interpretation of each intermediate step in the derivation,
    ///upgrading a [`SemanticHistory::Simple`] history to [`SemanticHistory::Rich`].
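    ///
    ///A sketch, assuming `semantic_lexicon` and `rules` are the [`SemanticLexicon`] and
    ///[`RulePool`] this history was derived from:
    ///```ignore
    ///let rich_history = history.into_rich(&semantic_lexicon, &rules);
    ///```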
    #[must_use]
    pub fn into_rich<T, C>(self, lexicon: &SemanticLexicon<'a, T, C>, rules: &RulePool) -> Self
    where
        T: Eq + std::fmt::Debug + std::clone::Clone,
        C: Eq + std::fmt::Debug + std::clone::Clone,
    {
        match self {
            SemanticHistory::Rich(items) => SemanticHistory::Rich(items),
            SemanticHistory::Simple(semantic_rules) => {
                let mut items = semantic_rules.into_iter().map(|x| (x, None)).collect_vec();

                let mut derivation = SemanticDerivation {
                    rules,
                    lexicon,
                    semantic_history: vec![],
                };

                derivation.redo_history(RuleIndex(0), &mut items);

                SemanticHistory::Rich(items)
            }
        }
    }
}

impl Display for SemanticNode<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            SemanticNode::Rich(_, Some(interp)) => {
                write!(f, "{interp}")
            }
            SemanticNode::Rich(semantic_rule, None) => {
                write!(f, "{semantic_rule}")
            }
            SemanticNode::Simple(semantic_rule) => write!(f, "{semantic_rule}"),
        }
    }
}

impl Display for SemanticState<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.expr)
    }
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
///The semantic state of a derivation at a given point (i.e. the interpretation of a constituent,
///plus all movers currently in storage)
pub struct SemanticState<'src> {
    expr: RootedLambdaPool<'src, Expr<'src>>,
    movers: BTreeMap<TraceId, (RootedLambdaPool<'src, Expr<'src>>, Option<LambdaType>)>,
}

#[cfg(feature = "pretty")]
#[derive(Debug, Clone, PartialEq, Eq)]
struct Movers<'a, 'src>(
    &'a BTreeMap<TraceId, (RootedLambdaPool<'src, Expr<'src>>, Option<LambdaType>)>,
);

#[cfg(feature = "pretty")]
#[derive(Debug, Clone, PartialEq, Eq)]
struct Mover<'a, 'src>(&'a (RootedLambdaPool<'src, Expr<'src>>, Option<LambdaType>));

#[cfg(feature = "pretty")]
impl Serialize for Mover<'_, '_> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        //The length hint should match the number of fields actually emitted, since the "type"
        //field is optional.
        let n_fields = if self.0.1.is_some() { 3 } else { 2 };
        let mut s = serializer.serialize_struct("Mover", n_fields)?;
        s.serialize_field("expr", self.0.0.to_string().as_str())?;
        s.serialize_field("tokens", &self.0.0)?;
        if let Some(t) = self.0.1.as_ref() {
            s.serialize_field("type", t.to_string().as_str())?;
        }

        s.end()
    }
}

#[cfg(feature = "pretty")]
impl Serialize for Movers<'_, '_> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let mut s = serializer.serialize_map(Some(self.0.len()))?;
        for (k, v) in self.0 {
            s.serialize_entry(k, &Mover(v))?;
        }
        s.end()
    }
}

#[cfg(feature = "pretty")]
impl Serialize for SemanticState<'_> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let mut s = serializer.serialize_struct("SemanticState", 3)?;
        s.serialize_field("expr", self.expr.to_string().as_str())?;
        s.serialize_field("tokens", &self.expr)?;
        s.serialize_field("movers", &Movers(&self.movers))?;

        s.end()
    }
}

impl<'src> SemanticState<'src> {
    fn new(alpha: RootedLambdaPool<'src, Expr<'src>>) -> Self {
        SemanticState {
            expr: alpha,
            movers: BTreeMap::default(),
        }
    }
    fn event_identification(alpha: Self, beta: Self) -> Option<Self> {
        let overlapping_traces = alpha.movers.keys().any(|k| beta.movers.contains_key(k));
        if overlapping_traces {
            return None;
        }

        let SemanticState {
            expr: alpha,
            movers: mut alpha_movers,
            ..
        } = alpha;

        let SemanticState {
            expr: beta,
            movers: beta_movers,
            ..
        } = beta;

        let alpha = match alpha.raised_conjoin(beta) {
            Ok(x) => x,
            Err(ConjoiningError::ReductionError(e)) => {
                panic!("Reduction error in event_identification {e}")
            }
            Err(_) => {
                return None;
            }
        };
        alpha_movers.extend(beta_movers);
        Some(SemanticState {
            expr: alpha,
            movers: alpha_movers,
        })
    }

    fn predicate_modification(alpha: Self, beta: Self) -> Option<Self> {
        let overlapping_traces = alpha.movers.keys().any(|k| beta.movers.contains_key(k));
        if overlapping_traces {
            return None;
        }

        let SemanticState {
            expr: alpha,
            movers: mut alpha_movers,
            ..
        } = alpha;

        let SemanticState {
            expr: beta,
            movers: beta_movers,
            ..
        } = beta;
        let alpha = match alpha.conjoin(beta) {
            Ok(x) => x,
            Err(ConjoiningError::ReductionError(e)) => {
                panic!("Reduction error in predicate_modification {e}")
            }
            Err(_) => return None,
        };

        alpha_movers.extend(beta_movers);
        Some(SemanticState {
            expr: alpha,
            movers: alpha_movers,
        })
    }

    fn merge(alpha: Self, beta: Self) -> Option<Self> {
        let overlapping_traces = alpha.movers.keys().any(|k| beta.movers.contains_key(k));
        if overlapping_traces {
            return None;
        }

        let SemanticState {
            expr: alpha,
            movers: mut alpha_movers,
            ..
        } = alpha;
        let SemanticState {
            expr: beta,
            movers: beta_movers,
            ..
        } = beta;
        if let Some(alpha) = alpha.merge(beta) {
            alpha_movers.extend(beta_movers);
            Some(SemanticState {
                expr: alpha,
                movers: alpha_movers,
            })
        } else {
            None
        }
    }
}

#[derive(Debug, Clone)]
struct SemanticDerivation<'a, 'src, T: Eq, C: Eq> {
    lexicon: &'a SemanticLexicon<'src, T, C>,
    rules: &'a RulePool,
    semantic_history: Vec<HistoryNode>,
}

#[derive(Debug, Clone)]
enum ApplyFromStorageResult<T> {
    SuccesfulMerge(T),
    FailedMerge,
    NoTrace(T),
}

///True if either type can apply to the other (functional application in either direction).
fn can_apply(a: &LambdaType, b: &LambdaType) -> bool {
    a.can_apply(b) || b.can_apply(a)
}

///True if event identification is licensed, i.e. `alpha : <sigma, <tau, t>>` and
///`beta : <tau, t>` for some `sigma` and a shared `tau`.
fn can_event_id(alpha: &LambdaType, beta: &LambdaType) -> bool {
    if let LambdaType::Composition(_sigma, b) = alpha
        && let LambdaType::Composition(tau_alpha, t) = &**b
        && matches!(&**t, LambdaType::T)
        && let LambdaType::Composition(tau_beta, t_beta) = beta
        && tau_beta == tau_alpha
        && matches!(&**t_beta, LambdaType::T)
    {
        true
    } else {
        false
    }
}

impl<'a, 'src, T, C> SemanticDerivation<'a, 'src, T, C>
where
    T: Eq + std::fmt::Debug + std::clone::Clone,
    C: Eq + std::fmt::Debug + std::clone::Clone,
{
    fn interpret(
        rules: &'a RulePool,
        lex: &'a SemanticLexicon<'src, T, C>,
    ) -> impl Iterator<Item = (RootedLambdaPool<'src, Expr<'src>>, SemanticHistory<'static>)> + 'a
    {
        let mut derivation = SemanticDerivation {
            rules,
            lexicon: lex,
            semantic_history: vec![],
        };

        //We jump to rule 1 since the start rule is superfluous
        let last_derivation = derivation.get_previous_rules(RuleIndex(1));

        last_derivation.into_iter().filter_map(move |(x, root)| {
            if x.movers.is_empty() {
                Some((
                    x.expr,
                    SemanticHistory::Simple(derivation.get_history(root)),
                ))
            } else {
                None
            }
        })
    }

    fn get_history(&self, root: HistoryId) -> Vec<SemanticRule> {
        let mut stack = vec![root];
        let mut history: Vec<Option<SemanticRule>> = self
            .rules
            .0
            .iter()
            .map(|rule| match rule {
                Rule::Start { .. } => Some(SemanticRule::Identity),
                Rule::UnmoveTrace(_) => Some(SemanticRule::Trace),
                _ => None,
            })
            .collect();

        while let Some(node) = stack.pop() {
            let HistoryNode {
                rule_id,
                rule,
                children,
            } = self.semantic_history.get(node.0).unwrap();
            history[rule_id.0] = Some(*rule);
            stack.extend(children.iter().filter_map(|x| *x));
        }

        history.into_iter().collect::<Option<Vec<_>>>().unwrap()
    }

    fn history_node(
        &mut self,
        rule_id: RuleIndex,
        semantic: SemanticRule,
        child_a: Option<HistoryId>,
        child_b: Option<HistoryId>,
    ) -> HistoryId {
        self.semantic_history.push(HistoryNode {
            rule_id,
            rule: semantic,
            children: [child_a, child_b],
        });
        HistoryId(self.semantic_history.len() - 1)
    }

    fn identity(
        &mut self,
        rule_id: RuleIndex,
        child: (SemanticState<'src>, HistoryId),
    ) -> (SemanticState<'src>, HistoryId) {
        let (alpha, child_a) = child;
        (
            alpha,
            self.history_node(rule_id, SemanticRule::Identity, Some(child_a), None),
        )
    }

    fn functional_application(
        &mut self,
        rule_id: RuleIndex,
        child: (SemanticState<'src>, HistoryId),
        complement: (SemanticState<'src>, HistoryId),
    ) -> Option<(SemanticState<'src>, HistoryId)> {
        let (alpha, alpha_id) = child;
        let (beta, beta_id) = complement;
        SemanticState::merge(alpha, beta).map(|x| {
            (
                x,
                self.history_node(
                    rule_id,
                    SemanticRule::FunctionalApplication,
                    Some(alpha_id),
                    Some(beta_id),
                ),
            )
        })
    }

    fn predicate_modification(
        &mut self,
        rule_id: RuleIndex,
        child: (SemanticState<'src>, HistoryId),
        complement: (SemanticState<'src>, HistoryId),
    ) -> Option<(SemanticState<'src>, HistoryId)> {
        let (alpha, alpha_id) = child;
        let (beta, beta_id) = complement;
        SemanticState::predicate_modification(alpha, beta).map(|x| {
            (
                x,
                self.history_node(
                    rule_id,
                    SemanticRule::PredicateModification,
                    Some(alpha_id),
                    Some(beta_id),
                ),
            )
        })
    }

    fn event_identification(
        &mut self,
        rule_id: RuleIndex,
        child: (SemanticState<'src>, HistoryId),
        complement: (SemanticState<'src>, HistoryId),
    ) -> Option<(SemanticState<'src>, HistoryId)> {
        let (alpha, alpha_id) = child;
        let (beta, beta_id) = complement;
        SemanticState::event_identification(alpha, beta).map(|x| {
            (
                x,
                self.history_node(
                    rule_id,
                    SemanticRule::EventIdentification,
                    Some(alpha_id),
                    Some(beta_id),
                ),
            )
        })
    }

    fn store(
        &mut self,
        rule_id: RuleIndex,
        child: (SemanticState<'src>, HistoryId),
        complement: (SemanticState<'src>, HistoryId),
        trace_id: TraceId,
    ) -> Option<(SemanticState<'src>, HistoryId)> {
        let (mut alpha, alpha_id) = child;
        let (beta, beta_id) = complement;
        if let Ok(trace_type) = alpha.expr.apply_new_free_variable(trace_id.0.into()) {
            alpha.movers.extend(beta.movers);
            alpha
                .movers
                .insert(trace_id, (beta.expr.clone(), Some(trace_type)));
            Some((
                alpha,
                self.history_node(rule_id, SemanticRule::Store, Some(alpha_id), Some(beta_id)),
            ))
        } else {
            None
        }
    }

    fn update_trace(
        &mut self,
        rule_id: RuleIndex,
        child: (SemanticState<'src>, HistoryId),
        old_trace_id: TraceId,
        trace_id: TraceId,
    ) -> (SemanticState<'src>, HistoryId) {
        let (mut alpha, alpha_child) = child;
        if let Some((stored_value, stored_type)) = alpha.movers.remove(&old_trace_id) {
            alpha
                .movers
                .insert(trace_id, (stored_value, stored_type.clone()));

            if let Some(stored_type) = stored_type {
                alpha
                    .expr
                    .lambda_abstract_free_variable(old_trace_id.0.into(), stored_type, true)
                    .unwrap();
                alpha
                    .expr
                    .apply_new_free_variable(trace_id.0.into())
                    .unwrap();
            }
        }
        (
            alpha,
            self.history_node(rule_id, SemanticRule::UpdateTrace, Some(alpha_child), None),
        )
    }

    fn apply_from_storage(
        &mut self,
        rule_id: RuleIndex,
        child: (SemanticState<'src>, HistoryId),
        trace_id: TraceId,
    ) -> ApplyFromStorageResult<(SemanticState<'src>, HistoryId)> {
        let (mut alpha, alpha_id) = child;
        if let Some((stored_value, stored_type)) = alpha.movers.remove(&trace_id) {
            if let Some(stored_type) = stored_type {
                alpha
                    .expr
                    .lambda_abstract_free_variable(trace_id.0.into(), stored_type, true)
                    .unwrap();
            }
            let SemanticState { expr, movers } = alpha;
            match expr.merge(stored_value).map(|expr| {
                (
                    SemanticState { expr, movers },
                    self.history_node(
                        rule_id,
                        SemanticRule::ApplyFromStorage,
                        Some(alpha_id),
                        None,
                    ),
                )
            }) {
                Some(x) => ApplyFromStorageResult::SuccesfulMerge(x),
                None => ApplyFromStorageResult::FailedMerge,
            }
        } else {
            ApplyFromStorageResult::NoTrace((alpha, alpha_id))
        }
    }

    fn get_trace(&mut self, trace_id: RuleIndex) -> TraceId {
        match self.rules.get(trace_id) {
            Rule::UnmoveTrace(trace_id) => *trace_id,
            _ => panic!("Ill-formed tree"),
        }
    }

    fn get_previous_rules(&mut self, rule_id: RuleIndex) -> Vec<(SemanticState<'src>, HistoryId)> {
        let rule = self.rules.get(rule_id);
        match rule {
            Rule::Scan { lexeme, stolen: _ } => [(
                SemanticState::new(self.lexicon.interpretation(*lexeme).clone()),
                self.history_node(rule_id, SemanticRule::Scan(*lexeme), None, None),
            )]
            .into(),
            // These shouldn't be called.
            Rule::UnmoveTrace(_) => panic!("Traces shouldn't directly be accessed"),
            Rule::Start { .. } => panic!("The start rule must always be skipped"),
            Rule::Unmerge {
                child_id,
                complement_id,
                ..
            } => {
                let complements = self.get_previous_rules(*complement_id);
                let children = self.get_previous_rules(*child_id);

                children
                    .into_iter()
                    .cartesian_product(complements)
                    .flat_map(|(child, complement)| {
                        let child_type = child.0.expr.get_type().unwrap();
                        let complement_type = complement.0.expr.get_type().unwrap();

                        let can_apply = can_apply(&child_type, &complement_type);
                        let can_event_id = can_event_id(&child_type, &complement_type);

                        if can_apply && can_event_id {
                            Either::Left(
                                std::iter::once(self.functional_application(
                                    rule_id,
                                    child.clone(),
                                    complement.clone(),
                                ))
                                .chain(std::iter::once(
                                    self.event_identification(rule_id, child, complement),
                                )),
                            )
                        } else {
                            let x = if child_type == complement_type {
                                self.predicate_modification(rule_id, child, complement)
                            } else if child_type.can_apply(&complement_type)
                                || complement_type.can_apply(&child_type)
                            {
                                self.functional_application(rule_id, child, complement)
                            } else {
                                self.event_identification(rule_id, child, complement)
                            };
                            Either::Right(std::iter::once(x))
                        }
                    })
                    .flatten()
                    .collect()
            }
            Rule::UnmergeFromMover {
                child_id,
                stored_id,
                trace_id,
                ..
            } => {
                let stored = self.get_previous_rules(*stored_id);
                let children = self.get_previous_rules(*child_id);
                let product = children.into_iter().cartesian_product(stored);
                let mut new_states = product
                    .clone()
                    .filter_map(|(child, complement)| {
                        self.store(rule_id, child, complement, *trace_id)
                    })
                    .collect::<Vec<_>>();

                new_states.extend(
                    product
                        .flat_map(|(child, complement)| {
                            let child_type = child.0.expr.get_type().unwrap();
                            let complement_type = complement.0.expr.get_type().unwrap();

                            let can_apply = can_apply(&child_type, &complement_type);
                            let can_event_id = can_event_id(&child_type, &complement_type);

                            if can_apply && can_event_id {
                                Either::Left(
                                    std::iter::once(self.functional_application(
                                        rule_id,
                                        child.clone(),
                                        complement.clone(),
                                    ))
                                    .chain(std::iter::once(
                                        self.event_identification(rule_id, child, complement),
                                    )),
                                )
                            } else {
                                let x = if child_type == complement_type {
                                    self.predicate_modification(rule_id, child, complement)
                                } else if child_type.can_apply(&complement_type)
                                    || complement_type.can_apply(&child_type)
                                {
                                    self.functional_application(rule_id, child, complement)
                                } else {
                                    self.event_identification(rule_id, child, complement)
                                };
                                Either::Right(std::iter::once(x))
                            }
                        })
                        .flatten(),
                );
                new_states
            }
            Rule::Unmove {
                child_id,
                stored_id,
            } =>
            //We add the lambda extraction to child_id
            {
                let trace_id = self.get_trace(*stored_id);
                let children = self.get_previous_rules(*child_id);
                children
                    .into_iter()
                    .filter_map(
                        |child| match self.apply_from_storage(rule_id, child, trace_id) {
                            ApplyFromStorageResult::SuccesfulMerge(x) => Some(x),
                            ApplyFromStorageResult::FailedMerge => None,
                            ApplyFromStorageResult::NoTrace(child) => {
                                Some(self.identity(rule_id, child))
                            }
                        },
                    )
                    .collect()
            }

            Rule::UnmoveFromMover {
                child_id,
                stored_id,
                trace_id,
                ..
            } => {
                let children = self.get_previous_rules(*child_id);
                let old_trace_id = self.get_trace(*stored_id);
                let mut states = children
                    .clone()
                    .into_iter()
                    .map(|child| self.update_trace(rule_id, child, old_trace_id, *trace_id))
                    .collect::<Vec<_>>();
                states.extend(children.into_iter().filter_map(
                    |child| match self.apply_from_storage(rule_id, child, old_trace_id) {
                        ApplyFromStorageResult::SuccesfulMerge(x) => Some(x),
                        //We don't percolate up if the trace is missing, because the previous rule
                        //handles that.
                        ApplyFromStorageResult::FailedMerge
                        | ApplyFromStorageResult::NoTrace(_) => None,
                    },
                ));
                states
            }
        }
    }

    fn redo_history(
        &mut self,
        rule_id: RuleIndex,
        history: &mut [(SemanticRule, Option<SemanticState<'src>>)],
    ) {
        let rule = *self.rules.get(rule_id);
        let semantic_rule = history.get(rule_id.0).unwrap().0;
        let children: Vec<_> = self.rules.get(rule_id).children().collect();

        for child in &children {
            self.redo_history(*child, history);
        }
        let get_child = |i: usize| {
            (
                history
                    .get(children.get(i).unwrap().0)
                    .unwrap()
                    .1
                    .clone()
                    .unwrap(),
                HistoryId(0),
            )
        };

        let trace_id = match &rule {
            Rule::UnmergeFromMover { trace_id, .. } | Rule::UnmoveFromMover { trace_id, .. } => {
                Some(*trace_id)
            }
            _ => None,
        };

        let value = match semantic_rule {
            SemanticRule::FunctionalApplication => {
                let child = get_child(0);
                let complement = get_child(1);

                self.functional_application(rule_id, child, complement)
            }
            SemanticRule::PredicateModification => {
                let child = get_child(0);
                let complement = get_child(1);

                self.predicate_modification(rule_id, child, complement)
            }
            SemanticRule::EventIdentification => {
                let child = get_child(0);
                let complement = get_child(1);

                self.event_identification(rule_id, child, complement)
            }
            SemanticRule::Store => {
                let child = get_child(0);
                let complement = get_child(1);
                self.store(rule_id, child, complement, trace_id.unwrap())
            }
            SemanticRule::Identity => {
                let child = get_child(0);
                Some(self.identity(rule_id, child))
            }
            SemanticRule::ApplyFromStorage => {
                let child = get_child(0);
                let trace_id = self.get_trace(children[1]);
                match self.apply_from_storage(rule_id, child, trace_id) {
                    ApplyFromStorageResult::SuccesfulMerge(x) => Some(x),
                    ApplyFromStorageResult::FailedMerge | ApplyFromStorageResult::NoTrace(_) => {
                        None
                    }
                }
            }
            SemanticRule::UpdateTrace => {
                let child = get_child(0);
                let old_trace_id = self.get_trace(children[1]);
                Some(self.update_trace(rule_id, child, old_trace_id, trace_id.unwrap()))
            }
            SemanticRule::Trace => {
                return;
            }
            SemanticRule::Scan(_) => {
                let node = match rule {
                    Rule::Scan { lexeme, stolen: _ } => lexeme,
                    _ => panic!(
                        "The scan semantic rule should only happen with scanning when parsing"
                    ),
                };
                Some((
                    SemanticState::new(self.lexicon.interpretation(node).clone()),
                    HistoryId(0),
                ))
            }
        };

        let s = history.get_mut(rule_id.0).unwrap();
        let state = &mut s.1;

        //TODO: this panicked in some downstream code, see if you can find out why.
        let mut value = value.unwrap().0;
        value.expr.reduce().unwrap();

        *state = Some(value);
    }
}

#[cfg(test)]
mod tests {

    use crate::lexicon::SemanticLexicon;
    use crate::{ParsingConfig, PhonContent};

    #[test]
    fn doesnt_crash_with_bad_typed_double_movement() -> anyhow::Result<()> {
        let lexicon = SemanticLexicon::parse(
            "mary::0 -1 -1::a_0\n::=0 +1 0::lambda <e,e> x (a_1)\nran::=0 +1 0::a_1",
        )?;
        for (_, _, r) in lexicon.lexicon().parse(
            &PhonContent::from(["mary", "ran"]),
            "0",
            &ParsingConfig::default(),
        )? {
            for (x, _h) in r.to_interpretation(&lexicon).take(10) {
                println!("{x}");
            }
        }
        Ok(())
    }

    #[test]
    fn predicate_modification() -> anyhow::Result<()> {
        let lexicon = SemanticLexicon::parse(
            "tall::n= n::lambda a x pa_tall(x)\nman::n::lambda a x pa_man(x)",
        )?;
        for (_, _, r) in lexicon.lexicon().parse(
            &PhonContent::from(["tall", "man"]),
            "n",
            &ParsingConfig::default(),
        )? {
            let (x, _h) = r.to_interpretation(&lexicon).next().unwrap();

            assert_eq!(x.to_string(), "lambda a x pa_tall(x) & pa_man(x)");
        }
        Ok(())
    }

    #[test]
    fn weird_lex() -> anyhow::Result<()> {
        let lexicons = [
            (
                "0::3= +2 1= 0::lambda t phi phi & Q#<a,t>(a_m)\n1::3 -2::lambda t phi phi & P#<a,t>(a_m)\n2::1::P#<a,t>(a_j)",
                ["1", "0", "2"],
            ),
            (
                "0::3= 2= +1 0::a_c\n1::3 -1::lambda a x iota_e(y, some_e(z, all_e, pa_Q(a_c)))\n2::2::lambda e x ~(pe_run(x) | pe_walk(x))",
                ["1", "0", "2"],
            ),
            (
                "0::2= 1= 0::lambda <t,e> P lambda <t,t> Q pe_run(P(pa_Q(a_c)))\n1::2::lambda <t,t> P some_e(x, pe_walk(x), pe_walk(x))\n2::1::lambda <<t,e>,<<t,t>,t>> M pe_walk(iota_e(x, pe_walk(x)))",
                ["0", "1", "2"],
            ),
            (
                "0::2= 1= 0::lambda <a,t> P P\n1::2::lambda a x ~pa_Q(x)\n2::1::lambda <<a,t>,<a,t>> M PatientOf(a_c, iota_e(x, pa_P(a_b)))",
                ["0", "1", "2"],
            ),
        ];

        for (lexicon, s) in lexicons {
            let lexicon = SemanticLexicon::parse(lexicon)?;
            let mut n = 0;
            for (_, _, r) in
                lexicon
                    .lexicon()
                    .parse(&PhonContent::from(s), "0", &ParsingConfig::default())?
            {
                let (x, _h) = r.to_interpretation(&lexicon).next().unwrap();
                println!("{x}");
                n += 1;
            }
            assert!(n > 0);
        }
        Ok(())
    }

    #[test]
    fn event_identification() -> anyhow::Result<()> {
        let lexicon = SemanticLexicon::parse(
            "voice::v= V::lambda a x lambda e y AgentOf(x, y)\nrun::v::lambda e x pe_runs(x)",
        )?;
        for (_, _, r) in lexicon.lexicon().parse(
            &PhonContent::from(["voice", "run"]),
            "V",
            &ParsingConfig::default(),
        )? {
            let (x, _h) = r.to_interpretation(&lexicon).next().unwrap();

            assert_eq!(
                x.to_string(),
                "lambda a x lambda e y AgentOf(x, y) & pe_runs(y)"
            )
        }
        Ok(())
    }
}