minimalist_grammar_parser/lexicon/
semantics.rs

1use ahash::HashMap;
2use simple_semantics::language::LambdaParseError;
3use std::fmt::Debug;
4
5use crate::{ParsingConfig, PhonContent};
6
7use super::*;
8
9use itertools::Itertools;
10use simple_semantics::LanguageExpression;
11use simple_semantics::lambda::RootedLambdaPool;
12use simple_semantics::language::Expr;
13
///A lexicon that is paired with semantic interpretations for its leaf nodes.
///
///Each leaf must have a semantic interpretation defined as a [`RootedLambdaPool<Expr>`].
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct SemanticLexicon<'src, T: Eq, Category: Eq> {
    ///The purely syntactic lexicon (lemmas, features, leaf structure).
    lexicon: Lexicon<T, Category>,
    ///The lambda-calculus interpretation of each leaf, keyed by the
    ///[`LexemeId`]s stored in `lexicon`'s leaves.
    semantic_entries: HashMap<LexemeId, RootedLambdaPool<'src, Expr<'src>>>,
}
22
23impl<'src, T: Eq, C: Eq> SemanticLexicon<'src, T, C> {
24    ///Create a new [`SemanticLexicon`] by combining a [`Lexicon`] and a [`HashMap`] of leaf nodes
25    ///and semantic interpretations ([`RootedLambdaPool`])
26    pub fn new(
27        lexicon: Lexicon<T, C>,
28        semantic_entries: HashMap<LexemeId, RootedLambdaPool<'src, Expr<'src>>>,
29    ) -> Self {
30        SemanticLexicon {
31            lexicon,
32            semantic_entries,
33        }
34    }
35}
36
#[allow(clippy::type_complexity)]
//Chumsky parser for a whole semantic grammar file.
//
//Each line has the form `<lexical entry>::<lambda term>`, lines separated by
//newlines, and the entire input must be consumed (`end()`). The lambda term is
//not parsed by chumsky itself: the raw slice up to the end of the line is
//handed to [`RootedLambdaPool::parse`], so semantic parse failures are
//reported per entry rather than failing the whole grammar parse.
fn semantic_grammar_parser<'src>() -> impl Parser<
    'src,
    &'src str,
    (
        Lexicon<&'src str, &'src str>,
        Vec<(
            LexemeId,
            Result<RootedLambdaPool<'src, Expr<'src>>, LambdaParseError>,
        )>,
    ),
    extra::Err<Rich<'src, char>>,
> {
    entry_parser()
        .then_ignore(just("::").padded())
        .then(
            //Everything up to the next newline is the lambda-term source text.
            any()
                .and_is(newline().not())
                .repeated()
                .to_slice()
                .map(RootedLambdaPool::parse),
        )
        .separated_by(newline())
        .collect::<Vec<_>>()
        .map(|vec| {
            let (lexical_entries, interpretations): (Vec<_>, Vec<_>) = vec.into_iter().unzip();

            //  Assumes that the leaves iterator goes in order of lexical_entries
            let lexicon = Lexicon::new(lexical_entries, false);
            let semantic_entries = lexicon
                .leaves
                .iter()
                .copied()
                .zip(interpretations)
                .collect();

            (lexicon, semantic_entries)
        })
        .then_ignore(end())
}
77
78impl<'src> SemanticLexicon<'src, &'src str, &'src str> {
79    ///Create a new semantic lexicon by parsing a string.
80    pub fn parse(s: &'src str) -> Result<Self, LambdaParseError> {
81        let (lexicon, semantic_entries) = semantic_grammar_parser().parse(s).into_result()?;
82
83        let semantic_lexicon = SemanticLexicon {
84            lexicon,
85            semantic_entries: semantic_entries
86                .into_iter()
87                .map(|(k, v)| v.map(|v| (k, v)))
88                .collect::<Result<_, _>>()?,
89        };
90        Ok(semantic_lexicon)
91    }
92}
93
94impl<'src, T: Eq + Clone + Debug, C: Eq + Clone + Debug> SemanticLexicon<'src, T, C> {
95    ///Get the interpretation of a leaf node. Panics if the node has no semantic interpretation.
96    pub fn interpretation(&self, lexeme_id: LexemeId) -> &RootedLambdaPool<'src, Expr<'src>> {
97        self.semantic_entries
98            .get(&lexeme_id)
99            .expect("There is no lemma of that node index!")
100    }
101
102    ///Get a reference to the underlying [`Lexicon`]
103    pub fn lexicon(&self) -> &Lexicon<T, C> {
104        &self.lexicon
105    }
106
107    ///Get a mutable reference to the underlying [`Lexicon`]
108    pub fn lexicon_mut(&mut self) -> &mut Lexicon<T, C> {
109        &mut self.lexicon
110    }
111
112    ///Get a reference to the underlying [`HashMap`] of lexical entries.
113    pub fn interpretations(&self) -> &HashMap<LexemeId, RootedLambdaPool<'src, Expr<'src>>> {
114        &self.semantic_entries
115    }
116
117    ///Get a mutable reference to the underlying [`HashMap`] of lexical entries.
118    pub fn interpretations_mut(
119        &mut self,
120    ) -> &mut HashMap<LexemeId, RootedLambdaPool<'src, Expr<'src>>> {
121        &mut self.semantic_entries
122    }
123
124    ///Get a mutable reference to both the underlying [`Lexicon`] and [`HashMap`] of lexical
125    ///entries.
126    pub fn lexicon_and_interpretations_mut(
127        &mut self,
128    ) -> (
129        &mut Lexicon<T, C>,
130        &mut HashMap<LexemeId, RootedLambdaPool<'src, Expr<'src>>>,
131    ) {
132        (&mut self.lexicon, &mut self.semantic_entries)
133    }
134
135    ///Remaps the lexicon to a new category or lemma type
136    pub fn remap_lexicon<T2: Eq, C2: Eq>(
137        self,
138        lemma_map: impl Fn(&T) -> T2,
139        category_map: impl Fn(&C) -> C2,
140    ) -> SemanticLexicon<'src, T2, C2> {
141        let SemanticLexicon {
142            lexicon,
143            semantic_entries,
144        } = self;
145
146        let lexicon = lexicon.remap_lexicon(lemma_map, category_map);
147
148        SemanticLexicon {
149            lexicon,
150            semantic_entries,
151        }
152    }
153
154    ///Parse a sentence and return all its parses and their interpretations as nested iterators.
155    #[allow(clippy::type_complexity)]
156    pub fn parse_and_interpret<'a, 'b: 'a>(
157        &'a self,
158        category: C,
159        sentence: &'b [PhonContent<T>],
160        config: &'b ParsingConfig,
161    ) -> Result<
162        impl Iterator<
163            Item = (
164                LogProb<f64>,
165                &'a [PhonContent<T>],
166                impl Iterator<Item = LanguageExpression<'src>>,
167            ),
168        >,
169        ParsingError<C>,
170    > {
171        Ok(self
172            .lexicon
173            .parse(sentence, category, config)?
174            .map(move |(p, s, r)| {
175                (
176                    p,
177                    s,
178                    r.to_interpretation(self)
179                        .filter_map(|(pool, _)| pool.into_pool().ok())
180                        .collect_vec()
181                        .into_iter(),
182                )
183            }))
184    }
185}
186
187impl<T: Eq, C: Eq> From<SemanticLexicon<'_, T, C>> for Lexicon<T, C> {
188    fn from(value: SemanticLexicon<'_, T, C>) -> Self {
189        value.lexicon
190    }
191}
192
193impl<T, C> Display for SemanticLexicon<'_, T, C>
194where
195    T: Eq + Display + std::fmt::Debug + Clone,
196    C: Eq + Display + std::fmt::Debug + Clone,
197{
198    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
199        write!(
200            f,
201            "{}",
202            self.lexicon
203                .lexemes()
204                .unwrap()
205                .iter()
206                .zip(self.lexicon.leaves.iter())
207                .map(|(l, n)| format!("{l}::{}", self.semantic_entries[n]))
208                .join("\n")
209        )
210    }
211}
212#[cfg(test)]
213mod test {
214    use itertools::Itertools;
215    use logprob::LogProb;
216    use simple_semantics::lambda::RootedLambdaPool;
217
218    use super::SemanticLexicon;
219    use crate::{ParsingConfig, PhonContent};
220
    #[test]
    fn trivial_montague() -> anyhow::Result<()> {
        //End-to-end check: parse "john likes mary" as a `v` and verify the
        //computed interpretation (plus, under `pretty`, the LaTeX derivations).
        let config: ParsingConfig = ParsingConfig::new(
            LogProb::new(-256.0).unwrap(),
            LogProb::from_raw_prob(0.5).unwrap(),
            100,
            1000,
        );
        let lexicon = "john::d::a_j\nmary::d::a_m\nlikes::d= =d v::lambda a x (lambda a y (some_e(e, all_e, AgentOf(y, e) & PatientOf(x, e) & pe_likes(e))))";

        //Scenarios are not reliably assigning values!
        let semantic = SemanticLexicon::parse(lexicon)?;
        //Take the first parse of the sentence.
        let (_, _, rules) = semantic
            .lexicon
            .parse(&PhonContent::from(["john", "likes", "mary"]), "v", &config)?
            .next()
            .unwrap();
        let (interpretation, mut history) = rules.to_interpretation(&semantic).next().unwrap();
        let interpretation = interpretation.into_pool()?;

        #[cfg(feature = "pretty")]
        {
            //Plain derivation tree, labelled with rule names.
            let latex = semantic
                .derivation(rules.clone(), history.clone())
                .tree()
                .latex();
            println!("{latex}");
            assert_eq!(
                latex,
                "\\begin{forest}[\\semder{v}{\\textsc{FA}} [\\lex{d}{john}{\\textsc{LexicalEntry}}] [\\semder{=d v}{\\textsc{FA}} [\\lex{d= =d v}{likes}{\\textsc{LexicalEntry}}] [\\lex{d}{mary}{\\textsc{LexicalEntry}}]]]\\end{forest}"
            );

            //Rich derivation tree, labelled with the semantic values.
            history = history.into_rich(&semantic, &rules);
            let latex = semantic.derivation(rules.clone(), history).tree().latex();
            println!("{latex}");
            assert_eq!(
                latex,
                "\\begin{forest}[\\semder{v}{\\texttt{some\\_e(x, all\\_e, AgentOf(a\\_j, x) \\& PatientOf(a\\_m, x) \\& pe\\_likes(x))}} [\\lex{d}{john}{\\texttt{a\\_j}}] [\\semder{=d v}{\\texttt{{$\\lambda_{a}$}x some\\_e(y, all\\_e, AgentOf(x, y) \\& PatientOf(a\\_m, y) \\& pe\\_likes(y))}} [\\lex{d= =d v}{likes}{\\texttt{{$\\lambda_{a}$}x {$\\lambda_{a}$}y some\\_e(z, all\\_e, AgentOf(y, z) \\& PatientOf(x, z) \\& pe\\_likes(z))}}] [\\lex{d}{mary}{\\texttt{a\\_m}}]]]\\end{forest}"
            );
        }
        assert_eq!(
            "some_e(x, all_e, AgentOf(a_j, x) & PatientOf(a_m, x) & pe_likes(x))",
            interpretation.to_string()
        );
        Ok(())
    }
267
    #[test]
    fn moving_montague() -> anyhow::Result<()> {
        //Exercises wh-movement: "john knows who likes mary". The mover ("who")
        //is stored and later applied from storage, so this covers the
        //Store/ApplyFromStorage pipeline end to end.
        let config: ParsingConfig = ParsingConfig::new(
            LogProb::new(-256.0).unwrap(),
            LogProb::from_raw_prob(0.5).unwrap(),
            100,
            1000,
        );
        let lexicon = "john::d::a_j\nmary::d::a_m\nlikes::d= =d v::lambda a x (lambda a y (some_e(e, all_e, AgentOf(x, e) & PatientOf(y, e) & pe_likes(e))))";
        //Extend the lexicon with two phonologically empty complementizers,
        //plus "knows" and "who".
        let lexicon = format!(
            "{lexicon}\n::=v c::lambda t phi (phi)\n::v= +wh c::lambda t phi (phi)\nknows::c= =d v::lambda <a,t> P (lambda a x (P(x)))\nwho::d -wh::lambda <a,t> P (P)",
        );

        let s = lexicon.as_str();
        let semantic = SemanticLexicon::parse(s)?;

        //Take the first parse of the sentence as a `c`.
        let (_, _, rules) = semantic
            .lexicon()
            .parse(
                &PhonContent::from(["john", "knows", "who", "likes", "mary"]),
                "c",
                &config,
            )
            .map_err(|x| x.inner_into::<String>())?
            .next()
            .unwrap();
        dbg!(&rules);
        let (interpretation, mut history) = rules.to_interpretation(&semantic).next().unwrap();
        let interpretation = interpretation.into_pool()?;
        assert_eq!(
            interpretation.to_string(),
            "some_e(x, all_e, AgentOf(a_m, x) & PatientOf(a_j, x) & pe_likes(x))"
        );
        #[cfg(feature = "pretty")]
        {
            //Plain derivation tree labelled with rule names.
            let latex = semantic
                .derivation(rules.clone(), history.clone())
                .tree()
                .latex();

            println!("{latex}");
            assert_eq!(
                latex,
                "\\begin{forest}[\\semder{c}{\\textsc{FA}} [\\semder{v}{\\textsc{FA}} [\\lex{d}{john}{\\textsc{LexicalEntry}}] [\\semder{=d v}{\\textsc{FA}} [\\lex{c= =d v}{knows}{\\textsc{LexicalEntry}}] [\\semder{c}{\\textsc{ApplyFromStorage}} [\\lex{d -wh}{who}{\\textsc{LexicalEntry}}] [\\semder{+wh c}{\\textsc{FA}} [\\lex{v= +wh c}{$\\epsilon$}{\\textsc{LexicalEntry}}] [\\semder{v}{\\textsc{Store}} [$t_0$] [\\semder{=d v}{\\textsc{FA}} [\\lex{d= =d v}{likes}{\\textsc{LexicalEntry}}] [\\lex{d}{mary}{\\textsc{LexicalEntry}}]]]]]]] [\\lex{=v c}{$\\epsilon$}{\\textsc{LexicalEntry}}]]\\end{forest}"
            );

            //Rich derivation tree labelled with semantic values, plus its JSON
            //serialization.
            history = history.into_rich(&semantic, &rules);
            let tree = semantic.derivation(rules, history).tree();
            let latex = tree.latex();
            println!("{latex}");
            assert_eq!(
                latex,
                "\\begin{forest}[\\semder{c}{\\texttt{some\\_e(x, all\\_e, AgentOf(a\\_m, x) \\& PatientOf(a\\_j, x) \\& pe\\_likes(x))}} [\\semder{v}{\\texttt{some\\_e(x, all\\_e, AgentOf(a\\_m, x) \\& PatientOf(a\\_j, x) \\& pe\\_likes(x))}} [\\lex{d}{john}{\\texttt{a\\_j}}] [\\semder{=d v}{\\texttt{{$\\lambda_{a}$}x some\\_e(y, all\\_e, AgentOf(a\\_m, y) \\& PatientOf(x, y) \\& pe\\_likes(y))}} [\\lex{c= =d v}{knows}{\\texttt{{$\\lambda_{\\left\\langle a,t\\right\\rangle }$}P {$\\lambda_{a}$}x P(x)}}] [\\semder{c}{\\texttt{{$\\lambda_{a}$}x some\\_e(y, all\\_e, AgentOf(a\\_m, y) \\& PatientOf(x, y) \\& pe\\_likes(y))}} [\\lex{d -wh}{who}{\\texttt{{$\\lambda_{\\left\\langle a,t\\right\\rangle }$}P P}}] [\\semder{+wh c}{\\texttt{some\\_e(x, all\\_e, AgentOf(a\\_m, x) \\& PatientOf(0\\#a, x) \\& pe\\_likes(x))}} [\\lex{v= +wh c}{$\\epsilon$}{\\texttt{{$\\lambda_{t}$}phi phi}}] [\\semder{v}{\\texttt{some\\_e(x, all\\_e, AgentOf(a\\_m, x) \\& PatientOf(0\\#a, x) \\& pe\\_likes(x))}} [$t_0$] [\\semder{=d v}{\\texttt{{$\\lambda_{a}$}x some\\_e(y, all\\_e, AgentOf(a\\_m, y) \\& PatientOf(x, y) \\& pe\\_likes(y))}} [\\lex{d= =d v}{likes}{\\texttt{{$\\lambda_{a}$}x {$\\lambda_{a}$}y some\\_e(z, all\\_e, AgentOf(x, z) \\& PatientOf(y, z) \\& pe\\_likes(z))}}] [\\lex{d}{mary}{\\texttt{a\\_m}}]]]]]]] [\\lex{=v c}{$\\epsilon$}{\\texttt{{$\\lambda_{t}$}phi phi}}]]\\end{forest}"
            );
            let typst = serde_json::to_string(&tree)?;
            println!("{typst}");
            assert_eq!(
                typst,
                "{\"tree\":[{\"Node\":{\"features\":[\"c\"],\"movement\":[],\"semantics\":{\"rule\":\"FunctionalApplication\",\"state\":{\"expr\":\"some_e(x, all_e, AgentOf(a_m, x) & PatientOf(a_j, x) & pe_likes(x))\",\"tokens\":[{\"Quantifier\":{\"q\":\"some\",\"var\":{\"Bound\":\"x\"},\"t\":\"e\"}},\"OpenDelim\",{\"Const\":\"all_e\"},\"ArgSep\",{\"Func\":\"AgentOf\"},\"OpenDelim\",{\"Actor\":\"m\"},\"ArgSep\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"PatientOf\"},\"OpenDelim\",{\"Actor\":\"j\"},\"ArgSep\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"likes\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",\"CloseDelim\"],\"movers\":{}}}}},[{\"Node\":{\"features\":[\"v\"],\"movement\":[],\"semantics\":{\"rule\":\"FunctionalApplication\",\"state\":{\"expr\":\"some_e(x, all_e, AgentOf(a_m, x) & PatientOf(a_j, x) & pe_likes(x))\",\"tokens\":[{\"Quantifier\":{\"q\":\"some\",\"var\":{\"Bound\":\"x\"},\"t\":\"e\"}},\"OpenDelim\",{\"Const\":\"all_e\"},\"ArgSep\",{\"Func\":\"AgentOf\"},\"OpenDelim\",{\"Actor\":\"m\"},\"ArgSep\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"PatientOf\"},\"OpenDelim\",{\"Actor\":\"j\"},\"ArgSep\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"likes\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",\"CloseDelim\"],\"movers\":{}}}}},{\"Leaf\":{\"features\":[\"d\"],\"lemma\":{\"Single\":\"john\"},\"semantics\":{\"rule\":{\"Scan\":[2]},\"state\":{\"expr\":\"a_j\",\"tokens\":[{\"Actor\":\"j\"}],\"movers\":{}}}}},[{\"Node\":{\"features\":[\"=d\",\"v\"],\"movement\":[],\"semantics\":{\"rule\":\"FunctionalApplication\",\"state\":{\"expr\":\"lambda a x some_e(y, all_e, AgentOf(a_m, y) & PatientOf(x, y) & pe_likes(y))\",\"tokens\":[{\"Lambda\":{\"t\":\"a\",\"var\":{\"Bound\":\"x\"}}},{\"Quantifier\":{\"q\":\"some\",\"var\":{\"Bound\":\"y\"},\"t\":\"e\"}},\"OpenDelim\",{\"Const\":\"all_e\"},\"ArgSep\",{\"Func\":\"AgentOf\"},\"OpenDelim\",{\"Actor\":\"m\"},\"ArgSep\",{\"Var\":{\"Bound\":\"y\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"PatientOf\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"x\"}},\"ArgSep\",{\"Var\":{\"Bound\":\"y\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"likes\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"y\"}},\"CloseDelim\",\"CloseDelim\"],\"movers\":{}}}}},{\"Leaf\":{\"features\":[\"c=\",\"=d\",\"v\"],\"lemma\":{\"Single\":\"knows\"},\"semantics\":{\"rule\":{\"Scan\":[15]},\"state\":{\"expr\":\"lambda <a,t> P lambda a x P(x)\",\"tokens\":[{\"Lambda\":{\"t\":\"<a,t>\",\"var\":{\"Bound\":\"P\"}}},{\"Lambda\":{\"t\":\"a\",\"var\":{\"Bound\":\"x\"}}},{\"Var\":{\"Bound\":\"P\"}},\"OpenDelim\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\"],\"movers\":{}}}}},[{\"Node\":{\"features\":[\"c\"],\"movement\":[],\"semantics\":{\"rule\":\"ApplyFromStorage\",\"state\":{\"expr\":\"lambda a x some_e(y, all_e, AgentOf(a_m, y) & PatientOf(x, y) & pe_likes(y))\",\"tokens\":[{\"Lambda\":{\"t\":\"a\",\"var\":{\"Bound\":\"x\"}}},{\"Quantifier\":{\"q\":\"some\",\"var\":{\"Bound\":\"y\"},\"t\":\"e\"}},\"OpenDelim\",{\"Const\":\"all_e\"},\"ArgSep\",{\"Func\":\"AgentOf\"},\"OpenDelim\",{\"Actor\":\"m\"},\"ArgSep\",{\"Var\":{\"Bound\":\"y\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"PatientOf\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"x\"}},\"ArgSep\",{\"Var\":{\"Bound\":\"y\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"likes\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"y\"}},\"CloseDelim\",\"CloseDelim\"],\"movers\":{}}}}},{\"Leaf\":{\"features\":[\"d\",\"-wh\"],\"lemma\":{\"Single\":\"who\"},\"semantics\":{\"rule\":{\"Scan\":[18]},\"state\":{\"expr\":\"lambda <a,t> P P\",\"tokens\":[{\"Lambda\":{\"t\":\"<a,t>\",\"var\":{\"Bound\":\"P\"}}},{\"Var\":{\"Bound\":\"P\"}}],\"movers\":{}}}}},[{\"Node\":{\"features\":[\"+wh\",\"c\"],\"movement\":[[\"-wh\"]],\"semantics\":{\"rule\":\"FunctionalApplication\",\"state\":{\"expr\":\"some_e(x, all_e, AgentOf(a_m, x) & PatientOf(0#a, x) & pe_likes(x))\",\"tokens\":[{\"Quantifier\":{\"q\":\"some\",\"var\":{\"Bound\":\"x\"},\"t\":\"e\"}},\"OpenDelim\",{\"Const\":\"all_e\"},\"ArgSep\",{\"Func\":\"AgentOf\"},\"OpenDelim\",{\"Actor\":\"m\"},\"ArgSep\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"PatientOf\"},\"OpenDelim\",{\"Var\":{\"Free\":{\"label\":\"0\",\"t\":\"a\",\"anon\":true}}},\"ArgSep\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"likes\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",\"CloseDelim\"],\"movers\":{\"0\":{\"expr\":\"lambda <a,t> P P\",\"tokens\":[{\"Lambda\":{\"t\":\"<a,t>\",\"var\":{\"Bound\":\"P\"}}},{\"Var\":{\"Bound\":\"P\"}}],\"type\":\"a\"}}}}}},{\"Leaf\":{\"features\":[\"v=\",\"+wh\",\"c\"],\"lemma\":{\"Single\":null},\"semantics\":{\"rule\":{\"Scan\":[13]},\"state\":{\"expr\":\"lambda t phi phi\",\"tokens\":[{\"Lambda\":{\"t\":\"t\",\"var\":{\"Bound\":\"phi\"}}},{\"Var\":{\"Bound\":\"phi\"}}],\"movers\":{}}}}},[{\"Node\":{\"features\":[\"v\"],\"movement\":[[\"-wh\"]],\"semantics\":{\"rule\":\"Store\",\"state\":{\"expr\":\"some_e(x, all_e, AgentOf(a_m, x) & PatientOf(0#a, x) & pe_likes(x))\",\"tokens\":[{\"Quantifier\":{\"q\":\"some\",\"var\":{\"Bound\":\"x\"},\"t\":\"e\"}},\"OpenDelim\",{\"Const\":\"all_e\"},\"ArgSep\",{\"Func\":\"AgentOf\"},\"OpenDelim\",{\"Actor\":\"m\"},\"ArgSep\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"PatientOf\"},\"OpenDelim\",{\"Var\":{\"Free\":{\"label\":\"0\",\"t\":\"a\",\"anon\":true}}},\"ArgSep\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"likes\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",\"CloseDelim\"],\"movers\":{\"0\":{\"expr\":\"lambda <a,t> P P\",\"tokens\":[{\"Lambda\":{\"t\":\"<a,t>\",\"var\":{\"Bound\":\"P\"}}},{\"Var\":{\"Bound\":\"P\"}}],\"type\":\"a\"}}}}}},{\"Trace\":{\"trace\":0,\"semantics\":{\"rule\":\"Trace\",\"state\":null}}},[{\"Node\":{\"features\":[\"=d\",\"v\"],\"movement\":[],\"semantics\":{\"rule\":\"FunctionalApplication\",\"state\":{\"expr\":\"lambda a x some_e(y, all_e, AgentOf(a_m, y) & PatientOf(x, y) & pe_likes(y))\",\"tokens\":[{\"Lambda\":{\"t\":\"a\",\"var\":{\"Bound\":\"x\"}}},{\"Quantifier\":{\"q\":\"some\",\"var\":{\"Bound\":\"y\"},\"t\":\"e\"}},\"OpenDelim\",{\"Const\":\"all_e\"},\"ArgSep\",{\"Func\":\"AgentOf\"},\"OpenDelim\",{\"Actor\":\"m\"},\"ArgSep\",{\"Var\":{\"Bound\":\"y\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"PatientOf\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"x\"}},\"ArgSep\",{\"Var\":{\"Bound\":\"y\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"likes\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"y\"}},\"CloseDelim\",\"CloseDelim\"],\"movers\":{}}}}},{\"Leaf\":{\"features\":[\"d=\",\"=d\",\"v\"],\"lemma\":{\"Single\":\"likes\"},\"semantics\":{\"rule\":{\"Scan\":[7]},\"state\":{\"expr\":\"lambda a x lambda a y some_e(z, all_e, AgentOf(x, z) & PatientOf(y, z) & pe_likes(z))\",\"tokens\":[{\"Lambda\":{\"t\":\"a\",\"var\":{\"Bound\":\"x\"}}},{\"Lambda\":{\"t\":\"a\",\"var\":{\"Bound\":\"y\"}}},{\"Quantifier\":{\"q\":\"some\",\"var\":{\"Bound\":\"z\"},\"t\":\"e\"}},\"OpenDelim\",{\"Const\":\"all_e\"},\"ArgSep\",{\"Func\":\"AgentOf\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"x\"}},\"ArgSep\",{\"Var\":{\"Bound\":\"z\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"PatientOf\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"y\"}},\"ArgSep\",{\"Var\":{\"Bound\":\"z\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"likes\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"z\"}},\"CloseDelim\",\"CloseDelim\"],\"movers\":{}}}}},{\"Leaf\":{\"features\":[\"d\"],\"lemma\":{\"Single\":\"mary\"},\"semantics\":{\"rule\":{\"Scan\":[3]},\"state\":{\"expr\":\"a_m\",\"tokens\":[{\"Actor\":\"m\"}],\"movers\":{}}}}}]]]]]],{\"Leaf\":{\"features\":[\"=v\",\"c\"],\"lemma\":{\"Single\":null},\"semantics\":{\"rule\":{\"Scan\":[10]},\"state\":{\"expr\":\"lambda t phi phi\",\"tokens\":[{\"Lambda\":{\"t\":\"t\",\"var\":{\"Bound\":\"phi\"}}},{\"Var\":{\"Bound\":\"phi\"}}],\"movers\":{}}}}}],\"head_movement\":[],\"phrasal_movement\":[[\"011110\",\"0110\"]]}"
            );
        }
        Ok(())
    }
331
    #[test]
    fn qr_test() -> anyhow::Result<()> {
        //Quantifier raising: generates sentences with "everyone"/"someone" and
        //checks that each sentence yields both scope readings (surface and
        //inverse), first for an SOV grammar, then for an SVO grammar that uses
        //head movement (`<=` feature specs).
        let config: ParsingConfig = ParsingConfig::new(
            LogProb::new(-256.0).unwrap(),
            LogProb::from_raw_prob(0.5).unwrap(),
            100,
            1000,
        );
        let lexical = [
            "everyone::d -k -q::lambda <a,t> P (every(x, all_a, P(x)))",
            "someone::d -k -q::lambda <a,t> P (some(x, all_a, P(x)))",
            "likes::d= V::lambda a x (lambda a y (some_e(e, all_e, AgentOf(y, e)&pe_likes(e)&PatientOf(x, e))))",
            "::v= +k +q t::lambda t x (x)",
            "::V= +k d= +q v::lambda <a,t> p (p)",
        ];

        let lexicon = lexical.join("\n");
        let lex = SemanticLexicon::parse(&lexicon).unwrap();

        //Each generated sentence is followed by its distinct interpretations,
        //one per line.
        let mut v = vec![];
        for (_, s, rules) in lex
            .lexicon
            .generate("t", &config)
            .map_err(|e| e.inner_into::<String>())?
            .take(10)
        {
            let mut s = PhonContent::try_flatten(s)?.join(" ");
            for interpretation in rules
                .to_interpretation(&lex)
                .map(|(pool, _)| pool.into_pool().unwrap().to_string())
                .unique()
            {
                s.push('\n');
                s.push_str(&interpretation);
            }
            println!("{s}");
            v.push(s);
        }
        assert_eq!(
            vec![
                "someone someone likes\nsome(x, all_a, some(y, all_a, some_e(z, all_e, AgentOf(x, z) & pe_likes(z) & PatientOf(y, z))))\nsome(x, all_a, some(y, all_a, some_e(z, all_e, AgentOf(y, z) & pe_likes(z) & PatientOf(x, z))))",
                "someone everyone likes\nsome(x, all_a, every(y, all_a, some_e(z, all_e, AgentOf(x, z) & pe_likes(z) & PatientOf(y, z))))\nevery(x, all_a, some(y, all_a, some_e(z, all_e, AgentOf(y, z) & pe_likes(z) & PatientOf(x, z))))",
                "everyone everyone likes\nevery(x, all_a, every(y, all_a, some_e(z, all_e, AgentOf(x, z) & pe_likes(z) & PatientOf(y, z))))\nevery(x, all_a, every(y, all_a, some_e(z, all_e, AgentOf(y, z) & pe_likes(z) & PatientOf(x, z))))",
                "everyone someone likes\nevery(x, all_a, some(y, all_a, some_e(z, all_e, AgentOf(x, z) & pe_likes(z) & PatientOf(y, z))))\nsome(x, all_a, every(y, all_a, some_e(z, all_e, AgentOf(y, z) & pe_likes(z) & PatientOf(x, z))))"
            ],
            v
        );
        println!("sov good");

        //Same lexicon but with head-movement (`<=`) variants of the silent
        //heads, giving SVO word order.
        let lexical = [
            "everyone::d -k -q::lambda <a,t> P (every(x, all_a, P(x)))",
            "someone::d -k -q::lambda <a,t> P (some(x, all_a, P(x)))",
            "likes::d= V::lambda a x (lambda a y (some_e(e, all_e, AgentOf(y, e)&pe_likes(e)&PatientOf(x, e))))",
            "::v<= +k +q t::lambda t x (x)",
            "::V<= +k d= +q v::lambda <a,t> p (p)",
        ];

        let lexicon = lexical.join("\n");
        let lex = SemanticLexicon::parse(&lexicon)?;

        let mut v = vec![];
        for (_, s, rules) in lex
            .lexicon
            .generate("t", &config)
            .map_err(|e| e.inner_into::<String>())?
            .take(10)
        {
            let mut s = PhonContent::flatten(s).join(" ");
            println!("{s:?}");
            for interpretation in rules
                .to_interpretation(&lex)
                .map(|(pool, _)| {
                    println!("{pool}");
                    pool.into_pool().unwrap().to_string()
                })
                .unique()
            {
                s.push('\n');
                s.push_str(&interpretation);
            }
            println!("{s}");
            v.push(s);
        }
        for (a, b) in vec![
                "someone likes someone\nsome(x, all_a, some(y, all_a, some_e(z, all_e, AgentOf(x, z) & pe_likes(z) & PatientOf(y, z))))\nsome(x, all_a, some(y, all_a, some_e(z, all_e, AgentOf(y, z) & pe_likes(z) & PatientOf(x, z))))",
                "someone likes everyone\nsome(x, all_a, every(y, all_a, some_e(z, all_e, AgentOf(x, z) & pe_likes(z) & PatientOf(y, z))))\nevery(x, all_a, some(y, all_a, some_e(z, all_e, AgentOf(y, z) & pe_likes(z) & PatientOf(x, z))))",
                "everyone likes everyone\nevery(x, all_a, every(y, all_a, some_e(z, all_e, AgentOf(x, z) & pe_likes(z) & PatientOf(y, z))))\nevery(x, all_a, every(y, all_a, some_e(z, all_e, AgentOf(y, z) & pe_likes(z) & PatientOf(x, z))))",
                "everyone likes someone\nevery(x, all_a, some(y, all_a, some_e(z, all_e, AgentOf(x, z) & pe_likes(z) & PatientOf(y, z))))\nsome(x, all_a, every(y, all_a, some_e(z, all_e, AgentOf(y, z) & pe_likes(z) & PatientOf(x, z))))",
            ].into_iter().zip(v) {
            assert_eq!(a,b)
        }

        #[cfg(feature = "pretty")]
        {
            //Check the LaTeX derivation trees for one parse of
            //"everyone likes someone".
            let (_, _, rules) = lex
                .lexicon
                .parse(
                    &PhonContent::from(["everyone", "likes", "someone"]),
                    "t",
                    &config,
                )
                .map_err(|e| e.inner_into::<String>())?
                .next()
                .unwrap();

            let (_, mut history) = rules.to_interpretation(&lex).next().unwrap();
            let latex = lex
                .derivation(rules.clone(), history.clone())
                .tree()
                .latex();

            println!("{latex}");
            assert_eq!(
                latex,
                "\\begin{forest}[\\semder{t}{\\textsc{ApplyFromStorage}} [\\lex{d -k -q}{everyone}{\\textsc{LexicalEntry}}] [\\semder{+q t}{\\textsc{UpdateTrace}} [$t_0$] [\\semder{+k +q t}{\\textsc{FA}} [\\lex{v<= +k +q t}{$\\epsilon$-$\\epsilon$-likes}{\\textsc{LexicalEntry}}] [\\semder{v}{\\textsc{ApplyFromStorage}} [\\lex{d -k -q}{someone}{\\textsc{LexicalEntry}}] [\\semder{+q v}{\\textsc{Store}} [\\semder{d= +q v}{\\textsc{UpdateTrace}} [$t_2$] [\\semder{+k d= +q v}{\\textsc{FA}} [\\lex{V<= +k d= +q v}{$\\epsilon$-likes}{\\textsc{LexicalEntry}}] [\\semder{V}{\\textsc{Store}} [\\lex{d= V}{likes}{\\textsc{LexicalEntry}}] [$t_3$]]]] [$t_1$]]]]]]\\end{forest}"
            );

            history = history.into_rich(&lex, &rules);
            let latex = lex.derivation(rules, history).tree().latex();
            println!("{latex}");
            assert_eq!(
                latex,
                "\\begin{forest}[\\semder{t}{\\texttt{every(x, all\\_a, some(y, all\\_a, some\\_e(z, all\\_e, AgentOf(x, z) \\& pe\\_likes(z) \\& PatientOf(y, z))))}} [\\lex{d -k -q}{everyone}{\\texttt{{$\\lambda_{\\left\\langle a,t\\right\\rangle }$}P every(x, all\\_a, P(x))}}] [\\semder{+q t}{\\texttt{some(x, all\\_a, some\\_e(y, all\\_e, AgentOf(0\\#a, y) \\& pe\\_likes(y) \\& PatientOf(x, y)))}} [$t_0$] [\\semder{+k +q t}{\\texttt{some(x, all\\_a, some\\_e(y, all\\_e, AgentOf(1\\#a, y) \\& pe\\_likes(y) \\& PatientOf(x, y)))}} [\\lex{v<= +k +q t}{$\\epsilon$-$\\epsilon$-likes}{\\texttt{{$\\lambda_{t}$}phi phi}}] [\\semder{v}{\\texttt{some(x, all\\_a, some\\_e(y, all\\_e, AgentOf(1\\#a, y) \\& pe\\_likes(y) \\& PatientOf(x, y)))}} [\\lex{d -k -q}{someone}{\\texttt{{$\\lambda_{\\left\\langle a,t\\right\\rangle }$}P some(x, all\\_a, P(x))}}] [\\semder{+q v}{\\texttt{some\\_e(x, all\\_e, AgentOf(1\\#a, x) \\& pe\\_likes(x) \\& PatientOf(2\\#a, x))}} [\\semder{d= +q v}{\\texttt{{$\\lambda_{a}$}x some\\_e(y, all\\_e, AgentOf(x, y) \\& pe\\_likes(y) \\& PatientOf(2\\#a, y))}} [$t_2$] [\\semder{+k d= +q v}{\\texttt{{$\\lambda_{a}$}x some\\_e(y, all\\_e, AgentOf(x, y) \\& pe\\_likes(y) \\& PatientOf(3\\#a, y))}} [\\lex{V<= +k d= +q v}{$\\epsilon$-likes}{\\texttt{{$\\lambda_{\\left\\langle a,t\\right\\rangle }$}P P}}] [\\semder{V}{\\texttt{{$\\lambda_{a}$}x some\\_e(y, all\\_e, AgentOf(x, y) \\& pe\\_likes(y) \\& PatientOf(3\\#a, y))}} [\\lex{d= V}{likes}{\\texttt{{$\\lambda_{a}$}x {$\\lambda_{a}$}y some\\_e(z, all\\_e, AgentOf(y, z) \\& pe\\_likes(z) \\& PatientOf(x, z))}}] [$t_3$]]]] [$t_1$]]]]]]\\end{forest}"
            );
        }
        Ok(())
    }
459
    #[test]
    fn obscure_error_with_rich() -> anyhow::Result<()> {
        //Regression test: for every parse of "John ran" under this small
        //grammar, converting the interpretation into a pool and the history
        //into its rich form must not error. No output values are asserted.
        let grammar = "ε::0= =2 +1 0::lambda a x (pa_0(x))
ran::2::lambda t x (a_1)
John::0 -1::a_1";

        let lexicon = SemanticLexicon::parse(grammar)?;
        for (_, _, r) in lexicon.lexicon.parse(
            &PhonContent::from(["John", "ran"]),
            "0",
            &ParsingConfig::default(),
        )? {
            for (pool, h) in r.to_interpretation(&lexicon) {
                pool.into_pool()?;
                h.into_rich(&lexicon, &r);
            }
        }
        Ok(())
    }
479
480    #[test]
481    fn homophony() -> anyhow::Result<()> {
482        let grammar = [
483            "everyone::d -k -q::lambda <a,<e,t>> P lambda <<e,t>, t> Q every(x, all_a, Q(P(x)))",
484            "everyone::d -k -q::lambda <a,t> P every(x, all_a, P(x))",
485        ]
486        .join("\n");
487
488        let lexicon = SemanticLexicon::parse(grammar.as_str())?;
489        assert_eq!(lexicon.interpretations().len(), 2);
490        Ok(())
491    }
492
493    #[test]
494    fn merge_non_lambdas() -> anyhow::Result<()> {
495        let grammar = "a::=1 0::pa_man\nb::1::a_john";
496
497        let lexicon = SemanticLexicon::parse(grammar)?;
498        for (_, _, r) in lexicon.lexicon.parse(
499            &PhonContent::from(["b", "a"]),
500            "0",
501            &ParsingConfig::default(),
502        )? {
503            println!("{r:?}");
504            let (pool, _) = r.to_interpretation(&lexicon).next().unwrap();
505            assert_eq!(pool.to_string(), "pa_man(a_john)");
506        }
507        Ok(())
508    }
509
510    #[test]
511    fn complicated_intransitives() -> anyhow::Result<()> {
512        /*
513        let grammar = "runs::=ag V::lambda e x pe_runs(x)\n::d= ag -ag::lambda a x lambda e y AgentOf(x,y)\nJohn::d::a_John\n::V= v::lambda <e,t> P P\n::v= +ag t::lambda <e,t> P lambda <e,t> Q some_e(e, P(e), Q(e))";
514
515        let lexicon = SemanticLexicon::parse(grammar)?;
516        let mut i = 0;
517        for (_, _, r) in lexicon.lexicon.parse(
518            &PhonContent::from(["John", "runs"]),
519            "t",
520            &ParsingConfig::default(),
521        )? {
522            i += 1;
523            let (pool, h) = r.to_interpretation(&lexicon).next().unwrap();
524            h.into_rich(&lexicon, &r);
525            assert_eq!(
526                pool.to_string(),
527                "some_e(x, pe_runs(x), AgentOf(a_John, x))"
528            );
529        }
530        assert_eq!(i, 1);*/
531
532        let x = RootedLambdaPool::parse(
533            "lambda <<e,t>, <<e,t>, t>> G G(lambda e y pe_loves(y), lambda e y PatientOf(a_John, y))",
534        )?;
535        let y = RootedLambdaPool::parse(
536            "lambda <<<e,t>, <<e,t>, t>>, t> Z lambda <<e,t>, <<e,t>, t>> G Z(lambda <e,t> P lambda <e,t> Q G(P , lambda e x Q(x) & AgentOf(a_John,x)))",
537        )?;
538
539        let mut z = x.merge(y).unwrap();
540        z.reduce()?;
541        println!("{z}");
542
543        let grammar = [
544            "loves::=d V::lambda a x lambda e y pe_loves(y) & PatientOf(x, y)",
545            "someone::d -k -q::lambda <a,<e,t>> P lambda <<e,t>, t> Q some(x, all_a, Q(P(x)))",
546            "everyone::d -k -q::lambda <a,<e,t>> P lambda <<e,t>, t> Q every(x, all_a, Q(P(x)))",
547            "someone::d -k -q::lambda <a,t> P some(x, all_a, P(x))",
548            "everyone::d -k -q::lambda <a,t> P every(x, all_a, P(x))",
549            "Mary::d -k -q::a_Mary",
550            "John::d -k -q::a_John",
551            "::V<= +k =d +q v::lambda <e,t> P lambda a x lambda e y P(y) & AgentOf(x, y)",
552            "::v<= +k +q t::lambda <e,t> P some_e(e, True, P(e))",
553        ]
554        .join("\n");
555
556        let lexicon = SemanticLexicon::parse(grammar.as_str())?;
557        dbg!(lexicon.interpretations().len());
558        let s = PhonContent::from(["someone", "loves", "everyone"]);
559        for (_, _, r) in lexicon
560            .lexicon
561            .parse(&s, "t", &ParsingConfig::default())
562            .unwrap()
563        {
564            for (pool, _) in r.to_interpretation(&lexicon) {
565                println!("{pool}");
566            }
567        }
568        Ok(())
569    }
570
571    #[test]
572    fn iota_test() -> anyhow::Result<()> {
573        let grammar = [
574            "John::d -k::a_j",
575            "Mary::d -k::a_m",
576            "the::n= d -k::lambda <a,t> P iota(x, P(x))",
577            "vase::n::lambda a x pa_vase(x)",
578            "see::d= V::lambda e x pe_see(x)",
579            "::=>V +k agrO::lambda a x lambda e y PatientOf(x, y)",
580            "::=>agrO v::lambda <e,t> phi phi",
581            "::=>v =d voice::lambda a x lambda e y AgentOf(x, y)",
582            "s::=>voice +k t::lambda <e,t> P some_e(x, all_e, P(x) & Habitual#<e,t>(x))",
583        ]
584        .join("\n");
585
586        let lexicon = SemanticLexicon::parse(&grammar)?;
587        let s = [
588            PhonContent::Normal("John"),
589            PhonContent::Affixed(vec!["see", "s"]),
590            PhonContent::Normal("the"),
591            PhonContent::Normal("vase"),
592        ];
593        for (_, _, r) in lexicon
594            .lexicon
595            .parse(&s, "t", &ParsingConfig::default())
596            .unwrap()
597        {
598            for (pool, _) in r.to_interpretation(&lexicon) {
599                assert_eq!(
600                    pool.to_string(),
601                    "some_e(x, all_e, AgentOf(a_j, x) & PatientOf(iota(y, pa_vase(y)), x) & pe_see(x) & Habitual#<e,t>(x))"
602                )
603            }
604        }
605
606        Ok(())
607    }
608}