use ahash::HashMap;
use simple_semantics::language::LambdaParseError;
use std::fmt::Debug;

use crate::{ParsingConfig, PhonContent};

use super::*;

use itertools::Itertools;
use simple_semantics::LanguageExpression;
use simple_semantics::lambda::RootedLambdaPool;
use simple_semantics::language::Expr;

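/// A syntactic [`Lexicon`] paired with a semantic interpretation for each of its
/// lexical entries, stored as a [`RootedLambdaPool`] of [`Expr`] keyed by [`LexemeId`].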
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct SemanticLexicon<'src, T: Eq, Category: Eq> {
    lexicon: Lexicon<T, Category>,
    semantic_entries: HashMap<LexemeId, RootedLambdaPool<'src, Expr<'src>>>,
}

impl<'src, T: Eq, C: Eq> SemanticLexicon<'src, T, C> {
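    /// Builds a [`SemanticLexicon`] from an existing lexicon and a map assigning a
    /// lambda expression to each lexeme.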
    #[must_use]
    pub fn new(
        lexicon: Lexicon<T, C>,
        semantic_entries: HashMap<LexemeId, RootedLambdaPool<'src, Expr<'src>>>,
    ) -> Self {
        SemanticLexicon {
            lexicon,
            semantic_entries,
        }
    }
}

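/// Parser for an entire grammar file: each line holds a lexical entry, a `::`
/// separator, and the lambda term that serves as its denotation.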
#[allow(clippy::type_complexity)]
fn semantic_grammar_parser<'src>() -> impl Parser<
    'src,
    &'src str,
    (
        Lexicon<&'src str, &'src str>,
        Vec<(
            LexemeId,
            Result<RootedLambdaPool<'src, Expr<'src>>, LambdaParseError>,
        )>,
    ),
    extra::Err<Rich<'src, char>>,
> {
    entry_parser()
        .then_ignore(just("::").padded())
        .then(
            any()
                .and_is(newline().not())
                .repeated()
                .to_slice()
                .map(RootedLambdaPool::parse),
        )
        .separated_by(newline())
        .collect::<Vec<_>>()
        .map(|vec| {
            let (lexical_entries, interpretations): (Vec<_>, Vec<_>) = vec.into_iter().unzip();

            let lexicon = Lexicon::new(lexical_entries, false);
            let semantic_entries = lexicon
                .leaves
                .iter()
                .copied()
                .zip(interpretations)
                .collect();

            (lexicon, semantic_entries)
        })
        .then_ignore(end())
}

impl<'src> SemanticLexicon<'src, &'src str, &'src str> {
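    /// Parses a textual grammar, one `entry::lambda-term` definition per line, into a
    /// [`SemanticLexicon`], failing if the grammar or any lambda term is malformed.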
    pub fn parse(s: &'src str) -> Result<Self, LambdaParseError> {
        let (lexicon, semantic_entries) = semantic_grammar_parser().parse(s).into_result()?;

        let semantic_lexicon = SemanticLexicon {
            lexicon,
            semantic_entries: semantic_entries
                .into_iter()
                .map(|(k, v)| v.map(|v| (k, v)))
                .collect::<Result<_, _>>()?,
        };
        Ok(semantic_lexicon)
    }
}

impl<'src, T: Eq + Clone + Debug, C: Eq + Clone + Debug> SemanticLexicon<'src, T, C> {
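    /// Returns the lambda expression associated with `lexeme_id`.
    ///
    /// # Panics
    ///
    /// Panics if the lexeme has no recorded interpretation.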
    #[must_use]
    pub fn interpretation(&self, lexeme_id: LexemeId) -> &RootedLambdaPool<'src, Expr<'src>> {
        self.semantic_entries
            .get(&lexeme_id)
            .expect("There is no semantic entry for that lexeme!")
    }

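    /// Returns a reference to the underlying syntactic lexicon.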
    #[must_use]
    pub fn lexicon(&self) -> &Lexicon<T, C> {
        &self.lexicon
    }

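    /// Returns a mutable reference to the underlying syntactic lexicon.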
    pub fn lexicon_mut(&mut self) -> &mut Lexicon<T, C> {
        &mut self.lexicon
    }

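    /// Returns the map from lexemes to their semantic interpretations.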
    #[must_use]
    pub fn interpretations(&self) -> &HashMap<LexemeId, RootedLambdaPool<'src, Expr<'src>>> {
        &self.semantic_entries
    }

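    /// Returns a mutable reference to the map from lexemes to their semantic
    /// interpretations.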
    pub fn interpretations_mut(
        &mut self,
    ) -> &mut HashMap<LexemeId, RootedLambdaPool<'src, Expr<'src>>> {
        &mut self.semantic_entries
    }

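    /// Returns mutable references to the lexicon and to the interpretation map at the
    /// same time, so that both can be edited under a single borrow.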
    pub fn lexicon_and_interpretations_mut(
        &mut self,
    ) -> (
        &mut Lexicon<T, C>,
        &mut HashMap<LexemeId, RootedLambdaPool<'src, Expr<'src>>>,
    ) {
        (&mut self.lexicon, &mut self.semantic_entries)
    }

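    /// Converts the lemma and category types of the lexicon with the provided mapping
    /// functions; the semantic entries are left unchanged.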
    pub fn remap_lexicon<T2: Eq, C2: Eq>(
        self,
        lemma_map: impl Fn(&T) -> T2,
        category_map: impl Fn(&C) -> C2,
    ) -> SemanticLexicon<'src, T2, C2> {
        let SemanticLexicon {
            lexicon,
            semantic_entries,
        } = self;

        let lexicon = lexicon.remap_lexicon(lemma_map, category_map);

        SemanticLexicon {
            lexicon,
            semantic_entries,
        }
    }

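    /// Parses `sentence` as `category` and yields, for every parse, its probability,
    /// the sentence, and an iterator over the language expressions it denotes;
    /// interpretations that do not reduce to a well-formed pool are skipped.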
    #[allow(clippy::type_complexity)]
    pub fn parse_and_interpret<'a, 'b: 'a>(
        &'a self,
        category: C,
        sentence: &'b [PhonContent<T>],
        config: &'b ParsingConfig,
    ) -> Result<
        impl Iterator<
            Item = (
                LogProb<f64>,
                &'a [PhonContent<T>],
                impl Iterator<Item = LanguageExpression<'src>>,
            ),
        >,
        ParsingError<C>,
    > {
        Ok(self
            .lexicon
            .parse(sentence, category, config)?
            .map(move |(p, s, r)| {
                (
                    p,
                    s,
                    r.to_interpretation(self)
                        .filter_map(|(pool, _)| pool.into_pool().ok())
                        .collect_vec()
                        .into_iter(),
                )
            }))
    }
}

impl<T: Eq, C: Eq> From<SemanticLexicon<'_, T, C>> for Lexicon<T, C> {
    fn from(value: SemanticLexicon<'_, T, C>) -> Self {
        value.lexicon
    }
}

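/// Displays the lexicon in the same line-based `entry::lambda-term` format accepted by
/// [`SemanticLexicon::parse`].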
impl<T, C> Display for SemanticLexicon<'_, T, C>
where
    T: Eq + Display + std::fmt::Debug + Clone,
    C: Eq + Display + std::fmt::Debug + Clone,
{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "{}",
            self.lexicon
                .lexemes()
                .unwrap()
                .iter()
                .zip(self.lexicon.leaves.iter())
                .map(|(l, n)| format!("{l}::{}", self.semantic_entries[n]))
                .join("\n")
        )
    }
}
#[cfg(test)]
mod test {
    use itertools::Itertools;
    use logprob::LogProb;
    use simple_semantics::lambda::RootedLambdaPool;

    use super::SemanticLexicon;
    use crate::{ParsingConfig, PhonContent};

    #[test]
    fn trivial_montague() -> anyhow::Result<()> {
        let config: ParsingConfig = ParsingConfig::new(
            LogProb::new(-256.0).unwrap(),
            LogProb::from_raw_prob(0.5).unwrap(),
            100,
            1000,
        );
        let lexicon = "john::d::a_j\nmary::d::a_m\nlikes::d= =d v::lambda a x (lambda a y (some_e(e, all_e, AgentOf(y, e) & PatientOf(x, e) & pe_likes(e))))";

        let semantic = SemanticLexicon::parse(lexicon)?;
        let (_, _, rules) = semantic
            .lexicon
            .parse(&PhonContent::from(["john", "likes", "mary"]), "v", &config)?
            .next()
            .unwrap();
        let (interpretation, mut history) = rules.to_interpretation(&semantic).next().unwrap();
        let interpretation = interpretation.into_pool()?;

        #[cfg(feature = "pretty")]
        {
            let latex = semantic
                .derivation(rules.clone(), history.clone())
                .tree()
                .latex();
            println!("{latex}");
            assert_eq!(
                latex,
                "\\begin{forest}[\\semder{v}{\\textsc{FA}} [\\lex{d}{john}{\\textsc{LexicalEntry}}] [\\semder{=d v}{\\textsc{FA}} [\\lex{d= =d v}{likes}{\\textsc{LexicalEntry}}] [\\lex{d}{mary}{\\textsc{LexicalEntry}}]]]\\end{forest}"
            );

            history = history.into_rich(&semantic, &rules);
            let latex = semantic.derivation(rules.clone(), history).tree().latex();
            println!("{latex}");
            assert_eq!(
                latex,
                "\\begin{forest}[\\semder{v}{\\texttt{some\\_e(x, all\\_e, AgentOf(a\\_j, x) \\& PatientOf(a\\_m, x) \\& pe\\_likes(x))}} [\\lex{d}{john}{\\texttt{a\\_j}}] [\\semder{=d v}{\\texttt{{$\\lambda_{a}$}x some\\_e(y, all\\_e, AgentOf(x, y) \\& PatientOf(a\\_m, y) \\& pe\\_likes(y))}} [\\lex{d= =d v}{likes}{\\texttt{{$\\lambda_{a}$}x {$\\lambda_{a}$}y some\\_e(z, all\\_e, AgentOf(y, z) \\& PatientOf(x, z) \\& pe\\_likes(z))}}] [\\lex{d}{mary}{\\texttt{a\\_m}}]]]\\end{forest}"
            );
        }
        assert_eq!(
            "some_e(x, all_e, AgentOf(a_j, x) & PatientOf(a_m, x) & pe_likes(x))",
            interpretation.to_string()
        );
        Ok(())
    }

    #[test]
    fn moving_montague() -> anyhow::Result<()> {
        let config: ParsingConfig = ParsingConfig::new(
            LogProb::new(-256.0).unwrap(),
            LogProb::from_raw_prob(0.5).unwrap(),
            100,
            1000,
        );
        let lexicon = "john::d::a_j\nmary::d::a_m\nlikes::d= =d v::lambda a x (lambda a y (some_e(e, all_e, AgentOf(x, e) & PatientOf(y, e) & pe_likes(e))))";
        let lexicon = format!(
            "{lexicon}\n::=v c::lambda t phi (phi)\n::v= +wh c::lambda t phi (phi)\nknows::c= =d v::lambda <a,t> P (lambda a x (P(x)))\nwho::d -wh::lambda <a,t> P (P)",
        );

        let s = lexicon.as_str();
        let semantic = SemanticLexicon::parse(s)?;

        let (_, _, rules) = semantic
            .lexicon()
            .parse(
                &PhonContent::from(["john", "knows", "who", "likes", "mary"]),
                "c",
                &config,
            )
            .map_err(|x| x.inner_into::<String>())?
            .next()
            .unwrap();
        dbg!(&rules);
        let (interpretation, mut history) = rules.to_interpretation(&semantic).next().unwrap();
        let interpretation = interpretation.into_pool()?;
        assert_eq!(
            interpretation.to_string(),
            "some_e(x, all_e, AgentOf(a_m, x) & PatientOf(a_j, x) & pe_likes(x))"
        );
        #[cfg(feature = "pretty")]
        {
            let latex = semantic
                .derivation(rules.clone(), history.clone())
                .tree()
                .latex();

            println!("{latex}");
            assert_eq!(
                latex,
                "\\begin{forest}[\\semder{c}{\\textsc{FA}} [\\semder{v}{\\textsc{FA}} [\\lex{d}{john}{\\textsc{LexicalEntry}}] [\\semder{=d v}{\\textsc{FA}} [\\lex{c= =d v}{knows}{\\textsc{LexicalEntry}}] [\\semder{c}{\\textsc{ApplyFromStorage}} [\\lex{d -wh}{who}{\\textsc{LexicalEntry}}] [\\semder{+wh c}{\\textsc{FA}} [\\lex{v= +wh c}{$\\epsilon$}{\\textsc{LexicalEntry}}] [\\semder{v}{\\textsc{Store}} [$t_0$] [\\semder{=d v}{\\textsc{FA}} [\\lex{d= =d v}{likes}{\\textsc{LexicalEntry}}] [\\lex{d}{mary}{\\textsc{LexicalEntry}}]]]]]]] [\\lex{=v c}{$\\epsilon$}{\\textsc{LexicalEntry}}]]\\end{forest}"
            );

            history = history.into_rich(&semantic, &rules);
            let tree = semantic.derivation(rules, history).tree();
            let latex = tree.latex();
            println!("{latex}");
            assert_eq!(
                latex,
                "\\begin{forest}[\\semder{c}{\\texttt{some\\_e(x, all\\_e, AgentOf(a\\_m, x) \\& PatientOf(a\\_j, x) \\& pe\\_likes(x))}} [\\semder{v}{\\texttt{some\\_e(x, all\\_e, AgentOf(a\\_m, x) \\& PatientOf(a\\_j, x) \\& pe\\_likes(x))}} [\\lex{d}{john}{\\texttt{a\\_j}}] [\\semder{=d v}{\\texttt{{$\\lambda_{a}$}x some\\_e(y, all\\_e, AgentOf(a\\_m, y) \\& PatientOf(x, y) \\& pe\\_likes(y))}} [\\lex{c= =d v}{knows}{\\texttt{{$\\lambda_{\\left\\langle a,t\\right\\rangle }$}P {$\\lambda_{a}$}x P(x)}}] [\\semder{c}{\\texttt{{$\\lambda_{a}$}x some\\_e(y, all\\_e, AgentOf(a\\_m, y) \\& PatientOf(x, y) \\& pe\\_likes(y))}} [\\lex{d -wh}{who}{\\texttt{{$\\lambda_{\\left\\langle a,t\\right\\rangle }$}P P}}] [\\semder{+wh c}{\\texttt{some\\_e(x, all\\_e, AgentOf(a\\_m, x) \\& PatientOf(0\\#a, x) \\& pe\\_likes(x))}} [\\lex{v= +wh c}{$\\epsilon$}{\\texttt{{$\\lambda_{t}$}phi phi}}] [\\semder{v}{\\texttt{some\\_e(x, all\\_e, AgentOf(a\\_m, x) \\& PatientOf(0\\#a, x) \\& pe\\_likes(x))}} [$t_0$] [\\semder{=d v}{\\texttt{{$\\lambda_{a}$}x some\\_e(y, all\\_e, AgentOf(a\\_m, y) \\& PatientOf(x, y) \\& pe\\_likes(y))}} [\\lex{d= =d v}{likes}{\\texttt{{$\\lambda_{a}$}x {$\\lambda_{a}$}y some\\_e(z, all\\_e, AgentOf(x, z) \\& PatientOf(y, z) \\& pe\\_likes(z))}}] [\\lex{d}{mary}{\\texttt{a\\_m}}]]]]]]] [\\lex{=v c}{$\\epsilon$}{\\texttt{{$\\lambda_{t}$}phi phi}}]]\\end{forest}"
            );
            let typst = serde_json::to_string(&tree)?;
            println!("{typst}");
            assert_eq!(
                typst,
330 "{\"tree\":[{\"Node\":{\"features\":[\"c\"],\"movement\":[],\"semantics\":{\"rule\":\"FunctionalApplication\",\"state\":{\"expr\":\"some_e(x, all_e, AgentOf(a_m, x) & PatientOf(a_j, x) & pe_likes(x))\",\"tokens\":[{\"Quantifier\":{\"q\":\"some\",\"var\":{\"Bound\":\"x\"},\"t\":\"e\"}},\"OpenDelim\",{\"Const\":\"all_e\"},\"ArgSep\",{\"Func\":\"AgentOf\"},\"OpenDelim\",{\"Actor\":\"m\"},\"ArgSep\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"PatientOf\"},\"OpenDelim\",{\"Actor\":\"j\"},\"ArgSep\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"likes\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",\"CloseDelim\"],\"movers\":{}}}}},[{\"Node\":{\"features\":[\"v\"],\"movement\":[],\"semantics\":{\"rule\":\"FunctionalApplication\",\"state\":{\"expr\":\"some_e(x, all_e, AgentOf(a_m, x) & PatientOf(a_j, x) & pe_likes(x))\",\"tokens\":[{\"Quantifier\":{\"q\":\"some\",\"var\":{\"Bound\":\"x\"},\"t\":\"e\"}},\"OpenDelim\",{\"Const\":\"all_e\"},\"ArgSep\",{\"Func\":\"AgentOf\"},\"OpenDelim\",{\"Actor\":\"m\"},\"ArgSep\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"PatientOf\"},\"OpenDelim\",{\"Actor\":\"j\"},\"ArgSep\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"likes\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",\"CloseDelim\"],\"movers\":{}}}}},{\"Leaf\":{\"features\":[\"d\"],\"lemma\":{\"Single\":\"john\"},\"semantics\":{\"rule\":{\"Scan\":[2]},\"state\":{\"expr\":\"a_j\",\"tokens\":[{\"Actor\":\"j\"}],\"movers\":{}}}}},[{\"Node\":{\"features\":[\"=d\",\"v\"],\"movement\":[],\"semantics\":{\"rule\":\"FunctionalApplication\",\"state\":{\"expr\":\"lambda a x some_e(y, all_e, AgentOf(a_m, y) & PatientOf(x, y) & pe_likes(y))\",\"tokens\":[{\"Lambda\":{\"t\":\"a\",\"var\":{\"Bound\":\"x\"}}},{\"Quantifier\":{\"q\":\"some\",\"var\":{\"Bound\":\"y\"},\"t\":\"e\"}},\"OpenDelim\",{\"Const\":\"all_e\"},\"ArgSep\",{\"Func\":\"AgentOf\"},\"OpenDelim\",{\"Actor\":\"m\"},\"ArgSep\",{\"Var\":{\"Bound\":\"y\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"PatientOf\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"x\"}},\"ArgSep\",{\"Var\":{\"Bound\":\"y\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"likes\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"y\"}},\"CloseDelim\",\"CloseDelim\"],\"movers\":{}}}}},{\"Leaf\":{\"features\":[\"c=\",\"=d\",\"v\"],\"lemma\":{\"Single\":\"knows\"},\"semantics\":{\"rule\":{\"Scan\":[15]},\"state\":{\"expr\":\"lambda <a,t> P lambda a x P(x)\",\"tokens\":[{\"Lambda\":{\"t\":\"<a,t>\",\"var\":{\"Bound\":\"P\"}}},{\"Lambda\":{\"t\":\"a\",\"var\":{\"Bound\":\"x\"}}},{\"Var\":{\"Bound\":\"P\"}},\"OpenDelim\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\"],\"movers\":{}}}}},[{\"Node\":{\"features\":[\"c\"],\"movement\":[],\"semantics\":{\"rule\":\"ApplyFromStorage\",\"state\":{\"expr\":\"lambda a x some_e(y, all_e, AgentOf(a_m, y) & PatientOf(x, y) & 
pe_likes(y))\",\"tokens\":[{\"Lambda\":{\"t\":\"a\",\"var\":{\"Bound\":\"x\"}}},{\"Quantifier\":{\"q\":\"some\",\"var\":{\"Bound\":\"y\"},\"t\":\"e\"}},\"OpenDelim\",{\"Const\":\"all_e\"},\"ArgSep\",{\"Func\":\"AgentOf\"},\"OpenDelim\",{\"Actor\":\"m\"},\"ArgSep\",{\"Var\":{\"Bound\":\"y\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"PatientOf\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"x\"}},\"ArgSep\",{\"Var\":{\"Bound\":\"y\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"likes\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"y\"}},\"CloseDelim\",\"CloseDelim\"],\"movers\":{}}}}},{\"Leaf\":{\"features\":[\"d\",\"-wh\"],\"lemma\":{\"Single\":\"who\"},\"semantics\":{\"rule\":{\"Scan\":[18]},\"state\":{\"expr\":\"lambda <a,t> P P\",\"tokens\":[{\"Lambda\":{\"t\":\"<a,t>\",\"var\":{\"Bound\":\"P\"}}},{\"Var\":{\"Bound\":\"P\"}}],\"movers\":{}}}}},[{\"Node\":{\"features\":[\"+wh\",\"c\"],\"movement\":[[\"-wh\"]],\"semantics\":{\"rule\":\"FunctionalApplication\",\"state\":{\"expr\":\"some_e(x, all_e, AgentOf(a_m, x) & PatientOf(0#a, x) & pe_likes(x))\",\"tokens\":[{\"Quantifier\":{\"q\":\"some\",\"var\":{\"Bound\":\"x\"},\"t\":\"e\"}},\"OpenDelim\",{\"Const\":\"all_e\"},\"ArgSep\",{\"Func\":\"AgentOf\"},\"OpenDelim\",{\"Actor\":\"m\"},\"ArgSep\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"PatientOf\"},\"OpenDelim\",{\"Var\":{\"Free\":{\"label\":\"0\",\"t\":\"a\",\"anon\":true}}},\"ArgSep\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"likes\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",\"CloseDelim\"],\"movers\":{\"0\":{\"expr\":\"lambda <a,t> P P\",\"tokens\":[{\"Lambda\":{\"t\":\"<a,t>\",\"var\":{\"Bound\":\"P\"}}},{\"Var\":{\"Bound\":\"P\"}}],\"type\":\"a\"}}}}}},{\"Leaf\":{\"features\":[\"v=\",\"+wh\",\"c\"],\"lemma\":{\"Single\":null},\"semantics\":{\"rule\":{\"Scan\":[13]},\"state\":{\"expr\":\"lambda t phi phi\",\"tokens\":[{\"Lambda\":{\"t\":\"t\",\"var\":{\"Bound\":\"phi\"}}},{\"Var\":{\"Bound\":\"phi\"}}],\"movers\":{}}}}},[{\"Node\":{\"features\":[\"v\"],\"movement\":[[\"-wh\"]],\"semantics\":{\"rule\":\"Store\",\"state\":{\"expr\":\"some_e(x, all_e, AgentOf(a_m, x) & PatientOf(0#a, x) & pe_likes(x))\",\"tokens\":[{\"Quantifier\":{\"q\":\"some\",\"var\":{\"Bound\":\"x\"},\"t\":\"e\"}},\"OpenDelim\",{\"Const\":\"all_e\"},\"ArgSep\",{\"Func\":\"AgentOf\"},\"OpenDelim\",{\"Actor\":\"m\"},\"ArgSep\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"PatientOf\"},\"OpenDelim\",{\"Var\":{\"Free\":{\"label\":\"0\",\"t\":\"a\",\"anon\":true}}},\"ArgSep\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"likes\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"x\"}},\"CloseDelim\",\"CloseDelim\"],\"movers\":{\"0\":{\"expr\":\"lambda <a,t> P P\",\"tokens\":[{\"Lambda\":{\"t\":\"<a,t>\",\"var\":{\"Bound\":\"P\"}}},{\"Var\":{\"Bound\":\"P\"}}],\"type\":\"a\"}}}}}},{\"Trace\":{\"trace\":0,\"semantics\":{\"rule\":\"Trace\",\"state\":null}}},[{\"Node\":{\"features\":[\"=d\",\"v\"],\"movement\":[],\"semantics\":{\"rule\":\"FunctionalApplication\",\"state\":{\"expr\":\"lambda a x some_e(y, all_e, AgentOf(a_m, y) & PatientOf(x, y) & 
pe_likes(y))\",\"tokens\":[{\"Lambda\":{\"t\":\"a\",\"var\":{\"Bound\":\"x\"}}},{\"Quantifier\":{\"q\":\"some\",\"var\":{\"Bound\":\"y\"},\"t\":\"e\"}},\"OpenDelim\",{\"Const\":\"all_e\"},\"ArgSep\",{\"Func\":\"AgentOf\"},\"OpenDelim\",{\"Actor\":\"m\"},\"ArgSep\",{\"Var\":{\"Bound\":\"y\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"PatientOf\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"x\"}},\"ArgSep\",{\"Var\":{\"Bound\":\"y\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"likes\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"y\"}},\"CloseDelim\",\"CloseDelim\"],\"movers\":{}}}}},{\"Leaf\":{\"features\":[\"d=\",\"=d\",\"v\"],\"lemma\":{\"Single\":\"likes\"},\"semantics\":{\"rule\":{\"Scan\":[7]},\"state\":{\"expr\":\"lambda a x lambda a y some_e(z, all_e, AgentOf(x, z) & PatientOf(y, z) & pe_likes(z))\",\"tokens\":[{\"Lambda\":{\"t\":\"a\",\"var\":{\"Bound\":\"x\"}}},{\"Lambda\":{\"t\":\"a\",\"var\":{\"Bound\":\"y\"}}},{\"Quantifier\":{\"q\":\"some\",\"var\":{\"Bound\":\"z\"},\"t\":\"e\"}},\"OpenDelim\",{\"Const\":\"all_e\"},\"ArgSep\",{\"Func\":\"AgentOf\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"x\"}},\"ArgSep\",{\"Var\":{\"Bound\":\"z\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"PatientOf\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"y\"}},\"ArgSep\",{\"Var\":{\"Bound\":\"z\"}},\"CloseDelim\",{\"Func\":\"&\"},{\"Func\":\"likes\"},\"OpenDelim\",{\"Var\":{\"Bound\":\"z\"}},\"CloseDelim\",\"CloseDelim\"],\"movers\":{}}}}},{\"Leaf\":{\"features\":[\"d\"],\"lemma\":{\"Single\":\"mary\"},\"semantics\":{\"rule\":{\"Scan\":[3]},\"state\":{\"expr\":\"a_m\",\"tokens\":[{\"Actor\":\"m\"}],\"movers\":{}}}}}]]]]]],{\"Leaf\":{\"features\":[\"=v\",\"c\"],\"lemma\":{\"Single\":null},\"semantics\":{\"rule\":{\"Scan\":[10]},\"state\":{\"expr\":\"lambda t phi phi\",\"tokens\":[{\"Lambda\":{\"t\":\"t\",\"var\":{\"Bound\":\"phi\"}}},{\"Var\":{\"Bound\":\"phi\"}}],\"movers\":{}}}}}],\"head_movement\":[],\"phrasal_movement\":[[\"011110\",\"0110\"]]}"
            );
        }
        Ok(())
    }

    #[test]
    fn qr_test() -> anyhow::Result<()> {
        let config: ParsingConfig = ParsingConfig::new(
            LogProb::new(-256.0).unwrap(),
            LogProb::from_raw_prob(0.5).unwrap(),
            100,
            1000,
        );
        let lexical = [
            "everyone::d -k -q::lambda <a,t> P (every(x, all_a, P(x)))",
            "someone::d -k -q::lambda <a,t> P (some(x, all_a, P(x)))",
            "likes::d= V::lambda a x (lambda a y (some_e(e, all_e, AgentOf(y, e)&pe_likes(e)&PatientOf(x, e))))",
            "::v= +k +q t::lambda t x (x)",
            "::V= +k d= +q v::lambda <a,t> p (p)",
        ];

        let lexicon = lexical.join("\n");
        let lex = SemanticLexicon::parse(&lexicon).unwrap();

        let mut v = vec![];
        for (_, s, rules) in lex
            .lexicon
            .generate("t", &config)
            .map_err(|e| e.inner_into::<String>())?
            .take(10)
        {
            let mut s = PhonContent::try_flatten(s)?.join(" ");
            for interpretation in rules
                .to_interpretation(&lex)
                .map(|(pool, _)| pool.into_pool().unwrap().to_string())
                .unique()
            {
                s.push('\n');
                s.push_str(&interpretation);
            }
            println!("{s}");
            v.push(s);
        }
        assert_eq!(
            vec![
                "someone someone likes\nsome(x, all_a, some(y, all_a, some_e(z, all_e, AgentOf(x, z) & pe_likes(z) & PatientOf(y, z))))\nsome(x, all_a, some(y, all_a, some_e(z, all_e, AgentOf(y, z) & pe_likes(z) & PatientOf(x, z))))",
                "someone everyone likes\nsome(x, all_a, every(y, all_a, some_e(z, all_e, AgentOf(x, z) & pe_likes(z) & PatientOf(y, z))))\nevery(x, all_a, some(y, all_a, some_e(z, all_e, AgentOf(y, z) & pe_likes(z) & PatientOf(x, z))))",
                "everyone everyone likes\nevery(x, all_a, every(y, all_a, some_e(z, all_e, AgentOf(x, z) & pe_likes(z) & PatientOf(y, z))))\nevery(x, all_a, every(y, all_a, some_e(z, all_e, AgentOf(y, z) & pe_likes(z) & PatientOf(x, z))))",
                "everyone someone likes\nevery(x, all_a, some(y, all_a, some_e(z, all_e, AgentOf(x, z) & pe_likes(z) & PatientOf(y, z))))\nsome(x, all_a, every(y, all_a, some_e(z, all_e, AgentOf(y, z) & pe_likes(z) & PatientOf(x, z))))",
            ],
            v
        );
        println!("sov good");

        let lexical = [
            "everyone::d -k -q::lambda <a,t> P (every(x, all_a, P(x)))",
            "someone::d -k -q::lambda <a,t> P (some(x, all_a, P(x)))",
            "likes::d= V::lambda a x (lambda a y (some_e(e, all_e, AgentOf(y, e)&pe_likes(e)&PatientOf(x, e))))",
            "::v<= +k +q t::lambda t x (x)",
            "::V<= +k d= +q v::lambda <a,t> p (p)",
        ];

        let lexicon = lexical.join("\n");
        let lex = SemanticLexicon::parse(&lexicon)?;

        let mut v = vec![];
        for (_, s, rules) in lex
            .lexicon
            .generate("t", &config)
            .map_err(|e| e.inner_into::<String>())?
            .take(10)
        {
            let mut s = PhonContent::flatten(s).join(" ");
            println!("{s:?}");
            for interpretation in rules
                .to_interpretation(&lex)
                .map(|(pool, _)| {
                    println!("{pool}");
                    pool.into_pool().unwrap().to_string()
                })
                .unique()
            {
                s.push('\n');
                s.push_str(&interpretation);
            }
            println!("{s}");
            v.push(s);
        }
        for (a, b) in vec![
            "someone likes someone\nsome(x, all_a, some(y, all_a, some_e(z, all_e, AgentOf(x, z) & pe_likes(z) & PatientOf(y, z))))\nsome(x, all_a, some(y, all_a, some_e(z, all_e, AgentOf(y, z) & pe_likes(z) & PatientOf(x, z))))",
            "someone likes everyone\nsome(x, all_a, every(y, all_a, some_e(z, all_e, AgentOf(x, z) & pe_likes(z) & PatientOf(y, z))))\nevery(x, all_a, some(y, all_a, some_e(z, all_e, AgentOf(y, z) & pe_likes(z) & PatientOf(x, z))))",
            "everyone likes everyone\nevery(x, all_a, every(y, all_a, some_e(z, all_e, AgentOf(x, z) & pe_likes(z) & PatientOf(y, z))))\nevery(x, all_a, every(y, all_a, some_e(z, all_e, AgentOf(y, z) & pe_likes(z) & PatientOf(x, z))))",
            "everyone likes someone\nevery(x, all_a, some(y, all_a, some_e(z, all_e, AgentOf(x, z) & pe_likes(z) & PatientOf(y, z))))\nsome(x, all_a, every(y, all_a, some_e(z, all_e, AgentOf(y, z) & pe_likes(z) & PatientOf(x, z))))",
        ].into_iter().zip(v) {
            assert_eq!(a, b)
        }

        #[cfg(feature = "pretty")]
        {
            let (_, _, rules) = lex
                .lexicon
                .parse(
                    &PhonContent::from(["everyone", "likes", "someone"]),
                    "t",
                    &config,
                )
                .map_err(|e| e.inner_into::<String>())?
                .next()
                .unwrap();

            let (_, mut history) = rules.to_interpretation(&lex).next().unwrap();
            let latex = lex
                .derivation(rules.clone(), history.clone())
                .tree()
                .latex();

            println!("{latex}");
            assert_eq!(
                latex,
                "\\begin{forest}[\\semder{t}{\\textsc{ApplyFromStorage}} [\\lex{d -k -q}{everyone}{\\textsc{LexicalEntry}}] [\\semder{+q t}{\\textsc{UpdateTrace}} [$t_0$] [\\semder{+k +q t}{\\textsc{FA}} [\\lex{v<= +k +q t}{$\\epsilon$-$\\epsilon$-likes}{\\textsc{LexicalEntry}}] [\\semder{v}{\\textsc{ApplyFromStorage}} [\\lex{d -k -q}{someone}{\\textsc{LexicalEntry}}] [\\semder{+q v}{\\textsc{Store}} [\\semder{d= +q v}{\\textsc{UpdateTrace}} [$t_2$] [\\semder{+k d= +q v}{\\textsc{FA}} [\\lex{V<= +k d= +q v}{$\\epsilon$-likes}{\\textsc{LexicalEntry}}] [\\semder{V}{\\textsc{Store}} [\\lex{d= V}{likes}{\\textsc{LexicalEntry}}] [$t_3$]]]] [$t_1$]]]]]]\\end{forest}"
            );

            history = history.into_rich(&lex, &rules);
            let latex = lex.derivation(rules, history).tree().latex();
            println!("{latex}");
            assert_eq!(
                latex,
                "\\begin{forest}[\\semder{t}{\\texttt{every(x, all\\_a, some(y, all\\_a, some\\_e(z, all\\_e, AgentOf(x, z) \\& pe\\_likes(z) \\& PatientOf(y, z))))}} [\\lex{d -k -q}{everyone}{\\texttt{{$\\lambda_{\\left\\langle a,t\\right\\rangle }$}P every(x, all\\_a, P(x))}}] [\\semder{+q t}{\\texttt{some(x, all\\_a, some\\_e(y, all\\_e, AgentOf(0\\#a, y) \\& pe\\_likes(y) \\& PatientOf(x, y)))}} [$t_0$] [\\semder{+k +q t}{\\texttt{some(x, all\\_a, some\\_e(y, all\\_e, AgentOf(1\\#a, y) \\& pe\\_likes(y) \\& PatientOf(x, y)))}} [\\lex{v<= +k +q t}{$\\epsilon$-$\\epsilon$-likes}{\\texttt{{$\\lambda_{t}$}phi phi}}] [\\semder{v}{\\texttt{some(x, all\\_a, some\\_e(y, all\\_e, AgentOf(1\\#a, y) \\& pe\\_likes(y) \\& PatientOf(x, y)))}} [\\lex{d -k -q}{someone}{\\texttt{{$\\lambda_{\\left\\langle a,t\\right\\rangle }$}P some(x, all\\_a, P(x))}}] [\\semder{+q v}{\\texttt{some\\_e(x, all\\_e, AgentOf(1\\#a, x) \\& pe\\_likes(x) \\& PatientOf(2\\#a, x))}} [\\semder{d= +q v}{\\texttt{{$\\lambda_{a}$}x some\\_e(y, all\\_e, AgentOf(x, y) \\& pe\\_likes(y) \\& PatientOf(2\\#a, y))}} [$t_2$] [\\semder{+k d= +q v}{\\texttt{{$\\lambda_{a}$}x some\\_e(y, all\\_e, AgentOf(x, y) \\& pe\\_likes(y) \\& PatientOf(3\\#a, y))}} [\\lex{V<= +k d= +q v}{$\\epsilon$-likes}{\\texttt{{$\\lambda_{\\left\\langle a,t\\right\\rangle }$}P P}}] [\\semder{V}{\\texttt{{$\\lambda_{a}$}x some\\_e(y, all\\_e, AgentOf(x, y) \\& pe\\_likes(y) \\& PatientOf(3\\#a, y))}} [\\lex{d= V}{likes}{\\texttt{{$\\lambda_{a}$}x {$\\lambda_{a}$}y some\\_e(z, all\\_e, AgentOf(y, z) \\& pe\\_likes(z) \\& PatientOf(x, z))}}] [$t_3$]]]] [$t_1$]]]]]]\\end{forest}"
            );
        }
        Ok(())
    }

    #[test]
    fn obscure_error_with_rich() -> anyhow::Result<()> {
        let grammar = "ε::0= =2 +1 0::lambda a x (pa_0(x))
ran::2::lambda t x (a_1)
John::0 -1::a_1";

        let lexicon = SemanticLexicon::parse(grammar)?;
        for (_, _, r) in lexicon.lexicon.parse(
            &PhonContent::from(["John", "ran"]),
            "0",
            &ParsingConfig::default(),
        )? {
            for (pool, h) in r.to_interpretation(&lexicon) {
                pool.into_pool()?;
                let _ = h.into_rich(&lexicon, &r);
            }
        }
        Ok(())
    }

    #[test]
    fn homophony() -> anyhow::Result<()> {
        let grammar = [
            "everyone::d -k -q::lambda <a,<e,t>> P lambda <<e,t>, t> Q every(x, all_a, Q(P(x)))",
            "everyone::d -k -q::lambda <a,t> P every(x, all_a, P(x))",
        ]
        .join("\n");

        let lexicon = SemanticLexicon::parse(grammar.as_str())?;
        assert_eq!(lexicon.interpretations().len(), 2);
        Ok(())
    }

    #[test]
    fn merge_non_lambdas() -> anyhow::Result<()> {
        let grammar = "a::=1 0::pa_man\nb::1::a_john";

        let lexicon = SemanticLexicon::parse(grammar)?;
        for (_, _, r) in lexicon.lexicon.parse(
            &PhonContent::from(["b", "a"]),
            "0",
            &ParsingConfig::default(),
        )? {
            println!("{r:?}");
            let (pool, _) = r.to_interpretation(&lexicon).next().unwrap();
            assert_eq!(pool.to_string(), "pa_man(a_john)");
        }
        Ok(())
    }

    #[test]
    fn complicated_intransitives() -> anyhow::Result<()> {
        let x = RootedLambdaPool::parse(
            "lambda <<e,t>, <<e,t>, t>> G G(lambda e y pe_loves(y), lambda e y PatientOf(a_John, y))",
        )?;
        let y = RootedLambdaPool::parse(
            "lambda <<<e,t>, <<e,t>, t>>, t> Z lambda <<e,t>, <<e,t>, t>> G Z(lambda <e,t> P lambda <e,t> Q G(P , lambda e x Q(x) & AgentOf(a_John,x)))",
        )?;

        let mut z = x.merge(y).unwrap();
        z.reduce()?;
        println!("{z}");

        let grammar = [
            "loves::=d V::lambda a x lambda e y pe_loves(y) & PatientOf(x, y)",
            "someone::d -k -q::lambda <a,<e,t>> P lambda <<e,t>, t> Q some(x, all_a, Q(P(x)))",
            "everyone::d -k -q::lambda <a,<e,t>> P lambda <<e,t>, t> Q every(x, all_a, Q(P(x)))",
            "someone::d -k -q::lambda <a,t> P some(x, all_a, P(x))",
            "everyone::d -k -q::lambda <a,t> P every(x, all_a, P(x))",
            "Mary::d -k -q::a_Mary",
            "John::d -k -q::a_John",
            "::V<= +k =d +q v::lambda <e,t> P lambda a x lambda e y P(y) & AgentOf(x, y)",
            "::v<= +k +q t::lambda <e,t> P some_e(e, True, P(e))",
        ]
        .join("\n");

        let lexicon = SemanticLexicon::parse(grammar.as_str())?;
        dbg!(lexicon.interpretations().len());
        let s = PhonContent::from(["someone", "loves", "everyone"]);
        for (_, _, r) in lexicon
            .lexicon
            .parse(&s, "t", &ParsingConfig::default())
            .unwrap()
        {
            for (pool, _) in r.to_interpretation(&lexicon) {
                println!("{pool}");
            }
        }
        Ok(())
    }

    #[test]
    fn iota_test() -> anyhow::Result<()> {
        let grammar = [
            "John::d -k::a_j",
            "Mary::d -k::a_m",
            "the::n= d -k::lambda <a,t> P iota(x, P(x))",
            "vase::n::lambda a x pa_vase(x)",
            "see::d= V::lambda e x pe_see(x)",
            "::=>V +k agrO::lambda a x lambda e y PatientOf(x, y)",
            "::=>agrO v::lambda <e,t> phi phi",
            "::=>v =d voice::lambda a x lambda e y AgentOf(x, y)",
            "s::=>voice +k t::lambda <e,t> P some_e(x, all_e, P(x) & Habitual#<e,t>(x))",
        ]
        .join("\n");

        let lexicon = SemanticLexicon::parse(&grammar)?;
        let s = [
            PhonContent::Normal("John"),
            PhonContent::Affixed(vec!["see", "s"]),
            PhonContent::Normal("the"),
            PhonContent::Normal("vase"),
        ];
        for (_, _, r) in lexicon
            .lexicon
            .parse(&s, "t", &ParsingConfig::default())
            .unwrap()
        {
            for (pool, _) in r.to_interpretation(&lexicon) {
                assert_eq!(
                    pool.to_string(),
                    "some_e(x, all_e, AgentOf(a_j, x) & PatientOf(iota(y, pa_vase(y)), x) & pe_see(x) & Habitual#<e,t>(x))"
                )
            }
        }

        Ok(())
    }
}