1 | | -use crate::generate::grammars::{InlinedProductionMap, Production, ProductionStep, SyntaxGrammar}; |
| 1 | +use crate::generate::{ |
| 2 | + grammars::{InlinedProductionMap, LexicalGrammar, Production, ProductionStep, SyntaxGrammar}, |
| 3 | + rules::SymbolType, |
| 4 | +}; |
| 5 | +use anyhow::{anyhow, Result}; |
2 | 6 | use std::collections::HashMap; |
3 | 7 |
4 | 8 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] |
@@ -181,29 +185,46 @@ impl InlinedProductionMapBuilder { |
181 | 185 | } |
182 | 186 | } |
183 | 187 |
184 | | -pub(super) fn process_inlines(grammar: &SyntaxGrammar) -> InlinedProductionMap { |
185 | | - InlinedProductionMapBuilder { |
| 188 | +pub(super) fn process_inlines( |
| 189 | + grammar: &SyntaxGrammar, |
| 190 | + lexical_grammar: &LexicalGrammar, |
| 191 | +) -> Result<InlinedProductionMap> { |
| 192 | + for symbol in &grammar.variables_to_inline { |
| 193 | + match symbol.kind { |
| 194 | + SymbolType::External => { |
| 195 | + return Err(anyhow!( |
| 196 | + "External token `{}` cannot be inlined", |
| 197 | + grammar.external_tokens[symbol.index].name |
| 198 | + )) |
| 199 | + } |
| 200 | + SymbolType::Terminal => { |
| 201 | + return Err(anyhow!( |
| 202 | + "Token `{}` cannot be inlined", |
| 203 | + lexical_grammar.variables[symbol.index].name, |
| 204 | + )) |
| 205 | + } |
| 206 | + _ => {} |
| 207 | + } |
| 208 | + } |
| 209 | + |
| 210 | + Ok(InlinedProductionMapBuilder { |
186 | 211 | productions: Vec::new(), |
187 | 212 | production_indices_by_step_id: HashMap::new(), |
188 | 213 | } |
189 | | - .build(grammar) |
| 214 | + .build(grammar)) |
190 | 215 | } |
191 | 216 |
192 | 217 | #[cfg(test)] |
193 | 218 | mod tests { |
194 | 219 | use super::*; |
195 | | - use crate::generate::grammars::{ProductionStep, SyntaxVariable, VariableType}; |
| 220 | + use crate::generate::grammars::{ |
| 221 | + LexicalVariable, ProductionStep, SyntaxVariable, VariableType, |
| 222 | + }; |
196 | 223 | use crate::generate::rules::{Associativity, Precedence, Symbol}; |
197 | 224 |
198 | 225 | #[test] |
199 | 226 | fn test_basic_inlining() { |
200 | 227 | let grammar = SyntaxGrammar { |
201 | | - word_token: None, |
202 | | - extra_symbols: vec![], |
203 | | - external_tokens: vec![], |
204 | | - supertype_symbols: vec![], |
205 | | - expected_conflicts: vec![], |
206 | | - precedence_orderings: vec![], |
207 | 228 | variables_to_inline: vec![Symbol::non_terminal(1)], |
208 | 229 | variables: vec![ |
209 | 230 | SyntaxVariable { |
@@ -236,8 +257,10 @@ mod tests { |
236 | 257 | ], |
237 | 258 | }, |
238 | 259 | ], |
| 260 | + ..Default::default() |
239 | 261 | }; |
240 | | - let inline_map = process_inlines(&grammar); |
| 262 | + |
| 263 | + let inline_map = process_inlines(&grammar, &Default::default()).unwrap(); |
241 | 264 |
242 | 265 | // Nothing to inline at step 0. |
243 | 266 | assert!(inline_map |
@@ -330,14 +353,10 @@ mod tests { |
330 | 353 | Symbol::non_terminal(2), |
331 | 354 | Symbol::non_terminal(3), |
332 | 355 | ], |
333 | | - extra_symbols: vec![], |
334 | | - external_tokens: vec![], |
335 | | - supertype_symbols: vec![], |
336 | | - expected_conflicts: vec![], |
337 | | - precedence_orderings: vec![], |
338 | | - word_token: None, |
| 356 | + ..Default::default() |
339 | 357 | }; |
340 | | - let inline_map = process_inlines(&grammar); |
| 358 | + |
| 359 | + let inline_map = process_inlines(&grammar, &Default::default()).unwrap(); |
341 | 360 |
342 | 361 | let productions: Vec<&Production> = inline_map |
343 | 362 | .inlined_productions(&grammar.variables[0].productions[0], 1) |
@@ -433,15 +452,10 @@ mod tests { |
433 | 452 | }], |
434 | 453 | }, |
435 | 454 | ], |
436 | | - extra_symbols: vec![], |
437 | | - external_tokens: vec![], |
438 | | - supertype_symbols: vec![], |
439 | | - expected_conflicts: vec![], |
440 | | - precedence_orderings: vec![], |
441 | | - word_token: None, |
| 455 | + ..Default::default() |
442 | 456 | }; |
443 | 457 |
444 | | - let inline_map = process_inlines(&grammar); |
| 458 | + let inline_map = process_inlines(&grammar, &Default::default()).unwrap(); |
445 | 459 |
446 | 460 | let productions: Vec<_> = inline_map |
447 | 461 | .inlined_productions(&grammar.variables[0].productions[0], 0) |
@@ -490,4 +504,36 @@ mod tests { |
490 | 504 | }], |
491 | 505 | ); |
492 | 506 | } |
| 507 | + |
| 508 | + #[test] |
| 509 | + fn test_error_when_inlining_tokens() { |
| 510 | + let lexical_grammar = LexicalGrammar { |
| 511 | + variables: vec![LexicalVariable { |
| 512 | + name: "something".to_string(), |
| 513 | + kind: VariableType::Named, |
| 514 | + implicit_precedence: 0, |
| 515 | + start_state: 0, |
| 516 | + }], |
| 517 | + ..Default::default() |
| 518 | + }; |
| 519 | + |
| 520 | + let grammar = SyntaxGrammar { |
| 521 | + variables_to_inline: vec![Symbol::terminal(0)], |
| 522 | + variables: vec![SyntaxVariable { |
| 523 | + name: "non-terminal-0".to_string(), |
| 524 | + kind: VariableType::Named, |
| 525 | + productions: vec![Production { |
| 526 | + dynamic_precedence: 0, |
| 527 | + steps: vec![ProductionStep::new(Symbol::terminal(0))], |
| 528 | + }], |
| 529 | + }], |
| 530 | + ..Default::default() |
| 531 | + }; |
| 532 | + |
| 533 | + if let Err(error) = process_inlines(&grammar, &lexical_grammar) { |
| 534 | + assert_eq!(error.to_string(), "Token `something` cannot be inlined"); |
| 535 | + } else { |
| 536 | + panic!("expected an error, but got none"); |
| 537 | + } |
| 538 | + } |
493 | 539 | } |
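
Since `process_inlines` now takes the lexical grammar as a second argument and returns `anyhow::Result<InlinedProductionMap>`, call sites have to pass both grammars and propagate the error rather than using the returned map directly. A minimal caller sketch, assuming a hypothetical `build_inline_map` wrapper in the same module (the real call site in the grammar-preparation pipeline is not part of this diff):

use crate::generate::grammars::{InlinedProductionMap, LexicalGrammar, SyntaxGrammar};
use anyhow::Result;

// Hypothetical wrapper; the actual pipeline function that calls
// process_inlines is not shown in this diff.
fn build_inline_map(
    syntax_grammar: &SyntaxGrammar,
    lexical_grammar: &LexicalGrammar,
) -> Result<InlinedProductionMap> {
    // `?` surfaces the new "cannot be inlined" errors for external tokens
    // and terminals instead of building a map for an invalid grammar.
    let inline_map = process_inlines(syntax_grammar, lexical_grammar)?;
    Ok(inline_map)
}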