Fix invalid indices when content is not long enough
Signed-off-by: Marcel Müller <neikos@neikos.email>
This commit is contained in:
parent
fa1582f3ad
commit
462355b6f2
4 changed files with 25 additions and 7 deletions
|
|
@ -123,14 +123,16 @@ fn emit_ast_expr(
|
|||
{
|
||||
previous_post_whitespace_content = post_whitespace_content;
|
||||
if let Some(ws) = prev_whitespace_content {
|
||||
let idx = end_indices.last().copied();
|
||||
eval.insert(
|
||||
eval.len() - 2,
|
||||
idx.unwrap_or(eval.len()),
|
||||
Instruction::AppendContent {
|
||||
content: ws.source().clone(),
|
||||
},
|
||||
);
|
||||
let index_index = end_indices.len() - 1;
|
||||
end_indices[index_index] += 1;
|
||||
if let Some(idx) = end_indices.last_mut() {
|
||||
*idx += 1;
|
||||
}
|
||||
}
|
||||
|
||||
if let TemplateAstExpr::IfConditional { expression } = &**expression {
|
||||
|
|
|
|||
|
|
@ -383,10 +383,10 @@ fn parse_block_token<'input>(input: &mut Input<'input>) -> PResult<'input, Templ
|
|||
"parse_block_token",
|
||||
alt((
|
||||
parse_ident,
|
||||
parse_literal,
|
||||
parse_condition_if,
|
||||
parse_condition_else,
|
||||
parse_end,
|
||||
terminated(parse_literal, ident_terminator_check),
|
||||
terminated(parse_condition_if, ident_terminator_check),
|
||||
terminated(parse_condition_else, ident_terminator_check),
|
||||
terminated(parse_end, ident_terminator_check),
|
||||
parse_whitespace,
|
||||
)),
|
||||
)
|
||||
|
|
|
|||
14
tests/checks.rs
Normal file
14
tests/checks.rs
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
/// Smoke test: every fixture under `tests/checks/` must survive the full
/// pipeline (raw text -> token parse -> AST -> machine emit) without
/// erroring or panicking.
///
/// Each stage panics with the offending fixture's path so a CI failure
/// points directly at the file that broke, instead of an anonymous
/// `unwrap()` backtrace.
#[test]
fn check_files() {
    let entries = std::fs::read_dir("tests/checks/")
        .expect("tests/checks/ directory must exist");

    for entry in entries {
        let path = entry.expect("readable directory entry").path();

        // `read_dir` also yields sub-directories (and other non-file
        // entries); reading those as a string would fail and abort the
        // whole suite, so skip anything that is not a regular file.
        if !path.is_file() {
            continue;
        }

        let input = std::fs::read_to_string(&path)
            .unwrap_or_else(|e| panic!("failed to read {}: {e}", path.display()));

        // `{e:?}` is available because the original `.unwrap()` calls
        // already required these error types to implement `Debug`.
        let parsed = nomo::parser::parse(input.into())
            .unwrap_or_else(|e| panic!("token parse failed for {}: {e:?}", path.display()));

        let ast = nomo::ast::parse(parsed.tokens())
            .unwrap_or_else(|e| panic!("ast parse failed for {}: {e:?}", path.display()));

        // Emission must not panic; the result itself is not inspected here.
        let _emit = nomo::emit::emit_machine(ast);
    }
}
|
||||
|
|
@ -0,0 +1,2 @@
|
|||
|
||||
{{if en}}{{ end}}
|
||||
Loading…
Add table
Add a link
Reference in a new issue