diff --git a/trunk_lexer/src/lexer.rs b/trunk_lexer/src/lexer.rs
index 7701b6a..f5b5f29 100644
--- a/trunk_lexer/src/lexer.rs
+++ b/trunk_lexer/src/lexer.rs
@@ -134,6 +134,8 @@ impl Lexer {
                         buffer.push('?');
                     }
                 } else {
+                    self.next();
+                    self.col += 1;
 
                     buffer.push(char);
 
@@ -203,6 +205,7 @@ impl Lexer {
                 // This is a close tag, we can enter "Initial" mode again.
                 if let Some('>') = self.peek {
                     self.next();
+                    self.next();
 
                     self.col += 2;
 
@@ -911,6 +914,18 @@ mod tests {
         assert_tokens("<?php ?>", &[open!(), TokenKind::CloseTag]);
     }
 
+    #[test]
+    fn close_tag_followed_by_content() {
+        assert_tokens(
+            "<?php ?> ",
+            &[
+                open!(),
+                TokenKind::CloseTag,
+                TokenKind::InlineHtml(" ".into()),
+            ],
+        );
+    }
+
     #[test]
     fn inline_html() {
         assert_tokens(
diff --git a/trunk_parser/src/parser/mod.rs b/trunk_parser/src/parser/mod.rs
index 62befe7..3cd7ce2 100644
--- a/trunk_parser/src/parser/mod.rs
+++ b/trunk_parser/src/parser/mod.rs
@@ -5,6 +5,7 @@ use crate::{
     },
     Block, Case, Catch, Expression, Identifier, MatchArm, Program, Statement, Type,
 };
+use core::panic;
 use std::{fmt::Display, vec::IntoIter};
 use trunk_lexer::{Span, Token, TokenKind};
 
@@ -89,7 +90,10 @@ impl Parser {
         let mut ast = Program::new();
 
         while self.current.kind != TokenKind::Eof {
-            if let TokenKind::OpenTag(_) = self.current.kind {
+            if matches!(
+                self.current.kind,
+                TokenKind::OpenTag(_) | TokenKind::CloseTag
+            ) {
                 self.next();
                 continue;
             }
@@ -3088,6 +3092,14 @@ mod tests {
         )
     }
 
+    #[test]
+    fn close_tag_followed_by_content() {
+        assert_ast(
+            "<?php ?> ",
+            &[Statement::InlineHtml(" ".into())],
+        );
+    }
+
     fn assert_ast(source: &str, expected: &[Statement]) {
         let mut lexer = Lexer::new(None);
         let tokens = lexer.tokenize(source).unwrap();