Mirror of https://github.com/danog/parser.git (synced 2024-11-27 04:14:55 +01:00)
parser/lexer: add support for goto statements
commit 1f0cd664da (parent 8b27266f05)
```diff
@@ -808,6 +808,7 @@ fn identifier_to_keyword(ident: &[u8]) -> Option<TokenKind> {
         b"for" => TokenKind::For,
         b"foreach" => TokenKind::Foreach,
         b"function" => TokenKind::Function,
+        b"goto" => TokenKind::Goto,
         b"if" => TokenKind::If,
         b"include" => TokenKind::Include,
         b"include_once" => TokenKind::IncludeOnce,
```
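The first hunk extends the lexer's keyword table: `identifier_to_keyword` maps the raw bytes of a scanned identifier to a keyword token, and `b"goto"` slots in alphabetically between `function` and `if`. Below is a minimal standalone sketch of this byte-slice lookup, with a stubbed-down `TokenKind` (the real enum is far larger):

```rust
// Stubbed TokenKind; the real enum lives in the parser's token module.
#[derive(Debug, PartialEq)]
enum TokenKind { For, Function, Goto, If }

fn identifier_to_keyword(ident: &[u8]) -> Option<TokenKind> {
    Some(match ident {
        b"for" => TokenKind::For,
        b"function" => TokenKind::Function,
        b"goto" => TokenKind::Goto,
        b"if" => TokenKind::If,
        // Not a keyword: the caller can fall back to a plain identifier token.
        _ => return None,
    })
}

fn main() {
    assert_eq!(identifier_to_keyword(b"goto"), Some(TokenKind::Goto));
    assert_eq!(identifier_to_keyword(b"label"), None);
}
```

Matching directly on `&[u8]` keeps the lookup free of UTF-8 conversion on the lexer's hot path.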
```diff
@@ -91,6 +91,7 @@ pub enum TokenKind {
     Foreach,
     FullyQualifiedIdentifier(ByteString),
     Function,
+    Goto,
     GreaterThan,
     GreaterThanEquals,
     Identifier(ByteString),
```
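`Goto` joins `TokenKind` as a unit variant: keywords carry no payload, while identifier-like variants such as `Identifier(ByteString)` own their bytes. A tiny sketch of how downstream code distinguishes the two shapes (stub types, not the crate's own):

```rust
// Vec<u8> stands in for the crate's ByteString here.
#[derive(Debug, PartialEq, Clone)]
enum TokenKind {
    Goto,
    Identifier(Vec<u8>),
}

fn describe(kind: &TokenKind) -> String {
    match kind {
        TokenKind::Goto => "keyword `goto`".to_string(),
        TokenKind::Identifier(id) => {
            format!("identifier `{}`", String::from_utf8_lossy(id))
        }
    }
}

fn main() {
    println!("{}", describe(&TokenKind::Goto));
    println!("{}", describe(&TokenKind::Identifier(b"a".to_vec())));
}
```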
```diff
@@ -259,6 +260,7 @@ impl Display for TokenKind {
                 return write!(f, "{}", String::from_utf8_lossy(id));
             }
             Self::Function => "function",
+            Self::Goto => "goto",
             Self::GreaterThan => ">",
             Self::GreaterThanEquals => ">=",
             Self::Identifier(id) => {
```
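The `Display` impl gives each token a printable form, useful chiefly for diagnostics. The hunk shows two arm shapes: most arms yield a `&str` that the surrounding code writes out, while payload-carrying arms like `Self::Identifier(id)` `return write!(...)` directly. A stubbed sketch of that same pattern (an assumption about the surrounding code, which the hunk only partially shows):

```rust
use std::fmt::{self, Display};

enum TokenKind {
    Goto,
    GreaterThan,
    Identifier(Vec<u8>),
}

impl Display for TokenKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let s = match self {
            Self::Goto => "goto",
            Self::GreaterThan => ">",
            Self::Identifier(id) => {
                // Identifiers print their own bytes, not a fixed string.
                return write!(f, "{}", String::from_utf8_lossy(id));
            }
        };
        write!(f, "{}", s)
    }
}

fn main() {
    // Typical use: readable parser diagnostics.
    println!("unexpected token `{}`", TokenKind::Goto);
    println!("unexpected token `{}`", TokenKind::Identifier(b"a".to_vec()));
}
```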
```diff
@@ -178,6 +178,9 @@ impl From<&TokenKind> for IncludeKind {
 #[derive(Debug, PartialEq, Clone)]
 pub enum Statement {
     InlineHtml(ByteString),
+    Goto {
+        label: Identifier,
+    },
     HaltCompiler {
         content: Option<ByteString>,
     },
```
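On the AST side, `Statement` gains a `Goto` variant that records only the jump target, mirroring PHP's `goto label;` form, which takes a bare label and nothing else. A sketch of building and consuming the node, with stub types standing in for the crate's `Identifier`:

```rust
// Assumption: Identifier wraps the label text; the real type lives in the
// ast module and uses ByteString.
#[derive(Debug, PartialEq, Clone)]
struct Identifier(String);

#[derive(Debug, PartialEq, Clone)]
enum Statement {
    Goto { label: Identifier },
}

fn main() {
    let stmt = Statement::Goto { label: Identifier("a".into()) };
    // Consumers (printers, compilers) match on the variant to reach the label.
    match stmt {
        Statement::Goto { label } => println!("goto target: {:?}", label),
    }
}
```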
```diff
@@ -171,6 +171,15 @@ impl Parser {
         self.skip_comments();
 
         let statement = match &self.current.kind {
+            TokenKind::Goto => {
+                self.next();
+
+                let label = self.ident()?.into();
+
+                self.semi()?;
+
+                Statement::Goto { label }
+            },
             TokenKind::HaltCompiler => {
                 self.next();
 
```
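The parser change is a straightforward recursive-descent arm: on seeing `TokenKind::Goto` it advances past the keyword, reads the label with `self.ident()?`, requires the terminating semicolon via `self.semi()?`, and yields `Statement::Goto { label }`. A self-contained sketch of that token walk, with simplified stand-ins for the crate's types (the real `Parser` also tracks comments and spans):

```rust
#[derive(Debug, Clone, PartialEq)]
enum TokenKind { Goto, Identifier(String), SemiColon, Eof }

#[derive(Debug)]
enum Statement { Goto { label: String } }

struct Parser { tokens: Vec<TokenKind>, pos: usize }

impl Parser {
    fn current(&self) -> TokenKind { self.tokens[self.pos].clone() }
    fn next(&mut self) { self.pos += 1; }

    fn ident(&mut self) -> Result<String, String> {
        match self.current() {
            TokenKind::Identifier(name) => { self.next(); Ok(name) }
            other => Err(format!("expected identifier, found {:?}", other)),
        }
    }

    fn semi(&mut self) -> Result<(), String> {
        match self.current() {
            TokenKind::SemiColon => { self.next(); Ok(()) }
            other => Err(format!("expected `;`, found {:?}", other)),
        }
    }

    fn statement(&mut self) -> Result<Statement, String> {
        match self.current() {
            TokenKind::Goto => {
                self.next();               // consume the `goto` keyword
                let label = self.ident()?; // the jump target
                self.semi()?;              // statements are `;`-terminated
                Ok(Statement::Goto { label })
            }
            other => Err(format!("unexpected token {:?}", other)),
        }
    }
}

fn main() {
    // Token stream for `goto a;`
    let mut p = Parser {
        tokens: vec![
            TokenKind::Goto,
            TokenKind::Identifier("a".into()),
            TokenKind::SemiColon,
            TokenKind::Eof,
        ],
        pos: 0,
    };
    println!("{:?}", p.statement().unwrap());
}
```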
```diff
@@ -4078,6 +4087,13 @@ mod tests {
         );
     }
 
+    #[test]
+    fn simple_goto() {
+        assert_ast("<?php goto a;", &[
+            Statement::Goto { label: "a".into() }
+        ]);
+    }
+
     fn assert_ast(source: &str, expected: &[Statement]) {
         let mut lexer = Lexer::new(None);
         let tokens = lexer.tokenize(source).unwrap();
```
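The test exercises the pipeline on the smallest possible program. `assert_ast` (its body is truncated in the hunk above) lexes the source and, presumably, parses the token stream and compares the result against the expected slice. A self-contained toy version of that round-trip assertion pattern, using a deliberately tiny stand-in for the real lexer and parser:

```rust
#[derive(Debug, PartialEq)]
enum Statement { Goto { label: String } }

// Toy stand-in for lexer.tokenize + the parser: recognizes only the single
// shape `<?php goto <label>;`. Not the crate's API.
fn parse(source: &str) -> Vec<Statement> {
    let body = source
        .trim_start_matches("<?php")
        .trim()
        .trim_end_matches(';');
    match body.split_whitespace().collect::<Vec<_>>().as_slice() {
        ["goto", label] => vec![Statement::Goto { label: (*label).to_string() }],
        _ => vec![],
    }
}

fn assert_ast(source: &str, expected: &[Statement]) {
    assert_eq!(parse(source), expected);
}

fn main() {
    assert_ast("<?php goto a;", &[Statement::Goto { label: "a".into() }]);
    println!("ok");
}
```

The real helper of course drives the crate's `Lexer` and parser; the stub only illustrates the shape of the assertion.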