mirror of
https://github.com/danog/parser.git
synced 2024-11-26 20:04:57 +01:00
parser: support group use statements
This commit is contained in:
parent
fa820ba296
commit
a99b62155a
@ -304,6 +304,11 @@ pub enum Statement {
|
||||
uses: Vec<Use>,
|
||||
kind: UseKind,
|
||||
},
|
||||
GroupUse {
|
||||
prefix: Identifier,
|
||||
kind: UseKind,
|
||||
uses: Vec<Use>,
|
||||
},
|
||||
Comment {
|
||||
comment: ByteString,
|
||||
},
|
||||
|
@ -224,6 +224,36 @@ impl Parser {
|
||||
_ => UseKind::Normal,
|
||||
};
|
||||
|
||||
if self.peek.kind == TokenKind::LeftBrace {
|
||||
let prefix = self.full_name()?;
|
||||
self.next();
|
||||
|
||||
let mut uses = Vec::new();
|
||||
while self.current.kind != TokenKind::RightBrace {
|
||||
let name = self.full_name()?;
|
||||
let mut alias = None;
|
||||
|
||||
if self.current.kind == TokenKind::As {
|
||||
self.next();
|
||||
alias = Some(self.ident()?.into());
|
||||
}
|
||||
|
||||
uses.push(Use {
|
||||
name: name.into(),
|
||||
alias,
|
||||
});
|
||||
|
||||
if self.current.kind == TokenKind::Comma {
|
||||
self.next();
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
self.rbrace()?;
|
||||
self.semi()?;
|
||||
|
||||
Statement::GroupUse { prefix: prefix.into(), kind, uses }
|
||||
} else {
|
||||
let mut uses = Vec::new();
|
||||
while !self.is_eof() {
|
||||
let name = self.full_name()?;
|
||||
@ -250,6 +280,7 @@ impl Parser {
|
||||
|
||||
Statement::Use { uses, kind }
|
||||
}
|
||||
}
|
||||
TokenKind::Const => {
|
||||
self.next();
|
||||
|
||||
@ -5299,6 +5330,17 @@ mod tests {
|
||||
]);
|
||||
}
|
||||
|
||||
#[test]
fn simple_group_use() {
    // A PHP group-use statement (`use Foo\{Bar, Baz, Car};`) should parse
    // into a single `Statement::GroupUse` whose prefix keeps the trailing
    // namespace separator and whose members carry no aliases.
    let expected_uses: Vec<Use> = ["Bar", "Baz", "Car"]
        .into_iter()
        .map(|name| Use { name: name.into(), alias: None })
        .collect();

    assert_ast(
        "<?php use Foo\\{Bar, Baz, Car};",
        &[Statement::GroupUse {
            prefix: "Foo\\".into(),
            kind: crate::UseKind::Normal,
            uses: expected_uses,
        }],
    );
}
|
||||
|
||||
fn assert_ast(source: &str, expected: &[Statement]) {
|
||||
let mut lexer = Lexer::new(None);
|
||||
let tokens = lexer.tokenize(source).unwrap();
|
||||
|
Loading…
Reference in New Issue
Block a user