mirror of https://github.com/danog/parser.git
synced 2024-11-27 04:14:55 +01:00

commit 948c2cf561
Merge pull request #40 from ryangjchandler/feature/error-suppress-expression
@@ -167,6 +167,11 @@ impl Lexer {
         let char = self.current.unwrap();

         let kind = match char {
+            '@' => {
+                self.col += 1;
+
+                TokenKind::At
+            }
             '!' => {
                 self.col += 1;
@@ -17,6 +17,7 @@ pub enum TokenKind {
     ArrayCast,
     Arrow,
     NullsafeArrow,
+    At,
     As,
     Asterisk,
     Attribute,
@@ -376,6 +376,9 @@ pub struct Use {
 #[derive(Debug, PartialEq, Clone, Serialize)]
 pub enum Expression {
     Static,
+    ErrorSuppress {
+        expr: Box<Self>,
+    },
     Increment {
         value: Box<Self>,
     },
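For orientation, here is a minimal standalone sketch (not the crate's real code, whose Expression enum has many more variants) of how a consumer such as a pretty-printer would handle the new variant: because `expr` is boxed, ErrorSuppress nests like any other unary node and is unwrapped by ordinary pattern matching.

// Hypothetical illustration only; the real `Expression` lives in the parser's AST module.
#[derive(Debug)]
enum Expression {
    Identifier { name: String },
    ErrorSuppress { expr: Box<Expression> },
}

// Render a node back to PHP-like source, re-attaching `@` for suppression nodes.
fn print(e: &Expression) -> String {
    match e {
        Expression::Identifier { name } => name.clone(),
        Expression::ErrorSuppress { expr } => format!("@{}", print(expr)),
    }
}

fn main() {
    let node = Expression::ErrorSuppress {
        expr: Box::new(Expression::Identifier { name: "hello".into() }),
    };
    assert_eq!(print(&node), "@hello");
}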
@@ -1993,6 +1993,7 @@ fn is_prefix(op: &TokenKind) -> bool {
         | TokenKind::BoolCast
         | TokenKind::IntCast
         | TokenKind::DoubleCast
+        | TokenKind::At
     )
 }
@@ -2003,8 +2004,9 @@ fn prefix_binding_power(op: &TokenKind) -> u8 {
         | TokenKind::BoolCast
         | TokenKind::IntCast
         | TokenKind::DoubleCast => 101,
-        TokenKind::Minus => 100,
-        TokenKind::Bang => 99,
+        TokenKind::At => 16,
+        TokenKind::Minus => 99,
+        TokenKind::Bang => 98,
         _ => unreachable!(),
     }
 }
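The binding powers above are only meaningful relative to the Pratt-style expression loop that consumes them: the value returned by prefix_binding_power is used as the minimum binding power when parsing the prefix operator's operand, so a low value such as 16 for `@` lets the operand absorb stronger infix operators, while `-` and `!` at 99/98 grab only the nearest operand. The toy parser below illustrates that difference; it is a sketch with made-up token and AST types, assuming the conventional Pratt loop rather than this parser's exact one.

use std::iter::Peekable;
use std::vec::IntoIter;

#[derive(Debug, Clone, PartialEq)]
enum Tok {
    At,    // stand-in for TokenKind::At
    Minus, // stand-in for TokenKind::Minus
    Plus,  // an infix operator to compete against
    Num(i64),
}

#[derive(Debug, PartialEq)]
enum Expr {
    Num(i64),
    Suppress(Box<Expr>),
    Neg(Box<Expr>),
    Add(Box<Expr>, Box<Expr>),
}

// Mirrors the shape of `prefix_binding_power` in the diff: `@` is deliberately weak.
fn prefix_bp(t: &Tok) -> u8 {
    match t {
        Tok::At => 16,
        Tok::Minus => 99,
        _ => unreachable!(),
    }
}

// (left, right) binding power for the only infix operator in this toy grammar.
fn infix_bp(t: &Tok) -> Option<(u8, u8)> {
    match t {
        Tok::Plus => Some((50, 51)),
        _ => None,
    }
}

fn parse(tokens: &mut Peekable<IntoIter<Tok>>, min_bp: u8) -> Expr {
    let first = tokens.next().expect("unexpected end of input");
    let mut lhs = match first {
        Tok::Num(n) => Expr::Num(n),
        Tok::At => Expr::Suppress(Box::new(parse(tokens, prefix_bp(&Tok::At)))),
        Tok::Minus => Expr::Neg(Box::new(parse(tokens, prefix_bp(&Tok::Minus)))),
        t => panic!("unexpected token {:?}", t),
    };

    // Standard Pratt loop: keep consuming infix operators that bind at least as
    // tightly as the caller's floor (`min_bp`).
    while let Some(op) = tokens.peek().cloned() {
        let Some((lbp, rbp)) = infix_bp(&op) else { break };
        if lbp < min_bp {
            break;
        }
        tokens.next();
        let rhs = parse(tokens, rbp);
        lhs = Expr::Add(Box::new(lhs), Box::new(rhs));
    }
    lhs
}

fn main() {
    // `@` (bp 16) is weaker than `+` (bp 50), so `@ 1 + 2` groups as @(1 + 2).
    let mut toks = vec![Tok::At, Tok::Num(1), Tok::Plus, Tok::Num(2)].into_iter().peekable();
    assert_eq!(
        parse(&mut toks, 0),
        Expr::Suppress(Box::new(Expr::Add(
            Box::new(Expr::Num(1)),
            Box::new(Expr::Num(2))
        )))
    );

    // Unary minus (bp 99) is stronger than `+`, so `- 1 + 2` groups as (-1) + 2.
    let mut toks = vec![Tok::Minus, Tok::Num(1), Tok::Plus, Tok::Num(2)].into_iter().peekable();
    assert_eq!(
        parse(&mut toks, 0),
        Expr::Add(
            Box::new(Expr::Neg(Box::new(Expr::Num(1)))),
            Box::new(Expr::Num(2))
        )
    );
}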
@@ -2025,6 +2027,9 @@ fn prefix(op: &TokenKind, rhs: Expression) -> Expression {
             kind: op.into(),
             value: Box::new(rhs),
         },
+        TokenKind::At => Expression::ErrorSuppress {
+            expr: Box::new(rhs),
+        },
         _ => unreachable!(),
     }
 }
@@ -3157,6 +3162,21 @@ mod tests {
         );
     }

+    #[test]
+    fn error_suppress() {
+        assert_ast(
+            "<?php @hello();",
+            &[expr!(Expression::ErrorSuppress {
+                expr: Box::new(Expression::Call {
+                    target: Box::new(Expression::Identifier {
+                        name: "hello".into()
+                    }),
+                    args: vec![],
+                }),
+            })],
+        );
+    }
+
     fn assert_ast(source: &str, expected: &[Statement]) {
         let mut lexer = Lexer::new(None);
         let tokens = lexer.tokenize(source).unwrap();