mirror of https://github.com/danog/parser.git
parser: support closures returning by ref

parent 630ad85f34
commit 95dbc4416e
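In PHP, a closure returns by reference when `&` sits between the `function` keyword and the parameter list, as in the source exercised by the new test below: `<?php function &() {};`. This commit records that with a new `by_ref: bool` field on the `Closure` AST variant and threads it through the parser. As a minimal, self-contained sketch of what a consumer of the flag can do (stand-in types, not this crate's real `Expression`), a printer might branch on it like so:

// Minimal sketch with stand-in types (not the crate's real AST): the
// `Closure` variant carrying the two boolean modifiers this commit deals
// with, and a printer that renders them the way PHP source spells them.
enum Expression {
    Closure { r#static: bool, by_ref: bool },
}

fn closure_header(e: &Expression) -> String {
    match e {
        Expression::Closure { r#static, by_ref } => format!(
            "{}function {}()",
            if *r#static { "static " } else { "" },
            if *by_ref { "&" } else { "" },
        ),
    }
}

fn main() {
    let c = Expression::Closure { r#static: false, by_ref: true };
    assert_eq!(closure_header(&c), "function &()");
}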
@@ -453,6 +453,7 @@ pub enum Expression {
         return_type: Option<Type>,
         body: Block,
         r#static: bool,
+        by_ref: bool,
     },
     ArrowFunction {
         params: Vec<Param>,
@@ -1630,12 +1630,14 @@ impl Parser {
                 uses,
                 return_type,
                 body,
+                by_ref,
                 ..
             } => Expression::Closure {
                 params,
                 uses,
                 return_type,
                 body,
+                by_ref,
                 r#static: true,
             },
             _ => unreachable!(),
@@ -1644,6 +1646,13 @@ impl Parser {
             TokenKind::Function => {
                 self.next();

+                let by_ref = if self.current.kind == TokenKind::Ampersand {
+                    self.next();
+                    true
+                } else {
+                    false
+                };
+
                 self.lparen()?;

                 let params = self.param_list()?;
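The hunk above uses a standard hand-written-parser idiom: peek at the current token, consume it when it matches, and record whether it was there. A standalone sketch of that pattern, with a hypothetical minimal parser rather than this crate's real one:

// Standalone sketch of the optional-token idiom (hypothetical minimal
// parser, not this crate's real one): peek, consume on match, and record
// whether the token was present.
#[derive(PartialEq)]
enum TokenKind {
    Ampersand,
    Function,
    Eof,
}

struct Parser {
    tokens: Vec<TokenKind>,
    pos: usize,
}

impl Parser {
    fn current(&self) -> &TokenKind {
        self.tokens.get(self.pos).unwrap_or(&TokenKind::Eof)
    }

    fn next(&mut self) {
        self.pos += 1;
    }

    // Consume a leading `&` if present; the returned bool becomes `by_ref`.
    fn eat_ampersand(&mut self) -> bool {
        if *self.current() == TokenKind::Ampersand {
            self.next();
            true
        } else {
            false
        }
    }
}

fn main() {
    let mut p = Parser { tokens: vec![TokenKind::Ampersand, TokenKind::Function], pos: 0 };
    assert!(p.eat_ampersand());  // `&` consumed, flag set
    assert!(!p.eat_ampersand()); // current token is now `Function`
}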
@@ -1715,6 +1724,7 @@ impl Parser {
                     return_type,
                     body,
                     r#static: false,
+                    by_ref,
                 }
             }
             TokenKind::Fn => {
@@ -3758,7 +3768,8 @@ mod tests {
                 uses: vec![],
                 return_type: None,
                 body: vec![],
-                r#static: false
+                r#static: false,
+                by_ref: false,
             })],
         );
     }
@@ -3784,7 +3795,8 @@ mod tests {
                 uses: vec![],
                 return_type: None,
                 body: vec![],
-                r#static: true
+                r#static: true,
+                by_ref: false,
             })],
         );
     }
@@ -3875,6 +3887,20 @@ mod tests {
         }]);
     }

+    #[test]
+    fn closure_returning_ref() {
+        assert_ast("<?php function &() {};", &[
+            expr!(Expression::Closure {
+                params: vec![],
+                body: vec![],
+                return_type: None,
+                r#static: false,
+                uses: vec![],
+                by_ref: true,
+            })
+        ]);
+    }
+
     fn assert_ast(source: &str, expected: &[Statement]) {
         let mut lexer = Lexer::new(None);
         let tokens = lexer.tokenize(source).unwrap();
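A natural follow-up case combines both modifiers: the `Static` arm earlier in the diff destructures `by_ref` out of the inner closure and rebuilds it with `r#static: true`, so `static function &() {}` should produce both flags set. A hypothetical test in the style of the existing ones (not part of this commit):

#[test]
fn static_closure_returning_ref() {
    assert_ast("<?php static function &() {};", &[
        expr!(Expression::Closure {
            params: vec![],
            uses: vec![],
            return_type: None,
            body: vec![],
            r#static: true,
            by_ref: true,
        })
    ]);
}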