Rustfmt lexer

Evan Shaw 2022-09-11 15:53:09 +12:00
parent bf7c95d09a
commit 7d503e0c33


@@ -1,4 +1,4 @@
use crate::{Token, TokenKind, OpenTagKind};
use crate::{OpenTagKind, Token, TokenKind};
#[derive(Debug)]
pub enum LexerState {
@@ -52,7 +52,7 @@ impl Lexer {
// of some description.
LexerState::Initial => {
tokens.append(&mut self.initial()?);
},
}
// The scripting state is entered when an open tag is encountered in the source code.
// This tells the lexer to start analysing characters at PHP tokens instead of inline HTML.
LexerState::Scripting => {
@@ -77,7 +77,7 @@ impl Lexer {
}
tokens.push(self.scripting()?);
},
}
}
}
@@ -107,7 +107,7 @@ impl Lexer {
self.enter_state(LexerState::Scripting);
let mut tokens = vec!();
let mut tokens = vec![];
if !buffer.is_empty() {
tokens.push(Token {
@@ -118,7 +118,7 @@ impl Lexer {
tokens.push(Token {
kind: TokenKind::OpenTag(OpenTagKind::Full),
span: (self.line, self.col)
span: (self.line, self.col),
});
return Ok(tokens);
@@ -138,20 +138,18 @@ impl Lexer {
buffer.push(char);
}
},
}
_ => {
self.next();
buffer.push(char);
},
}
}
}
Ok(vec![
Token {
Ok(vec![Token {
kind: TokenKind::InlineHtml(buffer),
span: (self.line, self.col)
}
])
span: (self.line, self.col),
}])
}
fn scripting(&mut self) -> Result<Token, LexerError> {
@@ -187,7 +185,7 @@ impl Lexer {
} else {
TokenKind::Bang
}
},
}
'&' => {
self.col += 1;
@@ -200,7 +198,7 @@ impl Lexer {
} else {
TokenKind::Ampersand
}
},
}
'?' => {
// This is a close tag, we can enter "Initial" mode again.
if let Some('>') = self.peek {
@@ -236,7 +234,7 @@ impl Lexer {
} else {
TokenKind::Question
}
},
}
'=' => {
if let Some('=') = self.peek {
self.next();
@@ -261,7 +259,7 @@ impl Lexer {
TokenKind::Equals
}
},
}
// Single quoted string.
'\'' => {
self.col += 1;
@@ -303,7 +301,7 @@ impl Lexer {
}
TokenKind::ConstantString(buffer)
},
}
'"' => {
self.col += 1;
@@ -344,7 +342,7 @@ impl Lexer {
}
TokenKind::ConstantString(buffer)
},
}
'$' => {
let mut buffer = String::new();
@@ -356,7 +354,7 @@ impl Lexer {
self.col += 1;
buffer.push(n);
self.next();
},
}
'a'..='z' | 'A'..='Z' | '\u{80}'..='\u{ff}' | '_' => {
self.col += 1;
@@ -368,7 +366,7 @@ impl Lexer {
}
TokenKind::Variable(buffer)
},
}
'.' => {
self.col += 1;
@@ -384,7 +382,7 @@ impl Lexer {
self.next();
self.col += 1;
},
}
'_' => {
if underscore {
return Err(LexerError::UnexpectedCharacter(n));
@@ -394,7 +392,7 @@ impl Lexer {
self.next();
self.col += 1;
},
}
_ => break,
}
}
@@ -421,7 +419,7 @@ impl Lexer {
} else {
TokenKind::Dot
}
},
}
'0'..='9' => {
let mut buffer = String::from(char);
let mut underscore = false;
@@ -437,7 +435,7 @@ impl Lexer {
self.next();
self.col += 1;
},
}
'.' => {
if is_float {
return Err(LexerError::UnexpectedCharacter(n));
@@ -447,7 +445,7 @@ impl Lexer {
buffer.push(n);
self.next();
self.col += 1;
},
}
'_' => {
if underscore {
return Err(LexerError::UnexpectedCharacter(n));
@@ -457,7 +455,7 @@ impl Lexer {
self.next();
self.col += 1;
},
}
_ => break,
}
}
@@ -467,7 +465,7 @@ impl Lexer {
} else {
TokenKind::Int(buffer.parse().unwrap())
}
},
}
'\\' => {
self.col += 1;
@@ -481,7 +479,7 @@ impl Lexer {
} else {
TokenKind::NamespaceSeparator
}
},
}
_ if char.is_alphabetic() || char == '_' => {
self.col += 1;
@@ -515,7 +513,7 @@ impl Lexer {
} else {
identifier_to_keyword(&buffer).unwrap_or(TokenKind::Identifier(buffer))
}
},
}
'/' | '#' => {
self.col += 1;
@@ -589,7 +587,7 @@ impl Lexer {
TokenKind::Comment(buffer)
}
},
}
'*' => {
self.col += 1;
@@ -604,7 +602,7 @@ impl Lexer {
} else {
TokenKind::Asterisk
}
},
}
'|' => {
self.col += 1;
@@ -617,15 +615,15 @@ impl Lexer {
} else {
TokenKind::Pipe
}
},
}
'{' => {
self.col += 1;
TokenKind::LeftBrace
},
}
'}' => {
self.col += 1;
TokenKind::RightBrace
},
}
'(' => {
self.col += 1;
@@ -654,15 +652,15 @@ impl Lexer {
} else {
TokenKind::LeftParen
}
},
}
')' => {
self.col += 1;
TokenKind::RightParen
},
}
';' => {
self.col += 1;
TokenKind::SemiColon
},
}
'+' => {
self.col += 1;
@@ -681,7 +679,7 @@ impl Lexer {
} else {
TokenKind::Plus
}
},
}
'-' => {
self.col += 1;
@@ -698,7 +696,7 @@ impl Lexer {
} else {
TokenKind::Minus
}
},
}
'<' => {
self.col += 1;
@@ -722,7 +720,7 @@ impl Lexer {
} else {
TokenKind::LessThan
}
},
}
'>' => {
self.col += 1;
@@ -735,19 +733,19 @@ impl Lexer {
} else {
TokenKind::GreaterThan
}
},
}
',' => {
self.col += 1;
TokenKind::Comma
},
}
'[' => {
self.col += 1;
TokenKind::LeftBracket
},
}
']' => {
self.col += 1;
TokenKind::RightBracket
},
}
':' => {
self.col += 1;
@@ -759,13 +757,18 @@ impl Lexer {
} else {
TokenKind::Colon
}
},
_ => unimplemented!("<scripting> char: {}, line: {}, col: {}", char, self.line, self.col),
}
_ => unimplemented!(
"<scripting> char: {}, line: {}, col: {}",
char,
self.line,
self.col
),
};
Ok(Token {
kind,
span: (self.line, self.col)
span: (self.line, self.col),
})
}
@@ -872,8 +875,8 @@ pub enum LexerError {
#[cfg(test)]
mod tests {
use crate::{TokenKind, OpenTagKind, Token};
use super::Lexer;
use crate::{OpenTagKind, Token, TokenKind};
macro_rules! open {
() => {
@@ -881,7 +884,7 @@ mod tests {
};
($kind:expr) => {
TokenKind::OpenTag($kind)
}
};
}
macro_rules! var {
($v:expr) => {
@@ -896,18 +899,15 @@ mod tests {
#[test]
fn basic_tokens() {
assert_tokens("<?php ?>", &[
open!(),
TokenKind::CloseTag,
]);
assert_tokens("<?php ?>", &[open!(), TokenKind::CloseTag]);
}
#[test]
fn inline_html() {
assert_tokens("Hello, world!\n<?php", &[
TokenKind::InlineHtml("Hello, world!\n".into()),
open!(),
]);
assert_tokens(
"Hello, world!\n<?php",
&[TokenKind::InlineHtml("Hello, world!\n".into()), open!()],
);
}
#[test]
@@ -945,85 +945,96 @@ mod tests {
#[test]
fn casts() {
assert_tokens("<?php (object) (string)", &[
open!(),
TokenKind::ObjectCast,
TokenKind::StringCast,
]);
assert_tokens(
"<?php (object) (string)",
&[open!(), TokenKind::ObjectCast, TokenKind::StringCast],
);
}
#[test]
fn constant_single_quote_strings() {
assert_tokens(r#"<?php 'Hello, world!' 'I\'m a developer.' 'This is a backslash \\.' 'This is a multi-line
string.'"#, &[
assert_tokens(
r#"<?php 'Hello, world!' 'I\'m a developer.' 'This is a backslash \\.' 'This is a multi-line
string.'"#,
&[
open!(),
TokenKind::ConstantString("Hello, world!".into()),
TokenKind::ConstantString("I'm a developer.".into()),
TokenKind::ConstantString("This is a backslash \\.".into()),
TokenKind::ConstantString("This is a multi-line\nstring.".into()),
]);
],
);
}
#[test]
fn single_line_comments() {
assert_tokens(r#"<?php
assert_tokens(
r#"<?php
// Single line comment.
# Another single line comment.
"#, &[
"#,
&[
open!(),
TokenKind::Comment("// Single line comment.".into()),
TokenKind::Comment("# Another single line comment.".into()),
]);
],
);
}
#[test]
fn multi_line_comments() {
assert_tokens(r#"<?php
assert_tokens(
r#"<?php
/*
Hello
*/"#, &[
open!(),
TokenKind::Comment("/*\nHello\n*/".into()),
])
*/"#,
&[open!(), TokenKind::Comment("/*\nHello\n*/".into())],
)
}
#[test]
fn multi_line_comments_before_structure() {
assert_tokens(r#"<?php
assert_tokens(
r#"<?php
/*
Hello
*/
function"#, &[
function"#,
&[
open!(),
TokenKind::Comment("/*\nHello\n*/".into()),
TokenKind::Function,
])
],
)
}
#[test]
fn vars() {
assert_tokens("<?php $one $_one $One $one_one", &[
assert_tokens(
"<?php $one $_one $One $one_one",
&[
open!(),
var!("one"),
var!("_one"),
var!("One"),
var!("one_one"),
]);
],
);
}
#[test]
fn nums() {
assert_tokens("<?php 1 1_000 1_000_000", &[
open!(),
int!(1),
int!(1_000),
int!(1_000_000),
]);
assert_tokens(
"<?php 1 1_000 1_000_000",
&[open!(), int!(1), int!(1_000), int!(1_000_000)],
);
}
#[test]
fn punct() {
assert_tokens("<?php {}();, :: :", &[
assert_tokens(
"<?php {}();, :: :",
&[
open!(),
TokenKind::LeftBrace,
TokenKind::RightBrace,
@@ -1033,86 +1044,84 @@ function"#, &[
TokenKind::Comma,
TokenKind::DoubleColon,
TokenKind::Colon,
]);
],
);
}
#[test]
fn sigils() {
assert_tokens("<?php ->", &[
open!(),
TokenKind::Arrow,
]);
assert_tokens("<?php ->", &[open!(), TokenKind::Arrow]);
}
#[test]
fn math() {
assert_tokens("<?php + - <", &[
assert_tokens(
"<?php + - <",
&[
open!(),
TokenKind::Plus,
TokenKind::Minus,
TokenKind::LessThan,
]);
],
);
}
#[test]
fn identifiers() {
assert_tokens("<?php \\ Unqualified Is\\Qualified", &[
assert_tokens(
"<?php \\ Unqualified Is\\Qualified",
&[
open!(),
TokenKind::NamespaceSeparator,
TokenKind::Identifier("Unqualified".into()),
TokenKind::QualifiedIdentifier("Is\\Qualified".into()),
]);
],
);
}
#[test]
fn equals() {
assert_tokens("<?php = == ===", &[
assert_tokens(
"<?php = == ===",
&[
open!(),
TokenKind::Equals,
TokenKind::DoubleEquals,
TokenKind::TripleEquals,
]);
],
);
}
#[test]
fn span_tracking() {
let spans = get_spans("<?php hello_world()");
assert_eq!(spans, &[
(1, 4),
(1, 16),
(1, 17),
(1, 18),
]);
assert_eq!(spans, &[(1, 4), (1, 16), (1, 17), (1, 18),]);
let spans = get_spans(r#"<?php
let spans = get_spans(
r#"<?php
function hello_world() {
}"#);
}"#,
);
assert_eq!(spans, &[
(1, 4),
(3, 8),
(3, 20),
(3, 21),
(3, 22),
(3, 24),
(5, 1),
]);
assert_eq!(
spans,
&[(1, 4), (3, 8), (3, 20), (3, 21), (3, 22), (3, 24), (5, 1),]
);
}
#[test]
fn floats() {
assert_tokens("<?php 200.5 .05", &[
open!(),
TokenKind::Float(200.5),
TokenKind::Float(0.05),
]);
assert_tokens(
"<?php 200.5 .05",
&[open!(), TokenKind::Float(200.5), TokenKind::Float(0.05)],
);
}
fn assert_tokens(source: &str, expected: &[TokenKind]) {
let mut kinds = vec!();
let mut kinds = vec![];
for token in get_tokens(source) {
kinds.push(token.kind);
@@ -1123,7 +1132,7 @@ function hello_world() {
fn get_spans(source: &str) -> Vec<(usize, usize)> {
let tokens = get_tokens(source);
let mut spans = vec!();
let mut spans = vec![];
for token in tokens {
spans.push(token.span);