2022-11-30 00:19:51 +01:00
|
|
|
use std::env;
use std::fs::read_dir;
use std::path::{Path, PathBuf};

use php_parser_rs::lexer::Lexer;
use php_parser_rs::parser::Parser;
|
|
|
|
|
2022-12-01 03:49:51 +01:00
|
|
|
// Process-wide parser instance shared across all fixtures. A `static`
// initializer must be a const expression, so `Parser::new()` is necessarily
// a `const fn` and both values are built at compile time.
static PARSER: Parser = Parser::new();

// Process-wide lexer instance, likewise constructed at compile time.
static LEXER: Lexer = Lexer::new();
|
|
|
|
|
2022-11-30 00:19:51 +01:00
|
|
|
fn main() {
|
|
|
|
let manifest = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
|
2022-12-05 18:36:04 +01:00
|
|
|
let mut entries = read_dir(manifest.join("tests").join("fixtures"))
|
2022-11-30 00:19:51 +01:00
|
|
|
.unwrap()
|
|
|
|
.flatten()
|
|
|
|
.map(|entry| entry.path())
|
|
|
|
.filter(|entry| entry.is_dir())
|
|
|
|
.collect::<Vec<PathBuf>>();
|
|
|
|
|
|
|
|
entries.sort();
|
|
|
|
|
|
|
|
for entry in entries {
|
|
|
|
let code_filename = entry.join("code.php");
|
|
|
|
let ast_filename = entry.join("ast.txt");
|
2022-12-01 01:33:38 +01:00
|
|
|
let tokens_filename = entry.join("tokens.txt");
|
2022-11-30 00:19:51 +01:00
|
|
|
let lexer_error_filename = entry.join("lexer-error.txt");
|
|
|
|
let parser_error_filename = entry.join("parser-error.txt");
|
|
|
|
|
|
|
|
if !code_filename.exists() {
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
|
|
|
if ast_filename.exists() {
|
|
|
|
std::fs::remove_file(&ast_filename).unwrap();
|
|
|
|
}
|
|
|
|
|
2022-12-01 01:33:38 +01:00
|
|
|
if tokens_filename.exists() {
|
|
|
|
std::fs::remove_file(&tokens_filename).unwrap();
|
|
|
|
}
|
|
|
|
|
2022-11-30 00:19:51 +01:00
|
|
|
if lexer_error_filename.exists() {
|
|
|
|
std::fs::remove_file(&lexer_error_filename).unwrap();
|
|
|
|
}
|
|
|
|
|
|
|
|
if parser_error_filename.exists() {
|
|
|
|
std::fs::remove_file(&parser_error_filename).unwrap();
|
|
|
|
}
|
|
|
|
|
|
|
|
let code = std::fs::read_to_string(&code_filename).unwrap();
|
2022-12-01 03:49:51 +01:00
|
|
|
let tokens = LEXER.tokenize(code.as_bytes());
|
2022-11-30 00:19:51 +01:00
|
|
|
|
|
|
|
match tokens {
|
|
|
|
Ok(tokens) => {
|
2022-12-01 01:33:38 +01:00
|
|
|
std::fs::write(tokens_filename, format!("{:#?}\n", tokens)).unwrap();
|
|
|
|
println!(
|
|
|
|
"✅ generated `tokens.txt` for `{}`",
|
|
|
|
entry.to_string_lossy()
|
|
|
|
);
|
|
|
|
|
2022-12-01 03:49:51 +01:00
|
|
|
let ast = PARSER.parse(tokens);
|
2022-11-30 00:19:51 +01:00
|
|
|
match ast {
|
|
|
|
Ok(ast) => {
|
|
|
|
std::fs::write(ast_filename, format!("{:#?}\n", ast)).unwrap();
|
|
|
|
println!("✅ generated `ast.txt` for `{}`", entry.to_string_lossy());
|
|
|
|
}
|
|
|
|
Err(error) => {
|
|
|
|
std::fs::write(
|
|
|
|
parser_error_filename,
|
|
|
|
format!("{:?} -> {}\n", error, error),
|
|
|
|
)
|
|
|
|
.unwrap();
|
|
|
|
println!(
|
|
|
|
"✅ generated `parser-error.txt` for `{}`",
|
|
|
|
entry.to_string_lossy()
|
|
|
|
);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Err(error) => {
|
2022-12-01 01:33:38 +01:00
|
|
|
std::fs::write(lexer_error_filename, format!("{:?} -> {}\n", error, error))
|
|
|
|
.unwrap();
|
2022-11-30 00:19:51 +01:00
|
|
|
println!(
|
|
|
|
"✅ generated `lexer-error.txt` for `{}`",
|
|
|
|
entry.to_string_lossy()
|
|
|
|
);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|