// parser/bin/snapshot.rs
//
// Provenance (from the original source listing):
//   commit b72751d3c7 — "chore: remove tokens test (#180)"
//   Author: Saif Eddin Gmati — Signed-off-by: azjezz <azjezz@protonmail.com>
//   Date: 2022-12-08 18:47:54 +01:00
//   (78 lines, 2.4 KiB, Rust)

use std::env;
use std::fs::read_dir;
use std::path::PathBuf;
use php_parser_rs::lexer::Lexer;
use php_parser_rs::parse;
static LEXER: Lexer = Lexer::new();
/// Regenerates snapshot files for every fixture directory under
/// `tests/fixtures/`:
///
/// * `ast.txt`          — pretty-printed AST when lexing and parsing succeed,
/// * `lexer-error.txt`  — the lexer error when tokenization fails,
/// * `parser-error.txt` — the parser error when tokenization succeeds but
///                        parsing fails.
///
/// Exactly one of the three files is written per fixture; any stale snapshot
/// files from a previous run are removed first. Fixtures without a `code.php`
/// are skipped. Panics (with a message naming the offending path) on any I/O
/// failure, which is the desired behavior for a developer tool run via cargo.
fn main() {
    // CARGO_MANIFEST_DIR is set by cargo for `cargo run`; a bare invocation
    // of the binary outside cargo would not have it, so fail loudly.
    let manifest = PathBuf::from(
        env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR is not set; run via `cargo run`"),
    );

    let fixtures = manifest.join("tests").join("fixtures");

    // Collect fixture directories, skipping plain files, then sort so the
    // generated output (and the printed progress) is deterministic.
    let mut entries = read_dir(&fixtures)
        .unwrap_or_else(|error| panic!("failed to read `{}`: {}", fixtures.display(), error))
        .flatten()
        .map(|entry| entry.path())
        .filter(|path| path.is_dir())
        .collect::<Vec<PathBuf>>();
    entries.sort();

    for entry in entries {
        let code_filename = entry.join("code.php");
        let ast_filename = entry.join("ast.txt");
        let lexer_error_filename = entry.join("lexer-error.txt");
        let parser_error_filename = entry.join("parser-error.txt");

        // Nothing to snapshot without source code.
        if !code_filename.exists() {
            continue;
        }

        // Drop stale snapshots so only the file matching this run's outcome
        // remains afterwards.
        remove_if_exists(&ast_filename);
        remove_if_exists(&lexer_error_filename);
        remove_if_exists(&parser_error_filename);

        let code = std::fs::read(&code_filename)
            .unwrap_or_else(|error| panic!("failed to read `{}`: {}", code_filename.display(), error));

        match LEXER.tokenize(&code) {
            Ok(tokens) => match parse(tokens) {
                Ok(ast) => {
                    std::fs::write(&ast_filename, format!("{:#?}\n", ast)).unwrap_or_else(
                        |error| panic!("failed to write `{}`: {}", ast_filename.display(), error),
                    );
                    println!("✅ generated `ast.txt` for `{}`", entry.to_string_lossy());
                }
                Err(error) => {
                    std::fs::write(&parser_error_filename, format!("{:?} -> {}\n", error, error))
                        .unwrap_or_else(|error| {
                            panic!("failed to write `{}`: {}", parser_error_filename.display(), error)
                        });
                    println!(
                        "✅ generated `parser-error.txt` for `{}`",
                        entry.to_string_lossy()
                    );
                }
            },
            Err(error) => {
                std::fs::write(&lexer_error_filename, format!("{:?} -> {}\n", error, error))
                    .unwrap_or_else(|error| {
                        panic!("failed to write `{}`: {}", lexer_error_filename.display(), error)
                    });
                println!(
                    "✅ generated `lexer-error.txt` for `{}`",
                    entry.to_string_lossy()
                );
            }
        }
    }
}

/// Removes `path` if it exists; a missing file is not an error.
///
/// Matching on `ErrorKind::NotFound` instead of calling `exists()` first
/// avoids the check-then-act race and saves a stat syscall per file.
/// Panics on any other I/O error, naming the path.
fn remove_if_exists(path: &std::path::Path) {
    match std::fs::remove_file(path) {
        Ok(()) => {}
        Err(error) if error.kind() == std::io::ErrorKind::NotFound => {}
        Err(error) => panic!("failed to remove `{}`: {}", path.display(), error),
    }
}