tests: add test in json for the ast
Signed-off-by: FedericoBruzzone <[email protected]>
Co-authored-by: Federico Guerinoni <[email protected]>
FedericoBruzzone and guerinoni committed Aug 19, 2024
1 parent e595d31 commit 63143fc
Showing 6 changed files with 228 additions and 175 deletions.
168 changes: 0 additions & 168 deletions src/lexer/mod.rs
@@ -94,171 +94,3 @@ impl std::fmt::Display for LexerError {
        }
    }
}

/// Collect all .fs files in the given path.
/// This is a utility function for testing.
#[cfg(test)]
pub fn collect_fs_files(path: &str, set_logger: bool) -> Vec<std::path::PathBuf> {
    if set_logger {
        let subscriber = tracing_subscriber::fmt()
            // filter spans/events with level TRACE or higher.
            .with_max_level(tracing::Level::TRACE)
            // build but do not install the subscriber.
            .finish();

        let _ = tracing::subscriber::set_global_default(subscriber)
            .map_err(|_err| eprintln!("Unable to set global default subscriber"));
    }

    std::fs::read_dir(path)
        .expect("Failed to read directory")
        .filter_map(|entry| {
            let path = entry.ok()?.path();
            if let Some(extension) = path.extension() {
                if extension == "fs" {
                    return Some(path);
                }
            }
            None
        })
        .collect()
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::source::Source;
    use pretty_assertions::assert_eq;

    #[test]
    fn native_types() {
        let fs_files = collect_fs_files("./testdata/native_types", true);
        assert_eq!(fs_files.len(), 16);

        for path in fs_files {
            info!("file -> {:?}", path);
            eprintln!("file -> {:?}", path);
            let input = std::fs::File::open(path.clone()).unwrap();
            let content = std::io::read_to_string(input).unwrap();
            let source = Source::from(content);
            let lexer = Lexer::new(&source);
            let output_tokens = lexer.collect::<Vec<Token>>();

            let tokens_file = path.to_str().unwrap();
            let tokens_file = tokens_file.to_string().replace(".fs", ".tokens.json");
            let tokens = std::fs::File::open(tokens_file).unwrap();
            let expected_tokens: Vec<Token> = serde_json::from_reader(tokens).unwrap();
            assert_eq!(output_tokens, expected_tokens);
        }
    }

    #[test]
    fn functions() {
        let fs_files = collect_fs_files("./testdata/functions", true);
        assert_eq!(fs_files.len(), 9);

        for path in fs_files {
            info!("file -> {:?}", path);
            eprintln!("file -> {:?}", path);
            let input = std::fs::File::open(path.clone()).unwrap();
            let content = std::io::read_to_string(input).unwrap();
            let source = Source::from(content);
            let lexer = Lexer::new(&source);
            let output_tokens = lexer.collect::<Vec<Token>>();

            let tokens_file = path.to_str().unwrap();
            let tokens_file = tokens_file.to_string().replace(".fs", ".tokens.json");
            let tokens = std::fs::File::open(tokens_file).unwrap();
            let expected_tokens: Vec<Token> = serde_json::from_reader(tokens).unwrap();
            assert_eq!(output_tokens, expected_tokens);
        }
    }

    #[test]
    fn lists() {
        let fs_files = collect_fs_files("./testdata/lists", true);
        assert_eq!(fs_files.len(), 3);

        for path in fs_files {
            info!("file -> {:?}", path);
            eprintln!("file -> {:?}", path);
            let input = std::fs::File::open(path.clone()).unwrap();
            let content = std::io::read_to_string(input).unwrap();
            let source = Source::from(content);
            let lexer = Lexer::new(&source);
            let output_tokens = lexer.collect::<Vec<Token>>();

            let tokens_file = path.to_str().unwrap();
            let tokens_file = tokens_file.to_string().replace(".fs", ".tokens.json");
            let tokens = std::fs::File::open(tokens_file).unwrap();
            let expected_tokens: Vec<Token> = serde_json::from_reader(tokens).unwrap();
            assert_eq!(output_tokens, expected_tokens);
        }
    }

    #[test]
    fn tuples() {
        let fs_files = collect_fs_files("./testdata/tuples", true);
        assert_eq!(fs_files.len(), 3);

        for path in fs_files {
            info!("file -> {:?}", path);
            eprintln!("file -> {:?}", path);
            let input = std::fs::File::open(path.clone()).unwrap();
            let content = std::io::read_to_string(input).unwrap();
            let source = Source::from(content);
            let lexer = Lexer::new(&source);
            let output_tokens = lexer.collect::<Vec<Token>>();

            let tokens_file = path.to_str().unwrap();
            let tokens_file = tokens_file.to_string().replace(".fs", ".tokens.json");
            let tokens = std::fs::File::open(tokens_file).unwrap();
            let expected_tokens: Vec<Token> = serde_json::from_reader(tokens).unwrap();
            assert_eq!(output_tokens, expected_tokens);
        }
    }

    #[test]
    fn records() {
        let fs_files = collect_fs_files("./testdata/records", true);
        assert_eq!(fs_files.len(), 3);

        for path in fs_files {
            info!("file -> {:?}", path);
            eprintln!("file -> {:?}", path);
            let input = std::fs::File::open(path.clone()).unwrap();
            let content = std::io::read_to_string(input).unwrap();
            let source = Source::from(content);
            let lexer = Lexer::new(&source);
            let output_tokens = lexer.collect::<Vec<Token>>();

            let tokens_file = path.to_str().unwrap();
            let tokens_file = tokens_file.to_string().replace(".fs", ".tokens.json");
            let tokens = std::fs::File::open(tokens_file).unwrap();
            let expected_tokens: Vec<Token> = serde_json::from_reader(tokens).unwrap();
            assert_eq!(output_tokens, expected_tokens);
        }
    }

    #[test]
    fn variants() {
        let fs_files = collect_fs_files("./testdata/variants", true);
        assert_eq!(fs_files.len(), 1);

        for path in fs_files {
            info!("file -> {:?}", path);
            eprintln!("file -> {:?}", path);
            let input = std::fs::File::open(path.clone()).unwrap();
            let content = std::io::read_to_string(input).unwrap();
            let source = Source::from(content);
            let lexer = Lexer::new(&source);
            let output_tokens = lexer.collect::<Vec<Token>>();

            let tokens_file = path.to_str().unwrap();
            let tokens_file = tokens_file.to_string().replace(".fs", ".tokens.json");
            let tokens = std::fs::File::open(tokens_file).unwrap();
            let expected_tokens: Vec<Token> = serde_json::from_reader(tokens).unwrap();
            assert_eq!(output_tokens, expected_tokens);
        }
    }
}
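
The tests deleted here all follow one fixture pattern: read a .fs source, lex it, and compare the tokens against a .tokens.json file stored next to the source. The commit title indicates the same pattern is now applied to the parser's AST in a test module this diff does not show. A hypothetical sketch of such a test follows; the parse entry point, the .ast.json suffix, and the relocated collect_fs_files helper are assumptions for illustration, not code from this commit:

#[test]
fn native_types_ast() {
    // Assumed: the collect_fs_files helper removed above now lives in the shared tests module.
    for path in collect_fs_files("./testdata/native_types", true) {
        let content = std::fs::read_to_string(&path).unwrap();
        let source = Source::from(content);
        // `parse` stands in for whatever entry point src/parser actually exposes (assumed).
        let output_ast: Ast = parse(&source);

        // Expected AST stored next to the source file, mirroring the .tokens.json convention
        // of the removed lexer tests (the .ast.json suffix is an assumption).
        let ast_file = path.to_str().unwrap().replace(".fs", ".ast.json");
        let expected_ast: Ast = serde_json::from_reader(std::fs::File::open(ast_file).unwrap()).unwrap();
        assert_eq!(output_ast, expected_ast);
    }
}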
1 change: 1 addition & 0 deletions src/main.rs
@@ -2,6 +2,7 @@ pub mod lexer;
pub mod logger;
pub mod parser;
pub mod source;
pub mod tests;
pub mod utils;

use lexer::Lexer;
13 changes: 7 additions & 6 deletions src/parser/ast.rs
@@ -1,31 +1,32 @@
use crate::{lexer::token::TokenLocation, source::Source};
use serde::{Deserialize, Serialize};

#[derive(Debug)]
#[derive(Debug, Deserialize, Serialize, PartialEq)]
pub struct Ast {
    pub source: Source,
    pub root: Block,
}

#[derive(Debug)]
#[derive(Debug, Deserialize, Serialize, PartialEq)]
pub struct Block {
    pub stmts: Box<[Stmt]>,
}

#[derive(Debug)]
#[derive(Debug, Deserialize, Serialize, PartialEq)]
pub enum Stmt {
    Assign { lhs: Expr, type_: Type, rhs: Expr },
    Expr(Expr),
}

#[derive(Debug)]
#[derive(Debug, Deserialize, Serialize, PartialEq)]
pub enum Type {
    Int,
    Float,
    Bool,
    Str,
}

#[derive(Debug)]
#[derive(Debug, Deserialize, Serialize, PartialEq)]
pub enum Expr {
    Literal {
        literal: Literal,
@@ -37,7 +38,7 @@ pub enum Expr {
    },
}

#[derive(Debug)]
#[derive(Debug, Deserialize, Serialize, PartialEq)]
pub enum Literal {
    Int(i64),
    Float(f64),
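
With Serialize, Deserialize, and PartialEq now derived on every AST node (and on Source, below), an Ast value can be written to JSON, read back, and compared directly in tests. A minimal round-trip sketch, assuming only the types shown in this diff, the serde_json dependency the lexer tests already use, and module paths inferred from the file layout; the empty program it builds is an illustration, not a real fixture:

#[test]
fn ast_json_round_trip() {
    use crate::parser::ast::{Ast, Block};
    use crate::source::Source;

    // Build a trivial AST: an empty block over an empty source.
    let ast = Ast {
        source: Source::from(String::new()),
        root: Block { stmts: Vec::new().into_boxed_slice() },
    };

    // Serialize to JSON and back; the PartialEq derive makes the final comparison possible.
    let json = serde_json::to_string(&ast).unwrap();
    let decoded: Ast = serde_json::from_str(&json).unwrap();
    assert_eq!(ast, decoded);
}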
3 changes: 2 additions & 1 deletion src/source.rs
@@ -1,8 +1,9 @@
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::{Path, PathBuf};
use tracing::info;

#[derive(Debug, Clone)]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
pub struct Source {
    file_path: PathBuf,
    content: String,
(Diffs for the two remaining changed files were not loaded.)
