parser: add comment stmt and some tests

Signed-off-by: FedericoBruzzone <[email protected]>

FedericoBruzzone authored and guerinoni committed Aug 23, 2024
1 parent bbfe466 commit 2f8c7b4
Showing 10 changed files with 248 additions and 38 deletions.
4 changes: 2 additions & 2 deletions src/lexer/mod.rs
@@ -123,7 +123,7 @@ pub mod tests {
             let tokens_file = fs_file.to_string().replace(".fs", ".tokens.json");
             let tokens = std::fs::File::open(tokens_file.clone()).unwrap();
             let expected_tokens: Vec<Token> = serde_json::from_reader(tokens).unwrap();
-            println!("{}", serde_json::to_string(&output_tokens).unwrap());
+            // println!("{}", serde_json::to_string(&output_tokens).unwrap());
             assert_eq!(output_tokens, expected_tokens);
         }
     }
@@ -233,7 +233,7 @@ pub mod tests {
             let fs_file = path.to_str().unwrap();
             let tokens_file = fs_file.to_string().replace(".fs", ".tokens.json");
             let tokens = std::fs::File::open(tokens_file).unwrap();
-            println!("{}", serde_json::to_string(&output_tokens).unwrap());
+            // println!("{}", serde_json::to_string(&output_tokens).unwrap());
             let expected_tokens: Vec<Token> = serde_json::from_reader(tokens).unwrap();
             assert_eq!(output_tokens, expected_tokens);
         }
10 changes: 9 additions & 1 deletion src/parser/ast.rs
@@ -14,8 +14,16 @@ pub struct Block {
 
 #[derive(Debug, Deserialize, Serialize, PartialEq)]
 pub enum Stmt {
-    Assign { lhs: Expr, type_: Type, rhs: Expr },
+    Assign {
+        lhs: Expr,
+        type_: Type,
+        rhs: Expr,
+    },
     Expr(Expr),
+    Comment {
+        comment: String,
+        location: TokenLocation,
+    },
 }
 
 #[derive(Debug, Deserialize, Serialize, PartialEq)]
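The new Comment variant carries the raw comment text plus its source location, so comments survive into the AST instead of being dropped. A minimal sketch of a downstream consumer (hypothetical; print_stmt is illustrative and not part of this commit):

fn print_stmt(stmt: &ast::Stmt) {
    match stmt {
        ast::Stmt::Assign { lhs, type_, rhs } => {
            println!("assign {:?}: {:?} = {:?}", lhs, type_, rhs);
        }
        ast::Stmt::Expr(expr) => println!("expr {:?}", expr),
        // New in this commit: comments reach the AST with their location.
        ast::Stmt::Comment { comment, location } => {
            println!("comment {:?} at {:?}", comment, location);
        }
    }
}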
98 changes: 66 additions & 32 deletions src/parser/mod.rs
@@ -69,13 +69,20 @@ impl<I: IntoIterator<Item = Token>> Parser<I> {
                 info!("Parsed identifier - {:?}", stms);
                 Some(stms)
             }
+            Some(Token {
+                kind: TokenKind::TokenComment,
+                ..
+            }) => {
+                let comment = self.parse_comment_stmt();
+                info!("Parsed comment - {:?}", comment);
+                Some(comment)
+            }
             _ => todo!(),
         }
     }
 
     fn parse_identifier_stmt(&mut self) -> ast::Stmt {
-        let lhs = self.curr_token.clone().unwrap(); // Safe to unwrap because we checked for Some
-                                                    // in parse_stmt
+        let lhs = self.curr_token.clone().unwrap(); // Safe to unwrap
         self.consume();
 
         match self.curr_token {
@@ -88,39 +95,61 @@ impl<I: IntoIterator<Item = Token>> Parser<I> {
             Some(Token {
                 kind: TokenKind::TokenKeyword(_),
                 ..
-            }) => {
-                let type_ = self.parse_type();
-                self.consume();
-                match self.curr_token {
-                    Some(Token {
-                        kind: TokenKind::TokenAssign,
-                        ..
-                    }) => {
-                        self.consume();
-                        let rhs = self.parse_expr();
-                        self.consume();
-                        match self.curr_token {
-                            Some(Token {
-                                kind: TokenKind::TokenNewLine,
-                                ..
-                            }) => {
-                                self.consume();
-                                ast::Stmt::Assign {
-                                    lhs: ast::Expr::Identifier {
-                                        name: lhs.lexeme,
-                                        location: lhs.location,
-                                    },
-                                    type_,
-                                    rhs,
-                                }
-                            }
-                            _ => todo!(),
-                        }
-                    }
-                    _ => todo!(),
-                }
-            }
-            _ => todo!(), // Match `(` and parse a function
+            }) => self.parse_assign_stmt(lhs),
+            _ => todo!(), // Match `(` and parse a function
         }
     }
+
+    fn parse_assign_stmt(&mut self, lhs: Token) -> ast::Stmt {
+        let type_ = self.parse_type();
+        info!("Parsed type - {:?}", type_);
+        self.consume();
+        match self.curr_token {
+            Some(Token {
+                kind: TokenKind::TokenAssign,
+                ..
+            }) => {
+                self.consume();
+                let rhs = self.parse_expr();
+                info!("Parsed expr - {:?}", rhs);
+                self.consume();
+                match self.curr_token {
+                    Some(Token {
+                        kind: TokenKind::TokenNewLine,
+                        ..
+                    }) => {
+                        self.consume();
+                        ast::Stmt::Assign {
+                            lhs: ast::Expr::Identifier {
+                                name: lhs.lexeme,
+                                location: lhs.location,
+                            },
+                            type_,
+                            rhs,
+                        }
+                    }
+                    _ => todo!(),
+                }
+            }
+            _ => todo!(),
+        }
+    }
+
+    fn parse_comment_stmt(&mut self) -> ast::Stmt {
+        let comment = self.curr_token.clone().unwrap(); // Safe to unwrap
+        self.consume();
+        match self.curr_token {
+            Some(Token {
+                kind: TokenKind::TokenNewLine,
+                ..
+            }) => {
+                self.consume();
+                ast::Stmt::Comment {
+                    comment: comment.lexeme,
+                    location: comment.location,
+                }
+            }
+            _ => todo!(),
@@ -222,11 +251,16 @@ pub mod tests {
 
         let fs_files = fs_files.iter().filter(|p| {
             p.ends_with("id_int_assign.fs")
+                || p.ends_with("id_int_assign_2.fs")
+                || p.ends_with("id_int_assign_with_len_one.fs")
+                || p.ends_with("id_int_assign_with_spaces.fs")
                 || p.ends_with("id_float_assign.fs")
                 || p.ends_with("id_bool_true_assign.fs")
                 || p.ends_with("id_bool_false_assign.fs")
                 || p.ends_with("id_str_assign.fs")
                 || p.ends_with("id_str_assign_multiple_words.fs")
+                || p.ends_with("comment.fs")
+                || p.ends_with("comment_and_id_int.fs")
         });
 
         for path in fs_files {
@@ -243,7 +277,7 @@ pub mod tests {
             let output_ast = Parser::new(source.clone(), Lexer::new(&source)).parse();
             let ast_file = fs_file.to_string().replace(".fs", ".ast.json");
             let ast = std::fs::File::open(ast_file).unwrap();
-            // println!("{}", serde_json::to_string(&output_ast.root).unwrap());
+            println!("{}", serde_json::to_string(&output_ast.root).unwrap());
             let expected_ast = serde_json::from_reader(ast).unwrap();
             assert_eq!(output_ast.root, expected_ast);
         }
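With the new TokenComment arm in parse_stmt and the parse_comment_stmt helper, a comment line now parses to ast::Stmt::Comment on its own. A minimal usage sketch, modeled on the test harness above (it assumes the stmts field of the root block is reachable, as its serialized form suggests; illustrative only):

let source = "# this is a comment\n".to_string();
let ast = Parser::new(source.clone(), Lexer::new(&source)).parse();
// Assumes `root.stmts` is accessible here, as in the serialized AST.
match &ast.root.stmts[0] {
    ast::Stmt::Comment { comment, .. } => {
        assert_eq!(comment.as_str(), "# this is a comment");
    }
    other => panic!("expected Stmt::Comment, got {:?}", other),
}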
15 changes: 15 additions & 0 deletions testdata/native_types/comment.ast.json
@@ -0,0 +1,15 @@
+{
+  "stmts": [
+    {
+      "Comment": {
+        "comment": "# this is a comment",
+        "location": {
+          "file_path": "",
+          "line": 0,
+          "column_start": 0,
+          "column_end": 19
+        }
+      }
+    }
+  ]
+}
44 changes: 44 additions & 0 deletions testdata/native_types/comment_and_id_int.ast.json
@@ -0,0 +1,44 @@
+{
+  "stmts": [
+    {
+      "Comment": {
+        "comment": "# this is a comment",
+        "location": {
+          "file_path": "",
+          "line": 0,
+          "column_start": 0,
+          "column_end": 19
+        }
+      }
+    },
+    {
+      "Assign": {
+        "lhs": {
+          "Identifier": {
+            "name": "x",
+            "location": {
+              "file_path": "",
+              "line": 1,
+              "column_start": 0,
+              "column_end": 1
+            }
+          }
+        },
+        "type_": "Int",
+        "rhs": {
+          "Literal": {
+            "literal": {
+              "Int": 0
+            },
+            "location": {
+              "file_path": "",
+              "line": 1,
+              "column_start": 9,
+              "column_end": 10
+            }
+          }
+        }
+      }
+    }
+  ]
+}
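For reference, the comment_and_id_int.fs source file itself is not part of this diff; judging from the locations recorded above, it presumably reads:

# this is a comment
x: int = 0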
33 changes: 33 additions & 0 deletions testdata/native_types/id_int_assign_2.ast.json
@@ -0,0 +1,33 @@
+{
+  "stmts": [
+    {
+      "Assign": {
+        "lhs": {
+          "Identifier": {
+            "name": "_x_int",
+            "location": {
+              "file_path": "",
+              "line": 0,
+              "column_start": 0,
+              "column_end": 6
+            }
+          }
+        },
+        "type_": "Int",
+        "rhs": {
+          "Literal": {
+            "literal": {
+              "Int": 732
+            },
+            "location": {
+              "file_path": "",
+              "line": 0,
+              "column_start": 14,
+              "column_end": 17
+            }
+          }
+        }
+      }
+    }
+  ]
+}
33 changes: 33 additions & 0 deletions testdata/native_types/id_int_assign_with_len_one.ast.json
@@ -0,0 +1,33 @@
+{
+  "stmts": [
+    {
+      "Assign": {
+        "lhs": {
+          "Identifier": {
+            "name": "i",
+            "location": {
+              "file_path": "",
+              "line": 0,
+              "column_start": 0,
+              "column_end": 1
+            }
+          }
+        },
+        "type_": "Int",
+        "rhs": {
+          "Literal": {
+            "literal": {
+              "Int": 1
+            },
+            "location": {
+              "file_path": "",
+              "line": 0,
+              "column_start": 9,
+              "column_end": 10
+            }
+          }
+        }
+      }
+    }
+  ]
+}
33 changes: 33 additions & 0 deletions testdata/native_types/id_int_assign_with_spaces.ast.json
@@ -0,0 +1,33 @@
+{
+  "stmts": [
+    {
+      "Assign": {
+        "lhs": {
+          "Identifier": {
+            "name": "_x_int",
+            "location": {
+              "file_path": "",
+              "line": 0,
+              "column_start": 4,
+              "column_end": 10
+            }
+          }
+        },
+        "type_": "Int",
+        "rhs": {
+          "Literal": {
+            "literal": {
+              "Int": 0
+            },
+            "location": {
+              "file_path": "",
+              "line": 0,
+              "column_start": 22,
+              "column_end": 23
+            }
+          }
+        }
+      }
+    }
+  ]
+}
2 changes: 1 addition & 1 deletion testdata/native_types/id_int_assign_with_spaces.fs
@@ -1 +1 @@
-_x_int: int = 0
\ No newline at end of file
+_x_int: int = 0
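The trailing newline added here is what the updated token stream below reflects: a TokenNewLine token now precedes TokenEOF.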
14 changes: 12 additions & 2 deletions testdata/native_types/id_int_assign_with_spaces.tokens.json
@@ -54,13 +54,23 @@
     }
   },
   {
-    "kind": "TokenEOF",
-    "lexeme": "",
+    "kind": "TokenNewLine",
+    "lexeme": "\n",
     "location": {
       "file_path": "",
       "line": 0,
       "column_start": 23,
       "column_end": 23
     }
+  },
+  {
+    "kind": "TokenEOF",
+    "lexeme": "",
+    "location": {
+      "file_path": "",
+      "line": 1,
+      "column_start": 0,
+      "column_end": 0
+    }
   }
 ]
