From 88e42d3f09c24d5b90987e894a77c82fba36ff5f Mon Sep 17 00:00:00 2001
From: maxmindlin <35264981+maxmindlin@users.noreply.github.com>
Date: Wed, 12 Jun 2024 13:08:26 -0400
Subject: [PATCH] if stmts

---
 scout-interpreter/Cargo.toml    |  2 +-
 scout-interpreter/src/lib.rs    | 41 ++++++++++++----
 scout-interpreter/src/object.rs | 28 ++++++++++-
 scout-lexer/src/lib.rs          | 11 ++++-
 scout-lexer/src/token.rs        | 14 +++++-
 scout-parser/src/ast.rs         |  3 ++
 scout-parser/src/lib.rs         | 85 ++++++++++++++++++++-------------
 7 files changed, 134 insertions(+), 50 deletions(-)

diff --git a/scout-interpreter/Cargo.toml b/scout-interpreter/Cargo.toml
index 5df68bf..096c122 100644
--- a/scout-interpreter/Cargo.toml
+++ b/scout-interpreter/Cargo.toml
@@ -12,7 +12,7 @@ futures = "0.3.30"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 image = "0.25.1"
+scout-lexer = { path = "../scout-lexer/" }
 
 [dev-dependencies]
 test-case = "*"
-scout-lexer = { path = "../scout-lexer/" }
diff --git a/scout-interpreter/src/lib.rs b/scout-interpreter/src/lib.rs
index 08f697e..9ba8327 100644
--- a/scout-interpreter/src/lib.rs
+++ b/scout-interpreter/src/lib.rs
@@ -7,6 +7,7 @@ use fantoccini::Locator;
 use futures::lock::Mutex;
 use futures::{future::BoxFuture, FutureExt};
 use object::{obj_map_to_json, Object};
+use scout_lexer::TokenKind;
 use scout_parser::ast::{Block, ExprKind, Identifier, NodeKind, Program, StmtKind};
 use serde::{Deserialize, Serialize};
 use serde_json::{json, Map, Value};
@@ -27,18 +28,16 @@ pub struct ScrapeResults {
 
 impl ScrapeResults {
     pub fn add_result(&mut self, res: Map<String, Value>, url: &str) {
-        let entry = self.results.get_mut(url);
-        match entry {
+        match self.results.get_mut(url) {
             None => {
                 self.results.insert(url.to_owned(), vec![res].into());
             }
-            Some(e) => {
-                if let Value::Array(vec) = e {
-                    vec.push(Value::from(res));
-                } else {
-                    panic!("results was not a vec type");
-                }
+            Some(Value::Array(v)) => {
+                v.push(Value::from(res));
             }
+            // This should never happen since `add_results` is the only way to
+            // insert to the map.
+            _ => panic!("results was not a vec type"),
         }
     }
 
@@ -165,8 +164,14 @@ fn eval_statement<'a>(
 
                 Ok(Arc::new(Object::Null))
             }
-            StmtKind::If(_cond, _block) => {
-                unimplemented!()
+            StmtKind::If(cond, block) => {
+                let truth_check =
+                    eval_expression(cond, crawler, env.clone(), results.clone()).await?;
+                if truth_check.is_truthy() {
+                    eval_block(block, crawler, env.clone(), results.clone()).await?;
+                }
+
+                Ok(Arc::new(Object::Null))
             }
         }
     }
@@ -292,6 +297,7 @@ fn eval_expression<'a>(
                 },
             },
             ExprKind::Str(s) => Ok(Arc::new(Object::Str(s.to_owned()))),
+            ExprKind::Number(n) => Ok(Arc::new(Object::Number(*n))),
             ExprKind::Call(ident, params) => {
                 apply_call(ident, params, crawler, None, env.clone(), results.clone()).await
             }
@@ -313,12 +319,27 @@ fn eval_expression<'a>(
                 }
                 Ok(prev.unwrap())
             }
+            ExprKind::Infix(lhs, op, rhs) => {
+                // TODO: precedence....
+                let l_obj = eval_expression(lhs, crawler, env.clone(), results.clone()).await?;
+                let r_obj = eval_expression(rhs, crawler, env.clone(), results.clone()).await?;
+                let res = eval_op(l_obj.clone(), op, r_obj.clone())?;
+                Ok(res)
+            }
             _ => Err(EvalError::InvalidExpr),
         }
     }
     .boxed()
 }
 
+fn eval_op(lhs: Arc<Object>, op: &TokenKind, rhs: Arc<Object>) -> EvalResult {
+    match (lhs.clone(), op, rhs.clone()) {
+        (_, TokenKind::EQ, _) => Ok(Arc::new(Object::Boolean(lhs == rhs))),
+        (_, TokenKind::NEQ, _) => Ok(Arc::new(Object::Boolean(lhs != rhs))),
+        _ => Err(EvalError::UnknownInfixOp),
+    }
+}
+
 impl From<fantoccini::error::CmdError> for EvalError {
     fn from(_: fantoccini::error::CmdError) -> Self {
         Self::BrowserError
diff --git a/scout-interpreter/src/object.rs b/scout-interpreter/src/object.rs
index c5b221e..76af740 100644
--- a/scout-interpreter/src/object.rs
+++ b/scout-interpreter/src/object.rs
@@ -1,7 +1,7 @@
 use std::{collections::HashMap, fmt::Display, sync::Arc};
 
 use scout_parser::ast::Identifier;
-use serde_json::Value;
+use serde_json::{json, Value};
 
 #[derive(Debug)]
 pub enum Object {
@@ -10,6 +10,25 @@ pub enum Object {
     Str(String),
     Node(fantoccini::elements::Element),
     List(Vec<Arc<Object>>),
+    Boolean(bool),
+    Number(f64),
+}
+
+impl PartialEq for Object {
+    fn eq(&self, other: &Self) -> bool {
+        use Object::*;
+        match (self, other) {
+            (Null, Null) => true,
+            (Map(a), Map(b)) => a == b,
+            (Str(a), Str(b)) => a == b,
+            // @TODO: check if this is even correct
+            (Node(a), Node(b)) => a.element_id() == b.element_id(),
+            (List(a), List(b)) => a == b,
+            (Boolean(a), Boolean(b)) => a == b,
+            (Number(a), Number(b)) => a == b,
+            _ => false,
+        }
+    }
 }
 
 impl Display for Object {
@@ -27,6 +46,8 @@
             Str(s) => write!(f, "\"{}\"", s),
             Node(_) => write!(f, "Node"),
             List(objs) => write!(f, "[Object; {}]", objs.len()),
+            Boolean(b) => write!(f, "{}", b),
+            Number(n) => write!(f, "{}", n),
         }
     }
 }
@@ -41,6 +62,8 @@
             Node(_) => Value::String("Node".to_owned()),
             List(list) => Value::Array(list.iter().map(|obj| obj.to_json()).collect()),
             Map(map) => Value::Object(obj_map_to_json(map)),
+            Boolean(b) => Value::Bool(*b),
+            Number(n) => json!(n),
         }
     }
 
@@ -52,6 +75,9 @@
             Map(m) => !m.is_empty(),
             Node(_) => true,
             List(v) => !v.is_empty(),
+            Boolean(b) => *b,
+            // @TODO: Idk what truthiness of floats should be
+            Number(n) => *n > 0.0,
         }
     }
 }
diff --git a/scout-lexer/src/lib.rs b/scout-lexer/src/lib.rs
index e054e52..16415f7 100644
--- a/scout-lexer/src/lib.rs
+++ b/scout-lexer/src/lib.rs
@@ -30,9 +30,16 @@ impl Lexer {
             '=' => match self.peek() {
                 Some('=') => {
                     self.next();
-                    Token::new(DbEqual, "==".to_string())
+                    Token::new(EQ, "==".to_string())
                 }
-                _ => Token::new(Equal, '='.to_string()),
+                _ => Token::new(Assign, '='.to_string()),
             },
+            '!' => match self.peek() {
+                Some('=') => {
+                    self.next();
+                    Token::new(NEQ, "!=".to_string())
+                }
+                _ => Token::new(Illegal, '!'.to_string()),
+            },
             '"' => {
                 let literal = self.read_string();
diff --git a/scout-lexer/src/token.rs b/scout-lexer/src/token.rs
index 22bfd11..b060900 100644
--- a/scout-lexer/src/token.rs
+++ b/scout-lexer/src/token.rs
@@ -16,8 +16,9 @@ pub enum TokenKind {
     RBrace,
     Select,
     SelectAll,
-    Equal,
-    DbEqual,
+    Assign,
+    EQ,
+    NEQ,
 
     // Keywords
     If,
@@ -45,6 +46,15 @@ impl TokenKind {
             _ => None,
         }
     }
+
+    pub fn is_infix(&self) -> bool {
+        use TokenKind::*;
+        match self {
+            EQ => true,
+            NEQ => true,
+            _ => false,
+        }
+    }
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
diff --git a/scout-parser/src/ast.rs b/scout-parser/src/ast.rs
index f4b7248..a3b59c7 100644
--- a/scout-parser/src/ast.rs
+++ b/scout-parser/src/ast.rs
@@ -1,5 +1,7 @@
 use std::{collections::HashMap, fmt::Display};
 
+use scout_lexer::TokenKind;
+
 #[derive(Debug)]
 pub enum NodeKind {
     Program(Program),
@@ -33,6 +35,7 @@ pub enum ExprKind {
     SelectAll(String, Option<Identifier>),
     Call(Identifier, Vec<ExprKind>),
     Chain(Vec<ExprKind>),
+    Infix(Box<ExprKind>, TokenKind, Box<ExprKind>),
 }
 
 #[derive(Debug, PartialEq, Eq, Hash, Clone)]
diff --git a/scout-parser/src/lib.rs b/scout-parser/src/lib.rs
index 0b3c6e1..d593e25 100644
--- a/scout-parser/src/lib.rs
+++ b/scout-parser/src/lib.rs
@@ -9,31 +9,6 @@ pub mod ast;
 
 type ParseResult<T> = Result<T, ParseError>;
 
-#[derive(Debug, PartialOrd, Ord, PartialEq, Eq)]
-pub enum Precedence {
-    Lowest,
-    Equals,
-    LessGreater,
-    Sum,
-    Product,
-    Prefix,
-    Call,
-    Index,
-}
-
-impl From<TokenKind> for Precedence {
-    fn from(value: TokenKind) -> Self {
-        use TokenKind::*;
-        match value {
-            Equal => Self::Equals,
-            DbEqual => Self::Equals,
-            LParen => Self::Call,
-            Pipe => Self::Index,
-            _ => Self::Lowest,
-        }
-    }
-}
-
 #[derive(Debug)]
 pub enum ParseError {
     UnexpectedToken(TokenKind, TokenKind),
@@ -84,8 +59,9 @@
             TokenKind::Scrape => self.parse_scrape_stmt(),
             TokenKind::For => self.parse_for_loop(),
             TokenKind::Screenshot => self.parse_screenshot_stmt(),
+            TokenKind::If => self.parse_if(),
             TokenKind::Ident => match self.peek.kind {
-                TokenKind::Equal => {
+                TokenKind::Assign => {
                     let ident = Identifier::new(self.curr.literal.clone());
                     self.next_token();
                     self.next_token();
@@ -98,6 +74,25 @@
         }
     }
 
+    fn parse_if(&mut self) -> ParseResult<StmtKind> {
+        self.next_token();
+        let cond = self.parse_expr()?;
+        self.expect_peek(TokenKind::Do)?;
+        self.next_token();
+        let block = self.parse_block()?;
+        Ok(StmtKind::If(cond, block))
+    }
+
+    fn parse_block(&mut self) -> ParseResult<Block> {
+        let mut stmts = Vec::new();
+        while self.curr.kind != TokenKind::End {
+            let stmt = self.parse_stmt()?;
+            stmts.push(stmt);
+            self.next_token();
+        }
+        Ok(Block::new(stmts))
+    }
+
     /// `for <ident> in <expr> do <block> end`
     fn parse_for_loop(&mut self) -> ParseResult<StmtKind> {
         self.expect_peek(TokenKind::Ident)?;
@@ -106,14 +101,15 @@
         self.next_token();
         let iterable = self.parse_expr()?;
        self.expect_peek(TokenKind::Do)?;
-        let mut stmts = Vec::new();
+        // let mut stmts = Vec::new();
         self.next_token();
-        while self.curr.kind != TokenKind::End {
-            let stmt = self.parse_stmt()?;
-            stmts.push(stmt);
-            self.next_token();
-        }
-        let block = Block::new(stmts);
+        let block = self.parse_block()?;
+        // while self.curr.kind != TokenKind::End {
+        //     let stmt = self.parse_stmt()?;
+        //     stmts.push(stmt);
+        //     self.next_token();
+        // }
+        // let block = Block::new(stmts);
         self.next_token();
 
         let floop = ForLoop::new(ident, iterable, block);
@@ -168,7 +164,7 @@
     }
 
     fn parse_single_expr(&mut self) -> ParseResult<ExprKind> {
-        match self.curr.kind {
+        let lhs = match self.curr.kind {
             TokenKind::Ident => {
                 // Parse multiple types of ident expressions
                 match self.peek.kind {
@@ -224,6 +220,16 @@
                     .map_err(|_| ParseError::InvalidNumber)?,
             )),
             _ => Err(ParseError::InvalidToken(self.curr.kind)),
+        }?;
+
+        if self.peek.kind.is_infix() {
+            self.next_token();
+            let op = self.curr.kind;
+            self.next_token();
+            let rhs = self.parse_expr()?;
+            Ok(ExprKind::Infix(Box::new(lhs), op, Box::new(rhs)))
+        } else {
+            Ok(lhs)
         }
     }
 
@@ -377,6 +383,17 @@
             ]))
         )
     )]
+    #[test_case(
+        r#"x = 1 == 2"#,
+        StmtKind::Assign(
+            Identifier::new("x".to_string()),
+            ExprKind::Infix(Box::new(ExprKind::Number(1.)), TokenKind::EQ, Box::new(ExprKind::Number(2.)))
+        )
+    )]
+    #[test_case(
+        r#"if 1 do end"#,
+        StmtKind::If(ExprKind::Number(1.), Block::new(vec![]))
+    )]
     fn test_single_stmt(input: &str, exp: StmtKind) {
         let stmt = extract_first_stmt(input);
         assert_eq!(stmt, exp);
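
A minimal scout-script sketch of what this patch enables, using only constructs exercised by the parser tests above (number literals, `=` assignment, the `==`/`!=` infix operators, and `if ... do ... end`); the variable names are illustrative:

    x = 1 == 2
    if 1 != 2 do
        y = x
    end

Evaluation follows the new `Object::is_truthy` arms and the `StmtKind::If` handling in `eval_statement`: the block body runs only when the condition is truthy (for example `Boolean(true)` or a number greater than zero), and, per the `// TODO: precedence....` note in `eval_expression`, infix precedence is not handled yet.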