New math parser (#956)

* New math parser

* Remove old parser

* Comments

* Move tests into parser_impl, remove dead code

* Backport some math tests
Adam Chalmers
2023-10-31 14:16:18 -05:00
committed by GitHub
parent 387f7e0912
commit 023c3cbb90
8 changed files with 891 additions and 4547 deletions

View File

@@ -1443,7 +1443,7 @@ describe('nests binary expressions correctly', () => {
       type: 'BinaryExpression',
       operator: '*',
       start: 15,
-      end: 26,
+      end: 25,
       left: { type: 'Literal', value: 2, raw: '2', start: 15, end: 16 },
       right: {
         type: 'BinaryExpression',

View File

@@ -50,6 +50,9 @@ engine = []
 panic = "abort"
 debug = true
 
+[profile.bench]
+debug = true # Flamegraphs of benchmarks require accurate debug symbols
+
 [dev-dependencies]
 criterion = "0.5.1"
 expectorate = "1.1.0"

View File

@@ -2175,6 +2175,7 @@ impl BinaryExpression {
             BinaryOperator::Mul => (left * right).into(),
             BinaryOperator::Div => (left / right).into(),
             BinaryOperator::Mod => (left % right).into(),
+            BinaryOperator::Pow => (left.powf(right)).into(),
         };
 
         Ok(MemoryItem::UserVal(UserVal {
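
In other words, the new `^` operator evaluates by applying floating-point exponentiation (`powf`) to the already-evaluated operands. A minimal standalone sketch of that arithmetic, for illustration only (plain Rust, not part of this diff):

fn main() {
    // Illustration only: `^` on numeric operands maps to f64::powf.
    let (left, right): (f64, f64) = (2.0, 10.0);
    let result = left.powf(right);
    println!("2 ^ 10 = {result}"); // prints 1024
}
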
@@ -2257,13 +2258,46 @@ pub enum BinaryOperator {
     #[serde(rename = "%")]
     #[display("%")]
     Mod,
+    /// Raise a number to a power.
+    #[serde(rename = "^")]
+    #[display("^")]
+    Pow,
 }
 
+/// Mathematical associativity.
+/// Should a . b . c be read as (a . b) . c, or a . (b . c)
+/// See <https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Operator_precedence#precedence_and_associativity> for more.
+#[derive(PartialEq, Eq, Debug, Clone, Copy)]
+pub enum Associativity {
+    /// Read a . b . c as (a . b) . c
+    Left,
+    /// Read a . b . c as a . (b . c)
+    Right,
+}
+
+impl Associativity {
+    pub fn is_left(&self) -> bool {
+        matches!(self, Self::Left)
+    }
+}
+
 impl BinaryOperator {
+    /// Follow JS definitions of each operator.
+    /// Taken from <https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Operator_precedence#table>
     pub fn precedence(&self) -> u8 {
         match &self {
             BinaryOperator::Add | BinaryOperator::Sub => 11,
             BinaryOperator::Mul | BinaryOperator::Div | BinaryOperator::Mod => 12,
+            BinaryOperator::Pow => 6,
         }
     }
+
+    /// Follow JS definitions of each operator.
+    /// Taken from <https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Operator_precedence#table>
+    pub fn associativity(&self) -> Associativity {
+        match self {
+            Self::Add | Self::Sub | Self::Mul | Self::Div | Self::Mod => Associativity::Left,
+            Self::Pow => Associativity::Right,
+        }
+    }
 }
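
`Pow` is the only right-associative operator here, which is what makes a chain like `2 ^ 3 ^ 2` group as `2 ^ (3 ^ 2)` rather than `(2 ^ 3) ^ 2`. A standalone sketch of the difference, for illustration only (plain Rust, not part of this diff):

fn main() {
    let (a, b, c): (f64, f64, f64) = (2.0, 3.0, 2.0);

    // Right-associative grouping, as `associativity()` returns for Pow:
    let right_assoc = a.powf(b.powf(c)); // 2 ^ (3 ^ 2) = 2 ^ 9 = 512
    // Left-associative grouping, which Pow deliberately avoids:
    let left_assoc = a.powf(b).powf(c); // (2 ^ 3) ^ 2 = 8 ^ 2 = 64

    println!("right-assoc: {right_assoc}, left-assoc: {left_assoc}");
}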

View File

@@ -5,7 +5,6 @@ pub mod docs;
 pub mod engine;
 pub mod errors;
 pub mod executor;
-pub mod math_parser;
 pub mod parser;
 pub mod server;
 pub mod std;

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,134 @@
use crate::ast::types::{BinaryExpression, BinaryOperator, BinaryPart};

/// Parses a list of tokens (in infix order, i.e. as the user typed them)
/// into a binary expression tree.
pub fn parse(infix_tokens: Vec<BinaryExpressionToken>) -> BinaryExpression {
    let rpn = postfix(infix_tokens);
    evaluate(rpn)
}

/// Parses a list of tokens (in postfix order) into a binary expression tree.
fn evaluate(rpn: Vec<BinaryExpressionToken>) -> BinaryExpression {
    let mut operand_stack = Vec::new();
    for item in rpn {
        let expr = match item {
            BinaryExpressionToken::Operator(operator) => {
                let right: BinaryPart = operand_stack.pop().unwrap();
                let left = operand_stack.pop().unwrap();
                BinaryPart::BinaryExpression(Box::new(BinaryExpression {
                    start: left.start(),
                    end: right.end(),
                    operator,
                    left,
                    right,
                }))
            }
            BinaryExpressionToken::Operand(o) => o,
        };
        operand_stack.push(expr)
    }
    if let BinaryPart::BinaryExpression(expr) = operand_stack.pop().unwrap() {
        *expr
    } else {
        panic!("Last expression was not a binary expression")
    }
}

/// Reorders tokens from infix order to postfix order.
fn postfix(infix: Vec<BinaryExpressionToken>) -> Vec<BinaryExpressionToken> {
    let mut operator_stack: Vec<BinaryOperator> = Vec::with_capacity(infix.len());
    let mut output = Vec::with_capacity(infix.len());
    for token in infix {
        match token {
            BinaryExpressionToken::Operator(o1) => {
                // From https://en.wikipedia.org/wiki/Shunting_yard_algorithm:
                // while (
                //     there is an operator o2 at the top of the operator stack which is not a left parenthesis,
                //     and (o2 has greater precedence than o1 or (o1 and o2 have the same precedence and o1 is left-associative))
                // )
                while operator_stack
                    .last()
                    .map(|o2| {
                        (o2.precedence() > o1.precedence())
                            || o1.precedence() == o2.precedence() && o1.associativity().is_left()
                    })
                    .unwrap_or(false)
                {
                    output.push(BinaryExpressionToken::Operator(operator_stack.pop().unwrap()));
                }
                operator_stack.push(o1);
            }
            o @ BinaryExpressionToken::Operand(_) => output.push(o),
        }
    }
    // After the while loop, pop the remaining items from the operator stack into the output queue.
    output.extend(operator_stack.into_iter().rev().map(BinaryExpressionToken::Operator));
    output
}

/// Expressions are made up of operators and operands.
#[derive(PartialEq, Debug)]
pub enum BinaryExpressionToken {
    Operator(BinaryOperator),
    Operand(BinaryPart),
}

impl From<BinaryPart> for BinaryExpressionToken {
    fn from(value: BinaryPart) -> Self {
        Self::Operand(value)
    }
}

impl From<BinaryOperator> for BinaryExpressionToken {
    fn from(value: BinaryOperator) -> Self {
        Self::Operator(value)
    }
}

#[cfg(test)]
mod tests {
    use crate::ast::types::Literal;

    use super::*;

    #[test]
    fn parse_and_evaluate() {
        /// Make a literal
        fn lit(n: u8) -> BinaryPart {
            BinaryPart::Literal(Box::new(Literal {
                start: 0,
                end: 0,
                value: n.into(),
                raw: n.to_string(),
            }))
        }

        let tests: Vec<Vec<BinaryExpressionToken>> = vec![
            // 3 + 4 × 2 ÷ ( 1 - 5 ) ^ 2 ^ 3
            vec![
                lit(3).into(),
                BinaryOperator::Add.into(),
                lit(4).into(),
                BinaryOperator::Mul.into(),
                lit(2).into(),
                BinaryOperator::Div.into(),
                BinaryPart::BinaryExpression(Box::new(BinaryExpression {
                    start: 0,
                    end: 0,
                    operator: BinaryOperator::Sub,
                    left: lit(1),
                    right: lit(5),
                }))
                .into(),
                BinaryOperator::Pow.into(),
                lit(2).into(),
                BinaryOperator::Pow.into(),
                lit(3).into(),
            ],
        ];
        for infix_input in tests {
            let rpn = postfix(infix_input);
            let tree = evaluate(rpn);
            dbg!(tree);
        }
    }
}
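
The file above is the whole algorithm: `postfix` runs the shunting-yard loop, popping while the operator already on the stack has higher precedence, or equal precedence when the incoming operator is left-associative, and `evaluate` folds the postfix stream into nested `BinaryExpression` nodes. For readers who want to see the same two-phase idea end to end without the crate's AST types, here is a self-contained sketch on plain numbers. It is illustration only: `Tok`, `prec`, `to_postfix`, and `eval` are hypothetical stand-ins, the operator set is reduced to `+ - * /`, and every operator is treated as left-associative.

// Standalone sketch of the shunting-yard idea on plain numbers (not crate code).
#[derive(Clone, Copy)]
enum Tok {
    Num(f64),
    Op(char),
}

fn prec(op: char) -> u8 {
    match op {
        '+' | '-' => 1,
        '*' | '/' => 2,
        _ => 0,
    }
}

/// Reorder infix tokens to postfix (all operators treated as left-associative).
fn to_postfix(infix: Vec<Tok>) -> Vec<Tok> {
    let mut ops: Vec<char> = Vec::new();
    let mut out = Vec::new();
    for t in infix {
        match t {
            Tok::Num(_) => out.push(t),
            Tok::Op(o1) => {
                // Pop while the stacked operator binds at least as tightly.
                while ops.last().map(|&o2| prec(o2) >= prec(o1)).unwrap_or(false) {
                    out.push(Tok::Op(ops.pop().unwrap()));
                }
                ops.push(o1);
            }
        }
    }
    out.extend(ops.into_iter().rev().map(Tok::Op));
    out
}

/// Evaluate a postfix token stream with an operand stack.
fn eval(postfix: Vec<Tok>) -> f64 {
    let mut stack = Vec::new();
    for t in postfix {
        match t {
            Tok::Num(n) => stack.push(n),
            Tok::Op(o) => {
                let right = stack.pop().unwrap();
                let left = stack.pop().unwrap();
                stack.push(match o {
                    '+' => left + right,
                    '-' => left - right,
                    '*' => left * right,
                    '/' => left / right,
                    _ => unreachable!(),
                });
            }
        }
    }
    stack.pop().unwrap()
}

fn main() {
    // 1 + 2 * 3 reorders to [1, 2, 3, *, +] and evaluates to 7, not 9.
    let infix = vec![
        Tok::Num(1.0),
        Tok::Op('+'),
        Tok::Num(2.0),
        Tok::Op('*'),
        Tok::Num(3.0),
    ];
    assert_eq!(eval(to_postfix(infix)), 7.0);
}

Running it reorders `1 + 2 * 3` to postfix and evaluates to 7, mirroring what `postfix` and `evaluate` above do with `BinaryExpressionToken`s, except that they build an AST instead of a number.
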

View File

@@ -17,12 +17,13 @@ use crate::{
     },
     errors::{KclError, KclErrorDetails},
     executor::SourceRange,
-    math_parser::MathParser,
     parser::parser_impl::error::ContextError,
     std::StdLib,
     token::{Token, TokenType},
 };
 
+use super::{math::BinaryExpressionToken, PIPE_OPERATOR, PIPE_SUBSTITUTION_OPERATOR};
+
 mod error;
 
 type PResult<O, E = error::ContextError> = winnow::prelude::PResult<O, E>;
@@ -455,7 +456,7 @@ fn object(i: TokenSlice) -> PResult<ObjectExpression> {
 /// Parse the % symbol, used to substitute a curried argument from a |> (pipe).
 fn pipe_sub(i: TokenSlice) -> PResult<PipeSubstitution> {
     any.try_map(|token: Token| {
-        if matches!(token.token_type, TokenType::Operator) && token.value == "%" {
+        if matches!(token.token_type, TokenType::Operator) && token.value == PIPE_SUBSTITUTION_OPERATOR {
             Ok(PipeSubstitution {
                 start: token.start,
                 end: token.end,
@@ -1025,35 +1026,33 @@ fn unary_expression(i: TokenSlice) -> PResult<UnaryExpression> {
 /// Consume tokens that make up a binary expression, but don't actually return them.
 /// Why not?
 /// Because this is designed to be used with .recognize() within the `binary_expression` parser.
-fn binary_expression_tokens(i: TokenSlice) -> PResult<()> {
-    let _first = operand.parse_next(i)?;
-    let _remaining: Vec<_> = repeat(
+fn binary_expression_tokens(i: TokenSlice) -> PResult<Vec<BinaryExpressionToken>> {
+    let first = operand.parse_next(i).map(BinaryExpressionToken::from)?;
+    let remaining: Vec<_> = repeat(
         1..,
         (
-            preceded(opt(whitespace), binary_operator),
-            preceded(opt(whitespace), operand),
+            preceded(opt(whitespace), binary_operator).map(BinaryExpressionToken::from),
+            preceded(opt(whitespace), operand).map(BinaryExpressionToken::from),
         ),
     )
     .context(expected(
         "one or more binary operators (like + or -) and operands for them, e.g. 1 + 2 - 3",
     ))
     .parse_next(i)?;
-    Ok(())
+    let mut out = Vec::with_capacity(1 + 2 * remaining.len());
+    out.push(first);
+    out.extend(remaining.into_iter().flat_map(|(a, b)| [a, b]));
+    Ok(out)
 }
 
 /// Parse an infix binary expression.
 fn binary_expression(i: TokenSlice) -> PResult<BinaryExpression> {
     // Find the slice of tokens which makes up the binary expression
-    let tokens = binary_expression_tokens.recognize().parse_next(i)?;
+    let tokens = binary_expression_tokens.parse_next(i)?;
     // Pass the token slice into the specialized math parser, for things like
     // precedence and converting infix operations to an AST.
-    let mut math_parser = MathParser::new(tokens);
-    let expr = math_parser
-        .parse()
-        .map_err(error::ContextError::from)
-        .map_err(ErrMode::Backtrack)?;
-    Ok(expr)
+    Ok(super::math::parse(tokens))
 }
 
 fn binary_expr_in_parens(i: TokenSlice) -> PResult<BinaryExpression> {
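
With this change, `binary_expression_tokens` hands `super::math::parse` the operands and operators in the infix order the user wrote them: the first operand, followed by each (operator, operand) pair flattened in sequence. A tiny standalone sketch of that interleaving, for illustration only (string placeholders instead of the real token types):

fn main() {
    // Illustration only: how `first` plus the (operator, operand) pairs
    // flatten into one infix-ordered Vec.
    let first = "1";
    let remaining = vec![("+", "2"), ("*", "3")];

    let mut out = Vec::with_capacity(1 + 2 * remaining.len());
    out.push(first);
    out.extend(remaining.into_iter().flat_map(|(op, operand)| [op, operand]));

    assert_eq!(out, ["1", "+", "2", "*", "3"]);
}
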
@@ -1134,7 +1133,7 @@ fn big_arrow(i: TokenSlice) -> PResult<Token> {
 }
 
 /// Parse a |> operator.
 fn pipe_operator(i: TokenSlice) -> PResult<Token> {
-    one_of((TokenType::Operator, "|>"))
+    one_of((TokenType::Operator, PIPE_OPERATOR))
         .context(expected(
             "the |> operator, used for 'piping' one function's output into another function's input",
         ))
@@ -1757,19 +1756,11 @@ const mySk1 = startSketchAt([0, 0])"#;
             {
                 // Run the original parser
                 let tokens = crate::token::lexer(test_program);
-                let expected = crate::parser::Parser::new(tokens.clone())
-                    .ast_old()
-                    .expect("Old parser failed");
-                // Run the second parser, check it matches the first parser.
-                let actual = match program.parse(&tokens) {
+                // TODO: get snapshots of what this outputs.
+                let _actual = match program.parse(&tokens) {
                     Ok(x) => x,
                     Err(_e) => panic!("could not parse test {i}"),
                 };
-                assert_eq!(
-                    expected, actual,
-                    "old parser (left) and new parser (right) disagree on test {i}"
-                );
             }
         }
@@ -2064,4 +2055,702 @@ const mySk1 = startSketchAt([0, 0])"#;
            assert_eq!(value.raw, "5");
        }
    }
#[test]
fn test_math_parse() {
let tokens = crate::token::lexer(r#"5 + "a""#);
let actual = crate::parser::Parser::new(tokens).ast().unwrap().body;
let expr = BinaryExpression {
start: 0,
end: 7,
operator: BinaryOperator::Add,
left: BinaryPart::Literal(Box::new(Literal {
start: 0,
end: 1,
value: serde_json::Value::Number(serde_json::Number::from(5)),
raw: "5".to_owned(),
})),
right: BinaryPart::Literal(Box::new(Literal {
start: 4,
end: 7,
value: serde_json::Value::String("a".to_owned()),
raw: r#""a""#.to_owned(),
})),
};
let expected = vec![BodyItem::ExpressionStatement(ExpressionStatement {
start: 0,
end: 7,
expression: Value::BinaryExpression(Box::new(expr)),
})];
assert_eq!(expected, actual);
}
#[test]
fn test_is_code_token() {
let tokens = [
Token {
token_type: TokenType::Word,
start: 0,
end: 3,
value: "log".to_string(),
},
Token {
token_type: TokenType::Brace,
start: 3,
end: 4,
value: "(".to_string(),
},
Token {
token_type: TokenType::Number,
start: 4,
end: 5,
value: "5".to_string(),
},
Token {
token_type: TokenType::Comma,
start: 5,
end: 6,
value: ",".to_string(),
},
Token {
token_type: TokenType::String,
start: 7,
end: 14,
value: "\"hello\"".to_string(),
},
Token {
token_type: TokenType::Word,
start: 16,
end: 27,
value: "aIdentifier".to_string(),
},
Token {
token_type: TokenType::Brace,
start: 27,
end: 28,
value: ")".to_string(),
},
];
for (i, token) in tokens.iter().enumerate() {
assert!(token.is_code_token(), "failed test {i}: {token:?}")
}
}
#[test]
fn test_is_not_code_token() {
let tokens = [
Token {
token_type: TokenType::Whitespace,
start: 6,
end: 7,
value: " ".to_string(),
},
Token {
token_type: TokenType::BlockComment,
start: 28,
end: 30,
value: "/* abte */".to_string(),
},
Token {
token_type: TokenType::LineComment,
start: 30,
end: 33,
value: "// yoyo a line".to_string(),
},
];
for (i, token) in tokens.iter().enumerate() {
assert!(!token.is_code_token(), "failed test {i}: {token:?}")
}
}
#[test]
fn test_abstract_syntax_tree() {
let code = "5 +6";
let parser = crate::parser::Parser::new(crate::token::lexer(code));
let result = parser.ast().unwrap();
let expected_result = Program {
start: 0,
end: 4,
body: vec![BodyItem::ExpressionStatement(ExpressionStatement {
start: 0,
end: 4,
expression: Value::BinaryExpression(Box::new(BinaryExpression {
start: 0,
end: 4,
left: BinaryPart::Literal(Box::new(Literal {
start: 0,
end: 1,
value: serde_json::Value::Number(serde_json::Number::from(5)),
raw: "5".to_string(),
})),
operator: BinaryOperator::Add,
right: BinaryPart::Literal(Box::new(Literal {
start: 3,
end: 4,
value: serde_json::Value::Number(serde_json::Number::from(6)),
raw: "6".to_string(),
})),
})),
})],
non_code_meta: NonCodeMeta::default(),
};
assert_eq!(result, expected_result);
}
#[test]
fn test_empty_file() {
let some_program_string = r#""#;
let tokens = crate::token::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result.err().unwrap().to_string().contains("file is empty"));
}
#[test]
fn test_parse_half_pipe_small() {
let tokens = crate::token::lexer(
"const secondExtrude = startSketchOn('XY')
|> startProfileAt([0,0], %)
|",
);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result.err().unwrap().to_string().contains("Unexpected token"));
}
#[test]
fn test_parse_member_expression_double_nested_braces() {
let tokens = crate::token::lexer(r#"const prop = yo["one"][two]"#);
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
#[test]
fn test_parse_member_expression_binary_expression_period_number_first() {
let tokens = crate::token::lexer(
r#"const obj = { a: 1, b: 2 }
const height = 1 - obj.a"#,
);
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
#[test]
fn test_parse_member_expression_binary_expression_brace_number_first() {
let tokens = crate::token::lexer(
r#"const obj = { a: 1, b: 2 }
const height = 1 - obj["a"]"#,
);
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
#[test]
fn test_parse_member_expression_binary_expression_brace_number_second() {
let tokens = crate::token::lexer(
r#"const obj = { a: 1, b: 2 }
const height = obj["a"] - 1"#,
);
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
#[test]
fn test_parse_member_expression_binary_expression_in_array_number_first() {
let tokens = crate::token::lexer(
r#"const obj = { a: 1, b: 2 }
const height = [1 - obj["a"], 0]"#,
);
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
#[test]
fn test_parse_member_expression_binary_expression_in_array_number_second() {
let tokens = crate::token::lexer(
r#"const obj = { a: 1, b: 2 }
const height = [obj["a"] - 1, 0]"#,
);
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
#[test]
fn test_parse_member_expression_binary_expression_in_array_number_second_missing_space() {
let tokens = crate::token::lexer(
r#"const obj = { a: 1, b: 2 }
const height = [obj["a"] -1, 0]"#,
);
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
#[test]
fn test_parse_half_pipe() {
let tokens = crate::token::lexer(
"const height = 10
const firstExtrude = startSketchOn('XY')
|> startProfileAt([0,0], %)
|> line([0, 8], %)
|> line([20, 0], %)
|> line([0, -8], %)
|> close(%)
|> extrude(2, %)
show(firstExtrude)
const secondExtrude = startSketchOn('XY')
|> startProfileAt([0,0], %)
|",
);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result.err().unwrap().to_string().contains("Unexpected token"));
}
#[test]
fn test_parse_greater_bang() {
let tokens = crate::token::lexer(">!");
let parser = crate::parser::Parser::new(tokens);
let err = parser.ast().unwrap_err();
// TODO: Better errors when program cannot tokenize.
// https://github.com/KittyCAD/modeling-app/issues/696
assert!(err.to_string().contains("file is empty"));
}
#[test]
fn test_parse_z_percent_parens() {
let tokens = crate::token::lexer("z%)");
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result.err().unwrap().to_string().contains("Unexpected token"));
}
#[test]
fn test_parse_parens_unicode() {
let tokens = crate::token::lexer("");
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
// TODO: Better errors when program cannot tokenize.
// https://github.com/KittyCAD/modeling-app/issues/696
assert!(result.is_err());
}
#[test]
fn test_parse_negative_in_array_binary_expression() {
let tokens = crate::token::lexer(
r#"const leg1 = 5
const thickness = 0.56
const bracket = [-leg2 + thickness, 0]
"#,
);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_ok());
}
#[test]
fn test_parse_nested_open_brackets() {
let tokens = crate::token::lexer(
r#"
z(-[["#,
);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
}
#[test]
fn test_parse_weird_new_line_function() {
let tokens = crate::token::lexer(
r#"z
(--#"#,
);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
// TODO: Better errors when program cannot tokenize.
// https://github.com/KittyCAD/modeling-app/issues/696
assert_eq!(
result.err().unwrap().to_string(),
r#"syntax: KclErrorDetails { source_ranges: [], message: "file is empty" }"#
);
}
#[test]
fn test_parse_weird_lots_of_fancy_brackets() {
let tokens = crate::token::lexer(r#"zz({{{{{{{{)iegAng{{{{{{{##"#);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
// TODO: Better errors when program cannot tokenize.
// https://github.com/KittyCAD/modeling-app/issues/696
assert_eq!(
result.err().unwrap().to_string(),
r#"syntax: KclErrorDetails { source_ranges: [], message: "file is empty" }"#
);
}
#[test]
fn test_parse_weird_close_before_open() {
let tokens = crate::token::lexer(
r#"fn)n
e
["#,
);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result
.err()
.unwrap()
.to_string()
.contains("expected whitespace, found ')' which is brace"));
}
#[test]
fn test_parse_weird_close_before_nada() {
let tokens = crate::token::lexer(r#"fn)n-"#);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result
.err()
.unwrap()
.to_string()
.contains("expected whitespace, found ')' which is brace"));
}
#[test]
fn test_parse_weird_lots_of_slashes() {
let tokens = crate::token::lexer(
r#"J///////////o//+///////////P++++*++++++P///////˟
++4"#,
);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result.err().unwrap().to_string().contains("Unexpected token"));
}
#[test]
fn test_parse_expand_array() {
let code = "const myArray = [0..10]";
let parser = crate::parser::Parser::new(crate::token::lexer(code));
let result = parser.ast().unwrap();
let expected_result = Program {
start: 0,
end: 23,
body: vec![BodyItem::VariableDeclaration(VariableDeclaration {
start: 0,
end: 23,
declarations: vec![VariableDeclarator {
start: 6,
end: 23,
id: Identifier {
start: 6,
end: 13,
name: "myArray".to_string(),
},
init: Value::ArrayExpression(Box::new(ArrayExpression {
start: 16,
end: 23,
elements: vec![
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 0.into(),
raw: "0".to_string(),
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 1.into(),
raw: "1".to_string(),
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 2.into(),
raw: "2".to_string(),
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 3.into(),
raw: "3".to_string(),
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 4.into(),
raw: "4".to_string(),
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 5.into(),
raw: "5".to_string(),
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 6.into(),
raw: "6".to_string(),
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 7.into(),
raw: "7".to_string(),
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 8.into(),
raw: "8".to_string(),
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 9.into(),
raw: "9".to_string(),
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 10.into(),
raw: "10".to_string(),
})),
],
})),
}],
kind: VariableKind::Const,
})],
non_code_meta: NonCodeMeta::default(),
};
assert_eq!(result, expected_result);
}
#[test]
fn test_error_keyword_in_variable() {
let some_program_string = r#"const let = "thing""#;
let tokens = crate::token::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([6, 9])], message: "Cannot assign a variable to a reserved keyword: let" }"#
);
}
#[test]
fn test_error_keyword_in_fn_name() {
let some_program_string = r#"fn let = () {}"#;
let tokens = crate::token::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([3, 6])], message: "Cannot assign a variable to a reserved keyword: let" }"#
);
}
#[test]
fn test_error_stdlib_in_fn_name() {
let some_program_string = r#"fn cos = () => {
return 1
}"#;
let tokens = crate::token::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([3, 6])], message: "Cannot assign a variable to a reserved keyword: cos" }"#
);
}
#[test]
fn test_error_keyword_in_fn_args() {
let some_program_string = r#"fn thing = (let) => {
return 1
}"#;
let tokens = crate::token::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([12, 15])], message: "Cannot assign a variable to a reserved keyword: let" }"#
);
}
#[test]
fn test_error_stdlib_in_fn_args() {
let some_program_string = r#"fn thing = (cos) => {
return 1
}"#;
let tokens = crate::token::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([12, 15])], message: "Cannot assign a variable to a reserved keyword: cos" }"#
);
}
#[test]
fn zero_param_function() {
let program = r#"
fn firstPrimeNumber = () => {
return 2
}
firstPrimeNumber()
"#;
let tokens = crate::token::lexer(program);
let parser = crate::parser::Parser::new(tokens);
let _ast = parser.ast().unwrap();
}
#[test]
fn test_keyword_ok_in_fn_args_return() {
let some_program_string = r#"fn thing = (param) => {
return true
}
thing(false)
"#;
let tokens = crate::token::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
#[test]
fn test_error_define_function_as_var() {
for name in ["var", "let", "const"] {
let some_program_string = format!(
r#"{} thing = (param) => {{
return true
}}
thing(false)
"#,
name
);
let tokens = crate::token::lexer(&some_program_string);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
format!(
r#"syntax: KclErrorDetails {{ source_ranges: [SourceRange([0, {}])], message: "Expected a `fn` variable kind, found: `{}`" }}"#,
name.len(),
name
)
);
}
}
#[test]
fn test_error_define_var_as_function() {
let some_program_string = r#"fn thing = "thing""#;
let tokens = crate::token::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
// TODO: https://github.com/KittyCAD/modeling-app/issues/784
// Improve this error message.
// It should say that the compiler is expecting a function expression on the RHS.
assert_eq!(
result.err().unwrap().to_string(),
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([11, 18])], message: "Unexpected token" }"#
);
}
#[test]
fn test_member_expression_sketch_group() {
let some_program_string = r#"fn cube = (pos, scale) => {
const sg = startSketchOn('XY')
|> startProfileAt(pos, %)
|> line([0, scale], %)
|> line([scale, 0], %)
|> line([0, -scale], %)
return sg
}
const b1 = cube([0,0], 10)
const b2 = cube([3,3], 4)
const pt1 = b1[0]
const pt2 = b2[0]
show(b1)
show(b2)"#;
let tokens = crate::token::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
#[test]
fn test_math_with_stdlib() {
let some_program_string = r#"const d2r = pi() / 2
let other_thing = 2 * cos(3)"#;
let tokens = crate::token::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
#[test]
fn test_negative_arguments() {
let some_program_string = r#"fn box = (p, h, l, w) => {
const myBox = startSketchOn('XY')
|> startProfileAt(p, %)
|> line([0, l], %)
|> line([w, 0], %)
|> line([0, -l], %)
|> close(%)
|> extrude(h, %)
return myBox
}
let myBox = box([0,0], -3, -16, -10)
show(myBox)"#;
let tokens = crate::token::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
#[test]
fn test_math() {
for math_expression in [
"1 + 2",
"1+2",
"1 -2",
"1 + 2 * 3",
"1 * ( 2 + 3 )",
"1 * ( 2 + 3 ) / 4",
"1 + ( 2 + 3 ) / 4",
"1 * (( 2 + 3 ) / 4 + 5 )",
"1 * ((( 2 + 3 )))",
"distance * p * FOS * 6 / (sigmaAllow * width)",
"2 + (((3)))",
] {
let tokens = crate::token::lexer(math_expression);
let _expr = binary_expression.parse(&tokens).unwrap();
}
}
}
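
As a closing illustration of what the backported tests exercise, a hypothetical extra test in the same style as `test_math` above could pin down the nesting that precedence produces for `1 + 2 * 3`: `Add` at the root with the `Mul` expression nested on the right. This is a sketch, not part of the commit, and it assumes only the APIs already used in the tests above (`crate::token::lexer`, `binary_expression.parse`, and the `PartialEq`/`Debug` derives on the AST types):

    #[test]
    fn test_math_nests_by_precedence() {
        // Hypothetical sketch, not part of this commit: `*` binds tighter than `+`,
        // so the multiplication should end up as the right-hand child of the addition.
        let tokens = crate::token::lexer("1 + 2 * 3");
        let expr = binary_expression.parse(&tokens).unwrap();
        assert_eq!(expr.operator, BinaryOperator::Add);
        match expr.right {
            BinaryPart::BinaryExpression(inner) => assert_eq!(inner.operator, BinaryOperator::Mul),
            other => panic!("expected a nested Mul expression, found {other:?}"),
        }
    }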