Compare commits

...

15 Commits

SHA1 Message Date
73fb4e567d Use dispatch
Fixes #839
2023-10-12 00:49:02 -05:00
616b168002 Add another unit test 2023-10-11 23:02:53 -05:00
0fe2fa14e0 Fix another regression 2023-10-11 23:02:52 -05:00
7956b66302 Allow whitespace within open parentheses 2023-10-11 23:02:52 -05:00
48fa65e2e6 Allow whitespace before commas in comma-separated lists 2023-10-11 23:02:52 -05:00
f8d0cb0b92 Empty functions are no longer valid 2023-10-11 23:02:52 -05:00
b7e5a27aae Allow arithmetic on LHS of a pipe expression 2023-10-11 23:02:51 -05:00
643497f6d0 Correct inaccurate test, add CommentStyle to tests 2023-10-11 23:02:51 -05:00
e2ad24360a More little bugfixes
NonCodeMeta can have many nodes in the 'start', not just 0 or 1

Fix double newline after block comments
2023-10-11 23:02:51 -05:00
c3a61f368e Adjust test expectations because they're expecting inaccuracies which have been fixed 2023-10-11 23:02:51 -05:00
0c9250d2c5 Fix custom deserializer for NonCodeMeta 2023-10-11 23:02:50 -05:00
3666f42e41 Fix yarn tests 2023-10-11 23:02:50 -05:00
cd01d1c755 Accept fewer kinds of value on RHS of a |> operator
This yields a SIGNIFICANT speedup
2023-10-11 23:02:50 -05:00
cf177c10a9 Separate benches for parsing and lexing 2023-10-11 23:02:49 -05:00
09615164eb New parser built with Winnow
Fixes #716
2023-10-11 23:02:49 -05:00
17 changed files with 2480 additions and 195 deletions
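
The central data-model change running through these commits is that NonCodeMeta now carries lists of non-code nodes rather than at most one: non_code_nodes maps each body index to a Vec, and start is a Vec instead of an Option. Below is a minimal, self-contained sketch of that shape (not taken from the changed files; NonCodeNode is simplified to a String here) showing why one body index can own several comments:

use std::collections::HashMap;

// Simplified stand-in for the real NonCodeMeta; String replaces NonCodeNode.
#[derive(Default)]
struct NonCodeMeta {
    // Several comments or blank lines can sit between the same pair of body
    // items, so each index maps to a Vec rather than a single node.
    non_code_nodes: HashMap<usize, Vec<String>>,
    // Likewise, more than one node can appear before the first body item.
    start: Vec<String>,
}

impl NonCodeMeta {
    // Mirrors the `insert` helper added in the diffs below.
    fn insert(&mut self, i: usize, new: String) {
        self.non_code_nodes.entry(i).or_default().push(new);
    }
}

fn main() {
    let mut meta = NonCodeMeta::default();
    meta.insert(0, "// first comment".to_owned());
    meta.insert(0, "/* second comment */".to_owned());
    assert_eq!(meta.non_code_nodes[&0].len(), 2);
    assert!(meta.start.is_empty());
}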

View File

@ -73,6 +73,7 @@
"simpleserver": "yarn pretest && http-server ./public --cors -p 3000",
"fmt": "prettier --write ./src",
"fmt-check": "prettier --check ./src",
"build:wasm-dev": "(cd src/wasm-lib && wasm-pack build --dev --target web --out-dir pkg && cargo test -p kcl-lib export_bindings) && cp src/wasm-lib/pkg/wasm_lib_bg.wasm public && yarn fmt",
"build:wasm": "(cd src/wasm-lib && wasm-pack build --target web --out-dir pkg && cargo test -p kcl-lib export_bindings) && cp src/wasm-lib/pkg/wasm_lib_bg.wasm public && yarn fmt",
"build:wasm-clean": "yarn wasm-prep && yarn build:wasm",
"remove-importmeta": "sed -i 's/import.meta.url/window.location.origin/g' \"./src/wasm-lib/pkg/wasm_lib.js\"; sed -i '' 's/import.meta.url/window.location.origin/g' \"./src/wasm-lib/pkg/wasm_lib.js\" || echo \"sed for both mac and linux\"",

View File

@ -29,7 +29,7 @@ class KclManager {
end: 0,
nonCodeMeta: {
nonCodeNodes: {},
start: null,
start: [],
},
}
private _programMemory: ProgramMemory = {
@ -220,7 +220,7 @@ class KclManager {
end: 0,
nonCodeMeta: {
nonCodeNodes: {},
start: null,
start: [],
},
}
this._programMemory = {

View File

@ -141,42 +141,6 @@ const newVar = myVar + 1
})
describe('testing function declaration', () => {
test('fn funcN = () => {}', () => {
const { body } = parse('fn funcN = () => {}')
delete (body[0] as any).declarations[0].init.body.nonCodeMeta
expect(body).toEqual([
{
type: 'VariableDeclaration',
start: 0,
end: 19,
kind: 'fn',
declarations: [
{
type: 'VariableDeclarator',
start: 3,
end: 19,
id: {
type: 'Identifier',
start: 3,
end: 8,
name: 'funcN',
},
init: {
type: 'FunctionExpression',
start: 11,
end: 19,
params: [],
body: {
start: 17,
end: 19,
body: [],
},
},
},
],
},
])
})
test('fn funcN = (a, b) => {return a + b}', () => {
const { body } = parse(
['fn funcN = (a, b) => {', ' return a + b', '}'].join('\n')
@ -1513,22 +1477,23 @@ const key = 'c'`
const nonCodeMetaInstance = {
type: 'NonCodeNode',
start: code.indexOf('\n// this is a comment'),
end: code.indexOf('const key'),
end: code.indexOf('const key') - 1,
value: {
type: 'blockComment',
style: 'line',
value: 'this is a comment',
},
}
const { nonCodeMeta } = parse(code)
expect(nonCodeMeta.nonCodeNodes[0]).toEqual(nonCodeMetaInstance)
expect(nonCodeMeta.nonCodeNodes[0][0]).toEqual(nonCodeMetaInstance)
// extra whitespace won't change its position (0) or value (NB the start and end would have changed though)
const codeWithExtraStartWhitespace = '\n\n\n' + code
const { nonCodeMeta: nonCodeMeta2 } = parse(codeWithExtraStartWhitespace)
expect(nonCodeMeta2.nonCodeNodes[0].value).toStrictEqual(
expect(nonCodeMeta2.nonCodeNodes[0][0].value).toStrictEqual(
nonCodeMetaInstance.value
)
expect(nonCodeMeta2.nonCodeNodes[0].start).not.toBe(
expect(nonCodeMeta2.nonCodeNodes[0][0].start).not.toBe(
nonCodeMetaInstance.start
)
})
@ -1546,12 +1511,13 @@ const key = 'c'`
const indexOfSecondLineToExpression = 2
const sketchNonCodeMeta = (body as any)[0].declarations[0].init.nonCodeMeta
.nonCodeNodes
expect(sketchNonCodeMeta[indexOfSecondLineToExpression]).toEqual({
expect(sketchNonCodeMeta[indexOfSecondLineToExpression][0]).toEqual({
type: 'NonCodeNode',
start: 106,
end: 166,
end: 163,
value: {
type: 'blockComment',
type: 'inlineComment',
style: 'block',
value: 'this is\n a comment\n spanning a few lines',
},
})
@ -1568,14 +1534,15 @@ const key = 'c'`
const { body } = parse(code)
const sketchNonCodeMeta = (body[0] as any).declarations[0].init.nonCodeMeta
.nonCodeNodes
expect(sketchNonCodeMeta[3]).toEqual({
.nonCodeNodes[3][0]
expect(sketchNonCodeMeta).toEqual({
type: 'NonCodeNode',
start: 125,
end: 141,
end: 138,
value: {
type: 'blockComment',
value: 'a comment',
style: 'line',
},
})
})
@ -1693,11 +1660,7 @@ describe('parsing errors', () => {
}
const theError = _theError as any
expect(theError).toEqual(
new KCLError(
'unexpected',
'Unexpected token Token { token_type: Brace, start: 29, end: 30, value: "}" }',
[[29, 30]]
)
new KCLError('syntax', 'Unexpected token', [[27, 28]])
)
})
})

View File

@ -104,7 +104,7 @@ describe('Testing addSketchTo', () => {
body: [],
start: 0,
end: 0,
nonCodeMeta: { nonCodeNodes: {}, start: null },
nonCodeMeta: { nonCodeNodes: {}, start: [] },
},
'yz'
)

View File

@ -540,7 +540,7 @@ export function createPipeExpression(
start: 0,
end: 0,
body,
nonCodeMeta: { nonCodeNodes: {}, start: null },
nonCodeMeta: { nonCodeNodes: {}, start: [] },
}
}

View File

@ -272,21 +272,20 @@ const mySk1 = startSketchAt([0, 0])
`
const { ast } = code2ast(code)
const recasted = recast(ast)
expect(recasted).toBe(`// comment at start
expect(recasted).toBe(`/* comment at start */
const mySk1 = startSketchAt([0, 0])
|> lineTo([1, 1], %)
// comment here
|> lineTo({ to: [0, 1], tag: 'myTag' }, %)
|> lineTo([1, 1], %)
/* and
here
a comment between pipe expression statements */
|> lineTo([1, 1], %) /* and
here */
// a comment between pipe expression statements
|> rx(90, %)
// and another with just white space between others below
|> ry(45, %)
|> rx(45, %)
// one more for good measure
/* one more for good measure */
`)
})
})

View File

@ -100,7 +100,7 @@ describe('testing changeSketchArguments', () => {
|> startProfileAt([0, 0], %)
|> ${line}
|> lineTo([0.46, -5.82], %)
// |> rx(45, %)
// |> rx(45, %)
show(mySketch001)
`
const code = genCode(lineToChange)

View File

@ -257,7 +257,7 @@ export async function executeCode({
body: [],
nonCodeMeta: {
nonCodeNodes: {},
start: null,
start: [],
},
},
}

View File

@ -6,36 +6,30 @@ pub fn bench_lex(c: &mut Criterion) {
c.bench_function("lex_pipes_on_pipes", |b| b.iter(|| lex(PIPES_PROGRAM)));
}
pub fn bench_lex_parse(c: &mut Criterion) {
c.bench_function("parse_lex_cube", |b| b.iter(|| lex_and_parse(CUBE_PROGRAM)));
c.bench_function("parse_lex_big_kitt", |b| b.iter(|| lex_and_parse(KITT_PROGRAM)));
c.bench_function("parse_lex_pipes_on_pipes", |b| b.iter(|| lex_and_parse(PIPES_PROGRAM)));
pub fn bench_parse(c: &mut Criterion) {
for (name, file) in [
("pipes_on_pipes", PIPES_PROGRAM),
("big_kitt", KITT_PROGRAM),
("cube", CUBE_PROGRAM),
] {
let tokens = kcl_lib::token::lexer(file);
c.bench_function(&format!("parse_{name}"), move |b| {
let tok = tokens.clone();
b.iter(move || {
let parser = kcl_lib::parser::Parser::new(tok.clone());
black_box(parser.ast().unwrap());
})
});
}
}
fn lex(program: &str) {
black_box(kcl_lib::token::lexer(program));
}
fn lex_and_parse(program: &str) {
let tokens = kcl_lib::token::lexer(program);
let parser = kcl_lib::parser::Parser::new(tokens);
black_box(parser.ast().unwrap());
}
criterion_group!(benches, bench_lex, bench_lex_parse);
criterion_group!(benches, bench_lex, bench_parse);
criterion_main!(benches);
const KITT_PROGRAM: &str = include_str!("../../tests/executor/inputs/kittycad_svg.kcl");
const PIPES_PROGRAM: &str = include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl");
const CUBE_PROGRAM: &str = r#"fn cube = (pos, scale) => {
const sg = startSketchAt(pos)
|> line([0, scale], %)
|> line([scale, 0], %)
|> line([0, -scale], %)
return sg
}
const b1 = cube([0,0], 10)
const pt1 = b1[0]
show(b1)"#;
const CUBE_PROGRAM: &str = include_str!("../../tests/executor/inputs/cube.kcl");

View File

@ -63,10 +63,14 @@ impl Program {
.fold(String::new(), |mut output, (index, recast_str)| {
let start_string = if index == 0 {
// We need to indent.
if let Some(start) = self.non_code_meta.start.clone() {
start.format(&indentation)
} else {
if self.non_code_meta.start.is_empty() {
indentation.to_string()
} else {
self.non_code_meta
.start
.iter()
.map(|start| start.format(&indentation))
.collect()
}
} else {
// Do nothing, we already applied the indentation elsewhere.
@ -82,7 +86,10 @@ impl Program {
};
let custom_white_space_or_comment = match self.non_code_meta.non_code_nodes.get(&index) {
Some(custom_white_space_or_comment) => custom_white_space_or_comment.format(&indentation),
Some(noncodes) => noncodes
.iter()
.map(|custom_white_space_or_comment| custom_white_space_or_comment.format(&indentation))
.collect::<String>(),
None => String::new(),
};
let end_string = if custom_white_space_or_comment.is_empty() {
@ -707,30 +714,35 @@ pub struct NonCodeNode {
impl NonCodeNode {
pub fn value(&self) -> String {
match &self.value {
NonCodeValue::InlineComment { value } => value.clone(),
NonCodeValue::BlockComment { value } => value.clone(),
NonCodeValue::NewLineBlockComment { value } => value.clone(),
NonCodeValue::InlineComment { value, style: _ } => value.clone(),
NonCodeValue::BlockComment { value, style: _ } => value.clone(),
NonCodeValue::NewLineBlockComment { value, style: _ } => value.clone(),
NonCodeValue::NewLine => "\n\n".to_string(),
}
}
pub fn format(&self, indentation: &str) -> String {
match &self.value {
NonCodeValue::InlineComment { value } => format!(" // {}\n", value),
NonCodeValue::BlockComment { value } => {
NonCodeValue::InlineComment {
value,
style: CommentStyle::Line,
} => format!(" // {}\n", value),
NonCodeValue::InlineComment {
value,
style: CommentStyle::Block,
} => format!(" /* {} */", value),
NonCodeValue::BlockComment { value, style } => {
let add_start_new_line = if self.start == 0 { "" } else { "\n" };
if value.contains('\n') {
format!("{}{}/* {} */\n", add_start_new_line, indentation, value)
} else {
format!("{}{}// {}\n", add_start_new_line, indentation, value)
match style {
CommentStyle::Block => format!("{}{}/* {} */", add_start_new_line, indentation, value),
CommentStyle::Line => format!("{}{}// {}\n", add_start_new_line, indentation, value),
}
}
NonCodeValue::NewLineBlockComment { value } => {
NonCodeValue::NewLineBlockComment { value, style } => {
let add_start_new_line = if self.start == 0 { "" } else { "\n\n" };
if value.contains('\n') {
format!("{}{}/* {} */\n", add_start_new_line, indentation, value)
} else {
format!("{}{}// {}\n", add_start_new_line, indentation, value)
match style {
CommentStyle::Block => format!("{}{}/* {} */\n", add_start_new_line, indentation, value),
CommentStyle::Line => format!("{}{}// {}\n", add_start_new_line, indentation, value),
}
}
NonCodeValue::NewLine => "\n\n".to_string(),
@ -738,14 +750,27 @@ impl NonCodeNode {
}
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub enum CommentStyle {
/// Like // foo
Line,
/// Like /* foo */
Block,
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(tag = "type", rename_all = "camelCase")]
pub enum NonCodeValue {
/// An inline comment.
/// An example of this is the following: `1 + 1 // This is an inline comment`.
/// Here are examples:
/// `1 + 1 // This is an inline comment`.
/// `1 + 1 /* Here's another */`.
InlineComment {
value: String,
style: CommentStyle,
},
/// A block comment.
/// An example of this is the following:
@ -759,11 +784,13 @@ pub enum NonCodeValue {
/// If it did it would be a `NewLineBlockComment`.
BlockComment {
value: String,
style: CommentStyle,
},
/// A block comment that has a new line above it.
/// The user explicitly added a new line above the block comment.
NewLineBlockComment {
value: String,
style: CommentStyle,
},
// A new line like `\n\n` NOT a new line like `\n`.
// This is also not a comment.
@ -774,8 +801,8 @@ pub enum NonCodeValue {
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct NonCodeMeta {
pub non_code_nodes: HashMap<usize, NonCodeNode>,
pub start: Option<NonCodeNode>,
pub non_code_nodes: HashMap<usize, Vec<NonCodeNode>>,
pub start: Vec<NonCodeNode>,
}
// implement Deserialize manually because we need to force the keys of non_code_nodes to be usize
@ -788,15 +815,16 @@ impl<'de> Deserialize<'de> for NonCodeMeta {
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct NonCodeMetaHelper {
non_code_nodes: HashMap<String, NonCodeNode>,
start: Option<NonCodeNode>,
non_code_nodes: HashMap<String, Vec<NonCodeNode>>,
start: Vec<NonCodeNode>,
}
let helper = NonCodeMetaHelper::deserialize(deserializer)?;
let mut non_code_nodes = HashMap::new();
for (key, value) in helper.non_code_nodes {
non_code_nodes.insert(key.parse().map_err(serde::de::Error::custom)?, value);
}
let non_code_nodes = helper
.non_code_nodes
.into_iter()
.map(|(key, value)| Ok((key.parse().map_err(serde::de::Error::custom)?, value)))
.collect::<Result<HashMap<_, _>, _>>()?;
Ok(NonCodeMeta {
non_code_nodes,
start: helper.start,
@ -804,6 +832,12 @@ impl<'de> Deserialize<'de> for NonCodeMeta {
}
}
impl NonCodeMeta {
pub fn insert(&mut self, i: usize, new: NonCodeNode) {
self.non_code_nodes.entry(i).or_default().push(new);
}
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(tag = "type")]
@ -2385,7 +2419,9 @@ impl PipeExpression {
let mut s = statement.recast(options, indentation_level + 1, true);
let non_code_meta = self.non_code_meta.clone();
if let Some(non_code_meta_value) = non_code_meta.non_code_nodes.get(&index) {
s += non_code_meta_value.format(&indentation).trim_end_matches('\n')
for val in non_code_meta_value {
s += val.format(&indentation).trim_end_matches('\n')
}
}
if index != self.body.len() - 1 {
@ -2869,13 +2905,32 @@ show(part001)"#;
recasted,
r#"fn myFn = () => {
// this is a comment
const yo = { a: { b: { c: '123' } } }
/* block
const yo = { a: { b: { c: '123' } } } /* block
comment */
const key = 'c'
// this is also a comment
return things
}
"#
);
}
#[test]
fn test_recast_comment_at_start() {
let test_program = r#"
/* comment at start */
const mySk1 = startSketchAt([0, 0])"#;
let tokens = crate::token::lexer(test_program);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
recasted,
r#"/* comment at start */
const mySk1 = startSketchAt([0, 0])
"#
);
}
@ -2913,14 +2968,13 @@ const mySk1 = startSketchOn('XY')
|> lineTo({ to: [0, 1], tag: 'myTag' }, %)
|> lineTo([1, 1], %)
/* and
here
a comment between pipe expression statements */
here */
// a comment between pipe expression statements
|> rx(90, %)
// and another with just white space between others below
|> ry(45, %)
|> rx(45, %)
// one more for good measure
// one more for good measure
"#
);
}
@ -2988,16 +3042,19 @@ const things = "things"
let program = parser.ast().unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted.trim(), some_program_string.trim());
let expected = some_program_string.trim();
// Currently the new parser removes an empty line
let actual = recasted.trim();
assert_eq!(actual, expected);
}
#[test]
fn test_recast_comment_tokens_inside_strings() {
let some_program_string = r#"let b = {
"end": 141,
"start": 125,
"type": "NonCodeNode",
"value": "
end: 141,
start: 125,
type: "NonCodeNode",
value: "
// a comment
"
}"#;

View File

@ -4,7 +4,7 @@ use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity};
use crate::executor::SourceRange;
#[derive(Error, Debug, Serialize, Deserialize, ts_rs::TS)]
#[derive(Error, Debug, Serialize, Deserialize, ts_rs::TS, Clone)]
#[ts(export)]
#[serde(tag = "kind", rename_all = "snake_case")]
pub enum KclError {
@ -28,7 +28,7 @@ pub enum KclError {
Engine(KclErrorDetails),
}
#[derive(Debug, Serialize, Deserialize, ts_rs::TS)]
#[derive(Debug, Serialize, Deserialize, ts_rs::TS, Clone)]
#[ts(export)]
pub struct KclErrorDetails {
#[serde(rename = "sourceRanges")]
@ -78,6 +78,22 @@ impl KclError {
KclError::Engine(e) => e.source_ranges.clone(),
}
}
/// Get the inner error message.
pub fn message(&self) -> &str {
match &self {
KclError::Syntax(e) => &e.message,
KclError::Semantic(e) => &e.message,
KclError::Type(e) => &e.message,
KclError::Unimplemented(e) => &e.message,
KclError::Unexpected(e) => &e.message,
KclError::ValueAlreadyDefined(e) => &e.message,
KclError::UndefinedValue(e) => &e.message,
KclError::InvalidExpression(e) => &e.message,
KclError::Engine(e) => &e.message,
}
}
pub fn to_lsp_diagnostic(&self, code: &str) -> Diagnostic {
let (message, _, _) = self.get_message_line_column(code);
let source_ranges = self.source_ranges();

View File

@ -2,7 +2,7 @@ use std::{collections::HashMap, str::FromStr};
use crate::{
ast::types::{
ArrayExpression, BinaryExpression, BinaryPart, BodyItem, CallExpression, ExpressionStatement,
ArrayExpression, BinaryExpression, BinaryPart, BodyItem, CallExpression, CommentStyle, ExpressionStatement,
FunctionExpression, Identifier, Literal, LiteralIdentifier, MemberExpression, MemberObject, NonCodeMeta,
NonCodeNode, NonCodeValue, ObjectExpression, ObjectKeyInfo, ObjectProperty, PipeExpression, PipeSubstitution,
Program, ReturnStatement, UnaryExpression, UnaryOperator, Value, VariableDeclaration, VariableDeclarator,
@ -13,6 +13,8 @@ use crate::{
token::{Token, TokenType},
};
mod parser_impl;
pub const PIPE_SUBSTITUTION_OPERATOR: &str = "%";
pub const PIPE_OPERATOR: &str = "|>";
@ -179,13 +181,19 @@ impl Parser {
Ok(token)
}
/// Use the new Winnow parser.
pub fn ast(&self) -> Result<Program, KclError> {
parser_impl::run_parser(&mut self.tokens.as_slice())
}
/// Use the old handwritten recursive parser.
pub fn ast_old(&self) -> Result<Program, KclError> {
let body = self.make_body(
0,
vec![],
NonCodeMeta {
non_code_nodes: HashMap::new(),
start: None,
start: Vec::new(),
},
)?;
let end = match self.get_token(body.last_index) {
@ -209,7 +217,7 @@ impl Parser {
})
}
pub fn make_literal(&self, index: usize) -> Result<Literal, KclError> {
fn make_literal(&self, index: usize) -> Result<Literal, KclError> {
let token = self.get_token(index)?;
let value = if token.token_type == TokenType::Number {
if let Ok(value) = token.value.parse::<i64>() {
@ -295,6 +303,11 @@ impl Parser {
));
}
let is_block_style = non_code_tokens
.first()
.map(|tok| matches!(tok.token_type, TokenType::BlockComment))
.unwrap_or_default();
let full_string = non_code_tokens
.iter()
.map(|t| {
@ -336,11 +349,32 @@ impl Parser {
value: if start_end_string.starts_with("\n\n") && is_new_line_comment {
// Preserve if they want a whitespace line before the comment.
// But let's just allow one.
NonCodeValue::NewLineBlockComment { value: full_string }
} else if is_new_line_comment {
NonCodeValue::BlockComment { value: full_string }
NonCodeValue::NewLineBlockComment {
value: full_string,
style: if is_block_style {
CommentStyle::Block
} else {
NonCodeValue::InlineComment { value: full_string }
CommentStyle::Line
},
}
} else if is_new_line_comment {
NonCodeValue::BlockComment {
value: full_string,
style: if is_block_style {
CommentStyle::Block
} else {
CommentStyle::Line
},
}
} else {
NonCodeValue::InlineComment {
value: full_string,
style: if is_block_style {
CommentStyle::Block
} else {
CommentStyle::Line
},
}
},
};
Ok((Some(node), end_index - 1))
@ -1033,7 +1067,7 @@ impl Parser {
let non_code_meta = match previous_non_code_meta {
Some(meta) => meta,
None => NonCodeMeta {
start: None,
start: Vec::new(),
non_code_nodes: HashMap::new(),
},
};
@ -1064,7 +1098,7 @@ impl Parser {
let mut _non_code_meta: NonCodeMeta;
if let Some(node) = next_pipe.non_code_node {
_non_code_meta = non_code_meta;
_non_code_meta.non_code_nodes.insert(previous_values.len(), node);
_non_code_meta.insert(previous_values.len(), node);
} else {
_non_code_meta = non_code_meta;
}
@ -1435,7 +1469,7 @@ impl Parser {
self.make_params(next_brace_or_comma_token.index, _previous_params)
}
pub fn make_unary_expression(&self, index: usize) -> Result<UnaryExpressionResult, KclError> {
fn make_unary_expression(&self, index: usize) -> Result<UnaryExpressionResult, KclError> {
let current_token = self.get_token(index)?;
let next_token = self.next_meaningful_token(index, None)?;
if next_token.token.is_none() {
@ -1631,9 +1665,11 @@ impl Parser {
let next_token = self.next_meaningful_token(token_index, Some(0))?;
if let Some(node) = &next_token.non_code_node {
if previous_body.is_empty() {
non_code_meta.start = next_token.non_code_node;
if let Some(next) = next_token.non_code_node {
non_code_meta.start.push(next);
}
} else {
non_code_meta.non_code_nodes.insert(previous_body.len(), node.clone());
non_code_meta.insert(previous_body.len(), node.clone());
}
}
return self.make_body(next_token.index, previous_body, non_code_meta);
@ -1641,14 +1677,14 @@ impl Parser {
let next = self.next_meaningful_token(token_index, None)?;
if let Some(node) = &next.non_code_node {
non_code_meta.non_code_nodes.insert(previous_body.len(), node.clone());
non_code_meta.insert(previous_body.len(), node.clone());
}
if token.token_type == TokenType::Keyword && VariableKind::from_str(&token.value).is_ok() {
let declaration = self.make_variable_declaration(token_index)?;
let next_thing = self.next_meaningful_token(declaration.last_index, None)?;
if let Some(node) = &next_thing.non_code_node {
non_code_meta.non_code_nodes.insert(previous_body.len(), node.clone());
non_code_meta.insert(previous_body.len(), node.clone());
}
let mut _previous_body = previous_body;
_previous_body.push(BodyItem::VariableDeclaration(VariableDeclaration {
@ -1669,7 +1705,7 @@ impl Parser {
let statement = self.make_return_statement(token_index)?;
let next_thing = self.next_meaningful_token(statement.last_index, None)?;
if let Some(node) = &next_thing.non_code_node {
non_code_meta.non_code_nodes.insert(previous_body.len(), node.clone());
non_code_meta.insert(previous_body.len(), node.clone());
}
let mut _previous_body = previous_body;
_previous_body.push(BodyItem::ReturnStatement(ReturnStatement {
@ -1693,7 +1729,7 @@ impl Parser {
let expression = self.make_expression_statement(token_index)?;
let next_thing = self.next_meaningful_token(expression.last_index, None)?;
if let Some(node) = &next_thing.non_code_node {
non_code_meta.non_code_nodes.insert(previous_body.len(), node.clone());
non_code_meta.insert(previous_body.len(), node.clone());
}
let mut _previous_body = previous_body;
_previous_body.push(BodyItem::ExpressionStatement(ExpressionStatement {
@ -1716,7 +1752,7 @@ impl Parser {
&& next_thing_token.token_type == TokenType::Operator
{
if let Some(node) = &next_thing.non_code_node {
non_code_meta.non_code_nodes.insert(previous_body.len(), node.clone());
non_code_meta.insert(previous_body.len(), node.clone());
}
let expression = self.make_expression_statement(token_index)?;
let mut _previous_body = previous_body;
@ -1749,7 +1785,7 @@ impl Parser {
last_index: next_token_index,
non_code_meta: NonCodeMeta {
non_code_nodes: HashMap::new(),
start: None,
start: Vec::new(),
},
}
} else {
@ -1758,7 +1794,7 @@ impl Parser {
vec![],
NonCodeMeta {
non_code_nodes: HashMap::new(),
start: None,
start: Vec::new(),
},
)?
};
@ -1913,6 +1949,7 @@ const key = 'c'"#,
end: 60,
value: NonCodeValue::BlockComment {
value: "this is a comment".to_string(),
style: CommentStyle::Line,
},
}),
31,
@ -1966,6 +2003,35 @@ const key = 'c'"#,
);
}
#[test]
fn test_math_parse() {
let tokens = crate::token::lexer(r#"5 + "a""#);
let actual = Parser::new(tokens).ast().unwrap().body;
let expr = BinaryExpression {
start: 0,
end: 7,
operator: BinaryOperator::Add,
left: BinaryPart::Literal(Box::new(Literal {
start: 0,
end: 1,
value: serde_json::Value::Number(serde_json::Number::from(5)),
raw: "5".to_owned(),
})),
right: BinaryPart::Literal(Box::new(Literal {
start: 4,
end: 7,
value: serde_json::Value::String("a".to_owned()),
raw: r#""a""#.to_owned(),
})),
};
let expected = vec![BodyItem::ExpressionStatement(ExpressionStatement {
start: 0,
end: 7,
expression: Value::BinaryExpression(Box::new(expr)),
})];
assert_eq!(expected, actual);
}
#[test]
fn test_is_code_token() {
let tokens = [
@ -2600,7 +2666,7 @@ show(mySk1)"#;
vec![],
NonCodeMeta {
non_code_nodes: HashMap::new(),
start: None,
start: Vec::new(),
},
)
.unwrap();
@ -2636,10 +2702,7 @@ show(mySk1)"#;
})),
})),
})],
non_code_meta: NonCodeMeta {
non_code_nodes: Default::default(),
start: None,
},
non_code_meta: NonCodeMeta::default(),
};
assert_eq!(result, expected_result);
@ -2812,10 +2875,6 @@ z(-[["#,
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([1, 2])], message: "missing a closing brace for the function call" }"#
);
}
#[test]
@ -2831,7 +2890,7 @@ z(-[["#,
// https://github.com/KittyCAD/modeling-app/issues/696
assert_eq!(
result.err().unwrap().to_string(),
r#"semantic: KclErrorDetails { source_ranges: [], message: "file is empty" }"#
r#"syntax: KclErrorDetails { source_ranges: [], message: "file is empty" }"#
);
}
@ -2845,7 +2904,7 @@ z(-[["#,
// https://github.com/KittyCAD/modeling-app/issues/696
assert_eq!(
result.err().unwrap().to_string(),
r#"semantic: KclErrorDetails { source_ranges: [], message: "file is empty" }"#
r#"syntax: KclErrorDetails { source_ranges: [], message: "file is empty" }"#
);
}
@ -2863,7 +2922,7 @@ e
.err()
.unwrap()
.to_string()
.contains("expected to be started on a identifier or literal"));
.contains("expected whitespace, found ')' which is brace"));
}
#[test]
@ -2872,7 +2931,11 @@ e
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result.err().unwrap().to_string().contains("expected another token"));
assert!(result
.err()
.unwrap()
.to_string()
.contains("expected whitespace, found ')' which is brace"));
}
#[test]
@ -2884,11 +2947,7 @@ e
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result
.err()
.unwrap()
.to_string()
.contains("unexpected end of expression"));
assert!(result.err().unwrap().to_string().contains("Unexpected token"));
}
#[test]
@ -2985,10 +3044,7 @@ e
}],
kind: VariableKind::Const,
})],
non_code_meta: NonCodeMeta {
non_code_nodes: Default::default(),
start: None,
},
non_code_meta: NonCodeMeta::default(),
};
assert_eq!(result, expected_result);
@ -3022,7 +3078,9 @@ e
#[test]
fn test_error_stdlib_in_fn_name() {
let some_program_string = r#"fn cos = () {}"#;
let some_program_string = r#"fn cos = () => {
return 1
}"#;
let tokens = crate::token::lexer(some_program_string);
let parser = Parser::new(tokens);
let result = parser.ast();
@ -3123,9 +3181,12 @@ thing(false)
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
// TODO: https://github.com/KittyCAD/modeling-app/issues/784
// Improve this error message.
// It should say that the compiler is expecting a function expression on the RHS.
assert_eq!(
result.err().unwrap().to_string(),
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([0, 2])], message: "Expected a `let` variable kind, found: `fn`" }"#
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([11, 18])], message: "Unexpected token" }"#
);
}
@ -3163,15 +3224,6 @@ let other_thing = 2 * cos(3)"#;
parser.ast().unwrap();
}
#[test]
fn test_parse_pipes_on_pipes() {
let code = include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl");
let tokens = crate::token::lexer(code);
let parser = Parser::new(tokens);
parser.ast().unwrap();
}
#[test]
fn test_negative_arguments() {
let some_program_string = r#"fn box = (p, h, l, w) => {

File diff suppressed because it is too large

View File

@ -0,0 +1,107 @@
use winnow::error::{ErrorKind, ParseError, StrContext};
use crate::{
errors::{KclError, KclErrorDetails},
token::Token,
};
/// Accumulate context while backtracking errors
/// Very similar to [`winnow::error::ContextError`] type,
/// but the 'cause' field is always a [`KclError`],
/// instead of a dynamic [`std::error::Error`] trait object.
#[derive(Debug, Clone)]
pub struct ContextError<C = StrContext> {
pub context: Vec<C>,
pub cause: Option<KclError>,
}
impl From<ParseError<&[Token], ContextError>> for KclError {
fn from(err: ParseError<&[Token], ContextError>) -> Self {
let Some(last_token) = err.input().last() else {
return KclError::Syntax(KclErrorDetails {
source_ranges: Default::default(),
message: "file is empty".to_owned(),
});
};
let (input, offset, err) = (err.input().to_vec(), err.offset(), err.into_inner());
if let Some(e) = err.cause {
return e;
}
// See docs on `offset`.
if offset >= input.len() {
let context = err.context.first();
return KclError::Syntax(KclErrorDetails {
source_ranges: last_token.as_source_ranges(),
message: match context {
Some(what) => format!("Unexpected end of file. The compiler {what}"),
None => "Unexpected end of file while still parsing".to_owned(),
},
});
}
let bad_token = &input[offset];
// TODO: Add the Winnow parser context to the error.
// See https://github.com/KittyCAD/modeling-app/issues/784
KclError::Syntax(KclErrorDetails {
source_ranges: bad_token.as_source_ranges(),
message: "Unexpected token".to_owned(),
})
}
}
impl<C> From<KclError> for ContextError<C> {
fn from(e: KclError) -> Self {
Self {
context: Default::default(),
cause: Some(e),
}
}
}
impl<C> std::default::Default for ContextError<C> {
fn default() -> Self {
Self {
context: Default::default(),
cause: None,
}
}
}
impl<I, C> winnow::error::ParserError<I> for ContextError<C> {
#[inline]
fn from_error_kind(_input: &I, _kind: ErrorKind) -> Self {
Self::default()
}
#[inline]
fn append(self, _input: &I, _kind: ErrorKind) -> Self {
self
}
#[inline]
fn or(self, other: Self) -> Self {
other
}
}
impl<C, I> winnow::error::AddContext<I, C> for ContextError<C> {
#[inline]
fn add_context(mut self, _input: &I, ctx: C) -> Self {
self.context.push(ctx);
self
}
}
impl<C, I> winnow::error::FromExternalError<I, KclError> for ContextError<C> {
#[inline]
fn from_external_error(_input: &I, _kind: ErrorKind, e: KclError) -> Self {
let mut err = Self::default();
{
err.cause = Some(e);
}
err
}
}
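
For context on how this error type is exercised: winnow parsers are normally driven through Parser::parse, which returns the ParseError that the From impl above turns into a KclError. The following is a minimal, self-contained winnow example of that entry-point pattern, assuming the winnow 0.5 API and deliberately not using the repo's own types:

use winnow::{ascii::digit1, PResult, Parser};

// Toy parser: read one unsigned integer from the input.
fn number(i: &mut &str) -> PResult<u64> {
    digit1.parse_to().parse_next(i)
}

fn main() {
    // `.parse()` requires the whole input to be consumed and returns a
    // ParseError on failure; the real code maps that error into a KclError.
    assert_eq!(number.parse("42").unwrap(), 42);
    assert!(number.parse("42abc").is_err());
}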

View File

@ -6,6 +6,8 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use tower_lsp::lsp_types::SemanticTokenType;
use crate::{ast::types::VariableKind, executor::SourceRange};
mod tokeniser;
/// The types of tokens.
@ -142,15 +144,39 @@ impl Token {
TokenType::Whitespace | TokenType::LineComment | TokenType::BlockComment
)
}
pub fn as_source_range(&self) -> SourceRange {
SourceRange([self.start, self.end])
}
pub fn as_source_ranges(&self) -> Vec<SourceRange> {
vec![self.as_source_range()]
}
/// Is this token the beginning of a variable/function declaration?
/// If so, what kind?
/// If not, returns None.
pub fn declaration_keyword(&self) -> Option<VariableKind> {
if !matches!(self.token_type, TokenType::Keyword) {
return None;
}
Some(match self.value.as_str() {
"var" => VariableKind::Var,
"let" => VariableKind::Let,
"fn" => VariableKind::Fn,
"const" => VariableKind::Const,
_ => return None,
})
}
}
impl From<Token> for crate::executor::SourceRange {
impl From<Token> for SourceRange {
fn from(token: Token) -> Self {
Self([token.start, token.end])
}
}
impl From<&Token> for crate::executor::SourceRange {
impl From<&Token> for SourceRange {
fn from(token: &Token) -> Self {
Self([token.start, token.end])
}
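
A small self-contained illustration of the new declaration_keyword contract, using simplified stand-ins for Token and VariableKind rather than the repo's types:

// Stand-ins for the real Token / VariableKind types.
#[derive(Debug, PartialEq)]
enum VariableKind { Var, Let, Fn, Const }

struct Token { is_keyword: bool, value: String }

impl Token {
    // Mirrors Token::declaration_keyword above: only keyword tokens whose
    // value names a declaration kind map to Some(VariableKind).
    fn declaration_keyword(&self) -> Option<VariableKind> {
        if !self.is_keyword {
            return None;
        }
        Some(match self.value.as_str() {
            "var" => VariableKind::Var,
            "let" => VariableKind::Let,
            "fn" => VariableKind::Fn,
            "const" => VariableKind::Const,
            _ => return None,
        })
    }
}

fn main() {
    let decl = Token { is_keyword: true, value: "fn".to_owned() };
    assert_eq!(decl.declaration_keyword(), Some(VariableKind::Fn));
    let not_a_decl = Token { is_keyword: true, value: "return".to_owned() };
    assert_eq!(not_a_decl.declaration_keyword(), None);
}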

View File

@ -0,0 +1,12 @@
fn cube = (pos, scale) => {
const sg = startSketchAt(pos)
|> line([0, scale], %)
|> line([scale, 0], %)
|> line([0, -scale], %)
return sg
}
const b1 = cube([0,0], 10)
const pt1 = b1[0]
show(b1)

View File

@ -87,7 +87,7 @@ const fnBox = box(3, 6, 10)
show(fnBox)"#;
let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/function_sketch.png", &result, 1.0);
twenty_twenty::assert_image("tests/executor/outputs/function_sketch.png", &result, 0.999);
}
#[tokio::test(flavor = "multi_thread")]
@ -107,7 +107,11 @@ async fn serial_test_execute_with_function_sketch_with_position() {
show(box([0,0], 3, 6, 10))"#;
let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/function_sketch_with_position.png", &result, 1.0);
twenty_twenty::assert_image(
"tests/executor/outputs/function_sketch_with_position.png",
&result,
0.999,
);
}
#[tokio::test(flavor = "multi_thread")]
@ -125,7 +129,7 @@ async fn serial_test_execute_with_angled_line() {
show(part001)"#;
let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/angled_line.png", &result, 1.0);
twenty_twenty::assert_image("tests/executor/outputs/angled_line.png", &result, 0.999);
}
#[tokio::test(flavor = "multi_thread")]
@ -152,7 +156,7 @@ const bracket = startSketchOn('XY')
show(bracket)"#;
let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/parametric.png", &result, 1.0);
twenty_twenty::assert_image("tests/executor/outputs/parametric.png", &result, 0.999);
}
#[tokio::test(flavor = "multi_thread")]
@ -187,7 +191,7 @@ const bracket = startSketchAt([0, 0])
show(bracket)"#;
let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/parametric_with_tan_arc.png", &result, 1.0);
twenty_twenty::assert_image("tests/executor/outputs/parametric_with_tan_arc.png", &result, 0.999);
}
#[tokio::test(flavor = "multi_thread")]
@ -215,7 +219,7 @@ async fn serial_test_execute_pipes_on_pipes() {
let code = include_str!("inputs/pipes_on_pipes.kcl");
let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/pipes_on_pipes.png", &result, 1.0);
twenty_twenty::assert_image("tests/executor/outputs/pipes_on_pipes.png", &result, 0.999);
}
#[tokio::test(flavor = "multi_thread")]
@ -223,7 +227,7 @@ async fn serial_test_execute_kittycad_svg() {
let code = include_str!("inputs/kittycad_svg.kcl");
let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/kittycad_svg.png", &result, 1.0);
twenty_twenty::assert_image("tests/executor/outputs/kittycad_svg.png", &result, 0.999);
}
#[tokio::test(flavor = "multi_thread")]
@ -270,7 +274,7 @@ const body = startSketchOn('XY')
show(body)"#;
let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/close_arc.png", &result, 1.0);
twenty_twenty::assert_image("tests/executor/outputs/close_arc.png", &result, 0.999);
}
#[tokio::test(flavor = "multi_thread")]
@ -296,7 +300,7 @@ let thing = box(-12, -15, 10)
box(-20, -5, 10)"#;
let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/negative_args.png", &result, 1.0);
twenty_twenty::assert_image("tests/executor/outputs/negative_args.png", &result, 0.999);
}
#[tokio::test(flavor = "multi_thread")]
@ -309,7 +313,7 @@ async fn test_basic_tangental_arc() {
"#;
let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/tangental_arc.png", &result, 1.0);
twenty_twenty::assert_image("tests/executor/outputs/tangental_arc.png", &result, 0.999);
}
#[tokio::test(flavor = "multi_thread")]
@ -322,7 +326,7 @@ async fn test_basic_tangental_arc_with_point() {
"#;
let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/tangental_arc_with_point.png", &result, 1.0);
twenty_twenty::assert_image("tests/executor/outputs/tangental_arc_with_point.png", &result, 0.999);
}
#[tokio::test(flavor = "multi_thread")]
@ -335,7 +339,7 @@ async fn test_basic_tangental_arc_to() {
"#;
let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/tangental_arc_to.png", &result, 1.0);
twenty_twenty::assert_image("tests/executor/outputs/tangental_arc_to.png", &result, 0.999);
}
#[tokio::test(flavor = "multi_thread")]
@ -362,7 +366,11 @@ let thing = box(-12, -15, 10, 'yz')
box(-20, -5, 10, 'xy')"#;
let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/different_planes_same_drawing.png", &result, 1.0);
twenty_twenty::assert_image(
"tests/executor/outputs/different_planes_same_drawing.png",
&result,
0.999,
);
}
#[tokio::test(flavor = "multi_thread")]
@ -421,5 +429,5 @@ const part004 = startSketchOn('YZ')
"#;
let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/lots_of_planes.png", &result, 1.0);
twenty_twenty::assert_image("tests/executor/outputs/lots_of_planes.png", &result, 0.999);
}