Compare commits

...

15 Commits

Author SHA1 Message Date
73fb4e567d Use dispatch
Fixes #839
2023-10-12 00:49:02 -05:00
616b168002 Add another unit test 2023-10-11 23:02:53 -05:00
0fe2fa14e0 Fix another regression 2023-10-11 23:02:52 -05:00
7956b66302 Allow whitespace within open parentheses 2023-10-11 23:02:52 -05:00
48fa65e2e6 Allow whitespace before commas in comma-separated lists 2023-10-11 23:02:52 -05:00
f8d0cb0b92 Empty functions are no longer valid 2023-10-11 23:02:52 -05:00
b7e5a27aae Allow arithmetic on LHS of a pipe expression 2023-10-11 23:02:51 -05:00
643497f6d0 Correct inaccurate test, add CommentStyle to tests 2023-10-11 23:02:51 -05:00
e2ad24360a More little bugfixes
NonCodeMeta can have many nodes in the 'start' not just 0 or 1

fix Double newline after block comments
2023-10-11 23:02:51 -05:00
c3a61f368e Adjust test expectations because they're expecting inaccuracies which have been fixed 2023-10-11 23:02:51 -05:00
0c9250d2c5 Fix custom deserializer for NonCodeMeta 2023-10-11 23:02:50 -05:00
3666f42e41 Fix yarn tests 2023-10-11 23:02:50 -05:00
cd01d1c755 Accept fewer kinds of value on RHS of a |> operator
This yields SIGNIFICANT speedup
2023-10-11 23:02:50 -05:00
cf177c10a9 Separate benches for parsing and lexing 2023-10-11 23:02:49 -05:00
09615164eb New parser built with Winnow
Fixes #716
2023-10-11 23:02:49 -05:00
17 changed files with 2480 additions and 195 deletions

View File

@ -73,6 +73,7 @@
"simpleserver": "yarn pretest && http-server ./public --cors -p 3000", "simpleserver": "yarn pretest && http-server ./public --cors -p 3000",
"fmt": "prettier --write ./src", "fmt": "prettier --write ./src",
"fmt-check": "prettier --check ./src", "fmt-check": "prettier --check ./src",
"build:wasm-dev": "(cd src/wasm-lib && wasm-pack build --dev --target web --out-dir pkg && cargo test -p kcl-lib export_bindings) && cp src/wasm-lib/pkg/wasm_lib_bg.wasm public && yarn fmt",
"build:wasm": "(cd src/wasm-lib && wasm-pack build --target web --out-dir pkg && cargo test -p kcl-lib export_bindings) && cp src/wasm-lib/pkg/wasm_lib_bg.wasm public && yarn fmt", "build:wasm": "(cd src/wasm-lib && wasm-pack build --target web --out-dir pkg && cargo test -p kcl-lib export_bindings) && cp src/wasm-lib/pkg/wasm_lib_bg.wasm public && yarn fmt",
"build:wasm-clean": "yarn wasm-prep && yarn build:wasm", "build:wasm-clean": "yarn wasm-prep && yarn build:wasm",
"remove-importmeta": "sed -i 's/import.meta.url/window.location.origin/g' \"./src/wasm-lib/pkg/wasm_lib.js\"; sed -i '' 's/import.meta.url/window.location.origin/g' \"./src/wasm-lib/pkg/wasm_lib.js\" || echo \"sed for both mac and linux\"", "remove-importmeta": "sed -i 's/import.meta.url/window.location.origin/g' \"./src/wasm-lib/pkg/wasm_lib.js\"; sed -i '' 's/import.meta.url/window.location.origin/g' \"./src/wasm-lib/pkg/wasm_lib.js\" || echo \"sed for both mac and linux\"",

View File

@ -29,7 +29,7 @@ class KclManager {
end: 0, end: 0,
nonCodeMeta: { nonCodeMeta: {
nonCodeNodes: {}, nonCodeNodes: {},
start: null, start: [],
}, },
} }
private _programMemory: ProgramMemory = { private _programMemory: ProgramMemory = {
@ -220,7 +220,7 @@ class KclManager {
end: 0, end: 0,
nonCodeMeta: { nonCodeMeta: {
nonCodeNodes: {}, nonCodeNodes: {},
start: null, start: [],
}, },
} }
this._programMemory = { this._programMemory = {

View File

@ -141,42 +141,6 @@ const newVar = myVar + 1
}) })
describe('testing function declaration', () => { describe('testing function declaration', () => {
test('fn funcN = () => {}', () => {
const { body } = parse('fn funcN = () => {}')
delete (body[0] as any).declarations[0].init.body.nonCodeMeta
expect(body).toEqual([
{
type: 'VariableDeclaration',
start: 0,
end: 19,
kind: 'fn',
declarations: [
{
type: 'VariableDeclarator',
start: 3,
end: 19,
id: {
type: 'Identifier',
start: 3,
end: 8,
name: 'funcN',
},
init: {
type: 'FunctionExpression',
start: 11,
end: 19,
params: [],
body: {
start: 17,
end: 19,
body: [],
},
},
},
],
},
])
})
test('fn funcN = (a, b) => {return a + b}', () => { test('fn funcN = (a, b) => {return a + b}', () => {
const { body } = parse( const { body } = parse(
['fn funcN = (a, b) => {', ' return a + b', '}'].join('\n') ['fn funcN = (a, b) => {', ' return a + b', '}'].join('\n')
@ -1513,22 +1477,23 @@ const key = 'c'`
const nonCodeMetaInstance = { const nonCodeMetaInstance = {
type: 'NonCodeNode', type: 'NonCodeNode',
start: code.indexOf('\n// this is a comment'), start: code.indexOf('\n// this is a comment'),
end: code.indexOf('const key'), end: code.indexOf('const key') - 1,
value: { value: {
type: 'blockComment', type: 'blockComment',
style: 'line',
value: 'this is a comment', value: 'this is a comment',
}, },
} }
const { nonCodeMeta } = parse(code) const { nonCodeMeta } = parse(code)
expect(nonCodeMeta.nonCodeNodes[0]).toEqual(nonCodeMetaInstance) expect(nonCodeMeta.nonCodeNodes[0][0]).toEqual(nonCodeMetaInstance)
// extra whitespace won't change it's position (0) or value (NB the start end would have changed though) // extra whitespace won't change it's position (0) or value (NB the start end would have changed though)
const codeWithExtraStartWhitespace = '\n\n\n' + code const codeWithExtraStartWhitespace = '\n\n\n' + code
const { nonCodeMeta: nonCodeMeta2 } = parse(codeWithExtraStartWhitespace) const { nonCodeMeta: nonCodeMeta2 } = parse(codeWithExtraStartWhitespace)
expect(nonCodeMeta2.nonCodeNodes[0].value).toStrictEqual( expect(nonCodeMeta2.nonCodeNodes[0][0].value).toStrictEqual(
nonCodeMetaInstance.value nonCodeMetaInstance.value
) )
expect(nonCodeMeta2.nonCodeNodes[0].start).not.toBe( expect(nonCodeMeta2.nonCodeNodes[0][0].start).not.toBe(
nonCodeMetaInstance.start nonCodeMetaInstance.start
) )
}) })
@ -1546,12 +1511,13 @@ const key = 'c'`
const indexOfSecondLineToExpression = 2 const indexOfSecondLineToExpression = 2
const sketchNonCodeMeta = (body as any)[0].declarations[0].init.nonCodeMeta const sketchNonCodeMeta = (body as any)[0].declarations[0].init.nonCodeMeta
.nonCodeNodes .nonCodeNodes
expect(sketchNonCodeMeta[indexOfSecondLineToExpression]).toEqual({ expect(sketchNonCodeMeta[indexOfSecondLineToExpression][0]).toEqual({
type: 'NonCodeNode', type: 'NonCodeNode',
start: 106, start: 106,
end: 166, end: 163,
value: { value: {
type: 'blockComment', type: 'inlineComment',
style: 'block',
value: 'this is\n a comment\n spanning a few lines', value: 'this is\n a comment\n spanning a few lines',
}, },
}) })
@ -1568,14 +1534,15 @@ const key = 'c'`
const { body } = parse(code) const { body } = parse(code)
const sketchNonCodeMeta = (body[0] as any).declarations[0].init.nonCodeMeta const sketchNonCodeMeta = (body[0] as any).declarations[0].init.nonCodeMeta
.nonCodeNodes .nonCodeNodes[3][0]
expect(sketchNonCodeMeta[3]).toEqual({ expect(sketchNonCodeMeta).toEqual({
type: 'NonCodeNode', type: 'NonCodeNode',
start: 125, start: 125,
end: 141, end: 138,
value: { value: {
type: 'blockComment', type: 'blockComment',
value: 'a comment', value: 'a comment',
style: 'line',
}, },
}) })
}) })
@ -1693,11 +1660,7 @@ describe('parsing errors', () => {
} }
const theError = _theError as any const theError = _theError as any
expect(theError).toEqual( expect(theError).toEqual(
new KCLError( new KCLError('syntax', 'Unexpected token', [[27, 28]])
'unexpected',
'Unexpected token Token { token_type: Brace, start: 29, end: 30, value: "}" }',
[[29, 30]]
)
) )
}) })
}) })

View File

@ -104,7 +104,7 @@ describe('Testing addSketchTo', () => {
body: [], body: [],
start: 0, start: 0,
end: 0, end: 0,
nonCodeMeta: { nonCodeNodes: {}, start: null }, nonCodeMeta: { nonCodeNodes: {}, start: [] },
}, },
'yz' 'yz'
) )

View File

@ -540,7 +540,7 @@ export function createPipeExpression(
start: 0, start: 0,
end: 0, end: 0,
body, body,
nonCodeMeta: { nonCodeNodes: {}, start: null }, nonCodeMeta: { nonCodeNodes: {}, start: [] },
} }
} }

View File

@ -272,21 +272,20 @@ const mySk1 = startSketchAt([0, 0])
` `
const { ast } = code2ast(code) const { ast } = code2ast(code)
const recasted = recast(ast) const recasted = recast(ast)
expect(recasted).toBe(`// comment at start expect(recasted).toBe(`/* comment at start */
const mySk1 = startSketchAt([0, 0]) const mySk1 = startSketchAt([0, 0])
|> lineTo([1, 1], %) |> lineTo([1, 1], %)
// comment here // comment here
|> lineTo({ to: [0, 1], tag: 'myTag' }, %) |> lineTo({ to: [0, 1], tag: 'myTag' }, %)
|> lineTo([1, 1], %) |> lineTo([1, 1], %) /* and
/* and here */
here // a comment between pipe expression statements
a comment between pipe expression statements */
|> rx(90, %) |> rx(90, %)
// and another with just white space between others below // and another with just white space between others below
|> ry(45, %) |> ry(45, %)
|> rx(45, %) |> rx(45, %)
// one more for good measure /* one more for good measure */
`) `)
}) })
}) })

View File

@ -257,7 +257,7 @@ export async function executeCode({
body: [], body: [],
nonCodeMeta: { nonCodeMeta: {
nonCodeNodes: {}, nonCodeNodes: {},
start: null, start: [],
}, },
}, },
} }

View File

@ -6,36 +6,30 @@ pub fn bench_lex(c: &mut Criterion) {
c.bench_function("lex_pipes_on_pipes", |b| b.iter(|| lex(PIPES_PROGRAM))); c.bench_function("lex_pipes_on_pipes", |b| b.iter(|| lex(PIPES_PROGRAM)));
} }
pub fn bench_lex_parse(c: &mut Criterion) { pub fn bench_parse(c: &mut Criterion) {
c.bench_function("parse_lex_cube", |b| b.iter(|| lex_and_parse(CUBE_PROGRAM))); for (name, file) in [
c.bench_function("parse_lex_big_kitt", |b| b.iter(|| lex_and_parse(KITT_PROGRAM))); ("pipes_on_pipes", PIPES_PROGRAM),
c.bench_function("parse_lex_pipes_on_pipes", |b| b.iter(|| lex_and_parse(PIPES_PROGRAM))); ("big_kitt", KITT_PROGRAM),
("cube", CUBE_PROGRAM),
] {
let tokens = kcl_lib::token::lexer(file);
c.bench_function(&format!("parse_{name}"), move |b| {
let tok = tokens.clone();
b.iter(move || {
let parser = kcl_lib::parser::Parser::new(tok.clone());
black_box(parser.ast().unwrap());
})
});
}
} }
fn lex(program: &str) { fn lex(program: &str) {
black_box(kcl_lib::token::lexer(program)); black_box(kcl_lib::token::lexer(program));
} }
fn lex_and_parse(program: &str) { criterion_group!(benches, bench_lex, bench_parse);
let tokens = kcl_lib::token::lexer(program);
let parser = kcl_lib::parser::Parser::new(tokens);
black_box(parser.ast().unwrap());
}
criterion_group!(benches, bench_lex, bench_lex_parse);
criterion_main!(benches); criterion_main!(benches);
const KITT_PROGRAM: &str = include_str!("../../tests/executor/inputs/kittycad_svg.kcl"); const KITT_PROGRAM: &str = include_str!("../../tests/executor/inputs/kittycad_svg.kcl");
const PIPES_PROGRAM: &str = include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl"); const PIPES_PROGRAM: &str = include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl");
const CUBE_PROGRAM: &str = r#"fn cube = (pos, scale) => { const CUBE_PROGRAM: &str = include_str!("../../tests/executor/inputs/cube.kcl");
const sg = startSketchAt(pos)
|> line([0, scale], %)
|> line([scale, 0], %)
|> line([0, -scale], %)
return sg
}
const b1 = cube([0,0], 10)
const pt1 = b1[0]
show(b1)"#;

View File

@ -63,10 +63,14 @@ impl Program {
.fold(String::new(), |mut output, (index, recast_str)| { .fold(String::new(), |mut output, (index, recast_str)| {
let start_string = if index == 0 { let start_string = if index == 0 {
// We need to indent. // We need to indent.
if let Some(start) = self.non_code_meta.start.clone() { if self.non_code_meta.start.is_empty() {
start.format(&indentation)
} else {
indentation.to_string() indentation.to_string()
} else {
self.non_code_meta
.start
.iter()
.map(|start| start.format(&indentation))
.collect()
} }
} else { } else {
// Do nothing, we already applied the indentation elsewhere. // Do nothing, we already applied the indentation elsewhere.
@ -82,7 +86,10 @@ impl Program {
}; };
let custom_white_space_or_comment = match self.non_code_meta.non_code_nodes.get(&index) { let custom_white_space_or_comment = match self.non_code_meta.non_code_nodes.get(&index) {
Some(custom_white_space_or_comment) => custom_white_space_or_comment.format(&indentation), Some(noncodes) => noncodes
.iter()
.map(|custom_white_space_or_comment| custom_white_space_or_comment.format(&indentation))
.collect::<String>(),
None => String::new(), None => String::new(),
}; };
let end_string = if custom_white_space_or_comment.is_empty() { let end_string = if custom_white_space_or_comment.is_empty() {
@ -707,30 +714,35 @@ pub struct NonCodeNode {
impl NonCodeNode { impl NonCodeNode {
pub fn value(&self) -> String { pub fn value(&self) -> String {
match &self.value { match &self.value {
NonCodeValue::InlineComment { value } => value.clone(), NonCodeValue::InlineComment { value, style: _ } => value.clone(),
NonCodeValue::BlockComment { value } => value.clone(), NonCodeValue::BlockComment { value, style: _ } => value.clone(),
NonCodeValue::NewLineBlockComment { value } => value.clone(), NonCodeValue::NewLineBlockComment { value, style: _ } => value.clone(),
NonCodeValue::NewLine => "\n\n".to_string(), NonCodeValue::NewLine => "\n\n".to_string(),
} }
} }
pub fn format(&self, indentation: &str) -> String { pub fn format(&self, indentation: &str) -> String {
match &self.value { match &self.value {
NonCodeValue::InlineComment { value } => format!(" // {}\n", value), NonCodeValue::InlineComment {
NonCodeValue::BlockComment { value } => { value,
style: CommentStyle::Line,
} => format!(" // {}\n", value),
NonCodeValue::InlineComment {
value,
style: CommentStyle::Block,
} => format!(" /* {} */", value),
NonCodeValue::BlockComment { value, style } => {
let add_start_new_line = if self.start == 0 { "" } else { "\n" }; let add_start_new_line = if self.start == 0 { "" } else { "\n" };
if value.contains('\n') { match style {
format!("{}{}/* {} */\n", add_start_new_line, indentation, value) CommentStyle::Block => format!("{}{}/* {} */", add_start_new_line, indentation, value),
} else { CommentStyle::Line => format!("{}{}// {}\n", add_start_new_line, indentation, value),
format!("{}{}// {}\n", add_start_new_line, indentation, value)
} }
} }
NonCodeValue::NewLineBlockComment { value } => { NonCodeValue::NewLineBlockComment { value, style } => {
let add_start_new_line = if self.start == 0 { "" } else { "\n\n" }; let add_start_new_line = if self.start == 0 { "" } else { "\n\n" };
if value.contains('\n') { match style {
format!("{}{}/* {} */\n", add_start_new_line, indentation, value) CommentStyle::Block => format!("{}{}/* {} */\n", add_start_new_line, indentation, value),
} else { CommentStyle::Line => format!("{}{}// {}\n", add_start_new_line, indentation, value),
format!("{}{}// {}\n", add_start_new_line, indentation, value)
} }
} }
NonCodeValue::NewLine => "\n\n".to_string(), NonCodeValue::NewLine => "\n\n".to_string(),
@ -738,14 +750,27 @@ impl NonCodeNode {
} }
} }
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub enum CommentStyle {
/// Like // foo
Line,
/// Like /* foo */
Block,
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)] #[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)] #[ts(export)]
#[serde(tag = "type", rename_all = "camelCase")] #[serde(tag = "type", rename_all = "camelCase")]
pub enum NonCodeValue { pub enum NonCodeValue {
/// An inline comment. /// An inline comment.
/// An example of this is the following: `1 + 1 // This is an inline comment`. /// Here are examples:
/// `1 + 1 // This is an inline comment`.
/// `1 + 1 /* Here's another */`.
InlineComment { InlineComment {
value: String, value: String,
style: CommentStyle,
}, },
/// A block comment. /// A block comment.
/// An example of this is the following: /// An example of this is the following:
@ -759,11 +784,13 @@ pub enum NonCodeValue {
/// If it did it would be a `NewLineBlockComment`. /// If it did it would be a `NewLineBlockComment`.
BlockComment { BlockComment {
value: String, value: String,
style: CommentStyle,
}, },
/// A block comment that has a new line above it. /// A block comment that has a new line above it.
/// The user explicitly added a new line above the block comment. /// The user explicitly added a new line above the block comment.
NewLineBlockComment { NewLineBlockComment {
value: String, value: String,
style: CommentStyle,
}, },
// A new line like `\n\n` NOT a new line like `\n`. // A new line like `\n\n` NOT a new line like `\n`.
// This is also not a comment. // This is also not a comment.
@ -774,8 +801,8 @@ pub enum NonCodeValue {
#[ts(export)] #[ts(export)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct NonCodeMeta { pub struct NonCodeMeta {
pub non_code_nodes: HashMap<usize, NonCodeNode>, pub non_code_nodes: HashMap<usize, Vec<NonCodeNode>>,
pub start: Option<NonCodeNode>, pub start: Vec<NonCodeNode>,
} }
// implement Deserialize manually because we to force the keys of non_code_nodes to be usize // implement Deserialize manually because we to force the keys of non_code_nodes to be usize
@ -788,15 +815,16 @@ impl<'de> Deserialize<'de> for NonCodeMeta {
#[derive(Deserialize)] #[derive(Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
struct NonCodeMetaHelper { struct NonCodeMetaHelper {
non_code_nodes: HashMap<String, NonCodeNode>, non_code_nodes: HashMap<String, Vec<NonCodeNode>>,
start: Option<NonCodeNode>, start: Vec<NonCodeNode>,
} }
let helper = NonCodeMetaHelper::deserialize(deserializer)?; let helper = NonCodeMetaHelper::deserialize(deserializer)?;
let mut non_code_nodes = HashMap::new(); let non_code_nodes = helper
for (key, value) in helper.non_code_nodes { .non_code_nodes
non_code_nodes.insert(key.parse().map_err(serde::de::Error::custom)?, value); .into_iter()
} .map(|(key, value)| Ok((key.parse().map_err(serde::de::Error::custom)?, value)))
.collect::<Result<HashMap<_, _>, _>>()?;
Ok(NonCodeMeta { Ok(NonCodeMeta {
non_code_nodes, non_code_nodes,
start: helper.start, start: helper.start,
@ -804,6 +832,12 @@ impl<'de> Deserialize<'de> for NonCodeMeta {
} }
} }
impl NonCodeMeta {
pub fn insert(&mut self, i: usize, new: NonCodeNode) {
self.non_code_nodes.entry(i).or_default().push(new);
}
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)] #[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)] #[ts(export)]
#[serde(tag = "type")] #[serde(tag = "type")]
@ -2385,7 +2419,9 @@ impl PipeExpression {
let mut s = statement.recast(options, indentation_level + 1, true); let mut s = statement.recast(options, indentation_level + 1, true);
let non_code_meta = self.non_code_meta.clone(); let non_code_meta = self.non_code_meta.clone();
if let Some(non_code_meta_value) = non_code_meta.non_code_nodes.get(&index) { if let Some(non_code_meta_value) = non_code_meta.non_code_nodes.get(&index) {
s += non_code_meta_value.format(&indentation).trim_end_matches('\n') for val in non_code_meta_value {
s += val.format(&indentation).trim_end_matches('\n')
}
} }
if index != self.body.len() - 1 { if index != self.body.len() - 1 {
@ -2869,13 +2905,32 @@ show(part001)"#;
recasted, recasted,
r#"fn myFn = () => { r#"fn myFn = () => {
// this is a comment // this is a comment
const yo = { a: { b: { c: '123' } } } const yo = { a: { b: { c: '123' } } } /* block
/* block
comment */ comment */
const key = 'c' const key = 'c'
// this is also a comment // this is also a comment
return things return things
} }
"#
);
}
#[test]
fn test_recast_comment_at_start() {
let test_program = r#"
/* comment at start */
const mySk1 = startSketchAt([0, 0])"#;
let tokens = crate::token::lexer(test_program);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
recasted,
r#"/* comment at start */
const mySk1 = startSketchAt([0, 0])
"# "#
); );
} }
@ -2913,9 +2968,8 @@ const mySk1 = startSketchOn('XY')
|> lineTo({ to: [0, 1], tag: 'myTag' }, %) |> lineTo({ to: [0, 1], tag: 'myTag' }, %)
|> lineTo([1, 1], %) |> lineTo([1, 1], %)
/* and /* and
here here */
// a comment between pipe expression statements
a comment between pipe expression statements */
|> rx(90, %) |> rx(90, %)
// and another with just white space between others below // and another with just white space between others below
|> ry(45, %) |> ry(45, %)
@ -2988,16 +3042,19 @@ const things = "things"
let program = parser.ast().unwrap(); let program = parser.ast().unwrap();
let recasted = program.recast(&Default::default(), 0); let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted.trim(), some_program_string.trim()); let expected = some_program_string.trim();
// Currently new parser removes an empty line
let actual = recasted.trim();
assert_eq!(actual, expected);
} }
#[test] #[test]
fn test_recast_comment_tokens_inside_strings() { fn test_recast_comment_tokens_inside_strings() {
let some_program_string = r#"let b = { let some_program_string = r#"let b = {
"end": 141, end: 141,
"start": 125, start: 125,
"type": "NonCodeNode", type: "NonCodeNode",
"value": " value: "
// a comment // a comment
" "
}"#; }"#;

View File

@ -4,7 +4,7 @@ use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity};
use crate::executor::SourceRange; use crate::executor::SourceRange;
#[derive(Error, Debug, Serialize, Deserialize, ts_rs::TS)] #[derive(Error, Debug, Serialize, Deserialize, ts_rs::TS, Clone)]
#[ts(export)] #[ts(export)]
#[serde(tag = "kind", rename_all = "snake_case")] #[serde(tag = "kind", rename_all = "snake_case")]
pub enum KclError { pub enum KclError {
@ -28,7 +28,7 @@ pub enum KclError {
Engine(KclErrorDetails), Engine(KclErrorDetails),
} }
#[derive(Debug, Serialize, Deserialize, ts_rs::TS)] #[derive(Debug, Serialize, Deserialize, ts_rs::TS, Clone)]
#[ts(export)] #[ts(export)]
pub struct KclErrorDetails { pub struct KclErrorDetails {
#[serde(rename = "sourceRanges")] #[serde(rename = "sourceRanges")]
@ -78,6 +78,22 @@ impl KclError {
KclError::Engine(e) => e.source_ranges.clone(), KclError::Engine(e) => e.source_ranges.clone(),
} }
} }
/// Get the inner error message.
pub fn message(&self) -> &str {
match &self {
KclError::Syntax(e) => &e.message,
KclError::Semantic(e) => &e.message,
KclError::Type(e) => &e.message,
KclError::Unimplemented(e) => &e.message,
KclError::Unexpected(e) => &e.message,
KclError::ValueAlreadyDefined(e) => &e.message,
KclError::UndefinedValue(e) => &e.message,
KclError::InvalidExpression(e) => &e.message,
KclError::Engine(e) => &e.message,
}
}
pub fn to_lsp_diagnostic(&self, code: &str) -> Diagnostic { pub fn to_lsp_diagnostic(&self, code: &str) -> Diagnostic {
let (message, _, _) = self.get_message_line_column(code); let (message, _, _) = self.get_message_line_column(code);
let source_ranges = self.source_ranges(); let source_ranges = self.source_ranges();

View File

@ -2,7 +2,7 @@ use std::{collections::HashMap, str::FromStr};
use crate::{ use crate::{
ast::types::{ ast::types::{
ArrayExpression, BinaryExpression, BinaryPart, BodyItem, CallExpression, ExpressionStatement, ArrayExpression, BinaryExpression, BinaryPart, BodyItem, CallExpression, CommentStyle, ExpressionStatement,
FunctionExpression, Identifier, Literal, LiteralIdentifier, MemberExpression, MemberObject, NonCodeMeta, FunctionExpression, Identifier, Literal, LiteralIdentifier, MemberExpression, MemberObject, NonCodeMeta,
NonCodeNode, NonCodeValue, ObjectExpression, ObjectKeyInfo, ObjectProperty, PipeExpression, PipeSubstitution, NonCodeNode, NonCodeValue, ObjectExpression, ObjectKeyInfo, ObjectProperty, PipeExpression, PipeSubstitution,
Program, ReturnStatement, UnaryExpression, UnaryOperator, Value, VariableDeclaration, VariableDeclarator, Program, ReturnStatement, UnaryExpression, UnaryOperator, Value, VariableDeclaration, VariableDeclarator,
@ -13,6 +13,8 @@ use crate::{
token::{Token, TokenType}, token::{Token, TokenType},
}; };
mod parser_impl;
pub const PIPE_SUBSTITUTION_OPERATOR: &str = "%"; pub const PIPE_SUBSTITUTION_OPERATOR: &str = "%";
pub const PIPE_OPERATOR: &str = "|>"; pub const PIPE_OPERATOR: &str = "|>";
@ -179,13 +181,19 @@ impl Parser {
Ok(token) Ok(token)
} }
/// Use the new Winnow parser.
pub fn ast(&self) -> Result<Program, KclError> { pub fn ast(&self) -> Result<Program, KclError> {
parser_impl::run_parser(&mut self.tokens.as_slice())
}
/// Use the old handwritten recursive parser.
pub fn ast_old(&self) -> Result<Program, KclError> {
let body = self.make_body( let body = self.make_body(
0, 0,
vec![], vec![],
NonCodeMeta { NonCodeMeta {
non_code_nodes: HashMap::new(), non_code_nodes: HashMap::new(),
start: None, start: Vec::new(),
}, },
)?; )?;
let end = match self.get_token(body.last_index) { let end = match self.get_token(body.last_index) {
@ -209,7 +217,7 @@ impl Parser {
}) })
} }
pub fn make_literal(&self, index: usize) -> Result<Literal, KclError> { fn make_literal(&self, index: usize) -> Result<Literal, KclError> {
let token = self.get_token(index)?; let token = self.get_token(index)?;
let value = if token.token_type == TokenType::Number { let value = if token.token_type == TokenType::Number {
if let Ok(value) = token.value.parse::<i64>() { if let Ok(value) = token.value.parse::<i64>() {
@ -295,6 +303,11 @@ impl Parser {
)); ));
} }
let is_block_style = non_code_tokens
.first()
.map(|tok| matches!(tok.token_type, TokenType::BlockComment))
.unwrap_or_default();
let full_string = non_code_tokens let full_string = non_code_tokens
.iter() .iter()
.map(|t| { .map(|t| {
@ -336,11 +349,32 @@ impl Parser {
value: if start_end_string.starts_with("\n\n") && is_new_line_comment { value: if start_end_string.starts_with("\n\n") && is_new_line_comment {
// Preserve if they want a whitespace line before the comment. // Preserve if they want a whitespace line before the comment.
// But let's just allow one. // But let's just allow one.
NonCodeValue::NewLineBlockComment { value: full_string } NonCodeValue::NewLineBlockComment {
} else if is_new_line_comment { value: full_string,
NonCodeValue::BlockComment { value: full_string } style: if is_block_style {
CommentStyle::Block
} else { } else {
NonCodeValue::InlineComment { value: full_string } CommentStyle::Line
},
}
} else if is_new_line_comment {
NonCodeValue::BlockComment {
value: full_string,
style: if is_block_style {
CommentStyle::Block
} else {
CommentStyle::Line
},
}
} else {
NonCodeValue::InlineComment {
value: full_string,
style: if is_block_style {
CommentStyle::Block
} else {
CommentStyle::Line
},
}
}, },
}; };
Ok((Some(node), end_index - 1)) Ok((Some(node), end_index - 1))
@ -1033,7 +1067,7 @@ impl Parser {
let non_code_meta = match previous_non_code_meta { let non_code_meta = match previous_non_code_meta {
Some(meta) => meta, Some(meta) => meta,
None => NonCodeMeta { None => NonCodeMeta {
start: None, start: Vec::new(),
non_code_nodes: HashMap::new(), non_code_nodes: HashMap::new(),
}, },
}; };
@ -1064,7 +1098,7 @@ impl Parser {
let mut _non_code_meta: NonCodeMeta; let mut _non_code_meta: NonCodeMeta;
if let Some(node) = next_pipe.non_code_node { if let Some(node) = next_pipe.non_code_node {
_non_code_meta = non_code_meta; _non_code_meta = non_code_meta;
_non_code_meta.non_code_nodes.insert(previous_values.len(), node); _non_code_meta.insert(previous_values.len(), node);
} else { } else {
_non_code_meta = non_code_meta; _non_code_meta = non_code_meta;
} }
@ -1435,7 +1469,7 @@ impl Parser {
self.make_params(next_brace_or_comma_token.index, _previous_params) self.make_params(next_brace_or_comma_token.index, _previous_params)
} }
pub fn make_unary_expression(&self, index: usize) -> Result<UnaryExpressionResult, KclError> { fn make_unary_expression(&self, index: usize) -> Result<UnaryExpressionResult, KclError> {
let current_token = self.get_token(index)?; let current_token = self.get_token(index)?;
let next_token = self.next_meaningful_token(index, None)?; let next_token = self.next_meaningful_token(index, None)?;
if next_token.token.is_none() { if next_token.token.is_none() {
@ -1631,9 +1665,11 @@ impl Parser {
let next_token = self.next_meaningful_token(token_index, Some(0))?; let next_token = self.next_meaningful_token(token_index, Some(0))?;
if let Some(node) = &next_token.non_code_node { if let Some(node) = &next_token.non_code_node {
if previous_body.is_empty() { if previous_body.is_empty() {
non_code_meta.start = next_token.non_code_node; if let Some(next) = next_token.non_code_node {
non_code_meta.start.push(next);
}
} else { } else {
non_code_meta.non_code_nodes.insert(previous_body.len(), node.clone()); non_code_meta.insert(previous_body.len(), node.clone());
} }
} }
return self.make_body(next_token.index, previous_body, non_code_meta); return self.make_body(next_token.index, previous_body, non_code_meta);
@ -1641,14 +1677,14 @@ impl Parser {
let next = self.next_meaningful_token(token_index, None)?; let next = self.next_meaningful_token(token_index, None)?;
if let Some(node) = &next.non_code_node { if let Some(node) = &next.non_code_node {
non_code_meta.non_code_nodes.insert(previous_body.len(), node.clone()); non_code_meta.insert(previous_body.len(), node.clone());
} }
if token.token_type == TokenType::Keyword && VariableKind::from_str(&token.value).is_ok() { if token.token_type == TokenType::Keyword && VariableKind::from_str(&token.value).is_ok() {
let declaration = self.make_variable_declaration(token_index)?; let declaration = self.make_variable_declaration(token_index)?;
let next_thing = self.next_meaningful_token(declaration.last_index, None)?; let next_thing = self.next_meaningful_token(declaration.last_index, None)?;
if let Some(node) = &next_thing.non_code_node { if let Some(node) = &next_thing.non_code_node {
non_code_meta.non_code_nodes.insert(previous_body.len(), node.clone()); non_code_meta.insert(previous_body.len(), node.clone());
} }
let mut _previous_body = previous_body; let mut _previous_body = previous_body;
_previous_body.push(BodyItem::VariableDeclaration(VariableDeclaration { _previous_body.push(BodyItem::VariableDeclaration(VariableDeclaration {
@ -1669,7 +1705,7 @@ impl Parser {
let statement = self.make_return_statement(token_index)?; let statement = self.make_return_statement(token_index)?;
let next_thing = self.next_meaningful_token(statement.last_index, None)?; let next_thing = self.next_meaningful_token(statement.last_index, None)?;
if let Some(node) = &next_thing.non_code_node { if let Some(node) = &next_thing.non_code_node {
non_code_meta.non_code_nodes.insert(previous_body.len(), node.clone()); non_code_meta.insert(previous_body.len(), node.clone());
} }
let mut _previous_body = previous_body; let mut _previous_body = previous_body;
_previous_body.push(BodyItem::ReturnStatement(ReturnStatement { _previous_body.push(BodyItem::ReturnStatement(ReturnStatement {
@ -1693,7 +1729,7 @@ impl Parser {
let expression = self.make_expression_statement(token_index)?; let expression = self.make_expression_statement(token_index)?;
let next_thing = self.next_meaningful_token(expression.last_index, None)?; let next_thing = self.next_meaningful_token(expression.last_index, None)?;
if let Some(node) = &next_thing.non_code_node { if let Some(node) = &next_thing.non_code_node {
non_code_meta.non_code_nodes.insert(previous_body.len(), node.clone()); non_code_meta.insert(previous_body.len(), node.clone());
} }
let mut _previous_body = previous_body; let mut _previous_body = previous_body;
_previous_body.push(BodyItem::ExpressionStatement(ExpressionStatement { _previous_body.push(BodyItem::ExpressionStatement(ExpressionStatement {
@ -1716,7 +1752,7 @@ impl Parser {
&& next_thing_token.token_type == TokenType::Operator && next_thing_token.token_type == TokenType::Operator
{ {
if let Some(node) = &next_thing.non_code_node { if let Some(node) = &next_thing.non_code_node {
non_code_meta.non_code_nodes.insert(previous_body.len(), node.clone()); non_code_meta.insert(previous_body.len(), node.clone());
} }
let expression = self.make_expression_statement(token_index)?; let expression = self.make_expression_statement(token_index)?;
let mut _previous_body = previous_body; let mut _previous_body = previous_body;
@ -1749,7 +1785,7 @@ impl Parser {
last_index: next_token_index, last_index: next_token_index,
non_code_meta: NonCodeMeta { non_code_meta: NonCodeMeta {
non_code_nodes: HashMap::new(), non_code_nodes: HashMap::new(),
start: None, start: Vec::new(),
}, },
} }
} else { } else {
@ -1758,7 +1794,7 @@ impl Parser {
vec![], vec![],
NonCodeMeta { NonCodeMeta {
non_code_nodes: HashMap::new(), non_code_nodes: HashMap::new(),
start: None, start: Vec::new(),
}, },
)? )?
}; };
@ -1913,6 +1949,7 @@ const key = 'c'"#,
end: 60, end: 60,
value: NonCodeValue::BlockComment { value: NonCodeValue::BlockComment {
value: "this is a comment".to_string(), value: "this is a comment".to_string(),
style: CommentStyle::Line,
}, },
}), }),
31, 31,
@ -1966,6 +2003,35 @@ const key = 'c'"#,
); );
} }
#[test]
fn test_math_parse() {
    // The parser should accept a binary addition even when the right operand
    // is a string literal — operand types are not checked at parse time here.
    let tokens = crate::token::lexer(r#"5 + "a""#);
    let actual = Parser::new(tokens).ast().unwrap().body;
    // Expected AST: one BinaryExpression; `start`/`end` are byte offsets
    // into the source string `5 + "a"`.
    let expr = BinaryExpression {
        start: 0,
        end: 7,
        operator: BinaryOperator::Add,
        left: BinaryPart::Literal(Box::new(Literal {
            start: 0,
            end: 1,
            value: serde_json::Value::Number(serde_json::Number::from(5)),
            raw: "5".to_owned(),
        })),
        right: BinaryPart::Literal(Box::new(Literal {
            start: 4,
            end: 7,
            // `value` holds the parsed string; `raw` keeps the source text
            // including the surrounding quotes.
            value: serde_json::Value::String("a".to_owned()),
            raw: r#""a""#.to_owned(),
        })),
    };
    let expected = vec![BodyItem::ExpressionStatement(ExpressionStatement {
        start: 0,
        end: 7,
        expression: Value::BinaryExpression(Box::new(expr)),
    })];
    assert_eq!(expected, actual);
}
#[test] #[test]
fn test_is_code_token() { fn test_is_code_token() {
let tokens = [ let tokens = [
@ -2600,7 +2666,7 @@ show(mySk1)"#;
vec![], vec![],
NonCodeMeta { NonCodeMeta {
non_code_nodes: HashMap::new(), non_code_nodes: HashMap::new(),
start: None, start: Vec::new(),
}, },
) )
.unwrap(); .unwrap();
@ -2636,10 +2702,7 @@ show(mySk1)"#;
})), })),
})), })),
})], })],
non_code_meta: NonCodeMeta { non_code_meta: NonCodeMeta::default(),
non_code_nodes: Default::default(),
start: None,
},
}; };
assert_eq!(result, expected_result); assert_eq!(result, expected_result);
@ -2812,10 +2875,6 @@ z(-[["#,
let parser = Parser::new(tokens); let parser = Parser::new(tokens);
let result = parser.ast(); let result = parser.ast();
assert!(result.is_err()); assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([1, 2])], message: "missing a closing brace for the function call" }"#
);
} }
#[test] #[test]
@ -2831,7 +2890,7 @@ z(-[["#,
// https://github.com/KittyCAD/modeling-app/issues/696 // https://github.com/KittyCAD/modeling-app/issues/696
assert_eq!( assert_eq!(
result.err().unwrap().to_string(), result.err().unwrap().to_string(),
r#"semantic: KclErrorDetails { source_ranges: [], message: "file is empty" }"# r#"syntax: KclErrorDetails { source_ranges: [], message: "file is empty" }"#
); );
} }
@ -2845,7 +2904,7 @@ z(-[["#,
// https://github.com/KittyCAD/modeling-app/issues/696 // https://github.com/KittyCAD/modeling-app/issues/696
assert_eq!( assert_eq!(
result.err().unwrap().to_string(), result.err().unwrap().to_string(),
r#"semantic: KclErrorDetails { source_ranges: [], message: "file is empty" }"# r#"syntax: KclErrorDetails { source_ranges: [], message: "file is empty" }"#
); );
} }
@ -2863,7 +2922,7 @@ e
.err() .err()
.unwrap() .unwrap()
.to_string() .to_string()
.contains("expected to be started on a identifier or literal")); .contains("expected whitespace, found ')' which is brace"));
} }
#[test] #[test]
@ -2872,7 +2931,11 @@ e
let parser = Parser::new(tokens); let parser = Parser::new(tokens);
let result = parser.ast(); let result = parser.ast();
assert!(result.is_err()); assert!(result.is_err());
assert!(result.err().unwrap().to_string().contains("expected another token")); assert!(result
.err()
.unwrap()
.to_string()
.contains("expected whitespace, found ')' which is brace"));
} }
#[test] #[test]
@ -2884,11 +2947,7 @@ e
let parser = Parser::new(tokens); let parser = Parser::new(tokens);
let result = parser.ast(); let result = parser.ast();
assert!(result.is_err()); assert!(result.is_err());
assert!(result assert!(result.err().unwrap().to_string().contains("Unexpected token"));
.err()
.unwrap()
.to_string()
.contains("unexpected end of expression"));
} }
#[test] #[test]
@ -2985,10 +3044,7 @@ e
}], }],
kind: VariableKind::Const, kind: VariableKind::Const,
})], })],
non_code_meta: NonCodeMeta { non_code_meta: NonCodeMeta::default(),
non_code_nodes: Default::default(),
start: None,
},
}; };
assert_eq!(result, expected_result); assert_eq!(result, expected_result);
@ -3022,7 +3078,9 @@ e
#[test] #[test]
fn test_error_stdlib_in_fn_name() { fn test_error_stdlib_in_fn_name() {
let some_program_string = r#"fn cos = () {}"#; let some_program_string = r#"fn cos = () => {
return 1
}"#;
let tokens = crate::token::lexer(some_program_string); let tokens = crate::token::lexer(some_program_string);
let parser = Parser::new(tokens); let parser = Parser::new(tokens);
let result = parser.ast(); let result = parser.ast();
@ -3123,9 +3181,12 @@ thing(false)
let parser = Parser::new(tokens); let parser = Parser::new(tokens);
let result = parser.ast(); let result = parser.ast();
assert!(result.is_err()); assert!(result.is_err());
// TODO: https://github.com/KittyCAD/modeling-app/issues/784
// Improve this error message.
// It should say that the compiler is expecting a function expression on the RHS.
assert_eq!( assert_eq!(
result.err().unwrap().to_string(), result.err().unwrap().to_string(),
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([0, 2])], message: "Expected a `let` variable kind, found: `fn`" }"# r#"syntax: KclErrorDetails { source_ranges: [SourceRange([11, 18])], message: "Unexpected token" }"#
); );
} }
@ -3163,15 +3224,6 @@ let other_thing = 2 * cos(3)"#;
parser.ast().unwrap(); parser.ast().unwrap();
} }
#[test]
fn test_parse_pipes_on_pipes() {
let code = include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl");
let tokens = crate::token::lexer(code);
let parser = Parser::new(tokens);
parser.ast().unwrap();
}
#[test] #[test]
fn test_negative_arguments() { fn test_negative_arguments() {
let some_program_string = r#"fn box = (p, h, l, w) => { let some_program_string = r#"fn box = (p, h, l, w) => {

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,107 @@
use winnow::error::{ErrorKind, ParseError, StrContext};
use crate::{
errors::{KclError, KclErrorDetails},
token::Token,
};
/// Accumulate context while backtracking errors
/// Very similar to [`winnow::error::ContextError`] type,
/// but the 'cause' field is always a [`KclError`],
/// instead of a dynamic [`std::error::Error`] trait object.
#[derive(Debug, Clone)]
pub struct ContextError<C = StrContext> {
    /// Parser context accumulated while backtracking (pushed by the
    /// `AddContext` impl below).
    pub context: Vec<C>,
    /// The root-cause error, if one was recorded (set when a [`KclError`]
    /// is converted into a `ContextError`, or via `FromExternalError`).
    pub cause: Option<KclError>,
}
impl From<ParseError<&[Token], ContextError>> for KclError {
    /// Convert a Winnow parse failure into a user-facing [`KclError`],
    /// pointing at the token where parsing stopped.
    fn from(err: ParseError<&[Token], ContextError>) -> Self {
        // An empty token stream means the source file had no code at all.
        let Some(last_token) = err.input().last().cloned() else {
            return KclError::Syntax(KclErrorDetails {
                source_ranges: Default::default(),
                message: "file is empty".to_owned(),
            });
        };

        // Clone only the token at the failure offset instead of copying the
        // entire token stream (the previous `to_vec()` was an O(n) copy on
        // every parse error).
        let offset = err.offset();
        let bad_token = err.input().get(offset).cloned();
        let err = err.into_inner();

        // If a KclError was recorded as the root cause, surface it directly.
        if let Some(e) = err.cause {
            return e;
        }

        match bad_token {
            // See docs on `ParseError::offset`: it may point one past the end
            // of the input, meaning the parser ran out of tokens; report the
            // error at the last token we saw.
            None => {
                let context = err.context.first();
                KclError::Syntax(KclErrorDetails {
                    source_ranges: last_token.as_source_ranges(),
                    message: match context {
                        Some(what) => format!("Unexpected end of file. The compiler {what}"),
                        None => "Unexpected end of file while still parsing".to_owned(),
                    },
                })
            }
            Some(bad_token) => {
                // TODO: Add the Winnow parser context to the error.
                // See https://github.com/KittyCAD/modeling-app/issues/784
                KclError::Syntax(KclErrorDetails {
                    source_ranges: bad_token.as_source_ranges(),
                    message: "Unexpected token".to_owned(),
                })
            }
        }
    }
}
impl<C> From<KclError> for ContextError<C> {
    /// Wrap a [`KclError`] as the root cause of a parse failure.
    /// No parser context has been accumulated at this point.
    fn from(e: KclError) -> Self {
        Self {
            cause: Some(e),
            context: Vec::new(),
        }
    }
}
impl<C> std::default::Default for ContextError<C> {
    // NOTE(review): implemented by hand, presumably because `#[derive(Default)]`
    // would add an unnecessary `C: Default` bound even though `Vec<C>` is
    // always default-constructible — confirm before switching to the derive.
    fn default() -> Self {
        Self {
            context: Default::default(),
            cause: None,
        }
    }
}
impl<I, C> winnow::error::ParserError<I> for ContextError<C> {
    /// Start from an empty error; context is attached later via `AddContext`.
    #[inline]
    fn from_error_kind(_input: &I, _kind: ErrorKind) -> Self {
        Self::default()
    }

    /// No per-`ErrorKind` accumulation: the error is returned unchanged.
    #[inline]
    fn append(self, _input: &I, _kind: ErrorKind) -> Self {
        self
    }

    /// When two alternatives both fail, keep the later (`other`) error and
    /// discard `self` — including any context `self` had accumulated.
    #[inline]
    fn or(self, other: Self) -> Self {
        other
    }
}
impl<C, I> winnow::error::AddContext<I, C> for ContextError<C> {
    /// Record one more piece of parser context while backtracking.
    #[inline]
    fn add_context(mut self, _input: &I, ctx: C) -> Self {
        self.context.push(ctx);
        self
    }
}
impl<C, I> winnow::error::FromExternalError<I, KclError> for ContextError<C> {
    /// Record an external [`KclError`] as the underlying cause of this
    /// parse error, with no accumulated context.
    #[inline]
    fn from_external_error(_input: &I, _kind: ErrorKind, e: KclError) -> Self {
        Self {
            context: Default::default(),
            cause: Some(e),
        }
    }
}

View File

@ -6,6 +6,8 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tower_lsp::lsp_types::SemanticTokenType; use tower_lsp::lsp_types::SemanticTokenType;
use crate::{ast::types::VariableKind, executor::SourceRange};
mod tokeniser; mod tokeniser;
/// The types of tokens. /// The types of tokens.
@ -142,15 +144,39 @@ impl Token {
TokenType::Whitespace | TokenType::LineComment | TokenType::BlockComment TokenType::Whitespace | TokenType::LineComment | TokenType::BlockComment
) )
} }
pub fn as_source_range(&self) -> SourceRange {
SourceRange([self.start, self.end])
} }
impl From<Token> for crate::executor::SourceRange { pub fn as_source_ranges(&self) -> Vec<SourceRange> {
vec![self.as_source_range()]
}
/// Is this token the beginning of a variable/function declaration?
/// If so, what kind?
/// If not, returns None.
pub fn declaration_keyword(&self) -> Option<VariableKind> {
if !matches!(self.token_type, TokenType::Keyword) {
return None;
}
Some(match self.value.as_str() {
"var" => VariableKind::Var,
"let" => VariableKind::Let,
"fn" => VariableKind::Fn,
"const" => VariableKind::Const,
_ => return None,
})
}
}
impl From<Token> for SourceRange {
fn from(token: Token) -> Self { fn from(token: Token) -> Self {
Self([token.start, token.end]) Self([token.start, token.end])
} }
} }
impl From<&Token> for crate::executor::SourceRange { impl From<&Token> for SourceRange {
fn from(token: &Token) -> Self { fn from(token: &Token) -> Self {
Self([token.start, token.end]) Self([token.start, token.end])
} }

View File

@ -0,0 +1,12 @@
// Sketch three sides of a square with side length `scale`, starting at `pos`.
fn cube = (pos, scale) => {
const sg = startSketchAt(pos)
|> line([0, scale], %)
|> line([scale, 0], %)
|> line([0, -scale], %)
return sg
}
// Build a size-10 sketch at the origin, then index into the resulting value
// (this fixture exercises member expressions on a sketch).
const b1 = cube([0,0], 10)
const pt1 = b1[0]
show(b1)

View File

@ -87,7 +87,7 @@ const fnBox = box(3, 6, 10)
show(fnBox)"#; show(fnBox)"#;
let result = execute_and_snapshot(code).await.unwrap(); let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/function_sketch.png", &result, 1.0); twenty_twenty::assert_image("tests/executor/outputs/function_sketch.png", &result, 0.999);
} }
#[tokio::test(flavor = "multi_thread")] #[tokio::test(flavor = "multi_thread")]
@ -107,7 +107,11 @@ async fn serial_test_execute_with_function_sketch_with_position() {
show(box([0,0], 3, 6, 10))"#; show(box([0,0], 3, 6, 10))"#;
let result = execute_and_snapshot(code).await.unwrap(); let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/function_sketch_with_position.png", &result, 1.0); twenty_twenty::assert_image(
"tests/executor/outputs/function_sketch_with_position.png",
&result,
0.999,
);
} }
#[tokio::test(flavor = "multi_thread")] #[tokio::test(flavor = "multi_thread")]
@ -125,7 +129,7 @@ async fn serial_test_execute_with_angled_line() {
show(part001)"#; show(part001)"#;
let result = execute_and_snapshot(code).await.unwrap(); let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/angled_line.png", &result, 1.0); twenty_twenty::assert_image("tests/executor/outputs/angled_line.png", &result, 0.999);
} }
#[tokio::test(flavor = "multi_thread")] #[tokio::test(flavor = "multi_thread")]
@ -152,7 +156,7 @@ const bracket = startSketchOn('XY')
show(bracket)"#; show(bracket)"#;
let result = execute_and_snapshot(code).await.unwrap(); let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/parametric.png", &result, 1.0); twenty_twenty::assert_image("tests/executor/outputs/parametric.png", &result, 0.999);
} }
#[tokio::test(flavor = "multi_thread")] #[tokio::test(flavor = "multi_thread")]
@ -187,7 +191,7 @@ const bracket = startSketchAt([0, 0])
show(bracket)"#; show(bracket)"#;
let result = execute_and_snapshot(code).await.unwrap(); let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/parametric_with_tan_arc.png", &result, 1.0); twenty_twenty::assert_image("tests/executor/outputs/parametric_with_tan_arc.png", &result, 0.999);
} }
#[tokio::test(flavor = "multi_thread")] #[tokio::test(flavor = "multi_thread")]
@ -215,7 +219,7 @@ async fn serial_test_execute_pipes_on_pipes() {
let code = include_str!("inputs/pipes_on_pipes.kcl"); let code = include_str!("inputs/pipes_on_pipes.kcl");
let result = execute_and_snapshot(code).await.unwrap(); let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/pipes_on_pipes.png", &result, 1.0); twenty_twenty::assert_image("tests/executor/outputs/pipes_on_pipes.png", &result, 0.999);
} }
#[tokio::test(flavor = "multi_thread")] #[tokio::test(flavor = "multi_thread")]
@ -223,7 +227,7 @@ async fn serial_test_execute_kittycad_svg() {
let code = include_str!("inputs/kittycad_svg.kcl"); let code = include_str!("inputs/kittycad_svg.kcl");
let result = execute_and_snapshot(code).await.unwrap(); let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/kittycad_svg.png", &result, 1.0); twenty_twenty::assert_image("tests/executor/outputs/kittycad_svg.png", &result, 0.999);
} }
#[tokio::test(flavor = "multi_thread")] #[tokio::test(flavor = "multi_thread")]
@ -270,7 +274,7 @@ const body = startSketchOn('XY')
show(body)"#; show(body)"#;
let result = execute_and_snapshot(code).await.unwrap(); let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/close_arc.png", &result, 1.0); twenty_twenty::assert_image("tests/executor/outputs/close_arc.png", &result, 0.999);
} }
#[tokio::test(flavor = "multi_thread")] #[tokio::test(flavor = "multi_thread")]
@ -296,7 +300,7 @@ let thing = box(-12, -15, 10)
box(-20, -5, 10)"#; box(-20, -5, 10)"#;
let result = execute_and_snapshot(code).await.unwrap(); let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/negative_args.png", &result, 1.0); twenty_twenty::assert_image("tests/executor/outputs/negative_args.png", &result, 0.999);
} }
#[tokio::test(flavor = "multi_thread")] #[tokio::test(flavor = "multi_thread")]
@ -309,7 +313,7 @@ async fn test_basic_tangental_arc() {
"#; "#;
let result = execute_and_snapshot(code).await.unwrap(); let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/tangental_arc.png", &result, 1.0); twenty_twenty::assert_image("tests/executor/outputs/tangental_arc.png", &result, 0.999);
} }
#[tokio::test(flavor = "multi_thread")] #[tokio::test(flavor = "multi_thread")]
@ -322,7 +326,7 @@ async fn test_basic_tangental_arc_with_point() {
"#; "#;
let result = execute_and_snapshot(code).await.unwrap(); let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/tangental_arc_with_point.png", &result, 1.0); twenty_twenty::assert_image("tests/executor/outputs/tangental_arc_with_point.png", &result, 0.999);
} }
#[tokio::test(flavor = "multi_thread")] #[tokio::test(flavor = "multi_thread")]
@ -335,7 +339,7 @@ async fn test_basic_tangental_arc_to() {
"#; "#;
let result = execute_and_snapshot(code).await.unwrap(); let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/tangental_arc_to.png", &result, 1.0); twenty_twenty::assert_image("tests/executor/outputs/tangental_arc_to.png", &result, 0.999);
} }
#[tokio::test(flavor = "multi_thread")] #[tokio::test(flavor = "multi_thread")]
@ -362,7 +366,11 @@ let thing = box(-12, -15, 10, 'yz')
box(-20, -5, 10, 'xy')"#; box(-20, -5, 10, 'xy')"#;
let result = execute_and_snapshot(code).await.unwrap(); let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/different_planes_same_drawing.png", &result, 1.0); twenty_twenty::assert_image(
"tests/executor/outputs/different_planes_same_drawing.png",
&result,
0.999,
);
} }
#[tokio::test(flavor = "multi_thread")] #[tokio::test(flavor = "multi_thread")]
@ -421,5 +429,5 @@ const part004 = startSketchOn('YZ')
"#; "#;
let result = execute_and_snapshot(code).await.unwrap(); let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/lots_of_planes.png", &result, 1.0); twenty_twenty::assert_image("tests/executor/outputs/lots_of_planes.png", &result, 0.999);
} }