Compare commits
52 Commits
Author | SHA1 | Date | |
---|---|---|---|
1af8a8c64f | |||
eb4776826b | |||
f3dd0469d5 | |||
deea74754d | |||
3fd798c704 | |||
cc9eaf2991 | |||
6f24031220 | |||
672bcd297f | |||
3bc182fe16 | |||
589cd39eec | |||
63feebef5c | |||
65037abd9a | |||
97bc339a62 | |||
4e9a6375a5 | |||
3d19dfb800 | |||
d2a7b84292 | |||
9e02bab155 | |||
7352de5a70 | |||
9797d0cb81 | |||
83907fa9db | |||
a367be4e2b | |||
056fa00adc | |||
4759fb2e6f | |||
45f497d9cd | |||
dc61bdebdf | |||
61943055e5 | |||
416fe0f644 | |||
708465d818 | |||
e706fb02d6 | |||
1bf7daa474 | |||
ffc47f8f40 | |||
768aaa84f6 | |||
f3a700eec8 | |||
c853637a9a | |||
9af30d9ef6 | |||
6164714a6b | |||
64ceb98eba | |||
2cbf260900 | |||
cfaaedf602 | |||
12b3717eb5 | |||
0bc685b0c4 | |||
9ee032771a | |||
c307ddd1b1 | |||
a30818ff2b | |||
53e763d938 | |||
8f74cd1d0c | |||
c271942897 | |||
a03d09b41d | |||
2971b7752b | |||
70e99eb00b | |||
5c66af59d2 | |||
6dda6daeef |
21
LICENSE
Normal file
21
LICENSE
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2023 The KittyCAD Authors
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
1859
docs/kcl/std.json
1859
docs/kcl/std.json
File diff suppressed because it is too large
Load Diff
316
docs/kcl/std.md
316
docs/kcl/std.md
@ -50,6 +50,8 @@
|
|||||||
* [`sqrt`](#sqrt)
|
* [`sqrt`](#sqrt)
|
||||||
* [`startSketchAt`](#startSketchAt)
|
* [`startSketchAt`](#startSketchAt)
|
||||||
* [`tan`](#tan)
|
* [`tan`](#tan)
|
||||||
|
* [`tangentalArc`](#tangentalArc)
|
||||||
|
* [`tangentalArcTo`](#tangentalArcTo)
|
||||||
* [`tau`](#tau)
|
* [`tau`](#tau)
|
||||||
* [`xLine`](#xLine)
|
* [`xLine`](#xLine)
|
||||||
* [`xLineTo`](#xLineTo)
|
* [`xLineTo`](#xLineTo)
|
||||||
@ -3234,6 +3236,320 @@ tan(num: number) -> number
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### tangentalArc
|
||||||
|
|
||||||
|
Draw an arc.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
```
|
||||||
|
tangentalArc(data: TangentalArcData, sketch_group: SketchGroup) -> SketchGroup
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Arguments
|
||||||
|
|
||||||
|
* `data`: `TangentalArcData` - Data to draw a tangental arc.
|
||||||
|
```
|
||||||
|
{
|
||||||
|
// Offset of the arc, in degrees.
|
||||||
|
offset: number,
|
||||||
|
// Radius of the arc. Not to be confused with Raiders of the Lost Ark.
|
||||||
|
radius: number,
|
||||||
|
} |
|
||||||
|
{
|
||||||
|
// The tag.
|
||||||
|
tag: string,
|
||||||
|
// Where the arc should end. Must lie in the same plane as the current path pen position. Must not be colinear with current path pen position.
|
||||||
|
to: [number],
|
||||||
|
} |
|
||||||
|
[number]
|
||||||
|
```
|
||||||
|
* `sketch_group`: `SketchGroup` - A sketch group is a collection of paths.
|
||||||
|
```
|
||||||
|
{
|
||||||
|
// The id of the sketch group.
|
||||||
|
id: uuid,
|
||||||
|
// The position of the sketch group.
|
||||||
|
position: [number],
|
||||||
|
// The rotation of the sketch group.
|
||||||
|
rotation: [number],
|
||||||
|
// The starting path.
|
||||||
|
start: {
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
},
|
||||||
|
// The paths in the sketch group.
|
||||||
|
value: [{
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
type: string,
|
||||||
|
} |
|
||||||
|
{
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
type: string,
|
||||||
|
// The x coordinate.
|
||||||
|
x: number,
|
||||||
|
} |
|
||||||
|
{
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
type: string,
|
||||||
|
// The x coordinate.
|
||||||
|
x: number,
|
||||||
|
// The y coordinate.
|
||||||
|
y: number,
|
||||||
|
} |
|
||||||
|
{
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
type: string,
|
||||||
|
}],
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
* `SketchGroup` - A sketch group is a collection of paths.
|
||||||
|
```
|
||||||
|
{
|
||||||
|
// The id of the sketch group.
|
||||||
|
id: uuid,
|
||||||
|
// The position of the sketch group.
|
||||||
|
position: [number],
|
||||||
|
// The rotation of the sketch group.
|
||||||
|
rotation: [number],
|
||||||
|
// The starting path.
|
||||||
|
start: {
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
},
|
||||||
|
// The paths in the sketch group.
|
||||||
|
value: [{
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
type: string,
|
||||||
|
} |
|
||||||
|
{
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
type: string,
|
||||||
|
// The x coordinate.
|
||||||
|
x: number,
|
||||||
|
} |
|
||||||
|
{
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
type: string,
|
||||||
|
// The x coordinate.
|
||||||
|
x: number,
|
||||||
|
// The y coordinate.
|
||||||
|
y: number,
|
||||||
|
} |
|
||||||
|
{
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
type: string,
|
||||||
|
}],
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### tangentalArcTo
|
||||||
|
|
||||||
|
Draw an arc.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
```
|
||||||
|
tangentalArcTo(data: TangentalArcToData, sketch_group: SketchGroup) -> SketchGroup
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Arguments
|
||||||
|
|
||||||
|
* `data`: `TangentalArcToData` - Data to draw a tangental arc to a specific point.
|
||||||
|
```
|
||||||
|
{
|
||||||
|
// The tag.
|
||||||
|
tag: string,
|
||||||
|
// Where the arc should end. Must lie in the same plane as the current path pen position. Must not be colinear with current path pen position.
|
||||||
|
to: [number],
|
||||||
|
} |
|
||||||
|
[number]
|
||||||
|
```
|
||||||
|
* `sketch_group`: `SketchGroup` - A sketch group is a collection of paths.
|
||||||
|
```
|
||||||
|
{
|
||||||
|
// The id of the sketch group.
|
||||||
|
id: uuid,
|
||||||
|
// The position of the sketch group.
|
||||||
|
position: [number],
|
||||||
|
// The rotation of the sketch group.
|
||||||
|
rotation: [number],
|
||||||
|
// The starting path.
|
||||||
|
start: {
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
},
|
||||||
|
// The paths in the sketch group.
|
||||||
|
value: [{
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
type: string,
|
||||||
|
} |
|
||||||
|
{
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
type: string,
|
||||||
|
// The x coordinate.
|
||||||
|
x: number,
|
||||||
|
} |
|
||||||
|
{
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
type: string,
|
||||||
|
// The x coordinate.
|
||||||
|
x: number,
|
||||||
|
// The y coordinate.
|
||||||
|
y: number,
|
||||||
|
} |
|
||||||
|
{
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
type: string,
|
||||||
|
}],
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Returns
|
||||||
|
|
||||||
|
* `SketchGroup` - A sketch group is a collection of paths.
|
||||||
|
```
|
||||||
|
{
|
||||||
|
// The id of the sketch group.
|
||||||
|
id: uuid,
|
||||||
|
// The position of the sketch group.
|
||||||
|
position: [number],
|
||||||
|
// The rotation of the sketch group.
|
||||||
|
rotation: [number],
|
||||||
|
// The starting path.
|
||||||
|
start: {
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
},
|
||||||
|
// The paths in the sketch group.
|
||||||
|
value: [{
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
type: string,
|
||||||
|
} |
|
||||||
|
{
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
type: string,
|
||||||
|
// The x coordinate.
|
||||||
|
x: number,
|
||||||
|
} |
|
||||||
|
{
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
type: string,
|
||||||
|
// The x coordinate.
|
||||||
|
x: number,
|
||||||
|
// The y coordinate.
|
||||||
|
y: number,
|
||||||
|
} |
|
||||||
|
{
|
||||||
|
// The from point.
|
||||||
|
from: [number],
|
||||||
|
// The name of the path.
|
||||||
|
name: string,
|
||||||
|
// The to point.
|
||||||
|
to: [number],
|
||||||
|
type: string,
|
||||||
|
}],
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
### tau
|
### tau
|
||||||
|
|
||||||
Return the value of `tau`. The full circle constant (τ). Equal to 2π.
|
Return the value of `tau`. The full circle constant (τ). Equal to 2π.
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "untitled-app",
|
"name": "untitled-app",
|
||||||
"version": "0.9.1",
|
"version": "0.9.5",
|
||||||
"private": true,
|
"private": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@codemirror/autocomplete": "^6.9.0",
|
"@codemirror/autocomplete": "^6.9.0",
|
||||||
@ -10,7 +10,7 @@
|
|||||||
"@fortawesome/react-fontawesome": "^0.2.0",
|
"@fortawesome/react-fontawesome": "^0.2.0",
|
||||||
"@headlessui/react": "^1.7.13",
|
"@headlessui/react": "^1.7.13",
|
||||||
"@headlessui/tailwindcss": "^0.2.0",
|
"@headlessui/tailwindcss": "^0.2.0",
|
||||||
"@kittycad/lib": "^0.0.38",
|
"@kittycad/lib": "^0.0.40",
|
||||||
"@lezer/javascript": "^1.4.7",
|
"@lezer/javascript": "^1.4.7",
|
||||||
"@open-rpc/client-js": "^1.8.1",
|
"@open-rpc/client-js": "^1.8.1",
|
||||||
"@react-hook/resize-observer": "^1.2.6",
|
"@react-hook/resize-observer": "^1.2.6",
|
||||||
@ -48,7 +48,7 @@
|
|||||||
"ts-node": "^10.9.1",
|
"ts-node": "^10.9.1",
|
||||||
"typescript": "^4.4.2",
|
"typescript": "^4.4.2",
|
||||||
"uuid": "^9.0.0",
|
"uuid": "^9.0.0",
|
||||||
"vitest": "^0.34.1",
|
"vitest": "^0.34.6",
|
||||||
"vscode-jsonrpc": "^8.1.0",
|
"vscode-jsonrpc": "^8.1.0",
|
||||||
"vscode-languageserver-protocol": "^3.17.3",
|
"vscode-languageserver-protocol": "^3.17.3",
|
||||||
"wasm-pack": "^0.12.1",
|
"wasm-pack": "^0.12.1",
|
||||||
@ -102,7 +102,6 @@
|
|||||||
"@babel/preset-env": "^7.22.9",
|
"@babel/preset-env": "^7.22.9",
|
||||||
"@tauri-apps/cli": "^1.3.1",
|
"@tauri-apps/cli": "^1.3.1",
|
||||||
"@types/crypto-js": "^4.1.1",
|
"@types/crypto-js": "^4.1.1",
|
||||||
"@types/debounce": "^1.2.1",
|
|
||||||
"@types/debounce-promise": "^3.1.6",
|
"@types/debounce-promise": "^3.1.6",
|
||||||
"@types/isomorphic-fetch": "^0.0.36",
|
"@types/isomorphic-fetch": "^0.0.36",
|
||||||
"@types/react-modal": "^3.16.0",
|
"@types/react-modal": "^3.16.0",
|
||||||
@ -117,7 +116,7 @@
|
|||||||
"eslint-plugin-css-modules": "^2.11.0",
|
"eslint-plugin-css-modules": "^2.11.0",
|
||||||
"happy-dom": "^10.8.0",
|
"happy-dom": "^10.8.0",
|
||||||
"husky": "^8.0.3",
|
"husky": "^8.0.3",
|
||||||
"postcss": "^8.4.19",
|
"postcss": "^8.4.31",
|
||||||
"prettier": "^2.8.0",
|
"prettier": "^2.8.0",
|
||||||
"setimmediate": "^1.0.5",
|
"setimmediate": "^1.0.5",
|
||||||
"tailwindcss": "^3.2.4",
|
"tailwindcss": "^3.2.4",
|
||||||
|
122
src-tauri/Cargo.lock
generated
122
src-tauri/Cargo.lock
generated
@ -84,7 +84,7 @@ dependencies = [
|
|||||||
"tauri-build",
|
"tauri-build",
|
||||||
"tauri-plugin-fs-extra",
|
"tauri-plugin-fs-extra",
|
||||||
"tokio",
|
"tokio",
|
||||||
"toml 0.8.0",
|
"toml 0.8.2",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@ -1309,7 +1309,21 @@ checksum = "e5c13fb08e5d4dfc151ee5e88bae63f7773d61852f3bdc73c9f4b9e1bde03148"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"log",
|
"log",
|
||||||
"mac",
|
"mac",
|
||||||
"markup5ever",
|
"markup5ever 0.10.1",
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn 1.0.109",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "html5ever"
|
||||||
|
version = "0.26.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "bea68cab48b8459f17cf1c944c67ddc572d272d9f2b274140f223ecb1da4a3b7"
|
||||||
|
dependencies = [
|
||||||
|
"log",
|
||||||
|
"mac",
|
||||||
|
"markup5ever 0.11.0",
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
"syn 1.0.109",
|
"syn 1.0.109",
|
||||||
@ -1644,9 +1658,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "kittycad"
|
name = "kittycad"
|
||||||
version = "0.2.26"
|
version = "0.2.28"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e2623ee601ce203476229df3f9d3a14664cb43e3f7455e9ac8ed91aacaa6163d"
|
checksum = "35b2f9302648dbb06fd7121687f9505fc3179eba84111a06d76b246e3158f5dc"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"async-trait",
|
"async-trait",
|
||||||
@ -1686,7 +1700,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||||||
checksum = "1ea8e9c6e031377cff82ee3001dc8026cdf431ed4e2e6b51f98ab8c73484a358"
|
checksum = "1ea8e9c6e031377cff82ee3001dc8026cdf431ed4e2e6b51f98ab8c73484a358"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"cssparser",
|
"cssparser",
|
||||||
"html5ever",
|
"html5ever 0.25.2",
|
||||||
|
"matches",
|
||||||
|
"selectors",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "kuchikiki"
|
||||||
|
version = "0.8.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "f29e4755b7b995046f510a7520c42b2fed58b77bd94d5a87a8eb43d2fd126da8"
|
||||||
|
dependencies = [
|
||||||
|
"cssparser",
|
||||||
|
"html5ever 0.26.0",
|
||||||
|
"indexmap 1.9.3",
|
||||||
"matches",
|
"matches",
|
||||||
"selectors",
|
"selectors",
|
||||||
]
|
]
|
||||||
@ -1748,9 +1775,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "log"
|
name = "log"
|
||||||
version = "0.4.18"
|
version = "0.4.20"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "518ef76f2f87365916b142844c16d8fefd85039bc5699050210a7778ee1cd1de"
|
checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"serde",
|
"serde",
|
||||||
]
|
]
|
||||||
@ -1802,7 +1829,21 @@ checksum = "a24f40fb03852d1cdd84330cddcaf98e9ec08a7b7768e952fad3b4cf048ec8fd"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"log",
|
"log",
|
||||||
"phf 0.8.0",
|
"phf 0.8.0",
|
||||||
"phf_codegen",
|
"phf_codegen 0.8.0",
|
||||||
|
"string_cache",
|
||||||
|
"string_cache_codegen",
|
||||||
|
"tendril",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "markup5ever"
|
||||||
|
version = "0.11.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7a2629bb1404f3d34c2e921f21fd34ba00b206124c81f65c50b43b6aaefeb016"
|
||||||
|
dependencies = [
|
||||||
|
"log",
|
||||||
|
"phf 0.10.1",
|
||||||
|
"phf_codegen 0.10.0",
|
||||||
"string_cache",
|
"string_cache",
|
||||||
"string_cache_codegen",
|
"string_cache_codegen",
|
||||||
"tendril",
|
"tendril",
|
||||||
@ -2374,6 +2415,16 @@ dependencies = [
|
|||||||
"phf_shared 0.8.0",
|
"phf_shared 0.8.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "phf_codegen"
|
||||||
|
version = "0.10.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "4fb1c3a8bc4dd4e5cfce29b44ffc14bedd2ee294559a294e2a4d4c9e9a6a13cd"
|
||||||
|
dependencies = [
|
||||||
|
"phf_generator 0.10.0",
|
||||||
|
"phf_shared 0.10.0",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "phf_generator"
|
name = "phf_generator"
|
||||||
version = "0.8.0"
|
version = "0.8.0"
|
||||||
@ -3139,7 +3190,7 @@ dependencies = [
|
|||||||
"log",
|
"log",
|
||||||
"matches",
|
"matches",
|
||||||
"phf 0.8.0",
|
"phf 0.8.0",
|
||||||
"phf_codegen",
|
"phf_codegen 0.8.0",
|
||||||
"precomputed-hash",
|
"precomputed-hash",
|
||||||
"servo_arc",
|
"servo_arc",
|
||||||
"smallvec",
|
"smallvec",
|
||||||
@ -3661,9 +3712,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tauri"
|
name = "tauri"
|
||||||
version = "1.4.1"
|
version = "1.5.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "7fbe522898e35407a8e60dc3870f7579fea2fc262a6a6072eccdd37ae1e1d91e"
|
checksum = "72aee3277d0a0df01472cc704ab5934a51a1f25348838df17bfb3c5cb727880c"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"base64 0.21.2",
|
"base64 0.21.2",
|
||||||
@ -3717,12 +3768,13 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tauri-build"
|
name = "tauri-build"
|
||||||
version = "1.4.0"
|
version = "1.5.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "7d2edd6a259b5591c8efdeb9d5702cb53515b82a6affebd55c7fd6d3a27b7d1b"
|
checksum = "defbfc551bd38ab997e5f8e458f87396d2559d05ce32095076ad6c30f7fc5f9c"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"cargo_toml",
|
"cargo_toml",
|
||||||
|
"dirs-next",
|
||||||
"heck 0.4.1",
|
"heck 0.4.1",
|
||||||
"json-patch",
|
"json-patch",
|
||||||
"semver",
|
"semver",
|
||||||
@ -3730,13 +3782,14 @@ dependencies = [
|
|||||||
"serde_json",
|
"serde_json",
|
||||||
"tauri-utils",
|
"tauri-utils",
|
||||||
"tauri-winres",
|
"tauri-winres",
|
||||||
|
"walkdir",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tauri-codegen"
|
name = "tauri-codegen"
|
||||||
version = "1.4.0"
|
version = "1.4.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "54ad2d49fdeab4a08717f5b49a163bdc72efc3b1950b6758245fcde79b645e1a"
|
checksum = "7b3475e55acec0b4a50fb96435f19631fb58cbcd31923e1a213de5c382536bbb"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"base64 0.21.2",
|
"base64 0.21.2",
|
||||||
"brotli",
|
"brotli",
|
||||||
@ -3760,9 +3813,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tauri-macros"
|
name = "tauri-macros"
|
||||||
version = "1.4.0"
|
version = "1.4.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "8eb12a2454e747896929338d93b0642144bb51e0dddbb36e579035731f0d76b7"
|
checksum = "613740228de92d9196b795ac455091d3a5fbdac2654abb8bb07d010b62ab43af"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"heck 0.4.1",
|
"heck 0.4.1",
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
@ -3775,7 +3828,7 @@ dependencies = [
|
|||||||
[[package]]
|
[[package]]
|
||||||
name = "tauri-plugin-fs-extra"
|
name = "tauri-plugin-fs-extra"
|
||||||
version = "0.0.0"
|
version = "0.0.0"
|
||||||
source = "git+https://github.com/tauri-apps/plugins-workspace?branch=v1#0190f68f1dff80576595a1b79e31338a3e9ebba1"
|
source = "git+https://github.com/tauri-apps/plugins-workspace?branch=v1#9b96996b5a90a6a57d587ce4312975f13a4d8bc2"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"log",
|
"log",
|
||||||
"serde",
|
"serde",
|
||||||
@ -3786,9 +3839,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tauri-runtime"
|
name = "tauri-runtime"
|
||||||
version = "0.14.0"
|
version = "0.14.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "108683199cb18f96d2d4134187bb789964143c845d2d154848dda209191fd769"
|
checksum = "07f8e9e53e00e9f41212c115749e87d5cd2a9eebccafca77a19722eeecd56d43"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"gtk",
|
"gtk",
|
||||||
"http",
|
"http",
|
||||||
@ -3807,9 +3860,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tauri-runtime-wry"
|
name = "tauri-runtime-wry"
|
||||||
version = "0.14.0"
|
version = "0.14.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "0b7aa256a1407a3a091b5d843eccc1a5042289baf0a43d1179d9f0fcfea37c1b"
|
checksum = "8141d72b6b65f2008911e9ef5b98a68d1e3413b7a1464e8f85eb3673bb19a895"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"cocoa",
|
"cocoa",
|
||||||
"gtk",
|
"gtk",
|
||||||
@ -3827,19 +3880,20 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tauri-utils"
|
name = "tauri-utils"
|
||||||
version = "1.4.0"
|
version = "1.5.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "03fc02bb6072bb397e1d473c6f76c953cda48b4a2d0cce605df284aa74a12e84"
|
checksum = "34d55e185904a84a419308d523c2c6891d5e2dbcee740c4997eb42e75a7b0f46"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"brotli",
|
"brotli",
|
||||||
"ctor",
|
"ctor",
|
||||||
"dunce",
|
"dunce",
|
||||||
"glob",
|
"glob",
|
||||||
"heck 0.4.1",
|
"heck 0.4.1",
|
||||||
"html5ever",
|
"html5ever 0.26.0",
|
||||||
"infer",
|
"infer",
|
||||||
"json-patch",
|
"json-patch",
|
||||||
"kuchiki",
|
"kuchikiki",
|
||||||
|
"log",
|
||||||
"memchr",
|
"memchr",
|
||||||
"phf 0.10.1",
|
"phf 0.10.1",
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
@ -4024,14 +4078,14 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "toml"
|
name = "toml"
|
||||||
version = "0.8.0"
|
version = "0.8.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c226a7bba6d859b63c92c4b4fe69c5b6b72d0cb897dbc8e6012298e6154cb56e"
|
checksum = "185d8ab0dfbb35cf1399a6344d8484209c088f75f8f68230da55d48d95d43e3d"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"serde",
|
"serde",
|
||||||
"serde_spanned",
|
"serde_spanned",
|
||||||
"toml_datetime",
|
"toml_datetime",
|
||||||
"toml_edit 0.20.0",
|
"toml_edit 0.20.2",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@ -4058,9 +4112,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "toml_edit"
|
name = "toml_edit"
|
||||||
version = "0.20.0"
|
version = "0.20.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "8ff63e60a958cefbb518ae1fd6566af80d9d4be430a33f3723dfc47d1d411d95"
|
checksum = "396e4d48bbb2b7554c944bde63101b5ae446cff6ec4a24227428f15eb72ef338"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"indexmap 2.0.0",
|
"indexmap 2.0.0",
|
||||||
"serde",
|
"serde",
|
||||||
@ -4855,9 +4909,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wry"
|
name = "wry"
|
||||||
version = "0.24.3"
|
version = "0.24.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "33748f35413c8a98d45f7a08832d848c0c5915501803d1faade5a4ebcd258cea"
|
checksum = "88ef04bdad49eba2e01f06e53688c8413bd6a87b0bc14b72284465cf96e3578e"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"base64 0.13.1",
|
"base64 0.13.1",
|
||||||
"block",
|
"block",
|
||||||
@ -4869,7 +4923,7 @@ dependencies = [
|
|||||||
"gio",
|
"gio",
|
||||||
"glib",
|
"glib",
|
||||||
"gtk",
|
"gtk",
|
||||||
"html5ever",
|
"html5ever 0.25.2",
|
||||||
"http",
|
"http",
|
||||||
"kuchiki",
|
"kuchiki",
|
||||||
"libc",
|
"libc",
|
||||||
|
@ -12,18 +12,18 @@ rust-version = "1.60"
|
|||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
tauri-build = { version = "1.4.0", features = [] }
|
tauri-build = { version = "1.5.0", features = [] }
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
anyhow = "1"
|
anyhow = "1"
|
||||||
kittycad = "0.2.26"
|
kittycad = "0.2.28"
|
||||||
oauth2 = "4.4.2"
|
oauth2 = "4.4.2"
|
||||||
serde = { version = "1.0", features = ["derive"] }
|
serde = { version = "1.0", features = ["derive"] }
|
||||||
serde_json = "1.0"
|
serde_json = "1.0"
|
||||||
tauri = { version = "1.4.1", features = [ "os-all", "dialog-all", "fs-all", "http-request", "path-all", "shell-open", "shell-open-api", "updater", "devtools"] }
|
tauri = { version = "1.5.0", features = [ "os-all", "dialog-all", "fs-all", "http-request", "path-all", "shell-open", "shell-open-api", "updater", "devtools"] }
|
||||||
tauri-plugin-fs-extra = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
|
tauri-plugin-fs-extra = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
|
||||||
tokio = { version = "1.32.0", features = ["time"] }
|
tokio = { version = "1.32.0", features = ["time"] }
|
||||||
toml = "0.8.0"
|
toml = "0.8.2"
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
# this feature is used for production builds or when `devPath` points to the filesystem and the built-in dev server is disabled.
|
# this feature is used for production builds or when `devPath` points to the filesystem and the built-in dev server is disabled.
|
||||||
|
@ -8,7 +8,7 @@
|
|||||||
},
|
},
|
||||||
"package": {
|
"package": {
|
||||||
"productName": "kittycad-modeling",
|
"productName": "kittycad-modeling",
|
||||||
"version": "0.9.1"
|
"version": "0.9.5"
|
||||||
},
|
},
|
||||||
"tauri": {
|
"tauri": {
|
||||||
"allowlist": {
|
"allowlist": {
|
||||||
|
12
src/App.tsx
12
src/App.tsx
@ -31,6 +31,7 @@ import { TextEditor } from 'components/TextEditor'
|
|||||||
import { Themes, getSystemTheme } from 'lib/theme'
|
import { Themes, getSystemTheme } from 'lib/theme'
|
||||||
import { useSetupEngineManager } from 'hooks/useSetupEngineManager'
|
import { useSetupEngineManager } from 'hooks/useSetupEngineManager'
|
||||||
import { useEngineConnectionSubscriptions } from 'hooks/useEngineConnectionSubscriptions'
|
import { useEngineConnectionSubscriptions } from 'hooks/useEngineConnectionSubscriptions'
|
||||||
|
import { engineCommandManager } from './lang/std/engineConnection'
|
||||||
|
|
||||||
export function App() {
|
export function App() {
|
||||||
const { code: loadedCode, project } = useLoaderData() as IndexLoaderData
|
const { code: loadedCode, project } = useLoaderData() as IndexLoaderData
|
||||||
@ -39,7 +40,6 @@ export function App() {
|
|||||||
useHotKeyListener()
|
useHotKeyListener()
|
||||||
const {
|
const {
|
||||||
setCode,
|
setCode,
|
||||||
engineCommandManager,
|
|
||||||
buttonDownInStream,
|
buttonDownInStream,
|
||||||
openPanes,
|
openPanes,
|
||||||
setOpenPanes,
|
setOpenPanes,
|
||||||
@ -52,7 +52,6 @@ export function App() {
|
|||||||
guiMode: s.guiMode,
|
guiMode: s.guiMode,
|
||||||
setGuiMode: s.setGuiMode,
|
setGuiMode: s.setGuiMode,
|
||||||
setCode: s.setCode,
|
setCode: s.setCode,
|
||||||
engineCommandManager: s.engineCommandManager,
|
|
||||||
buttonDownInStream: s.buttonDownInStream,
|
buttonDownInStream: s.buttonDownInStream,
|
||||||
openPanes: s.openPanes,
|
openPanes: s.openPanes,
|
||||||
setOpenPanes: s.setOpenPanes,
|
setOpenPanes: s.setOpenPanes,
|
||||||
@ -91,12 +90,12 @@ export function App() {
|
|||||||
if (guiMode.sketchMode === 'sketchEdit') {
|
if (guiMode.sketchMode === 'sketchEdit') {
|
||||||
// TODO: share this with Toolbar's "Exit sketch" button
|
// TODO: share this with Toolbar's "Exit sketch" button
|
||||||
// exiting sketch should be done consistently across all exits
|
// exiting sketch should be done consistently across all exits
|
||||||
engineCommandManager?.sendSceneCommand({
|
engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd_id: uuidv4(),
|
cmd_id: uuidv4(),
|
||||||
cmd: { type: 'edit_mode_exit' },
|
cmd: { type: 'edit_mode_exit' },
|
||||||
})
|
})
|
||||||
engineCommandManager?.sendSceneCommand({
|
engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd_id: uuidv4(),
|
cmd_id: uuidv4(),
|
||||||
cmd: { type: 'default_camera_disable_sketch_mode' },
|
cmd: { type: 'default_camera_disable_sketch_mode' },
|
||||||
@ -107,7 +106,7 @@ export function App() {
|
|||||||
// when exiting sketch mode in the future
|
// when exiting sketch mode in the future
|
||||||
executeAst()
|
executeAst()
|
||||||
} else {
|
} else {
|
||||||
engineCommandManager?.sendSceneCommand({
|
engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd_id: uuidv4(),
|
cmd_id: uuidv4(),
|
||||||
cmd: {
|
cmd: {
|
||||||
@ -156,7 +155,7 @@ export function App() {
|
|||||||
useEngineConnectionSubscriptions()
|
useEngineConnectionSubscriptions()
|
||||||
|
|
||||||
const debounceSocketSend = throttle<EngineCommand>((message) => {
|
const debounceSocketSend = throttle<EngineCommand>((message) => {
|
||||||
engineCommandManager?.sendSceneCommand(message)
|
engineCommandManager.sendSceneCommand(message)
|
||||||
}, 16)
|
}, 16)
|
||||||
const handleMouseMove: MouseEventHandler<HTMLDivElement> = (e) => {
|
const handleMouseMove: MouseEventHandler<HTMLDivElement> = (e) => {
|
||||||
e.nativeEvent.preventDefault()
|
e.nativeEvent.preventDefault()
|
||||||
@ -216,7 +215,6 @@ export function App() {
|
|||||||
} else if (interactionGuards.zoom.dragCallback(eWithButton)) {
|
} else if (interactionGuards.zoom.dragCallback(eWithButton)) {
|
||||||
interaction = 'zoom'
|
interaction = 'zoom'
|
||||||
} else {
|
} else {
|
||||||
console.log('none')
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -18,6 +18,7 @@ import styles from './Toolbar.module.css'
|
|||||||
import { v4 as uuidv4 } from 'uuid'
|
import { v4 as uuidv4 } from 'uuid'
|
||||||
import { useAppMode } from 'hooks/useAppMode'
|
import { useAppMode } from 'hooks/useAppMode'
|
||||||
import { ActionIcon } from 'components/ActionIcon'
|
import { ActionIcon } from 'components/ActionIcon'
|
||||||
|
import { engineCommandManager } from './lang/std/engineConnection'
|
||||||
|
|
||||||
export const sketchButtonClassnames = {
|
export const sketchButtonClassnames = {
|
||||||
background:
|
background:
|
||||||
@ -50,7 +51,6 @@ export const Toolbar = () => {
|
|||||||
ast,
|
ast,
|
||||||
updateAst,
|
updateAst,
|
||||||
programMemory,
|
programMemory,
|
||||||
engineCommandManager,
|
|
||||||
executeAst,
|
executeAst,
|
||||||
} = useStore((s) => ({
|
} = useStore((s) => ({
|
||||||
guiMode: s.guiMode,
|
guiMode: s.guiMode,
|
||||||
@ -59,15 +59,10 @@ export const Toolbar = () => {
|
|||||||
ast: s.ast,
|
ast: s.ast,
|
||||||
updateAst: s.updateAst,
|
updateAst: s.updateAst,
|
||||||
programMemory: s.programMemory,
|
programMemory: s.programMemory,
|
||||||
engineCommandManager: s.engineCommandManager,
|
|
||||||
executeAst: s.executeAst,
|
executeAst: s.executeAst,
|
||||||
}))
|
}))
|
||||||
useAppMode()
|
useAppMode()
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
console.log('guiMode', guiMode)
|
|
||||||
}, [guiMode])
|
|
||||||
|
|
||||||
function ToolbarButtons({ className }: React.HTMLAttributes<HTMLElement>) {
|
function ToolbarButtons({ className }: React.HTMLAttributes<HTMLElement>) {
|
||||||
return (
|
return (
|
||||||
<span className={styles.toolbarButtons + ' ' + className}>
|
<span className={styles.toolbarButtons + ' ' + className}>
|
||||||
@ -173,12 +168,12 @@ export const Toolbar = () => {
|
|||||||
{guiMode.mode === 'sketch' && (
|
{guiMode.mode === 'sketch' && (
|
||||||
<button
|
<button
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
engineCommandManager?.sendSceneCommand({
|
engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd_id: uuidv4(),
|
cmd_id: uuidv4(),
|
||||||
cmd: { type: 'edit_mode_exit' },
|
cmd: { type: 'edit_mode_exit' },
|
||||||
})
|
})
|
||||||
engineCommandManager?.sendSceneCommand({
|
engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd_id: uuidv4(),
|
cmd_id: uuidv4(),
|
||||||
cmd: { type: 'default_camera_disable_sketch_mode' },
|
cmd: { type: 'default_camera_disable_sketch_mode' },
|
||||||
@ -214,7 +209,7 @@ export const Toolbar = () => {
|
|||||||
<button
|
<button
|
||||||
key={sketchFnName}
|
key={sketchFnName}
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
engineCommandManager?.sendSceneCommand({
|
engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd_id: uuidv4(),
|
cmd_id: uuidv4(),
|
||||||
cmd: {
|
cmd: {
|
||||||
|
@ -23,10 +23,7 @@ type ActionButtonAsLink = BaseActionButtonProps &
|
|||||||
}
|
}
|
||||||
|
|
||||||
type ActionButtonAsExternal = BaseActionButtonProps &
|
type ActionButtonAsExternal = BaseActionButtonProps &
|
||||||
Omit<
|
Omit<LinkProps, keyof BaseActionButtonProps> & {
|
||||||
React.AnchorHTMLAttributes<HTMLAnchorElement>,
|
|
||||||
keyof BaseActionButtonProps
|
|
||||||
> & {
|
|
||||||
Element: 'externalLink'
|
Element: 'externalLink'
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -69,12 +66,17 @@ export const ActionButton = (props: ActionButtonProps) => {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
case 'externalLink': {
|
case 'externalLink': {
|
||||||
const { Element, icon, children, className, ...rest } = props
|
const { Element, to, icon, children, className, ...rest } = props
|
||||||
return (
|
return (
|
||||||
<a className={classNames} {...rest}>
|
<Link
|
||||||
|
to={to || paths.INDEX}
|
||||||
|
className={classNames}
|
||||||
|
{...rest}
|
||||||
|
target="_blank"
|
||||||
|
>
|
||||||
{icon && <ActionIcon {...icon} />}
|
{icon && <ActionIcon {...icon} />}
|
||||||
{children}
|
{children}
|
||||||
</a>
|
</Link>
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
default: {
|
default: {
|
||||||
|
@ -1,7 +1,5 @@
|
|||||||
import { useEffect, useState, useRef } from 'react'
|
import { useEffect, useState, useRef } from 'react'
|
||||||
import { parser_wasm } from '../lang/abstractSyntaxTree'
|
import { parse, BinaryPart, Value, executor } from '../lang/wasm'
|
||||||
import { BinaryPart, Value } from '../lang/abstractSyntaxTreeTypes'
|
|
||||||
import { executor } from '../lang/executor'
|
|
||||||
import {
|
import {
|
||||||
createIdentifier,
|
createIdentifier,
|
||||||
createLiteral,
|
createLiteral,
|
||||||
@ -10,6 +8,7 @@ import {
|
|||||||
} from '../lang/modifyAst'
|
} from '../lang/modifyAst'
|
||||||
import { findAllPreviousVariables, PrevVariable } from '../lang/queryAst'
|
import { findAllPreviousVariables, PrevVariable } from '../lang/queryAst'
|
||||||
import { useStore } from '../useStore'
|
import { useStore } from '../useStore'
|
||||||
|
import { engineCommandManager } from '../lang/std/engineConnection'
|
||||||
|
|
||||||
export const AvailableVars = ({
|
export const AvailableVars = ({
|
||||||
onVarClick,
|
onVarClick,
|
||||||
@ -92,14 +91,11 @@ export function useCalc({
|
|||||||
newVariableInsertIndex: number
|
newVariableInsertIndex: number
|
||||||
setNewVariableName: (a: string) => void
|
setNewVariableName: (a: string) => void
|
||||||
} {
|
} {
|
||||||
const { ast, programMemory, selectionRange, engineCommandManager } = useStore(
|
const { ast, programMemory, selectionRange } = useStore((s) => ({
|
||||||
(s) => ({
|
ast: s.ast,
|
||||||
ast: s.ast,
|
programMemory: s.programMemory,
|
||||||
programMemory: s.programMemory,
|
selectionRange: s.selectionRanges.codeBasedSelections[0].range,
|
||||||
selectionRange: s.selectionRanges.codeBasedSelections[0].range,
|
}))
|
||||||
engineCommandManager: s.engineCommandManager,
|
|
||||||
})
|
|
||||||
)
|
|
||||||
const inputRef = useRef<HTMLInputElement>(null)
|
const inputRef = useRef<HTMLInputElement>(null)
|
||||||
const [availableVarInfo, setAvailableVarInfo] = useState<
|
const [availableVarInfo, setAvailableVarInfo] = useState<
|
||||||
ReturnType<typeof findAllPreviousVariables>
|
ReturnType<typeof findAllPreviousVariables>
|
||||||
@ -140,10 +136,9 @@ export function useCalc({
|
|||||||
}, [ast, programMemory, selectionRange])
|
}, [ast, programMemory, selectionRange])
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (!engineCommandManager) return
|
|
||||||
try {
|
try {
|
||||||
const code = `const __result__ = ${value}\nshow(__result__)`
|
const code = `const __result__ = ${value}\nshow(__result__)`
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const _programMem: any = { root: {}, return: null }
|
const _programMem: any = { root: {}, return: null }
|
||||||
availableVarInfo.variables.forEach(({ key, value }) => {
|
availableVarInfo.variables.forEach(({ key, value }) => {
|
||||||
_programMem.root[key] = { type: 'userVal', value, __meta: [] }
|
_programMem.root[key] = { type: 'userVal', value, __meta: [] }
|
||||||
|
@ -1,5 +1,4 @@
|
|||||||
import { CollapsiblePanel, CollapsiblePanelProps } from './CollapsiblePanel'
|
import { CollapsiblePanel, CollapsiblePanelProps } from './CollapsiblePanel'
|
||||||
import { useStore } from '../useStore'
|
|
||||||
import { v4 as uuidv4 } from 'uuid'
|
import { v4 as uuidv4 } from 'uuid'
|
||||||
import { EngineCommand } from '../lang/std/engineConnection'
|
import { EngineCommand } from '../lang/std/engineConnection'
|
||||||
import { useState } from 'react'
|
import { useState } from 'react'
|
||||||
@ -7,6 +6,7 @@ import { ActionButton } from '../components/ActionButton'
|
|||||||
import { faCheck } from '@fortawesome/free-solid-svg-icons'
|
import { faCheck } from '@fortawesome/free-solid-svg-icons'
|
||||||
import { isReducedMotion } from 'lang/util'
|
import { isReducedMotion } from 'lang/util'
|
||||||
import { AstExplorer } from './AstExplorer'
|
import { AstExplorer } from './AstExplorer'
|
||||||
|
import { engineCommandManager } from '../lang/std/engineConnection'
|
||||||
|
|
||||||
type SketchModeCmd = Extract<
|
type SketchModeCmd = Extract<
|
||||||
Extract<EngineCommand, { type: 'modeling_cmd_req' }>['cmd'],
|
Extract<EngineCommand, { type: 'modeling_cmd_req' }>['cmd'],
|
||||||
@ -14,9 +14,6 @@ type SketchModeCmd = Extract<
|
|||||||
>
|
>
|
||||||
|
|
||||||
export const DebugPanel = ({ className, ...props }: CollapsiblePanelProps) => {
|
export const DebugPanel = ({ className, ...props }: CollapsiblePanelProps) => {
|
||||||
const { engineCommandManager } = useStore((s) => ({
|
|
||||||
engineCommandManager: s.engineCommandManager,
|
|
||||||
}))
|
|
||||||
const [sketchModeCmd, setSketchModeCmd] = useState<SketchModeCmd>({
|
const [sketchModeCmd, setSketchModeCmd] = useState<SketchModeCmd>({
|
||||||
type: 'default_camera_enable_sketch_mode',
|
type: 'default_camera_enable_sketch_mode',
|
||||||
origin: { x: 0, y: 0, z: 0 },
|
origin: { x: 0, y: 0, z: 0 },
|
||||||
@ -70,19 +67,18 @@ export const DebugPanel = ({ className, ...props }: CollapsiblePanelProps) => {
|
|||||||
className="w-16"
|
className="w-16"
|
||||||
type="checkbox"
|
type="checkbox"
|
||||||
checked={sketchModeCmd.ortho}
|
checked={sketchModeCmd.ortho}
|
||||||
onChange={(a) => {
|
onChange={(a) =>
|
||||||
console.log(a, (a as any).checked)
|
|
||||||
setSketchModeCmd({
|
setSketchModeCmd({
|
||||||
...sketchModeCmd,
|
...sketchModeCmd,
|
||||||
ortho: a.target.checked,
|
ortho: a.target.checked,
|
||||||
})
|
})
|
||||||
}}
|
}
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
<ActionButton
|
<ActionButton
|
||||||
Element="button"
|
Element="button"
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
engineCommandManager?.sendSceneCommand({
|
engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd: sketchModeCmd,
|
cmd: sketchModeCmd,
|
||||||
cmd_id: uuidv4(),
|
cmd_id: uuidv4(),
|
||||||
|
@ -1,13 +1,17 @@
|
|||||||
import { v4 as uuidv4 } from 'uuid'
|
import { v4 as uuidv4 } from 'uuid'
|
||||||
import { useStore } from '../useStore'
|
|
||||||
import { faFileExport, faXmark } from '@fortawesome/free-solid-svg-icons'
|
import { faFileExport, faXmark } from '@fortawesome/free-solid-svg-icons'
|
||||||
import { ActionButton } from './ActionButton'
|
import { ActionButton } from './ActionButton'
|
||||||
import Modal from 'react-modal'
|
import Modal from 'react-modal'
|
||||||
import React from 'react'
|
import React from 'react'
|
||||||
import { useFormik } from 'formik'
|
import { useFormik } from 'formik'
|
||||||
import { Models } from '@kittycad/lib'
|
import { Models } from '@kittycad/lib'
|
||||||
|
import { engineCommandManager } from '../lang/std/engineConnection'
|
||||||
|
import { useGlobalStateContext } from 'hooks/useGlobalStateContext'
|
||||||
|
|
||||||
type OutputFormat = Models['OutputFormat_type']
|
type OutputFormat = Models['OutputFormat_type']
|
||||||
|
type OutputTypeKey = OutputFormat['type']
|
||||||
|
type ExtractStorageTypes<T> = T extends { storage: infer U } ? U : never
|
||||||
|
type StorageUnion = ExtractStorageTypes<OutputFormat>
|
||||||
|
|
||||||
interface ExportButtonProps extends React.PropsWithChildren {
|
interface ExportButtonProps extends React.PropsWithChildren {
|
||||||
className?: {
|
className?: {
|
||||||
@ -18,14 +22,19 @@ interface ExportButtonProps extends React.PropsWithChildren {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export const ExportButton = ({ children, className }: ExportButtonProps) => {
|
export const ExportButton = ({ children, className }: ExportButtonProps) => {
|
||||||
const { engineCommandManager } = useStore((s) => ({
|
|
||||||
engineCommandManager: s.engineCommandManager,
|
|
||||||
}))
|
|
||||||
|
|
||||||
const [modalIsOpen, setIsOpen] = React.useState(false)
|
const [modalIsOpen, setIsOpen] = React.useState(false)
|
||||||
|
const {
|
||||||
|
settings: {
|
||||||
|
state: {
|
||||||
|
context: { baseUnit },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
} = useGlobalStateContext()
|
||||||
|
|
||||||
const defaultType = 'gltf'
|
const defaultType = 'gltf'
|
||||||
const [type, setType] = React.useState(defaultType)
|
const [type, setType] = React.useState<OutputTypeKey>(defaultType)
|
||||||
|
const defaultStorage = 'embedded'
|
||||||
|
const [storage, setStorage] = React.useState<StorageUnion>(defaultStorage)
|
||||||
|
|
||||||
function openModal() {
|
function openModal() {
|
||||||
setIsOpen(true)
|
setIsOpen(true)
|
||||||
@ -38,7 +47,7 @@ export const ExportButton = ({ children, className }: ExportButtonProps) => {
|
|||||||
// Default to gltf and embedded.
|
// Default to gltf and embedded.
|
||||||
const initialValues: OutputFormat = {
|
const initialValues: OutputFormat = {
|
||||||
type: defaultType,
|
type: defaultType,
|
||||||
storage: 'embedded',
|
storage: defaultStorage,
|
||||||
presentation: 'pretty',
|
presentation: 'pretty',
|
||||||
}
|
}
|
||||||
const formik = useFormik({
|
const formik = useFormik({
|
||||||
@ -66,7 +75,18 @@ export const ExportButton = ({ children, className }: ExportButtonProps) => {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
engineCommandManager?.sendSceneCommand({
|
if (values.type === 'obj' || values.type === 'stl') {
|
||||||
|
values.units = baseUnit
|
||||||
|
}
|
||||||
|
if (
|
||||||
|
values.type === 'ply' ||
|
||||||
|
values.type === 'stl' ||
|
||||||
|
values.type === 'gltf'
|
||||||
|
) {
|
||||||
|
// Set the storage type.
|
||||||
|
values.storage = storage
|
||||||
|
}
|
||||||
|
engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd: {
|
cmd: {
|
||||||
type: 'export',
|
type: 'export',
|
||||||
@ -75,6 +95,7 @@ export const ExportButton = ({ children, className }: ExportButtonProps) => {
|
|||||||
// in the scene to export. In that case, you'd pass the IDs thru here.
|
// in the scene to export. In that case, you'd pass the IDs thru here.
|
||||||
entity_ids: [],
|
entity_ids: [],
|
||||||
format: values,
|
format: values,
|
||||||
|
source_unit: baseUnit,
|
||||||
},
|
},
|
||||||
cmd_id: uuidv4(),
|
cmd_id: uuidv4(),
|
||||||
})
|
})
|
||||||
@ -109,7 +130,17 @@ export const ExportButton = ({ children, className }: ExportButtonProps) => {
|
|||||||
id="type"
|
id="type"
|
||||||
name="type"
|
name="type"
|
||||||
onChange={(e) => {
|
onChange={(e) => {
|
||||||
setType(e.target.value)
|
setType(e.target.value as OutputTypeKey)
|
||||||
|
if (e.target.value === 'gltf') {
|
||||||
|
// Set default to embedded.
|
||||||
|
setStorage('embedded')
|
||||||
|
} else if (e.target.value === 'ply') {
|
||||||
|
// Set default to ascii.
|
||||||
|
setStorage('ascii')
|
||||||
|
} else if (e.target.value === 'stl') {
|
||||||
|
// Set default to ascii.
|
||||||
|
setStorage('ascii')
|
||||||
|
}
|
||||||
formik.handleChange(e)
|
formik.handleChange(e)
|
||||||
}}
|
}}
|
||||||
className="bg-chalkboard-20 dark:bg-chalkboard-90 w-full"
|
className="bg-chalkboard-20 dark:bg-chalkboard-90 w-full"
|
||||||
@ -127,10 +158,10 @@ export const ExportButton = ({ children, className }: ExportButtonProps) => {
|
|||||||
<select
|
<select
|
||||||
id="storage"
|
id="storage"
|
||||||
name="storage"
|
name="storage"
|
||||||
onChange={formik.handleChange}
|
onChange={(e) => {
|
||||||
value={
|
setStorage(e.target.value as StorageUnion)
|
||||||
'storage' in formik.values ? formik.values.storage : ''
|
formik.handleChange(e)
|
||||||
}
|
}}
|
||||||
className="bg-chalkboard-20 dark:bg-chalkboard-90 w-full"
|
className="bg-chalkboard-20 dark:bg-chalkboard-90 w-full"
|
||||||
>
|
>
|
||||||
{type === 'gltf' && (
|
{type === 'gltf' && (
|
||||||
|
@ -1,7 +1,6 @@
|
|||||||
import { processMemory } from './MemoryPanel'
|
import { processMemory } from './MemoryPanel'
|
||||||
import { parser_wasm } from '../lang/abstractSyntaxTree'
|
|
||||||
import { enginelessExecutor } from '../lib/testHelpers'
|
import { enginelessExecutor } from '../lib/testHelpers'
|
||||||
import { initPromise } from '../lang/rust'
|
import { initPromise, parse } from '../lang/wasm'
|
||||||
|
|
||||||
beforeAll(() => initPromise)
|
beforeAll(() => initPromise)
|
||||||
|
|
||||||
@ -26,7 +25,7 @@ describe('processMemory', () => {
|
|||||||
|> lineTo([2.15, 4.32], %)
|
|> lineTo([2.15, 4.32], %)
|
||||||
// |> rx(90, %)
|
// |> rx(90, %)
|
||||||
show(theExtrude, theSketch)`
|
show(theExtrude, theSketch)`
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const programMemory = await enginelessExecutor(ast, {
|
const programMemory = await enginelessExecutor(ast, {
|
||||||
root: {},
|
root: {},
|
||||||
return: null,
|
return: null,
|
||||||
|
@ -2,7 +2,7 @@ import ReactJson from 'react-json-view'
|
|||||||
import { CollapsiblePanel, CollapsiblePanelProps } from './CollapsiblePanel'
|
import { CollapsiblePanel, CollapsiblePanelProps } from './CollapsiblePanel'
|
||||||
import { useStore } from '../useStore'
|
import { useStore } from '../useStore'
|
||||||
import { useMemo } from 'react'
|
import { useMemo } from 'react'
|
||||||
import { ProgramMemory, Path, ExtrudeSurface } from '../lang/executor'
|
import { ProgramMemory, Path, ExtrudeSurface } from '../lang/wasm'
|
||||||
import { Themes } from '../lib/theme'
|
import { Themes } from '../lib/theme'
|
||||||
|
|
||||||
interface MemoryPanelProps extends CollapsiblePanelProps {
|
interface MemoryPanelProps extends CollapsiblePanelProps {
|
||||||
|
@ -2,6 +2,8 @@ import { fireEvent, render, screen } from '@testing-library/react'
|
|||||||
import { BrowserRouter } from 'react-router-dom'
|
import { BrowserRouter } from 'react-router-dom'
|
||||||
import ProjectSidebarMenu from './ProjectSidebarMenu'
|
import ProjectSidebarMenu from './ProjectSidebarMenu'
|
||||||
import { ProjectWithEntryPointMetadata } from '../Router'
|
import { ProjectWithEntryPointMetadata } from '../Router'
|
||||||
|
import { GlobalStateProvider } from './GlobalStateProvider'
|
||||||
|
import CommandBarProvider from './CommandBar'
|
||||||
|
|
||||||
const now = new Date()
|
const now = new Date()
|
||||||
const projectWellFormed = {
|
const projectWellFormed = {
|
||||||
@ -38,7 +40,11 @@ describe('ProjectSidebarMenu tests', () => {
|
|||||||
test('Renders the project name', () => {
|
test('Renders the project name', () => {
|
||||||
render(
|
render(
|
||||||
<BrowserRouter>
|
<BrowserRouter>
|
||||||
<ProjectSidebarMenu project={projectWellFormed} />
|
<CommandBarProvider>
|
||||||
|
<GlobalStateProvider>
|
||||||
|
<ProjectSidebarMenu project={projectWellFormed} />
|
||||||
|
</GlobalStateProvider>
|
||||||
|
</CommandBarProvider>
|
||||||
</BrowserRouter>
|
</BrowserRouter>
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -55,7 +61,11 @@ describe('ProjectSidebarMenu tests', () => {
|
|||||||
test('Renders app name if given no project', () => {
|
test('Renders app name if given no project', () => {
|
||||||
render(
|
render(
|
||||||
<BrowserRouter>
|
<BrowserRouter>
|
||||||
<ProjectSidebarMenu />
|
<CommandBarProvider>
|
||||||
|
<GlobalStateProvider>
|
||||||
|
<ProjectSidebarMenu />
|
||||||
|
</GlobalStateProvider>
|
||||||
|
</CommandBarProvider>
|
||||||
</BrowserRouter>
|
</BrowserRouter>
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -69,7 +79,14 @@ describe('ProjectSidebarMenu tests', () => {
|
|||||||
test('Renders as a link if set to do so', () => {
|
test('Renders as a link if set to do so', () => {
|
||||||
render(
|
render(
|
||||||
<BrowserRouter>
|
<BrowserRouter>
|
||||||
<ProjectSidebarMenu project={projectWellFormed} renderAsLink={true} />
|
<CommandBarProvider>
|
||||||
|
<GlobalStateProvider>
|
||||||
|
<ProjectSidebarMenu
|
||||||
|
project={projectWellFormed}
|
||||||
|
renderAsLink={true}
|
||||||
|
/>
|
||||||
|
</GlobalStateProvider>
|
||||||
|
</CommandBarProvider>
|
||||||
</BrowserRouter>
|
</BrowserRouter>
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
import { Dialog, Transition } from '@headlessui/react'
|
import { Dialog, Transition } from '@headlessui/react'
|
||||||
import { Fragment, useState } from 'react'
|
import { Fragment, useState } from 'react'
|
||||||
import { Value } from '../lang/abstractSyntaxTreeTypes'
|
import { Value } from '../lang/wasm'
|
||||||
import {
|
import {
|
||||||
AvailableVars,
|
AvailableVars,
|
||||||
addToInputHelper,
|
addToInputHelper,
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
import { Dialog, Transition } from '@headlessui/react'
|
import { Dialog, Transition } from '@headlessui/react'
|
||||||
import { Fragment, useState } from 'react'
|
import { Fragment, useState } from 'react'
|
||||||
import { Value } from '../lang/abstractSyntaxTreeTypes'
|
import { Value } from '../lang/wasm'
|
||||||
import {
|
import {
|
||||||
AvailableVars,
|
AvailableVars,
|
||||||
addToInputHelper,
|
addToInputHelper,
|
||||||
|
@ -20,11 +20,15 @@ import {
|
|||||||
compareVec2Epsilon,
|
compareVec2Epsilon,
|
||||||
} from 'lang/std/sketch'
|
} from 'lang/std/sketch'
|
||||||
import { getNodeFromPath } from 'lang/queryAst'
|
import { getNodeFromPath } from 'lang/queryAst'
|
||||||
import { Program, VariableDeclarator } from 'lang/abstractSyntaxTreeTypes'
|
import {
|
||||||
import { modify_ast_for_sketch } from '../wasm-lib/pkg/wasm_lib'
|
Program,
|
||||||
|
VariableDeclarator,
|
||||||
|
rangeTypeFix,
|
||||||
|
modifyAstForSketch,
|
||||||
|
} from 'lang/wasm'
|
||||||
import { KCLError } from 'lang/errors'
|
import { KCLError } from 'lang/errors'
|
||||||
import { KclError as RustKclError } from '../wasm-lib/kcl/bindings/KclError'
|
import { KclError as RustKclError } from '../wasm-lib/kcl/bindings/KclError'
|
||||||
import { rangeTypeFix } from 'lang/abstractSyntaxTree'
|
import { engineCommandManager } from '../lang/std/engineConnection'
|
||||||
|
|
||||||
export const Stream = ({ className = '' }) => {
|
export const Stream = ({ className = '' }) => {
|
||||||
const [isLoading, setIsLoading] = useState(true)
|
const [isLoading, setIsLoading] = useState(true)
|
||||||
@ -32,7 +36,6 @@ export const Stream = ({ className = '' }) => {
|
|||||||
const videoRef = useRef<HTMLVideoElement>(null)
|
const videoRef = useRef<HTMLVideoElement>(null)
|
||||||
const {
|
const {
|
||||||
mediaStream,
|
mediaStream,
|
||||||
engineCommandManager,
|
|
||||||
setButtonDownInStream,
|
setButtonDownInStream,
|
||||||
didDragInStream,
|
didDragInStream,
|
||||||
setDidDragInStream,
|
setDidDragInStream,
|
||||||
@ -45,7 +48,6 @@ export const Stream = ({ className = '' }) => {
|
|||||||
programMemory,
|
programMemory,
|
||||||
} = useStore((s) => ({
|
} = useStore((s) => ({
|
||||||
mediaStream: s.mediaStream,
|
mediaStream: s.mediaStream,
|
||||||
engineCommandManager: s.engineCommandManager,
|
|
||||||
setButtonDownInStream: s.setButtonDownInStream,
|
setButtonDownInStream: s.setButtonDownInStream,
|
||||||
fileId: s.fileId,
|
fileId: s.fileId,
|
||||||
didDragInStream: s.didDragInStream,
|
didDragInStream: s.didDragInStream,
|
||||||
@ -73,7 +75,7 @@ export const Stream = ({ className = '' }) => {
|
|||||||
if (!videoRef.current) return
|
if (!videoRef.current) return
|
||||||
if (!mediaStream) return
|
if (!mediaStream) return
|
||||||
videoRef.current.srcObject = mediaStream
|
videoRef.current.srcObject = mediaStream
|
||||||
}, [mediaStream, engineCommandManager])
|
}, [mediaStream])
|
||||||
|
|
||||||
const handleMouseDown: MouseEventHandler<HTMLVideoElement> = (e) => {
|
const handleMouseDown: MouseEventHandler<HTMLVideoElement> = (e) => {
|
||||||
if (!videoRef.current) return
|
if (!videoRef.current) return
|
||||||
@ -107,7 +109,7 @@ export const Stream = ({ className = '' }) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (guiMode.mode === 'sketch' && guiMode.sketchMode === ('move' as any)) {
|
if (guiMode.mode === 'sketch' && guiMode.sketchMode === ('move' as any)) {
|
||||||
engineCommandManager?.sendSceneCommand({
|
engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd: {
|
cmd: {
|
||||||
type: 'handle_mouse_drag_start',
|
type: 'handle_mouse_drag_start',
|
||||||
@ -121,7 +123,7 @@ export const Stream = ({ className = '' }) => {
|
|||||||
guiMode.sketchMode === ('sketch_line' as any)
|
guiMode.sketchMode === ('sketch_line' as any)
|
||||||
)
|
)
|
||||||
) {
|
) {
|
||||||
engineCommandManager?.sendSceneCommand({
|
engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd: {
|
cmd: {
|
||||||
type: 'camera_drag_start',
|
type: 'camera_drag_start',
|
||||||
@ -139,7 +141,7 @@ export const Stream = ({ className = '' }) => {
|
|||||||
const handleScroll: WheelEventHandler<HTMLVideoElement> = (e) => {
|
const handleScroll: WheelEventHandler<HTMLVideoElement> = (e) => {
|
||||||
if (!cameraMouseDragGuards[cameraControls].zoom.scrollCallback(e)) return
|
if (!cameraMouseDragGuards[cameraControls].zoom.scrollCallback(e)) return
|
||||||
|
|
||||||
engineCommandManager?.sendSceneCommand({
|
engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd: {
|
cmd: {
|
||||||
type: 'default_camera_zoom',
|
type: 'default_camera_zoom',
|
||||||
@ -177,7 +179,7 @@ export const Stream = ({ className = '' }) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (!didDragInStream) {
|
if (!didDragInStream) {
|
||||||
engineCommandManager?.sendSceneCommand({
|
engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd: {
|
cmd: {
|
||||||
type: 'select_with_point',
|
type: 'select_with_point',
|
||||||
@ -214,7 +216,7 @@ export const Stream = ({ className = '' }) => {
|
|||||||
window: { x, y },
|
window: { x, y },
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
engineCommandManager?.sendSceneCommand(command).then(async (resp) => {
|
engineCommandManager.sendSceneCommand(command).then(async (resp) => {
|
||||||
if (!(guiMode.mode === 'sketch')) return
|
if (!(guiMode.mode === 'sketch')) return
|
||||||
|
|
||||||
if (guiMode.sketchMode === 'selectFace') return
|
if (guiMode.sketchMode === 'selectFace') return
|
||||||
@ -240,9 +242,6 @@ export const Stream = ({ className = '' }) => {
|
|||||||
) {
|
) {
|
||||||
// Let's get the updated ast.
|
// Let's get the updated ast.
|
||||||
if (sketchGroupId === '') return
|
if (sketchGroupId === '') return
|
||||||
|
|
||||||
console.log('guiMode.pathId', guiMode.pathId)
|
|
||||||
|
|
||||||
// We have a problem if we do not have an id for the sketch group.
|
// We have a problem if we do not have an id for the sketch group.
|
||||||
if (
|
if (
|
||||||
guiMode.pathId === undefined ||
|
guiMode.pathId === undefined ||
|
||||||
@ -253,26 +252,14 @@ export const Stream = ({ className = '' }) => {
|
|||||||
|
|
||||||
let engineId = guiMode.pathId
|
let engineId = guiMode.pathId
|
||||||
|
|
||||||
try {
|
const updatedAst: Program = await modifyAstForSketch(
|
||||||
const updatedAst: Program = await modify_ast_for_sketch(
|
engineCommandManager,
|
||||||
engineCommandManager,
|
ast,
|
||||||
JSON.stringify(ast),
|
variableName,
|
||||||
variableName,
|
engineId
|
||||||
engineId
|
)
|
||||||
)
|
|
||||||
|
|
||||||
updateAst(updatedAst, false)
|
updateAst(updatedAst, false)
|
||||||
} catch (e: any) {
|
|
||||||
const parsed: RustKclError = JSON.parse(e.toString())
|
|
||||||
const kclError = new KCLError(
|
|
||||||
parsed.kind,
|
|
||||||
parsed.msg,
|
|
||||||
rangeTypeFix(parsed.sourceRanges)
|
|
||||||
)
|
|
||||||
|
|
||||||
console.log(kclError)
|
|
||||||
throw kclError
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -285,7 +272,7 @@ export const Stream = ({ className = '' }) => {
|
|||||||
guiMode.waitingFirstClick &&
|
guiMode.waitingFirstClick &&
|
||||||
!isEditingExistingSketch
|
!isEditingExistingSketch
|
||||||
) {
|
) {
|
||||||
const curve = await engineCommandManager?.sendSceneCommand({
|
const curve = await engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd_id: uuidv4(),
|
cmd_id: uuidv4(),
|
||||||
cmd: {
|
cmd: {
|
||||||
@ -326,7 +313,7 @@ export const Stream = ({ className = '' }) => {
|
|||||||
resp?.data?.data?.entities_modified?.length &&
|
resp?.data?.data?.entities_modified?.length &&
|
||||||
(!guiMode.waitingFirstClick || isEditingExistingSketch)
|
(!guiMode.waitingFirstClick || isEditingExistingSketch)
|
||||||
) {
|
) {
|
||||||
const curve = await engineCommandManager?.sendSceneCommand({
|
const curve = await engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd_id: uuidv4(),
|
cmd_id: uuidv4(),
|
||||||
cmd: {
|
cmd: {
|
||||||
@ -371,12 +358,12 @@ export const Stream = ({ className = '' }) => {
|
|||||||
setGuiMode({
|
setGuiMode({
|
||||||
mode: 'default',
|
mode: 'default',
|
||||||
})
|
})
|
||||||
engineCommandManager?.sendSceneCommand({
|
engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd_id: uuidv4(),
|
cmd_id: uuidv4(),
|
||||||
cmd: { type: 'edit_mode_exit' },
|
cmd: { type: 'edit_mode_exit' },
|
||||||
})
|
})
|
||||||
engineCommandManager?.sendSceneCommand({
|
engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd_id: uuidv4(),
|
cmd_id: uuidv4(),
|
||||||
cmd: { type: 'default_camera_disable_sketch_mode' },
|
cmd: { type: 'default_camera_disable_sketch_mode' },
|
||||||
@ -415,6 +402,7 @@ export const Stream = ({ className = '' }) => {
|
|||||||
onWheel={handleScroll}
|
onWheel={handleScroll}
|
||||||
onPlay={() => setIsLoading(false)}
|
onPlay={() => setIsLoading(false)}
|
||||||
onMouseMoveCapture={handleMouseMove}
|
onMouseMoveCapture={handleMouseMove}
|
||||||
|
disablePictureInPicture
|
||||||
className={`w-full h-full ${isExecuting && 'blur-md'}`}
|
className={`w-full h-full ${isExecuting && 'blur-md'}`}
|
||||||
style={{ transitionDuration: '200ms', transitionProperty: 'filter' }}
|
style={{ transitionDuration: '200ms', transitionProperty: 'filter' }}
|
||||||
/>
|
/>
|
||||||
|
@ -30,6 +30,7 @@ import { isOverlap, roundOff } from 'lib/utils'
|
|||||||
import { kclErrToDiagnostic } from 'lang/errors'
|
import { kclErrToDiagnostic } from 'lang/errors'
|
||||||
import { CSSRuleObject } from 'tailwindcss/types/config'
|
import { CSSRuleObject } from 'tailwindcss/types/config'
|
||||||
import interact from '@replit/codemirror-interact'
|
import interact from '@replit/codemirror-interact'
|
||||||
|
import { engineCommandManager } from '../lang/std/engineConnection'
|
||||||
|
|
||||||
export const editorShortcutMeta = {
|
export const editorShortcutMeta = {
|
||||||
formatCode: {
|
formatCode: {
|
||||||
@ -52,7 +53,6 @@ export const TextEditor = ({
|
|||||||
code,
|
code,
|
||||||
deferredSetCode,
|
deferredSetCode,
|
||||||
editorView,
|
editorView,
|
||||||
engineCommandManager,
|
|
||||||
formatCode,
|
formatCode,
|
||||||
isLSPServerReady,
|
isLSPServerReady,
|
||||||
selectionRanges,
|
selectionRanges,
|
||||||
@ -64,7 +64,6 @@ export const TextEditor = ({
|
|||||||
code: s.code,
|
code: s.code,
|
||||||
deferredSetCode: s.deferredSetCode,
|
deferredSetCode: s.deferredSetCode,
|
||||||
editorView: s.editorView,
|
editorView: s.editorView,
|
||||||
engineCommandManager: s.engineCommandManager,
|
|
||||||
formatCode: s.formatCode,
|
formatCode: s.formatCode,
|
||||||
isLSPServerReady: s.isLSPServerReady,
|
isLSPServerReady: s.isLSPServerReady,
|
||||||
selectionRanges: s.selectionRanges,
|
selectionRanges: s.selectionRanges,
|
||||||
@ -173,7 +172,7 @@ export const TextEditor = ({
|
|||||||
const idBasedSelections = codeBasedSelections
|
const idBasedSelections = codeBasedSelections
|
||||||
.map(({ type, range }) => {
|
.map(({ type, range }) => {
|
||||||
const hasOverlap = Object.entries(
|
const hasOverlap = Object.entries(
|
||||||
engineCommandManager?.sourceRangeMap || {}
|
engineCommandManager.sourceRangeMap || {}
|
||||||
).filter(([_, sourceRange]) => {
|
).filter(([_, sourceRange]) => {
|
||||||
return isOverlap(sourceRange, range)
|
return isOverlap(sourceRange, range)
|
||||||
})
|
})
|
||||||
@ -186,7 +185,7 @@ export const TextEditor = ({
|
|||||||
})
|
})
|
||||||
.filter(Boolean) as any
|
.filter(Boolean) as any
|
||||||
|
|
||||||
engineCommandManager?.cusorsSelected({
|
engineCommandManager.cusorsSelected({
|
||||||
otherSelections: [],
|
otherSelections: [],
|
||||||
idBasedSelections,
|
idBasedSelections,
|
||||||
})
|
})
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
import { useState, useEffect } from 'react'
|
import { useState, useEffect } from 'react'
|
||||||
import { toolTips, useStore } from '../../useStore'
|
import { toolTips, useStore } from '../../useStore'
|
||||||
import { Value, VariableDeclarator } from '../../lang/abstractSyntaxTreeTypes'
|
import { Value, VariableDeclarator } from '../../lang/wasm'
|
||||||
import {
|
import {
|
||||||
getNodePathFromSourceRange,
|
getNodePathFromSourceRange,
|
||||||
getNodeFromPath,
|
getNodeFromPath,
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
import { useState, useEffect } from 'react'
|
import { useState, useEffect } from 'react'
|
||||||
import { toolTips, useStore } from '../../useStore'
|
import { toolTips, useStore } from '../../useStore'
|
||||||
import { Value, VariableDeclarator } from '../../lang/abstractSyntaxTreeTypes'
|
import { Value, VariableDeclarator } from '../../lang/wasm'
|
||||||
import {
|
import {
|
||||||
getNodePathFromSourceRange,
|
getNodePathFromSourceRange,
|
||||||
getNodeFromPath,
|
getNodeFromPath,
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
import { useState, useEffect } from 'react'
|
import { useState, useEffect } from 'react'
|
||||||
import { toolTips, useStore } from '../../useStore'
|
import { toolTips, useStore } from '../../useStore'
|
||||||
import { Value } from '../../lang/abstractSyntaxTreeTypes'
|
import { Value } from '../../lang/wasm'
|
||||||
import {
|
import {
|
||||||
getNodePathFromSourceRange,
|
getNodePathFromSourceRange,
|
||||||
getNodeFromPath,
|
getNodeFromPath,
|
||||||
|
@ -1,11 +1,7 @@
|
|||||||
import { useState, useEffect } from 'react'
|
import { useState, useEffect } from 'react'
|
||||||
import { create } from 'react-modal-promise'
|
import { create } from 'react-modal-promise'
|
||||||
import { toolTips, useStore } from '../../useStore'
|
import { toolTips, useStore } from '../../useStore'
|
||||||
import {
|
import { BinaryPart, Value, VariableDeclarator } from '../../lang/wasm'
|
||||||
BinaryPart,
|
|
||||||
Value,
|
|
||||||
VariableDeclarator,
|
|
||||||
} from '../../lang/abstractSyntaxTreeTypes'
|
|
||||||
import {
|
import {
|
||||||
getNodePathFromSourceRange,
|
getNodePathFromSourceRange,
|
||||||
getNodeFromPath,
|
getNodeFromPath,
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
import { useState, useEffect } from 'react'
|
import { useState, useEffect } from 'react'
|
||||||
import { toolTips, useStore } from '../../useStore'
|
import { toolTips, useStore } from '../../useStore'
|
||||||
import { Value } from '../../lang/abstractSyntaxTreeTypes'
|
import { Value } from '../../lang/wasm'
|
||||||
import {
|
import {
|
||||||
getNodePathFromSourceRange,
|
getNodePathFromSourceRange,
|
||||||
getNodeFromPath,
|
getNodeFromPath,
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
import { useState, useEffect } from 'react'
|
import { useState, useEffect } from 'react'
|
||||||
import { create } from 'react-modal-promise'
|
import { create } from 'react-modal-promise'
|
||||||
import { toolTips, useStore } from '../../useStore'
|
import { toolTips, useStore } from '../../useStore'
|
||||||
import { Value } from '../../lang/abstractSyntaxTreeTypes'
|
import { Value } from '../../lang/wasm'
|
||||||
import {
|
import {
|
||||||
getNodePathFromSourceRange,
|
getNodePathFromSourceRange,
|
||||||
getNodeFromPath,
|
getNodeFromPath,
|
||||||
@ -133,7 +133,7 @@ export const SetAbsDistance = ({ buttonType }: { buttonType: ButtonType }) => {
|
|||||||
callBack: updateCursors(setCursor, selectionRanges, pathToNodeMap),
|
callBack: updateCursors(setCursor, selectionRanges, pathToNodeMap),
|
||||||
})
|
})
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
console.log('e', e)
|
console.log('error', e)
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
disabled={!enableAngLen}
|
disabled={!enableAngLen}
|
||||||
|
@ -1,11 +1,7 @@
|
|||||||
import { useState, useEffect } from 'react'
|
import { useState, useEffect } from 'react'
|
||||||
import { create } from 'react-modal-promise'
|
import { create } from 'react-modal-promise'
|
||||||
import { toolTips, useStore } from '../../useStore'
|
import { toolTips, useStore } from '../../useStore'
|
||||||
import {
|
import { BinaryPart, Value, VariableDeclarator } from '../../lang/wasm'
|
||||||
BinaryPart,
|
|
||||||
Value,
|
|
||||||
VariableDeclarator,
|
|
||||||
} from '../../lang/abstractSyntaxTreeTypes'
|
|
||||||
import {
|
import {
|
||||||
getNodePathFromSourceRange,
|
getNodePathFromSourceRange,
|
||||||
getNodeFromPath,
|
getNodeFromPath,
|
||||||
|
@ -1,11 +1,7 @@
|
|||||||
import { useState, useEffect } from 'react'
|
import { useState, useEffect } from 'react'
|
||||||
import { create } from 'react-modal-promise'
|
import { create } from 'react-modal-promise'
|
||||||
import { toolTips, useStore } from '../../useStore'
|
import { toolTips, useStore } from '../../useStore'
|
||||||
import {
|
import { BinaryPart, Value, VariableDeclarator } from '../../lang/wasm'
|
||||||
BinaryPart,
|
|
||||||
Value,
|
|
||||||
VariableDeclarator,
|
|
||||||
} from '../../lang/abstractSyntaxTreeTypes'
|
|
||||||
import {
|
import {
|
||||||
getNodePathFromSourceRange,
|
getNodePathFromSourceRange,
|
||||||
getNodeFromPath,
|
getNodeFromPath,
|
||||||
@ -21,8 +17,6 @@ import { GetInfoModal } from '../SetHorVertDistanceModal'
|
|||||||
import { createLiteral, createVariableDeclaration } from '../../lang/modifyAst'
|
import { createLiteral, createVariableDeclaration } from '../../lang/modifyAst'
|
||||||
import { removeDoubleNegatives } from '../AvailableVarsHelpers'
|
import { removeDoubleNegatives } from '../AvailableVarsHelpers'
|
||||||
import { updateCursors } from '../../lang/util'
|
import { updateCursors } from '../../lang/util'
|
||||||
import { ActionIcon } from 'components/ActionIcon'
|
|
||||||
import { sketchButtonClassnames } from 'Toolbar'
|
|
||||||
|
|
||||||
const getModalInfo = create(GetInfoModal as any)
|
const getModalInfo = create(GetInfoModal as any)
|
||||||
|
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
import { useState, useEffect } from 'react'
|
import { useState, useEffect } from 'react'
|
||||||
import { create } from 'react-modal-promise'
|
import { create } from 'react-modal-promise'
|
||||||
import { toolTips, useStore } from '../../useStore'
|
import { toolTips, useStore } from '../../useStore'
|
||||||
import { Value } from '../../lang/abstractSyntaxTreeTypes'
|
import { Value } from '../../lang/wasm'
|
||||||
import {
|
import {
|
||||||
getNodePathFromSourceRange,
|
getNodePathFromSourceRange,
|
||||||
getNodeFromPath,
|
getNodeFromPath,
|
||||||
@ -147,7 +147,7 @@ export const SetAngleLength = ({
|
|||||||
callBack: updateCursors(setCursor, selectionRanges, pathToNodeMap),
|
callBack: updateCursors(setCursor, selectionRanges, pathToNodeMap),
|
||||||
})
|
})
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
console.log('e', e)
|
console.log('erorr', e)
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
disabled={!enableAngLen}
|
disabled={!enableAngLen}
|
||||||
|
@ -1,6 +1,11 @@
|
|||||||
import { Popover, Transition } from '@headlessui/react'
|
import { Popover, Transition } from '@headlessui/react'
|
||||||
import { ActionButton } from './ActionButton'
|
import { ActionButton } from './ActionButton'
|
||||||
import { faBars, faGear, faSignOutAlt } from '@fortawesome/free-solid-svg-icons'
|
import {
|
||||||
|
faBars,
|
||||||
|
faBug,
|
||||||
|
faGear,
|
||||||
|
faSignOutAlt,
|
||||||
|
} from '@fortawesome/free-solid-svg-icons'
|
||||||
import { faGithub } from '@fortawesome/free-brands-svg-icons'
|
import { faGithub } from '@fortawesome/free-brands-svg-icons'
|
||||||
import { useLocation, useNavigate } from 'react-router-dom'
|
import { useLocation, useNavigate } from 'react-router-dom'
|
||||||
import { Fragment, useState } from 'react'
|
import { Fragment, useState } from 'react'
|
||||||
@ -137,13 +142,21 @@ const UserSidebarMenu = ({ user }: { user?: User }) => {
|
|||||||
Settings
|
Settings
|
||||||
</ActionButton>
|
</ActionButton>
|
||||||
<ActionButton
|
<ActionButton
|
||||||
Element="link"
|
Element="externalLink"
|
||||||
to="https://github.com/KittyCAD/modeling-app/discussions"
|
to="https://github.com/KittyCAD/modeling-app/discussions"
|
||||||
icon={{ icon: faGithub }}
|
icon={{ icon: faGithub }}
|
||||||
className="border-transparent dark:border-transparent dark:hover:border-liquid-60"
|
className="border-transparent dark:border-transparent dark:hover:border-liquid-60"
|
||||||
>
|
>
|
||||||
Request a feature
|
Request a feature
|
||||||
</ActionButton>
|
</ActionButton>
|
||||||
|
<ActionButton
|
||||||
|
Element="externalLink"
|
||||||
|
to="https://github.com/KittyCAD/modeling-app/issues/new"
|
||||||
|
icon={{ icon: faBug }}
|
||||||
|
className="border-transparent dark:border-transparent dark:hover:border-liquid-60"
|
||||||
|
>
|
||||||
|
Report a bug
|
||||||
|
</ActionButton>
|
||||||
<ActionButton
|
<ActionButton
|
||||||
Element="button"
|
Element="button"
|
||||||
onClick={() => send('Log out')}
|
onClick={() => send('Log out')}
|
||||||
|
@ -109,7 +109,6 @@ export default class Client extends jsrpc.JSONRPCServerAndClient {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
messageString += message
|
messageString += message
|
||||||
// console.log(messageString)
|
|
||||||
return
|
return
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@ -96,8 +96,6 @@ export class LanguageServerPlugin implements PluginValue {
|
|||||||
async sendChange({ documentText }: { documentText: string }) {
|
async sendChange({ documentText }: { documentText: string }) {
|
||||||
if (!this.client.ready) return
|
if (!this.client.ready) return
|
||||||
|
|
||||||
console.log(documentText.length)
|
|
||||||
|
|
||||||
if (documentText.length > 5000) {
|
if (documentText.length > 5000) {
|
||||||
// Clear out the text it thinks we have, large documents will throw a stack error.
|
// Clear out the text it thinks we have, large documents will throw a stack error.
|
||||||
// This is obviously not a good fix but it works for now til we figure
|
// This is obviously not a good fix but it works for now til we figure
|
||||||
|
@ -8,6 +8,7 @@ import { ArtifactMap, EngineCommandManager } from 'lang/std/engineConnection'
|
|||||||
import { Models } from '@kittycad/lib/dist/types/src'
|
import { Models } from '@kittycad/lib/dist/types/src'
|
||||||
import { isReducedMotion } from 'lang/util'
|
import { isReducedMotion } from 'lang/util'
|
||||||
import { isOverlap } from 'lib/utils'
|
import { isOverlap } from 'lib/utils'
|
||||||
|
import { engineCommandManager } from '../lang/std/engineConnection'
|
||||||
|
|
||||||
interface DefaultPlanes {
|
interface DefaultPlanes {
|
||||||
xy: string
|
xy: string
|
||||||
@ -17,19 +18,13 @@ interface DefaultPlanes {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export function useAppMode() {
|
export function useAppMode() {
|
||||||
const {
|
const { guiMode, setGuiMode, selectionRanges, selectionRangeTypeMap } =
|
||||||
guiMode,
|
useStore((s) => ({
|
||||||
setGuiMode,
|
guiMode: s.guiMode,
|
||||||
selectionRanges,
|
setGuiMode: s.setGuiMode,
|
||||||
engineCommandManager,
|
selectionRanges: s.selectionRanges,
|
||||||
selectionRangeTypeMap,
|
selectionRangeTypeMap: s.selectionRangeTypeMap,
|
||||||
} = useStore((s) => ({
|
}))
|
||||||
guiMode: s.guiMode,
|
|
||||||
setGuiMode: s.setGuiMode,
|
|
||||||
selectionRanges: s.selectionRanges,
|
|
||||||
engineCommandManager: s.engineCommandManager,
|
|
||||||
selectionRangeTypeMap: s.selectionRangeTypeMap,
|
|
||||||
}))
|
|
||||||
const [defaultPlanes, setDefaultPlanes] = useState<DefaultPlanes | null>(null)
|
const [defaultPlanes, setDefaultPlanes] = useState<DefaultPlanes | null>(null)
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (
|
if (
|
||||||
@ -65,7 +60,7 @@ export function useAppMode() {
|
|||||||
setDefaultPlanesHidden(engineCommandManager, localDefaultPlanes, true)
|
setDefaultPlanesHidden(engineCommandManager, localDefaultPlanes, true)
|
||||||
// TODO figure out the plane to use based on the sketch
|
// TODO figure out the plane to use based on the sketch
|
||||||
// maybe it's easier to make a new plane than rely on the defaults
|
// maybe it's easier to make a new plane than rely on the defaults
|
||||||
await engineCommandManager?.sendSceneCommand({
|
await engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd_id: uuidv4(),
|
cmd_id: uuidv4(),
|
||||||
cmd: {
|
cmd: {
|
||||||
@ -135,7 +130,7 @@ export function useAppMode() {
|
|||||||
])
|
])
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const unSub = engineCommandManager?.subscribeTo({
|
const unSub = engineCommandManager.subscribeTo({
|
||||||
event: 'select_with_point',
|
event: 'select_with_point',
|
||||||
callback: async ({ data }) => {
|
callback: async ({ data }) => {
|
||||||
if (!data.entity_id) return
|
if (!data.entity_id) return
|
||||||
@ -144,18 +139,16 @@ export function useAppMode() {
|
|||||||
// user clicked something else in the scene
|
// user clicked something else in the scene
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
const sketchModeResponse = await engineCommandManager?.sendSceneCommand(
|
const sketchModeResponse = await engineCommandManager.sendSceneCommand({
|
||||||
{
|
type: 'modeling_cmd_req',
|
||||||
type: 'modeling_cmd_req',
|
cmd_id: uuidv4(),
|
||||||
cmd_id: uuidv4(),
|
cmd: {
|
||||||
cmd: {
|
type: 'sketch_mode_enable',
|
||||||
type: 'sketch_mode_enable',
|
plane_id: data.entity_id,
|
||||||
plane_id: data.entity_id,
|
ortho: true,
|
||||||
ortho: true,
|
animated: !isReducedMotion(),
|
||||||
animated: !isReducedMotion(),
|
},
|
||||||
},
|
})
|
||||||
}
|
|
||||||
)
|
|
||||||
setDefaultPlanesHidden(engineCommandManager, defaultPlanes, true)
|
setDefaultPlanesHidden(engineCommandManager, defaultPlanes, true)
|
||||||
const sketchUuid = uuidv4()
|
const sketchUuid = uuidv4()
|
||||||
const proms: any[] = []
|
const proms: any[] = []
|
||||||
@ -178,8 +171,7 @@ export function useAppMode() {
|
|||||||
},
|
},
|
||||||
})
|
})
|
||||||
)
|
)
|
||||||
const res = await Promise.all(proms)
|
await Promise.all(proms)
|
||||||
console.log('res', res)
|
|
||||||
setGuiMode({
|
setGuiMode({
|
||||||
mode: 'sketch',
|
mode: 'sketch',
|
||||||
sketchMode: 'sketchEdit',
|
sketchMode: 'sketchEdit',
|
||||||
@ -209,7 +201,7 @@ async function createPlane(
|
|||||||
}
|
}
|
||||||
) {
|
) {
|
||||||
const planeId = uuidv4()
|
const planeId = uuidv4()
|
||||||
await engineCommandManager?.sendSceneCommand({
|
await engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd: {
|
cmd: {
|
||||||
type: 'make_plane',
|
type: 'make_plane',
|
||||||
@ -221,7 +213,7 @@ async function createPlane(
|
|||||||
},
|
},
|
||||||
cmd_id: planeId,
|
cmd_id: planeId,
|
||||||
})
|
})
|
||||||
await engineCommandManager?.sendSceneCommand({
|
await engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd: {
|
cmd: {
|
||||||
type: 'plane_set_color',
|
type: 'plane_set_color',
|
||||||
@ -234,12 +226,12 @@ async function createPlane(
|
|||||||
}
|
}
|
||||||
|
|
||||||
function setDefaultPlanesHidden(
|
function setDefaultPlanesHidden(
|
||||||
engineCommandManager: EngineCommandManager | undefined,
|
engineCommandManager: EngineCommandManager,
|
||||||
defaultPlanes: DefaultPlanes,
|
defaultPlanes: DefaultPlanes,
|
||||||
hidden: boolean
|
hidden: boolean
|
||||||
) {
|
) {
|
||||||
Object.values(defaultPlanes).forEach((planeId) => {
|
Object.values(defaultPlanes).forEach((planeId) => {
|
||||||
engineCommandManager?.sendSceneCommand({
|
engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd_id: uuidv4(),
|
cmd_id: uuidv4(),
|
||||||
cmd: {
|
cmd: {
|
||||||
|
@ -1,14 +1,9 @@
|
|||||||
import { useEffect } from 'react'
|
import { useEffect } from 'react'
|
||||||
import { useStore } from 'useStore'
|
import { useStore } from 'useStore'
|
||||||
|
import { engineCommandManager } from '../lang/std/engineConnection'
|
||||||
|
|
||||||
export function useEngineConnectionSubscriptions() {
|
export function useEngineConnectionSubscriptions() {
|
||||||
const {
|
const { setCursor2, setHighlightRange, highlightRange } = useStore((s) => ({
|
||||||
engineCommandManager,
|
|
||||||
setCursor2,
|
|
||||||
setHighlightRange,
|
|
||||||
highlightRange,
|
|
||||||
} = useStore((s) => ({
|
|
||||||
engineCommandManager: s.engineCommandManager,
|
|
||||||
setCursor2: s.setCursor2,
|
setCursor2: s.setCursor2,
|
||||||
setHighlightRange: s.setHighlightRange,
|
setHighlightRange: s.setHighlightRange,
|
||||||
highlightRange: s.highlightRange,
|
highlightRange: s.highlightRange,
|
||||||
|
@ -1,53 +1,93 @@
|
|||||||
import { useLayoutEffect } from 'react'
|
import { useLayoutEffect, useEffect, useRef } from 'react'
|
||||||
import { _executor } from '../lang/executor'
|
import { _executor } from '../lang/wasm'
|
||||||
import { useStore } from '../useStore'
|
import { useStore } from '../useStore'
|
||||||
import { EngineCommandManager } from '../lang/std/engineConnection'
|
import { engineCommandManager } from '../lang/std/engineConnection'
|
||||||
|
import { deferExecution } from 'lib/utils'
|
||||||
|
import { v4 as uuidv4 } from 'uuid'
|
||||||
|
|
||||||
export function useSetupEngineManager(
|
export function useSetupEngineManager(
|
||||||
streamRef: React.RefObject<HTMLDivElement>,
|
streamRef: React.RefObject<HTMLDivElement>,
|
||||||
token?: string
|
token?: string
|
||||||
) {
|
) {
|
||||||
const {
|
const {
|
||||||
setEngineCommandManager,
|
|
||||||
setMediaStream,
|
setMediaStream,
|
||||||
setIsStreamReady,
|
setIsStreamReady,
|
||||||
setStreamDimensions,
|
setStreamDimensions,
|
||||||
|
streamDimensions,
|
||||||
executeCode,
|
executeCode,
|
||||||
} = useStore((s) => ({
|
} = useStore((s) => ({
|
||||||
setEngineCommandManager: s.setEngineCommandManager,
|
|
||||||
setMediaStream: s.setMediaStream,
|
setMediaStream: s.setMediaStream,
|
||||||
setIsStreamReady: s.setIsStreamReady,
|
setIsStreamReady: s.setIsStreamReady,
|
||||||
setStreamDimensions: s.setStreamDimensions,
|
setStreamDimensions: s.setStreamDimensions,
|
||||||
|
streamDimensions: s.streamDimensions,
|
||||||
executeCode: s.executeCode,
|
executeCode: s.executeCode,
|
||||||
}))
|
}))
|
||||||
|
|
||||||
const streamWidth = streamRef?.current?.offsetWidth
|
const streamWidth = streamRef?.current?.offsetWidth
|
||||||
const streamHeight = streamRef?.current?.offsetHeight
|
const streamHeight = streamRef?.current?.offsetHeight
|
||||||
|
|
||||||
|
const hasSetNonZeroDimensions = useRef<boolean>(false)
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
executeCode()
|
||||||
|
}, [])
|
||||||
|
|
||||||
|
useLayoutEffect(() => {
|
||||||
|
// Load the engine command manager once with the initial width and height,
|
||||||
|
// then we do not want to reload it.
|
||||||
|
const { width: quadWidth, height: quadHeight } = getDimensions(
|
||||||
|
streamWidth,
|
||||||
|
streamHeight
|
||||||
|
)
|
||||||
|
if (!hasSetNonZeroDimensions.current && quadHeight && quadWidth) {
|
||||||
|
engineCommandManager.start({
|
||||||
|
setMediaStream,
|
||||||
|
setIsStreamReady,
|
||||||
|
width: quadWidth,
|
||||||
|
height: quadHeight,
|
||||||
|
executeCode,
|
||||||
|
token,
|
||||||
|
})
|
||||||
|
setStreamDimensions({
|
||||||
|
streamWidth: quadWidth,
|
||||||
|
streamHeight: quadHeight,
|
||||||
|
})
|
||||||
|
hasSetNonZeroDimensions.current = true
|
||||||
|
}
|
||||||
|
}, [streamRef?.current?.offsetWidth, streamRef?.current?.offsetHeight])
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
const handleResize = deferExecution(() => {
|
||||||
|
const { width, height } = getDimensions(
|
||||||
|
streamRef?.current?.offsetWidth,
|
||||||
|
streamRef?.current?.offsetHeight
|
||||||
|
)
|
||||||
|
if (
|
||||||
|
streamDimensions.streamWidth !== width ||
|
||||||
|
streamDimensions.streamHeight !== height
|
||||||
|
) {
|
||||||
|
engineCommandManager.handleResize({
|
||||||
|
streamWidth: width,
|
||||||
|
streamHeight: height,
|
||||||
|
})
|
||||||
|
setStreamDimensions({
|
||||||
|
streamWidth: width,
|
||||||
|
streamHeight: height,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}, 500)
|
||||||
|
|
||||||
|
window.addEventListener('resize', handleResize)
|
||||||
|
return () => {
|
||||||
|
window.removeEventListener('resize', handleResize)
|
||||||
|
}
|
||||||
|
}, [])
|
||||||
|
}
|
||||||
|
|
||||||
|
function getDimensions(streamWidth?: number, streamHeight?: number) {
|
||||||
const width = streamWidth ? streamWidth : 0
|
const width = streamWidth ? streamWidth : 0
|
||||||
const quadWidth = Math.round(width / 4) * 4
|
const quadWidth = Math.round(width / 4) * 4
|
||||||
const height = streamHeight ? streamHeight : 0
|
const height = streamHeight ? streamHeight : 0
|
||||||
const quadHeight = Math.round(height / 4) * 4
|
const quadHeight = Math.round(height / 4) * 4
|
||||||
|
return { width: quadWidth, height: quadHeight }
|
||||||
useLayoutEffect(() => {
|
|
||||||
setStreamDimensions({
|
|
||||||
streamWidth: quadWidth,
|
|
||||||
streamHeight: quadHeight,
|
|
||||||
})
|
|
||||||
if (!width || !height) return
|
|
||||||
const eng = new EngineCommandManager({
|
|
||||||
setMediaStream,
|
|
||||||
setIsStreamReady,
|
|
||||||
width: quadWidth,
|
|
||||||
height: quadHeight,
|
|
||||||
token,
|
|
||||||
})
|
|
||||||
setEngineCommandManager(eng)
|
|
||||||
eng.waitForReady.then(() => {
|
|
||||||
executeCode()
|
|
||||||
})
|
|
||||||
return () => {
|
|
||||||
eng?.tearDown()
|
|
||||||
}
|
|
||||||
}, [quadWidth, quadHeight])
|
|
||||||
}
|
}
|
||||||
|
@ -48,7 +48,7 @@ export function useConvertToVariable() {
|
|||||||
|
|
||||||
updateAst(_modifiedAst, true)
|
updateAst(_modifiedAst, true)
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
console.log('e', e)
|
console.log('error', e)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,12 +1,11 @@
|
|||||||
import { parser_wasm } from './abstractSyntaxTree'
|
|
||||||
import { KCLError } from './errors'
|
import { KCLError } from './errors'
|
||||||
import { initPromise } from './rust'
|
import { initPromise, parse } from './wasm'
|
||||||
|
|
||||||
beforeAll(() => initPromise)
|
beforeAll(() => initPromise)
|
||||||
|
|
||||||
describe('testing AST', () => {
|
describe('testing AST', () => {
|
||||||
test('5 + 6', () => {
|
test('5 + 6', () => {
|
||||||
const result = parser_wasm('5 +6')
|
const result = parse('5 +6')
|
||||||
delete (result as any).nonCodeMeta
|
delete (result as any).nonCodeMeta
|
||||||
expect(result.body).toEqual([
|
expect(result.body).toEqual([
|
||||||
{
|
{
|
||||||
@ -37,7 +36,7 @@ describe('testing AST', () => {
|
|||||||
])
|
])
|
||||||
})
|
})
|
||||||
test('const myVar = 5', () => {
|
test('const myVar = 5', () => {
|
||||||
const { body } = parser_wasm('const myVar = 5')
|
const { body } = parse('const myVar = 5')
|
||||||
expect(body).toEqual([
|
expect(body).toEqual([
|
||||||
{
|
{
|
||||||
type: 'VariableDeclaration',
|
type: 'VariableDeclaration',
|
||||||
@ -71,7 +70,7 @@ describe('testing AST', () => {
|
|||||||
const code = `const myVar = 5
|
const code = `const myVar = 5
|
||||||
const newVar = myVar + 1
|
const newVar = myVar + 1
|
||||||
`
|
`
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
expect(body).toEqual([
|
expect(body).toEqual([
|
||||||
{
|
{
|
||||||
type: 'VariableDeclaration',
|
type: 'VariableDeclaration',
|
||||||
@ -143,7 +142,7 @@ const newVar = myVar + 1
|
|||||||
|
|
||||||
describe('testing function declaration', () => {
|
describe('testing function declaration', () => {
|
||||||
test('fn funcN = () => {}', () => {
|
test('fn funcN = () => {}', () => {
|
||||||
const { body } = parser_wasm('fn funcN = () => {}')
|
const { body } = parse('fn funcN = () => {}')
|
||||||
delete (body[0] as any).declarations[0].init.body.nonCodeMeta
|
delete (body[0] as any).declarations[0].init.body.nonCodeMeta
|
||||||
expect(body).toEqual([
|
expect(body).toEqual([
|
||||||
{
|
{
|
||||||
@ -179,7 +178,7 @@ describe('testing function declaration', () => {
|
|||||||
])
|
])
|
||||||
})
|
})
|
||||||
test('fn funcN = (a, b) => {return a + b}', () => {
|
test('fn funcN = (a, b) => {return a + b}', () => {
|
||||||
const { body } = parser_wasm(
|
const { body } = parse(
|
||||||
['fn funcN = (a, b) => {', ' return a + b', '}'].join('\n')
|
['fn funcN = (a, b) => {', ' return a + b', '}'].join('\n')
|
||||||
)
|
)
|
||||||
delete (body[0] as any).declarations[0].init.body.nonCodeMeta
|
delete (body[0] as any).declarations[0].init.body.nonCodeMeta
|
||||||
@ -256,7 +255,7 @@ describe('testing function declaration', () => {
|
|||||||
test('call expression assignment', () => {
|
test('call expression assignment', () => {
|
||||||
const code = `fn funcN = (a, b) => { return a + b }
|
const code = `fn funcN = (a, b) => { return a + b }
|
||||||
const myVar = funcN(1, 2)`
|
const myVar = funcN(1, 2)`
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
delete (body[0] as any).declarations[0].init.body.nonCodeMeta
|
delete (body[0] as any).declarations[0].init.body.nonCodeMeta
|
||||||
expect(body).toEqual([
|
expect(body).toEqual([
|
||||||
{
|
{
|
||||||
@ -388,7 +387,7 @@ describe('testing pipe operator special', () => {
|
|||||||
|> lineTo([1, 1], %)
|
|> lineTo([1, 1], %)
|
||||||
|> rx(45, %)
|
|> rx(45, %)
|
||||||
`
|
`
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
delete (body[0] as any).declarations[0].init.nonCodeMeta
|
delete (body[0] as any).declarations[0].init.nonCodeMeta
|
||||||
expect(body).toEqual([
|
expect(body).toEqual([
|
||||||
{
|
{
|
||||||
@ -404,7 +403,7 @@ describe('testing pipe operator special', () => {
|
|||||||
id: { type: 'Identifier', start: 6, end: 14, name: 'mySketch' },
|
id: { type: 'Identifier', start: 6, end: 14, name: 'mySketch' },
|
||||||
init: {
|
init: {
|
||||||
type: 'PipeExpression',
|
type: 'PipeExpression',
|
||||||
start: 15,
|
start: 17,
|
||||||
end: 145,
|
end: 145,
|
||||||
body: [
|
body: [
|
||||||
{
|
{
|
||||||
@ -624,7 +623,7 @@ describe('testing pipe operator special', () => {
|
|||||||
})
|
})
|
||||||
test('pipe operator with binary expression', () => {
|
test('pipe operator with binary expression', () => {
|
||||||
let code = `const myVar = 5 + 6 |> myFunc(45, %)`
|
let code = `const myVar = 5 + 6 |> myFunc(45, %)`
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
delete (body as any)[0].declarations[0].init.nonCodeMeta
|
delete (body as any)[0].declarations[0].init.nonCodeMeta
|
||||||
expect(body).toEqual([
|
expect(body).toEqual([
|
||||||
{
|
{
|
||||||
@ -645,7 +644,7 @@ describe('testing pipe operator special', () => {
|
|||||||
},
|
},
|
||||||
init: {
|
init: {
|
||||||
type: 'PipeExpression',
|
type: 'PipeExpression',
|
||||||
start: 12,
|
start: 14,
|
||||||
end: 36,
|
end: 36,
|
||||||
body: [
|
body: [
|
||||||
{
|
{
|
||||||
@ -706,7 +705,7 @@ describe('testing pipe operator special', () => {
|
|||||||
})
|
})
|
||||||
test('array expression', () => {
|
test('array expression', () => {
|
||||||
let code = `const yo = [1, '2', three, 4 + 5]`
|
let code = `const yo = [1, '2', three, 4 + 5]`
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
expect(body).toEqual([
|
expect(body).toEqual([
|
||||||
{
|
{
|
||||||
type: 'VariableDeclaration',
|
type: 'VariableDeclaration',
|
||||||
@ -781,7 +780,7 @@ describe('testing pipe operator special', () => {
|
|||||||
'const three = 3',
|
'const three = 3',
|
||||||
"const yo = {aStr: 'str', anum: 2, identifier: three, binExp: 4 + 5}",
|
"const yo = {aStr: 'str', anum: 2, identifier: three, binExp: 4 + 5}",
|
||||||
].join('\n')
|
].join('\n')
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
expect(body).toEqual([
|
expect(body).toEqual([
|
||||||
{
|
{
|
||||||
type: 'VariableDeclaration',
|
type: 'VariableDeclaration',
|
||||||
@ -925,7 +924,7 @@ describe('testing pipe operator special', () => {
|
|||||||
const code = `const yo = {key: {
|
const code = `const yo = {key: {
|
||||||
key2: 'value'
|
key2: 'value'
|
||||||
}}`
|
}}`
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
expect(body).toEqual([
|
expect(body).toEqual([
|
||||||
{
|
{
|
||||||
type: 'VariableDeclaration',
|
type: 'VariableDeclaration',
|
||||||
@ -993,7 +992,7 @@ describe('testing pipe operator special', () => {
|
|||||||
})
|
})
|
||||||
test('object expression with array ast', () => {
|
test('object expression with array ast', () => {
|
||||||
const code = `const yo = {key: [1, '2']}`
|
const code = `const yo = {key: [1, '2']}`
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
expect(body).toEqual([
|
expect(body).toEqual([
|
||||||
{
|
{
|
||||||
type: 'VariableDeclaration',
|
type: 'VariableDeclaration',
|
||||||
@ -1057,7 +1056,7 @@ describe('testing pipe operator special', () => {
|
|||||||
})
|
})
|
||||||
test('object memberExpression simple', () => {
|
test('object memberExpression simple', () => {
|
||||||
const code = `const prop = yo.one.two`
|
const code = `const prop = yo.one.two`
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
expect(body).toEqual([
|
expect(body).toEqual([
|
||||||
{
|
{
|
||||||
type: 'VariableDeclaration',
|
type: 'VariableDeclaration',
|
||||||
@ -1112,7 +1111,7 @@ describe('testing pipe operator special', () => {
|
|||||||
})
|
})
|
||||||
test('object memberExpression with square braces', () => {
|
test('object memberExpression with square braces', () => {
|
||||||
const code = `const prop = yo.one["two"]`
|
const code = `const prop = yo.one["two"]`
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
expect(body).toEqual([
|
expect(body).toEqual([
|
||||||
{
|
{
|
||||||
type: 'VariableDeclaration',
|
type: 'VariableDeclaration',
|
||||||
@ -1168,7 +1167,7 @@ describe('testing pipe operator special', () => {
|
|||||||
})
|
})
|
||||||
test('object memberExpression with two square braces literal and identifier', () => {
|
test('object memberExpression with two square braces literal and identifier', () => {
|
||||||
const code = `const prop = yo["one"][two]`
|
const code = `const prop = yo["one"][two]`
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
expect(body).toEqual([
|
expect(body).toEqual([
|
||||||
{
|
{
|
||||||
type: 'VariableDeclaration',
|
type: 'VariableDeclaration',
|
||||||
@ -1227,7 +1226,7 @@ describe('testing pipe operator special', () => {
|
|||||||
describe('nests binary expressions correctly', () => {
|
describe('nests binary expressions correctly', () => {
|
||||||
it('works with the simple case', () => {
|
it('works with the simple case', () => {
|
||||||
const code = `const yo = 1 + 2`
|
const code = `const yo = 1 + 2`
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
expect(body[0]).toEqual({
|
expect(body[0]).toEqual({
|
||||||
type: 'VariableDeclaration',
|
type: 'VariableDeclaration',
|
||||||
start: 0,
|
start: 0,
|
||||||
@ -1271,7 +1270,7 @@ describe('nests binary expressions correctly', () => {
|
|||||||
it('should nest according to precedence with multiply first', () => {
|
it('should nest according to precedence with multiply first', () => {
|
||||||
// should be binExp { binExp { lit-1 * lit-2 } + lit}
|
// should be binExp { binExp { lit-1 * lit-2 } + lit}
|
||||||
const code = `const yo = 1 * 2 + 3`
|
const code = `const yo = 1 * 2 + 3`
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
expect(body[0]).toEqual({
|
expect(body[0]).toEqual({
|
||||||
type: 'VariableDeclaration',
|
type: 'VariableDeclaration',
|
||||||
start: 0,
|
start: 0,
|
||||||
@ -1328,7 +1327,7 @@ describe('nests binary expressions correctly', () => {
|
|||||||
it('should nest according to precedence with sum first', () => {
|
it('should nest according to precedence with sum first', () => {
|
||||||
// should be binExp { lit-1 + binExp { lit-2 * lit-3 } }
|
// should be binExp { lit-1 + binExp { lit-2 * lit-3 } }
|
||||||
const code = `const yo = 1 + 2 * 3`
|
const code = `const yo = 1 + 2 * 3`
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
expect(body[0]).toEqual({
|
expect(body[0]).toEqual({
|
||||||
type: 'VariableDeclaration',
|
type: 'VariableDeclaration',
|
||||||
start: 0,
|
start: 0,
|
||||||
@ -1384,7 +1383,7 @@ describe('nests binary expressions correctly', () => {
|
|||||||
})
|
})
|
||||||
it('should nest properly with two opperators of equal precedence', () => {
|
it('should nest properly with two opperators of equal precedence', () => {
|
||||||
const code = `const yo = 1 + 2 - 3`
|
const code = `const yo = 1 + 2 - 3`
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
expect((body[0] as any).declarations[0].init).toEqual({
|
expect((body[0] as any).declarations[0].init).toEqual({
|
||||||
type: 'BinaryExpression',
|
type: 'BinaryExpression',
|
||||||
start: 11,
|
start: 11,
|
||||||
@ -1421,7 +1420,7 @@ describe('nests binary expressions correctly', () => {
|
|||||||
})
|
})
|
||||||
it('should nest properly with two opperators of equal (but higher) precedence', () => {
|
it('should nest properly with two opperators of equal (but higher) precedence', () => {
|
||||||
const code = `const yo = 1 * 2 / 3`
|
const code = `const yo = 1 * 2 / 3`
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
expect((body[0] as any).declarations[0].init).toEqual({
|
expect((body[0] as any).declarations[0].init).toEqual({
|
||||||
type: 'BinaryExpression',
|
type: 'BinaryExpression',
|
||||||
start: 11,
|
start: 11,
|
||||||
@ -1458,7 +1457,7 @@ describe('nests binary expressions correctly', () => {
|
|||||||
})
|
})
|
||||||
it('should nest properly with longer example', () => {
|
it('should nest properly with longer example', () => {
|
||||||
const code = `const yo = 1 + 2 * (3 - 4) / 5 + 6`
|
const code = `const yo = 1 + 2 * (3 - 4) / 5 + 6`
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
const init = (body[0] as any).declarations[0].init
|
const init = (body[0] as any).declarations[0].init
|
||||||
expect(init).toEqual({
|
expect(init).toEqual({
|
||||||
type: 'BinaryExpression',
|
type: 'BinaryExpression',
|
||||||
@ -1520,14 +1519,12 @@ const key = 'c'`
|
|||||||
value: 'this is a comment',
|
value: 'this is a comment',
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
const { nonCodeMeta } = parser_wasm(code)
|
const { nonCodeMeta } = parse(code)
|
||||||
expect(nonCodeMeta.nonCodeNodes[0]).toEqual(nonCodeMetaInstance)
|
expect(nonCodeMeta.nonCodeNodes[0]).toEqual(nonCodeMetaInstance)
|
||||||
|
|
||||||
// extra whitespace won't change it's position (0) or value (NB the start end would have changed though)
|
// extra whitespace won't change it's position (0) or value (NB the start end would have changed though)
|
||||||
const codeWithExtraStartWhitespace = '\n\n\n' + code
|
const codeWithExtraStartWhitespace = '\n\n\n' + code
|
||||||
const { nonCodeMeta: nonCodeMeta2 } = parser_wasm(
|
const { nonCodeMeta: nonCodeMeta2 } = parse(codeWithExtraStartWhitespace)
|
||||||
codeWithExtraStartWhitespace
|
|
||||||
)
|
|
||||||
expect(nonCodeMeta2.nonCodeNodes[0].value).toStrictEqual(
|
expect(nonCodeMeta2.nonCodeNodes[0].value).toStrictEqual(
|
||||||
nonCodeMetaInstance.value
|
nonCodeMetaInstance.value
|
||||||
)
|
)
|
||||||
@ -1545,7 +1542,7 @@ const key = 'c'`
|
|||||||
|> close(%)
|
|> close(%)
|
||||||
`
|
`
|
||||||
|
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
const indexOfSecondLineToExpression = 2
|
const indexOfSecondLineToExpression = 2
|
||||||
const sketchNonCodeMeta = (body as any)[0].declarations[0].init.nonCodeMeta
|
const sketchNonCodeMeta = (body as any)[0].declarations[0].init.nonCodeMeta
|
||||||
.nonCodeNodes
|
.nonCodeNodes
|
||||||
@ -1569,7 +1566,7 @@ const key = 'c'`
|
|||||||
' |> rx(90, %)',
|
' |> rx(90, %)',
|
||||||
].join('\n')
|
].join('\n')
|
||||||
|
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
const sketchNonCodeMeta = (body[0] as any).declarations[0].init.nonCodeMeta
|
const sketchNonCodeMeta = (body[0] as any).declarations[0].init.nonCodeMeta
|
||||||
.nonCodeNodes
|
.nonCodeNodes
|
||||||
expect(sketchNonCodeMeta[3]).toEqual({
|
expect(sketchNonCodeMeta[3]).toEqual({
|
||||||
@ -1587,7 +1584,7 @@ const key = 'c'`
|
|||||||
describe('test UnaryExpression', () => {
|
describe('test UnaryExpression', () => {
|
||||||
it('should parse a unary expression in simple var dec situation', () => {
|
it('should parse a unary expression in simple var dec situation', () => {
|
||||||
const code = `const myVar = -min(4, 100)`
|
const code = `const myVar = -min(4, 100)`
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
const myVarInit = (body?.[0] as any).declarations[0]?.init
|
const myVarInit = (body?.[0] as any).declarations[0]?.init
|
||||||
expect(myVarInit).toEqual({
|
expect(myVarInit).toEqual({
|
||||||
type: 'UnaryExpression',
|
type: 'UnaryExpression',
|
||||||
@ -1613,7 +1610,7 @@ describe('test UnaryExpression', () => {
|
|||||||
describe('testing nested call expressions', () => {
|
describe('testing nested call expressions', () => {
|
||||||
it('callExp in a binExp in a callExp', () => {
|
it('callExp in a binExp in a callExp', () => {
|
||||||
const code = 'const myVar = min(100, 1 + legLen(5, 3))'
|
const code = 'const myVar = min(100, 1 + legLen(5, 3))'
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
const myVarInit = (body?.[0] as any).declarations[0]?.init
|
const myVarInit = (body?.[0] as any).declarations[0]?.init
|
||||||
expect(myVarInit).toEqual({
|
expect(myVarInit).toEqual({
|
||||||
type: 'CallExpression',
|
type: 'CallExpression',
|
||||||
@ -1651,7 +1648,7 @@ describe('testing nested call expressions', () => {
|
|||||||
describe('should recognise callExpresions in binaryExpressions', () => {
|
describe('should recognise callExpresions in binaryExpressions', () => {
|
||||||
const code = "xLineTo(segEndX('seg02', %) + 1, %)"
|
const code = "xLineTo(segEndX('seg02', %) + 1, %)"
|
||||||
it('should recognise the callExp', () => {
|
it('should recognise the callExp', () => {
|
||||||
const { body } = parser_wasm(code)
|
const { body } = parse(code)
|
||||||
const callExpArgs = (body?.[0] as any).expression?.arguments
|
const callExpArgs = (body?.[0] as any).expression?.arguments
|
||||||
expect(callExpArgs).toEqual([
|
expect(callExpArgs).toEqual([
|
||||||
{
|
{
|
||||||
@ -1690,8 +1687,7 @@ describe('parsing errors', () => {
|
|||||||
|
|
||||||
let _theError
|
let _theError
|
||||||
try {
|
try {
|
||||||
const result = expect(parser_wasm(code))
|
const result = expect(parse(code))
|
||||||
console.log('result', result)
|
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
_theError = e
|
_theError = e
|
||||||
}
|
}
|
||||||
|
@ -1,48 +0,0 @@
|
|||||||
import { Program } from './abstractSyntaxTreeTypes'
|
|
||||||
import { parse_js } from '../wasm-lib/pkg/wasm_lib'
|
|
||||||
import { initPromise } from './rust'
|
|
||||||
import { Token } from './tokeniser'
|
|
||||||
import { KCLError } from './errors'
|
|
||||||
import { KclError as RustKclError } from '../wasm-lib/kcl/bindings/KclError'
|
|
||||||
|
|
||||||
export const rangeTypeFix = (ranges: number[][]): [number, number][] =>
|
|
||||||
ranges.map(([start, end]) => [start, end])
|
|
||||||
|
|
||||||
export const parser_wasm = (code: string): Program => {
|
|
||||||
try {
|
|
||||||
const program: Program = parse_js(code)
|
|
||||||
return program
|
|
||||||
} catch (e: any) {
|
|
||||||
const parsed: RustKclError = JSON.parse(e.toString())
|
|
||||||
const kclError = new KCLError(
|
|
||||||
parsed.kind,
|
|
||||||
parsed.msg,
|
|
||||||
rangeTypeFix(parsed.sourceRanges)
|
|
||||||
)
|
|
||||||
|
|
||||||
console.log(kclError)
|
|
||||||
throw kclError
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function asyncParser(code: string): Promise<Program> {
|
|
||||||
await initPromise
|
|
||||||
try {
|
|
||||||
const program: Program = parse_js(code)
|
|
||||||
return program
|
|
||||||
} catch (e: any) {
|
|
||||||
const parsed: RustKclError = JSON.parse(e.toString())
|
|
||||||
const kclError = new KCLError(
|
|
||||||
parsed.kind,
|
|
||||||
parsed.msg,
|
|
||||||
rangeTypeFix(parsed.sourceRanges)
|
|
||||||
)
|
|
||||||
|
|
||||||
console.log(kclError)
|
|
||||||
throw kclError
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function rangeOfToken(token: Token | undefined): [number, number][] {
|
|
||||||
return token === undefined ? [] : [[token.start, token.end]]
|
|
||||||
}
|
|
@ -1,37 +0,0 @@
|
|||||||
export type { Program } from '../wasm-lib/kcl/bindings/Program'
|
|
||||||
export type { Value } from '../wasm-lib/kcl/bindings/Value'
|
|
||||||
export type { ObjectExpression } from '../wasm-lib/kcl/bindings/ObjectExpression'
|
|
||||||
export type { MemberExpression } from '../wasm-lib/kcl/bindings/MemberExpression'
|
|
||||||
export type { PipeExpression } from '../wasm-lib/kcl/bindings/PipeExpression'
|
|
||||||
export type { VariableDeclaration } from '../wasm-lib/kcl/bindings/VariableDeclaration'
|
|
||||||
export type { PipeSubstitution } from '../wasm-lib/kcl/bindings/PipeSubstitution'
|
|
||||||
export type { Identifier } from '../wasm-lib/kcl/bindings/Identifier'
|
|
||||||
export type { UnaryExpression } from '../wasm-lib/kcl/bindings/UnaryExpression'
|
|
||||||
export type { BinaryExpression } from '../wasm-lib/kcl/bindings/BinaryExpression'
|
|
||||||
export type { ReturnStatement } from '../wasm-lib/kcl/bindings/ReturnStatement'
|
|
||||||
export type { ExpressionStatement } from '../wasm-lib/kcl/bindings/ExpressionStatement'
|
|
||||||
export type { CallExpression } from '../wasm-lib/kcl/bindings/CallExpression'
|
|
||||||
export type { VariableDeclarator } from '../wasm-lib/kcl/bindings/VariableDeclarator'
|
|
||||||
export type { BinaryPart } from '../wasm-lib/kcl/bindings/BinaryPart'
|
|
||||||
export type { Literal } from '../wasm-lib/kcl/bindings/Literal'
|
|
||||||
export type { ArrayExpression } from '../wasm-lib/kcl/bindings/ArrayExpression'
|
|
||||||
|
|
||||||
export type SyntaxType =
|
|
||||||
| 'Program'
|
|
||||||
| 'ExpressionStatement'
|
|
||||||
| 'BinaryExpression'
|
|
||||||
| 'CallExpression'
|
|
||||||
| 'Identifier'
|
|
||||||
| 'ReturnStatement'
|
|
||||||
| 'VariableDeclaration'
|
|
||||||
| 'VariableDeclarator'
|
|
||||||
| 'MemberExpression'
|
|
||||||
| 'ArrayExpression'
|
|
||||||
| 'ObjectExpression'
|
|
||||||
| 'ObjectProperty'
|
|
||||||
| 'FunctionExpression'
|
|
||||||
| 'PipeExpression'
|
|
||||||
| 'PipeSubstitution'
|
|
||||||
| 'Literal'
|
|
||||||
| 'NonCodeNode'
|
|
||||||
| 'UnaryExpression'
|
|
@ -1,5 +1,4 @@
|
|||||||
import { parser_wasm } from './abstractSyntaxTree'
|
import { parse, initPromise } from './wasm'
|
||||||
import { initPromise } from './rust'
|
|
||||||
import { enginelessExecutor } from '../lib/testHelpers'
|
import { enginelessExecutor } from '../lib/testHelpers'
|
||||||
|
|
||||||
beforeAll(() => initPromise)
|
beforeAll(() => initPromise)
|
||||||
@ -13,7 +12,7 @@ const mySketch001 = startSketchAt([0, 0])
|
|||||||
|> lineTo([0.46, -5.82], %)
|
|> lineTo([0.46, -5.82], %)
|
||||||
// |> rx(45, %)
|
// |> rx(45, %)
|
||||||
show(mySketch001)`
|
show(mySketch001)`
|
||||||
const programMemory = await enginelessExecutor(parser_wasm(code))
|
const programMemory = await enginelessExecutor(parse(code))
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
const shown = programMemory?.return?.map(
|
const shown = programMemory?.return?.map(
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
@ -69,7 +68,7 @@ const mySketch001 = startSketchAt([0, 0])
|
|||||||
// |> rx(45, %)
|
// |> rx(45, %)
|
||||||
|> extrude(2, %)
|
|> extrude(2, %)
|
||||||
show(mySketch001)`
|
show(mySketch001)`
|
||||||
const programMemory = await enginelessExecutor(parser_wasm(code))
|
const programMemory = await enginelessExecutor(parse(code))
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
const shown = programMemory?.return?.map(
|
const shown = programMemory?.return?.map(
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
@ -109,7 +108,7 @@ const sk2 = startSketchAt([0, 0])
|
|||||||
|
|
||||||
|
|
||||||
show(theExtrude, sk2)`
|
show(theExtrude, sk2)`
|
||||||
const programMemory = await enginelessExecutor(parser_wasm(code))
|
const programMemory = await enginelessExecutor(parse(code))
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
const geos = programMemory?.return?.map(
|
const geos = programMemory?.return?.map(
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
|
@ -1,10 +1,7 @@
|
|||||||
import fs from 'node:fs'
|
import fs from 'node:fs'
|
||||||
|
|
||||||
import { parser_wasm } from './abstractSyntaxTree'
|
import { parse, ProgramMemory, SketchGroup, initPromise } from './wasm'
|
||||||
import { ProgramMemory, SketchGroup } from './executor'
|
|
||||||
import { initPromise } from './rust'
|
|
||||||
import { enginelessExecutor } from '../lib/testHelpers'
|
import { enginelessExecutor } from '../lib/testHelpers'
|
||||||
import { vi } from 'vitest'
|
|
||||||
import { KCLError } from './errors'
|
import { KCLError } from './errors'
|
||||||
|
|
||||||
beforeAll(() => initPromise)
|
beforeAll(() => initPromise)
|
||||||
@ -403,7 +400,7 @@ async function exe(
|
|||||||
code: string,
|
code: string,
|
||||||
programMemory: ProgramMemory = { root: {}, return: null }
|
programMemory: ProgramMemory = { root: {}, return: null }
|
||||||
) {
|
) {
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
|
|
||||||
const result = await enginelessExecutor(ast, programMemory)
|
const result = await enginelessExecutor(ast, programMemory)
|
||||||
return result
|
return result
|
||||||
|
@ -1,81 +0,0 @@
|
|||||||
import { Program } from './abstractSyntaxTreeTypes'
|
|
||||||
import {
|
|
||||||
EngineCommandManager,
|
|
||||||
ArtifactMap,
|
|
||||||
SourceRangeMap,
|
|
||||||
} from './std/engineConnection'
|
|
||||||
import { ProgramReturn } from '../wasm-lib/kcl/bindings/ProgramReturn'
|
|
||||||
import { MemoryItem } from '../wasm-lib/kcl/bindings/MemoryItem'
|
|
||||||
import { execute_wasm } from '../wasm-lib/pkg/wasm_lib'
|
|
||||||
import { KCLError } from './errors'
|
|
||||||
import { KclError as RustKclError } from '../wasm-lib/kcl/bindings/KclError'
|
|
||||||
import { rangeTypeFix } from './abstractSyntaxTree'
|
|
||||||
|
|
||||||
export type { SourceRange } from '../wasm-lib/kcl/bindings/SourceRange'
|
|
||||||
export type { Position } from '../wasm-lib/kcl/bindings/Position'
|
|
||||||
export type { Rotation } from '../wasm-lib/kcl/bindings/Rotation'
|
|
||||||
export type { Path } from '../wasm-lib/kcl/bindings/Path'
|
|
||||||
export type { SketchGroup } from '../wasm-lib/kcl/bindings/SketchGroup'
|
|
||||||
export type { MemoryItem } from '../wasm-lib/kcl/bindings/MemoryItem'
|
|
||||||
export type { ExtrudeSurface } from '../wasm-lib/kcl/bindings/ExtrudeSurface'
|
|
||||||
|
|
||||||
export type PathToNode = [string | number, string][]
|
|
||||||
|
|
||||||
interface Memory {
|
|
||||||
[key: string]: MemoryItem
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ProgramMemory {
|
|
||||||
root: Memory
|
|
||||||
return: ProgramReturn | null
|
|
||||||
}
|
|
||||||
|
|
||||||
export const executor = async (
|
|
||||||
node: Program,
|
|
||||||
programMemory: ProgramMemory = { root: {}, return: null },
|
|
||||||
engineCommandManager: EngineCommandManager,
|
|
||||||
// work around while the gemotry is still be stored on the frontend
|
|
||||||
// will be removed when the stream UI is added.
|
|
||||||
tempMapCallback: (a: {
|
|
||||||
artifactMap: ArtifactMap
|
|
||||||
sourceRangeMap: SourceRangeMap
|
|
||||||
}) => void = () => {}
|
|
||||||
): Promise<ProgramMemory> => {
|
|
||||||
engineCommandManager.startNewSession()
|
|
||||||
const _programMemory = await _executor(
|
|
||||||
node,
|
|
||||||
programMemory,
|
|
||||||
engineCommandManager
|
|
||||||
)
|
|
||||||
const { artifactMap, sourceRangeMap } =
|
|
||||||
await engineCommandManager.waitForAllCommands(node, _programMemory)
|
|
||||||
tempMapCallback({ artifactMap, sourceRangeMap })
|
|
||||||
|
|
||||||
engineCommandManager.endSession()
|
|
||||||
return _programMemory
|
|
||||||
}
|
|
||||||
|
|
||||||
export const _executor = async (
|
|
||||||
node: Program,
|
|
||||||
programMemory: ProgramMemory = { root: {}, return: null },
|
|
||||||
engineCommandManager: EngineCommandManager
|
|
||||||
): Promise<ProgramMemory> => {
|
|
||||||
try {
|
|
||||||
const memory: ProgramMemory = await execute_wasm(
|
|
||||||
JSON.stringify(node),
|
|
||||||
JSON.stringify(programMemory),
|
|
||||||
engineCommandManager
|
|
||||||
)
|
|
||||||
return memory
|
|
||||||
} catch (e: any) {
|
|
||||||
const parsed: RustKclError = JSON.parse(e.toString())
|
|
||||||
const kclError = new KCLError(
|
|
||||||
parsed.kind,
|
|
||||||
parsed.msg,
|
|
||||||
rangeTypeFix(parsed.sourceRanges)
|
|
||||||
)
|
|
||||||
|
|
||||||
console.log(kclError)
|
|
||||||
throw kclError
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,7 +1,5 @@
|
|||||||
import { getNodePathFromSourceRange, getNodeFromPath } from './queryAst'
|
import { getNodePathFromSourceRange, getNodeFromPath } from './queryAst'
|
||||||
import { parser_wasm } from './abstractSyntaxTree'
|
import { Identifier, parse, initPromise } from './wasm'
|
||||||
import { initPromise } from './rust'
|
|
||||||
import { Identifier } from './abstractSyntaxTreeTypes'
|
|
||||||
|
|
||||||
beforeAll(() => initPromise)
|
beforeAll(() => initPromise)
|
||||||
|
|
||||||
@ -21,7 +19,7 @@ const sk3 = startSketchAt([0, 0])
|
|||||||
lineToSubstringIndex + subStr.length,
|
lineToSubstringIndex + subStr.length,
|
||||||
]
|
]
|
||||||
|
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const nodePath = getNodePathFromSourceRange(ast, sourceRange)
|
const nodePath = getNodePathFromSourceRange(ast, sourceRange)
|
||||||
const { node } = getNodeFromPath<any>(ast, nodePath)
|
const { node } = getNodeFromPath<any>(ast, nodePath)
|
||||||
|
|
||||||
@ -46,7 +44,7 @@ const b1 = cube([0,0], 10)`
|
|||||||
subStrIndex + 'pos'.length,
|
subStrIndex + 'pos'.length,
|
||||||
]
|
]
|
||||||
|
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const nodePath = getNodePathFromSourceRange(ast, sourceRange)
|
const nodePath = getNodePathFromSourceRange(ast, sourceRange)
|
||||||
const node = getNodeFromPath<Identifier>(ast, nodePath).node
|
const node = getNodeFromPath<Identifier>(ast, nodePath).node
|
||||||
|
|
||||||
@ -80,7 +78,7 @@ const b1 = cube([0,0], 10)`
|
|||||||
subStrIndex + 'scale'.length,
|
subStrIndex + 'scale'.length,
|
||||||
]
|
]
|
||||||
|
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const nodePath = getNodePathFromSourceRange(ast, sourceRange)
|
const nodePath = getNodePathFromSourceRange(ast, sourceRange)
|
||||||
const node = getNodeFromPath<Identifier>(ast, nodePath).node
|
const node = getNodeFromPath<Identifier>(ast, nodePath).node
|
||||||
expect(nodePath).toEqual([
|
expect(nodePath).toEqual([
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
import { parser_wasm } from './abstractSyntaxTree'
|
import { parse, recast, initPromise } from './wasm'
|
||||||
import {
|
import {
|
||||||
createLiteral,
|
createLiteral,
|
||||||
createIdentifier,
|
createIdentifier,
|
||||||
@ -13,8 +13,6 @@ import {
|
|||||||
giveSketchFnCallTag,
|
giveSketchFnCallTag,
|
||||||
moveValueIntoNewVariable,
|
moveValueIntoNewVariable,
|
||||||
} from './modifyAst'
|
} from './modifyAst'
|
||||||
import { recast } from './recast'
|
|
||||||
import { initPromise } from './rust'
|
|
||||||
import { enginelessExecutor } from '../lib/testHelpers'
|
import { enginelessExecutor } from '../lib/testHelpers'
|
||||||
|
|
||||||
beforeAll(() => initPromise)
|
beforeAll(() => initPromise)
|
||||||
@ -126,7 +124,7 @@ function giveSketchFnCallTagTestHelper(
|
|||||||
// giveSketchFnCallTag inputs and outputs an ast, which is very verbose for testing
|
// giveSketchFnCallTag inputs and outputs an ast, which is very verbose for testing
|
||||||
// this wrapper changes the input and output to code
|
// this wrapper changes the input and output to code
|
||||||
// making it more of an integration test, but easier to read the test intention is the goal
|
// making it more of an integration test, but easier to read the test intention is the goal
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const start = code.indexOf(searchStr)
|
const start = code.indexOf(searchStr)
|
||||||
const range: [number, number] = [start, start + searchStr.length]
|
const range: [number, number] = [start, start + searchStr.length]
|
||||||
const { modifiedAst, tag, isTagExisting } = giveSketchFnCallTag(ast, range)
|
const { modifiedAst, tag, isTagExisting } = giveSketchFnCallTag(ast, range)
|
||||||
@ -197,7 +195,7 @@ const part001 = startSketchAt([-1.2, 4.83])
|
|||||||
const yo2 = hmm([identifierGuy + 5])
|
const yo2 = hmm([identifierGuy + 5])
|
||||||
show(part001)`
|
show(part001)`
|
||||||
it('should move a binary expression into a new variable', async () => {
|
it('should move a binary expression into a new variable', async () => {
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const programMemory = await enginelessExecutor(ast)
|
const programMemory = await enginelessExecutor(ast)
|
||||||
const startIndex = code.indexOf('100 + 100') + 1
|
const startIndex = code.indexOf('100 + 100') + 1
|
||||||
const { modifiedAst } = moveValueIntoNewVariable(
|
const { modifiedAst } = moveValueIntoNewVariable(
|
||||||
@ -211,7 +209,7 @@ show(part001)`
|
|||||||
expect(newCode).toContain(`angledLine([newVar, 3.09], %)`)
|
expect(newCode).toContain(`angledLine([newVar, 3.09], %)`)
|
||||||
})
|
})
|
||||||
it('should move a value into a new variable', async () => {
|
it('should move a value into a new variable', async () => {
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const programMemory = await enginelessExecutor(ast)
|
const programMemory = await enginelessExecutor(ast)
|
||||||
const startIndex = code.indexOf('2.8') + 1
|
const startIndex = code.indexOf('2.8') + 1
|
||||||
const { modifiedAst } = moveValueIntoNewVariable(
|
const { modifiedAst } = moveValueIntoNewVariable(
|
||||||
@ -225,7 +223,7 @@ show(part001)`
|
|||||||
expect(newCode).toContain(`line([newVar, 0], %)`)
|
expect(newCode).toContain(`line([newVar, 0], %)`)
|
||||||
})
|
})
|
||||||
it('should move a callExpression into a new variable', async () => {
|
it('should move a callExpression into a new variable', async () => {
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const programMemory = await enginelessExecutor(ast)
|
const programMemory = await enginelessExecutor(ast)
|
||||||
const startIndex = code.indexOf('def(')
|
const startIndex = code.indexOf('def(')
|
||||||
const { modifiedAst } = moveValueIntoNewVariable(
|
const { modifiedAst } = moveValueIntoNewVariable(
|
||||||
@ -239,7 +237,7 @@ show(part001)`
|
|||||||
expect(newCode).toContain(`angledLine([newVar, 3.09], %)`)
|
expect(newCode).toContain(`angledLine([newVar, 3.09], %)`)
|
||||||
})
|
})
|
||||||
it('should move a binary expression with call expression into a new variable', async () => {
|
it('should move a binary expression with call expression into a new variable', async () => {
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const programMemory = await enginelessExecutor(ast)
|
const programMemory = await enginelessExecutor(ast)
|
||||||
const startIndex = code.indexOf('jkl(') + 1
|
const startIndex = code.indexOf('jkl(') + 1
|
||||||
const { modifiedAst } = moveValueIntoNewVariable(
|
const { modifiedAst } = moveValueIntoNewVariable(
|
||||||
@ -253,7 +251,7 @@ show(part001)`
|
|||||||
expect(newCode).toContain(`angledLine([newVar, 3.09], %)`)
|
expect(newCode).toContain(`angledLine([newVar, 3.09], %)`)
|
||||||
})
|
})
|
||||||
it('should move a identifier into a new variable', async () => {
|
it('should move a identifier into a new variable', async () => {
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const programMemory = await enginelessExecutor(ast)
|
const programMemory = await enginelessExecutor(ast)
|
||||||
const startIndex = code.indexOf('identifierGuy +') + 1
|
const startIndex = code.indexOf('identifierGuy +') + 1
|
||||||
const { modifiedAst } = moveValueIntoNewVariable(
|
const { modifiedAst } = moveValueIntoNewVariable(
|
||||||
|
@ -14,14 +14,15 @@ import {
|
|||||||
ObjectExpression,
|
ObjectExpression,
|
||||||
UnaryExpression,
|
UnaryExpression,
|
||||||
BinaryExpression,
|
BinaryExpression,
|
||||||
} from './abstractSyntaxTreeTypes'
|
PathToNode,
|
||||||
|
ProgramMemory,
|
||||||
|
} from './wasm'
|
||||||
import {
|
import {
|
||||||
findAllPreviousVariables,
|
findAllPreviousVariables,
|
||||||
getNodeFromPath,
|
getNodeFromPath,
|
||||||
getNodePathFromSourceRange,
|
getNodePathFromSourceRange,
|
||||||
isNodeSafeToReplace,
|
isNodeSafeToReplace,
|
||||||
} from './queryAst'
|
} from './queryAst'
|
||||||
import { PathToNode, ProgramMemory } from './executor'
|
|
||||||
import {
|
import {
|
||||||
addTagForSketchOnFace,
|
addTagForSketchOnFace,
|
||||||
getFirstArg,
|
getFirstArg,
|
||||||
|
@ -1,11 +1,10 @@
|
|||||||
import { parser_wasm } from './abstractSyntaxTree'
|
import { parse, recast, initPromise } from './wasm'
|
||||||
import {
|
import {
|
||||||
findAllPreviousVariables,
|
findAllPreviousVariables,
|
||||||
isNodeSafeToReplace,
|
isNodeSafeToReplace,
|
||||||
isTypeInValue,
|
isTypeInValue,
|
||||||
getNodePathFromSourceRange,
|
getNodePathFromSourceRange,
|
||||||
} from './queryAst'
|
} from './queryAst'
|
||||||
import { initPromise } from './rust'
|
|
||||||
import { enginelessExecutor } from '../lib/testHelpers'
|
import { enginelessExecutor } from '../lib/testHelpers'
|
||||||
import {
|
import {
|
||||||
createArrayExpression,
|
createArrayExpression,
|
||||||
@ -13,7 +12,6 @@ import {
|
|||||||
createLiteral,
|
createLiteral,
|
||||||
createPipeSubstitution,
|
createPipeSubstitution,
|
||||||
} from './modifyAst'
|
} from './modifyAst'
|
||||||
import { recast } from './recast'
|
|
||||||
|
|
||||||
beforeAll(() => initPromise)
|
beforeAll(() => initPromise)
|
||||||
|
|
||||||
@ -36,7 +34,7 @@ const variableBelowShouldNotBeIncluded = 3
|
|||||||
|
|
||||||
show(part001)`
|
show(part001)`
|
||||||
const rangeStart = code.indexOf('// selection-range-7ish-before-this') - 7
|
const rangeStart = code.indexOf('// selection-range-7ish-before-this') - 7
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const programMemory = await enginelessExecutor(ast)
|
const programMemory = await enginelessExecutor(ast)
|
||||||
|
|
||||||
const { variables, bodyPath, insertIndex } = findAllPreviousVariables(
|
const { variables, bodyPath, insertIndex } = findAllPreviousVariables(
|
||||||
@ -70,7 +68,7 @@ const yo = 5 + 6
|
|||||||
const yo2 = hmm([identifierGuy + 5])
|
const yo2 = hmm([identifierGuy + 5])
|
||||||
show(part001)`
|
show(part001)`
|
||||||
it('find a safe binaryExpression', () => {
|
it('find a safe binaryExpression', () => {
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const rangeStart = code.indexOf('100 + 100') + 2
|
const rangeStart = code.indexOf('100 + 100') + 2
|
||||||
const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
|
const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
|
||||||
expect(result.isSafe).toBe(true)
|
expect(result.isSafe).toBe(true)
|
||||||
@ -84,7 +82,7 @@ show(part001)`
|
|||||||
expect(outCode).toContain(`angledLine([replaceName, 3.09], %)`)
|
expect(outCode).toContain(`angledLine([replaceName, 3.09], %)`)
|
||||||
})
|
})
|
||||||
it('find a safe Identifier', () => {
|
it('find a safe Identifier', () => {
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const rangeStart = code.indexOf('abc')
|
const rangeStart = code.indexOf('abc')
|
||||||
const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
|
const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
|
||||||
expect(result.isSafe).toBe(true)
|
expect(result.isSafe).toBe(true)
|
||||||
@ -92,7 +90,7 @@ show(part001)`
|
|||||||
expect(code.slice(result.value.start, result.value.end)).toBe('abc')
|
expect(code.slice(result.value.start, result.value.end)).toBe('abc')
|
||||||
})
|
})
|
||||||
it('find a safe CallExpression', () => {
|
it('find a safe CallExpression', () => {
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const rangeStart = code.indexOf('def')
|
const rangeStart = code.indexOf('def')
|
||||||
const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
|
const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
|
||||||
expect(result.isSafe).toBe(true)
|
expect(result.isSafe).toBe(true)
|
||||||
@ -106,7 +104,7 @@ show(part001)`
|
|||||||
expect(outCode).toContain(`angledLine([replaceName, 3.09], %)`)
|
expect(outCode).toContain(`angledLine([replaceName, 3.09], %)`)
|
||||||
})
|
})
|
||||||
it('find an UNsafe CallExpression, as it has a PipeSubstitution', () => {
|
it('find an UNsafe CallExpression, as it has a PipeSubstitution', () => {
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const rangeStart = code.indexOf('ghi')
|
const rangeStart = code.indexOf('ghi')
|
||||||
const range: [number, number] = [rangeStart, rangeStart]
|
const range: [number, number] = [rangeStart, rangeStart]
|
||||||
const result = isNodeSafeToReplace(ast, range)
|
const result = isNodeSafeToReplace(ast, range)
|
||||||
@ -115,7 +113,7 @@ show(part001)`
|
|||||||
expect(code.slice(result.value.start, result.value.end)).toBe('ghi(%)')
|
expect(code.slice(result.value.start, result.value.end)).toBe('ghi(%)')
|
||||||
})
|
})
|
||||||
it('find an UNsafe Identifier, as it is a callee', () => {
|
it('find an UNsafe Identifier, as it is a callee', () => {
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const rangeStart = code.indexOf('ine([2.8,')
|
const rangeStart = code.indexOf('ine([2.8,')
|
||||||
const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
|
const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
|
||||||
expect(result.isSafe).toBe(false)
|
expect(result.isSafe).toBe(false)
|
||||||
@ -125,7 +123,7 @@ show(part001)`
|
|||||||
)
|
)
|
||||||
})
|
})
|
||||||
it("find a safe BinaryExpression that's assigned to a variable", () => {
|
it("find a safe BinaryExpression that's assigned to a variable", () => {
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const rangeStart = code.indexOf('5 + 6') + 1
|
const rangeStart = code.indexOf('5 + 6') + 1
|
||||||
const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
|
const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
|
||||||
expect(result.isSafe).toBe(true)
|
expect(result.isSafe).toBe(true)
|
||||||
@ -139,7 +137,7 @@ show(part001)`
|
|||||||
expect(outCode).toContain(`const yo = replaceName`)
|
expect(outCode).toContain(`const yo = replaceName`)
|
||||||
})
|
})
|
||||||
it('find a safe BinaryExpression that has a CallExpression within', () => {
|
it('find a safe BinaryExpression that has a CallExpression within', () => {
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const rangeStart = code.indexOf('jkl') + 1
|
const rangeStart = code.indexOf('jkl') + 1
|
||||||
const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
|
const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
|
||||||
expect(result.isSafe).toBe(true)
|
expect(result.isSafe).toBe(true)
|
||||||
@ -155,7 +153,7 @@ show(part001)`
|
|||||||
expect(outCode).toContain(`angledLine([replaceName, 3.09], %)`)
|
expect(outCode).toContain(`angledLine([replaceName, 3.09], %)`)
|
||||||
})
|
})
|
||||||
it('find a safe BinaryExpression within a CallExpression', () => {
|
it('find a safe BinaryExpression within a CallExpression', () => {
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const rangeStart = code.indexOf('identifierGuy') + 1
|
const rangeStart = code.indexOf('identifierGuy') + 1
|
||||||
const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
|
const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
|
||||||
expect(result.isSafe).toBe(true)
|
expect(result.isSafe).toBe(true)
|
||||||
@ -203,7 +201,7 @@ show(part001)`
|
|||||||
it('finds the second line when cursor is put at the end', () => {
|
it('finds the second line when cursor is put at the end', () => {
|
||||||
const searchLn = `line([0.94, 2.61], %)`
|
const searchLn = `line([0.94, 2.61], %)`
|
||||||
const sourceIndex = code.indexOf(searchLn) + searchLn.length
|
const sourceIndex = code.indexOf(searchLn) + searchLn.length
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const result = getNodePathFromSourceRange(ast, [sourceIndex, sourceIndex])
|
const result = getNodePathFromSourceRange(ast, [sourceIndex, sourceIndex])
|
||||||
expect(result).toEqual([
|
expect(result).toEqual([
|
||||||
['body', ''],
|
['body', ''],
|
||||||
@ -218,7 +216,7 @@ show(part001)`
|
|||||||
it('finds the last line when cursor is put at the end', () => {
|
it('finds the last line when cursor is put at the end', () => {
|
||||||
const searchLn = `line([-0.21, -1.4], %)`
|
const searchLn = `line([-0.21, -1.4], %)`
|
||||||
const sourceIndex = code.indexOf(searchLn) + searchLn.length
|
const sourceIndex = code.indexOf(searchLn) + searchLn.length
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const result = getNodePathFromSourceRange(ast, [sourceIndex, sourceIndex])
|
const result = getNodePathFromSourceRange(ast, [sourceIndex, sourceIndex])
|
||||||
const expected = [
|
const expected = [
|
||||||
['body', ''],
|
['body', ''],
|
||||||
|
@ -1,4 +1,3 @@
|
|||||||
import { PathToNode, ProgramMemory, SketchGroup, SourceRange } from './executor'
|
|
||||||
import { Selection, ToolTip } from '../useStore'
|
import { Selection, ToolTip } from '../useStore'
|
||||||
import {
|
import {
|
||||||
BinaryExpression,
|
BinaryExpression,
|
||||||
@ -10,7 +9,11 @@ import {
|
|||||||
VariableDeclaration,
|
VariableDeclaration,
|
||||||
ReturnStatement,
|
ReturnStatement,
|
||||||
ArrayExpression,
|
ArrayExpression,
|
||||||
} from './abstractSyntaxTreeTypes'
|
PathToNode,
|
||||||
|
ProgramMemory,
|
||||||
|
SketchGroup,
|
||||||
|
SourceRange,
|
||||||
|
} from './wasm'
|
||||||
import { createIdentifier, splitPathAtLastIndex } from './modifyAst'
|
import { createIdentifier, splitPathAtLastIndex } from './modifyAst'
|
||||||
import { getSketchSegmentFromSourceRange } from './std/sketchConstraints'
|
import { getSketchSegmentFromSourceRange } from './std/sketchConstraints'
|
||||||
import { getAngle } from '../lib/utils'
|
import { getAngle } from '../lib/utils'
|
||||||
|
@ -1,8 +1,5 @@
|
|||||||
import { recast } from './recast'
|
import { parse, Program, recast, initPromise } from './wasm'
|
||||||
import { parser_wasm } from './abstractSyntaxTree'
|
|
||||||
import { Program } from './abstractSyntaxTreeTypes'
|
|
||||||
import fs from 'node:fs'
|
import fs from 'node:fs'
|
||||||
import { initPromise } from './rust'
|
|
||||||
|
|
||||||
beforeAll(() => initPromise)
|
beforeAll(() => initPromise)
|
||||||
|
|
||||||
@ -366,6 +363,6 @@ describe('it recasts binary expression using brackets where needed', () => {
|
|||||||
// helpers
|
// helpers
|
||||||
|
|
||||||
function code2ast(code: string): { ast: Program } {
|
function code2ast(code: string): { ast: Program } {
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
return { ast }
|
return { ast }
|
||||||
}
|
}
|
||||||
|
@ -1,13 +0,0 @@
|
|||||||
import { Program } from './abstractSyntaxTreeTypes'
|
|
||||||
import { recast_wasm } from '../wasm-lib/pkg/wasm_lib'
|
|
||||||
|
|
||||||
export const recast = (ast: Program): string => {
|
|
||||||
try {
|
|
||||||
const s: string = recast_wasm(JSON.stringify(ast))
|
|
||||||
return s
|
|
||||||
} catch (e) {
|
|
||||||
// TODO: do something real with the error.
|
|
||||||
console.log('recast', e)
|
|
||||||
throw e
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,20 +0,0 @@
|
|||||||
import init from '../wasm-lib/pkg/wasm_lib'
|
|
||||||
|
|
||||||
const initialise = async () => {
|
|
||||||
const baseUrl =
|
|
||||||
typeof window === 'undefined'
|
|
||||||
? 'http://127.0.0.1:3000'
|
|
||||||
: window.location.origin.includes('tauri://localhost')
|
|
||||||
? 'tauri://localhost'
|
|
||||||
: window.location.origin.includes('localhost')
|
|
||||||
? 'http://localhost:3000'
|
|
||||||
: window.location.origin && window.location.origin !== 'null'
|
|
||||||
? window.location.origin
|
|
||||||
: 'http://localhost:3000'
|
|
||||||
const fullUrl = baseUrl + '/wasm_lib_bg.wasm'
|
|
||||||
const input = await fetch(fullUrl)
|
|
||||||
const buffer = await input.arrayBuffer()
|
|
||||||
return init(buffer)
|
|
||||||
}
|
|
||||||
|
|
||||||
export const initPromise = initialise()
|
|
@ -1,4 +1,9 @@
|
|||||||
import { ProgramMemory, SourceRange } from 'lang/executor'
|
import {
|
||||||
|
ProgramMemory,
|
||||||
|
SourceRange,
|
||||||
|
Program,
|
||||||
|
VariableDeclarator,
|
||||||
|
} from 'lang/wasm'
|
||||||
import { Selections } from 'useStore'
|
import { Selections } from 'useStore'
|
||||||
import { VITE_KC_API_WS_MODELING_URL, VITE_KC_CONNECTION_TIMEOUT_MS } from 'env'
|
import { VITE_KC_API_WS_MODELING_URL, VITE_KC_CONNECTION_TIMEOUT_MS } from 'env'
|
||||||
import { Models } from '@kittycad/lib'
|
import { Models } from '@kittycad/lib'
|
||||||
@ -6,7 +11,6 @@ import { exportSave } from 'lib/exportSave'
|
|||||||
import { v4 as uuidv4 } from 'uuid'
|
import { v4 as uuidv4 } from 'uuid'
|
||||||
import * as Sentry from '@sentry/react'
|
import * as Sentry from '@sentry/react'
|
||||||
import { getNodeFromPath, getNodePathFromSourceRange } from 'lang/queryAst'
|
import { getNodeFromPath, getNodePathFromSourceRange } from 'lang/queryAst'
|
||||||
import { Program, VariableDeclarator } from 'lang/abstractSyntaxTreeTypes'
|
|
||||||
|
|
||||||
let lastMessage = ''
|
let lastMessage = ''
|
||||||
|
|
||||||
@ -356,6 +360,11 @@ export class EngineConnection {
|
|||||||
if (this.shouldTrace()) {
|
if (this.shouldTrace()) {
|
||||||
iceSpan.resolve?.()
|
iceSpan.resolve?.()
|
||||||
}
|
}
|
||||||
|
} else if (this.pc?.iceConnectionState === 'failed') {
|
||||||
|
// failed is a terminal state; let's explicitly kill the
|
||||||
|
// connection to the server at this point.
|
||||||
|
console.log('failed to negotiate ice connection; restarting')
|
||||||
|
this.close()
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -487,9 +496,11 @@ export class EngineConnection {
|
|||||||
|
|
||||||
this.onDataChannelOpen(this)
|
this.onDataChannelOpen(this)
|
||||||
|
|
||||||
this.onEngineConnectionOpen(this)
|
|
||||||
this.ready = true
|
this.ready = true
|
||||||
this.connecting = false
|
this.connecting = false
|
||||||
|
// Do this after we set the connection is ready to avoid errors when
|
||||||
|
// we try to send messages before the connection is ready.
|
||||||
|
this.onEngineConnectionOpen(this)
|
||||||
})
|
})
|
||||||
|
|
||||||
this.unreliableDataChannel.addEventListener('close', (event) => {
|
this.unreliableDataChannel.addEventListener('close', (event) => {
|
||||||
@ -582,6 +593,9 @@ export class EngineCommandManager {
|
|||||||
outSequence = 1
|
outSequence = 1
|
||||||
inSequence = 1
|
inSequence = 1
|
||||||
engineConnection?: EngineConnection
|
engineConnection?: EngineConnection
|
||||||
|
// Folks should realize that wait for ready does not get called _everytime_
|
||||||
|
// the connection resets and restarts, it only gets called the first time.
|
||||||
|
// Be careful what you put here.
|
||||||
waitForReady: Promise<void> = new Promise(() => {})
|
waitForReady: Promise<void> = new Promise(() => {})
|
||||||
private resolveReady = () => {}
|
private resolveReady = () => {}
|
||||||
|
|
||||||
@ -595,19 +609,36 @@ export class EngineCommandManager {
|
|||||||
[localUnsubscribeId: string]: (a: any) => void
|
[localUnsubscribeId: string]: (a: any) => void
|
||||||
}
|
}
|
||||||
} = {} as any
|
} = {} as any
|
||||||
constructor({
|
|
||||||
|
constructor() {
|
||||||
|
this.engineConnection = undefined
|
||||||
|
}
|
||||||
|
|
||||||
|
start({
|
||||||
setMediaStream,
|
setMediaStream,
|
||||||
setIsStreamReady,
|
setIsStreamReady,
|
||||||
width,
|
width,
|
||||||
height,
|
height,
|
||||||
|
executeCode,
|
||||||
token,
|
token,
|
||||||
}: {
|
}: {
|
||||||
setMediaStream: (stream: MediaStream) => void
|
setMediaStream: (stream: MediaStream) => void
|
||||||
setIsStreamReady: (isStreamReady: boolean) => void
|
setIsStreamReady: (isStreamReady: boolean) => void
|
||||||
width: number
|
width: number
|
||||||
height: number
|
height: number
|
||||||
|
executeCode: (code?: string, force?: boolean) => void
|
||||||
token?: string
|
token?: string
|
||||||
}) {
|
}) {
|
||||||
|
if (width === 0 || height === 0) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we already have an engine connection, just need to resize the stream.
|
||||||
|
if (this.engineConnection) {
|
||||||
|
this.handleResize({ streamWidth: width, streamHeight: height })
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
this.waitForReady = new Promise((resolve) => {
|
this.waitForReady = new Promise((resolve) => {
|
||||||
this.resolveReady = resolve
|
this.resolveReady = resolve
|
||||||
})
|
})
|
||||||
@ -618,6 +649,32 @@ export class EngineCommandManager {
|
|||||||
onEngineConnectionOpen: () => {
|
onEngineConnectionOpen: () => {
|
||||||
this.resolveReady()
|
this.resolveReady()
|
||||||
setIsStreamReady(true)
|
setIsStreamReady(true)
|
||||||
|
|
||||||
|
// Make the axis gizmo.
|
||||||
|
// We do this after the connection opened to avoid a race condition.
|
||||||
|
// Connected opened is the last thing that happens when the stream
|
||||||
|
// is ready.
|
||||||
|
// We also do this here because we want to ensure we create the gizmo
|
||||||
|
// and execute the code everytime the stream is restarted.
|
||||||
|
const gizmoId = uuidv4()
|
||||||
|
this.sendSceneCommand({
|
||||||
|
type: 'modeling_cmd_req',
|
||||||
|
cmd_id: gizmoId,
|
||||||
|
cmd: {
|
||||||
|
type: 'make_axes_gizmo',
|
||||||
|
clobber: false,
|
||||||
|
// If true, axes gizmo will be placed in the corner of the screen.
|
||||||
|
// If false, it will be placed at the origin of the scene.
|
||||||
|
gizmo_mode: true,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
// We execute the code here to make sure if the stream was to
|
||||||
|
// restart in a session, we want to make sure to execute the code.
|
||||||
|
// We force it to re-execute the code because we want to make sure
|
||||||
|
// the code is executed everytime the stream is restarted.
|
||||||
|
// We pass undefined for the code so it reads from the current state.
|
||||||
|
executeCode(undefined, true)
|
||||||
},
|
},
|
||||||
onClose: () => {
|
onClose: () => {
|
||||||
setIsStreamReady(false)
|
setIsStreamReady(false)
|
||||||
@ -689,6 +746,30 @@ export class EngineCommandManager {
|
|||||||
|
|
||||||
this.engineConnection?.connect()
|
this.engineConnection?.connect()
|
||||||
}
|
}
|
||||||
|
handleResize({
|
||||||
|
streamWidth,
|
||||||
|
streamHeight,
|
||||||
|
}: {
|
||||||
|
streamWidth: number
|
||||||
|
streamHeight: number
|
||||||
|
}) {
|
||||||
|
console.log('handleResize', streamWidth, streamHeight)
|
||||||
|
if (!this.engineConnection?.isReady()) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const resizeCmd: EngineCommand = {
|
||||||
|
type: 'modeling_cmd_req',
|
||||||
|
cmd_id: uuidv4(),
|
||||||
|
cmd: {
|
||||||
|
type: 'reconfigure_stream',
|
||||||
|
width: streamWidth,
|
||||||
|
height: streamHeight,
|
||||||
|
fps: 60,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
this.engineConnection?.send(resizeCmd)
|
||||||
|
}
|
||||||
handleModelingCommand(message: WebSocketResponse, id: string) {
|
handleModelingCommand(message: WebSocketResponse, id: string) {
|
||||||
if (message.type !== 'modeling') {
|
if (message.type !== 'modeling') {
|
||||||
return
|
return
|
||||||
@ -854,6 +935,14 @@ export class EngineCommandManager {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
sendSceneCommand(command: EngineCommand): Promise<any> {
|
sendSceneCommand(command: EngineCommand): Promise<any> {
|
||||||
|
if (this.engineConnection === undefined) {
|
||||||
|
return Promise.resolve()
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.engineConnection?.isReady()) {
|
||||||
|
return Promise.resolve()
|
||||||
|
}
|
||||||
|
|
||||||
if (
|
if (
|
||||||
command.type === 'modeling_cmd_req' &&
|
command.type === 'modeling_cmd_req' &&
|
||||||
command.cmd.type !== lastMessage
|
command.cmd.type !== lastMessage
|
||||||
@ -861,9 +950,6 @@ export class EngineCommandManager {
|
|||||||
console.log('sending command', command.cmd.type)
|
console.log('sending command', command.cmd.type)
|
||||||
lastMessage = command.cmd.type
|
lastMessage = command.cmd.type
|
||||||
}
|
}
|
||||||
if (!this.engineConnection?.isReady()) {
|
|
||||||
return Promise.resolve()
|
|
||||||
}
|
|
||||||
if (command.type !== 'modeling_cmd_req') return Promise.resolve()
|
if (command.type !== 'modeling_cmd_req') return Promise.resolve()
|
||||||
const cmd = command.cmd
|
const cmd = command.cmd
|
||||||
if (
|
if (
|
||||||
@ -905,6 +991,9 @@ export class EngineCommandManager {
|
|||||||
range: SourceRange
|
range: SourceRange
|
||||||
command: EngineCommand | string
|
command: EngineCommand | string
|
||||||
}): Promise<any> {
|
}): Promise<any> {
|
||||||
|
if (this.engineConnection === undefined) {
|
||||||
|
return Promise.resolve()
|
||||||
|
}
|
||||||
this.sourceRangeMap[id] = range
|
this.sourceRangeMap[id] = range
|
||||||
|
|
||||||
if (!this.engineConnection?.isReady()) {
|
if (!this.engineConnection?.isReady()) {
|
||||||
@ -950,6 +1039,9 @@ export class EngineCommandManager {
|
|||||||
rangeStr: string,
|
rangeStr: string,
|
||||||
commandStr: string
|
commandStr: string
|
||||||
): Promise<any> {
|
): Promise<any> {
|
||||||
|
if (this.engineConnection === undefined) {
|
||||||
|
return Promise.resolve()
|
||||||
|
}
|
||||||
if (id === undefined) {
|
if (id === undefined) {
|
||||||
throw new Error('id is undefined')
|
throw new Error('id is undefined')
|
||||||
}
|
}
|
||||||
@ -1000,6 +1092,9 @@ export class EngineCommandManager {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
private async fixIdMappings(ast: Program, programMemory: ProgramMemory) {
|
private async fixIdMappings(ast: Program, programMemory: ProgramMemory) {
|
||||||
|
if (this.engineConnection === undefined) {
|
||||||
|
return
|
||||||
|
}
|
||||||
/* This is a temporary solution since the cmd_ids that are sent through when
|
/* This is a temporary solution since the cmd_ids that are sent through when
|
||||||
sending 'extend_path' ids are not used as the segment ids.
|
sending 'extend_path' ids are not used as the segment ids.
|
||||||
|
|
||||||
@ -1079,3 +1174,5 @@ export class EngineCommandManager {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export const engineCommandManager = new EngineCommandManager()
|
||||||
|
@ -6,13 +6,9 @@ import {
|
|||||||
getXComponent,
|
getXComponent,
|
||||||
addCloseToPipe,
|
addCloseToPipe,
|
||||||
} from './sketch'
|
} from './sketch'
|
||||||
import { parser_wasm } from '../abstractSyntaxTree'
|
import { parse, recast, initPromise } from '../wasm'
|
||||||
import { getNodePathFromSourceRange } from '../queryAst'
|
import { getNodePathFromSourceRange } from '../queryAst'
|
||||||
import { recast } from '../recast'
|
|
||||||
import { enginelessExecutor } from '../../lib/testHelpers'
|
import { enginelessExecutor } from '../../lib/testHelpers'
|
||||||
import { initPromise } from '../rust'
|
|
||||||
|
|
||||||
beforeAll(() => initPromise)
|
|
||||||
|
|
||||||
const eachQuad: [number, [number, number]][] = [
|
const eachQuad: [number, [number, number]][] = [
|
||||||
[-315, [1, 1]],
|
[-315, [1, 1]],
|
||||||
@ -29,6 +25,8 @@ const eachQuad: [number, [number, number]][] = [
|
|||||||
[675, [1, -1]],
|
[675, [1, -1]],
|
||||||
]
|
]
|
||||||
|
|
||||||
|
beforeAll(() => initPromise)
|
||||||
|
|
||||||
describe('testing getYComponent', () => {
|
describe('testing getYComponent', () => {
|
||||||
it('should return the vertical component of a vector correctly when given angles in each quadrant (and with angles < 0, or > 360)', () => {
|
it('should return the vertical component of a vector correctly when given angles in each quadrant (and with angles < 0, or > 360)', () => {
|
||||||
const expected: [number, number][] = []
|
const expected: [number, number][] = []
|
||||||
@ -106,7 +104,7 @@ show(mySketch001)
|
|||||||
`
|
`
|
||||||
const code = genCode(lineToChange)
|
const code = genCode(lineToChange)
|
||||||
const expectedCode = genCode(lineAfterChange)
|
const expectedCode = genCode(lineAfterChange)
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const programMemory = await enginelessExecutor(ast)
|
const programMemory = await enginelessExecutor(ast)
|
||||||
const sourceStart = code.indexOf(lineToChange)
|
const sourceStart = code.indexOf(lineToChange)
|
||||||
const { modifiedAst } = changeSketchArguments(
|
const { modifiedAst } = changeSketchArguments(
|
||||||
@ -144,7 +142,7 @@ const mySketch001 = startSketchAt([0, 0])
|
|||||||
|> lineTo([-1.59, -1.54], %)
|
|> lineTo([-1.59, -1.54], %)
|
||||||
|> lineTo([0.46, -5.82], %)
|
|> lineTo([0.46, -5.82], %)
|
||||||
show(mySketch001)`
|
show(mySketch001)`
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const programMemory = await enginelessExecutor(ast)
|
const programMemory = await enginelessExecutor(ast)
|
||||||
const sourceStart = code.indexOf(lineToChange)
|
const sourceStart = code.indexOf(lineToChange)
|
||||||
expect(sourceStart).toBe(66)
|
expect(sourceStart).toBe(66)
|
||||||
@ -205,7 +203,7 @@ describe('testing addTagForSketchOnFace', () => {
|
|||||||
show(mySketch001)
|
show(mySketch001)
|
||||||
`
|
`
|
||||||
const code = genCode(originalLine)
|
const code = genCode(originalLine)
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const programMemory = await enginelessExecutor(ast)
|
const programMemory = await enginelessExecutor(ast)
|
||||||
const sourceStart = code.indexOf(originalLine)
|
const sourceStart = code.indexOf(originalLine)
|
||||||
const sourceRange: [number, number] = [
|
const sourceRange: [number, number] = [
|
||||||
|
@ -4,9 +4,6 @@ import {
|
|||||||
SketchGroup,
|
SketchGroup,
|
||||||
SourceRange,
|
SourceRange,
|
||||||
PathToNode,
|
PathToNode,
|
||||||
MemoryItem,
|
|
||||||
} from '../executor'
|
|
||||||
import {
|
|
||||||
Program,
|
Program,
|
||||||
PipeExpression,
|
PipeExpression,
|
||||||
CallExpression,
|
CallExpression,
|
||||||
@ -14,7 +11,7 @@ import {
|
|||||||
Value,
|
Value,
|
||||||
Literal,
|
Literal,
|
||||||
VariableDeclaration,
|
VariableDeclaration,
|
||||||
} from '../abstractSyntaxTreeTypes'
|
} from '../wasm'
|
||||||
import {
|
import {
|
||||||
getNodeFromPath,
|
getNodeFromPath,
|
||||||
getNodeFromPathCurry,
|
getNodeFromPathCurry,
|
||||||
@ -38,7 +35,6 @@ import {
|
|||||||
findUniqueName,
|
findUniqueName,
|
||||||
} from '../modifyAst'
|
} from '../modifyAst'
|
||||||
import { roundOff, getLength, getAngle } from '../../lib/utils'
|
import { roundOff, getLength, getAngle } from '../../lib/utils'
|
||||||
import { getSketchSegmentFromSourceRange } from './sketchConstraints'
|
|
||||||
import { perpendicularDistance } from 'sketch-helpers'
|
import { perpendicularDistance } from 'sketch-helpers'
|
||||||
|
|
||||||
export type Coords2d = [number, number]
|
export type Coords2d = [number, number]
|
||||||
|
@ -1,12 +1,9 @@
|
|||||||
import { parser_wasm } from '../abstractSyntaxTree'
|
import { parse, SketchGroup, recast, initPromise } from '../wasm'
|
||||||
import { SketchGroup } from '../executor'
|
|
||||||
import {
|
import {
|
||||||
ConstraintType,
|
ConstraintType,
|
||||||
getTransformInfos,
|
getTransformInfos,
|
||||||
transformAstSketchLines,
|
transformAstSketchLines,
|
||||||
} from './sketchcombos'
|
} from './sketchcombos'
|
||||||
import { recast } from '../recast'
|
|
||||||
import { initPromise } from '../rust'
|
|
||||||
import { getSketchSegmentFromSourceRange } from './sketchConstraints'
|
import { getSketchSegmentFromSourceRange } from './sketchConstraints'
|
||||||
import { Selection } from '../../useStore'
|
import { Selection } from '../../useStore'
|
||||||
import { enginelessExecutor } from '../../lib/testHelpers'
|
import { enginelessExecutor } from '../../lib/testHelpers'
|
||||||
@ -31,7 +28,7 @@ async function testingSwapSketchFnCall({
|
|||||||
type: 'default',
|
type: 'default',
|
||||||
range: [startIndex, startIndex + callToSwap.length],
|
range: [startIndex, startIndex + callToSwap.length],
|
||||||
}
|
}
|
||||||
const ast = parser_wasm(inputCode)
|
const ast = parse(inputCode)
|
||||||
const programMemory = await enginelessExecutor(ast)
|
const programMemory = await enginelessExecutor(ast)
|
||||||
const selections = {
|
const selections = {
|
||||||
codeBasedSelections: [range],
|
codeBasedSelections: [range],
|
||||||
@ -381,7 +378,7 @@ const part001 = startSketchAt([0, 0.04]) // segment-in-start
|
|||||||
|> xLine(3.54, %)
|
|> xLine(3.54, %)
|
||||||
show(part001)`
|
show(part001)`
|
||||||
it('normal case works', async () => {
|
it('normal case works', async () => {
|
||||||
const programMemory = await enginelessExecutor(parser_wasm(code))
|
const programMemory = await enginelessExecutor(parse(code))
|
||||||
const index = code.indexOf('// normal-segment') - 7
|
const index = code.indexOf('// normal-segment') - 7
|
||||||
const { __geoMeta, ...segment } = getSketchSegmentFromSourceRange(
|
const { __geoMeta, ...segment } = getSketchSegmentFromSourceRange(
|
||||||
programMemory.root['part001'] as SketchGroup,
|
programMemory.root['part001'] as SketchGroup,
|
||||||
@ -395,7 +392,7 @@ show(part001)`
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
it('verify it works when the segment is in the `start` property', async () => {
|
it('verify it works when the segment is in the `start` property', async () => {
|
||||||
const programMemory = await enginelessExecutor(parser_wasm(code))
|
const programMemory = await enginelessExecutor(parse(code))
|
||||||
const index = code.indexOf('// segment-in-start') - 7
|
const index = code.indexOf('// segment-in-start') - 7
|
||||||
const { __geoMeta, ...segment } = getSketchSegmentFromSourceRange(
|
const { __geoMeta, ...segment } = getSketchSegmentFromSourceRange(
|
||||||
programMemory.root['part001'] as SketchGroup,
|
programMemory.root['part001'] as SketchGroup,
|
||||||
|
@ -3,8 +3,10 @@ import {
|
|||||||
Program,
|
Program,
|
||||||
VariableDeclarator,
|
VariableDeclarator,
|
||||||
CallExpression,
|
CallExpression,
|
||||||
} from '../abstractSyntaxTreeTypes'
|
SketchGroup,
|
||||||
import { SketchGroup, SourceRange, Path } from '../executor'
|
SourceRange,
|
||||||
|
Path,
|
||||||
|
} from '../wasm'
|
||||||
|
|
||||||
export function getSketchSegmentFromSourceRange(
|
export function getSketchSegmentFromSourceRange(
|
||||||
sketchGroup: SketchGroup,
|
sketchGroup: SketchGroup,
|
||||||
|
@ -1,5 +1,4 @@
|
|||||||
import { parser_wasm } from '../abstractSyntaxTree'
|
import { parse, Value, recast, initPromise } from '../wasm'
|
||||||
import { Value } from '../abstractSyntaxTreeTypes'
|
|
||||||
import {
|
import {
|
||||||
getConstraintType,
|
getConstraintType,
|
||||||
getTransformInfos,
|
getTransformInfos,
|
||||||
@ -8,10 +7,8 @@ import {
|
|||||||
ConstraintType,
|
ConstraintType,
|
||||||
getConstraintLevelFromSourceRange,
|
getConstraintLevelFromSourceRange,
|
||||||
} from './sketchcombos'
|
} from './sketchcombos'
|
||||||
import { initPromise } from '../rust'
|
|
||||||
import { Selections, ToolTip } from '../../useStore'
|
import { Selections, ToolTip } from '../../useStore'
|
||||||
import { enginelessExecutor } from '../../lib/testHelpers'
|
import { enginelessExecutor } from '../../lib/testHelpers'
|
||||||
import { recast } from '../../lang/recast'
|
|
||||||
|
|
||||||
beforeAll(() => initPromise)
|
beforeAll(() => initPromise)
|
||||||
|
|
||||||
@ -63,7 +60,7 @@ describe('testing getConstraintType', () => {
|
|||||||
function getConstraintTypeFromSourceHelper(
|
function getConstraintTypeFromSourceHelper(
|
||||||
code: string
|
code: string
|
||||||
): ReturnType<typeof getConstraintType> {
|
): ReturnType<typeof getConstraintType> {
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const args = (ast.body[0] as any).expression.arguments[0].elements as [
|
const args = (ast.body[0] as any).expression.arguments[0].elements as [
|
||||||
Value,
|
Value,
|
||||||
Value
|
Value
|
||||||
@ -74,7 +71,7 @@ function getConstraintTypeFromSourceHelper(
|
|||||||
function getConstraintTypeFromSourceHelper2(
|
function getConstraintTypeFromSourceHelper2(
|
||||||
code: string
|
code: string
|
||||||
): ReturnType<typeof getConstraintType> {
|
): ReturnType<typeof getConstraintType> {
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const arg = (ast.body[0] as any).expression.arguments[0] as Value
|
const arg = (ast.body[0] as any).expression.arguments[0] as Value
|
||||||
const fnName = (ast.body[0] as any).expression.callee.name as ToolTip
|
const fnName = (ast.body[0] as any).expression.callee.name as ToolTip
|
||||||
return getConstraintType(arg, fnName)
|
return getConstraintType(arg, fnName)
|
||||||
@ -199,7 +196,7 @@ const part001 = startSketchAt([0, 0])
|
|||||||
show(part001)
|
show(part001)
|
||||||
`
|
`
|
||||||
it('should transform the ast', async () => {
|
it('should transform the ast', async () => {
|
||||||
const ast = parser_wasm(inputScript)
|
const ast = parse(inputScript)
|
||||||
const selectionRanges: Selections['codeBasedSelections'] = inputScript
|
const selectionRanges: Selections['codeBasedSelections'] = inputScript
|
||||||
.split('\n')
|
.split('\n')
|
||||||
.filter((ln) => ln.includes('//'))
|
.filter((ln) => ln.includes('//'))
|
||||||
@ -286,7 +283,7 @@ const part001 = startSketchAt([0, 0])
|
|||||||
|> angledLineToY([301, myVar], %) // select for vertical constraint 10
|
|> angledLineToY([301, myVar], %) // select for vertical constraint 10
|
||||||
show(part001)
|
show(part001)
|
||||||
`
|
`
|
||||||
const ast = parser_wasm(inputScript)
|
const ast = parse(inputScript)
|
||||||
const selectionRanges: Selections['codeBasedSelections'] = inputScript
|
const selectionRanges: Selections['codeBasedSelections'] = inputScript
|
||||||
.split('\n')
|
.split('\n')
|
||||||
.filter((ln) => ln.includes('// select for horizontal constraint'))
|
.filter((ln) => ln.includes('// select for horizontal constraint'))
|
||||||
@ -344,7 +341,7 @@ const part001 = startSketchAt([0, 0])
|
|||||||
|> yLineTo(myVar, %) // select for vertical constraint 10
|
|> yLineTo(myVar, %) // select for vertical constraint 10
|
||||||
show(part001)
|
show(part001)
|
||||||
`
|
`
|
||||||
const ast = parser_wasm(inputScript)
|
const ast = parse(inputScript)
|
||||||
const selectionRanges: Selections['codeBasedSelections'] = inputScript
|
const selectionRanges: Selections['codeBasedSelections'] = inputScript
|
||||||
.split('\n')
|
.split('\n')
|
||||||
.filter((ln) => ln.includes('// select for vertical constraint'))
|
.filter((ln) => ln.includes('// select for vertical constraint'))
|
||||||
@ -435,7 +432,7 @@ async function helperThing(
|
|||||||
linesOfInterest: string[],
|
linesOfInterest: string[],
|
||||||
constraint: ConstraintType
|
constraint: ConstraintType
|
||||||
): Promise<string> {
|
): Promise<string> {
|
||||||
const ast = parser_wasm(inputScript)
|
const ast = parse(inputScript)
|
||||||
const selectionRanges: Selections['codeBasedSelections'] = inputScript
|
const selectionRanges: Selections['codeBasedSelections'] = inputScript
|
||||||
.split('\n')
|
.split('\n')
|
||||||
.filter((ln) =>
|
.filter((ln) =>
|
||||||
@ -498,7 +495,7 @@ const part001 = startSketchAt([-0.01, -0.05])
|
|||||||
|> xLine(-3.43 + 0, %) // full
|
|> xLine(-3.43 + 0, %) // full
|
||||||
|> angledLineOfXLength([243 + 0, 1.2 + 0], %) // full
|
|> angledLineOfXLength([243 + 0, 1.2 + 0], %) // full
|
||||||
show(part001)`
|
show(part001)`
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const constraintLevels: ReturnType<
|
const constraintLevels: ReturnType<
|
||||||
typeof getConstraintLevelFromSourceRange
|
typeof getConstraintLevelFromSourceRange
|
||||||
>[] = ['full', 'partial', 'free']
|
>[] = ['full', 'partial', 'free']
|
||||||
|
@ -6,7 +6,9 @@ import {
|
|||||||
Value,
|
Value,
|
||||||
BinaryPart,
|
BinaryPart,
|
||||||
VariableDeclarator,
|
VariableDeclarator,
|
||||||
} from '../abstractSyntaxTreeTypes'
|
PathToNode,
|
||||||
|
ProgramMemory,
|
||||||
|
} from '../wasm'
|
||||||
import {
|
import {
|
||||||
getNodeFromPath,
|
getNodeFromPath,
|
||||||
getNodeFromPathCurry,
|
getNodeFromPathCurry,
|
||||||
@ -25,10 +27,8 @@ import {
|
|||||||
giveSketchFnCallTag,
|
giveSketchFnCallTag,
|
||||||
} from '../modifyAst'
|
} from '../modifyAst'
|
||||||
import { createFirstArg, getFirstArg, replaceSketchLine } from './sketch'
|
import { createFirstArg, getFirstArg, replaceSketchLine } from './sketch'
|
||||||
import { PathToNode, ProgramMemory } from '../executor'
|
|
||||||
import { getSketchSegmentFromSourceRange } from './sketchConstraints'
|
import { getSketchSegmentFromSourceRange } from './sketchConstraints'
|
||||||
import { getAngle, roundOff, normaliseAngle } from '../../lib/utils'
|
import { getAngle, roundOff, normaliseAngle } from '../../lib/utils'
|
||||||
import { MemoryItem } from 'wasm-lib/kcl/bindings/MemoryItem'
|
|
||||||
|
|
||||||
type LineInputsType =
|
type LineInputsType =
|
||||||
| 'xAbsolute'
|
| 'xAbsolute'
|
||||||
@ -1279,7 +1279,7 @@ export function getTransformInfos(
|
|||||||
}) as TransformInfo[]
|
}) as TransformInfo[]
|
||||||
return theTransforms
|
return theTransforms
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.log(error)
|
console.log('error', error)
|
||||||
return []
|
return []
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,6 +1,5 @@
|
|||||||
import { parser_wasm } from '../abstractSyntaxTree'
|
import { parse, initPromise } from '../wasm'
|
||||||
import { enginelessExecutor } from '../../lib/testHelpers'
|
import { enginelessExecutor } from '../../lib/testHelpers'
|
||||||
import { initPromise } from '../rust'
|
|
||||||
|
|
||||||
beforeAll(() => initPromise)
|
beforeAll(() => initPromise)
|
||||||
|
|
||||||
@ -17,9 +16,9 @@ describe('testing angledLineThatIntersects', () => {
|
|||||||
}, %)
|
}, %)
|
||||||
const intersect = segEndX('yo2', part001)
|
const intersect = segEndX('yo2', part001)
|
||||||
show(part001)`
|
show(part001)`
|
||||||
const { root } = await enginelessExecutor(parser_wasm(code('-1')))
|
const { root } = await enginelessExecutor(parse(code('-1')))
|
||||||
expect(root.intersect.value).toBe(1 + Math.sqrt(2))
|
expect(root.intersect.value).toBe(1 + Math.sqrt(2))
|
||||||
const { root: noOffset } = await enginelessExecutor(parser_wasm(code('0')))
|
const { root: noOffset } = await enginelessExecutor(parse(code('0')))
|
||||||
expect(noOffset.intersect.value).toBeCloseTo(1)
|
expect(noOffset.intersect.value).toBeCloseTo(1)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
@ -1,7 +1,12 @@
|
|||||||
import { ProgramMemory, Path, SourceRange } from '../executor'
|
import {
|
||||||
import { Program, Value } from '../abstractSyntaxTreeTypes'
|
ProgramMemory,
|
||||||
|
Path,
|
||||||
|
SourceRange,
|
||||||
|
Program,
|
||||||
|
Value,
|
||||||
|
PathToNode,
|
||||||
|
} from '../wasm'
|
||||||
import { ToolTip } from '../../useStore'
|
import { ToolTip } from '../../useStore'
|
||||||
import { PathToNode } from '../executor'
|
|
||||||
import { EngineCommandManager } from './engineConnection'
|
import { EngineCommandManager } from './engineConnection'
|
||||||
|
|
||||||
export interface InternalFirstArg {
|
export interface InternalFirstArg {
|
||||||
|
@ -1,5 +1,4 @@
|
|||||||
import { lexer, asyncLexer } from './tokeniser'
|
import { lexer, initPromise } from './wasm'
|
||||||
import { initPromise } from './rust'
|
|
||||||
|
|
||||||
beforeAll(() => initPromise)
|
beforeAll(() => initPromise)
|
||||||
|
|
||||||
@ -10,9 +9,9 @@ describe('testing lexer', () => {
|
|||||||
const code3 = `const yo = 45 /* this is a comment
|
const code3 = `const yo = 45 /* this is a comment
|
||||||
const ya = 6 */
|
const ya = 6 */
|
||||||
const yi=45`
|
const yi=45`
|
||||||
expect(await asyncLexer(code)).toEqual(lexer(code))
|
expect(lexer(code)).toEqual(lexer(code))
|
||||||
expect(await asyncLexer(code2)).toEqual(lexer(code2))
|
expect(lexer(code2)).toEqual(lexer(code2))
|
||||||
expect(await asyncLexer(code3)).toEqual(lexer(code3))
|
expect(lexer(code3)).toEqual(lexer(code3))
|
||||||
})
|
})
|
||||||
it('test lexer', () => {
|
it('test lexer', () => {
|
||||||
expect(stringSummaryLexer('1 + 2')).toEqual([
|
expect(stringSummaryLexer('1 + 2')).toEqual([
|
||||||
|
@ -1,28 +0,0 @@
|
|||||||
import { lexer_js } from '../wasm-lib/pkg/wasm_lib'
|
|
||||||
import { initPromise } from './rust'
|
|
||||||
import { Token } from '../wasm-lib/kcl/bindings/Token'
|
|
||||||
|
|
||||||
export type { Token } from '../wasm-lib/kcl/bindings/Token'
|
|
||||||
|
|
||||||
export async function asyncLexer(str: string): Promise<Token[]> {
|
|
||||||
await initPromise
|
|
||||||
try {
|
|
||||||
const tokens: Token[] = lexer_js(str)
|
|
||||||
return tokens
|
|
||||||
} catch (e) {
|
|
||||||
// TODO: do something real with the error.
|
|
||||||
console.log('lexer', e)
|
|
||||||
throw e
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function lexer(str: string): Token[] {
|
|
||||||
try {
|
|
||||||
const tokens: Token[] = lexer_js(str)
|
|
||||||
return tokens
|
|
||||||
} catch (e) {
|
|
||||||
// TODO: do something real with the error.
|
|
||||||
console.log('lexer', e)
|
|
||||||
throw e
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,6 +1,5 @@
|
|||||||
import { Selections, StoreState } from '../useStore'
|
import { Selections, StoreState } from '../useStore'
|
||||||
import { Program } from './abstractSyntaxTreeTypes'
|
import { Program, PathToNode } from './wasm'
|
||||||
import { PathToNode } from './executor'
|
|
||||||
import { getNodeFromPath } from './queryAst'
|
import { getNodeFromPath } from './queryAst'
|
||||||
|
|
||||||
export function updateCursors(
|
export function updateCursors(
|
||||||
|
215
src/lang/wasm.ts
Normal file
215
src/lang/wasm.ts
Normal file
@ -0,0 +1,215 @@
|
|||||||
|
import init, {
|
||||||
|
parse_wasm,
|
||||||
|
recast_wasm,
|
||||||
|
execute_wasm,
|
||||||
|
lexer_wasm,
|
||||||
|
modify_ast_for_sketch_wasm,
|
||||||
|
} from '../wasm-lib/pkg/wasm_lib'
|
||||||
|
import { KCLError } from './errors'
|
||||||
|
import { KclError as RustKclError } from '../wasm-lib/kcl/bindings/KclError'
|
||||||
|
import {
|
||||||
|
EngineCommandManager,
|
||||||
|
ArtifactMap,
|
||||||
|
SourceRangeMap,
|
||||||
|
} from './std/engineConnection'
|
||||||
|
import { ProgramReturn } from '../wasm-lib/kcl/bindings/ProgramReturn'
|
||||||
|
import { MemoryItem } from '../wasm-lib/kcl/bindings/MemoryItem'
|
||||||
|
import type { Program } from '../wasm-lib/kcl/bindings/Program'
|
||||||
|
import type { Token } from '../wasm-lib/kcl/bindings/Token'
|
||||||
|
|
||||||
|
export type { Program } from '../wasm-lib/kcl/bindings/Program'
|
||||||
|
export type { Value } from '../wasm-lib/kcl/bindings/Value'
|
||||||
|
export type { ObjectExpression } from '../wasm-lib/kcl/bindings/ObjectExpression'
|
||||||
|
export type { MemberExpression } from '../wasm-lib/kcl/bindings/MemberExpression'
|
||||||
|
export type { PipeExpression } from '../wasm-lib/kcl/bindings/PipeExpression'
|
||||||
|
export type { VariableDeclaration } from '../wasm-lib/kcl/bindings/VariableDeclaration'
|
||||||
|
export type { PipeSubstitution } from '../wasm-lib/kcl/bindings/PipeSubstitution'
|
||||||
|
export type { Identifier } from '../wasm-lib/kcl/bindings/Identifier'
|
||||||
|
export type { UnaryExpression } from '../wasm-lib/kcl/bindings/UnaryExpression'
|
||||||
|
export type { BinaryExpression } from '../wasm-lib/kcl/bindings/BinaryExpression'
|
||||||
|
export type { ReturnStatement } from '../wasm-lib/kcl/bindings/ReturnStatement'
|
||||||
|
export type { ExpressionStatement } from '../wasm-lib/kcl/bindings/ExpressionStatement'
|
||||||
|
export type { CallExpression } from '../wasm-lib/kcl/bindings/CallExpression'
|
||||||
|
export type { VariableDeclarator } from '../wasm-lib/kcl/bindings/VariableDeclarator'
|
||||||
|
export type { BinaryPart } from '../wasm-lib/kcl/bindings/BinaryPart'
|
||||||
|
export type { Literal } from '../wasm-lib/kcl/bindings/Literal'
|
||||||
|
export type { ArrayExpression } from '../wasm-lib/kcl/bindings/ArrayExpression'
|
||||||
|
|
||||||
|
export type SyntaxType =
|
||||||
|
| 'Program'
|
||||||
|
| 'ExpressionStatement'
|
||||||
|
| 'BinaryExpression'
|
||||||
|
| 'CallExpression'
|
||||||
|
| 'Identifier'
|
||||||
|
| 'ReturnStatement'
|
||||||
|
| 'VariableDeclaration'
|
||||||
|
| 'VariableDeclarator'
|
||||||
|
| 'MemberExpression'
|
||||||
|
| 'ArrayExpression'
|
||||||
|
| 'ObjectExpression'
|
||||||
|
| 'ObjectProperty'
|
||||||
|
| 'FunctionExpression'
|
||||||
|
| 'PipeExpression'
|
||||||
|
| 'PipeSubstitution'
|
||||||
|
| 'Literal'
|
||||||
|
| 'NonCodeNode'
|
||||||
|
| 'UnaryExpression'
|
||||||
|
|
||||||
|
export type { SourceRange } from '../wasm-lib/kcl/bindings/SourceRange'
|
||||||
|
export type { Position } from '../wasm-lib/kcl/bindings/Position'
|
||||||
|
export type { Rotation } from '../wasm-lib/kcl/bindings/Rotation'
|
||||||
|
export type { Path } from '../wasm-lib/kcl/bindings/Path'
|
||||||
|
export type { SketchGroup } from '../wasm-lib/kcl/bindings/SketchGroup'
|
||||||
|
export type { MemoryItem } from '../wasm-lib/kcl/bindings/MemoryItem'
|
||||||
|
export type { ExtrudeSurface } from '../wasm-lib/kcl/bindings/ExtrudeSurface'
|
||||||
|
|
||||||
|
// Initialise the wasm module.
|
||||||
|
const initialise = async () => {
|
||||||
|
const baseUrl =
|
||||||
|
typeof window === 'undefined'
|
||||||
|
? 'http://127.0.0.1:3000'
|
||||||
|
: window.location.origin.includes('tauri://localhost')
|
||||||
|
? 'tauri://localhost'
|
||||||
|
: window.location.origin.includes('localhost')
|
||||||
|
? 'http://localhost:3000'
|
||||||
|
: window.location.origin && window.location.origin !== 'null'
|
||||||
|
? window.location.origin
|
||||||
|
: 'http://localhost:3000'
|
||||||
|
const fullUrl = baseUrl + '/wasm_lib_bg.wasm'
|
||||||
|
const input = await fetch(fullUrl)
|
||||||
|
const buffer = await input.arrayBuffer()
|
||||||
|
return init(buffer)
|
||||||
|
}
|
||||||
|
|
||||||
|
export const initPromise = initialise()
|
||||||
|
|
||||||
|
export const rangeTypeFix = (ranges: number[][]): [number, number][] =>
|
||||||
|
ranges.map(([start, end]) => [start, end])
|
||||||
|
|
||||||
|
export const parse = (code: string): Program => {
|
||||||
|
try {
|
||||||
|
const program: Program = parse_wasm(code)
|
||||||
|
return program
|
||||||
|
} catch (e: any) {
|
||||||
|
const parsed: RustKclError = JSON.parse(e.toString())
|
||||||
|
const kclError = new KCLError(
|
||||||
|
parsed.kind,
|
||||||
|
parsed.msg,
|
||||||
|
rangeTypeFix(parsed.sourceRanges)
|
||||||
|
)
|
||||||
|
|
||||||
|
console.log(kclError)
|
||||||
|
throw kclError
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export type PathToNode = [string | number, string][]
|
||||||
|
|
||||||
|
interface Memory {
|
||||||
|
[key: string]: MemoryItem
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ProgramMemory {
|
||||||
|
root: Memory
|
||||||
|
return: ProgramReturn | null
|
||||||
|
}
|
||||||
|
|
||||||
|
export const executor = async (
|
||||||
|
node: Program,
|
||||||
|
programMemory: ProgramMemory = { root: {}, return: null },
|
||||||
|
engineCommandManager: EngineCommandManager,
|
||||||
|
// work around while the gemotry is still be stored on the frontend
|
||||||
|
// will be removed when the stream UI is added.
|
||||||
|
tempMapCallback: (a: {
|
||||||
|
artifactMap: ArtifactMap
|
||||||
|
sourceRangeMap: SourceRangeMap
|
||||||
|
}) => void = () => {}
|
||||||
|
): Promise<ProgramMemory> => {
|
||||||
|
engineCommandManager.startNewSession()
|
||||||
|
const _programMemory = await _executor(
|
||||||
|
node,
|
||||||
|
programMemory,
|
||||||
|
engineCommandManager
|
||||||
|
)
|
||||||
|
const { artifactMap, sourceRangeMap } =
|
||||||
|
await engineCommandManager.waitForAllCommands(node, _programMemory)
|
||||||
|
tempMapCallback({ artifactMap, sourceRangeMap })
|
||||||
|
|
||||||
|
engineCommandManager.endSession()
|
||||||
|
return _programMemory
|
||||||
|
}
|
||||||
|
|
||||||
|
export const _executor = async (
|
||||||
|
node: Program,
|
||||||
|
programMemory: ProgramMemory = { root: {}, return: null },
|
||||||
|
engineCommandManager: EngineCommandManager
|
||||||
|
): Promise<ProgramMemory> => {
|
||||||
|
try {
|
||||||
|
const memory: ProgramMemory = await execute_wasm(
|
||||||
|
JSON.stringify(node),
|
||||||
|
JSON.stringify(programMemory),
|
||||||
|
engineCommandManager
|
||||||
|
)
|
||||||
|
return memory
|
||||||
|
} catch (e: any) {
|
||||||
|
const parsed: RustKclError = JSON.parse(e.toString())
|
||||||
|
const kclError = new KCLError(
|
||||||
|
parsed.kind,
|
||||||
|
parsed.msg,
|
||||||
|
rangeTypeFix(parsed.sourceRanges)
|
||||||
|
)
|
||||||
|
|
||||||
|
console.log(kclError)
|
||||||
|
throw kclError
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const recast = (ast: Program): string => {
|
||||||
|
try {
|
||||||
|
const s: string = recast_wasm(JSON.stringify(ast))
|
||||||
|
return s
|
||||||
|
} catch (e) {
|
||||||
|
// TODO: do something real with the error.
|
||||||
|
console.log('recast error', e)
|
||||||
|
throw e
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function lexer(str: string): Token[] {
|
||||||
|
try {
|
||||||
|
const tokens: Token[] = lexer_wasm(str)
|
||||||
|
return tokens
|
||||||
|
} catch (e) {
|
||||||
|
// TODO: do something real with the error.
|
||||||
|
console.log('lexer error', e)
|
||||||
|
throw e
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const modifyAstForSketch = async (
|
||||||
|
engineCommandManager: EngineCommandManager,
|
||||||
|
ast: Program,
|
||||||
|
variableName: string,
|
||||||
|
engineId: string
|
||||||
|
): Promise<Program> => {
|
||||||
|
try {
|
||||||
|
const updatedAst: Program = await modify_ast_for_sketch_wasm(
|
||||||
|
engineCommandManager,
|
||||||
|
JSON.stringify(ast),
|
||||||
|
variableName,
|
||||||
|
engineId
|
||||||
|
)
|
||||||
|
|
||||||
|
return updatedAst
|
||||||
|
} catch (e: any) {
|
||||||
|
const parsed: RustKclError = JSON.parse(e.toString())
|
||||||
|
const kclError = new KCLError(
|
||||||
|
parsed.kind,
|
||||||
|
parsed.msg,
|
||||||
|
rangeTypeFix(parsed.sourceRanges)
|
||||||
|
)
|
||||||
|
|
||||||
|
console.log(kclError)
|
||||||
|
throw kclError
|
||||||
|
}
|
||||||
|
}
|
@ -1,19 +1,27 @@
|
|||||||
export const bracket = `// Material: 6061-T6 Aluminum
|
export const bracket = `const sigmaAllow = 15000 // psi
|
||||||
const sigmaAllow = 35000 // psi
|
const width = 11 // inch
|
||||||
const width = 9 // inch
|
|
||||||
const p = 150 // Force on shelf - lbs
|
const p = 150 // Force on shelf - lbs
|
||||||
const distance = 6 // inches
|
const distance = 12 // inches
|
||||||
const FOS = 2
|
const FOS = 2
|
||||||
|
const thickness = sqrt(distance * p * FOS * 6 / ( sigmaAllow * width ))
|
||||||
|
const filletR = thickness * 2
|
||||||
|
const shelfMountL = 9
|
||||||
|
const wallMountL = 8
|
||||||
|
|
||||||
const leg1 = 5 // inches
|
|
||||||
const leg2 = 8 // inches
|
|
||||||
const thickness = sqrt(distance * p * FOS * 6 / sigmaAllow / width) // inches
|
|
||||||
const bracket = startSketchAt([0, 0])
|
const bracket = startSketchAt([0, 0])
|
||||||
|> line([0, leg1], %)
|
|> line([0, wallMountL], %)
|
||||||
|> line([leg2, 0], %)
|
|> tangentalArc({
|
||||||
|
radius: filletR,
|
||||||
|
offset: 90
|
||||||
|
}, %)
|
||||||
|
|> line([-shelfMountL, 0], %)
|
||||||
|> line([0, -thickness], %)
|
|> line([0, -thickness], %)
|
||||||
|> line([-leg2 + thickness, 0], %)
|
|> line([shelfMountL, 0], %)
|
||||||
|> line([0, -leg1 + thickness], %)
|
|> tangentalArc({
|
||||||
|
radius: filletR - thickness,
|
||||||
|
offset: -90
|
||||||
|
}, %)
|
||||||
|
|> line([0, -wallMountL], %)
|
||||||
|> close(%)
|
|> close(%)
|
||||||
|> extrude(width, %)
|
|> extrude(width, %)
|
||||||
|
|
||||||
|
@ -25,7 +25,7 @@ export async function exportSave(data: ArrayBuffer) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Write the file.
|
// Write the file.
|
||||||
await writeBinaryFile(filePath, uintArray)
|
await writeBinaryFile(filePath, file.contents)
|
||||||
} else {
|
} else {
|
||||||
// Download the file to the user's computer.
|
// Download the file to the user's computer.
|
||||||
// Now we need to download the files to the user's downloads folder.
|
// Now we need to download the files to the user's downloads folder.
|
||||||
@ -39,6 +39,6 @@ export async function exportSave(data: ArrayBuffer) {
|
|||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
// TODO: do something real with the error.
|
// TODO: do something real with the error.
|
||||||
console.log('export', e)
|
console.log('export error', e)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -36,7 +36,7 @@ export async function initializeProjectDirectory(directory: string) {
|
|||||||
try {
|
try {
|
||||||
docDirectory = await documentDir()
|
docDirectory = await documentDir()
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
console.log(e)
|
console.log('error', e)
|
||||||
docDirectory = await homeDir() // seems to work better on Linux
|
docDirectory = await homeDir() // seems to work better on Linux
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,10 +1,8 @@
|
|||||||
import { Program } from '../lang/abstractSyntaxTreeTypes'
|
import { Program, ProgramMemory, _executor, SourceRange } from '../lang/wasm'
|
||||||
import { ProgramMemory, _executor } from '../lang/executor'
|
|
||||||
import {
|
import {
|
||||||
EngineCommandManager,
|
EngineCommandManager,
|
||||||
EngineCommand,
|
EngineCommand,
|
||||||
} from '../lang/std/engineConnection'
|
} from '../lang/std/engineConnection'
|
||||||
import { SourceRange } from 'lang/executor'
|
|
||||||
import { Models } from '@kittycad/lib'
|
import { Models } from '@kittycad/lib'
|
||||||
|
|
||||||
type WebSocketResponse = Models['OkWebSocketResponseData_type']
|
type WebSocketResponse = Models['OkWebSocketResponseData_type']
|
||||||
@ -75,11 +73,13 @@ export async function executor(
|
|||||||
ast: Program,
|
ast: Program,
|
||||||
pm: ProgramMemory = { root: {}, return: null }
|
pm: ProgramMemory = { root: {}, return: null }
|
||||||
): Promise<ProgramMemory> {
|
): Promise<ProgramMemory> {
|
||||||
const engineCommandManager = new EngineCommandManager({
|
const engineCommandManager = new EngineCommandManager()
|
||||||
|
engineCommandManager.start({
|
||||||
setIsStreamReady: () => {},
|
setIsStreamReady: () => {},
|
||||||
setMediaStream: () => {},
|
setMediaStream: () => {},
|
||||||
width: 100,
|
width: 0,
|
||||||
height: 100,
|
height: 0,
|
||||||
|
executeCode: () => {},
|
||||||
})
|
})
|
||||||
await engineCommandManager.waitForReady
|
await engineCommandManager.waitForReady
|
||||||
engineCommandManager.startNewSession()
|
engineCommandManager.startNewSession()
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
import { isOverlap, roundOff } from './utils'
|
import { isOverlap, roundOff } from './utils'
|
||||||
import { SourceRange } from '../lang/executor'
|
import { SourceRange } from '../lang/wasm'
|
||||||
|
|
||||||
describe('testing isOverlapping', () => {
|
describe('testing isOverlapping', () => {
|
||||||
testBothOrders([0, 3], [3, 10])
|
testBothOrders([0, 3], [3, 10])
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
import { SourceRange } from '../lang/executor'
|
import { SourceRange } from '../lang/wasm'
|
||||||
|
|
||||||
export function isOverlap(a: SourceRange, b: SourceRange) {
|
export function isOverlap(a: SourceRange, b: SourceRange) {
|
||||||
const [startingRange, secondRange] = a[0] < b[0] ? [a, b] : [b, a]
|
const [startingRange, secondRange] = a[0] < b[0] ? [a, b] : [b, a]
|
||||||
|
@ -2,6 +2,7 @@ import { assign, createMachine } from 'xstate'
|
|||||||
import { CommandBarMeta } from '../lib/commands'
|
import { CommandBarMeta } from '../lib/commands'
|
||||||
import { Themes, getSystemTheme, setThemeClass } from '../lib/theme'
|
import { Themes, getSystemTheme, setThemeClass } from '../lib/theme'
|
||||||
import { CameraSystem, cameraSystems } from 'lib/cameraControls'
|
import { CameraSystem, cameraSystems } from 'lib/cameraControls'
|
||||||
|
import { Models } from '@kittycad/lib'
|
||||||
|
|
||||||
export const DEFAULT_PROJECT_NAME = 'project-$nnn'
|
export const DEFAULT_PROJECT_NAME = 'project-$nnn'
|
||||||
|
|
||||||
@ -11,11 +12,11 @@ export enum UnitSystem {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export const baseUnits = {
|
export const baseUnits = {
|
||||||
imperial: ['in', 'ft'],
|
imperial: ['in', 'ft', 'yd'],
|
||||||
metric: ['mm', 'cm', 'm'],
|
metric: ['mm', 'cm', 'm'],
|
||||||
} as const
|
} as const
|
||||||
|
|
||||||
export type BaseUnit = 'in' | 'ft' | 'mm' | 'cm' | 'm'
|
export type BaseUnit = Models['UnitLength_type']
|
||||||
|
|
||||||
export const baseUnitsUnion = Object.values(baseUnits).flatMap((v) => v)
|
export const baseUnitsUnion = Object.values(baseUnits).flatMap((v) => v)
|
||||||
|
|
||||||
|
@ -1,24 +1,25 @@
|
|||||||
import { create } from 'zustand'
|
import { create } from 'zustand'
|
||||||
import { persist } from 'zustand/middleware'
|
import { persist } from 'zustand/middleware'
|
||||||
import { addLineHighlight, EditorView } from './editor/highlightextension'
|
import { addLineHighlight, EditorView } from './editor/highlightextension'
|
||||||
import { parser_wasm } from './lang/abstractSyntaxTree'
|
|
||||||
import { Program } from './lang/abstractSyntaxTreeTypes'
|
|
||||||
import { getNodeFromPath } from './lang/queryAst'
|
|
||||||
import { enginelessExecutor } from './lib/testHelpers'
|
|
||||||
import {
|
import {
|
||||||
|
parse,
|
||||||
|
Program,
|
||||||
|
_executor,
|
||||||
|
recast,
|
||||||
ProgramMemory,
|
ProgramMemory,
|
||||||
Position,
|
Position,
|
||||||
PathToNode,
|
PathToNode,
|
||||||
Rotation,
|
Rotation,
|
||||||
SourceRange,
|
SourceRange,
|
||||||
} from './lang/executor'
|
} from './lang/wasm'
|
||||||
import { recast } from './lang/recast'
|
import { getNodeFromPath } from './lang/queryAst'
|
||||||
|
import { enginelessExecutor } from './lib/testHelpers'
|
||||||
import { EditorSelection } from '@codemirror/state'
|
import { EditorSelection } from '@codemirror/state'
|
||||||
import { EngineCommandManager } from './lang/std/engineConnection'
|
import { EngineCommandManager } from './lang/std/engineConnection'
|
||||||
import { KCLError } from './lang/errors'
|
import { KCLError } from './lang/errors'
|
||||||
import { deferExecution } from 'lib/utils'
|
import { deferExecution } from 'lib/utils'
|
||||||
import { _executor } from './lang/executor'
|
|
||||||
import { bracket } from 'lib/exampleKcl'
|
import { bracket } from 'lib/exampleKcl'
|
||||||
|
import { engineCommandManager } from './lang/std/engineConnection'
|
||||||
|
|
||||||
export type Selection = {
|
export type Selection = {
|
||||||
type: 'default' | 'line-end' | 'line-mid'
|
type: 'default' | 'line-end' | 'line-mid'
|
||||||
@ -156,14 +157,12 @@ export interface StoreState {
|
|||||||
code: string
|
code: string
|
||||||
setCode: (code: string) => void
|
setCode: (code: string) => void
|
||||||
deferredSetCode: (code: string) => void
|
deferredSetCode: (code: string) => void
|
||||||
executeCode: (code?: string) => void
|
executeCode: (code?: string, force?: boolean) => void
|
||||||
formatCode: () => void
|
formatCode: () => void
|
||||||
programMemory: ProgramMemory
|
programMemory: ProgramMemory
|
||||||
setProgramMemory: (programMemory: ProgramMemory) => void
|
setProgramMemory: (programMemory: ProgramMemory) => void
|
||||||
isShiftDown: boolean
|
isShiftDown: boolean
|
||||||
setIsShiftDown: (isShiftDown: boolean) => void
|
setIsShiftDown: (isShiftDown: boolean) => void
|
||||||
engineCommandManager?: EngineCommandManager
|
|
||||||
setEngineCommandManager: (engineCommandManager: EngineCommandManager) => void
|
|
||||||
mediaStream?: MediaStream
|
mediaStream?: MediaStream
|
||||||
setMediaStream: (mediaStream: MediaStream) => void
|
setMediaStream: (mediaStream: MediaStream) => void
|
||||||
isStreamReady: boolean
|
isStreamReady: boolean
|
||||||
@ -222,11 +221,12 @@ export const useStore = create<StoreState>()(
|
|||||||
editorView.dispatch({ effects: addLineHighlight.of(selection) })
|
editorView.dispatch({ effects: addLineHighlight.of(selection) })
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
executeCode: async (code) => {
|
executeCode: async (code, force) => {
|
||||||
const result = await executeCode({
|
const result = await executeCode({
|
||||||
code: code || get().code,
|
code: code || get().code,
|
||||||
lastAst: get().ast,
|
lastAst: get().ast,
|
||||||
engineCommandManager: get().engineCommandManager,
|
engineCommandManager: engineCommandManager,
|
||||||
|
force,
|
||||||
})
|
})
|
||||||
if (!result.isChange) {
|
if (!result.isChange) {
|
||||||
return
|
return
|
||||||
@ -332,8 +332,6 @@ export const useStore = create<StoreState>()(
|
|||||||
executeAst: async (ast) => {
|
executeAst: async (ast) => {
|
||||||
const _ast = ast || get().ast
|
const _ast = ast || get().ast
|
||||||
if (!get().isStreamReady) return
|
if (!get().isStreamReady) return
|
||||||
const engineCommandManager = get().engineCommandManager!
|
|
||||||
if (!engineCommandManager) return
|
|
||||||
|
|
||||||
set({ isExecuting: true })
|
set({ isExecuting: true })
|
||||||
const { logs, errors, programMemory } = await executeAst({
|
const { logs, errors, programMemory } = await executeAst({
|
||||||
@ -350,8 +348,6 @@ export const useStore = create<StoreState>()(
|
|||||||
executeAstMock: async (ast) => {
|
executeAstMock: async (ast) => {
|
||||||
const _ast = ast || get().ast
|
const _ast = ast || get().ast
|
||||||
if (!get().isStreamReady) return
|
if (!get().isStreamReady) return
|
||||||
const engineCommandManager = get().engineCommandManager!
|
|
||||||
if (!engineCommandManager) return
|
|
||||||
|
|
||||||
const { logs, errors, programMemory } = await executeAst({
|
const { logs, errors, programMemory } = await executeAst({
|
||||||
ast: _ast,
|
ast: _ast,
|
||||||
@ -371,7 +367,7 @@ export const useStore = create<StoreState>()(
|
|||||||
{ focusPath, callBack = () => {} } = {}
|
{ focusPath, callBack = () => {} } = {}
|
||||||
) => {
|
) => {
|
||||||
const newCode = recast(ast)
|
const newCode = recast(ast)
|
||||||
const astWithUpdatedSource = parser_wasm(newCode)
|
const astWithUpdatedSource = parse(newCode)
|
||||||
callBack(astWithUpdatedSource)
|
callBack(astWithUpdatedSource)
|
||||||
|
|
||||||
set({
|
set({
|
||||||
@ -427,7 +423,7 @@ export const useStore = create<StoreState>()(
|
|||||||
},
|
},
|
||||||
formatCode: async () => {
|
formatCode: async () => {
|
||||||
const code = get().code
|
const code = get().code
|
||||||
const ast = parser_wasm(code)
|
const ast = parse(code)
|
||||||
const newCode = recast(ast)
|
const newCode = recast(ast)
|
||||||
set({ code: newCode, ast })
|
set({ code: newCode, ast })
|
||||||
},
|
},
|
||||||
@ -435,8 +431,6 @@ export const useStore = create<StoreState>()(
|
|||||||
setProgramMemory: (programMemory) => set({ programMemory }),
|
setProgramMemory: (programMemory) => set({ programMemory }),
|
||||||
isShiftDown: false,
|
isShiftDown: false,
|
||||||
setIsShiftDown: (isShiftDown) => set({ isShiftDown }),
|
setIsShiftDown: (isShiftDown) => set({ isShiftDown }),
|
||||||
setEngineCommandManager: (engineCommandManager) =>
|
|
||||||
set({ engineCommandManager }),
|
|
||||||
setMediaStream: (mediaStream) => set({ mediaStream }),
|
setMediaStream: (mediaStream) => set({ mediaStream }),
|
||||||
isStreamReady: false,
|
isStreamReady: false,
|
||||||
setIsStreamReady: (isStreamReady) => set({ isStreamReady }),
|
setIsStreamReady: (isStreamReady) => set({ isStreamReady }),
|
||||||
@ -454,7 +448,9 @@ export const useStore = create<StoreState>()(
|
|||||||
fileId: '',
|
fileId: '',
|
||||||
setFileId: (fileId) => set({ fileId }),
|
setFileId: (fileId) => set({ fileId }),
|
||||||
streamDimensions: { streamWidth: 1280, streamHeight: 720 },
|
streamDimensions: { streamWidth: 1280, streamHeight: 720 },
|
||||||
setStreamDimensions: (streamDimensions) => set({ streamDimensions }),
|
setStreamDimensions: (streamDimensions) => {
|
||||||
|
set({ streamDimensions })
|
||||||
|
},
|
||||||
isExecuting: false,
|
isExecuting: false,
|
||||||
setIsExecuting: (isExecuting) => set({ isExecuting }),
|
setIsExecuting: (isExecuting) => set({ isExecuting }),
|
||||||
|
|
||||||
@ -516,10 +512,12 @@ async function executeCode({
|
|||||||
engineCommandManager,
|
engineCommandManager,
|
||||||
code,
|
code,
|
||||||
lastAst,
|
lastAst,
|
||||||
|
force,
|
||||||
}: {
|
}: {
|
||||||
code: string
|
code: string
|
||||||
lastAst: Program
|
lastAst: Program
|
||||||
engineCommandManager?: EngineCommandManager
|
engineCommandManager: EngineCommandManager
|
||||||
|
force?: boolean
|
||||||
}): Promise<
|
}): Promise<
|
||||||
| {
|
| {
|
||||||
logs: string[]
|
logs: string[]
|
||||||
@ -532,14 +530,14 @@ async function executeCode({
|
|||||||
> {
|
> {
|
||||||
let ast: Program
|
let ast: Program
|
||||||
try {
|
try {
|
||||||
ast = parser_wasm(code)
|
ast = parse(code)
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
let errors: KCLError[] = []
|
let errors: KCLError[] = []
|
||||||
let logs: string[] = [JSON.stringify(e)]
|
let logs: string[] = [JSON.stringify(e)]
|
||||||
if (e instanceof KCLError) {
|
if (e instanceof KCLError) {
|
||||||
errors = [e]
|
errors = [e]
|
||||||
logs = []
|
logs = []
|
||||||
if (e.msg === 'file is empty') engineCommandManager?.endSession()
|
if (e.msg === 'file is empty') engineCommandManager.endSession()
|
||||||
}
|
}
|
||||||
return {
|
return {
|
||||||
isChange: true,
|
isChange: true,
|
||||||
@ -562,7 +560,7 @@ async function executeCode({
|
|||||||
}
|
}
|
||||||
// Check if the ast we have is equal to the ast in the storage.
|
// Check if the ast we have is equal to the ast in the storage.
|
||||||
// If it is, we don't need to update the ast.
|
// If it is, we don't need to update the ast.
|
||||||
if (!engineCommandManager || JSON.stringify(ast) === JSON.stringify(lastAst))
|
if (JSON.stringify(ast) === JSON.stringify(lastAst) && !force)
|
||||||
return { isChange: false }
|
return { isChange: false }
|
||||||
|
|
||||||
const { logs, errors, programMemory } = await executeAst({
|
const { logs, errors, programMemory } = await executeAst({
|
||||||
|
@ -6,10 +6,10 @@
|
|||||||
serial-integration = { max-threads = 4 }
|
serial-integration = { max-threads = 4 }
|
||||||
|
|
||||||
[profile.default]
|
[profile.default]
|
||||||
slow-timeout = { period = "10s", terminate-after = 1 }
|
slow-timeout = { period = "60s", terminate-after = 1 }
|
||||||
|
|
||||||
[profile.ci]
|
[profile.ci]
|
||||||
slow-timeout = { period = "60s", terminate-after = 10 }
|
slow-timeout = { period = "120s", terminate-after = 10 }
|
||||||
|
|
||||||
[[profile.default.overrides]]
|
[[profile.default.overrides]]
|
||||||
filter = "test(serial_test_)"
|
filter = "test(serial_test_)"
|
||||||
|
97
src/wasm-lib/Cargo.lock
generated
97
src/wasm-lib/Cargo.lock
generated
@ -71,9 +71,9 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "anstream"
|
name = "anstream"
|
||||||
version = "0.5.0"
|
version = "0.6.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b1f58811cfac344940f1a400b6e6231ce35171f614f26439e80f8c1465c5cc0c"
|
checksum = "f6cd65a4b849ace0b7f6daeebcc1a1d111282227ca745458c61dbf670e52a597"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anstyle",
|
"anstyle",
|
||||||
"anstyle-parse",
|
"anstyle-parse",
|
||||||
@ -109,9 +109,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "anstyle-wincon"
|
name = "anstyle-wincon"
|
||||||
version = "2.1.0"
|
version = "3.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "58f54d10c6dfa51283a066ceab3ec1ab78d13fae00aa49243a45e4571fb79dfd"
|
checksum = "0238ca56c96dfa37bdf7c373c8886dd591322500aceeeccdb2216fe06dc2f796"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anstyle",
|
"anstyle",
|
||||||
"windows-sys 0.48.0",
|
"windows-sys 0.48.0",
|
||||||
@ -440,9 +440,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "clap"
|
name = "clap"
|
||||||
version = "4.4.4"
|
version = "4.4.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b1d7b8d5ec32af0fadc644bf1fd509a688c2103b185644bb1e29d164e0703136"
|
checksum = "d04704f56c2cde07f43e8e2c154b43f216dc5c92fc98ada720177362f953b956"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"clap_builder",
|
"clap_builder",
|
||||||
"clap_derive",
|
"clap_derive",
|
||||||
@ -450,9 +450,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "clap_builder"
|
name = "clap_builder"
|
||||||
version = "4.4.4"
|
version = "4.4.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "5179bb514e4d7c2051749d8fcefa2ed6d06a9f4e6d69faf3805f5d80b8cf8d56"
|
checksum = "0e231faeaca65ebd1ea3c737966bf858971cd38c3849107aa3ea7de90a804e45"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anstream",
|
"anstream",
|
||||||
"anstyle",
|
"anstyle",
|
||||||
@ -695,6 +695,20 @@ dependencies = [
|
|||||||
"syn 2.0.37",
|
"syn 2.0.37",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "derive-docs"
|
||||||
|
version = "0.1.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "c357dec14992ba88803535217ed83d6f6cd80efcb8fa8e3f8a30a9b84fadc1c7"
|
||||||
|
dependencies = [
|
||||||
|
"convert_case",
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"serde",
|
||||||
|
"serde_tokenstream",
|
||||||
|
"syn 2.0.37",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "diff"
|
name = "diff"
|
||||||
version = "0.1.13"
|
version = "0.1.13"
|
||||||
@ -788,9 +802,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "expectorate"
|
name = "expectorate"
|
||||||
version = "1.0.7"
|
version = "1.1.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "710ab6a2d57038a835d66f78d5af3fa5d27c1ec4682f823b9203c48826cb0591"
|
checksum = "de6f19b25bdfa2747ae775f37cd109c31f1272d4e4c83095be0727840aa1d75f"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"console",
|
"console",
|
||||||
"newline-converter",
|
"newline-converter",
|
||||||
@ -1376,7 +1390,7 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "kcl-lib"
|
name = "kcl-lib"
|
||||||
version = "0.1.31"
|
version = "0.1.32"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"async-recursion",
|
"async-recursion",
|
||||||
@ -1385,7 +1399,7 @@ dependencies = [
|
|||||||
"clap",
|
"clap",
|
||||||
"criterion",
|
"criterion",
|
||||||
"dashmap",
|
"dashmap",
|
||||||
"derive-docs",
|
"derive-docs 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"expectorate",
|
"expectorate",
|
||||||
"futures",
|
"futures",
|
||||||
"itertools 0.11.0",
|
"itertools 0.11.0",
|
||||||
@ -1394,7 +1408,6 @@ dependencies = [
|
|||||||
"lazy_static",
|
"lazy_static",
|
||||||
"parse-display",
|
"parse-display",
|
||||||
"pretty_assertions",
|
"pretty_assertions",
|
||||||
"regex",
|
|
||||||
"reqwest",
|
"reqwest",
|
||||||
"schemars",
|
"schemars",
|
||||||
"serde",
|
"serde",
|
||||||
@ -1408,13 +1421,14 @@ dependencies = [
|
|||||||
"wasm-bindgen",
|
"wasm-bindgen",
|
||||||
"wasm-bindgen-futures",
|
"wasm-bindgen-futures",
|
||||||
"web-sys",
|
"web-sys",
|
||||||
|
"winnow",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "kittycad"
|
name = "kittycad"
|
||||||
version = "0.2.26"
|
version = "0.2.28"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e2623ee601ce203476229df3f9d3a14664cb43e3f7455e9ac8ed91aacaa6163d"
|
checksum = "35b2f9302648dbb06fd7121687f9505fc3179eba84111a06d76b246e3158f5dc"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"async-trait",
|
"async-trait",
|
||||||
@ -1719,7 +1733,7 @@ checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
|
|||||||
[[package]]
|
[[package]]
|
||||||
name = "openapitor"
|
name = "openapitor"
|
||||||
version = "0.0.9"
|
version = "0.0.9"
|
||||||
source = "git+https://github.com/KittyCAD/kittycad.rs?branch=main#61a16059b3eaf8793a2a2e1edbc0d770f284fea3"
|
source = "git+https://github.com/KittyCAD/kittycad.rs?branch=main#fa0345c514fcc9ae6cd74ae35c8e5c2800fec34f"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"Inflector",
|
"Inflector",
|
||||||
"anyhow",
|
"anyhow",
|
||||||
@ -2203,9 +2217,9 @@ checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "reqwest"
|
name = "reqwest"
|
||||||
version = "0.11.20"
|
version = "0.11.21"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "3e9ad3fe7488d7e34558a2033d45a0c90b72d97b4f80705666fea71472e2e6a1"
|
checksum = "78fdbab6a7e1d7b13cc8ff10197f47986b41c639300cc3c8158cac7847c9bbef"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"base64 0.21.4",
|
"base64 0.21.4",
|
||||||
"bytes",
|
"bytes",
|
||||||
@ -2230,6 +2244,7 @@ dependencies = [
|
|||||||
"serde",
|
"serde",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
"serde_urlencoded",
|
"serde_urlencoded",
|
||||||
|
"system-configuration",
|
||||||
"tokio",
|
"tokio",
|
||||||
"tokio-rustls",
|
"tokio-rustls",
|
||||||
"tower-service",
|
"tower-service",
|
||||||
@ -2879,6 +2894,27 @@ dependencies = [
|
|||||||
"unicode-ident",
|
"unicode-ident",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "system-configuration"
|
||||||
|
version = "0.5.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7"
|
||||||
|
dependencies = [
|
||||||
|
"bitflags 1.3.2",
|
||||||
|
"core-foundation",
|
||||||
|
"system-configuration-sys",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "system-configuration-sys"
|
||||||
|
version = "0.5.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9"
|
||||||
|
dependencies = [
|
||||||
|
"core-foundation-sys",
|
||||||
|
"libc",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "take_mut"
|
name = "take_mut"
|
||||||
version = "0.2.2"
|
version = "0.2.2"
|
||||||
@ -2945,18 +2981,18 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "thiserror"
|
name = "thiserror"
|
||||||
version = "1.0.48"
|
version = "1.0.49"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "9d6d7a740b8a666a7e828dd00da9c0dc290dff53154ea77ac109281de90589b7"
|
checksum = "1177e8c6d7ede7afde3585fd2513e611227efd6481bd78d2e82ba1ce16557ed4"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"thiserror-impl",
|
"thiserror-impl",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "thiserror-impl"
|
name = "thiserror-impl"
|
||||||
version = "1.0.48"
|
version = "1.0.49"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "49922ecae66cc8a249b77e68d1d0623c1b2c514f0060c27cdc68bd62a1219d35"
|
checksum = "10712f02019e9288794769fba95cd6847df9874d49d871d062172f9dd41bc4cc"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
@ -3081,9 +3117,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tokio-tungstenite"
|
name = "tokio-tungstenite"
|
||||||
version = "0.20.0"
|
version = "0.20.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "2b2dbec703c26b00d74844519606ef15d09a7d6857860f84ad223dec002ddea2"
|
checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"futures-util",
|
"futures-util",
|
||||||
"log",
|
"log",
|
||||||
@ -3303,9 +3339,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tungstenite"
|
name = "tungstenite"
|
||||||
version = "0.20.0"
|
version = "0.20.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e862a1c4128df0112ab625f55cd5c934bcb4312ba80b39ae4b4835a3fd58e649"
|
checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"byteorder",
|
"byteorder",
|
||||||
"bytes",
|
"bytes",
|
||||||
@ -3792,6 +3828,15 @@ version = "0.48.5"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
|
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "winnow"
|
||||||
|
version = "0.5.15"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc"
|
||||||
|
dependencies = [
|
||||||
|
"memchr",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "winreg"
|
name = "winreg"
|
||||||
version = "0.50.0"
|
version = "0.50.0"
|
||||||
|
@ -11,7 +11,7 @@ crate-type = ["cdylib"]
|
|||||||
bson = { version = "2.7.0", features = ["uuid-1", "chrono"] }
|
bson = { version = "2.7.0", features = ["uuid-1", "chrono"] }
|
||||||
gloo-utils = "0.2.0"
|
gloo-utils = "0.2.0"
|
||||||
kcl-lib = { path = "kcl" }
|
kcl-lib = { path = "kcl" }
|
||||||
kittycad = { version = "0.2.25", default-features = false, features = ["js"] }
|
kittycad = { version = "0.2.27", default-features = false, features = ["js"] }
|
||||||
serde_json = "1.0.107"
|
serde_json = "1.0.107"
|
||||||
uuid = { version = "1.4.1", features = ["v4", "js", "serde"] }
|
uuid = { version = "1.4.1", features = ["v4", "js", "serde"] }
|
||||||
wasm-bindgen = "0.2.87"
|
wasm-bindgen = "0.2.87"
|
||||||
@ -20,9 +20,9 @@ wasm-bindgen-futures = "0.4.37"
|
|||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
anyhow = "1"
|
anyhow = "1"
|
||||||
image = "0.24.7"
|
image = "0.24.7"
|
||||||
kittycad = "0.2.25"
|
kittycad = "0.2.27"
|
||||||
pretty_assertions = "1.4.0"
|
pretty_assertions = "1.4.0"
|
||||||
reqwest = { version = "0.11.20", default-features = false }
|
reqwest = { version = "0.11.21", default-features = false }
|
||||||
tokio = { version = "1.32.0", features = ["rt-multi-thread", "macros", "time"] }
|
tokio = { version = "1.32.0", features = ["rt-multi-thread", "macros", "time"] }
|
||||||
twenty-twenty = "0.6.1"
|
twenty-twenty = "0.6.1"
|
||||||
uuid = { version = "1.4.1", features = ["v4", "js", "serde"] }
|
uuid = { version = "1.4.1", features = ["v4", "js", "serde"] }
|
||||||
|
@ -19,6 +19,6 @@ serde_tokenstream = "0.2"
|
|||||||
syn = { version = "2.0.37", features = ["full"] }
|
syn = { version = "2.0.37", features = ["full"] }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
expectorate = "1.0.7"
|
expectorate = "1.1.0"
|
||||||
openapitor = { git = "https://github.com/KittyCAD/kittycad.rs", branch = "main" }
|
openapitor = { git = "https://github.com/KittyCAD/kittycad.rs", branch = "main" }
|
||||||
pretty_assertions = "1.4.0"
|
pretty_assertions = "1.4.0"
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "kcl-lib"
|
name = "kcl-lib"
|
||||||
description = "KittyCAD Language"
|
description = "KittyCAD Language"
|
||||||
version = "0.1.31"
|
version = "0.1.32"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
|
|
||||||
@ -11,20 +11,20 @@ license = "MIT"
|
|||||||
anyhow = { version = "1.0.75", features = ["backtrace"] }
|
anyhow = { version = "1.0.75", features = ["backtrace"] }
|
||||||
async-recursion = "1.0.5"
|
async-recursion = "1.0.5"
|
||||||
async-trait = "0.1.73"
|
async-trait = "0.1.73"
|
||||||
clap = { version = "4.4.3", features = ["cargo", "derive", "env", "unicode"], optional = true }
|
clap = { version = "4.4.6", features = ["cargo", "derive", "env", "unicode"], optional = true }
|
||||||
dashmap = "5.5.3"
|
dashmap = "5.5.3"
|
||||||
#derive-docs = { version = "0.1.4" }
|
derive-docs = { version = "0.1.4" }
|
||||||
derive-docs = { path = "../derive-docs" }
|
#derive-docs = { path = "../derive-docs" }
|
||||||
kittycad = { version = "0.2.25", default-features = false, features = ["js"] }
|
kittycad = { version = "0.2.27", default-features = false, features = ["js"] }
|
||||||
lazy_static = "1.4.0"
|
lazy_static = "1.4.0"
|
||||||
parse-display = "0.8.2"
|
parse-display = "0.8.2"
|
||||||
regex = "1.7.1"
|
|
||||||
schemars = { version = "0.8", features = ["impl_json_schema", "url", "uuid1"] }
|
schemars = { version = "0.8", features = ["impl_json_schema", "url", "uuid1"] }
|
||||||
serde = { version = "1.0.188", features = ["derive"] }
|
serde = { version = "1.0.188", features = ["derive"] }
|
||||||
serde_json = "1.0.107"
|
serde_json = "1.0.107"
|
||||||
thiserror = "1.0.48"
|
thiserror = "1.0.49"
|
||||||
ts-rs = { version = "7", package = "ts-rs-json-value", features = ["serde-json-impl", "schemars-impl", "uuid-impl"] }
|
ts-rs = { version = "7", package = "ts-rs-json-value", features = ["serde-json-impl", "schemars-impl", "uuid-impl"] }
|
||||||
uuid = { version = "1.4.1", features = ["v4", "js", "serde"] }
|
uuid = { version = "1.4.1", features = ["v4", "js", "serde"] }
|
||||||
|
winnow = "0.5.15"
|
||||||
|
|
||||||
[target.'cfg(target_arch = "wasm32")'.dependencies]
|
[target.'cfg(target_arch = "wasm32")'.dependencies]
|
||||||
js-sys = { version = "0.3.64" }
|
js-sys = { version = "0.3.64" }
|
||||||
@ -36,7 +36,7 @@ web-sys = { version = "0.3.64", features = ["console"] }
|
|||||||
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
|
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
|
||||||
bson = { version = "2.7.0", features = ["uuid-1", "chrono"] }
|
bson = { version = "2.7.0", features = ["uuid-1", "chrono"] }
|
||||||
futures = { version = "0.3.28" }
|
futures = { version = "0.3.28" }
|
||||||
reqwest = { version = "0.11.20", default-features = false }
|
reqwest = { version = "0.11.21", default-features = false }
|
||||||
tokio = { version = "1.32.0", features = ["full"] }
|
tokio = { version = "1.32.0", features = ["full"] }
|
||||||
tokio-tungstenite = { version = "0.20.0", features = ["rustls-tls-native-roots"] }
|
tokio-tungstenite = { version = "0.20.0", features = ["rustls-tls-native-roots"] }
|
||||||
tower-lsp = { version = "0.20.0", features = ["proposed"] }
|
tower-lsp = { version = "0.20.0", features = ["proposed"] }
|
||||||
@ -52,7 +52,7 @@ debug = true
|
|||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
criterion = "0.5.1"
|
criterion = "0.5.1"
|
||||||
expectorate = "1.0.7"
|
expectorate = "1.1.0"
|
||||||
itertools = "0.11.0"
|
itertools = "0.11.0"
|
||||||
pretty_assertions = "1.4.0"
|
pretty_assertions = "1.4.0"
|
||||||
tokio = { version = "1.32.0", features = ["rt-multi-thread", "macros", "time"] }
|
tokio = { version = "1.32.0", features = ["rt-multi-thread", "macros", "time"] }
|
||||||
|
@ -1,24 +1,32 @@
|
|||||||
use criterion::{criterion_group, criterion_main, Criterion};
|
use criterion::{black_box, criterion_group, criterion_main, Criterion};
|
||||||
|
|
||||||
pub fn criterion_benchmark(c: &mut Criterion) {
|
pub fn bench_lex(c: &mut Criterion) {
|
||||||
c.bench_function("parse + lex cube", |b| b.iter(|| lex_and_parse(CUBE_PROGRAM)));
|
c.bench_function("lex_cube", |b| b.iter(|| lex(CUBE_PROGRAM)));
|
||||||
c.bench_function("parse + lex big kitt", |b| {
|
c.bench_function("lex_big_kitt", |b| b.iter(|| lex(KITT_PROGRAM)));
|
||||||
b.iter(|| lex_and_parse(include_str!("../../tests/executor/inputs/kittycad_svg.kcl")))
|
c.bench_function("lex_pipes_on_pipes", |b| b.iter(|| lex(PIPES_PROGRAM)));
|
||||||
});
|
}
|
||||||
c.bench_function("parse + lex pipes_on_pipes", |b| {
|
|
||||||
b.iter(|| lex_and_parse(include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl")))
|
pub fn bench_lex_parse(c: &mut Criterion) {
|
||||||
});
|
c.bench_function("parse_lex_cube", |b| b.iter(|| lex_and_parse(CUBE_PROGRAM)));
|
||||||
|
c.bench_function("parse_lex_big_kitt", |b| b.iter(|| lex_and_parse(KITT_PROGRAM)));
|
||||||
|
c.bench_function("parse_lex_pipes_on_pipes", |b| b.iter(|| lex_and_parse(PIPES_PROGRAM)));
|
||||||
|
}
|
||||||
|
|
||||||
|
fn lex(program: &str) {
|
||||||
|
black_box(kcl_lib::token::lexer(program));
|
||||||
}
|
}
|
||||||
|
|
||||||
fn lex_and_parse(program: &str) {
|
fn lex_and_parse(program: &str) {
|
||||||
let tokens = kcl_lib::tokeniser::lexer(program);
|
let tokens = kcl_lib::token::lexer(program);
|
||||||
let parser = kcl_lib::parser::Parser::new(tokens);
|
let parser = kcl_lib::parser::Parser::new(tokens);
|
||||||
parser.ast().unwrap();
|
black_box(parser.ast().unwrap());
|
||||||
}
|
}
|
||||||
|
|
||||||
criterion_group!(benches, criterion_benchmark);
|
criterion_group!(benches, bench_lex, bench_lex_parse);
|
||||||
criterion_main!(benches);
|
criterion_main!(benches);
|
||||||
|
|
||||||
|
const KITT_PROGRAM: &str = include_str!("../../tests/executor/inputs/kittycad_svg.kcl");
|
||||||
|
const PIPES_PROGRAM: &str = include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl");
|
||||||
const CUBE_PROGRAM: &str = r#"fn cube = (pos, scale) => {
|
const CUBE_PROGRAM: &str = r#"fn cube = (pos, scale) => {
|
||||||
const sg = startSketchAt(pos)
|
const sg = startSketchAt(pos)
|
||||||
|> line([0, scale], %)
|
|> line([0, scale], %)
|
||||||
|
15
src/wasm-lib/kcl/fuzz/Cargo.lock
generated
15
src/wasm-lib/kcl/fuzz/Cargo.lock
generated
@ -709,7 +709,6 @@ dependencies = [
|
|||||||
"kittycad",
|
"kittycad",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"parse-display",
|
"parse-display",
|
||||||
"regex",
|
|
||||||
"reqwest",
|
"reqwest",
|
||||||
"schemars",
|
"schemars",
|
||||||
"serde",
|
"serde",
|
||||||
@ -723,6 +722,7 @@ dependencies = [
|
|||||||
"wasm-bindgen",
|
"wasm-bindgen",
|
||||||
"wasm-bindgen-futures",
|
"wasm-bindgen-futures",
|
||||||
"web-sys",
|
"web-sys",
|
||||||
|
"winnow",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@ -1878,9 +1878,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tungstenite"
|
name = "tungstenite"
|
||||||
version = "0.20.0"
|
version = "0.20.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e862a1c4128df0112ab625f55cd5c934bcb4312ba80b39ae4b4835a3fd58e649"
|
checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"byteorder",
|
"byteorder",
|
||||||
"bytes",
|
"bytes",
|
||||||
@ -2158,6 +2158,15 @@ version = "0.48.5"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
|
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "winnow"
|
||||||
|
version = "0.5.15"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc"
|
||||||
|
dependencies = [
|
||||||
|
"memchr",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "winreg"
|
name = "winreg"
|
||||||
version = "0.50.0"
|
version = "0.50.0"
|
||||||
|
@ -166,7 +166,7 @@ pub async fn modify_ast_for_sketch(
|
|||||||
let recasted = program.recast(&FormatOptions::default(), 0);
|
let recasted = program.recast(&FormatOptions::default(), 0);
|
||||||
|
|
||||||
// Re-parse the ast so we get the correct source ranges.
|
// Re-parse the ast so we get the correct source ranges.
|
||||||
let tokens = crate::tokeniser::lexer(&recasted);
|
let tokens = crate::token::lexer(&recasted);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
*program = parser.ast()?;
|
*program = parser.ast()?;
|
||||||
|
|
||||||
|
@ -258,6 +258,23 @@ impl Program {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Replace a value with the new value, use the source range for matching the exact value.
|
||||||
|
pub fn replace_value(&mut self, source_range: SourceRange, new_value: Value) {
|
||||||
|
for item in &mut self.body {
|
||||||
|
match item {
|
||||||
|
BodyItem::ExpressionStatement(ref mut expression_statement) => expression_statement
|
||||||
|
.expression
|
||||||
|
.replace_value(source_range, new_value.clone()),
|
||||||
|
BodyItem::VariableDeclaration(ref mut variable_declaration) => {
|
||||||
|
variable_declaration.replace_value(source_range, new_value.clone())
|
||||||
|
}
|
||||||
|
BodyItem::ReturnStatement(ref mut return_statement) => {
|
||||||
|
return_statement.argument.replace_value(source_range, new_value.clone())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Get the variable declaration with the given name.
|
/// Get the variable declaration with the given name.
|
||||||
pub fn get_variable(&self, name: &str) -> Option<&VariableDeclarator> {
|
pub fn get_variable(&self, name: &str) -> Option<&VariableDeclarator> {
|
||||||
for item in &self.body {
|
for item in &self.body {
|
||||||
@ -393,6 +410,27 @@ impl Value {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn replace_value(&mut self, source_range: SourceRange, new_value: Value) {
|
||||||
|
if source_range == self.clone().into() {
|
||||||
|
*self = new_value;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
match self {
|
||||||
|
Value::BinaryExpression(ref mut bin_exp) => bin_exp.replace_value(source_range, new_value),
|
||||||
|
Value::ArrayExpression(ref mut array_exp) => array_exp.replace_value(source_range, new_value),
|
||||||
|
Value::ObjectExpression(ref mut obj_exp) => obj_exp.replace_value(source_range, new_value),
|
||||||
|
Value::MemberExpression(_) => {}
|
||||||
|
Value::Literal(_) => {}
|
||||||
|
Value::FunctionExpression(ref mut func_exp) => func_exp.replace_value(source_range, new_value),
|
||||||
|
Value::CallExpression(ref mut call_exp) => call_exp.replace_value(source_range, new_value),
|
||||||
|
Value::Identifier(_) => {}
|
||||||
|
Value::PipeExpression(ref mut pipe_exp) => pipe_exp.replace_value(source_range, new_value),
|
||||||
|
Value::UnaryExpression(ref mut unary_exp) => unary_exp.replace_value(source_range, new_value),
|
||||||
|
Value::PipeSubstitution(_) => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn start(&self) -> usize {
|
pub fn start(&self) -> usize {
|
||||||
match self {
|
match self {
|
||||||
Value::Literal(literal) => literal.start(),
|
Value::Literal(literal) => literal.start(),
|
||||||
@ -538,6 +576,23 @@ impl BinaryPart {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn replace_value(&mut self, source_range: SourceRange, new_value: Value) {
|
||||||
|
match self {
|
||||||
|
BinaryPart::Literal(_) => {}
|
||||||
|
BinaryPart::Identifier(_) => {}
|
||||||
|
BinaryPart::BinaryExpression(ref mut binary_expression) => {
|
||||||
|
binary_expression.replace_value(source_range, new_value)
|
||||||
|
}
|
||||||
|
BinaryPart::CallExpression(ref mut call_expression) => {
|
||||||
|
call_expression.replace_value(source_range, new_value)
|
||||||
|
}
|
||||||
|
BinaryPart::UnaryExpression(ref mut unary_expression) => {
|
||||||
|
unary_expression.replace_value(source_range, new_value)
|
||||||
|
}
|
||||||
|
BinaryPart::MemberExpression(_) => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn recast(&self, options: &FormatOptions, indentation_level: usize) -> String {
|
fn recast(&self, options: &FormatOptions, indentation_level: usize) -> String {
|
||||||
match &self {
|
match &self {
|
||||||
BinaryPart::Literal(literal) => literal.recast(),
|
BinaryPart::Literal(literal) => literal.recast(),
|
||||||
@ -801,6 +856,12 @@ impl CallExpression {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn replace_value(&mut self, source_range: SourceRange, new_value: Value) {
|
||||||
|
for arg in &mut self.arguments {
|
||||||
|
arg.replace_value(source_range, new_value.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn recast(&self, options: &FormatOptions, indentation_level: usize, is_in_pipe: bool) -> String {
|
fn recast(&self, options: &FormatOptions, indentation_level: usize, is_in_pipe: bool) -> String {
|
||||||
format!(
|
format!(
|
||||||
"{}({})",
|
"{}({})",
|
||||||
@ -1014,6 +1075,12 @@ impl VariableDeclaration {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn replace_value(&mut self, source_range: SourceRange, new_value: Value) {
|
||||||
|
for declaration in &mut self.declarations {
|
||||||
|
declaration.init.replace_value(source_range, new_value.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Returns a value that includes the given character position.
|
/// Returns a value that includes the given character position.
|
||||||
pub fn get_value_for_position(&self, pos: usize) -> Option<&Value> {
|
pub fn get_value_for_position(&self, pos: usize) -> Option<&Value> {
|
||||||
for declaration in &self.declarations {
|
for declaration in &self.declarations {
|
||||||
@ -1367,6 +1434,12 @@ impl ArrayExpression {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn replace_value(&mut self, source_range: SourceRange, new_value: Value) {
|
||||||
|
for element in &mut self.elements {
|
||||||
|
element.replace_value(source_range, new_value.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn get_constraint_level(&self) -> ConstraintLevel {
|
pub fn get_constraint_level(&self) -> ConstraintLevel {
|
||||||
if self.elements.is_empty() {
|
if self.elements.is_empty() {
|
||||||
return ConstraintLevel::Ignore {
|
return ConstraintLevel::Ignore {
|
||||||
@ -1517,6 +1590,12 @@ impl ObjectExpression {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn replace_value(&mut self, source_range: SourceRange, new_value: Value) {
|
||||||
|
for property in &mut self.properties {
|
||||||
|
property.value.replace_value(source_range, new_value.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn get_constraint_level(&self) -> ConstraintLevel {
|
pub fn get_constraint_level(&self) -> ConstraintLevel {
|
||||||
if self.properties.is_empty() {
|
if self.properties.is_empty() {
|
||||||
return ConstraintLevel::Ignore {
|
return ConstraintLevel::Ignore {
|
||||||
@ -1961,6 +2040,11 @@ impl BinaryExpression {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn replace_value(&mut self, source_range: SourceRange, new_value: Value) {
|
||||||
|
self.left.replace_value(source_range, new_value.clone());
|
||||||
|
self.right.replace_value(source_range, new_value);
|
||||||
|
}
|
||||||
|
|
||||||
pub fn get_constraint_level(&self) -> ConstraintLevel {
|
pub fn get_constraint_level(&self) -> ConstraintLevel {
|
||||||
let left_constraint_level = self.left.get_constraint_level();
|
let left_constraint_level = self.left.get_constraint_level();
|
||||||
let right_constraint_level = self.right.get_constraint_level();
|
let right_constraint_level = self.right.get_constraint_level();
|
||||||
@ -2183,6 +2267,10 @@ impl UnaryExpression {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn replace_value(&mut self, source_range: SourceRange, new_value: Value) {
|
||||||
|
self.argument.replace_value(source_range, new_value);
|
||||||
|
}
|
||||||
|
|
||||||
pub fn get_constraint_level(&self) -> ConstraintLevel {
|
pub fn get_constraint_level(&self) -> ConstraintLevel {
|
||||||
self.argument.get_constraint_level()
|
self.argument.get_constraint_level()
|
||||||
}
|
}
|
||||||
@ -2278,6 +2366,12 @@ impl PipeExpression {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn replace_value(&mut self, source_range: SourceRange, new_value: Value) {
|
||||||
|
for value in &mut self.body {
|
||||||
|
value.replace_value(source_range, new_value.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn get_constraint_level(&self) -> ConstraintLevel {
|
pub fn get_constraint_level(&self) -> ConstraintLevel {
|
||||||
if self.body.is_empty() {
|
if self.body.is_empty() {
|
||||||
return ConstraintLevel::Ignore {
|
return ConstraintLevel::Ignore {
|
||||||
@ -2420,6 +2514,10 @@ impl FunctionExpression {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn replace_value(&mut self, source_range: SourceRange, new_value: Value) {
|
||||||
|
self.body.replace_value(source_range, new_value);
|
||||||
|
}
|
||||||
|
|
||||||
pub fn recast(&self, options: &FormatOptions, indentation_level: usize) -> String {
|
pub fn recast(&self, options: &FormatOptions, indentation_level: usize) -> String {
|
||||||
// We don't want to end with a new line inside nested functions.
|
// We don't want to end with a new line inside nested functions.
|
||||||
let mut new_options = options.clone();
|
let mut new_options = options.clone();
|
||||||
@ -2691,7 +2789,7 @@ fn ghi = (x) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
show(part001)"#;
|
show(part001)"#;
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
let symbols = program.get_lsp_symbols(code);
|
let symbols = program.get_lsp_symbols(code);
|
||||||
@ -2719,7 +2817,7 @@ show(part001)
|
|||||||
let some_program_string = r#"const part001 = startSketchAt([0.0, 5.0])
|
let some_program_string = r#"const part001 = startSketchAt([0.0, 5.0])
|
||||||
|> line([0.4900857016, -0.0240763666], %)
|
|> line([0.4900857016, -0.0240763666], %)
|
||||||
|> line([0.6804562304, 0.9087880491], %)"#;
|
|> line([0.6804562304, 0.9087880491], %)"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
@ -2738,7 +2836,7 @@ show(part001)
|
|||||||
let some_program_string = r#"const part001 = startSketchAt([0.0, 5.0])
|
let some_program_string = r#"const part001 = startSketchAt([0.0, 5.0])
|
||||||
|> line([0.4900857016, -0.0240763666], %) // hello world
|
|> line([0.4900857016, -0.0240763666], %) // hello world
|
||||||
|> line([0.6804562304, 0.9087880491], %)"#;
|
|> line([0.6804562304, 0.9087880491], %)"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
@ -2757,7 +2855,7 @@ show(part001)
|
|||||||
|> line([0.4900857016, -0.0240763666], %)
|
|> line([0.4900857016, -0.0240763666], %)
|
||||||
// hello world
|
// hello world
|
||||||
|> line([0.6804562304, 0.9087880491], %)"#;
|
|> line([0.6804562304, 0.9087880491], %)"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
@ -2783,7 +2881,7 @@ show(part001)
|
|||||||
// this is also a comment
|
// this is also a comment
|
||||||
return things
|
return things
|
||||||
}"#;
|
}"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
@ -2820,7 +2918,7 @@ const mySk1 = startSketchAt([0, 0])
|
|||||||
|> ry(45, %)
|
|> ry(45, %)
|
||||||
|> rx(45, %)
|
|> rx(45, %)
|
||||||
// one more for good measure"#;
|
// one more for good measure"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
@ -2859,7 +2957,7 @@ a comment between pipe expression statements */
|
|||||||
|> line([-0.42, -1.72], %)
|
|> line([-0.42, -1.72], %)
|
||||||
|
|
||||||
show(part001)"#;
|
show(part001)"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
@ -2885,7 +2983,7 @@ const yo = [
|
|||||||
" hey oooooo really long long long"
|
" hey oooooo really long long long"
|
||||||
]
|
]
|
||||||
"#;
|
"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
@ -2903,7 +3001,7 @@ const key = 'c'
|
|||||||
const things = "things"
|
const things = "things"
|
||||||
|
|
||||||
// this is also a comment"#;
|
// this is also a comment"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
@ -2921,7 +3019,7 @@ const things = "things"
|
|||||||
// a comment
|
// a comment
|
||||||
"
|
"
|
||||||
}"#;
|
}"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
@ -2946,7 +3044,7 @@ const part001 = startSketchAt([0, 0])
|
|||||||
-angleToMatchLengthY('seg01', myVar, %),
|
-angleToMatchLengthY('seg01', myVar, %),
|
||||||
myVar
|
myVar
|
||||||
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper"#;
|
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
@ -2972,7 +3070,7 @@ const part001 = startSketchAt([0, 0])
|
|||||||
myVar
|
myVar
|
||||||
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper
|
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper
|
||||||
"#;
|
"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
@ -3003,7 +3101,7 @@ fn ghi = (part001) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
show(part001)"#;
|
show(part001)"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let mut program = parser.ast().unwrap();
|
let mut program = parser.ast().unwrap();
|
||||||
program.rename_symbol("mySuperCoolPart", 6);
|
program.rename_symbol("mySuperCoolPart", 6);
|
||||||
@ -3034,7 +3132,7 @@ show(mySuperCoolPart)
|
|||||||
let some_program_string = r#"fn ghi = (x, y, z) => {
|
let some_program_string = r#"fn ghi = (x, y, z) => {
|
||||||
return x
|
return x
|
||||||
}"#;
|
}"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let mut program = parser.ast().unwrap();
|
let mut program = parser.ast().unwrap();
|
||||||
program.rename_symbol("newName", 10);
|
program.rename_symbol("newName", 10);
|
||||||
@ -3063,7 +3161,7 @@ const firstExtrude = startSketchAt([0,0])
|
|||||||
|> extrude(h, %)
|
|> extrude(h, %)
|
||||||
|
|
||||||
show(firstExtrude)"#;
|
show(firstExtrude)"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
@ -3089,7 +3187,7 @@ show(firstExtrude)
|
|||||||
#[tokio::test(flavor = "multi_thread")]
|
#[tokio::test(flavor = "multi_thread")]
|
||||||
async fn test_recast_math_start_negative() {
|
async fn test_recast_math_start_negative() {
|
||||||
let some_program_string = r#"const myVar = -5 + 6"#;
|
let some_program_string = r#"const myVar = -5 + 6"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
@ -3105,7 +3203,7 @@ const FOS = 2
|
|||||||
const sigmaAllow = 8
|
const sigmaAllow = 8
|
||||||
const width = 20
|
const width = 20
|
||||||
const thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
|
const thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
|
@ -620,6 +620,22 @@ pub async fn execute(
|
|||||||
let result = call_expr.execute(memory, &mut pipe_info, engine).await?;
|
let result = call_expr.execute(memory, &mut pipe_info, engine).await?;
|
||||||
args.push(result);
|
args.push(result);
|
||||||
}
|
}
|
||||||
|
Value::BinaryExpression(binary_expression) => {
|
||||||
|
let result = binary_expression.get_result(memory, &mut pipe_info, engine).await?;
|
||||||
|
args.push(result);
|
||||||
|
}
|
||||||
|
Value::UnaryExpression(unary_expression) => {
|
||||||
|
let result = unary_expression.get_result(memory, &mut pipe_info, engine).await?;
|
||||||
|
args.push(result);
|
||||||
|
}
|
||||||
|
Value::ObjectExpression(object_expression) => {
|
||||||
|
let result = object_expression.execute(memory, &mut pipe_info, engine).await?;
|
||||||
|
args.push(result);
|
||||||
|
}
|
||||||
|
Value::ArrayExpression(array_expression) => {
|
||||||
|
let result = array_expression.execute(memory, &mut pipe_info, engine).await?;
|
||||||
|
args.push(result);
|
||||||
|
}
|
||||||
// We do nothing for the rest.
|
// We do nothing for the rest.
|
||||||
_ => (),
|
_ => (),
|
||||||
}
|
}
|
||||||
@ -679,7 +695,7 @@ pub async fn execute(
|
|||||||
message: format!(
|
message: format!(
|
||||||
"Expected {} arguments, got {}",
|
"Expected {} arguments, got {}",
|
||||||
function_expression.params.len(),
|
function_expression.params.len(),
|
||||||
args.len()
|
args.len(),
|
||||||
),
|
),
|
||||||
source_ranges: vec![(&function_expression).into()],
|
source_ranges: vec![(&function_expression).into()],
|
||||||
}));
|
}));
|
||||||
@ -804,7 +820,7 @@ mod tests {
|
|||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
pub async fn parse_execute(code: &str) -> Result<ProgramMemory> {
|
pub async fn parse_execute(code: &str) -> Result<ProgramMemory> {
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast()?;
|
let program = parser.ast()?;
|
||||||
let mut mem: ProgramMemory = Default::default();
|
let mut mem: ProgramMemory = Default::default();
|
||||||
|
@ -9,4 +9,4 @@ pub mod math_parser;
|
|||||||
pub mod parser;
|
pub mod parser;
|
||||||
pub mod server;
|
pub mod server;
|
||||||
pub mod std;
|
pub mod std;
|
||||||
pub mod tokeniser;
|
pub mod token;
|
||||||
|
@ -10,8 +10,8 @@ use crate::{
|
|||||||
},
|
},
|
||||||
errors::{KclError, KclErrorDetails},
|
errors::{KclError, KclErrorDetails},
|
||||||
executor::SourceRange,
|
executor::SourceRange,
|
||||||
parser::{is_not_code_token, Parser},
|
parser::Parser,
|
||||||
tokeniser::{Token, TokenType},
|
token::{Token, TokenType},
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone, ts_rs::TS)]
|
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone, ts_rs::TS)]
|
||||||
@ -334,7 +334,7 @@ impl ReversePolishNotation {
|
|||||||
return rpn.parse();
|
return rpn.parse();
|
||||||
}
|
}
|
||||||
|
|
||||||
if is_not_code_token(current_token) {
|
if !current_token.is_code_token() {
|
||||||
let rpn = ReversePolishNotation::new(&self.parser.tokens[1..], &self.previous_postfix, &self.operators);
|
let rpn = ReversePolishNotation::new(&self.parser.tokens[1..], &self.previous_postfix, &self.operators);
|
||||||
return rpn.parse();
|
return rpn.parse();
|
||||||
}
|
}
|
||||||
@ -704,7 +704,7 @@ mod test {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_expression() {
|
fn test_parse_expression() {
|
||||||
let tokens = crate::tokeniser::lexer("1 + 2");
|
let tokens = crate::token::lexer("1 + 2");
|
||||||
let mut parser = MathParser::new(&tokens);
|
let mut parser = MathParser::new(&tokens);
|
||||||
let result = parser.parse().unwrap();
|
let result = parser.parse().unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -731,7 +731,7 @@ mod test {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_expression_add_no_spaces() {
|
fn test_parse_expression_add_no_spaces() {
|
||||||
let tokens = crate::tokeniser::lexer("1+2");
|
let tokens = crate::token::lexer("1+2");
|
||||||
let mut parser = MathParser::new(&tokens);
|
let mut parser = MathParser::new(&tokens);
|
||||||
let result = parser.parse().unwrap();
|
let result = parser.parse().unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -758,7 +758,7 @@ mod test {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_expression_sub_no_spaces() {
|
fn test_parse_expression_sub_no_spaces() {
|
||||||
let tokens = crate::tokeniser::lexer("1 -2");
|
let tokens = crate::token::lexer("1 -2");
|
||||||
let mut parser = MathParser::new(&tokens);
|
let mut parser = MathParser::new(&tokens);
|
||||||
let result = parser.parse().unwrap();
|
let result = parser.parse().unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -785,7 +785,7 @@ mod test {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_expression_plus_followed_by_star() {
|
fn test_parse_expression_plus_followed_by_star() {
|
||||||
let tokens = crate::tokeniser::lexer("1 + 2 * 3");
|
let tokens = crate::token::lexer("1 + 2 * 3");
|
||||||
let mut parser = MathParser::new(&tokens);
|
let mut parser = MathParser::new(&tokens);
|
||||||
let result = parser.parse().unwrap();
|
let result = parser.parse().unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -823,7 +823,7 @@ mod test {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_expression_with_parentheses() {
|
fn test_parse_expression_with_parentheses() {
|
||||||
let tokens = crate::tokeniser::lexer("1 * ( 2 + 3 )");
|
let tokens = crate::token::lexer("1 * ( 2 + 3 )");
|
||||||
let mut parser = MathParser::new(&tokens);
|
let mut parser = MathParser::new(&tokens);
|
||||||
let result = parser.parse().unwrap();
|
let result = parser.parse().unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -861,7 +861,7 @@ mod test {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_expression_parens_in_middle() {
|
fn test_parse_expression_parens_in_middle() {
|
||||||
let tokens = crate::tokeniser::lexer("1 * ( 2 + 3 ) / 4");
|
let tokens = crate::token::lexer("1 * ( 2 + 3 ) / 4");
|
||||||
let mut parser = MathParser::new(&tokens);
|
let mut parser = MathParser::new(&tokens);
|
||||||
let result = parser.parse().unwrap();
|
let result = parser.parse().unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -910,7 +910,7 @@ mod test {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_expression_parans_and_predence() {
|
fn test_parse_expression_parans_and_predence() {
|
||||||
let tokens = crate::tokeniser::lexer("1 + ( 2 + 3 ) / 4");
|
let tokens = crate::token::lexer("1 + ( 2 + 3 ) / 4");
|
||||||
let mut parser = MathParser::new(&tokens);
|
let mut parser = MathParser::new(&tokens);
|
||||||
let result = parser.parse().unwrap();
|
let result = parser.parse().unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -958,7 +958,7 @@ mod test {
|
|||||||
}
|
}
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_expression_nested() {
|
fn test_parse_expression_nested() {
|
||||||
let tokens = crate::tokeniser::lexer("1 * (( 2 + 3 ) / 4 + 5 )");
|
let tokens = crate::token::lexer("1 * (( 2 + 3 ) / 4 + 5 )");
|
||||||
let mut parser = MathParser::new(&tokens);
|
let mut parser = MathParser::new(&tokens);
|
||||||
let result = parser.parse().unwrap();
|
let result = parser.parse().unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -1017,7 +1017,7 @@ mod test {
|
|||||||
}
|
}
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_expression_redundant_braces() {
|
fn test_parse_expression_redundant_braces() {
|
||||||
let tokens = crate::tokeniser::lexer("1 * ((( 2 + 3 )))");
|
let tokens = crate::token::lexer("1 * ((( 2 + 3 )))");
|
||||||
let mut parser = MathParser::new(&tokens);
|
let mut parser = MathParser::new(&tokens);
|
||||||
let result = parser.parse().unwrap();
|
let result = parser.parse().unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -1055,7 +1055,7 @@ mod test {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_reverse_polish_notation_simple() {
|
fn test_reverse_polish_notation_simple() {
|
||||||
let parser = ReversePolishNotation::new(&crate::tokeniser::lexer("1 + 2"), &[], &[]);
|
let parser = ReversePolishNotation::new(&crate::token::lexer("1 + 2"), &[], &[]);
|
||||||
let result = parser.parse().unwrap();
|
let result = parser.parse().unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result,
|
result,
|
||||||
@ -1084,7 +1084,7 @@ mod test {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_reverse_polish_notation_complex() {
|
fn test_reverse_polish_notation_complex() {
|
||||||
let parser = ReversePolishNotation::new(&crate::tokeniser::lexer("1 + 2 * 3"), &[], &[]);
|
let parser = ReversePolishNotation::new(&crate::token::lexer("1 + 2 * 3"), &[], &[]);
|
||||||
let result = parser.parse().unwrap();
|
let result = parser.parse().unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result,
|
result,
|
||||||
@ -1125,7 +1125,7 @@ mod test {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_reverse_polish_notation_complex_with_parentheses() {
|
fn test_reverse_polish_notation_complex_with_parentheses() {
|
||||||
let parser = ReversePolishNotation::new(&crate::tokeniser::lexer("1 * ( 2 + 3 )"), &[], &[]);
|
let parser = ReversePolishNotation::new(&crate::token::lexer("1 * ( 2 + 3 )"), &[], &[]);
|
||||||
let result = parser.parse().unwrap();
|
let result = parser.parse().unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result,
|
result,
|
||||||
@ -1179,7 +1179,7 @@ mod test {
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_parse_expression_redundant_braces_around_literal() {
|
fn test_parse_expression_redundant_braces_around_literal() {
|
||||||
let code = "2 + (((3)))";
|
let code = "2 + (((3)))";
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let mut parser = MathParser::new(&tokens);
|
let mut parser = MathParser::new(&tokens);
|
||||||
let result = parser.parse().unwrap();
|
let result = parser.parse().unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -1274,7 +1274,7 @@ mod test {
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_parse_expression_braces_around_lots_of_math() {
|
fn test_parse_expression_braces_around_lots_of_math() {
|
||||||
let code = "(distance * p * FOS * 6 / (sigmaAllow * width))";
|
let code = "(distance * p * FOS * 6 / (sigmaAllow * width))";
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let mut parser = MathParser::new(&tokens);
|
let mut parser = MathParser::new(&tokens);
|
||||||
let result = parser.parse();
|
let result = parser.parse();
|
||||||
assert!(result.is_ok());
|
assert!(result.is_ok());
|
||||||
@ -1283,7 +1283,7 @@ mod test {
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_parse_expression_braces_around_internals_lots_of_math() {
|
fn test_parse_expression_braces_around_internals_lots_of_math() {
|
||||||
let code = "distance * p * FOS * 6 / (sigmaAllow * width)";
|
let code = "distance * p * FOS * 6 / (sigmaAllow * width)";
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let mut parser = MathParser::new(&tokens);
|
let mut parser = MathParser::new(&tokens);
|
||||||
let result = parser.parse();
|
let result = parser.parse();
|
||||||
assert!(result.is_ok());
|
assert!(result.is_ok());
|
||||||
|
@ -10,7 +10,7 @@ use crate::{
|
|||||||
},
|
},
|
||||||
errors::{KclError, KclErrorDetails},
|
errors::{KclError, KclErrorDetails},
|
||||||
math_parser::MathParser,
|
math_parser::MathParser,
|
||||||
tokeniser::{Token, TokenType},
|
token::{Token, TokenType},
|
||||||
};
|
};
|
||||||
|
|
||||||
pub const PIPE_SUBSTITUTION_OPERATOR: &str = "%";
|
pub const PIPE_SUBSTITUTION_OPERATOR: &str = "%";
|
||||||
@ -249,7 +249,7 @@ impl Parser {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let current_token = self.get_token(index)?;
|
let current_token = self.get_token(index)?;
|
||||||
if is_not_code_token(current_token) {
|
if !current_token.is_code_token() {
|
||||||
return self.find_end_of_non_code_node(index + 1);
|
return self.find_end_of_non_code_node(index + 1);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -262,7 +262,7 @@ impl Parser {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let current_token = self.get_token(index)?;
|
let current_token = self.get_token(index)?;
|
||||||
if is_not_code_token(current_token) {
|
if !current_token.is_code_token() {
|
||||||
return self.find_start_of_non_code_node(index - 1);
|
return self.find_start_of_non_code_node(index - 1);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -365,7 +365,7 @@ impl Parser {
|
|||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
if is_not_code_token(token) {
|
if !token.is_code_token() {
|
||||||
let non_code_node = self.make_non_code_node(new_index)?;
|
let non_code_node = self.make_non_code_node(new_index)?;
|
||||||
let new_new_index = non_code_node.1 + 1;
|
let new_new_index = non_code_node.1 + 1;
|
||||||
let bonus_non_code_node = non_code_node.0;
|
let bonus_non_code_node = non_code_node.0;
|
||||||
@ -1283,7 +1283,11 @@ impl Parser {
|
|||||||
let end_token = self.get_token(pipe_body_result.last_index)?;
|
let end_token = self.get_token(pipe_body_result.last_index)?;
|
||||||
Ok(PipeExpressionResult {
|
Ok(PipeExpressionResult {
|
||||||
expression: PipeExpression {
|
expression: PipeExpression {
|
||||||
start: current_token.start,
|
start: pipe_body_result
|
||||||
|
.body
|
||||||
|
.first()
|
||||||
|
.map(|v| v.start())
|
||||||
|
.unwrap_or(current_token.start),
|
||||||
end: end_token.end,
|
end: end_token.end,
|
||||||
body: pipe_body_result.body,
|
body: pipe_body_result.body,
|
||||||
non_code_meta: pipe_body_result.non_code_meta,
|
non_code_meta: pipe_body_result.non_code_meta,
|
||||||
@ -1623,7 +1627,7 @@ impl Parser {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
if is_not_code_token(token) {
|
if !token.is_code_token() {
|
||||||
let next_token = self.next_meaningful_token(token_index, Some(0))?;
|
let next_token = self.next_meaningful_token(token_index, Some(0))?;
|
||||||
if let Some(node) = &next_token.non_code_node {
|
if let Some(node) = &next_token.non_code_node {
|
||||||
if previous_body.is_empty() {
|
if previous_body.is_empty() {
|
||||||
@ -1788,12 +1792,6 @@ impl Parser {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_not_code_token(token: &Token) -> bool {
|
|
||||||
token.token_type == TokenType::Whitespace
|
|
||||||
|| token.token_type == TokenType::LineComment
|
|
||||||
|| token.token_type == TokenType::BlockComment
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use pretty_assertions::assert_eq;
|
use pretty_assertions::assert_eq;
|
||||||
@ -1803,7 +1801,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_make_identifier() {
|
fn test_make_identifier() {
|
||||||
let tokens = crate::tokeniser::lexer("a");
|
let tokens = crate::token::lexer("a");
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let identifier = parser.make_identifier(0).unwrap();
|
let identifier = parser.make_identifier(0).unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -1818,7 +1816,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_make_identifier_with_const_myvar_equals_5_and_index_2() {
|
fn test_make_identifier_with_const_myvar_equals_5_and_index_2() {
|
||||||
let tokens = crate::tokeniser::lexer("const myVar = 5");
|
let tokens = crate::token::lexer("const myVar = 5");
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let identifier = parser.make_identifier(2).unwrap();
|
let identifier = parser.make_identifier(2).unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -1833,7 +1831,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_make_identifier_multiline() {
|
fn test_make_identifier_multiline() {
|
||||||
let tokens = crate::tokeniser::lexer("const myVar = 5\nconst newVar = myVar + 1");
|
let tokens = crate::token::lexer("const myVar = 5\nconst newVar = myVar + 1");
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let identifier = parser.make_identifier(2).unwrap();
|
let identifier = parser.make_identifier(2).unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -1857,7 +1855,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_make_identifier_call_expression() {
|
fn test_make_identifier_call_expression() {
|
||||||
let tokens = crate::tokeniser::lexer("log(5, \"hello\", aIdentifier)");
|
let tokens = crate::token::lexer("log(5, \"hello\", aIdentifier)");
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let identifier = parser.make_identifier(0).unwrap();
|
let identifier = parser.make_identifier(0).unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -1880,7 +1878,7 @@ mod tests {
|
|||||||
}
|
}
|
||||||
#[test]
|
#[test]
|
||||||
fn test_make_non_code_node() {
|
fn test_make_non_code_node() {
|
||||||
let tokens = crate::tokeniser::lexer("log(5, \"hello\", aIdentifier)");
|
let tokens = crate::token::lexer("log(5, \"hello\", aIdentifier)");
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let index = 4;
|
let index = 4;
|
||||||
let expected_output = (None, 4);
|
let expected_output = (None, 4);
|
||||||
@ -1889,7 +1887,7 @@ mod tests {
|
|||||||
let index = 7;
|
let index = 7;
|
||||||
let expected_output = (None, 7);
|
let expected_output = (None, 7);
|
||||||
assert_eq!(parser.make_non_code_node(index).unwrap(), expected_output);
|
assert_eq!(parser.make_non_code_node(index).unwrap(), expected_output);
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"
|
r#"
|
||||||
const yo = { a: { b: { c: '123' } } }
|
const yo = { a: { b: { c: '123' } } }
|
||||||
// this is a comment
|
// this is a comment
|
||||||
@ -1920,7 +1918,7 @@ const key = 'c'"#,
|
|||||||
31,
|
31,
|
||||||
);
|
);
|
||||||
assert_eq!(parser.make_non_code_node(index).unwrap(), expected_output);
|
assert_eq!(parser.make_non_code_node(index).unwrap(), expected_output);
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"const mySketch = startSketchAt([0,0])
|
r#"const mySketch = startSketchAt([0,0])
|
||||||
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|
||||||
|> lineTo([1, 1], %) /* this is
|
|> lineTo([1, 1], %) /* this is
|
||||||
@ -1946,7 +1944,7 @@ const key = 'c'"#,
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_collect_object_keys() {
|
fn test_collect_object_keys() {
|
||||||
let tokens = crate::tokeniser::lexer("const prop = yo.one[\"two\"]");
|
let tokens = crate::token::lexer("const prop = yo.one[\"two\"]");
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let keys_info = parser.collect_object_keys(6, None, false).unwrap();
|
let keys_info = parser.collect_object_keys(6, None, false).unwrap();
|
||||||
assert_eq!(keys_info.len(), 2);
|
assert_eq!(keys_info.len(), 2);
|
||||||
@ -1966,7 +1964,7 @@ const key = 'c'"#,
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_make_literal_call_expression() {
|
fn test_make_literal_call_expression() {
|
||||||
let tokens = crate::tokeniser::lexer("log(5, \"hello\", aIdentifier)");
|
let tokens = crate::token::lexer("log(5, \"hello\", aIdentifier)");
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let literal = parser.make_literal(2).unwrap();
|
let literal = parser.make_literal(2).unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -1990,74 +1988,88 @@ const key = 'c'"#,
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_is_code_token() {
|
||||||
|
let tokens = [
|
||||||
|
Token {
|
||||||
|
token_type: TokenType::Word,
|
||||||
|
start: 0,
|
||||||
|
end: 3,
|
||||||
|
value: "log".to_string(),
|
||||||
|
},
|
||||||
|
Token {
|
||||||
|
token_type: TokenType::Brace,
|
||||||
|
start: 3,
|
||||||
|
end: 4,
|
||||||
|
value: "(".to_string(),
|
||||||
|
},
|
||||||
|
Token {
|
||||||
|
token_type: TokenType::Number,
|
||||||
|
start: 4,
|
||||||
|
end: 5,
|
||||||
|
value: "5".to_string(),
|
||||||
|
},
|
||||||
|
Token {
|
||||||
|
token_type: TokenType::Comma,
|
||||||
|
start: 5,
|
||||||
|
end: 6,
|
||||||
|
value: ",".to_string(),
|
||||||
|
},
|
||||||
|
Token {
|
||||||
|
token_type: TokenType::String,
|
||||||
|
start: 7,
|
||||||
|
end: 14,
|
||||||
|
value: "\"hello\"".to_string(),
|
||||||
|
},
|
||||||
|
Token {
|
||||||
|
token_type: TokenType::Word,
|
||||||
|
start: 16,
|
||||||
|
end: 27,
|
||||||
|
value: "aIdentifier".to_string(),
|
||||||
|
},
|
||||||
|
Token {
|
||||||
|
token_type: TokenType::Brace,
|
||||||
|
start: 27,
|
||||||
|
end: 28,
|
||||||
|
value: ")".to_string(),
|
||||||
|
},
|
||||||
|
];
|
||||||
|
for (i, token) in tokens.iter().enumerate() {
|
||||||
|
assert!(token.is_code_token(), "failed test {i}: {token:?}")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_is_not_code_token() {
|
fn test_is_not_code_token() {
|
||||||
assert!(!is_not_code_token(&Token {
|
let tokens = [
|
||||||
token_type: TokenType::Word,
|
Token {
|
||||||
start: 0,
|
token_type: TokenType::Whitespace,
|
||||||
end: 3,
|
start: 6,
|
||||||
value: "log".to_string(),
|
end: 7,
|
||||||
}));
|
value: " ".to_string(),
|
||||||
assert!(!is_not_code_token(&Token {
|
},
|
||||||
token_type: TokenType::Brace,
|
Token {
|
||||||
start: 3,
|
token_type: TokenType::BlockComment,
|
||||||
end: 4,
|
start: 28,
|
||||||
value: "(".to_string(),
|
end: 30,
|
||||||
}));
|
value: "/* abte */".to_string(),
|
||||||
assert!(!is_not_code_token(&Token {
|
},
|
||||||
token_type: TokenType::Number,
|
Token {
|
||||||
start: 4,
|
token_type: TokenType::LineComment,
|
||||||
end: 5,
|
start: 30,
|
||||||
value: "5".to_string(),
|
end: 33,
|
||||||
}));
|
value: "// yoyo a line".to_string(),
|
||||||
assert!(!is_not_code_token(&Token {
|
},
|
||||||
token_type: TokenType::Comma,
|
];
|
||||||
start: 5,
|
for (i, token) in tokens.iter().enumerate() {
|
||||||
end: 6,
|
assert!(!token.is_code_token(), "failed test {i}: {token:?}")
|
||||||
value: ",".to_string(),
|
}
|
||||||
}));
|
|
||||||
assert!(is_not_code_token(&Token {
|
|
||||||
token_type: TokenType::Whitespace,
|
|
||||||
start: 6,
|
|
||||||
end: 7,
|
|
||||||
value: " ".to_string(),
|
|
||||||
}));
|
|
||||||
assert!(!is_not_code_token(&Token {
|
|
||||||
token_type: TokenType::String,
|
|
||||||
start: 7,
|
|
||||||
end: 14,
|
|
||||||
value: "\"hello\"".to_string(),
|
|
||||||
}));
|
|
||||||
assert!(!is_not_code_token(&Token {
|
|
||||||
token_type: TokenType::Word,
|
|
||||||
start: 16,
|
|
||||||
end: 27,
|
|
||||||
value: "aIdentifier".to_string(),
|
|
||||||
}));
|
|
||||||
assert!(!is_not_code_token(&Token {
|
|
||||||
token_type: TokenType::Brace,
|
|
||||||
start: 27,
|
|
||||||
end: 28,
|
|
||||||
value: ")".to_string(),
|
|
||||||
}));
|
|
||||||
assert!(is_not_code_token(&Token {
|
|
||||||
token_type: TokenType::BlockComment,
|
|
||||||
start: 28,
|
|
||||||
end: 30,
|
|
||||||
value: "/* abte */".to_string(),
|
|
||||||
}));
|
|
||||||
assert!(is_not_code_token(&Token {
|
|
||||||
token_type: TokenType::LineComment,
|
|
||||||
start: 30,
|
|
||||||
end: 33,
|
|
||||||
value: "// yoyo a line".to_string(),
|
|
||||||
}));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_next_meaningful_token() {
|
fn test_next_meaningful_token() {
|
||||||
let _offset = 1;
|
let _offset = 1;
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"const mySketch = startSketchAt([0,0])
|
r#"const mySketch = startSketchAt([0,0])
|
||||||
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|
||||||
|> lineTo([1, 1], %) /* this is
|
|> lineTo([1, 1], %) /* this is
|
||||||
@ -2443,7 +2455,7 @@ const key = 'c'"#,
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_find_closing_brace() {
|
fn test_find_closing_brace() {
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"const mySketch = startSketchAt([0,0])
|
r#"const mySketch = startSketchAt([0,0])
|
||||||
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|
||||||
|> lineTo([1, 1], %) /* this is
|
|> lineTo([1, 1], %) /* this is
|
||||||
@ -2460,16 +2472,16 @@ const key = 'c'"#,
|
|||||||
assert_eq!(parser.find_closing_brace(90, 0, "").unwrap(), 92);
|
assert_eq!(parser.find_closing_brace(90, 0, "").unwrap(), 92);
|
||||||
|
|
||||||
let basic = "( hey )";
|
let basic = "( hey )";
|
||||||
let parser = Parser::new(crate::tokeniser::lexer(basic));
|
let parser = Parser::new(crate::token::lexer(basic));
|
||||||
assert_eq!(parser.find_closing_brace(0, 0, "").unwrap(), 4);
|
assert_eq!(parser.find_closing_brace(0, 0, "").unwrap(), 4);
|
||||||
|
|
||||||
let handles_non_zero_index = "(indexForBracketToRightOfThisIsTwo(shouldBeFour)AndNotThisSix)";
|
let handles_non_zero_index = "(indexForBracketToRightOfThisIsTwo(shouldBeFour)AndNotThisSix)";
|
||||||
let parser = Parser::new(crate::tokeniser::lexer(handles_non_zero_index));
|
let parser = Parser::new(crate::token::lexer(handles_non_zero_index));
|
||||||
assert_eq!(parser.find_closing_brace(2, 0, "").unwrap(), 4);
|
assert_eq!(parser.find_closing_brace(2, 0, "").unwrap(), 4);
|
||||||
assert_eq!(parser.find_closing_brace(0, 0, "").unwrap(), 6);
|
assert_eq!(parser.find_closing_brace(0, 0, "").unwrap(), 6);
|
||||||
|
|
||||||
let handles_nested = "{a{b{c(}d]}eathou athoeu tah u} thatOneToTheLeftIsLast }";
|
let handles_nested = "{a{b{c(}d]}eathou athoeu tah u} thatOneToTheLeftIsLast }";
|
||||||
let parser = Parser::new(crate::tokeniser::lexer(handles_nested));
|
let parser = Parser::new(crate::token::lexer(handles_nested));
|
||||||
assert_eq!(parser.find_closing_brace(0, 0, "").unwrap(), 18);
|
assert_eq!(parser.find_closing_brace(0, 0, "").unwrap(), 18);
|
||||||
|
|
||||||
// TODO expect error when not started on a brace
|
// TODO expect error when not started on a brace
|
||||||
@ -2477,7 +2489,7 @@ const key = 'c'"#,
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_is_call_expression() {
|
fn test_is_call_expression() {
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"const mySketch = startSketchAt([0,0])
|
r#"const mySketch = startSketchAt([0,0])
|
||||||
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|
||||||
|> lineTo([1, 1], %) /* this is
|
|> lineTo([1, 1], %) /* this is
|
||||||
@ -2498,7 +2510,7 @@ const key = 'c'"#,
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_find_next_declaration_keyword() {
|
fn test_find_next_declaration_keyword() {
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"const mySketch = startSketchAt([0,0])
|
r#"const mySketch = startSketchAt([0,0])
|
||||||
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|
||||||
|> lineTo([1, 1], %) /* this is
|
|> lineTo([1, 1], %) /* this is
|
||||||
@ -2513,7 +2525,7 @@ const key = 'c'"#,
|
|||||||
TokenReturn { token: None, index: 92 }
|
TokenReturn { token: None, index: 92 }
|
||||||
);
|
);
|
||||||
|
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"const myVar = 5
|
r#"const myVar = 5
|
||||||
const newVar = myVar + 1
|
const newVar = myVar + 1
|
||||||
"#,
|
"#,
|
||||||
@ -2543,7 +2555,7 @@ const newVar = myVar + 1
|
|||||||
lineTo(2, 3)
|
lineTo(2, 3)
|
||||||
} |> rx(45, %)
|
} |> rx(45, %)
|
||||||
"#;
|
"#;
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
parser.has_pipe_operator(0, None).unwrap(),
|
parser.has_pipe_operator(0, None).unwrap(),
|
||||||
@ -2562,7 +2574,7 @@ const newVar = myVar + 1
|
|||||||
lineTo(2, 3)
|
lineTo(2, 3)
|
||||||
} |> rx(45, %) |> rx(45, %)
|
} |> rx(45, %) |> rx(45, %)
|
||||||
"#;
|
"#;
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
parser.has_pipe_operator(0, None).unwrap(),
|
parser.has_pipe_operator(0, None).unwrap(),
|
||||||
@ -2584,7 +2596,7 @@ const newVar = myVar + 1
|
|||||||
const yo = myFunc(9()
|
const yo = myFunc(9()
|
||||||
|> rx(45, %)
|
|> rx(45, %)
|
||||||
"#;
|
"#;
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
parser.has_pipe_operator(0, None).unwrap(),
|
parser.has_pipe_operator(0, None).unwrap(),
|
||||||
@ -2596,7 +2608,7 @@ const yo = myFunc(9()
|
|||||||
);
|
);
|
||||||
|
|
||||||
let code = "const myVar2 = 5 + 1 |> myFn(%)";
|
let code = "const myVar2 = 5 + 1 |> myFn(%)";
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
parser.has_pipe_operator(1, None).unwrap(),
|
parser.has_pipe_operator(1, None).unwrap(),
|
||||||
@ -2618,7 +2630,7 @@ const yo = myFunc(9()
|
|||||||
lineTo(1,1)
|
lineTo(1,1)
|
||||||
} |> rx(90, %)
|
} |> rx(90, %)
|
||||||
show(mySk1)"#;
|
show(mySk1)"#;
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let parser = Parser::new(tokens.clone());
|
let parser = Parser::new(tokens.clone());
|
||||||
let token_with_my_path_index = tokens.iter().position(|token| token.value == "myPath").unwrap();
|
let token_with_my_path_index = tokens.iter().position(|token| token.value == "myPath").unwrap();
|
||||||
// loop through getting the token and it's index
|
// loop through getting the token and it's index
|
||||||
@ -2658,7 +2670,7 @@ show(mySk1)"#;
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_make_member_expression() {
|
fn test_make_member_expression() {
|
||||||
let tokens = crate::tokeniser::lexer("const prop = yo.one[\"two\"]");
|
let tokens = crate::token::lexer("const prop = yo.one[\"two\"]");
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let member_expression_return = parser.make_member_expression(6).unwrap();
|
let member_expression_return = parser.make_member_expression(6).unwrap();
|
||||||
let member_expression = member_expression_return.expression;
|
let member_expression = member_expression_return.expression;
|
||||||
@ -2700,63 +2712,63 @@ show(mySk1)"#;
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_find_end_of_binary_expression() {
|
fn test_find_end_of_binary_expression() {
|
||||||
let code = "1 + 2 * 3\nconst yo = 5";
|
let code = "1 + 2 * 3\nconst yo = 5";
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let parser = Parser::new(tokens.clone());
|
let parser = Parser::new(tokens.clone());
|
||||||
let end = parser.find_end_of_binary_expression(0).unwrap();
|
let end = parser.find_end_of_binary_expression(0).unwrap();
|
||||||
assert_eq!(tokens[end].value, "3");
|
assert_eq!(tokens[end].value, "3");
|
||||||
|
|
||||||
let code = "(1 + 25) / 5 - 3\nconst yo = 5";
|
let code = "(1 + 25) / 5 - 3\nconst yo = 5";
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let parser = Parser::new(tokens.clone());
|
let parser = Parser::new(tokens.clone());
|
||||||
let end = parser.find_end_of_binary_expression(0).unwrap();
|
let end = parser.find_end_of_binary_expression(0).unwrap();
|
||||||
assert_eq!(tokens[end].value, "3");
|
assert_eq!(tokens[end].value, "3");
|
||||||
let index_of_5 = code.find('5').unwrap();
|
let index_of_5 = code.find('5').unwrap();
|
||||||
let end_starting_at_the_5 = parser.find_end_of_binary_expression(index_of_5).unwrap();
|
let end_starting_at_the_5 = parser.find_end_of_binary_expression(index_of_5).unwrap();
|
||||||
assert_eq!(end_starting_at_the_5, end);
|
assert_eq!(end_starting_at_the_5, end);
|
||||||
// whole thing wraped
|
// whole thing wrapped
|
||||||
let code = "((1 + 2) / 5 - 3)\nconst yo = 5";
|
let code = "((1 + 2) / 5 - 3)\nconst yo = 5";
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let parser = Parser::new(tokens.clone());
|
let parser = Parser::new(tokens.clone());
|
||||||
let end = parser.find_end_of_binary_expression(0).unwrap();
|
let end = parser.find_end_of_binary_expression(0).unwrap();
|
||||||
assert_eq!(tokens[end].end, code.find("3)").unwrap() + 2);
|
assert_eq!(tokens[end].end, code.find("3)").unwrap() + 2);
|
||||||
// whole thing wraped but given index after the first brace
|
// whole thing wrapped but given index after the first brace
|
||||||
let code = "((1 + 2) / 5 - 3)\nconst yo = 5";
|
let code = "((1 + 2) / 5 - 3)\nconst yo = 5";
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let parser = Parser::new(tokens.clone());
|
let parser = Parser::new(tokens.clone());
|
||||||
let end = parser.find_end_of_binary_expression(1).unwrap();
|
let end = parser.find_end_of_binary_expression(1).unwrap();
|
||||||
assert_eq!(tokens[end].value, "3");
|
assert_eq!(tokens[end].value, "3");
|
||||||
// given the index of a small wrapped section i.e. `1 + 2` in ((1 + 2) / 5 - 3)'
|
// given the index of a small wrapped section i.e. `1 + 2` in ((1 + 2) / 5 - 3)'
|
||||||
let code = "((1 + 2) / 5 - 3)\nconst yo = 5";
|
let code = "((1 + 2) / 5 - 3)\nconst yo = 5";
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let parser = Parser::new(tokens.clone());
|
let parser = Parser::new(tokens.clone());
|
||||||
let end = parser.find_end_of_binary_expression(2).unwrap();
|
let end = parser.find_end_of_binary_expression(2).unwrap();
|
||||||
assert_eq!(tokens[end].value, "2");
|
assert_eq!(tokens[end].value, "2");
|
||||||
// lots of silly nesting
|
// lots of silly nesting
|
||||||
let code = "(1 + 2) / (5 - (3))\nconst yo = 5";
|
let code = "(1 + 2) / (5 - (3))\nconst yo = 5";
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let parser = Parser::new(tokens.clone());
|
let parser = Parser::new(tokens.clone());
|
||||||
let end = parser.find_end_of_binary_expression(0).unwrap();
|
let end = parser.find_end_of_binary_expression(0).unwrap();
|
||||||
assert_eq!(tokens[end].end, code.find("))").unwrap() + 2);
|
assert_eq!(tokens[end].end, code.find("))").unwrap() + 2);
|
||||||
// with pipe operator at the end
|
// with pipe operator at the end
|
||||||
let code = "(1 + 2) / (5 - (3))\n |> fn(%)";
|
let code = "(1 + 2) / (5 - (3))\n |> fn(%)";
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let parser = Parser::new(tokens.clone());
|
let parser = Parser::new(tokens.clone());
|
||||||
let end = parser.find_end_of_binary_expression(0).unwrap();
|
let end = parser.find_end_of_binary_expression(0).unwrap();
|
||||||
assert_eq!(tokens[end].end, code.find("))").unwrap() + 2);
|
assert_eq!(tokens[end].end, code.find("))").unwrap() + 2);
|
||||||
// with call expression at the start of binary expression
|
// with call expression at the start of binary expression
|
||||||
let code = "yo(2) + 3\n |> fn(%)";
|
let code = "yo(2) + 3\n |> fn(%)";
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let parser = Parser::new(tokens.clone());
|
let parser = Parser::new(tokens.clone());
|
||||||
let end = parser.find_end_of_binary_expression(0).unwrap();
|
let end = parser.find_end_of_binary_expression(0).unwrap();
|
||||||
assert_eq!(tokens[end].value, "3");
|
assert_eq!(tokens[end].value, "3");
|
||||||
// with call expression at the end of binary expression
|
// with call expression at the end of binary expression
|
||||||
let code = "3 + yo(2)\n |> fn(%)";
|
let code = "3 + yo(2)\n |> fn(%)";
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let _end = parser.find_end_of_binary_expression(0).unwrap();
|
let _end = parser.find_end_of_binary_expression(0).unwrap();
|
||||||
// with call expression at the end of binary expression
|
// with call expression at the end of binary expression
|
||||||
let code = "-legX + 2, ";
|
let code = "-legX + 2, ";
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let parser = Parser::new(tokens.clone());
|
let parser = Parser::new(tokens.clone());
|
||||||
let end = parser.find_end_of_binary_expression(0).unwrap();
|
let end = parser.find_end_of_binary_expression(0).unwrap();
|
||||||
assert_eq!(tokens[end].value, "2");
|
assert_eq!(tokens[end].value, "2");
|
||||||
@ -2765,7 +2777,7 @@ show(mySk1)"#;
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_make_array_expression() {
|
fn test_make_array_expression() {
|
||||||
// input_index: 6, output_index: 14, output: {"type":"ArrayExpression","start":11,"end":26,"elements":[{"type":"Literal","start":12,"end":15,"value":"1","raw":"\"1\""},{"type":"Literal","start":17,"end":18,"value":2,"raw":"2"},{"type":"Identifier","start":20,"end":25,"name":"three"}]}
|
// input_index: 6, output_index: 14, output: {"type":"ArrayExpression","start":11,"end":26,"elements":[{"type":"Literal","start":12,"end":15,"value":"1","raw":"\"1\""},{"type":"Literal","start":17,"end":18,"value":2,"raw":"2"},{"type":"Identifier","start":20,"end":25,"name":"three"}]}
|
||||||
let tokens = crate::tokeniser::lexer("const yo = [\"1\", 2, three]");
|
let tokens = crate::token::lexer("const yo = [\"1\", 2, three]");
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let array_expression = parser.make_array_expression(6).unwrap();
|
let array_expression = parser.make_array_expression(6).unwrap();
|
||||||
let expression = array_expression.expression;
|
let expression = array_expression.expression;
|
||||||
@ -2804,7 +2816,7 @@ show(mySk1)"#;
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_make_call_expression() {
|
fn test_make_call_expression() {
|
||||||
let tokens = crate::tokeniser::lexer("foo(\"a\", a, 3)");
|
let tokens = crate::token::lexer("foo(\"a\", a, 3)");
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let result = parser.make_call_expression(0).unwrap();
|
let result = parser.make_call_expression(0).unwrap();
|
||||||
assert_eq!(result.last_index, 9);
|
assert_eq!(result.last_index, 9);
|
||||||
@ -2838,7 +2850,7 @@ show(mySk1)"#;
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_make_variable_declaration() {
|
fn test_make_variable_declaration() {
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"const yo = startSketch([0, 0])
|
r#"const yo = startSketch([0, 0])
|
||||||
|> lineTo([1, myVar], %)
|
|> lineTo([1, myVar], %)
|
||||||
|> foo(myVar2, %)
|
|> foo(myVar2, %)
|
||||||
@ -2908,7 +2920,7 @@ show(mySk1)"#;
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_make_body() {
|
fn test_make_body() {
|
||||||
let tokens = crate::tokeniser::lexer("const myVar = 5");
|
let tokens = crate::token::lexer("const myVar = 5");
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let body = parser
|
let body = parser
|
||||||
.make_body(
|
.make_body(
|
||||||
@ -2926,7 +2938,7 @@ show(mySk1)"#;
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_abstract_syntax_tree() {
|
fn test_abstract_syntax_tree() {
|
||||||
let code = "5 +6";
|
let code = "5 +6";
|
||||||
let parser = Parser::new(crate::tokeniser::lexer(code));
|
let parser = Parser::new(crate::token::lexer(code));
|
||||||
let result = parser.ast().unwrap();
|
let result = parser.ast().unwrap();
|
||||||
let expected_result = Program {
|
let expected_result = Program {
|
||||||
start: 0,
|
start: 0,
|
||||||
@ -2964,8 +2976,8 @@ show(mySk1)"#;
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_empty_file() {
|
fn test_empty_file() {
|
||||||
let some_program_string = r#""#;
|
let some_program_string = r#""#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let result = parser.ast();
|
let result = parser.ast();
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert!(result.err().unwrap().to_string().contains("file is empty"));
|
assert!(result.err().unwrap().to_string().contains("file is empty"));
|
||||||
@ -2973,7 +2985,7 @@ show(mySk1)"#;
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_half_pipe_small() {
|
fn test_parse_half_pipe_small() {
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
"const secondExtrude = startSketchAt([0,0])
|
"const secondExtrude = startSketchAt([0,0])
|
||||||
|",
|
|",
|
||||||
);
|
);
|
||||||
@ -2985,14 +2997,14 @@ show(mySk1)"#;
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_member_expression_double_nested_braces() {
|
fn test_parse_member_expression_double_nested_braces() {
|
||||||
let tokens = crate::tokeniser::lexer(r#"const prop = yo["one"][two]"#);
|
let tokens = crate::token::lexer(r#"const prop = yo["one"][two]"#);
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
parser.ast().unwrap();
|
parser.ast().unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_member_expression_binary_expression_period_number_first() {
|
fn test_parse_member_expression_binary_expression_period_number_first() {
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"const obj = { a: 1, b: 2 }
|
r#"const obj = { a: 1, b: 2 }
|
||||||
const height = 1 - obj.a"#,
|
const height = 1 - obj.a"#,
|
||||||
);
|
);
|
||||||
@ -3002,7 +3014,7 @@ const height = 1 - obj.a"#,
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_member_expression_binary_expression_brace_number_first() {
|
fn test_parse_member_expression_binary_expression_brace_number_first() {
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"const obj = { a: 1, b: 2 }
|
r#"const obj = { a: 1, b: 2 }
|
||||||
const height = 1 - obj["a"]"#,
|
const height = 1 - obj["a"]"#,
|
||||||
);
|
);
|
||||||
@ -3012,7 +3024,7 @@ const height = 1 - obj["a"]"#,
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_member_expression_binary_expression_brace_number_second() {
|
fn test_parse_member_expression_binary_expression_brace_number_second() {
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"const obj = { a: 1, b: 2 }
|
r#"const obj = { a: 1, b: 2 }
|
||||||
const height = obj["a"] - 1"#,
|
const height = obj["a"] - 1"#,
|
||||||
);
|
);
|
||||||
@ -3022,7 +3034,7 @@ const height = obj["a"] - 1"#,
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_member_expression_binary_expression_in_array_number_first() {
|
fn test_parse_member_expression_binary_expression_in_array_number_first() {
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"const obj = { a: 1, b: 2 }
|
r#"const obj = { a: 1, b: 2 }
|
||||||
const height = [1 - obj["a"], 0]"#,
|
const height = [1 - obj["a"], 0]"#,
|
||||||
);
|
);
|
||||||
@ -3032,7 +3044,7 @@ const height = [1 - obj["a"], 0]"#,
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_member_expression_binary_expression_in_array_number_second() {
|
fn test_parse_member_expression_binary_expression_in_array_number_second() {
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"const obj = { a: 1, b: 2 }
|
r#"const obj = { a: 1, b: 2 }
|
||||||
const height = [obj["a"] - 1, 0]"#,
|
const height = [obj["a"] - 1, 0]"#,
|
||||||
);
|
);
|
||||||
@ -3042,7 +3054,7 @@ const height = [obj["a"] - 1, 0]"#,
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_member_expression_binary_expression_in_array_number_second_missing_space() {
|
fn test_parse_member_expression_binary_expression_in_array_number_second_missing_space() {
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"const obj = { a: 1, b: 2 }
|
r#"const obj = { a: 1, b: 2 }
|
||||||
const height = [obj["a"] -1, 0]"#,
|
const height = [obj["a"] -1, 0]"#,
|
||||||
);
|
);
|
||||||
@ -3052,7 +3064,7 @@ const height = [obj["a"] -1, 0]"#,
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_half_pipe() {
|
fn test_parse_half_pipe() {
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
"const height = 10
|
"const height = 10
|
||||||
|
|
||||||
const firstExtrude = startSketchAt([0,0])
|
const firstExtrude = startSketchAt([0,0])
|
||||||
@ -3075,15 +3087,17 @@ const secondExtrude = startSketchAt([0,0])
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_greater_bang() {
|
fn test_parse_greater_bang() {
|
||||||
let tokens = crate::tokeniser::lexer(">!");
|
let tokens = crate::token::lexer(">!");
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let result = parser.ast();
|
let err = parser.ast().unwrap_err();
|
||||||
assert!(result.is_ok());
|
// TODO: Better errors when program cannot tokenize.
|
||||||
|
// https://github.com/KittyCAD/modeling-app/issues/696
|
||||||
|
assert!(err.to_string().contains("file is empty"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_z_percent_parens() {
|
fn test_parse_z_percent_parens() {
|
||||||
let tokens = crate::tokeniser::lexer("z%)");
|
let tokens = crate::token::lexer("z%)");
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let result = parser.ast();
|
let result = parser.ast();
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
@ -3092,15 +3106,17 @@ const secondExtrude = startSketchAt([0,0])
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_parens_unicode() {
|
fn test_parse_parens_unicode() {
|
||||||
let tokens = crate::tokeniser::lexer("(ޜ");
|
let tokens = crate::token::lexer("(ޜ");
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let result = parser.ast();
|
let result = parser.ast();
|
||||||
assert!(result.is_ok());
|
// TODO: Better errors when program cannot tokenize.
|
||||||
|
// https://github.com/KittyCAD/modeling-app/issues/696
|
||||||
|
assert!(result.is_err());
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_negative_in_array_binary_expression() {
|
fn test_parse_negative_in_array_binary_expression() {
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"const leg1 = 5
|
r#"const leg1 = 5
|
||||||
const thickness = 0.56
|
const thickness = 0.56
|
||||||
|
|
||||||
@ -3114,7 +3130,7 @@ const bracket = [-leg2 + thickness, 0]
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_nested_open_brackets() {
|
fn test_parse_nested_open_brackets() {
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"
|
r#"
|
||||||
z(-[["#,
|
z(-[["#,
|
||||||
);
|
);
|
||||||
@ -3129,31 +3145,38 @@ z(-[["#,
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_weird_new_line_function() {
|
fn test_parse_weird_new_line_function() {
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"z
|
r#"z
|
||||||
(--#"#,
|
(--#"#,
|
||||||
);
|
);
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let result = parser.ast();
|
let result = parser.ast();
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
|
// TODO: Better errors when program cannot tokenize.
|
||||||
|
// https://github.com/KittyCAD/modeling-app/issues/696
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([0, 1])], message: "missing a closing brace for the function call" }"#
|
r#"semantic: KclErrorDetails { source_ranges: [], message: "file is empty" }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_weird_lots_of_fancy_brackets() {
|
fn test_parse_weird_lots_of_fancy_brackets() {
|
||||||
let tokens = crate::tokeniser::lexer(r#"zz({{{{{{{{)iegAng{{{{{{{##"#);
|
let tokens = crate::token::lexer(r#"zz({{{{{{{{)iegAng{{{{{{{##"#);
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let result = parser.ast();
|
let result = parser.ast();
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert!(result.err().unwrap().to_string().contains("unexpected end"));
|
// TODO: Better errors when program cannot tokenize.
|
||||||
|
// https://github.com/KittyCAD/modeling-app/issues/696
|
||||||
|
assert_eq!(
|
||||||
|
result.err().unwrap().to_string(),
|
||||||
|
r#"semantic: KclErrorDetails { source_ranges: [], message: "file is empty" }"#
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_weird_close_before_open() {
|
fn test_parse_weird_close_before_open() {
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"fn)n
|
r#"fn)n
|
||||||
e
|
e
|
||||||
["#,
|
["#,
|
||||||
@ -3170,7 +3193,7 @@ e
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_weird_close_before_nada() {
|
fn test_parse_weird_close_before_nada() {
|
||||||
let tokens = crate::tokeniser::lexer(r#"fn)n-"#);
|
let tokens = crate::token::lexer(r#"fn)n-"#);
|
||||||
let parser = Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let result = parser.ast();
|
let result = parser.ast();
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
@ -3179,7 +3202,7 @@ e
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_weird_lots_of_slashes() {
|
fn test_parse_weird_lots_of_slashes() {
|
||||||
let tokens = crate::tokeniser::lexer(
|
let tokens = crate::token::lexer(
|
||||||
r#"J///////////o//+///////////P++++*++++++P///////˟
|
r#"J///////////o//+///////////P++++*++++++P///////˟
|
||||||
++4"#,
|
++4"#,
|
||||||
);
|
);
|
||||||
@ -3196,7 +3219,7 @@ e
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_parse_expand_array() {
|
fn test_parse_expand_array() {
|
||||||
let code = "const myArray = [0..10]";
|
let code = "const myArray = [0..10]";
|
||||||
let parser = Parser::new(crate::tokeniser::lexer(code));
|
let parser = Parser::new(crate::token::lexer(code));
|
||||||
let result = parser.ast().unwrap();
|
let result = parser.ast().unwrap();
|
||||||
let expected_result = Program {
|
let expected_result = Program {
|
||||||
start: 0,
|
start: 0,
|
||||||
@ -3299,8 +3322,8 @@ e
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_error_keyword_in_variable() {
|
fn test_error_keyword_in_variable() {
|
||||||
let some_program_string = r#"const let = "thing""#;
|
let some_program_string = r#"const let = "thing""#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let result = parser.ast();
|
let result = parser.ast();
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -3312,8 +3335,8 @@ e
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_error_keyword_in_fn_name() {
|
fn test_error_keyword_in_fn_name() {
|
||||||
let some_program_string = r#"fn let = () {}"#;
|
let some_program_string = r#"fn let = () {}"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let result = parser.ast();
|
let result = parser.ast();
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -3325,8 +3348,8 @@ e
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_error_stdlib_in_fn_name() {
|
fn test_error_stdlib_in_fn_name() {
|
||||||
let some_program_string = r#"fn cos = () {}"#;
|
let some_program_string = r#"fn cos = () {}"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let result = parser.ast();
|
let result = parser.ast();
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -3340,8 +3363,8 @@ e
|
|||||||
let some_program_string = r#"fn thing = (let) => {
|
let some_program_string = r#"fn thing = (let) => {
|
||||||
return 1
|
return 1
|
||||||
}"#;
|
}"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let result = parser.ast();
|
let result = parser.ast();
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -3355,8 +3378,8 @@ e
|
|||||||
let some_program_string = r#"fn thing = (cos) => {
|
let some_program_string = r#"fn thing = (cos) => {
|
||||||
return 1
|
return 1
|
||||||
}"#;
|
}"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let result = parser.ast();
|
let result = parser.ast();
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -3373,8 +3396,8 @@ e
|
|||||||
}
|
}
|
||||||
firstPrimeNumber()
|
firstPrimeNumber()
|
||||||
"#;
|
"#;
|
||||||
let tokens = crate::tokeniser::lexer(program);
|
let tokens = crate::token::lexer(program);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let _ast = parser.ast().unwrap();
|
let _ast = parser.ast().unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -3386,8 +3409,8 @@ e
|
|||||||
|
|
||||||
thing(false)
|
thing(false)
|
||||||
"#;
|
"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
parser.ast().unwrap();
|
parser.ast().unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -3403,8 +3426,8 @@ thing(false)
|
|||||||
"#,
|
"#,
|
||||||
name
|
name
|
||||||
);
|
);
|
||||||
let tokens = crate::tokeniser::lexer(&some_program_string);
|
let tokens = crate::token::lexer(&some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let result = parser.ast();
|
let result = parser.ast();
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -3421,8 +3444,8 @@ thing(false)
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_error_define_var_as_function() {
|
fn test_error_define_var_as_function() {
|
||||||
let some_program_string = r#"fn thing = "thing""#;
|
let some_program_string = r#"fn thing = "thing""#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
let result = parser.ast();
|
let result = parser.ast();
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -3450,8 +3473,8 @@ const pt2 = b2[0]
|
|||||||
|
|
||||||
show(b1)
|
show(b1)
|
||||||
show(b2)"#;
|
show(b2)"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
parser.ast().unwrap();
|
parser.ast().unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -3459,18 +3482,36 @@ show(b2)"#;
|
|||||||
fn test_math_with_stdlib() {
|
fn test_math_with_stdlib() {
|
||||||
let some_program_string = r#"const d2r = pi() / 2
|
let some_program_string = r#"const d2r = pi() / 2
|
||||||
let other_thing = 2 * cos(3)"#;
|
let other_thing = 2 * cos(3)"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
parser.ast().unwrap();
|
parser.ast().unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
#[ignore] // ignore until more stack fixes
|
|
||||||
fn test_parse_pipes_on_pipes() {
|
fn test_parse_pipes_on_pipes() {
|
||||||
let code = include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl");
|
let code = include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl");
|
||||||
|
|
||||||
let tokens = crate::tokeniser::lexer(code);
|
let tokens = crate::token::lexer(code);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = Parser::new(tokens);
|
||||||
|
parser.ast().unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_negative_arguments() {
|
||||||
|
let some_program_string = r#"fn box = (p, h, l, w) => {
|
||||||
|
const myBox = startSketchAt(p)
|
||||||
|
|> line([0, l], %)
|
||||||
|
|> line([w, 0], %)
|
||||||
|
|> line([0, -l], %)
|
||||||
|
|> close(%)
|
||||||
|
|> extrude(h, %)
|
||||||
|
|
||||||
|
return myBox
|
||||||
|
}
|
||||||
|
let myBox = box([0,0], -3, -16, -10)
|
||||||
|
show(myBox)"#;
|
||||||
|
let tokens = crate::token::lexer(some_program_string);
|
||||||
|
let parser = Parser::new(tokens);
|
||||||
parser.ast().unwrap();
|
parser.ast().unwrap();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -34,7 +34,7 @@ pub struct Backend {
|
|||||||
/// The types of tokens the server supports.
|
/// The types of tokens the server supports.
|
||||||
pub token_types: Vec<SemanticTokenType>,
|
pub token_types: Vec<SemanticTokenType>,
|
||||||
/// Token maps.
|
/// Token maps.
|
||||||
pub token_map: DashMap<String, Vec<crate::tokeniser::Token>>,
|
pub token_map: DashMap<String, Vec<crate::token::Token>>,
|
||||||
/// AST maps.
|
/// AST maps.
|
||||||
pub ast_map: DashMap<String, crate::ast::types::Program>,
|
pub ast_map: DashMap<String, crate::ast::types::Program>,
|
||||||
/// Current code.
|
/// Current code.
|
||||||
@ -56,7 +56,7 @@ impl Backend {
|
|||||||
// Lets update the tokens.
|
// Lets update the tokens.
|
||||||
self.current_code_map
|
self.current_code_map
|
||||||
.insert(params.uri.to_string(), params.text.clone());
|
.insert(params.uri.to_string(), params.text.clone());
|
||||||
let tokens = crate::tokeniser::lexer(¶ms.text);
|
let tokens = crate::token::lexer(¶ms.text);
|
||||||
self.token_map.insert(params.uri.to_string(), tokens.clone());
|
self.token_map.insert(params.uri.to_string(), tokens.clone());
|
||||||
|
|
||||||
// Update the semantic tokens map.
|
// Update the semantic tokens map.
|
||||||
@ -69,9 +69,7 @@ impl Backend {
|
|||||||
continue;
|
continue;
|
||||||
};
|
};
|
||||||
|
|
||||||
if token.token_type == crate::tokeniser::TokenType::Word
|
if token.token_type == crate::token::TokenType::Word && self.stdlib_completions.contains_key(&token.value) {
|
||||||
&& self.stdlib_completions.contains_key(&token.value)
|
|
||||||
{
|
|
||||||
// This is a stdlib function.
|
// This is a stdlib function.
|
||||||
token_type = SemanticTokenType::FUNCTION;
|
token_type = SemanticTokenType::FUNCTION;
|
||||||
}
|
}
|
||||||
@ -549,7 +547,7 @@ impl LanguageServer for Backend {
|
|||||||
// Parse the ast.
|
// Parse the ast.
|
||||||
// I don't know if we need to do this again since it should be updated in the context.
|
// I don't know if we need to do this again since it should be updated in the context.
|
||||||
// But I figure better safe than sorry since this will write back out to the file.
|
// But I figure better safe than sorry since this will write back out to the file.
|
||||||
let tokens = crate::tokeniser::lexer(¤t_code);
|
let tokens = crate::token::lexer(¤t_code);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let Ok(ast) = parser.ast() else {
|
let Ok(ast) = parser.ast() else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
@ -581,7 +579,7 @@ impl LanguageServer for Backend {
|
|||||||
// Parse the ast.
|
// Parse the ast.
|
||||||
// I don't know if we need to do this again since it should be updated in the context.
|
// I don't know if we need to do this again since it should be updated in the context.
|
||||||
// But I figure better safe than sorry since this will write back out to the file.
|
// But I figure better safe than sorry since this will write back out to the file.
|
||||||
let tokens = crate::tokeniser::lexer(¤t_code);
|
let tokens = crate::token::lexer(¤t_code);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let Ok(mut ast) = parser.ast() else {
|
let Ok(mut ast) = parser.ast() else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
|
@ -61,6 +61,8 @@ impl StdLib {
|
|||||||
Box::new(crate::std::sketch::StartSketchAt),
|
Box::new(crate::std::sketch::StartSketchAt),
|
||||||
Box::new(crate::std::sketch::Close),
|
Box::new(crate::std::sketch::Close),
|
||||||
Box::new(crate::std::sketch::Arc),
|
Box::new(crate::std::sketch::Arc),
|
||||||
|
Box::new(crate::std::sketch::TangentalArc),
|
||||||
|
Box::new(crate::std::sketch::TangentalArcTo),
|
||||||
Box::new(crate::std::sketch::BezierCurve),
|
Box::new(crate::std::sketch::BezierCurve),
|
||||||
Box::new(crate::std::math::Cos),
|
Box::new(crate::std::math::Cos),
|
||||||
Box::new(crate::std::math::Sin),
|
Box::new(crate::std::math::Sin),
|
||||||
|
@ -856,26 +856,6 @@ async fn inner_arc(data: ArcData, sketch_group: Box<SketchGroup>, args: Args) ->
|
|||||||
)
|
)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
// TODO: Dont do this (move path pen) - mike
|
|
||||||
// lets review what the needs are here and see if any existing arc endpoints can accomplish this
|
|
||||||
|
|
||||||
// Move the path pen to the end of the arc.
|
|
||||||
// Since that is where we want to draw the next path.
|
|
||||||
// TODO: the engine should automatically move the pen to the end of the arc.
|
|
||||||
// This just seems inefficient.
|
|
||||||
args.send_modeling_cmd(
|
|
||||||
id,
|
|
||||||
ModelingCmd::MovePathPen {
|
|
||||||
path: sketch_group.id,
|
|
||||||
to: Point3D {
|
|
||||||
x: end.x,
|
|
||||||
y: end.y,
|
|
||||||
z: 0.0,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let current_path = Path::ToPoint {
|
let current_path = Path::ToPoint {
|
||||||
base: BasePath {
|
base: BasePath {
|
||||||
from: from.into(),
|
from: from.into(),
|
||||||
@ -899,6 +879,214 @@ async fn inner_arc(data: ArcData, sketch_group: Box<SketchGroup>, args: Args) ->
|
|||||||
Ok(new_sketch_group)
|
Ok(new_sketch_group)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Data to draw a tangental arc.
|
||||||
|
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, JsonSchema, ts_rs::TS)]
|
||||||
|
#[ts(export)]
|
||||||
|
#[serde(rename_all = "camelCase", untagged)]
|
||||||
|
pub enum TangentalArcData {
|
||||||
|
RadiusAndOffset {
|
||||||
|
/// Radius of the arc.
|
||||||
|
/// Not to be confused with Raiders of the Lost Ark.
|
||||||
|
radius: f64,
|
||||||
|
/// Offset of the arc, in degrees.
|
||||||
|
offset: f64,
|
||||||
|
},
|
||||||
|
/// A point with a tag.
|
||||||
|
PointWithTag {
|
||||||
|
/// Where the arc should end. Must lie in the same plane as the current path pen position. Must not be colinear with current path pen position.
|
||||||
|
to: [f64; 2],
|
||||||
|
/// The tag.
|
||||||
|
tag: String,
|
||||||
|
},
|
||||||
|
/// A point where the arc should end. Must lie in the same plane as the current path pen position. Must not be colinear with current path pen position.
|
||||||
|
Point([f64; 2]),
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Draw a tangental arc.
|
||||||
|
pub async fn tangental_arc(args: Args) -> Result<MemoryItem, KclError> {
|
||||||
|
let (data, sketch_group): (TangentalArcData, Box<SketchGroup>) = args.get_data_and_sketch_group()?;
|
||||||
|
|
||||||
|
let new_sketch_group = inner_tangental_arc(data, sketch_group, args).await?;
|
||||||
|
Ok(MemoryItem::SketchGroup(new_sketch_group))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Draw an arc.
|
||||||
|
#[stdlib {
|
||||||
|
name = "tangentalArc",
|
||||||
|
}]
|
||||||
|
async fn inner_tangental_arc(
|
||||||
|
data: TangentalArcData,
|
||||||
|
sketch_group: Box<SketchGroup>,
|
||||||
|
args: Args,
|
||||||
|
) -> Result<Box<SketchGroup>, KclError> {
|
||||||
|
let from: Point2d = sketch_group.get_coords_from_paths()?;
|
||||||
|
|
||||||
|
let id = uuid::Uuid::new_v4();
|
||||||
|
|
||||||
|
let to = match &data {
|
||||||
|
TangentalArcData::RadiusAndOffset { radius, offset } => {
|
||||||
|
// Calculate the end point from the angle and radius.
|
||||||
|
let end_angle = Angle::from_degrees(*offset);
|
||||||
|
let start_angle = Angle::from_degrees(0.0);
|
||||||
|
let (_, to) = arc_center_and_end(from, start_angle, end_angle, *radius);
|
||||||
|
|
||||||
|
args.send_modeling_cmd(
|
||||||
|
id,
|
||||||
|
ModelingCmd::ExtendPath {
|
||||||
|
path: sketch_group.id,
|
||||||
|
segment: kittycad::types::PathSegment::TangentialArc {
|
||||||
|
radius: *radius,
|
||||||
|
offset: kittycad::types::Angle {
|
||||||
|
unit: kittycad::types::UnitAngle::Degrees,
|
||||||
|
value: *offset,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
to.into()
|
||||||
|
}
|
||||||
|
TangentalArcData::PointWithTag { to, .. } => {
|
||||||
|
args.send_modeling_cmd(
|
||||||
|
id,
|
||||||
|
ModelingCmd::ExtendPath {
|
||||||
|
path: sketch_group.id,
|
||||||
|
segment: kittycad::types::PathSegment::TangentialArcTo {
|
||||||
|
angle_snap_increment: None,
|
||||||
|
to: kittycad::types::Point3D {
|
||||||
|
x: to[0],
|
||||||
|
y: to[1],
|
||||||
|
z: 0.0,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
*to
|
||||||
|
}
|
||||||
|
TangentalArcData::Point(to) => {
|
||||||
|
args.send_modeling_cmd(
|
||||||
|
id,
|
||||||
|
ModelingCmd::ExtendPath {
|
||||||
|
path: sketch_group.id,
|
||||||
|
segment: kittycad::types::PathSegment::TangentialArcTo {
|
||||||
|
angle_snap_increment: None,
|
||||||
|
to: kittycad::types::Point3D {
|
||||||
|
x: to[0],
|
||||||
|
y: to[1],
|
||||||
|
z: 0.0,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
*to
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let to = [from.x + to[0], from.y + to[1]];
|
||||||
|
|
||||||
|
let current_path = Path::ToPoint {
|
||||||
|
base: BasePath {
|
||||||
|
from: from.into(),
|
||||||
|
to,
|
||||||
|
name: "".to_string(),
|
||||||
|
geo_meta: GeoMeta {
|
||||||
|
id,
|
||||||
|
metadata: args.source_range.into(),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut new_sketch_group = sketch_group.clone();
|
||||||
|
new_sketch_group.value.push(current_path);
|
||||||
|
|
||||||
|
Ok(new_sketch_group)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Data to draw a tangental arc to a specific point.
|
||||||
|
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, JsonSchema, ts_rs::TS)]
|
||||||
|
#[ts(export)]
|
||||||
|
#[serde(rename_all = "camelCase", untagged)]
|
||||||
|
pub enum TangentalArcToData {
|
||||||
|
/// A point with a tag.
|
||||||
|
PointWithTag {
|
||||||
|
/// Where the arc should end. Must lie in the same plane as the current path pen position. Must not be colinear with current path pen position.
|
||||||
|
to: [f64; 2],
|
||||||
|
/// The tag.
|
||||||
|
tag: String,
|
||||||
|
},
|
||||||
|
/// A point where the arc should end. Must lie in the same plane as the current path pen position. Must not be colinear with current path pen position.
|
||||||
|
Point([f64; 2]),
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Draw a tangental arc to a specific point.
|
||||||
|
pub async fn tangental_arc_to(args: Args) -> Result<MemoryItem, KclError> {
|
||||||
|
let (data, sketch_group): (TangentalArcToData, Box<SketchGroup>) = args.get_data_and_sketch_group()?;
|
||||||
|
|
||||||
|
let new_sketch_group = inner_tangental_arc_to(data, sketch_group, args).await?;
|
||||||
|
Ok(MemoryItem::SketchGroup(new_sketch_group))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Draw an arc.
|
||||||
|
#[stdlib {
|
||||||
|
name = "tangentalArcTo",
|
||||||
|
}]
|
||||||
|
async fn inner_tangental_arc_to(
|
||||||
|
data: TangentalArcToData,
|
||||||
|
sketch_group: Box<SketchGroup>,
|
||||||
|
args: Args,
|
||||||
|
) -> Result<Box<SketchGroup>, KclError> {
|
||||||
|
let from: Point2d = sketch_group.get_coords_from_paths()?;
|
||||||
|
let to = match &data {
|
||||||
|
TangentalArcToData::PointWithTag { to, .. } => to,
|
||||||
|
TangentalArcToData::Point(to) => to,
|
||||||
|
};
|
||||||
|
|
||||||
|
let delta = [to[0] - from.x, to[1] - from.y];
|
||||||
|
|
||||||
|
let id = uuid::Uuid::new_v4();
|
||||||
|
|
||||||
|
args.send_modeling_cmd(
|
||||||
|
id,
|
||||||
|
ModelingCmd::ExtendPath {
|
||||||
|
path: sketch_group.id,
|
||||||
|
segment: kittycad::types::PathSegment::TangentialArcTo {
|
||||||
|
angle_snap_increment: None,
|
||||||
|
to: kittycad::types::Point3D {
|
||||||
|
x: delta[0],
|
||||||
|
y: delta[1],
|
||||||
|
z: 0.0,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let current_path = Path::ToPoint {
|
||||||
|
base: BasePath {
|
||||||
|
from: from.into(),
|
||||||
|
to: *to,
|
||||||
|
name: if let TangentalArcToData::PointWithTag { tag, .. } = data {
|
||||||
|
tag.to_string()
|
||||||
|
} else {
|
||||||
|
"".to_string()
|
||||||
|
},
|
||||||
|
geo_meta: GeoMeta {
|
||||||
|
id,
|
||||||
|
metadata: args.source_range.into(),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut new_sketch_group = sketch_group.clone();
|
||||||
|
new_sketch_group.value.push(current_path);
|
||||||
|
|
||||||
|
Ok(new_sketch_group)
|
||||||
|
}
|
||||||
|
|
||||||
/// Data to draw a bezier curve.
|
/// Data to draw a bezier curve.
|
||||||
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
|
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
|
||||||
#[ts(export)]
|
#[ts(export)]
|
||||||
|
@ -10,6 +10,15 @@ pub struct Angle {
|
|||||||
degrees: f64,
|
degrees: f64,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl From<kittycad::types::Angle> for Angle {
|
||||||
|
fn from(angle: kittycad::types::Angle) -> Self {
|
||||||
|
match angle.unit {
|
||||||
|
kittycad::types::UnitAngle::Degrees => Self::from_degrees(angle.value),
|
||||||
|
kittycad::types::UnitAngle::Radians => Self::from_radians(angle.value),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl Angle {
|
impl Angle {
|
||||||
const ZERO: Self = Self { degrees: 0.0 };
|
const ZERO: Self = Self { degrees: 0.0 };
|
||||||
/// Make an angle of the given degrees.
|
/// Make an angle of the given degrees.
|
||||||
|
173
src/wasm-lib/kcl/src/token.rs
Normal file
173
src/wasm-lib/kcl/src/token.rs
Normal file
@ -0,0 +1,173 @@
|
|||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use anyhow::Result;
|
||||||
|
use parse_display::{Display, FromStr};
|
||||||
|
use schemars::JsonSchema;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use tower_lsp::lsp_types::SemanticTokenType;
|
||||||
|
|
||||||
|
mod tokeniser;
|
||||||
|
|
||||||
|
/// The types of tokens.
|
||||||
|
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, ts_rs::TS, JsonSchema, FromStr, Display)]
|
||||||
|
#[ts(export)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
#[display(style = "camelCase")]
|
||||||
|
pub enum TokenType {
|
||||||
|
/// A number.
|
||||||
|
Number,
|
||||||
|
/// A word.
|
||||||
|
Word,
|
||||||
|
/// An operator.
|
||||||
|
Operator,
|
||||||
|
/// A string.
|
||||||
|
String,
|
||||||
|
/// A keyword.
|
||||||
|
Keyword,
|
||||||
|
/// A brace.
|
||||||
|
Brace,
|
||||||
|
/// Whitespace.
|
||||||
|
Whitespace,
|
||||||
|
/// A comma.
|
||||||
|
Comma,
|
||||||
|
/// A colon.
|
||||||
|
Colon,
|
||||||
|
/// A period.
|
||||||
|
Period,
|
||||||
|
/// A double period: `..`.
|
||||||
|
DoublePeriod,
|
||||||
|
/// A line comment.
|
||||||
|
LineComment,
|
||||||
|
/// A block comment.
|
||||||
|
BlockComment,
|
||||||
|
/// A function name.
|
||||||
|
Function,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Most KCL tokens correspond to LSP semantic tokens (but not all).
|
||||||
|
impl TryFrom<TokenType> for SemanticTokenType {
|
||||||
|
type Error = anyhow::Error;
|
||||||
|
fn try_from(token_type: TokenType) -> Result<Self> {
|
||||||
|
Ok(match token_type {
|
||||||
|
TokenType::Number => Self::NUMBER,
|
||||||
|
TokenType::Word => Self::VARIABLE,
|
||||||
|
TokenType::Keyword => Self::KEYWORD,
|
||||||
|
TokenType::Operator => Self::OPERATOR,
|
||||||
|
TokenType::String => Self::STRING,
|
||||||
|
TokenType::LineComment => Self::COMMENT,
|
||||||
|
TokenType::BlockComment => Self::COMMENT,
|
||||||
|
TokenType::Function => Self::FUNCTION,
|
||||||
|
TokenType::Whitespace
|
||||||
|
| TokenType::Brace
|
||||||
|
| TokenType::Comma
|
||||||
|
| TokenType::Colon
|
||||||
|
| TokenType::Period
|
||||||
|
| TokenType::DoublePeriod => {
|
||||||
|
anyhow::bail!("unsupported token type: {:?}", token_type)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TokenType {
|
||||||
|
// This is for the lsp server.
|
||||||
|
pub fn all_semantic_token_types() -> Result<Vec<SemanticTokenType>> {
|
||||||
|
let mut settings = schemars::gen::SchemaSettings::openapi3();
|
||||||
|
settings.inline_subschemas = true;
|
||||||
|
let mut generator = schemars::gen::SchemaGenerator::new(settings);
|
||||||
|
|
||||||
|
let schema = TokenType::json_schema(&mut generator);
|
||||||
|
let schemars::schema::Schema::Object(o) = &schema else {
|
||||||
|
anyhow::bail!("expected object schema: {:#?}", schema);
|
||||||
|
};
|
||||||
|
let Some(subschemas) = &o.subschemas else {
|
||||||
|
anyhow::bail!("expected subschemas: {:#?}", schema);
|
||||||
|
};
|
||||||
|
let Some(one_ofs) = &subschemas.one_of else {
|
||||||
|
anyhow::bail!("expected one_of: {:#?}", schema);
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut semantic_tokens = vec![];
|
||||||
|
for one_of in one_ofs {
|
||||||
|
let schemars::schema::Schema::Object(o) = one_of else {
|
||||||
|
anyhow::bail!("expected object one_of: {:#?}", one_of);
|
||||||
|
};
|
||||||
|
|
||||||
|
let Some(enum_values) = o.enum_values.as_ref() else {
|
||||||
|
anyhow::bail!("expected enum values: {:#?}", o);
|
||||||
|
};
|
||||||
|
|
||||||
|
if enum_values.len() > 1 {
|
||||||
|
anyhow::bail!("expected only one enum value: {:#?}", o);
|
||||||
|
}
|
||||||
|
|
||||||
|
if enum_values.is_empty() {
|
||||||
|
anyhow::bail!("expected at least one enum value: {:#?}", o);
|
||||||
|
}
|
||||||
|
|
||||||
|
let label = TokenType::from_str(&enum_values[0].to_string().replace('"', ""))?;
|
||||||
|
if let Ok(semantic_token_type) = SemanticTokenType::try_from(label) {
|
||||||
|
semantic_tokens.push(semantic_token_type);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(semantic_tokens)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone, ts_rs::TS)]
|
||||||
|
#[ts(export)]
|
||||||
|
pub struct Token {
|
||||||
|
#[serde(rename = "type")]
|
||||||
|
pub token_type: TokenType,
|
||||||
|
/// Offset in the source code where this token begins.
|
||||||
|
pub start: usize,
|
||||||
|
/// Offset in the source code where this token ends.
|
||||||
|
pub end: usize,
|
||||||
|
pub value: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Token {
|
||||||
|
pub fn from_range(range: std::ops::Range<usize>, token_type: TokenType, value: String) -> Self {
|
||||||
|
Self {
|
||||||
|
start: range.start,
|
||||||
|
end: range.end,
|
||||||
|
value,
|
||||||
|
token_type,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn is_code_token(&self) -> bool {
|
||||||
|
!matches!(
|
||||||
|
self.token_type,
|
||||||
|
TokenType::Whitespace | TokenType::LineComment | TokenType::BlockComment
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Token> for crate::executor::SourceRange {
|
||||||
|
fn from(token: Token) -> Self {
|
||||||
|
Self([token.start, token.end])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<&Token> for crate::executor::SourceRange {
|
||||||
|
fn from(token: &Token) -> Self {
|
||||||
|
Self([token.start, token.end])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn lexer(s: &str) -> Vec<Token> {
|
||||||
|
tokeniser::lexer(s).unwrap_or_default()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
// We have this as a test so we can ensure it never panics with an unwrap in the server.
|
||||||
|
#[test]
|
||||||
|
fn test_token_type_to_semantic_token_type() {
|
||||||
|
let semantic_types = TokenType::all_semantic_token_types().unwrap();
|
||||||
|
assert!(!semantic_types.is_empty());
|
||||||
|
}
|
||||||
|
}
|
1464
src/wasm-lib/kcl/src/token/tokeniser.rs
Normal file
1464
src/wasm-lib/kcl/src/token/tokeniser.rs
Normal file
File diff suppressed because it is too large
Load Diff
@ -1,749 +0,0 @@
|
|||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
use anyhow::Result;
|
|
||||||
use lazy_static::lazy_static;
|
|
||||||
use parse_display::{Display, FromStr};
|
|
||||||
use regex::bytes::Regex;
|
|
||||||
use schemars::JsonSchema;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use tower_lsp::lsp_types::SemanticTokenType;
|
|
||||||
|
|
||||||
/// The types of tokens.
|
|
||||||
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, ts_rs::TS, JsonSchema, FromStr, Display)]
|
|
||||||
#[ts(export)]
|
|
||||||
#[serde(rename_all = "camelCase")]
|
|
||||||
#[display(style = "camelCase")]
|
|
||||||
pub enum TokenType {
|
|
||||||
/// A number.
|
|
||||||
Number,
|
|
||||||
/// A word.
|
|
||||||
Word,
|
|
||||||
/// An operator.
|
|
||||||
Operator,
|
|
||||||
/// A string.
|
|
||||||
String,
|
|
||||||
/// A keyword.
|
|
||||||
Keyword,
|
|
||||||
/// A brace.
|
|
||||||
Brace,
|
|
||||||
/// Whitespace.
|
|
||||||
Whitespace,
|
|
||||||
/// A comma.
|
|
||||||
Comma,
|
|
||||||
/// A colon.
|
|
||||||
Colon,
|
|
||||||
/// A period.
|
|
||||||
Period,
|
|
||||||
/// A double period: `..`.
|
|
||||||
DoublePeriod,
|
|
||||||
/// A line comment.
|
|
||||||
LineComment,
|
|
||||||
/// A block comment.
|
|
||||||
BlockComment,
|
|
||||||
/// A function name.
|
|
||||||
Function,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Most KCL tokens correspond to LSP semantic tokens (but not all).
|
|
||||||
impl TryFrom<TokenType> for SemanticTokenType {
|
|
||||||
type Error = anyhow::Error;
|
|
||||||
fn try_from(token_type: TokenType) -> Result<Self> {
|
|
||||||
Ok(match token_type {
|
|
||||||
TokenType::Number => Self::NUMBER,
|
|
||||||
TokenType::Word => Self::VARIABLE,
|
|
||||||
TokenType::Keyword => Self::KEYWORD,
|
|
||||||
TokenType::Operator => Self::OPERATOR,
|
|
||||||
TokenType::String => Self::STRING,
|
|
||||||
TokenType::LineComment => Self::COMMENT,
|
|
||||||
TokenType::BlockComment => Self::COMMENT,
|
|
||||||
TokenType::Function => Self::FUNCTION,
|
|
||||||
TokenType::Whitespace
|
|
||||||
| TokenType::Brace
|
|
||||||
| TokenType::Comma
|
|
||||||
| TokenType::Colon
|
|
||||||
| TokenType::Period
|
|
||||||
| TokenType::DoublePeriod => {
|
|
||||||
anyhow::bail!("unsupported token type: {:?}", token_type)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TokenType {
|
|
||||||
// This is for the lsp server.
|
|
||||||
pub fn all_semantic_token_types() -> Result<Vec<SemanticTokenType>> {
|
|
||||||
let mut settings = schemars::gen::SchemaSettings::openapi3();
|
|
||||||
settings.inline_subschemas = true;
|
|
||||||
let mut generator = schemars::gen::SchemaGenerator::new(settings);
|
|
||||||
|
|
||||||
let schema = TokenType::json_schema(&mut generator);
|
|
||||||
let schemars::schema::Schema::Object(o) = &schema else {
|
|
||||||
anyhow::bail!("expected object schema: {:#?}", schema);
|
|
||||||
};
|
|
||||||
let Some(subschemas) = &o.subschemas else {
|
|
||||||
anyhow::bail!("expected subschemas: {:#?}", schema);
|
|
||||||
};
|
|
||||||
let Some(one_ofs) = &subschemas.one_of else {
|
|
||||||
anyhow::bail!("expected one_of: {:#?}", schema);
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut semantic_tokens = vec![];
|
|
||||||
for one_of in one_ofs {
|
|
||||||
let schemars::schema::Schema::Object(o) = one_of else {
|
|
||||||
anyhow::bail!("expected object one_of: {:#?}", one_of);
|
|
||||||
};
|
|
||||||
|
|
||||||
let Some(enum_values) = o.enum_values.as_ref() else {
|
|
||||||
anyhow::bail!("expected enum values: {:#?}", o);
|
|
||||||
};
|
|
||||||
|
|
||||||
if enum_values.len() > 1 {
|
|
||||||
anyhow::bail!("expected only one enum value: {:#?}", o);
|
|
||||||
}
|
|
||||||
|
|
||||||
if enum_values.is_empty() {
|
|
||||||
anyhow::bail!("expected at least one enum value: {:#?}", o);
|
|
||||||
}
|
|
||||||
|
|
||||||
let label = TokenType::from_str(&enum_values[0].to_string().replace('"', ""))?;
|
|
||||||
if let Ok(semantic_token_type) = SemanticTokenType::try_from(label) {
|
|
||||||
semantic_tokens.push(semantic_token_type);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(semantic_tokens)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone, ts_rs::TS)]
|
|
||||||
#[ts(export)]
|
|
||||||
pub struct Token {
|
|
||||||
#[serde(rename = "type")]
|
|
||||||
pub token_type: TokenType,
|
|
||||||
/// Offset in the source code where this token begins.
|
|
||||||
pub start: usize,
|
|
||||||
/// Offset in the source code where this token ends.
|
|
||||||
pub end: usize,
|
|
||||||
pub value: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<Token> for crate::executor::SourceRange {
|
|
||||||
fn from(token: Token) -> Self {
|
|
||||||
Self([token.start, token.end])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&Token> for crate::executor::SourceRange {
|
|
||||||
fn from(token: &Token) -> Self {
|
|
||||||
Self([token.start, token.end])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
lazy_static! {
|
|
||||||
static ref NUMBER: Regex = Regex::new(r"^(\d+(\.\d*)?|\.\d+)\b").unwrap();
|
|
||||||
static ref WHITESPACE: Regex = Regex::new(r"\s+").unwrap();
|
|
||||||
static ref WORD: Regex = Regex::new(r"^[a-zA-Z_][a-zA-Z0-9_]*").unwrap();
|
|
||||||
// TODO: these should be generated using our struct types for these.
|
|
||||||
static ref KEYWORD: Regex =
|
|
||||||
Regex::new(r"^(if|else|for|while|return|break|continue|fn|let|mut|loop|true|false|nil|and|or|not|var|const)\b").unwrap();
|
|
||||||
static ref OPERATOR: Regex = Regex::new(r"^(>=|<=|==|=>|!= |\|>|\*|\+|-|/|%|=|<|>|\||\^)").unwrap();
|
|
||||||
static ref STRING: Regex = Regex::new(r#"^"([^"\\]|\\.)*"|'([^'\\]|\\.)*'"#).unwrap();
|
|
||||||
static ref BLOCK_START: Regex = Regex::new(r"^\{").unwrap();
|
|
||||||
static ref BLOCK_END: Regex = Regex::new(r"^\}").unwrap();
|
|
||||||
static ref PARAN_START: Regex = Regex::new(r"^\(").unwrap();
|
|
||||||
static ref PARAN_END: Regex = Regex::new(r"^\)").unwrap();
|
|
||||||
static ref ARRAY_START: Regex = Regex::new(r"^\[").unwrap();
|
|
||||||
static ref ARRAY_END: Regex = Regex::new(r"^\]").unwrap();
|
|
||||||
static ref COMMA: Regex = Regex::new(r"^,").unwrap();
|
|
||||||
static ref COLON: Regex = Regex::new(r"^:").unwrap();
|
|
||||||
static ref PERIOD: Regex = Regex::new(r"^\.").unwrap();
|
|
||||||
static ref DOUBLE_PERIOD: Regex = Regex::new(r"^\.\.").unwrap();
|
|
||||||
static ref LINECOMMENT: Regex = Regex::new(r"^//.*").unwrap();
|
|
||||||
static ref BLOCKCOMMENT: Regex = Regex::new(r"^/\*[\s\S]*?\*/").unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
fn is_number(character: &[u8]) -> bool {
|
|
||||||
NUMBER.is_match(character)
|
|
||||||
}
|
|
||||||
fn is_whitespace(character: &[u8]) -> bool {
|
|
||||||
WHITESPACE.is_match(character)
|
|
||||||
}
|
|
||||||
fn is_word(character: &[u8]) -> bool {
|
|
||||||
WORD.is_match(character)
|
|
||||||
}
|
|
||||||
fn is_keyword(character: &[u8]) -> bool {
|
|
||||||
KEYWORD.is_match(character)
|
|
||||||
}
|
|
||||||
fn is_string(character: &[u8]) -> bool {
|
|
||||||
match STRING.find(character) {
|
|
||||||
Some(m) => m.start() == 0,
|
|
||||||
None => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
fn is_operator(character: &[u8]) -> bool {
|
|
||||||
OPERATOR.is_match(character)
|
|
||||||
}
|
|
||||||
fn is_block_start(character: &[u8]) -> bool {
|
|
||||||
BLOCK_START.is_match(character)
|
|
||||||
}
|
|
||||||
fn is_block_end(character: &[u8]) -> bool {
|
|
||||||
BLOCK_END.is_match(character)
|
|
||||||
}
|
|
||||||
fn is_paren_start(character: &[u8]) -> bool {
|
|
||||||
PARAN_START.is_match(character)
|
|
||||||
}
|
|
||||||
fn is_paren_end(character: &[u8]) -> bool {
|
|
||||||
PARAN_END.is_match(character)
|
|
||||||
}
|
|
||||||
fn is_array_start(character: &[u8]) -> bool {
|
|
||||||
ARRAY_START.is_match(character)
|
|
||||||
}
|
|
||||||
fn is_array_end(character: &[u8]) -> bool {
|
|
||||||
ARRAY_END.is_match(character)
|
|
||||||
}
|
|
||||||
fn is_comma(character: &[u8]) -> bool {
|
|
||||||
COMMA.is_match(character)
|
|
||||||
}
|
|
||||||
fn is_colon(character: &[u8]) -> bool {
|
|
||||||
COLON.is_match(character)
|
|
||||||
}
|
|
||||||
fn is_double_period(character: &[u8]) -> bool {
|
|
||||||
DOUBLE_PERIOD.is_match(character)
|
|
||||||
}
|
|
||||||
fn is_period(character: &[u8]) -> bool {
|
|
||||||
PERIOD.is_match(character)
|
|
||||||
}
|
|
||||||
fn is_line_comment(character: &[u8]) -> bool {
|
|
||||||
LINECOMMENT.is_match(character)
|
|
||||||
}
|
|
||||||
fn is_block_comment(character: &[u8]) -> bool {
|
|
||||||
BLOCKCOMMENT.is_match(character)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn match_first(s: &[u8], regex: &Regex) -> Option<String> {
|
|
||||||
regex
|
|
||||||
.find(s)
|
|
||||||
.map(|the_match| String::from_utf8_lossy(the_match.as_bytes()).into())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn make_token(token_type: TokenType, value: &str, start: usize) -> Token {
|
|
||||||
Token {
|
|
||||||
token_type,
|
|
||||||
value: value.to_string(),
|
|
||||||
start,
|
|
||||||
end: start + value.len(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn return_token_at_index(str_from_index: &[u8], start_index: usize) -> Option<Token> {
|
|
||||||
if is_string(str_from_index) {
|
|
||||||
return Some(make_token(
|
|
||||||
TokenType::String,
|
|
||||||
&match_first(str_from_index, &STRING)?,
|
|
||||||
start_index,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
let is_line_comment_bool = is_line_comment(str_from_index);
|
|
||||||
if is_line_comment_bool || is_block_comment(str_from_index) {
|
|
||||||
return Some(make_token(
|
|
||||||
if is_line_comment_bool {
|
|
||||||
TokenType::LineComment
|
|
||||||
} else {
|
|
||||||
TokenType::BlockComment
|
|
||||||
},
|
|
||||||
&match_first(
|
|
||||||
str_from_index,
|
|
||||||
if is_line_comment_bool {
|
|
||||||
&LINECOMMENT
|
|
||||||
} else {
|
|
||||||
&BLOCKCOMMENT
|
|
||||||
},
|
|
||||||
)?,
|
|
||||||
start_index,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if is_paren_end(str_from_index) {
|
|
||||||
return Some(make_token(
|
|
||||||
TokenType::Brace,
|
|
||||||
&match_first(str_from_index, &PARAN_END)?,
|
|
||||||
start_index,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if is_paren_start(str_from_index) {
|
|
||||||
return Some(make_token(
|
|
||||||
TokenType::Brace,
|
|
||||||
&match_first(str_from_index, &PARAN_START)?,
|
|
||||||
start_index,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if is_block_start(str_from_index) {
|
|
||||||
return Some(make_token(
|
|
||||||
TokenType::Brace,
|
|
||||||
&match_first(str_from_index, &BLOCK_START)?,
|
|
||||||
start_index,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if is_block_end(str_from_index) {
|
|
||||||
return Some(make_token(
|
|
||||||
TokenType::Brace,
|
|
||||||
&match_first(str_from_index, &BLOCK_END)?,
|
|
||||||
start_index,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if is_array_start(str_from_index) {
|
|
||||||
return Some(make_token(
|
|
||||||
TokenType::Brace,
|
|
||||||
&match_first(str_from_index, &ARRAY_START)?,
|
|
||||||
start_index,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if is_array_end(str_from_index) {
|
|
||||||
return Some(make_token(
|
|
||||||
TokenType::Brace,
|
|
||||||
&match_first(str_from_index, &ARRAY_END)?,
|
|
||||||
start_index,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if is_comma(str_from_index) {
|
|
||||||
return Some(make_token(
|
|
||||||
TokenType::Comma,
|
|
||||||
&match_first(str_from_index, &COMMA)?,
|
|
||||||
start_index,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if is_operator(str_from_index) {
|
|
||||||
return Some(make_token(
|
|
||||||
TokenType::Operator,
|
|
||||||
&match_first(str_from_index, &OPERATOR)?,
|
|
||||||
start_index,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if is_number(str_from_index) {
|
|
||||||
return Some(make_token(
|
|
||||||
TokenType::Number,
|
|
||||||
&match_first(str_from_index, &NUMBER)?,
|
|
||||||
start_index,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if is_keyword(str_from_index) {
|
|
||||||
return Some(make_token(
|
|
||||||
TokenType::Keyword,
|
|
||||||
&match_first(str_from_index, &KEYWORD)?,
|
|
||||||
start_index,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if is_word(str_from_index) {
|
|
||||||
return Some(make_token(
|
|
||||||
TokenType::Word,
|
|
||||||
&match_first(str_from_index, &WORD)?,
|
|
||||||
start_index,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if is_colon(str_from_index) {
|
|
||||||
return Some(make_token(
|
|
||||||
TokenType::Colon,
|
|
||||||
&match_first(str_from_index, &COLON)?,
|
|
||||||
start_index,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if is_double_period(str_from_index) {
|
|
||||||
return Some(make_token(
|
|
||||||
TokenType::DoublePeriod,
|
|
||||||
&match_first(str_from_index, &DOUBLE_PERIOD)?,
|
|
||||||
start_index,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if is_period(str_from_index) {
|
|
||||||
return Some(make_token(
|
|
||||||
TokenType::Period,
|
|
||||||
&match_first(str_from_index, &PERIOD)?,
|
|
||||||
start_index,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
if is_whitespace(str_from_index) {
|
|
||||||
return Some(make_token(
|
|
||||||
TokenType::Whitespace,
|
|
||||||
&match_first(str_from_index, &WHITESPACE)?,
|
|
||||||
start_index,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn lexer(s: &str) -> Vec<Token> {
|
|
||||||
let mut current_index = 0;
|
|
||||||
let mut tokens = Vec::new();
|
|
||||||
let n = s.len();
|
|
||||||
let b = s.as_bytes();
|
|
||||||
while current_index < n {
|
|
||||||
let token = return_token_at_index(&b[current_index..], current_index);
|
|
||||||
let Some(token) = token else {
|
|
||||||
current_index += 1;
|
|
||||||
continue;
|
|
||||||
};
|
|
||||||
let token_length = token.value.len();
|
|
||||||
tokens.push(token);
|
|
||||||
current_index += token_length;
|
|
||||||
}
|
|
||||||
tokens
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use pretty_assertions::assert_eq;

    use super::*;

    // Unit tests for the token classifiers. The positive/negative cases below
    // also document each helper's matching behavior: matching is prefix-based
    // (trailing text is allowed, leading non-matching text is not).

    #[test]
    fn is_number_test() {
        assert!(is_number("1".as_bytes()));
        assert!(is_number("1 abc".as_bytes()));
        assert!(is_number("1.1".as_bytes()));
        assert!(is_number("1.1 abc".as_bytes()));
        assert!(!is_number("a".as_bytes()));

        assert!(is_number("1".as_bytes()));
        assert!(is_number(".1".as_bytes()));
        assert!(is_number("5?".as_bytes()));
        assert!(is_number("5 + 6".as_bytes()));
        assert!(is_number("5 + a".as_bytes()));
        assert!(is_number("5.5".as_bytes()));

        // A digit run immediately followed by letters is not a number token.
        assert!(!is_number("1abc".as_bytes()));
        assert!(!is_number("a".as_bytes()));
        assert!(!is_number("?".as_bytes()));
        assert!(!is_number("?5".as_bytes()));
    }

    #[test]
    fn is_whitespace_test() {
        assert!(is_whitespace(" ".as_bytes()));
        assert!(is_whitespace("  ".as_bytes()));
        assert!(is_whitespace(" a".as_bytes()));
        // Note: whitespace anywhere in the input matches, not only a leading
        // run — the lexer relies on checking whitespace last.
        assert!(is_whitespace("a ".as_bytes()));

        assert!(!is_whitespace("a".as_bytes()));
        assert!(!is_whitespace("?".as_bytes()));
    }

    #[test]
    fn is_word_test() {
        assert!(is_word("a".as_bytes()));
        assert!(is_word("a ".as_bytes()));
        assert!(is_word("a5".as_bytes()));
        assert!(is_word("a5a".as_bytes()));

        // Words must start with a letter, never a digit.
        assert!(!is_word("5".as_bytes()));
        assert!(!is_word("5a".as_bytes()));
        assert!(!is_word("5a5".as_bytes()));
    }

    #[test]
    fn is_string_test() {
        assert!(is_string("\"\"".as_bytes()));
        assert!(is_string("\"a\"".as_bytes()));
        assert!(is_string("\"a\" ".as_bytes()));
        assert!(is_string("\"a\"5".as_bytes()));
        assert!(is_string("'a'5".as_bytes()));
        assert!(is_string("\"with escaped \\\" backslash\"".as_bytes()));

        // Unterminated or non-leading quotes are not strings.
        assert!(!is_string("\"".as_bytes()));
        assert!(!is_string("\"a".as_bytes()));
        assert!(!is_string("a\"".as_bytes()));
        assert!(!is_string(" \"a\"".as_bytes()));
        assert!(!is_string("5\"a\"".as_bytes()));
        assert!(!is_string("a + 'str'".as_bytes()));
        assert!(is_string("'c'".as_bytes()));
    }

    #[test]
    fn is_operator_test() {
        assert!(is_operator("+".as_bytes()));
        assert!(is_operator("+ ".as_bytes()));
        assert!(is_operator("-".as_bytes()));
        assert!(is_operator("<=".as_bytes()));
        assert!(is_operator("<= ".as_bytes()));
        assert!(is_operator(">=".as_bytes()));
        assert!(is_operator(">= ".as_bytes()));
        assert!(is_operator("> ".as_bytes()));
        assert!(is_operator("< ".as_bytes()));
        assert!(is_operator("| ".as_bytes()));
        assert!(is_operator("|> ".as_bytes()));
        assert!(is_operator("^ ".as_bytes()));
        assert!(is_operator("% ".as_bytes()));
        assert!(is_operator("+* ".as_bytes()));

        // Operators must be at the start of the input; commas are not operators.
        assert!(!is_operator("5 + 5".as_bytes()));
        assert!(!is_operator("a".as_bytes()));
        assert!(!is_operator("a+".as_bytes()));
        assert!(!is_operator("a+5".as_bytes()));
        assert!(!is_operator("5a+5".as_bytes()));
        assert!(!is_operator(", newVar".as_bytes()));
        assert!(!is_operator(",".as_bytes()));
    }

    #[test]
    fn is_block_start_test() {
        assert!(is_block_start("{".as_bytes()));
        assert!(is_block_start("{ ".as_bytes()));
        assert!(is_block_start("{5".as_bytes()));
        assert!(is_block_start("{a".as_bytes()));
        assert!(is_block_start("{5 ".as_bytes()));

        assert!(!is_block_start("5".as_bytes()));
        assert!(!is_block_start("5 + 5".as_bytes()));
        assert!(!is_block_start("5{ + 5".as_bytes()));
        assert!(!is_block_start("a{ + 5".as_bytes()));
        assert!(!is_block_start(" { + 5".as_bytes()));
    }

    #[test]
    fn is_block_end_test() {
        assert!(is_block_end("}".as_bytes()));
        assert!(is_block_end("} ".as_bytes()));
        assert!(is_block_end("}5".as_bytes()));
        assert!(is_block_end("}5 ".as_bytes()));

        assert!(!is_block_end("5".as_bytes()));
        assert!(!is_block_end("5 + 5".as_bytes()));
        assert!(!is_block_end("5} + 5".as_bytes()));
        assert!(!is_block_end(" } + 5".as_bytes()));
    }

    #[test]
    fn is_paren_start_test() {
        assert!(is_paren_start("(".as_bytes()));
        assert!(is_paren_start("( ".as_bytes()));
        assert!(is_paren_start("(5".as_bytes()));
        assert!(is_paren_start("(5 ".as_bytes()));
        assert!(is_paren_start("(5 + 5".as_bytes()));
        assert!(is_paren_start("(5 + 5)".as_bytes()));
        assert!(is_paren_start("(5 + 5) ".as_bytes()));

        assert!(!is_paren_start("5".as_bytes()));
        assert!(!is_paren_start("5 + 5".as_bytes()));
        assert!(!is_paren_start("5( + 5)".as_bytes()));
        assert!(!is_paren_start(" ( + 5)".as_bytes()));
    }

    #[test]
    fn is_paren_end_test() {
        assert!(is_paren_end(")".as_bytes()));
        assert!(is_paren_end(") ".as_bytes()));
        assert!(is_paren_end(")5".as_bytes()));
        assert!(is_paren_end(")5 ".as_bytes()));

        assert!(!is_paren_end("5".as_bytes()));
        assert!(!is_paren_end("5 + 5".as_bytes()));
        assert!(!is_paren_end("5) + 5".as_bytes()));
        assert!(!is_paren_end(" ) + 5".as_bytes()));
    }

    #[test]
    fn is_comma_test() {
        assert!(is_comma(",".as_bytes()));
        assert!(is_comma(", ".as_bytes()));
        assert!(is_comma(",5".as_bytes()));
        assert!(is_comma(",5 ".as_bytes()));

        assert!(!is_comma("5".as_bytes()));
        assert!(!is_comma("5 + 5".as_bytes()));
        assert!(!is_comma("5, + 5".as_bytes()));
        assert!(!is_comma(" , + 5".as_bytes()));
    }

    #[test]
    fn is_line_comment_test() {
        assert!(is_line_comment("//".as_bytes()));
        assert!(is_line_comment("// ".as_bytes()));
        assert!(is_line_comment("//5".as_bytes()));
        assert!(is_line_comment("//5 ".as_bytes()));

        assert!(!is_line_comment("5".as_bytes()));
        assert!(!is_line_comment("5 + 5".as_bytes()));
        assert!(!is_line_comment("5// + 5".as_bytes()));
        assert!(!is_line_comment(" // + 5".as_bytes()));
    }

    #[test]
    fn is_block_comment_test() {
        assert!(is_block_comment("/* */".as_bytes()));
        assert!(is_block_comment("/***/".as_bytes()));
        assert!(is_block_comment("/*5*/".as_bytes()));
        assert!(is_block_comment("/*5 */".as_bytes()));

        // Unterminated block comments are rejected.
        assert!(!is_block_comment("/*".as_bytes()));
        assert!(!is_block_comment("5".as_bytes()));
        assert!(!is_block_comment("5 + 5".as_bytes()));
        assert!(!is_block_comment("5/* + 5".as_bytes()));
        assert!(!is_block_comment(" /* + 5".as_bytes()));
        // Multi-line block comments not starting at offset 0 are rejected too.
        assert!(!is_block_comment(
            r#" /* and
here
*/
"#
            .as_bytes()
        ));
    }

    #[test]
    fn make_token_test() {
        // `end` must be start + value length.
        assert_eq!(
            make_token(TokenType::Keyword, "const", 56),
            Token {
                token_type: TokenType::Keyword,
                value: "const".to_string(),
                start: 56,
                end: 61,
            }
        );
    }

    #[test]
    fn return_token_at_index_test() {
        assert_eq!(
            return_token_at_index("const".as_bytes(), 0),
            Some(Token {
                token_type: TokenType::Keyword,
                value: "const".to_string(),
                start: 0,
                end: 5,
            })
        );
        // The start index is taken at face value; offsets are relative to it.
        assert_eq!(
            return_token_at_index("4554".as_bytes(), 2),
            Some(Token {
                token_type: TokenType::Number,
                value: "4554".to_string(),
                start: 2,
                end: 6,
            })
        );
    }

    #[test]
    fn lexer_test() {
        assert_eq!(
            lexer("const a=5"),
            vec![
                Token {
                    token_type: TokenType::Keyword,
                    value: "const".to_string(),
                    start: 0,
                    end: 5,
                },
                Token {
                    token_type: TokenType::Whitespace,
                    value: " ".to_string(),
                    start: 5,
                    end: 6,
                },
                Token {
                    token_type: TokenType::Word,
                    value: "a".to_string(),
                    start: 6,
                    end: 7,
                },
                Token {
                    token_type: TokenType::Operator,
                    value: "=".to_string(),
                    start: 7,
                    end: 8,
                },
                Token {
                    token_type: TokenType::Number,
                    value: "5".to_string(),
                    start: 8,
                    end: 9,
                },
            ]
        );
        assert_eq!(
            lexer("54 + 22500 + 6"),
            vec![
                Token {
                    token_type: TokenType::Number,
                    value: "54".to_string(),
                    start: 0,
                    end: 2,
                },
                Token {
                    token_type: TokenType::Whitespace,
                    value: " ".to_string(),
                    start: 2,
                    end: 3,
                },
                Token {
                    token_type: TokenType::Operator,
                    value: "+".to_string(),
                    start: 3,
                    end: 4,
                },
                Token {
                    token_type: TokenType::Whitespace,
                    value: " ".to_string(),
                    start: 4,
                    end: 5,
                },
                Token {
                    token_type: TokenType::Number,
                    value: "22500".to_string(),
                    start: 5,
                    end: 10,
                },
                Token {
                    token_type: TokenType::Whitespace,
                    value: " ".to_string(),
                    start: 10,
                    end: 11,
                },
                Token {
                    token_type: TokenType::Operator,
                    value: "+".to_string(),
                    start: 11,
                    end: 12,
                },
                Token {
                    token_type: TokenType::Whitespace,
                    value: " ".to_string(),
                    start: 12,
                    end: 13,
                },
                Token {
                    token_type: TokenType::Number,
                    value: "6".to_string(),
                    start: 13,
                    end: 14,
                },
            ]
        );
    }

    // We have this as a test so we can ensure it never panics with an unwrap in the server.
    #[test]
    fn test_token_type_to_semantic_token_type() {
        let semantic_types = TokenType::all_semantic_token_types().unwrap();
        assert!(!semantic_types.is_empty());
    }

    #[test]
    fn test_lexer_negative_word() {
        // A leading minus is tokenised as an operator, not part of the word.
        assert_eq!(
            lexer("-legX"),
            vec![
                Token {
                    token_type: TokenType::Operator,
                    value: "-".to_string(),
                    start: 0,
                    end: 1,
                },
                Token {
                    token_type: TokenType::Word,
                    value: "legX".to_string(),
                    start: 1,
                    end: 5,
                },
            ]
        );
    }
}
|
|
@ -36,7 +36,7 @@ pub async fn execute_wasm(
|
|||||||
// wasm_bindgen wrapper for execute
|
// wasm_bindgen wrapper for execute
|
||||||
#[cfg(target_arch = "wasm32")]
|
#[cfg(target_arch = "wasm32")]
|
||||||
#[wasm_bindgen]
|
#[wasm_bindgen]
|
||||||
pub async fn modify_ast_for_sketch(
|
pub async fn modify_ast_for_sketch_wasm(
|
||||||
manager: kcl_lib::engine::conn_wasm::EngineCommandManager,
|
manager: kcl_lib::engine::conn_wasm::EngineCommandManager,
|
||||||
program_str: &str,
|
program_str: &str,
|
||||||
sketch_name: &str,
|
sketch_name: &str,
|
||||||
@ -83,14 +83,14 @@ pub fn deserialize_files(data: &[u8]) -> Result<JsValue, JsError> {
|
|||||||
// wasm_bindgen wrapper for lexer
|
// wasm_bindgen wrapper for lexer
|
||||||
// test for this function and by extension lexer are done in javascript land src/lang/tokeniser.test.ts
|
// test for this function and by extension lexer are done in javascript land src/lang/tokeniser.test.ts
|
||||||
#[wasm_bindgen]
|
#[wasm_bindgen]
|
||||||
pub fn lexer_js(js: &str) -> Result<JsValue, JsError> {
|
pub fn lexer_wasm(js: &str) -> Result<JsValue, JsError> {
|
||||||
let tokens = kcl_lib::tokeniser::lexer(js);
|
let tokens = kcl_lib::token::lexer(js);
|
||||||
Ok(JsValue::from_serde(&tokens)?)
|
Ok(JsValue::from_serde(&tokens)?)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[wasm_bindgen]
|
#[wasm_bindgen]
|
||||||
pub fn parse_js(js: &str) -> Result<JsValue, String> {
|
pub fn parse_wasm(js: &str) -> Result<JsValue, String> {
|
||||||
let tokens = kcl_lib::tokeniser::lexer(js);
|
let tokens = kcl_lib::token::lexer(js);
|
||||||
let parser = kcl_lib::parser::Parser::new(tokens);
|
let parser = kcl_lib::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().map_err(String::from)?;
|
let program = parser.ast().map_err(String::from)?;
|
||||||
// The serde-wasm-bindgen does not work here because of weird HashMap issues so we use the
|
// The serde-wasm-bindgen does not work here because of weird HashMap issues so we use the
|
||||||
@ -149,7 +149,7 @@ pub async fn lsp_run(config: ServerConfig) -> Result<(), JsValue> {
|
|||||||
let stdlib_signatures = get_signatures_from_stdlib(&stdlib).map_err(|e| e.to_string())?;
|
let stdlib_signatures = get_signatures_from_stdlib(&stdlib).map_err(|e| e.to_string())?;
|
||||||
// We can unwrap here because we know the tokeniser is valid, since
|
// We can unwrap here because we know the tokeniser is valid, since
|
||||||
// we have a test for it.
|
// we have a test for it.
|
||||||
let token_types = kcl_lib::tokeniser::TokenType::all_semantic_token_types().unwrap();
|
let token_types = kcl_lib::token::TokenType::all_semantic_token_types().unwrap();
|
||||||
|
|
||||||
let (service, socket) = LspService::new(|client| Backend {
|
let (service, socket) = LspService::new(|client| Backend {
|
||||||
client,
|
client,
|
||||||
|
@ -306,5 +306,5 @@ const svg = startSketchAt([0, 0])
|
|||||||
|> lineTo([13.44, -10.92], %) // HorizontalLineRelative
|
|> lineTo([13.44, -10.92], %) // HorizontalLineRelative
|
||||||
|> lineTo([13.44, -13.44], %) // VerticalLineHorizonal
|
|> lineTo([13.44, -13.44], %) // VerticalLineHorizonal
|
||||||
|> lineTo([14.28, -13.44], %) // HorizontalLineRelative
|
|> lineTo([14.28, -13.44], %) // HorizontalLineRelative
|
||||||
|> close(%);
|
|> close(%)
|
||||||
show(svg);
|
show(svg)
|
||||||
|
@ -466,5 +466,5 @@ const svg = startSketchAt([0, 0])
|
|||||||
|> bezierCurve({ control1: [-4, -3], control2: [-2.66, -3.67], to: [-3.32, -3.34] }, %) // CubicBezierAbsolute
|
|> bezierCurve({ control1: [-4, -3], control2: [-2.66, -3.67], to: [-3.32, -3.34] }, %) // CubicBezierAbsolute
|
||||||
|> bezierCurve({ control1: [0, -2], control2: [-2.68, -2.67], to: [-1.36, -2.34] }, %) // CubicBezierAbsolute
|
|> bezierCurve({ control1: [0, -2], control2: [-2.68, -2.67], to: [-1.36, -2.34] }, %) // CubicBezierAbsolute
|
||||||
|> bezierCurve({ control1: [0, -0], control2: [0, -1.34], to: [0, -0.68] }, %) // CubicBezierAbsolute
|
|> bezierCurve({ control1: [0, -0], control2: [0, -1.34], to: [0, -0.68] }, %) // CubicBezierAbsolute
|
||||||
|> close(%);
|
|> close(%)
|
||||||
show(svg);
|
show(svg)
|
||||||
|
@ -32,7 +32,7 @@ async fn execute_and_snapshot(code: &str) -> Result<image::DynamicImage> {
|
|||||||
// Create a temporary file to write the output to.
|
// Create a temporary file to write the output to.
|
||||||
let output_file = std::env::temp_dir().join(format!("kcl_output_{}.png", uuid::Uuid::new_v4()));
|
let output_file = std::env::temp_dir().join(format!("kcl_output_{}.png", uuid::Uuid::new_v4()));
|
||||||
|
|
||||||
let tokens = kcl_lib::tokeniser::lexer(code);
|
let tokens = kcl_lib::token::lexer(code);
|
||||||
let parser = kcl_lib::parser::Parser::new(tokens);
|
let parser = kcl_lib::parser::Parser::new(tokens);
|
||||||
let program = parser.ast()?;
|
let program = parser.ast()?;
|
||||||
let mut mem: kcl_lib::executor::ProgramMemory = Default::default();
|
let mut mem: kcl_lib::executor::ProgramMemory = Default::default();
|
||||||
@ -148,6 +148,41 @@ show(bracket)"#;
|
|||||||
twenty_twenty::assert_image("tests/executor/outputs/parametric.png", &result, 1.0);
|
twenty_twenty::assert_image("tests/executor/outputs/parametric.png", &result, 1.0);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[tokio::test(flavor = "multi_thread")]
|
||||||
|
async fn serial_test_execute_parametric_with_tan_arc_example() {
|
||||||
|
let code = r#"const sigmaAllow = 15000 // psi
|
||||||
|
const width = 11 // inch
|
||||||
|
const p = 150 // Force on shelf - lbs
|
||||||
|
const distance = 12 // inches
|
||||||
|
const FOS = 2
|
||||||
|
const thickness = sqrt(distance * p * FOS * 6 / ( sigmaAllow * width ))
|
||||||
|
const filletR = thickness * 2
|
||||||
|
const shelfMountL = 9
|
||||||
|
const wallMountL = 8
|
||||||
|
|
||||||
|
const bracket = startSketchAt([0, 0])
|
||||||
|
|> line([0, wallMountL], %)
|
||||||
|
|> tangentalArc({
|
||||||
|
radius: filletR,
|
||||||
|
offset: 90
|
||||||
|
}, %)
|
||||||
|
|> line([-shelfMountL, 0], %)
|
||||||
|
|> line([0, -thickness], %)
|
||||||
|
|> line([shelfMountL, 0], %)
|
||||||
|
|> tangentalArc({
|
||||||
|
radius: filletR - thickness,
|
||||||
|
offset: -90
|
||||||
|
}, %)
|
||||||
|
|> line([0, -wallMountL], %)
|
||||||
|
|> close(%)
|
||||||
|
|> extrude(width, %)
|
||||||
|
|
||||||
|
show(bracket)"#;
|
||||||
|
|
||||||
|
let result = execute_and_snapshot(code).await.unwrap();
|
||||||
|
twenty_twenty::assert_image("tests/executor/outputs/parametric_with_tan_arc.png", &result, 1.0);
|
||||||
|
}
|
||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread")]
|
#[tokio::test(flavor = "multi_thread")]
|
||||||
async fn serial_test_execute_engine_error_return() {
|
async fn serial_test_execute_engine_error_return() {
|
||||||
let code = r#"const part001 = startSketchAt([5.5229, 5.25217])
|
let code = r#"const part001 = startSketchAt([5.5229, 5.25217])
|
||||||
@ -210,3 +245,84 @@ show(b2)"#;
|
|||||||
1.0,
|
1.0,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[tokio::test(flavor = "multi_thread")]
|
||||||
|
async fn test_close_arc() {
|
||||||
|
let code = r#"const center = [0,0]
|
||||||
|
const radius = 40
|
||||||
|
const height = 3
|
||||||
|
|
||||||
|
const body = startSketchAt([center[0]+radius, center[1]])
|
||||||
|
|> arc({angle_end: 360, angle_start: 0, radius: radius}, %)
|
||||||
|
|> close(%)
|
||||||
|
|> extrude(height, %)
|
||||||
|
|
||||||
|
show(body)"#;
|
||||||
|
|
||||||
|
let result = execute_and_snapshot(code).await.unwrap();
|
||||||
|
twenty_twenty::assert_image("tests/executor/outputs/close_arc.png", &result, 1.0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test(flavor = "multi_thread")]
|
||||||
|
async fn test_negative_args() {
|
||||||
|
let code = r#"const width = 5
|
||||||
|
const height = 10
|
||||||
|
const length = 12
|
||||||
|
|
||||||
|
fn box = (sk1, sk2, scale) => {
|
||||||
|
const boxSketch = startSketchAt([sk1, sk2])
|
||||||
|
|> line([0, scale], %)
|
||||||
|
|> line([scale, 0], %)
|
||||||
|
|> line([0, -scale], %)
|
||||||
|
|> close(%)
|
||||||
|
|> extrude(scale, %)
|
||||||
|
return boxSketch
|
||||||
|
}
|
||||||
|
|
||||||
|
box(0, 0, 5)
|
||||||
|
box(10, 23, 8)
|
||||||
|
let thing = box(-12, -15, 10)
|
||||||
|
box(-20, -5, 10)"#;
|
||||||
|
|
||||||
|
let result = execute_and_snapshot(code).await.unwrap();
|
||||||
|
twenty_twenty::assert_image("tests/executor/outputs/negative_args.png", &result, 1.0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test(flavor = "multi_thread")]
|
||||||
|
async fn test_basic_tangental_arc() {
|
||||||
|
let code = r#"const boxSketch = startSketchAt([0, 0])
|
||||||
|
|> line([0, 10], %)
|
||||||
|
|> tangentalArc({radius: 5, offset: 90}, %)
|
||||||
|
|> line([5, -15], %)
|
||||||
|
|> extrude(10, %)
|
||||||
|
"#;
|
||||||
|
|
||||||
|
let result = execute_and_snapshot(code).await.unwrap();
|
||||||
|
twenty_twenty::assert_image("tests/executor/outputs/tangental_arc.png", &result, 1.0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test(flavor = "multi_thread")]
|
||||||
|
async fn test_basic_tangental_arc_with_point() {
|
||||||
|
let code = r#"const boxSketch = startSketchAt([0, 0])
|
||||||
|
|> line([0, 10], %)
|
||||||
|
|> tangentalArc([-5, 5], %)
|
||||||
|
|> line([5, -15], %)
|
||||||
|
|> extrude(10, %)
|
||||||
|
"#;
|
||||||
|
|
||||||
|
let result = execute_and_snapshot(code).await.unwrap();
|
||||||
|
twenty_twenty::assert_image("tests/executor/outputs/tangental_arc_with_point.png", &result, 1.0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test(flavor = "multi_thread")]
|
||||||
|
async fn test_basic_tangental_arc_to() {
|
||||||
|
let code = r#"const boxSketch = startSketchAt([0, 0])
|
||||||
|
|> line([0, 10], %)
|
||||||
|
|> tangentalArcTo([-5, 15], %)
|
||||||
|
|> line([5, -15], %)
|
||||||
|
|> extrude(10, %)
|
||||||
|
"#;
|
||||||
|
|
||||||
|
let result = execute_and_snapshot(code).await.unwrap();
|
||||||
|
twenty_twenty::assert_image("tests/executor/outputs/tangental_arc_to.png", &result, 1.0);
|
||||||
|
}
|
||||||
|
BIN
src/wasm-lib/tests/executor/outputs/close_arc.png
Normal file
BIN
src/wasm-lib/tests/executor/outputs/close_arc.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 96 KiB |
BIN
src/wasm-lib/tests/executor/outputs/negative_args.png
Normal file
BIN
src/wasm-lib/tests/executor/outputs/negative_args.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 78 KiB |
BIN
src/wasm-lib/tests/executor/outputs/parametric_with_tan_arc.png
Normal file
BIN
src/wasm-lib/tests/executor/outputs/parametric_with_tan_arc.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 71 KiB |
BIN
src/wasm-lib/tests/executor/outputs/tangental_arc.png
Normal file
BIN
src/wasm-lib/tests/executor/outputs/tangental_arc.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 70 KiB |
BIN
src/wasm-lib/tests/executor/outputs/tangental_arc_to.png
Normal file
BIN
src/wasm-lib/tests/executor/outputs/tangental_arc_to.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 70 KiB |
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user